lang
stringclasses
1 value
license
stringclasses
13 values
stderr
stringlengths
0
350
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
7
45.1k
new_contents
stringlengths
0
1.87M
new_file
stringlengths
6
292
old_contents
stringlengths
0
1.87M
message
stringlengths
6
9.26k
old_file
stringlengths
6
292
subject
stringlengths
0
4.45k
Java
bsd-3-clause
70c5b8e0af6d4e3c5745bc51f31ad81d10579fea
0
edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon
package org.lockss.devtools.plugindef;

import java.util.*;

import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import javax.swing.text.*;

import org.lockss.daemon.*;
import org.lockss.util.*;

/**
 * A non-modal dialog for editing a {@link PrintfTemplate}.  The template can
 * be edited in two tabbed views: a rich-text "Editor View" in which inserted
 * parameters are rendered as embedded {@link JLabel} components, and a raw
 * "Printf View" with separate format-string and parameter text areas.  Edits
 * are made to a working copy and are written back to the original template
 * only when the Save button is pressed.
 */
public class PrintfEditor
    extends JDialog
    implements EDPEditor, ConfigParamListener {

  /** Template supplied via setCellData(); updated only on Save. */
  protected PrintfTemplate originalTemplate;
  /** Working copy edited by the two views. */
  protected PrintfTemplate editableTemplate;
  private EDPCellData m_data;
  /** Maps printf parameter names to their ConfigParamDescr type (Integer). */
  private HashMap paramKeys;
  /** Maps match-combo display names to regexp fragments. */
  private HashMap matchesKeys = new HashMap();

  /** Characters reserved by regexp syntax; escaped by escapeReservedChars(). */
  static char[] RESERVED_CHARS =
      {'[', '\\', '^', '$', '.', '|', '?', '*', '+', '(', ')'};
  static String RESERVED_STRING = new String(RESERVED_CHARS);

  /** Attributes for plain (non-parameter) text in the editor pane. */
  static SimpleAttributeSet PLAIN_ATTR = new SimpleAttributeSet();
  static {
    StyleConstants.setForeground(PLAIN_ATTR, Color.black);
    StyleConstants.setBold(PLAIN_ATTR, false);
    StyleConstants.setFontFamily(PLAIN_ATTR, "Helvetica");
    StyleConstants.setFontSize(PLAIN_ATTR, 14);
  }

  // Counter used to generate unique "Parameter-N" style names.
  int numParameters = 0;
  JPanel printfPanel = new JPanel();
  ButtonGroup buttonGroup = new ButtonGroup();
  JPanel buttonPanel = new JPanel();
  JButton cancelButton = new JButton();
  JButton saveButton = new JButton();
  JLabel formatLabel = new JLabel();
  FlowLayout flowLayout1 = new FlowLayout();
  JTextArea formatTextArea = new JTextArea();
  JPanel parameterPanel = new JPanel();
  JLabel parameterLabel = new JLabel();
  JTextArea parameterTextArea = new JTextArea();
  JButton insertButton = new JButton();
  JComboBox paramComboBox = new JComboBox();
  GridBagLayout gridBagLayout1 = new GridBagLayout();
  ButtonGroup buttonGroup1 = new ButtonGroup();
  TitledBorder titledBorder2;
  JComboBox matchComboBox = new JComboBox();
  JButton insertMatchButton = new JButton();
  JPanel matchPanel = new JPanel();
  JPanel InsertPanel = new JPanel();
  GridLayout gridLayout1 = new GridLayout();
  GridBagLayout gridBagLayout2 = new GridBagLayout();
  GridBagLayout gridBagLayout3 = new GridBagLayout();
  JTabbedPane printfTabPane = new JTabbedPane();
  JTextPane editorPane = new JTextPane();
  JScrollPane editorPanel = new JScrollPane();
  /** Index of the selected tab: 0 = editor view, 1 = printf view. */
  int selectedPane = 0;

  public PrintfEditor(Frame frame, String title) {
    super(frame, title, false);
    originalTemplate = new PrintfTemplate();
    editableTemplate = new PrintfTemplate();
    try {
      jbInit();
      pack();
      initMatches();
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }

  /** Populates the match combo box with the predefined regexp fragments. */
  private void initMatches() {
    matchesKeys.put("String Literal", "");
    matchesKeys.put("Any Number", "[0-9]+");
    matchesKeys.put("Anything", ".*");
    matchesKeys.put("Start", "^");
    matchesKeys.put("End", "$");
    for (Iterator it = matchesKeys.keySet().iterator(); it.hasNext(); ) {
      matchComboBox.addItem(it.next());
    }
  }

  /** Builds and lays out all Swing components. */
  private void jbInit() throws Exception {
    saveButton.setText("Save");
    saveButton.addActionListener(
        new PrintfTemplateEditor_saveButton_actionAdapter(this));
    cancelButton.setText("Cancel");
    cancelButton.addActionListener(
        new PrintfTemplateEditor_cancelButton_actionAdapter(this));
    this.setTitle(this.getTitle() + " Template Editor");
    printfPanel.setLayout(gridBagLayout1);
    formatLabel.setFont(new java.awt.Font("DialogInput", 0, 12));
    formatLabel.setText("Format String:");
    buttonPanel.setLayout(flowLayout1);
    printfPanel.setBorder(BorderFactory.createEtchedBorder());
    printfPanel.setMinimumSize(new Dimension(100, 160));
    printfPanel.setPreferredSize(new Dimension(380, 160));
    parameterPanel.setLayout(gridBagLayout2);
    parameterLabel.setText("Parameters:");
    parameterLabel.setFont(new java.awt.Font("DialogInput", 0, 12));
    parameterTextArea.setMinimumSize(new Dimension(100, 25));
    parameterTextArea.setPreferredSize(new Dimension(200, 25));
    parameterTextArea.setEditable(true);
    parameterTextArea.setText("");
    insertButton.setMaximumSize(new Dimension(136, 20));
    insertButton.setMinimumSize(new Dimension(136, 20));
    insertButton.setPreferredSize(new Dimension(136, 20));
    insertButton.setToolTipText(
        "insert the format in the format string and add parameter to list.");
    insertButton.setText("Insert Parameter");
    insertButton.addActionListener(
        new PrintfTemplateEditor_insertButton_actionAdapter(this));
    formatTextArea.setMinimumSize(new Dimension(100, 25));
    formatTextArea.setPreferredSize(new Dimension(200, 15));
    formatTextArea.setText("");
    parameterPanel.setBorder(null);
    parameterPanel.setMinimumSize(new Dimension(60, 40));
    parameterPanel.setPreferredSize(new Dimension(300, 40));
    insertMatchButton.addActionListener(
        new PrintfTemplateEditor_insertMatchButton_actionAdapter(this));
    insertMatchButton.setText("Insert Match");
    insertMatchButton.setToolTipText(
        "insert the match in the format string and add parameter to list.");
    insertMatchButton.setPreferredSize(new Dimension(136, 20));
    insertMatchButton.setMinimumSize(new Dimension(136, 20));
    insertMatchButton.setMaximumSize(new Dimension(136, 20));
    matchPanel.setPreferredSize(new Dimension(300, 40));
    matchPanel.setBorder(null);
    matchPanel.setMinimumSize(new Dimension(60, 60));
    matchPanel.setLayout(gridBagLayout3);
    InsertPanel.setLayout(gridLayout1);
    gridLayout1.setColumns(1);
    gridLayout1.setRows(2);
    gridLayout1.setVgap(0);
    InsertPanel.setBorder(BorderFactory.createEtchedBorder());
    InsertPanel.setMinimumSize(new Dimension(100, 100));
    InsertPanel.setPreferredSize(new Dimension(380, 120));
    editorPane.setText("");
    editorPane.addKeyListener(new PrintfEditor_editorPane_keyAdapter(this));
    printfTabPane.addChangeListener(
        new PrintfEditor_printfTabPane_changeAdapter(this));
    parameterPanel.add(insertButton,
        new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE,
            new Insets(8, 6, 13, 8), 0, 10));
    parameterPanel.add(paramComboBox,
        new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL,
            new Insets(8, 8, 13, 0), 258, 11));
    InsertPanel.add(matchPanel, null);
    InsertPanel.add(parameterPanel, null);
    buttonPanel.add(cancelButton, null);
    buttonPanel.add(saveButton, null);
    this.getContentPane().add(printfTabPane, BorderLayout.NORTH);
    this.getContentPane().add(InsertPanel, BorderLayout.CENTER);
    matchPanel.add(insertMatchButton,
        new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE,
            new Insets(8, 6, 13, 8), 0, 10));
    matchPanel.add(matchComboBox,
        new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL,
            new Insets(8, 8, 13, 0), 258, 11));
    printfPanel.add(parameterLabel,
        new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE,
            new Insets(7, 5, 0, 5), 309, 0));
    printfPanel.add(formatLabel,
        new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE,
            new Insets(4, 5, 0, 5), 288, 0));
    printfPanel.add(formatTextArea,
        new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0,
            GridBagConstraints.CENTER, GridBagConstraints.BOTH,
            new Insets(6, 5, 0, 5), 300, 34));
    printfPanel.add(parameterTextArea,
        new GridBagConstraints(0, 3, 1, 1, 1.0, 1.0,
            GridBagConstraints.CENTER, GridBagConstraints.BOTH,
            new Insets(6, 5, 6, 5), 300, 34));
    printfTabPane.addTab("Editor View", null, editorPanel, "View in Editor");
    // Fixed tooltip typo: was "Vies as Printf".
    printfTabPane.addTab("Printf View", null, printfPanel, "View as Printf");
    editorPane.setCharacterAttributes(PLAIN_ATTR, true);
    editorPane.addStyle("PLAIN", editorPane.getLogicalStyle());
    editorPanel.getViewport().add(editorPane, null);
    this.getContentPane().add(buttonPanel, BorderLayout.SOUTH);
    buttonGroup.add(cancelButton);
  }

  /**
   * notifiyParamsChanged
   * (misspelled name is fixed by the ConfigParamListener interface)
   */
  public void notifiyParamsChanged() {
    updateParams(m_data);
  }

  /**
   * Attaches this editor to a cell's data: registers for parameter-change
   * notifications, loads the cell's template, and shows the match panel only
   * when crawl rules are being edited.
   *
   * @param data EDPCellData
   */
  public void setCellData(EDPCellData data) {
    m_data = data;
    data.getPlugin().addParamListener(this);
    setTemplate((PrintfTemplate) data.getData());
    // initialize the combobox
    updateParams(data);
    if (data.getKey().equals(EditableDefinablePlugin.AU_RULES)) {
      matchPanel.setVisible(true);
    }
    else {
      matchPanel.setVisible(false);
    }
  }

  /** Commits the working template back to the original and hides the dialog. */
  void saveButton_actionPerformed(ActionEvent e) {
    updateEditableTemplate(selectedPane);
    originalTemplate.setFormat(editableTemplate.m_format);
    originalTemplate.setTokens(editableTemplate.m_tokens);
    m_data.updateTemplateData(originalTemplate);
    setVisible(false);
  }

  /** Discards pending edits and hides the dialog. */
  void cancelButton_actionPerformed(ActionEvent e) {
    setVisible(false);
  }

  /** Syncs the outgoing tab into the template, then refreshes the new tab. */
  void printfTabPane_stateChanged(ChangeEvent e) {
    updateEditableTemplate(selectedPane);
    selectedPane = printfTabPane.getSelectedIndex();
    updatePane(selectedPane);
  }

  /** Updates the editable template from the view identified by {@code pane}. */
  void updateEditableTemplate(int pane) {
    switch (pane) {
      case 0: // use the editor to update the template
        updateTemplateFromEditor(editableTemplate);
        break;
      case 1: // use the printf text areas to update the template.
        updateTemplateFromPrintf();
        break;
    }
  }

  /**
   * Inserts the printf conversion for the selected parameter (or a literal
   * string, with '%' doubled) into whichever view is active.
   */
  void insertButton_actionPerformed(ActionEvent e) {
    String key = (String) paramComboBox.getSelectedItem();
    String format = "";
    if (key.equals("String Literal")) {
      format = escapePrintfChars(
          (String) JOptionPane.showInputDialog(this,
              "Enter the string you wish to input",
              "String Literal Input",
              JOptionPane.OK_CANCEL_OPTION));
      if (StringUtil.isNullString(format)) {
        return;
      }
    }
    else {
      int type = ((Integer) paramKeys.get(key)).intValue();
      switch (type) {
        case ConfigParamDescr.TYPE_STRING:
        case ConfigParamDescr.TYPE_URL:
        case ConfigParamDescr.TYPE_BOOLEAN:
          format = "%s";
          break;
        case ConfigParamDescr.TYPE_INT:
        case ConfigParamDescr.TYPE_POS_INT:
          NumericPaddingDialog dialog = new NumericPaddingDialog();
          Point pos = this.getLocationOnScreen();
          dialog.setLocation(pos.x, pos.y);
          dialog.pack();
          dialog.setVisible(true); // replaces deprecated Dialog.show()
          StringBuffer fbuf = new StringBuffer("%");
          int width = dialog.getPaddingSize();
          boolean is_zero = dialog.useZero();
          if (width > 0) {
            fbuf.append(".");
            if (is_zero) {
              fbuf.append(0);
            }
            fbuf.append(width);
          }
          fbuf.append("d");
          format = fbuf.toString();
          break;
        case ConfigParamDescr.TYPE_YEAR:
          format = "%d";
      }
    }
    if (selectedPane == 0) {
      if (key.equals("String Literal")) {
        insertText(format, PLAIN_ATTR, editorPane.getSelectionStart());
      }
      else {
        insertParameter(key, format, editorPane.getSelectionStart());
      }
    }
    else if (selectedPane == 1) {
      // add the combobox data value to the edit box
      int pos = formatTextArea.getCaretPosition();
      formatTextArea.insert(format, pos);
      if (!key.equals("String Literal")) {
        pos = parameterTextArea.getCaretPosition();
        parameterTextArea.insert(", " + key, pos);
      }
    }
  }

  /**
   * Inserts the regexp fragment for the selected match (or an escaped literal
   * string) into whichever view is active.
   */
  void insertMatchButton_actionPerformed(ActionEvent e) {
    String key = (String) matchComboBox.getSelectedItem();
    String format = (String) matchesKeys.get(key);
    if (key.equals("String Literal")) {
      format = escapeReservedChars(
          (String) JOptionPane.showInputDialog(this,
              "Enter the string you wish to match",
              "String Literal Input",
              JOptionPane.OK_CANCEL_OPTION));
      if (StringUtil.isNullString(format)) {
        return;
      }
    }
    if (selectedPane == 0) {
      insertText(format, PLAIN_ATTR, editorPane.getSelectionStart());
    }
    else {
      // add the combobox data value to the edit box
      int pos = formatTextArea.getCaretPosition();
      formatTextArea.insert(format, pos);
    }
  }

  /**
   * On backspace over an embedded parameter component, removes the whole
   * parameter element so a parameter cannot be partially deleted.
   */
  void editorPane_keyTyped(KeyEvent e) {
    int pos = editorPane.getCaretPosition() - 1;
    StyledDocument doc = editorPane.getStyledDocument();
    Element el = doc.getCharacterElement(pos);
    AttributeSet attr = el.getAttributes();
    String el_name = (String) attr.getAttribute(StyleConstants.NameAttribute);
    // Style name may be null; only component-backed parameter runs qualify.
    if (e.getKeyChar() == '\b'
        && el_name != null
        && el_name.startsWith("Parameter")
        && StyleConstants.getComponent(attr) != null) {
      try {
        doc.remove(el.getStartOffset(),
                   el.getEndOffset() - el.getStartOffset());
      }
      catch (BadLocationException ex) {
        // ignore: offsets were obtained from the document itself
      }
    }
  }

  /**
   * Inserts a parameter as a blue label component wrapped in a uniquely
   * named "Parameter-N" style; the underlying text is the printf conversion.
   */
  private void insertParameter(String param, String format, int pos) {
    try {
      StyledDocument doc = (StyledDocument) editorPane.getDocument();
      // The component must first be wrapped in a style
      Style style = doc.addStyle("Parameter-" + numParameters, null);
      JLabel label = new JLabel(param);
      label.setAlignmentY(0.8f); // make sure we line up
      label.setFont(new Font("Helvetica", Font.PLAIN, 14));
      label.setForeground(Color.BLUE);
      label.setToolTipText(format);
      StyleConstants.setComponent(style, label);
      doc.insertString(pos, format, style);
      numParameters++;
    }
    catch (BadLocationException e) {
      // ignore: insertion position is caller-supplied and best-effort
    }
  }

  /** Inserts plain text with the given attributes at {@code pos}. */
  private void insertText(String text, AttributeSet set, int pos) {
    try {
      editorPane.getDocument().insertString(pos, text, set);
    }
    catch (BadLocationException ex) {
      // ignore: best-effort insertion
    }
  }

  /** Appends text to the end of the editor document. */
  private void appendText(String text, AttributeSet set) {
    insertText(text, set, editorPane.getDocument().getLength());
  }

  /** Copies the printf-view text areas into the editable template. */
  private void updateTemplateFromPrintf() {
    String format = formatTextArea.getText();
    String parameters = parameterTextArea.getText();
    editableTemplate.setFormat(format);
    editableTemplate.setParameters(parameters);
  }

  /**
   * Rebuilds the template from the editor document: the raw text becomes the
   * format string and each "Parameter-N" styled label contributes its token.
   */
  private void updateTemplateFromEditor(PrintfTemplate template) {
    ArrayList params = new ArrayList();
    String format = null;
    int text_length = editorPane.getDocument().getLength();
    try {
      format = editorPane.getDocument().getText(0, text_length);
    }
    catch (BadLocationException ex1) {
      // ignore: range [0, length) is always valid; format stays null
    }
    Element section_el = editorPane.getDocument().getDefaultRootElement();
    // Get number of paragraphs.
    int num_para = section_el.getElementCount();
    for (int p_count = 0; p_count < num_para; p_count++) {
      Element para_el = section_el.getElement(p_count);
      // Enumerate the content elements
      int num_cont = para_el.getElementCount();
      for (int c_count = 0; c_count < num_cont; c_count++) {
        Element content_el = para_el.getElement(c_count);
        AttributeSet attr = content_el.getAttributes();
        // Get the name of the style applied to this content element; may be null
        String sn = (String) attr.getAttribute(StyleConstants.NameAttribute);
        // Null-guard added: unnamed runs used to throw NullPointerException.
        if (sn != null && sn.startsWith("Parameter")) {
          // we extract the label.
          JLabel l = (JLabel) StyleConstants.getComponent(attr);
          if (l != null) {
            params.add(l.getText());
          }
        }
      }
    }
    template.setFormat(format);
    template.setTokens(params);
  }

  /** Installs a template and refreshes the currently visible view. */
  protected void setTemplate(PrintfTemplate template) {
    originalTemplate = template;
    editableTemplate.setFormat(template.m_format);
    editableTemplate.setTokens(template.m_tokens);
    updatePane(selectedPane);
  }

  /** Reloads the parameter combo box from the plugin's printf descriptors. */
  private void updateParams(EDPCellData data) {
    paramComboBox.removeAllItems();
    paramKeys = data.getPlugin().getPrintfDescrs();
    paramComboBox.addItem("String Literal");
    for (Iterator it = paramKeys.keySet().iterator(); it.hasNext(); ) {
      paramComboBox.addItem(it.next());
    }
    paramComboBox.setEnabled(true);
    paramComboBox.setSelectedIndex(0);
    paramComboBox.setToolTipText(
        "Select a parameter to insert into the format string");
    insertButton.setEnabled(true);
  }

  /**
   * updatePane
   *
   * @param sel int tab index to refresh (0 = editor, 1 = printf)
   */
  private void updatePane(int sel) {
    switch (sel) {
      case 0: // editor view
        updateEditorView();
        break;
      case 1: // printf view
        updatePrintfView();
        break;
    }
  }

  /** Refreshes the printf view from the editable template. */
  private void updatePrintfView() {
    formatTextArea.setText(editableTemplate.m_format);
    parameterTextArea.setText(editableTemplate.getTokenString());
  }

  /** Rebuilds the editor document from the editable template's elements. */
  private void updateEditorView() {
    editorPane.setText("");
    numParameters = 0;
    java.util.List elements = editableTemplate.getPrintfElements();
    for (Iterator it = elements.iterator(); it.hasNext(); ) {
      PrintfUtil.PrintfElement el = (PrintfUtil.PrintfElement) it.next();
      if (el.getFormat().equals("\0")) {
        appendText(el.getElement(), PLAIN_ATTR);
      }
      else {
        insertParameter(el.getElement(), el.getFormat(),
                        editorPane.getDocument().getLength());
      }
    }
  }

  /**
   * Return a copy of the string with all reserved regexp chars
   * escaped by backslash.
   * @param str the string to add escapes to
   * @return String return a string with escapes or "" if str is null
   */
  private String escapeReservedChars(String str) {
    if (str == null) return "";
    StringBuffer sb = new StringBuffer();
    for (int ci = 0; ci < str.length(); ci++) {
      char ch = str.charAt(ci);
      if (RESERVED_STRING.indexOf(ch) >= 0) {
        sb.append('\\');
      }
      sb.append(ch);
    }
    return sb.toString();
  }

  /**
   * Return a copy of the string with every '%' doubled so it is a printf
   * literal, or "" if str is null.
   */
  private String escapePrintfChars(String str) {
    if (str == null) return "";
    StringBuffer sb = new StringBuffer();
    for (int ci = 0; ci < str.length(); ci++) {
      char ch = str.charAt(ci);
      if (ch == '%') {
        sb.append('%');
      }
      sb.append(ch);
    }
    return sb.toString();
  }
}

/** Forwards Save button presses to the editor. */
class PrintfTemplateEditor_saveButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_saveButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.saveButton_actionPerformed(e);
  }
}

/** Forwards Cancel button presses to the editor. */
class PrintfTemplateEditor_cancelButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_cancelButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.cancelButton_actionPerformed(e);
  }
}

/** Forwards Insert Parameter button presses to the editor. */
class PrintfTemplateEditor_insertButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_insertButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.insertButton_actionPerformed(e);
  }
}

/** Forwards Insert Match button presses to the editor. */
class PrintfTemplateEditor_insertMatchButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_insertMatchButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.insertMatchButton_actionPerformed(e);
  }
}

/** Forwards tab-selection changes to the editor. */
class PrintfEditor_printfTabPane_changeAdapter
    implements javax.swing.event.ChangeListener {
  PrintfEditor adaptee;

  PrintfEditor_printfTabPane_changeAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void stateChanged(ChangeEvent e) {
    adaptee.printfTabPane_stateChanged(e);
  }
}

/** Forwards key events in the editor pane to the editor. */
class PrintfEditor_editorPane_keyAdapter
    extends java.awt.event.KeyAdapter {
  PrintfEditor adaptee;

  PrintfEditor_editorPane_keyAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void keyTyped(KeyEvent e) {
    adaptee.editorPane_keyTyped(e);
  }
}
tools/src/org/lockss/devtools/plugindef/PrintfEditor.java
package org.lockss.devtools.plugindef;

import java.util.*;

import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import javax.swing.text.*;

import org.lockss.daemon.*;
import org.lockss.util.*;

/**
 * A non-modal dialog for editing a {@link PrintfTemplate}.  The template can
 * be edited in two tabbed views: a rich-text "Editor View" in which inserted
 * parameters are rendered as embedded {@link JLabel} components, and a raw
 * "Printf View" with separate format-string and parameter text areas.  Edits
 * are made to a working copy and are written back to the original template
 * only when the Save button is pressed.
 */
public class PrintfEditor
    extends JDialog
    implements EDPEditor, ConfigParamListener {

  /** Template supplied via setCellData(); updated only on Save. */
  protected PrintfTemplate originalTemplate;
  /** Working copy edited by the two views. */
  protected PrintfTemplate editableTemplate;
  private EDPCellData m_data;
  /** Maps printf parameter names to their ConfigParamDescr type (Integer). */
  private HashMap paramKeys;
  /** Maps match-combo display names to regexp fragments. */
  private HashMap matchesKeys = new HashMap();

  /** Characters reserved by regexp syntax; escaped by escapeReservedChars(). */
  static char[] RESERVED_CHARS =
      {'[', '\\', '^', '$', '.', '|', '?', '*', '+', '(', ')'};
  static String RESERVED_STRING = new String(RESERVED_CHARS);

  /** Attributes for plain (non-parameter) text in the editor pane. */
  static SimpleAttributeSet PLAIN_ATTR = new SimpleAttributeSet();
  static {
    StyleConstants.setForeground(PLAIN_ATTR, Color.black);
    StyleConstants.setBold(PLAIN_ATTR, false);
    StyleConstants.setFontFamily(PLAIN_ATTR, "Helvetica");
    StyleConstants.setFontSize(PLAIN_ATTR, 14);
  }

  // Counter used to generate unique "Parameter-N" style names.
  int numParameters = 0;
  JPanel printfPanel = new JPanel();
  ButtonGroup buttonGroup = new ButtonGroup();
  JPanel buttonPanel = new JPanel();
  JButton cancelButton = new JButton();
  JButton saveButton = new JButton();
  JLabel formatLabel = new JLabel();
  FlowLayout flowLayout1 = new FlowLayout();
  JTextArea formatTextArea = new JTextArea();
  JPanel parameterPanel = new JPanel();
  JLabel parameterLabel = new JLabel();
  JTextArea parameterTextArea = new JTextArea();
  JButton insertButton = new JButton();
  JComboBox paramComboBox = new JComboBox();
  GridBagLayout gridBagLayout1 = new GridBagLayout();
  ButtonGroup buttonGroup1 = new ButtonGroup();
  TitledBorder titledBorder2;
  JComboBox matchComboBox = new JComboBox();
  JButton insertMatchButton = new JButton();
  JPanel matchPanel = new JPanel();
  JPanel InsertPanel = new JPanel();
  GridLayout gridLayout1 = new GridLayout();
  GridBagLayout gridBagLayout2 = new GridBagLayout();
  GridBagLayout gridBagLayout3 = new GridBagLayout();
  JTabbedPane printfTabPane = new JTabbedPane();
  JTextPane editorPane = new JTextPane();
  JScrollPane editorPanel = new JScrollPane();
  /** Index of the selected tab: 0 = editor view, 1 = printf view. */
  int selectedPane = 0;

  public PrintfEditor(Frame frame, String title) {
    super(frame, title, false);
    originalTemplate = new PrintfTemplate();
    editableTemplate = new PrintfTemplate();
    try {
      jbInit();
      pack();
      initMatches();
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }

  /** Populates the match combo box with the predefined regexp fragments. */
  private void initMatches() {
    matchesKeys.put("String Literal", "");
    matchesKeys.put("Any Number", "[0-9]+");
    matchesKeys.put("Anything", ".*");
    matchesKeys.put("Start", "^");
    matchesKeys.put("End", "$");
    for (Iterator it = matchesKeys.keySet().iterator(); it.hasNext(); ) {
      matchComboBox.addItem(it.next());
    }
  }

  /** Builds and lays out all Swing components. */
  private void jbInit() throws Exception {
    saveButton.setText("Save");
    saveButton.addActionListener(
        new PrintfTemplateEditor_saveButton_actionAdapter(this));
    cancelButton.setText("Cancel");
    cancelButton.addActionListener(
        new PrintfTemplateEditor_cancelButton_actionAdapter(this));
    this.setTitle(this.getTitle() + " Template Editor");
    printfPanel.setLayout(gridBagLayout1);
    formatLabel.setFont(new java.awt.Font("DialogInput", 0, 12));
    formatLabel.setText("Format String:");
    buttonPanel.setLayout(flowLayout1);
    printfPanel.setBorder(BorderFactory.createEtchedBorder());
    printfPanel.setMinimumSize(new Dimension(100, 160));
    printfPanel.setPreferredSize(new Dimension(380, 160));
    parameterPanel.setLayout(gridBagLayout2);
    parameterLabel.setText("Parameters:");
    parameterLabel.setFont(new java.awt.Font("DialogInput", 0, 12));
    parameterTextArea.setMinimumSize(new Dimension(100, 25));
    parameterTextArea.setPreferredSize(new Dimension(200, 25));
    parameterTextArea.setEditable(true);
    parameterTextArea.setText("");
    insertButton.setMaximumSize(new Dimension(136, 20));
    insertButton.setMinimumSize(new Dimension(136, 20));
    insertButton.setPreferredSize(new Dimension(136, 20));
    insertButton.setToolTipText(
        "insert the format in the format string and add parameter to list.");
    insertButton.setText("Insert Parameter");
    insertButton.addActionListener(
        new PrintfTemplateEditor_insertButton_actionAdapter(this));
    formatTextArea.setMinimumSize(new Dimension(100, 25));
    formatTextArea.setPreferredSize(new Dimension(200, 15));
    formatTextArea.setText("");
    parameterPanel.setBorder(null);
    parameterPanel.setMinimumSize(new Dimension(60, 40));
    parameterPanel.setPreferredSize(new Dimension(300, 40));
    insertMatchButton.addActionListener(
        new PrintfTemplateEditor_insertMatchButton_actionAdapter(this));
    insertMatchButton.setText("Insert Match");
    insertMatchButton.setToolTipText(
        "insert the match in the format string and add parameter to list.");
    insertMatchButton.setPreferredSize(new Dimension(136, 20));
    insertMatchButton.setMinimumSize(new Dimension(136, 20));
    insertMatchButton.setMaximumSize(new Dimension(136, 20));
    matchPanel.setPreferredSize(new Dimension(300, 40));
    matchPanel.setBorder(null);
    matchPanel.setMinimumSize(new Dimension(60, 60));
    matchPanel.setLayout(gridBagLayout3);
    InsertPanel.setLayout(gridLayout1);
    gridLayout1.setColumns(1);
    gridLayout1.setRows(2);
    gridLayout1.setVgap(0);
    InsertPanel.setBorder(BorderFactory.createEtchedBorder());
    InsertPanel.setMinimumSize(new Dimension(100, 100));
    InsertPanel.setPreferredSize(new Dimension(380, 120));
    editorPane.setText("");
    editorPane.addKeyListener(new PrintfEditor_editorPane_keyAdapter(this));
    printfTabPane.addChangeListener(
        new PrintfEditor_printfTabPane_changeAdapter(this));
    parameterPanel.add(insertButton,
        new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE,
            new Insets(8, 6, 13, 8), 0, 10));
    parameterPanel.add(paramComboBox,
        new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL,
            new Insets(8, 8, 13, 0), 258, 11));
    InsertPanel.add(matchPanel, null);
    InsertPanel.add(parameterPanel, null);
    buttonPanel.add(cancelButton, null);
    buttonPanel.add(saveButton, null);
    this.getContentPane().add(printfTabPane, BorderLayout.NORTH);
    this.getContentPane().add(InsertPanel, BorderLayout.CENTER);
    matchPanel.add(insertMatchButton,
        new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE,
            new Insets(8, 6, 13, 8), 0, 10));
    matchPanel.add(matchComboBox,
        new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL,
            new Insets(8, 8, 13, 0), 258, 11));
    printfPanel.add(parameterLabel,
        new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE,
            new Insets(7, 5, 0, 5), 309, 0));
    printfPanel.add(formatLabel,
        new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE,
            new Insets(4, 5, 0, 5), 288, 0));
    printfPanel.add(formatTextArea,
        new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0,
            GridBagConstraints.CENTER, GridBagConstraints.BOTH,
            new Insets(6, 5, 0, 5), 300, 34));
    printfPanel.add(parameterTextArea,
        new GridBagConstraints(0, 3, 1, 1, 1.0, 1.0,
            GridBagConstraints.CENTER, GridBagConstraints.BOTH,
            new Insets(6, 5, 6, 5), 300, 34));
    printfTabPane.addTab("Editor View", null, editorPanel, "View in Editor");
    // Fixed tooltip typo: was "Vies as Printf".
    printfTabPane.addTab("Printf View", null, printfPanel, "View as Printf");
    editorPane.setCharacterAttributes(PLAIN_ATTR, true);
    editorPane.addStyle("PLAIN", editorPane.getLogicalStyle());
    editorPanel.getViewport().add(editorPane, null);
    this.getContentPane().add(buttonPanel, BorderLayout.SOUTH);
    buttonGroup.add(cancelButton);
  }

  /**
   * notifiyParamsChanged
   * (misspelled name is fixed by the ConfigParamListener interface)
   */
  public void notifiyParamsChanged() {
    updateParams(m_data);
  }

  /**
   * Attaches this editor to a cell's data: registers for parameter-change
   * notifications, loads the cell's template, and shows the match panel only
   * when crawl rules are being edited.
   *
   * @param data EDPCellData
   */
  public void setCellData(EDPCellData data) {
    m_data = data;
    data.getPlugin().addParamListener(this);
    setTemplate((PrintfTemplate) data.getData());
    // initialize the combobox
    updateParams(data);
    if (data.getKey().equals(EditableDefinablePlugin.AU_RULES)) {
      matchPanel.setVisible(true);
    }
    else {
      matchPanel.setVisible(false);
    }
  }

  /** Commits the working template back to the original and hides the dialog. */
  void saveButton_actionPerformed(ActionEvent e) {
    updateEditableTemplate(selectedPane);
    originalTemplate.setFormat(editableTemplate.m_format);
    originalTemplate.setTokens(editableTemplate.m_tokens);
    m_data.updateTemplateData(originalTemplate);
    setVisible(false);
  }

  /** Discards pending edits and hides the dialog. */
  void cancelButton_actionPerformed(ActionEvent e) {
    setVisible(false);
  }

  /** Syncs the outgoing tab into the template, then refreshes the new tab. */
  void printfTabPane_stateChanged(ChangeEvent e) {
    updateEditableTemplate(selectedPane);
    selectedPane = printfTabPane.getSelectedIndex();
    updatePane(selectedPane);
  }

  /** Updates the editable template from the view identified by {@code pane}. */
  void updateEditableTemplate(int pane) {
    switch (pane) {
      case 0: // use the editor to update the template
        updateTemplateFromEditor(editableTemplate);
        break;
      case 1: // use the printf text areas to update the template.
        updateTemplateFromPrintf();
        break;
    }
  }

  /**
   * Inserts the printf conversion for the selected parameter (or a literal
   * string, with '%' doubled) into whichever view is active.
   */
  void insertButton_actionPerformed(ActionEvent e) {
    String key = (String) paramComboBox.getSelectedItem();
    String format = "";
    if (key.equals("String Literal")) {
      format = escapePrintfChars(
          (String) JOptionPane.showInputDialog(this,
              "Enter the string you wish to input",
              "String Literal Input",
              JOptionPane.OK_CANCEL_OPTION));
      if (StringUtil.isNullString(format)) {
        return;
      }
    }
    else {
      int type = ((Integer) paramKeys.get(key)).intValue();
      switch (type) {
        case ConfigParamDescr.TYPE_STRING:
        case ConfigParamDescr.TYPE_URL:
        case ConfigParamDescr.TYPE_BOOLEAN:
          format = "%s";
          break;
        case ConfigParamDescr.TYPE_INT:
        case ConfigParamDescr.TYPE_POS_INT:
          NumericPaddingDialog dialog = new NumericPaddingDialog();
          Point pos = this.getLocationOnScreen();
          dialog.setLocation(pos.x, pos.y);
          dialog.pack();
          dialog.setVisible(true); // replaces deprecated Dialog.show()
          StringBuffer fbuf = new StringBuffer("%");
          int width = dialog.getPaddingSize();
          boolean is_zero = dialog.useZero();
          if (width > 0) {
            fbuf.append(".");
            if (is_zero) {
              fbuf.append(0);
            }
            fbuf.append(width);
          }
          fbuf.append("d");
          format = fbuf.toString();
          break;
        case ConfigParamDescr.TYPE_YEAR:
          format = "%d";
      }
    }
    if (selectedPane == 0) {
      if (key.equals("String Literal")) {
        insertText(format, PLAIN_ATTR, editorPane.getSelectionStart());
      }
      else {
        insertParameter(key, format, editorPane.getSelectionStart());
      }
    }
    else if (selectedPane == 1) {
      // add the combobox data value to the edit box
      int pos = formatTextArea.getCaretPosition();
      formatTextArea.insert(format, pos);
      if (!key.equals("String Literal")) {
        pos = parameterTextArea.getCaretPosition();
        parameterTextArea.insert(", " + key, pos);
      }
    }
  }

  /**
   * Inserts the regexp fragment for the selected match (or an escaped literal
   * string) into whichever view is active.
   */
  void insertMatchButton_actionPerformed(ActionEvent e) {
    String key = (String) matchComboBox.getSelectedItem();
    String format = (String) matchesKeys.get(key);
    if (key.equals("String Literal")) {
      format = escapeReservedChars(
          (String) JOptionPane.showInputDialog(this,
              "Enter the string you wish to match",
              "String Literal Input",
              JOptionPane.OK_CANCEL_OPTION));
      if (StringUtil.isNullString(format)) {
        return;
      }
    }
    if (selectedPane == 0) {
      insertText(format, PLAIN_ATTR, editorPane.getSelectionStart());
    }
    else {
      // add the combobox data value to the edit box
      int pos = formatTextArea.getCaretPosition();
      formatTextArea.insert(format, pos);
    }
  }

  /**
   * On backspace over an embedded parameter component, removes the whole
   * parameter element so a parameter cannot be partially deleted.
   * (Debug System.out tracing removed; guards added so only component-backed
   * parameter runs are removed and a null style name cannot NPE.)
   */
  void editorPane_keyTyped(KeyEvent e) {
    int pos = editorPane.getCaretPosition() - 1;
    StyledDocument doc = editorPane.getStyledDocument();
    Element el = doc.getCharacterElement(pos);
    AttributeSet attr = el.getAttributes();
    String el_name = (String) attr.getAttribute(StyleConstants.NameAttribute);
    if (e.getKeyChar() == '\b'
        && el_name != null
        && el_name.startsWith("Parameter")
        && StyleConstants.getComponent(attr) != null) {
      try {
        doc.remove(el.getStartOffset(),
                   el.getEndOffset() - el.getStartOffset());
      }
      catch (BadLocationException ex) {
        // ignore: offsets were obtained from the document itself
      }
    }
  }

  /**
   * Inserts a parameter as a blue label component wrapped in a uniquely
   * named "Parameter-N" style; the underlying text is the printf conversion.
   */
  private void insertParameter(String param, String format, int pos) {
    try {
      StyledDocument doc = (StyledDocument) editorPane.getDocument();
      // The component must first be wrapped in a style
      Style style = doc.addStyle("Parameter-" + numParameters, null);
      JLabel label = new JLabel(param);
      label.setAlignmentY(0.8f); // make sure we line up
      label.setFont(new Font("Helvetica", Font.PLAIN, 14));
      label.setForeground(Color.BLUE);
      label.setToolTipText(format);
      StyleConstants.setComponent(style, label);
      doc.insertString(pos, format, style);
      numParameters++;
    }
    catch (BadLocationException e) {
      // ignore: insertion position is caller-supplied and best-effort
    }
  }

  /** Inserts plain text with the given attributes at {@code pos}. */
  private void insertText(String text, AttributeSet set, int pos) {
    try {
      editorPane.getDocument().insertString(pos, text, set);
    }
    catch (BadLocationException ex) {
      // ignore: best-effort insertion
    }
  }

  /** Appends text to the end of the editor document. */
  private void appendText(String text, AttributeSet set) {
    insertText(text, set, editorPane.getDocument().getLength());
  }

  /** Copies the printf-view text areas into the editable template. */
  private void updateTemplateFromPrintf() {
    String format = formatTextArea.getText();
    String parameters = parameterTextArea.getText();
    editableTemplate.setFormat(format);
    editableTemplate.setParameters(parameters);
  }

  /**
   * Rebuilds the template from the editor document: the raw text becomes the
   * format string and each "Parameter-N" styled label contributes its token.
   */
  private void updateTemplateFromEditor(PrintfTemplate template) {
    ArrayList params = new ArrayList();
    String format = null;
    int text_length = editorPane.getDocument().getLength();
    try {
      format = editorPane.getDocument().getText(0, text_length);
    }
    catch (BadLocationException ex1) {
      // ignore: range [0, length) is always valid; format stays null
    }
    Element section_el = editorPane.getDocument().getDefaultRootElement();
    // Get number of paragraphs.
    int num_para = section_el.getElementCount();
    for (int p_count = 0; p_count < num_para; p_count++) {
      Element para_el = section_el.getElement(p_count);
      // Enumerate the content elements
      int num_cont = para_el.getElementCount();
      for (int c_count = 0; c_count < num_cont; c_count++) {
        Element content_el = para_el.getElement(c_count);
        AttributeSet attr = content_el.getAttributes();
        // Get the name of the style applied to this content element; may be null
        String sn = (String) attr.getAttribute(StyleConstants.NameAttribute);
        // Null-guard added: unnamed runs used to throw NullPointerException.
        if (sn != null && sn.startsWith("Parameter")) {
          // we extract the label.
          JLabel l = (JLabel) StyleConstants.getComponent(attr);
          if (l != null) {
            params.add(l.getText());
          }
        }
      }
    }
    template.setFormat(format);
    template.setTokens(params);
  }

  /** Installs a template and refreshes the currently visible view. */
  protected void setTemplate(PrintfTemplate template) {
    originalTemplate = template;
    editableTemplate.setFormat(template.m_format);
    editableTemplate.setTokens(template.m_tokens);
    updatePane(selectedPane);
  }

  /** Reloads the parameter combo box from the plugin's printf descriptors. */
  private void updateParams(EDPCellData data) {
    paramComboBox.removeAllItems();
    paramKeys = data.getPlugin().getPrintfDescrs();
    paramComboBox.addItem("String Literal");
    for (Iterator it = paramKeys.keySet().iterator(); it.hasNext(); ) {
      paramComboBox.addItem(it.next());
    }
    paramComboBox.setEnabled(true);
    paramComboBox.setSelectedIndex(0);
    paramComboBox.setToolTipText(
        "Select a parameter to insert into the format string");
    insertButton.setEnabled(true);
  }

  /**
   * updatePane
   *
   * @param sel int tab index to refresh (0 = editor, 1 = printf)
   */
  private void updatePane(int sel) {
    switch (sel) {
      case 0: // editor view
        updateEditorView();
        break;
      case 1: // printf view
        updatePrintfView();
        break;
    }
  }

  /** Refreshes the printf view from the editable template. */
  private void updatePrintfView() {
    formatTextArea.setText(editableTemplate.m_format);
    parameterTextArea.setText(editableTemplate.getTokenString());
  }

  /** Rebuilds the editor document from the editable template's elements. */
  private void updateEditorView() {
    editorPane.setText("");
    numParameters = 0;
    java.util.List elements = editableTemplate.getPrintfElements();
    for (Iterator it = elements.iterator(); it.hasNext(); ) {
      PrintfUtil.PrintfElement el = (PrintfUtil.PrintfElement) it.next();
      if (el.getFormat().equals("\0")) {
        appendText(el.getElement(), PLAIN_ATTR);
      }
      else {
        insertParameter(el.getElement(), el.getFormat(),
                        editorPane.getDocument().getLength());
      }
    }
  }

  /**
   * Return a copy of the string with all reserved regexp chars
   * escaped by backslash.
   * @param str the string to add escapes to
   * @return String return a string with escapes or "" if str is null
   */
  private String escapeReservedChars(String str) {
    if (str == null) return "";
    StringBuffer sb = new StringBuffer();
    for (int ci = 0; ci < str.length(); ci++) {
      char ch = str.charAt(ci);
      if (RESERVED_STRING.indexOf(ch) >= 0) {
        sb.append('\\');
      }
      sb.append(ch);
    }
    return sb.toString();
  }

  /**
   * Return a copy of the string with every '%' doubled so it is a printf
   * literal, or "" if str is null.
   */
  private String escapePrintfChars(String str) {
    if (str == null) return "";
    StringBuffer sb = new StringBuffer();
    for (int ci = 0; ci < str.length(); ci++) {
      char ch = str.charAt(ci);
      if (ch == '%') {
        sb.append('%');
      }
      sb.append(ch);
    }
    return sb.toString();
  }
}

/** Forwards Save button presses to the editor. */
class PrintfTemplateEditor_saveButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_saveButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.saveButton_actionPerformed(e);
  }
}

/** Forwards Cancel button presses to the editor. */
class PrintfTemplateEditor_cancelButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_cancelButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.cancelButton_actionPerformed(e);
  }
}

/** Forwards Insert Parameter button presses to the editor. */
class PrintfTemplateEditor_insertButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_insertButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.insertButton_actionPerformed(e);
  }
}

/** Forwards Insert Match button presses to the editor. */
class PrintfTemplateEditor_insertMatchButton_actionAdapter
    implements java.awt.event.ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_insertMatchButton_actionAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void actionPerformed(ActionEvent e) {
    adaptee.insertMatchButton_actionPerformed(e);
  }
}

/** Forwards tab-selection changes to the editor. */
class PrintfEditor_printfTabPane_changeAdapter
    implements javax.swing.event.ChangeListener {
  PrintfEditor adaptee;

  PrintfEditor_printfTabPane_changeAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void stateChanged(ChangeEvent e) {
    adaptee.printfTabPane_stateChanged(e);
  }
}

/** Forwards key events in the editor pane to the editor. */
class PrintfEditor_editorPane_keyAdapter
    extends java.awt.event.KeyAdapter {
  PrintfEditor adaptee;

  PrintfEditor_editorPane_keyAdapter(PrintfEditor adaptee) {
    this.adaptee = adaptee;
  }

  public void keyTyped(KeyEvent e) {
    adaptee.editorPane_keyTyped(e);
  }
}
Fix backspacing error in PrintfEditor git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@3056 4f837ed2-42f5-46e7-a7a5-fa17313484d4
tools/src/org/lockss/devtools/plugindef/PrintfEditor.java
Fix backspacing error in PrintfEditor
Java
bsd-3-clause
131c5b61053aa8ec3c09acc7a83fa162c4df01ba
0
NCIP/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,NCIP/caaers
package gov.nih.nci.cabig.caaers.api.impl; import gov.nih.nci.cabig.caaers.api.AdverseEventQueryService; import gov.nih.nci.cabig.caaers.api.SearchOptions; import gov.nih.nci.cabig.caaers.dao.AdverseEventDao; import gov.nih.nci.cabig.caaers.domain.AdverseEvent; import gov.nih.nci.cabig.caaers.domain.Participant; import gov.nih.nci.cabig.caaers.domain.Study; import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment; import java.util.List; public class AdverseEventQueryServiceImpl implements AdverseEventQueryService { private AdverseEventDao adverseEventDao; public List<AdverseEvent> getByParticipant(Participant participant) { return adverseEventDao.getByParticipant(participant); } public List<AdverseEvent> getByParticipant(Participant participant, AdverseEvent adverseEvent){ return adverseEventDao.getByParticipant(participant,adverseEvent); } public List<AdverseEvent> getByStudy(Study study) { return adverseEventDao.getByStudy(study); } public List<AdverseEvent> getByStudy(Study study, AdverseEvent adverseEvent) { return adverseEventDao.getByStudy(study, adverseEvent); } //public List<AdverseEvent> getByStudyParticipantAssignment(StudyParticipantAssignment studyParticipantAssignment) { //return adverseEventDao.getByStudyParticipantAssignment(studyParticipantAssignment); //} public void setAdverseEventDao(AdverseEventDao adverseEventDao) { this.adverseEventDao = adverseEventDao; } }
projects/core/src/main/java/gov/nih/nci/cabig/caaers/api/impl/AdverseEventQueryServiceImpl.java
package gov.nih.nci.cabig.caaers.api.impl; import gov.nih.nci.cabig.caaers.api.AdverseEventQueryService; import gov.nih.nci.cabig.caaers.api.SearchOptions; import gov.nih.nci.cabig.caaers.dao.AdverseEventDao; import gov.nih.nci.cabig.caaers.domain.AdverseEvent; import gov.nih.nci.cabig.caaers.domain.Participant; import gov.nih.nci.cabig.caaers.domain.Study; import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment; import java.util.List; public class AdverseEventQueryServiceImpl implements AdverseEventQueryService { private AdverseEventDao adverseEventDao; public List<AdverseEvent> getByParticipant(Participant participant) { return adverseEventDao.getByParticipant(participant); } public List<AdverseEvent> getByParticipant(Participant participant, AdverseEvent adverseEvent){ return adverseEventDao.getByParticipant(participant,adverseEvent); } public List<AdverseEvent> getByStudy(Study study) { return adverseEventDao.getByStudy(study); } public List<AdverseEvent> getByStudy(Study study, AdverseEvent adverseEvent) { return adverseEventDao.getByStudy(study, adverseEvent); } public List<AdverseEvent> getByStudyParticipantAssignment(StudyParticipantAssignment studyParticipantAssignment) { return adverseEventDao.getByStudyParticipantAssignment(studyParticipantAssignment); } public void setAdverseEventDao(AdverseEventDao adverseEventDao) { this.adverseEventDao = adverseEventDao; } }
SVN-Revision: 5646
projects/core/src/main/java/gov/nih/nci/cabig/caaers/api/impl/AdverseEventQueryServiceImpl.java
Java
bsd-3-clause
2420b5406b41b8833b7339671e6053162df6c280
0
EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,openxal/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,luxiaohan/openxal-csns-luxh,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,openxal/openxal,openxal/openxal,luxiaohan/openxal-csns-luxh,openxal/openxal,openxal/openxal,luxiaohan/openxal-csns-luxh
/* * PVLogSnapshotChooser.java * * Created on February 2, 2005, 4:44 PM * * Copyright (c) 2001-2005 Oak Ridge National Laboratory * Oak Ridge, Tenessee 37831, U.S.A. * All rights reserved. */ package xal.service.pvlogger.apputils.browser; import xal.service.pvlogger.*; import xal.tools.database.*; import xal.extension.widgets.swing.KeyValueTableModel; import java.sql.*; import java.util.Date; import java.util.Collection; import javax.swing.*; import java.awt.Dimension; import java.awt.Container; import java.awt.event.*; import java.awt.*; import javax.swing.event.*; /** * This class provides a UI component (a JDialog) for selecting PV Logger ID. It is modified from the * pvlogbrowser application. * * @author Paul Chu */ public class PVLogSnapshotChooser { JDialog pvLogDialog; /** browser model */ protected BrowserModel _model = new BrowserModel(); /** controller of the selection state */ protected BrowserController _controller; private JTextField pvLogIdField = new JTextField(8); private long pvLogId = 0; private String groupName = "default"; /** Creates a new instance of PVLogSnapshotChooser */ public PVLogSnapshotChooser() { this( null ); } /** Constructor */ public PVLogSnapshotChooser( final Frame owner ) { this( owner, false ); } /** Primary constructor */ public PVLogSnapshotChooser( final Frame owner, final boolean modal ) { if ( owner != null ) { pvLogDialog = new JDialog( owner ); pvLogDialog.setLocationRelativeTo( owner ); } else { pvLogDialog = new JDialog(); } pvLogDialog.setTitle( "PV Logger Snapshot Chooser" ); pvLogDialog.setModal( modal ); _controller = new BrowserController( _model ); } public void setGroup(String name) { groupName = name; } public JDialog choosePVLogId() { requestUserConnection(); JSplitPane mainView = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, buildSnapshotListView(), buildSnapshotDetailView()); pvLogDialog.setLayout(new BorderLayout()); pvLogDialog.add(buildQueryView(), BorderLayout.NORTH); pvLogDialog.add(mainView, 
BorderLayout.CENTER); pvLogDialog.add(buildResultView(), BorderLayout.SOUTH); pvLogDialog.pack(); pvLogDialog.setVisible( true ); return pvLogDialog; } protected void requestUserConnection() { // initialize PVLogger ConnectionDictionary dict = PVLogger.newBrowsingConnectionDictionary(); Connection conn = dict.getDatabaseAdaptor().getConnection(dict); _model.setDatabaseConnection(conn, dict); _model.connect(); } /** * Build the view for querying the database for the machine snapshots. * @return the query view */ protected Container buildQueryView() { Box queryView = new Box(BoxLayout.X_AXIS); queryView.setBorder( BorderFactory.createEtchedBorder() ); final int BUTTON_GAP = 20; queryView.add( Box.createHorizontalStrut(BUTTON_GAP) ); queryView.add( new JLabel("From:") ); final SpinnerDateModel fromDateModel = new SpinnerDateModel(); JSpinner fromSpinner = new JSpinner(fromDateModel); fromSpinner.setEditor( new JSpinner.DateEditor(fromSpinner, "MMM dd, yyyy HH:mm:ss") ); fromSpinner.setMaximumSize( new Dimension(200, 25) ); queryView.add(fromSpinner); try { _model.selectGroup(groupName); } catch( Exception exception ) { throw new RuntimeException( exception ); } queryView.add( Box.createHorizontalStrut(10) ); queryView.add( new JLabel("To:") ); final SpinnerDateModel toDateModel = new SpinnerDateModel(); JSpinner toSpinner = new JSpinner(toDateModel); toSpinner.setEditor( new JSpinner.DateEditor(toSpinner, "MMM dd, yyyy HH:mm:ss") ); toSpinner.setMaximumSize( new Dimension(200, 25) ); queryView.add(toSpinner); queryView.add( Box.createHorizontalStrut(BUTTON_GAP) ); JButton fetchButton = new JButton("Fetch"); queryView.add(fetchButton); fetchButton.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent event) { try { Date startDate = fromDateModel.getDate(); Date endDate = toDateModel.getDate(); _model.fetchMachineSnapshots(startDate, endDate); } catch( Exception exception ) { throw new RuntimeException( exception ); } } }); queryView.add( 
Box.createHorizontalGlue() ); return queryView; } /** * Build the view that displays the list of fetched machine snapshots * @return the snapshot list view */ protected Container buildSnapshotListView() { Box listView = new Box(BoxLayout.Y_AXIS); listView.add( new JLabel("Machine Snapshots:") ); Box tableView = new Box(BoxLayout.Y_AXIS); listView.add(tableView); final KeyValueTableModel<MachineSnapshot> machineSnapshotTableModel = _controller.getMachineSnapshotTableModel(); final JTable snapshotTable = new JTable( machineSnapshotTableModel ); snapshotTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); tableView.add( snapshotTable.getTableHeader() ); tableView.add( new JScrollPane(snapshotTable) ); snapshotTable.getSelectionModel().addListSelectionListener( new ListSelectionListener() { public void valueChanged(ListSelectionEvent event) { if ( !event.getValueIsAdjusting() ) { final int selectedRow = snapshotTable.getSelectedRow(); final int selectedModelRow = snapshotTable.convertRowIndexToModel( selectedRow ); if ( selectedModelRow < 0 ) { _controller.setSelectedSnapshot( null ); } else if ( selectedModelRow < machineSnapshotTableModel.getRowCount() ) { final MachineSnapshot selectedSnapshot = machineSnapshotTableModel.getRecordAtRow( selectedModelRow ); _controller.setSelectedSnapshot( selectedSnapshot ); } else { snapshotTable.clearSelection(); } } } }); return listView; } /** * Build the snapshot detail view which displays detailed information about the snapshot including * the comment and the list of channel snapshots associated with the selected signals. 
* @return the snapshot detail view */ protected Container buildSnapshotDetailView() { Box detailView = new Box(BoxLayout.Y_AXIS); JLabel titleLabel = new JLabel("Selected Snapshot:"); detailView.add(titleLabel); detailView.add( new JLabel("Comment:") ); final JTextArea commentTextView = new JTextArea(); commentTextView.setEditable(false); Box tableBox = new Box(BoxLayout.Y_AXIS); final KeyValueTableModel<ChannelSnapshot> detailTableModel = _controller.getChannelSnapshotTableModel(); final JTable dataTable = new JTable( detailTableModel ); tableBox.add( dataTable.getTableHeader() ); tableBox.add( new JScrollPane( dataTable ) ); _controller.addBrowserControllerListener( new BrowserControllerListener() { /** * event indicating that a snapshot has been selected * @param controller The controller managing selection state * @param snapshot The snapshot that has been selected */ public void snapshotSelected(BrowserController controller, MachineSnapshot snapshot) { if ( snapshot != null ) { commentTextView.setText( snapshot.getComment() ); pvLogIdField.setText((new Long(snapshot.getId())).toString()); } else { commentTextView.setText(""); } } /** * event indicating that the selected channel group changed * @param source the browser controller sending this notice * @param newGroup the newly selected channel group */ public void selectedChannelGroupChanged(BrowserController source, ChannelGroup newGroup) {} /** * Event indicating that the selected signals have changed * @param source the controller sending the event * @param selectedSignals the new collection of selected signals */ public void selectedSignalsChanged(BrowserController source, Collection<String> selectedSignals) {} }); JSplitPane mainPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, commentTextView, tableBox); detailView.add(mainPane); return detailView; } protected Container buildResultView() { Box result = new Box(BoxLayout.X_AXIS); JLabel titleLabel = new JLabel("Selected Snapshot:"); result.add(titleLabel); 
pvLogIdField.setMaximumSize(new Dimension(100, 25)); result.add(pvLogIdField); result.add( Box.createHorizontalStrut(20) ); JButton done = new JButton("Select"); done.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { pvLogId = (new Long(pvLogIdField.getText())).longValue(); pvLogDialog.setVisible(false); System.out.println("pvLogId = " + pvLogId); // for testing purpose // System.exit(0); } }); result.add(done); return result; } public long getPVLogId() { return pvLogId; } // for testing purpose // public static void main(String[] args) { // PVLogSnapshotChooser psc = new PVLogSnapshotChooser(); // psc.choosePVLogId(); // } }
services/pvlogger/extension/src/xal/service/pvlogger/apputils/browser/PVLogSnapshotChooser.java
/* * PVLogSnapshotChooser.java * * Created on February 2, 2005, 4:44 PM * * Copyright (c) 2001-2005 Oak Ridge National Laboratory * Oak Ridge, Tenessee 37831, U.S.A. * All rights reserved. */ package xal.service.pvlogger.apputils.browser; import xal.service.pvlogger.*; import xal.tools.database.*; import xal.extension.widgets.swing.KeyValueTableModel; import java.sql.*; import java.util.Date; import java.util.Collection; import javax.swing.*; import java.awt.Dimension; import java.awt.Container; import java.awt.event.*; import java.awt.*; import javax.swing.event.*; /** * This class provides a UI component (a JDialog) for selecting PV Logger ID. It is modified from the * pvlogbrowser application. * * @author Paul Chu */ public class PVLogSnapshotChooser { JDialog pvLogDialog; /** browser model */ protected BrowserModel _model = new BrowserModel(); /** controller of the selection state */ protected BrowserController _controller; private JTextField pvLogIdField = new JTextField(8); private long pvLogId = 0; private String groupName = "default"; /** Creates a new instance of PVLogSnapshotChooser */ public PVLogSnapshotChooser() { this( null ); } /** Constructor */ public PVLogSnapshotChooser( final Frame owner ) { this( owner, false ); } /** Primary constructor */ public PVLogSnapshotChooser( final Frame owner, final boolean modal ) { if ( owner != null ) { pvLogDialog = new JDialog( owner ); pvLogDialog.setLocationRelativeTo( owner ); } else { pvLogDialog = new JDialog(); } pvLogDialog.setTitle( "PV Logger Snapshot Chooser" ); pvLogDialog.setModal( modal ); _controller = new BrowserController( _model ); } public void setGroup(String name) { groupName = name; } public JDialog choosePVLogId() { requestUserConnection(); JSplitPane mainView = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, buildSnapshotListView(), buildSnapshotDetailView()); pvLogDialog.setLayout(new BorderLayout()); pvLogDialog.add(buildQueryView(), BorderLayout.NORTH); pvLogDialog.add(mainView, 
BorderLayout.CENTER); pvLogDialog.add(buildResultView(), BorderLayout.SOUTH); pvLogDialog.pack(); pvLogDialog.setVisible( true ); return pvLogDialog; } protected void requestUserConnection() { // initialize PVLogger ConnectionDictionary dict = PVLogger.newBrowsingConnectionDictionary(); Connection conn = dict.getDatabaseAdaptor().getConnection(dict); _model.setDatabaseConnection(conn, dict); _model.connect(); } /** * Build the view for querying the database for the machine snapshots. * @return the query view */ protected Container buildQueryView() { Box queryView = new Box(BoxLayout.X_AXIS); queryView.setBorder( BorderFactory.createEtchedBorder() ); final int BUTTON_GAP = 20; queryView.add( Box.createHorizontalStrut(BUTTON_GAP) ); queryView.add( new JLabel("From:") ); final SpinnerDateModel fromDateModel = new SpinnerDateModel(); JSpinner fromSpinner = new JSpinner(fromDateModel); fromSpinner.setEditor( new JSpinner.DateEditor(fromSpinner, "MMM dd, yyyy HH:mm:ss") ); fromSpinner.setMaximumSize( new Dimension(200, 25) ); queryView.add(fromSpinner); try { _model.selectGroup(groupName); } catch( Exception exception ) { throw new RuntimeException( exception ); } queryView.add( Box.createHorizontalStrut(10) ); queryView.add( new JLabel("To:") ); final SpinnerDateModel toDateModel = new SpinnerDateModel(); JSpinner toSpinner = new JSpinner(toDateModel); toSpinner.setEditor( new JSpinner.DateEditor(toSpinner, "MMM dd, yyyy HH:mm:ss") ); toSpinner.setMaximumSize( new Dimension(200, 25) ); queryView.add(toSpinner); queryView.add( Box.createHorizontalStrut(BUTTON_GAP) ); JButton fetchButton = new JButton("Fetch"); queryView.add(fetchButton); fetchButton.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent event) { try { Date startDate = fromDateModel.getDate(); Date endDate = toDateModel.getDate(); _model.fetchMachineSnapshots(startDate, endDate); } catch( Exception exception ) { throw new RuntimeException( exception ); } } }); queryView.add( 
Box.createHorizontalGlue() ); return queryView; } /** * Build the view that displays the list of fetched machine snapshots * @return the snapshot list view */ protected Container buildSnapshotListView() { Box listView = new Box(BoxLayout.Y_AXIS); listView.add( new JLabel("Machine Snapshots:") ); Box tableView = new Box(BoxLayout.Y_AXIS); listView.add(tableView); final KeyValueTableModel<MachineSnapshot> machineSnapshotTableModel = _controller.getMachineSnapshotTableModel(); final JTable snapshotTable = new JTable( machineSnapshotTableModel ); snapshotTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); tableView.add( snapshotTable.getTableHeader() ); tableView.add( new JScrollPane(snapshotTable) ); snapshotTable.getSelectionModel().addListSelectionListener( new ListSelectionListener() { public void valueChanged(ListSelectionEvent event) { if ( !event.getValueIsAdjusting() ) { final int selectedRow = snapshotTable.getSelectedRow(); final int selectedModelRow = snapshotTable.convertRowIndexToModel( selectedRow ); if ( selectedModelRow < 0 ) { _controller.setSelectedSnapshot( null ); } else if ( selectedModelRow < machineSnapshotTableModel.getRowCount() ) { final MachineSnapshot selectedSnapshot = machineSnapshotTableModel.getRecordAtRow( selectedModelRow ); _controller.setSelectedSnapshot( selectedSnapshot ); } else { snapshotTable.clearSelection(); } } } }); return listView; } /** * Build the snapshot detail view which displays detailed information about the snapshot including * the comment and the list of channel snapshots associated with the selected signals. 
* @return the snapshot detail view */ protected Container buildSnapshotDetailView() { Box detailView = new Box(BoxLayout.Y_AXIS); JLabel titleLabel = new JLabel("Selected Snapshot:"); detailView.add(titleLabel); detailView.add( new JLabel("Comment:") ); final JTextArea commentTextView = new JTextArea(); commentTextView.setEditable(false); Box tableBox = new Box(BoxLayout.Y_AXIS); final KeyValueTableModel<ChannelSnapshot> detailTableModel = _controller.getChannelSnapshotTableModel(); final JTable dataTable = new JTable( detailTableModel ); tableBox.add( dataTable.getTableHeader() ); tableBox.add( new JScrollPane( dataTable ) ); _controller.addBrowserControllerListener( new BrowserControllerListener() { /** * event indicating that a snapshot has been selected * @param controller The controller managing selection state * @param snapshot The snapshot that has been selected */ public void snapshotSelected(BrowserController controller, MachineSnapshot snapshot) { if ( snapshot != null ) { commentTextView.setText( snapshot.getComment() ); pvLogIdField.setText((new Long(snapshot.getId())).toString()); } else { commentTextView.setText(""); } } /** * event indicating that the selected channel group changed * @param source the browser controller sending this notice * @param newGroup the newly selected channel group */ public void selectedChannelGroupChanged(BrowserController source, ChannelGroup newGroup) {} /** * Event indicating that the selected signals have changed * @param source the controller sending the event * @param selectedSignals the new collection of selected signals */ public void selectedSignalsChanged(BrowserController source, Collection selectedSignals) {} }); JSplitPane mainPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, commentTextView, tableBox); detailView.add(mainPane); return detailView; } protected Container buildResultView() { Box result = new Box(BoxLayout.X_AXIS); JLabel titleLabel = new JLabel("Selected Snapshot:"); result.add(titleLabel); 
pvLogIdField.setMaximumSize(new Dimension(100, 25)); result.add(pvLogIdField); result.add( Box.createHorizontalStrut(20) ); JButton done = new JButton("Select"); done.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { pvLogId = (new Long(pvLogIdField.getText())).longValue(); pvLogDialog.setVisible(false); System.out.println("pvLogId = " + pvLogId); // for testing purpose // System.exit(0); } }); result.add(done); return result; } public long getPVLogId() { return pvLogId; } // for testing purpose // public static void main(String[] args) { // PVLogSnapshotChooser psc = new PVLogSnapshotChooser(); // psc.choosePVLogId(); // } }
Correct raw type usage.
services/pvlogger/extension/src/xal/service/pvlogger/apputils/browser/PVLogSnapshotChooser.java
Correct raw type usage.
Java
mit
67f3997783ff4b6df7dd9d9d3b708c60d3896457
0
Jacobingalls/coldboot
package edu.utexas.ece.jacobingalls; import edu.utexas.ece.jacobingalls.buildings.RobotFactory; import edu.utexas.ece.jacobingalls.buildings.TeamBase; import edu.utexas.ece.jacobingalls.gui.RightSideBar; import edu.utexas.ece.jacobingalls.robots.AIRobot; import edu.utexas.ece.jacobingalls.robots.Blueprint; import edu.utexas.ece.jacobingalls.robots.Robot; import edu.utexas.ece.jacobingalls.robots.Thing; import edu.utexas.ece.jacobingalls.robots.blocks.*; import javafx.application.Application; import javafx.application.Platform; import javafx.scene.Scene; import javafx.scene.canvas.Canvas; import javafx.scene.canvas.GraphicsContext; import javafx.scene.input.KeyCode; import javafx.scene.input.MouseButton; import javafx.scene.layout.GridPane; import javafx.scene.paint.Color; import javafx.stage.Stage; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; /** * Hello world! * */ public class App extends Application { public static boolean displayFPS = true; public static int target_fps = 60; public static int world_width = 800; public static int world_height = 600; public static double viewportX = 0; public static double viewportY = 0; private static double viewportXVelocity = 0; private static double viewportYVelocity = 0; public static boolean movingViewPort = false; public static boolean viewpointOverride = false; public static double mouseX = 0; public static double mouseY = 0; public static double mouseXViewport = 0; public static double mouseYViewport = 0; private static boolean dragging = false; private static double mouseXDrag = 0; private static double mouseYDrag = 0; private static GraphicsContext graphicsContext; private static TickThread tickThread; public static Player player = new Player(Team.GREEN); private static Game game = new Game(); public static void main(String[] args) { launch(App.class, args); } public static Game getGame(){return game;} @Override public void start(Stage primaryStage) throws Exception { 
graphicsContext = setupStage(primaryStage); // Mouse management setupMouseActions(primaryStage.getScene()); // Keybindings setupKeyboardActions(primaryStage.getScene()); // Begin the background thread tickThread = new TickThread(); tickThread.start(); // Temp configureWorld(); // Finally, allow the user to see the game. primaryStage.show(); } @Override public void stop(){ if(tickThread != null) tickThread.running = false; } private GraphicsContext setupStage(Stage stage){ // Set the stage window up stage.setTitle("Cold Boot"); stage.setHeight(world_height); stage.setWidth(world_width); // Create the canvas Canvas canvas = new Canvas(world_width, world_height); GraphicsContext gc = canvas.getGraphicsContext2D(); gc.setFill(Color.BLACK); gc.fillRect(0,0,world_width, world_height); // Add the canvas to a pane what will span the entire scene GridPane pane = new GridPane(); pane.add(canvas, 0, 0); // Create the scene we will be using. It will fill the entire stage. Scene scene = new Scene(pane,world_width,world_height); stage.setScene(scene); // When the width is changed update the width right now (so it looks smooth) scene.widthProperty().addListener((observableValue, oldSceneWidth, newSceneWidth) -> { world_width = newSceneWidth.intValue(); canvas.setWidth(world_width); tick(); }); // When the height is changed update the width right now (so it looks smooth) scene.heightProperty().addListener((observableValue, oldSceneHeight, newSceneHeight) -> { world_height = newSceneHeight.intValue(); canvas.setHeight(world_height); tick(); }); return gc; } private void configureWorld(){ double greenX = 10; game.getThings().add(new TeamBase(player.getTeam()).setX(greenX - 100)); game.getThings().add(new RobotFactory(player.getTeam(), greenX + 100, 100, team -> Blueprint.SMALL_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(3).setX(greenX).setY(10)); game.getThings().add(new RobotFactory(player.getTeam(), greenX+100, 300, team -> 
Blueprint.MEDIUM_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(5).setX(greenX).setY(300)); game.getThings().add(new RobotFactory(player.getTeam(), greenX+100, 600, team -> Blueprint.BIG_GUN.build(Robot.class, team).get() ).setNumberToBuild(3).setX(greenX).setY(600)); double redX = App.world_width-10; game.getThings().add(new TeamBase(Team.RED).setX(redX + 100)); game.getThings().add(new RobotFactory(Team.RED, redX-100, 100, team -> Blueprint.SMALL_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(3).setX(redX).setY(10)); game.getThings().add(new RobotFactory(Team.RED, redX-100, 300, team -> Blueprint.MEDIUM_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(5).setX(redX).setY(300)); game.getThings().add(new RobotFactory(Team.RED, redX-100, 600, team -> Blueprint.BIG_GUN.build(AIRobot.class, team).get() ).setNumberToBuild(3).setX(redX).setY(600)); } private String fpsStr = "---"; private long last_tick = System.currentTimeMillis(); private synchronized void tick(){ long time_elapsed = System.currentTimeMillis() - last_tick; last_tick = System.currentTimeMillis(); tick(time_elapsed); render(graphicsContext); } private void tick(long time_elapsed){ // Update FPS Meter if(time_elapsed > 0) fpsStr = Math.round((1e3/time_elapsed))+""; //Update Viewport updateViewport(time_elapsed); //Update Mouse Pos mouseXViewport = mouseX-viewportX; mouseYViewport = mouseY-viewportY; // Tick the game game.tick(time_elapsed); } private void render(GraphicsContext gc){ //clear screen gc.setFill(Color.BLACK); gc.fillRect(0 , 0,world_width, world_height); // Create Parallax Background renderParallaxBackground(gc); if(displayFPS) { gc.setFill(Color.YELLOW); gc.fillText(fpsStr, world_width - 20, 10, 20); } game.getThings().forEach(thing -> thing.render(gc)); if(dragging){ game.getThings().forEach(thing -> thing.setHovering(false)); getThingsInDragArea().forEach(thing -> thing.setHovering(true)); double x = mouseX; double y = mouseY; double width = mouseXDrag 
- mouseX; double height = mouseYDrag - mouseY; if(width < 0){ x = mouseXDrag; width = -width; } if(height < 0){ y = mouseYDrag; height = -height; } gc.setStroke(Color.GRAY); gc.strokeRect(x, y, width, height); } else { game.getThings().forEach(thing -> thing.setHovering(false)); List<Thing> hovered = game.getThings().parallelStream().filter(thing -> thing.isCollidingRoughBox(mouseXViewport, mouseYViewport)).collect(Collectors.toList()); if(hovered.size() >= 1) { hovered.get(hovered.size() - 1).setHovering(true); } } game.getRightSideBar().render(gc); } private void renderParallaxBackground(GraphicsContext gc) { renderParallaxBackgroundPart(gc, 25, .25, Color.rgb(10,10,10)); renderParallaxBackgroundPart(gc, 50, .5, Color.rgb(20,20,20)); renderParallaxBackgroundPart(gc, 100, 1, Color.rgb(35,35,35)); } private void renderParallaxBackgroundPart(GraphicsContext gc, int boxSize, double parallax, Color color) { int divisionsX = (world_width/boxSize)+2; int divisionsY = (world_height/boxSize)+2; gc.setStroke(color); double xOffset = (viewportX*parallax) % boxSize; double yOffset = (viewportY*parallax) % boxSize; for (int i = -1; i < divisionsX; i++) { gc.strokeLine(i*boxSize+xOffset, 0, i*boxSize+xOffset, world_height); } for (int i = -1; i < divisionsY; i++) { gc.strokeLine(0, i*boxSize+yOffset, world_width, i*boxSize+yOffset); } } private class TickThread extends Thread{ public boolean running = true; private int fps; private int max_tick_len; private long prev_tick = System.currentTimeMillis(); private long next_tick = System.currentTimeMillis() + max_tick_len; public void tick() { prev_tick = System.currentTimeMillis(); next_tick = prev_tick + max_tick_len; Platform.runLater(App.this::tick); } public void run() { try { while (running) { if(fps != target_fps){ fps= target_fps; max_tick_len = (int)(Math.floor(1e3/(fps))); } tick(); long l = next_tick - prev_tick - 1; if(l > 1) Thread.sleep(l); } } catch (InterruptedException e) { e.printStackTrace(); } } } private void 
updateViewport(long time_elapsed){ if(viewpointOverride && time_elapsed > 0 && game.getRightSideBar().getSelectedThing() != null){ double desiredViewportX = -game.getRightSideBar().getSelectedThing().getXCenter()+world_width/2; double desiredViewportY = -game.getRightSideBar().getSelectedThing().getYCenter()+world_height/2; if(viewportX > desiredViewportX-1 && viewportX < desiredViewportX+1 && viewportY > desiredViewportY-1 && viewportY < desiredViewportY+1){ viewportX = desiredViewportX; viewportY = desiredViewportY; } else { double acc = 1000; double maxV = 100000; double stop_distance_x = (viewportXVelocity*viewportXVelocity)/(2 * acc); double stop_distance_y = (viewportYVelocity*viewportYVelocity)/(2 * acc); if(viewportXVelocity > 0) stop_distance_x = viewportX + stop_distance_x + 1; else stop_distance_x = viewportX - stop_distance_x - 1; if(viewportYVelocity > 0) stop_distance_y = viewportY + stop_distance_y + 1; else stop_distance_y = viewportY - stop_distance_y - 1; double desiredVelocityX = 0; if(desiredViewportX > viewportX && stop_distance_x > desiredViewportX) desiredVelocityX = 0; else if(desiredViewportX < viewportX && stop_distance_x < desiredViewportX) desiredVelocityX = 0; else if(desiredViewportX > viewportX) desiredVelocityX = maxV; else if(desiredViewportX < viewportX) desiredVelocityX = -maxV; double desiredVelocityY = 0; if(desiredViewportY > viewportY && stop_distance_y > desiredViewportY) desiredVelocityY = 0; else if(desiredViewportY < viewportY && stop_distance_y < desiredViewportY) desiredVelocityY = 0; else if(desiredViewportY > viewportY) desiredVelocityY = maxV; else if(desiredViewportY < viewportY) desiredVelocityY = -maxV; double tacc = time_elapsed/1000.0; if(viewportXVelocity < desiredVelocityX) viewportXVelocity += acc * tacc; else if(viewportXVelocity > desiredVelocityX) viewportXVelocity -= acc * tacc; if(viewportXVelocity > maxV) viewportXVelocity = maxV; else if(viewportXVelocity < -maxV) viewportXVelocity = -maxV; 
if(desiredVelocityY > viewportYVelocity) viewportYVelocity += acc * tacc; else if(desiredVelocityY < viewportYVelocity) viewportYVelocity -= acc * tacc; if(viewportYVelocity > maxV) viewportYVelocity = maxV; else if(viewportYVelocity < -maxV) viewportYVelocity = -maxV; viewportX += viewportXVelocity * tacc; viewportY += viewportYVelocity * tacc; } } } // TODO be able to specify a home base private void setupKeyboardActions(Scene scene){ scene.setOnKeyPressed(event -> { if (event.getCode().isLetterKey()) { if (event.getCode().equals(KeyCode.H)) { Optional<Thing> teamBaseOptional = App.getGame().getThings().parallelStream() .filter(thing -> thing instanceof TeamBase) .filter(thing -> thing.getTeam().equals(App.player.getTeam())) .findFirst(); if(teamBaseOptional.isPresent()){ if(teamBaseOptional.get().isSelected()) App.viewpointOverride = true; else { App.getGame().getThings().parallelStream().forEach(thing -> thing.setSelected(false)); teamBaseOptional.get().setSelected(true); App.viewpointOverride = false; } } else { System.err.println("No home base!"); } } } }); scene.setOnKeyReleased(event -> { System.out.println("r"+event.getCode().getName()); }); } private void setupMouseActions(Scene scene){ scene.setOnMouseMoved(event -> { mouseX = event.getX(); mouseY = event.getY(); }); scene.setOnMouseClicked(event -> { viewpointOverride = true; if(movingViewPort) { viewpointOverride = false; movingViewPort = false; }else if(event.getButton() == MouseButton.PRIMARY) { RightSideBar rightSideBar = game.getRightSideBar(); if(rightSideBar.getOffset() < 10 && mouseX >= rightSideBar.getX() && mouseX <= rightSideBar.getX() + rightSideBar.getWidth() && mouseX >= rightSideBar.getY() && mouseY <= rightSideBar.getY() + rightSideBar.getHeight()){ rightSideBar.wasClicked = true; } else { game.getThings().forEach(thing -> thing.setSelected(false)); if (dragging) { getThingsInDragArea().forEach(thing -> thing.setSelected(true)); } else { List<Thing> hovered = 
game.getThings().parallelStream().filter(thing -> thing.isCollidingRoughBox(mouseXViewport, mouseYViewport)).collect(Collectors.toList()); if (hovered.size() >= 1) hovered.get(hovered.size() - 1).click(mouseXViewport, mouseYViewport); } mouseXDrag = event.getX(); mouseYDrag = event.getY(); dragging = false; } } else if(event.getButton() == MouseButton.SECONDARY){ List<Robot> selectedRobots = game.getThings().parallelStream() .filter(Thing::isSelected) .filter(thing -> thing.getTeam().equals(player.getTeam())) .filter(thing -> thing instanceof Robot) .map(thing -> (Robot)thing) .collect(Collectors.toList()); selectedRobots.forEach(robot -> robot.setTargetLocation(event.getX()-viewportX, event.getY()-viewportY)); } }); scene.setOnDragDetected(event -> { if(event.getButton() == MouseButton.SECONDARY) { movingViewPort = true; viewpointOverride = false; viewportXVelocity = 0; viewportYVelocity = 0; dragging = false; } else { mouseXDrag = event.getX(); mouseYDrag = event.getY(); dragging = true; movingViewPort = false; } }); scene.setOnMouseDragged(event -> { if(movingViewPort){ double xDelta = event.getX() - mouseXDrag; double yDelta = event.getY() - mouseYDrag; viewportX += xDelta; viewportY += yDelta; } mouseXDrag = event.getX(); mouseYDrag = event.getY(); }); } private List<Thing> getThingsInDragArea(){ double x = mouseXViewport; double y = mouseYViewport; double width = mouseXDrag - viewportX - x; double height = mouseYDrag - viewportY - y; if (width < 0) { x = mouseXDrag - viewportX; width = -width; } if(height < 0){ y = mouseYDrag -viewportY; height = -height; } final double x1 = x; final double y1 = y; final double x2 = x+width; final double y2 = y+height; return game.getThings().parallelStream().filter(thing -> thing.getXCenter() >= x1 && thing.getYCenter() >= y1 && thing.getXCenter() <= x2 && thing.getYCenter() <= y2).collect(Collectors.toList()); } }
src/main/java/edu/utexas/ece/jacobingalls/App.java
package edu.utexas.ece.jacobingalls; import edu.utexas.ece.jacobingalls.buildings.RobotFactory; import edu.utexas.ece.jacobingalls.buildings.TeamBase; import edu.utexas.ece.jacobingalls.gui.RightSideBar; import edu.utexas.ece.jacobingalls.robots.AIRobot; import edu.utexas.ece.jacobingalls.robots.Blueprint; import edu.utexas.ece.jacobingalls.robots.Robot; import edu.utexas.ece.jacobingalls.robots.Thing; import edu.utexas.ece.jacobingalls.robots.blocks.*; import javafx.application.Application; import javafx.application.Platform; import javafx.scene.Scene; import javafx.scene.canvas.Canvas; import javafx.scene.canvas.GraphicsContext; import javafx.scene.input.MouseButton; import javafx.scene.layout.GridPane; import javafx.scene.paint.Color; import javafx.stage.Stage; import java.util.List; import java.util.stream.Collectors; /** * Hello world! * */ public class App extends Application { public static boolean displayFPS = true; public static int target_fps = 60; public static int world_width = 800; public static int world_height = 600; public static double viewportX = 0; public static double viewportY = 0; private static double viewportXVelocity = 0; private static double viewportYVelocity = 0; public static boolean movingViewPort = false; public static boolean viewpointOverride = false; public static double mouseX = 0; public static double mouseY = 0; public static double mouseXViewport = 0; public static double mouseYViewport = 0; private static boolean dragging = false; private static double mouseXDrag = 0; private static double mouseYDrag = 0; private static GraphicsContext graphicsContext; private static TickThread tickThread; public static Player player = new Player(Team.GREEN); private static Game game = new Game(); public static void main(String[] args) { launch(App.class, args); } public static Game getGame(){return game;} @Override public void start(Stage primaryStage) throws Exception { graphicsContext = setupStage(primaryStage); // Mouse management 
setupMouseActions(primaryStage.getScene()); // Begin the background thread tickThread = new TickThread(); tickThread.start(); // Temp configureWorld(); // Finally, allow the user to see the game. primaryStage.show(); } @Override public void stop(){ if(tickThread != null) tickThread.running = false; } private GraphicsContext setupStage(Stage stage){ // Set the stage window up stage.setTitle("Cold Boot"); stage.setHeight(world_height); stage.setWidth(world_width); // Create the canvas Canvas canvas = new Canvas(world_width, world_height); GraphicsContext gc = canvas.getGraphicsContext2D(); gc.setFill(Color.BLACK); gc.fillRect(0,0,world_width, world_height); // Add the canvas to a pane what will span the entire scene GridPane pane = new GridPane(); pane.add(canvas, 0, 0); // Create the scene we will be using. It will fill the entire stage. Scene scene = new Scene(pane,world_width,world_height); stage.setScene(scene); // When the width is changed update the width right now (so it looks smooth) scene.widthProperty().addListener((observableValue, oldSceneWidth, newSceneWidth) -> { world_width = newSceneWidth.intValue(); canvas.setWidth(world_width); tick(); }); // When the height is changed update the width right now (so it looks smooth) scene.heightProperty().addListener((observableValue, oldSceneHeight, newSceneHeight) -> { world_height = newSceneHeight.intValue(); canvas.setHeight(world_height); tick(); }); return gc; } private void configureWorld(){ double greenX = 10; game.getThings().add(new TeamBase(player.getTeam()).setX(greenX - 100)); game.getThings().add(new RobotFactory(player.getTeam(), greenX + 100, 100, team -> Blueprint.SMALL_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(3).setX(greenX).setY(10)); game.getThings().add(new RobotFactory(player.getTeam(), greenX+100, 300, team -> Blueprint.MEDIUM_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(5).setX(greenX).setY(300)); game.getThings().add(new RobotFactory(player.getTeam(), 
greenX+100, 600, team -> Blueprint.BIG_GUN.build(Robot.class, team).get() ).setNumberToBuild(3).setX(greenX).setY(600)); double redX = App.world_width-10; game.getThings().add(new TeamBase(Team.RED).setX(redX + 100)); game.getThings().add(new RobotFactory(Team.RED, redX-100, 100, team -> Blueprint.SMALL_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(3).setX(redX).setY(10)); game.getThings().add(new RobotFactory(Team.RED, redX-100, 300, team -> Blueprint.MEDIUM_FIGHTER.build(AIRobot.class, team).get() ).setNumberToBuild(5).setX(redX).setY(300)); game.getThings().add(new RobotFactory(Team.RED, redX-100, 600, team -> Blueprint.BIG_GUN.build(AIRobot.class, team).get() ).setNumberToBuild(3).setX(redX).setY(600)); } private String fpsStr = "---"; private long last_tick = System.currentTimeMillis(); private synchronized void tick(){ long time_elapsed = System.currentTimeMillis() - last_tick; last_tick = System.currentTimeMillis(); tick(time_elapsed); render(graphicsContext); } private void tick(long time_elapsed){ // Update FPS Meter if(time_elapsed > 0) fpsStr = Math.round((1e3/time_elapsed))+""; //Update Viewport updateViewport(time_elapsed); //Update Mouse Pos mouseXViewport = mouseX-viewportX; mouseYViewport = mouseY-viewportY; // Tick the game game.tick(time_elapsed); } private void render(GraphicsContext gc){ //clear screen gc.setFill(Color.BLACK); gc.fillRect(0 , 0,world_width, world_height); // Create Parallax Background renderParallaxBackground(gc); if(displayFPS) { gc.setFill(Color.YELLOW); gc.fillText(fpsStr, world_width - 20, 10, 20); } game.getThings().forEach(thing -> thing.render(gc)); if(dragging){ game.getThings().forEach(thing -> thing.setHovering(false)); getThingsInDragArea().forEach(thing -> thing.setHovering(true)); double x = mouseX; double y = mouseY; double width = mouseXDrag - mouseX; double height = mouseYDrag - mouseY; if(width < 0){ x = mouseXDrag; width = -width; } if(height < 0){ y = mouseYDrag; height = -height; } 
gc.setStroke(Color.GRAY); gc.strokeRect(x, y, width, height); } else { game.getThings().forEach(thing -> thing.setHovering(false)); List<Thing> hovered = game.getThings().parallelStream().filter(thing -> thing.isCollidingRoughBox(mouseXViewport, mouseYViewport)).collect(Collectors.toList()); if(hovered.size() >= 1) { hovered.get(hovered.size() - 1).setHovering(true); } } game.getRightSideBar().render(gc); } private void renderParallaxBackground(GraphicsContext gc) { renderParallaxBackgroundPart(gc, 25, .25, Color.rgb(10,10,10)); renderParallaxBackgroundPart(gc, 50, .5, Color.rgb(20,20,20)); renderParallaxBackgroundPart(gc, 100, 1, Color.rgb(35,35,35)); } private void renderParallaxBackgroundPart(GraphicsContext gc, int boxSize, double parallax, Color color) { int divisionsX = (world_width/boxSize)+2; int divisionsY = (world_height/boxSize)+2; gc.setStroke(color); double xOffset = (viewportX*parallax) % boxSize; double yOffset = (viewportY*parallax) % boxSize; for (int i = -1; i < divisionsX; i++) { gc.strokeLine(i*boxSize+xOffset, 0, i*boxSize+xOffset, world_height); } for (int i = -1; i < divisionsY; i++) { gc.strokeLine(0, i*boxSize+yOffset, world_width, i*boxSize+yOffset); } } private class TickThread extends Thread{ public boolean running = true; private int fps; private int max_tick_len; private long prev_tick = System.currentTimeMillis(); private long next_tick = System.currentTimeMillis() + max_tick_len; public void tick() { prev_tick = System.currentTimeMillis(); next_tick = prev_tick + max_tick_len; Platform.runLater(App.this::tick); } public void run() { try { while (running) { if(fps != target_fps){ fps= target_fps; max_tick_len = (int)(Math.floor(1e3/(fps))); } tick(); long l = next_tick - prev_tick - 1; if(l > 1) Thread.sleep(l); } } catch (InterruptedException e) { e.printStackTrace(); } } } private void updateViewport(long time_elapsed){ if(viewpointOverride && time_elapsed > 0 && game.getRightSideBar().getSelectedThing() != null){ double 
desiredViewportX = -game.getRightSideBar().getSelectedThing().getXCenter()+world_width/2; double desiredViewportY = -game.getRightSideBar().getSelectedThing().getYCenter()+world_height/2; if(viewportX > desiredViewportX-1 && viewportX < desiredViewportX+1 && viewportY > desiredViewportY-1 && viewportY < desiredViewportY+1){ viewportX = desiredViewportX; viewportY = desiredViewportY; } else { double acc = 1000; double maxV = 100000; double stop_distance_x = (viewportXVelocity*viewportXVelocity)/(2 * acc); double stop_distance_y = (viewportYVelocity*viewportYVelocity)/(2 * acc); if(viewportXVelocity > 0) stop_distance_x = viewportX + stop_distance_x + 1; else stop_distance_x = viewportX - stop_distance_x - 1; if(viewportYVelocity > 0) stop_distance_y = viewportY + stop_distance_y + 1; else stop_distance_y = viewportY - stop_distance_y - 1; double desiredVelocityX = 0; if(desiredViewportX > viewportX && stop_distance_x > desiredViewportX) desiredVelocityX = 0; else if(desiredViewportX < viewportX && stop_distance_x < desiredViewportX) desiredVelocityX = 0; else if(desiredViewportX > viewportX) desiredVelocityX = maxV; else if(desiredViewportX < viewportX) desiredVelocityX = -maxV; double desiredVelocityY = 0; if(desiredViewportY > viewportY && stop_distance_y > desiredViewportY) desiredVelocityY = 0; else if(desiredViewportY < viewportY && stop_distance_y < desiredViewportY) desiredVelocityY = 0; else if(desiredViewportY > viewportY) desiredVelocityY = maxV; else if(desiredViewportY < viewportY) desiredVelocityY = -maxV; double tacc = time_elapsed/1000.0; if(viewportXVelocity < desiredVelocityX) viewportXVelocity += acc * tacc; else if(viewportXVelocity > desiredVelocityX) viewportXVelocity -= acc * tacc; if(viewportXVelocity > maxV) viewportXVelocity = maxV; else if(viewportXVelocity < -maxV) viewportXVelocity = -maxV; if(desiredVelocityY > viewportYVelocity) viewportYVelocity += acc * tacc; else if(desiredVelocityY < viewportYVelocity) viewportYVelocity -= acc * 
tacc; if(viewportYVelocity > maxV) viewportYVelocity = maxV; else if(viewportYVelocity < -maxV) viewportYVelocity = -maxV; viewportX += viewportXVelocity * tacc; viewportY += viewportYVelocity * tacc; } } } private void setupMouseActions(Scene scene){ scene.setOnMouseMoved(event -> { mouseX = event.getX(); mouseY = event.getY(); }); scene.setOnMouseClicked(event -> { viewpointOverride = true; if(movingViewPort) { viewpointOverride = false; movingViewPort = false; }else if(event.getButton() == MouseButton.PRIMARY) { RightSideBar rightSideBar = game.getRightSideBar(); if(rightSideBar.getOffset() < 10 && mouseX >= rightSideBar.getX() && mouseX <= rightSideBar.getX() + rightSideBar.getWidth() && mouseX >= rightSideBar.getY() && mouseY <= rightSideBar.getY() + rightSideBar.getHeight()){ rightSideBar.wasClicked = true; } else { game.getThings().forEach(thing -> thing.setSelected(false)); if (dragging) { getThingsInDragArea().forEach(thing -> thing.setSelected(true)); } else { List<Thing> hovered = game.getThings().parallelStream().filter(thing -> thing.isCollidingRoughBox(mouseXViewport, mouseYViewport)).collect(Collectors.toList()); if (hovered.size() >= 1) hovered.get(hovered.size() - 1).click(mouseXViewport, mouseYViewport); } mouseXDrag = event.getX(); mouseYDrag = event.getY(); dragging = false; } } else if(event.getButton() == MouseButton.SECONDARY){ List<Robot> selectedRobots = game.getThings().parallelStream() .filter(Thing::isSelected) .filter(thing -> thing.getTeam().equals(player.getTeam())) .filter(thing -> thing instanceof Robot) .map(thing -> (Robot)thing) .collect(Collectors.toList()); selectedRobots.forEach(robot -> robot.setTargetLocation(event.getX()-viewportX, event.getY()-viewportY)); } }); scene.setOnDragDetected(event -> { if(event.getButton() == MouseButton.SECONDARY) { movingViewPort = true; viewpointOverride = false; viewportXVelocity = 0; viewportYVelocity = 0; dragging = false; } else { mouseXDrag = event.getX(); mouseYDrag = event.getY(); 
dragging = true; movingViewPort = false; } }); scene.setOnMouseDragged(event -> { if(movingViewPort){ double xDelta = event.getX() - mouseXDrag; double yDelta = event.getY() - mouseYDrag; viewportX += xDelta; viewportY += yDelta; } mouseXDrag = event.getX(); mouseYDrag = event.getY(); }); } private List<Thing> getThingsInDragArea(){ double x = mouseXViewport; double y = mouseYViewport; double width = mouseXDrag - viewportX - x; double height = mouseYDrag - viewportY - y; if (width < 0) { x = mouseXDrag - viewportX; width = -width; } if(height < 0){ y = mouseYDrag -viewportY; height = -height; } final double x1 = x; final double y1 = y; final double x2 = x+width; final double y2 = y+height; return game.getThings().parallelStream().filter(thing -> thing.getXCenter() >= x1 && thing.getYCenter() >= y1 && thing.getXCenter() <= x2 && thing.getYCenter() <= y2).collect(Collectors.toList()); } }
Add the beginings of keyboard controls
src/main/java/edu/utexas/ece/jacobingalls/App.java
Add the beginings of keyboard controls
Java
mit
1f4bf46324c0b3b5a4094b7a6486d1359912bdc1
0
alternet/alternet.ml,alternet/alternet.ml
package ml.alternet.util; /** * Bytes-related utilities. * * @author Philippe Poulard */ @Util public final class BytesUtil { private BytesUtil() { } /** * Byte-to-byte copy of an array of chars to an array of bytes without * conversion (a char contains 2 bytes). * * This method converts non-ASCII chars. * * @param chars * The chars to cast. * * @return The same input, but as bytes. */ public static byte[] cast(char[] chars) { byte[] bytes = new byte[chars.length << 1]; for (int i = 0; i < chars.length; i++) { int pos = i << 1; bytes[pos] = (byte) ((chars[i] & 0xFF00) >> 8); bytes[pos + 1] = (byte) (chars[i] & 0x00FF); } return bytes; } /** * Byte-to-byte copy of an array of bytes to an array of chars without * conversion. (a char contains 2 bytes). * * This method converts from non-ASCII chars. * * @param bytes * The bytes to cast. * * @return The same input, but as chars. */ public static char[] cast(byte[] bytes) { char[] chars = new char[bytes.length >> 1]; for (int i = 0; i < chars.length; i++) { int pos = i << 1; char c = (char) (((bytes[pos] & 0x00FF) << 8) + (bytes[pos + 1] & 0x00FF)); chars[i] = c; } return chars; } /** * Byte-to-byte copy of an array of bytes to an array of chars without * conversion. (a char contains 2 bytes). * * This method converts from non-ASCII chars. * * @param bytes * The bytes to cast. * @param offset The offset from which to start the cast. * @param len The number of bytes to cast. * * @return The same input, but as chars. */ public static char[] cast(byte[] bytes, int offset, int len) { char[] chars = new char[len >> 1]; for (int i = 0; i < chars.length; i++) { int pos = i << 1; char c = (char) (((bytes[offset + pos] & 0x00FF) << 8) + (bytes[offset + pos + 1] & 0x00FF)); chars[i] = c; } return chars; } }
tools/src/main/java/ml/alternet/util/BytesUtil.java
package ml.alternet.util; import java.util.Arrays; /** * Bytes-related utilities. * * @author Philippe Poulard */ @Util public final class BytesUtil { private BytesUtil() { } /** * Byte-to-byte copy of an array of chars to an array of bytes without * conversion (a char contains 2 bytes). * * This method converts non-ASCII chars. * * @param chars * The chars to cast. * * @return The same input, but as bytes. */ public static byte[] cast(char[] chars) { byte[] bytes = new byte[chars.length << 1]; for (int i = 0; i < chars.length; i++) { int pos = i << 1; bytes[pos] = (byte) ((chars[i] & 0xFF00) >> 8); bytes[pos + 1] = (byte) (chars[i] & 0x00FF); } return bytes; } /** * Byte-to-byte copy of an array of bytes to an array of chars without * conversion. (a char contains 2 bytes). * * This method converts from non-ASCII chars. * * @param bytes * The bytes to cast. * * @return The same input, but as chars. */ public static char[] cast(byte[] bytes) { char[] chars = new char[bytes.length >> 1]; for (int i = 0; i < chars.length; i++) { int pos = i << 1; char c = (char) (((bytes[pos] & 0x00FF) << 8) + (bytes[pos + 1] & 0x00FF)); chars[i] = c; } return chars; } /** * Byte-to-byte copy of an array of bytes to an array of chars without * conversion. (a char contains 2 bytes). * * This method converts from non-ASCII chars. * * @param bytes * The bytes to cast. * @param offset The offset from which to start the cast. * @param len The number of bytes to cast. * * @return The same input, but as chars. */ public static char[] cast(byte[] bytes, int offset, int len) { char[] chars = new char[len >> 1]; for (int i = 0; i < chars.length; i++) { int pos = i << 1; char c = (char) (((bytes[offset + pos] & 0x00FF) << 8) + (bytes[offset + pos + 1] & 0x00FF)); chars[i] = c; } return chars; } }
Clear unused import
tools/src/main/java/ml/alternet/util/BytesUtil.java
Clear unused import
Java
mit
038f912b6fafce0379f97cbac63e934ff326bd4d
0
wmi-students/wmi-timetable-android
package pl.edu.amu.wmi.wmitimetable; import android.content.Intent; import android.support.design.widget.TabLayout; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import android.os.Bundle; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.ListView; import org.joda.time.DateTime; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Locale; import lombok.Setter; import pl.edu.amu.wmi.wmitimetable.adapter.MeetingListAdapter; import pl.edu.amu.wmi.wmitimetable.model.Meeting; import pl.edu.amu.wmi.wmitimetable.model.MeetingDay; import pl.edu.amu.wmi.wmitimetable.model.Schedule; import pl.edu.amu.wmi.wmitimetable.model.World; import pl.edu.amu.wmi.wmitimetable.service.DataService; import pl.edu.amu.wmi.wmitimetable.service.SettingsService; public class MainActivity extends AppCompatActivity { private DataService dataService; private SettingsService settingsService; private ArrayList<Meeting> meetings = new ArrayList<>(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); this.dataService = new DataService(getApplicationContext()); this.settingsService = new SettingsService(this); setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); // Create the adapter that will return a fragment for each of the three // primary sections of the activity. /* The {@link android.support.v4.view.PagerAdapter} that will provide fragments for each of the sections. We use a {@link FragmentPagerAdapter} derivative, which will keep every loaded fragment in memory. 
If this becomes too memory intensive, it may be best to switch to a {@link android.support.v4.app.FragmentStatePagerAdapter}. */ SectionsPagerAdapter mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); loadData(); // Set up the ViewPager with the sections adapter. /* The {@link ViewPager} that will host the section contents. */ ViewPager mViewPager = (ViewPager) findViewById(R.id.container); mViewPager.setAdapter(mSectionsPagerAdapter); mViewPager.setOffscreenPageLimit(3); TabLayout tabLayout = (TabLayout) findViewById(R.id.tabs); tabLayout.setupWithViewPager(mViewPager); } private void loadData() { meetings = filterMeetings(dataService.getMeetings()); } private ArrayList<Meeting> filterMeetings(ArrayList<Meeting> meetings) { ArrayList<Meeting> filteredMeetings = new ArrayList<>(); for (Meeting meeting : meetings) { if(meetingHasFiteredSchedules(meeting)){ filteredMeetings.add(meeting); } } return filteredMeetings; } private boolean meetingHasFiteredSchedules(Meeting meeting) { for (MeetingDay meetingDay : meeting.getMeetingDays()) { for (Schedule schedule : meetingDay.getSchedules()) { if(settingsService.scheduleInFilter(schedule)){ return true; } } } return false; } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. 
getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_settings) { showSettings(); return true; } if (id == R.id.action_reset) { settingsReset(); return true; } return super.onOptionsItemSelected(item); } private void settingsReset() { deleteData(); resetSettings(); goSettings(); } private void resetSettings(){ settingsService.saveSetting("study", null); settingsService.saveSetting("year", null); settingsService.saveSetting("group", null); } private void deleteData() { dataService.deleteLocalData(); } private void showSettings() { goSettings(); } private void goSettings() { Intent intent = new Intent(this, SettingsActivity.class); startActivity(intent); } /** * A placeholder fragment containing a simple view. */ public static class PlaceholderFragment extends Fragment { /** * The fragment argument representing the section number for this * fragment. */ private static final String ARG_SECTION_NUMBER = "section_number"; private static final String ARG_MEETING = "meeting_object"; MeetingListAdapter meetingArrayAdapter; ListView meetingListView; Meeting meeting; public PlaceholderFragment() { } /** * Returns a new instance of this fragment for the given section * number. 
*/ public static PlaceholderFragment newInstance(int sectionNumber, Meeting meeting) { PlaceholderFragment fragment = new PlaceholderFragment(); Bundle args = new Bundle(); args.putInt(ARG_SECTION_NUMBER, sectionNumber); fragment.setArguments(args); fragment.setMeeting(meeting); return fragment; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_main, container, false); int pageNr = getArguments().getInt(ARG_SECTION_NUMBER); meetingListView = (ListView) rootView.findViewById(R.id.list_meeting_days); ArrayList<MeetingDay> meetingDays = meeting.getMeetingDays(); meetingArrayAdapter = new MeetingListAdapter(getActivity(), R.layout.meeting_list_item, meetingDays); meetingListView.setAdapter(meetingArrayAdapter); return rootView; } public void setMeeting(Meeting meeting) { this.meeting = meeting; } } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to * one of the sections/tabs/pages. 
*/ public class SectionsPagerAdapter extends FragmentPagerAdapter { public SectionsPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int position) { int offset = 0; for (Meeting meeting : meetings) { if (meeting.getDate().before(DateTime.now().plusDays(-2).toDate())) { offset++; } else { break; } } Meeting meeting; int meetingIndex = position + offset; if(meetingIndex>meetings.size()-1){ meeting = new Meeting(); }else{ meeting = meetings.get(meetingIndex); } return PlaceholderFragment.newInstance(meetingIndex, meeting); } @Override public int getCount() { return 5; } @Override public CharSequence getPageTitle(int position) { if (position < meetings.size() - 1) { int offset = 0; Meeting meeting = meetings.get(position); if (meeting.getDate().before(DateTime.now().plusDays(-2).toDate())) { offset++; } meeting = meetings.get(position + offset); SimpleDateFormat simpleDate = new SimpleDateFormat("dd/MM", new Locale("pl", "PL")); return simpleDate.format(meeting.getDate()); } else { return "..."; } } } }
app/src/main/java/pl/edu/amu/wmi/wmitimetable/MainActivity.java
package pl.edu.amu.wmi.wmitimetable; import android.content.Intent; import android.support.design.widget.TabLayout; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import android.os.Bundle; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.ListView; import org.joda.time.DateTime; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Locale; import lombok.Setter; import pl.edu.amu.wmi.wmitimetable.adapter.MeetingListAdapter; import pl.edu.amu.wmi.wmitimetable.model.Meeting; import pl.edu.amu.wmi.wmitimetable.model.MeetingDay; import pl.edu.amu.wmi.wmitimetable.model.Schedule; import pl.edu.amu.wmi.wmitimetable.model.World; import pl.edu.amu.wmi.wmitimetable.service.DataService; import pl.edu.amu.wmi.wmitimetable.service.SettingsService; public class MainActivity extends AppCompatActivity { private DataService dataService; private SettingsService settingsService; private ArrayList<Meeting> meetings = new ArrayList<>(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); this.dataService = new DataService(getApplicationContext()); this.settingsService = new SettingsService(this); setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); // Create the adapter that will return a fragment for each of the three // primary sections of the activity. /* The {@link android.support.v4.view.PagerAdapter} that will provide fragments for each of the sections. We use a {@link FragmentPagerAdapter} derivative, which will keep every loaded fragment in memory. 
If this becomes too memory intensive, it may be best to switch to a {@link android.support.v4.app.FragmentStatePagerAdapter}. */ SectionsPagerAdapter mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); loadData(); // Set up the ViewPager with the sections adapter. /* The {@link ViewPager} that will host the section contents. */ ViewPager mViewPager = (ViewPager) findViewById(R.id.container); mViewPager.setAdapter(mSectionsPagerAdapter); mViewPager.setOffscreenPageLimit(3); TabLayout tabLayout = (TabLayout) findViewById(R.id.tabs); tabLayout.setupWithViewPager(mViewPager); } private void loadData() { meetings = filterMeetings(dataService.getMeetings()); } private ArrayList<Meeting> filterMeetings(ArrayList<Meeting> meetings) { ArrayList<Meeting> filteredMeetings = new ArrayList<>(); for (Meeting meeting : meetings) { if(meetingHasFiteredSchedules(meeting)){ filteredMeetings.add(meeting); } } return filteredMeetings; } private boolean meetingHasFiteredSchedules(Meeting meeting) { for (MeetingDay meetingDay : meeting.getMeetingDays()) { for (Schedule schedule : meetingDay.getSchedules()) { if(settingsService.scheduleInFilter(schedule)){ return true; } } } return false; } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. 
getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_settings) { showSettings(); return true; } if (id == R.id.action_reset) { settingsReset(); return true; } return super.onOptionsItemSelected(item); } private void settingsReset() { deleteData(); resetSettings(); goSettings(); } private void resetSettings(){ settingsService.saveSetting("study", null); settingsService.saveSetting("year", null); settingsService.saveSetting("group", null); } private void deleteData() { dataService.deleteLocalData(); } private void showSettings() { goSettings(); } private void goSettings() { Intent intent = new Intent(this, SettingsActivity.class); startActivity(intent); } /** * A placeholder fragment containing a simple view. */ public static class PlaceholderFragment extends Fragment { /** * The fragment argument representing the section number for this * fragment. */ private static final String ARG_SECTION_NUMBER = "section_number"; private static final String ARG_MEETING = "meeting_object"; MeetingListAdapter meetingArrayAdapter; ListView meetingListView; Meeting meeting; public PlaceholderFragment() { } /** * Returns a new instance of this fragment for the given section * number. 
*/ public static PlaceholderFragment newInstance(int sectionNumber, Meeting meeting) { PlaceholderFragment fragment = new PlaceholderFragment(); Bundle args = new Bundle(); args.putInt(ARG_SECTION_NUMBER, sectionNumber); fragment.setArguments(args); fragment.setMeeting(meeting); return fragment; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_main, container, false); int pageNr = getArguments().getInt(ARG_SECTION_NUMBER); meetingListView = (ListView) rootView.findViewById(R.id.list_meeting_days); ArrayList<MeetingDay> meetingDays = meeting.getMeetingDays(); meetingArrayAdapter = new MeetingListAdapter(getActivity(), R.layout.meeting_list_item, meetingDays); meetingListView.setAdapter(meetingArrayAdapter); return rootView; } public void setMeeting(Meeting meeting) { this.meeting = meeting; } } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to * one of the sections/tabs/pages. */ public class SectionsPagerAdapter extends FragmentPagerAdapter { public SectionsPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int position) { int offset = 0; for (Meeting meeting : meetings) { if (meeting.getDate().before(DateTime.now().plusDays(-2).toDate())) { offset++; } else { break; } } Meeting meeting; int meetingIndex = position + offset; if(meetingIndex>meetings.size()-1){ meeting = new Meeting(); }else{ meeting = meetings.get(meetingIndex); } return PlaceholderFragment.newInstance(meetingIndex, meeting); } @Override public int getCount() { return 5; } @Override public CharSequence getPageTitle(int position) { if(position < meetings.size()-1) { Meeting meeting = meetings.get(position); SimpleDateFormat simpleDate = new SimpleDateFormat("dd/MM", new Locale("pl", "PL")); return simpleDate.format(meeting.getDate()); }else{ return "..."; } } } }
#14 Page titles not updating on date change
app/src/main/java/pl/edu/amu/wmi/wmitimetable/MainActivity.java
#14 Page titles not updating on date change
Java
mit
d7ef82197c61ac30fb4ca8931ec4e369453e0084
0
xxyy/xyc
/* * Copyright (c) 2013 - 2015 xxyy (Philipp Nowak; [email protected]). All rights reserved. * * Any usage, including, but not limited to, compiling, running, redistributing, printing, * copying and reverse-engineering is strictly prohibited without explicit written permission * from the original author and may result in legal steps being taken. * * See the included LICENSE file (core/src/main/resources) or email [email protected] for details. */ package li.l1t.common.util.inventory; import com.google.common.base.Preconditions; import org.apache.commons.lang.Validate; import org.bukkit.Color; import org.bukkit.DyeColor; import org.bukkit.Material; import org.bukkit.enchantments.Enchantment; import org.bukkit.inventory.ItemFlag; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import org.bukkit.inventory.meta.LeatherArmorMeta; import org.bukkit.inventory.meta.SkullMeta; import org.bukkit.material.MaterialData; import org.bukkit.material.Wool; import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; /** * This factory helps with creating {@link org.bukkit.inventory.ItemStack}s. Kept in this package * instead of {@link li.l1t.common.inventory} for historic reasons. * * @author <a href="http://xxyy.github.io/">xxyy</a> * @since 30/01/14 */ @SuppressWarnings("UnusedDeclaration") public class ItemStackFactory { private final ItemStack base; private String displayName; private List<String> lore; private MaterialData materialData; private ItemMeta meta; /** * Creates a factory from a base {@link org.bukkit.inventory.ItemStack}. 
* * @param source the item stack to use as base for this factory */ public ItemStackFactory(ItemStack source) { base = source; materialData = source.getData(); meta = source.getItemMeta(); //returns new meta if unset if (source.hasItemMeta()) { if (meta.hasDisplayName()) { displayName = meta.getDisplayName(); } if (meta.hasLore()) { lore = meta.getLore(); } } } /** * Creates a factory from a {@link org.bukkit.Material}. * The resulting stack will have an amount of 1. * * @param material the material of the product */ public ItemStackFactory(Material material) { base = new ItemStack(material); } /** * @param newAmount the new amount of the product * @return this factory */ public ItemStackFactory amount(int newAmount) { base.setAmount(newAmount); return this; } /** * @param displayName the new display name of the product * @return this factory */ public ItemStackFactory displayName(String displayName) { this.displayName = displayName; return this; } /** * Sets the display name of this factory if the resulting stack would not have a custom display name. * * @param defaultDisplayName the display name to set * @return this factory */ public ItemStackFactory defaultDisplayName(String defaultDisplayName) { if (!(base.hasItemMeta() && base.getItemMeta().hasDisplayName()) || displayName == null) { return displayName(defaultDisplayName); } return this; } /** * Sets the resulting item stack's lore, overriding any previous values. * * @param lore the new lore * @return this factory */ public ItemStackFactory lore(List<String> lore) { this.lore = lore; return this; } /** * Appends a collection of strings to the resulting item stack's lore, treating every element as a separate line. * If this factory was constructed with a template item stack, this method will append to its existing lore, if any. 
* * @param loreToAppend the lines to add to the lore * @return this factory */ public ItemStackFactory appendLore(Collection<String> loreToAppend) { if (this.lore == null) { return lore(loreToAppend instanceof List ? (List<String>) loreToAppend : new ArrayList<>(loreToAppend)); } this.lore.addAll(loreToAppend); return this; } /** * Adds a string to the lore of the product. If given a simple string, it will be added as * new line. If given a String containing newlines, it will split the input by {@code \n} * and add each result String to the lore. If the factory was constructed with a template item * stack, this will be appended to its existing lore, if any. * * @param whatToAdd the input string * @return this factory */ public ItemStackFactory lore(String whatToAdd) { if (lore == null) { lore = new LinkedList<>(); } Collections.addAll(lore, whatToAdd.split("\r?\n")); return this; } /** * Adds an enchantment to the product. * * @param enchantment the enchantment to apply * @param level the level of the enchantment * @return this factory */ public ItemStackFactory enchant(Enchantment enchantment, int level) { base.addEnchantment(enchantment, level); return this; } /** * Adds an enchantment to the product, but without checking level and type restrictions. 
* * @param enchantment the enchantment to apply * @param level the level of the enchantment * @return this factory */ public ItemStackFactory enchantUnsafe(Enchantment enchantment, int level) { base.addUnsafeEnchantment(enchantment, level); return this; } /** * @param newData the new {@link org.bukkit.material.MaterialData} for the product * @return this factory */ public ItemStackFactory materialData(MaterialData newData) { materialData = newData; return this; } /** * @param newData the future legacy byte data value for the product * @return this factory */ @Deprecated @SuppressWarnings("deprecation") public ItemStackFactory legacyData(byte newData) { MaterialData data = base.getData(); data.setData(newData); materialData(data); return this; } /** * Sets the color of the wool product. * * @param color the new color of the product * @return this factory * @throws IllegalArgumentException if the base stack is not of material WOOL */ public ItemStackFactory woolColor(DyeColor color) { Preconditions.checkArgument(base.getType() == Material.WOOL, "material of base stack must be WOOL (is: %s)", base.getType()); materialData = new Wool(color); base.setDurability(materialData.toItemStack().getDurability()); return this; } /** * Sets the color of the leather armor product. * * @param color the new color of the product * @return this factory * @throws IllegalArgumentException if the base stack is not of type leather armor */ public ItemStackFactory leatherArmorColor(Color color) { Preconditions.checkArgument(meta instanceof LeatherArmorMeta, "Base stack must be leather armor (is: %s)", meta.getClass()); ((LeatherArmorMeta) meta).setColor(color); return this; } /** * Sets the owner of the skull product. 
* * @param ownerName the new skull owner name * @return this factory * @throws IllegalArgumentException if the base stack is not of material SKULL_ITEM */ public ItemStackFactory skullOwner(String ownerName) { Validate.isTrue(base.getType() == Material.SKULL_ITEM, "Material of base stack must be SKULL_ITEM (" + base.getType() + ')'); ((SkullMeta) meta).setOwner(ownerName); base.setDurability((short) 3); return this; } /** * Adds given item flags to the item meta of the result. * * @param itemFlags the flags to add * @return this factory */ public ItemStackFactory withFlags(ItemFlag... itemFlags) { meta.addItemFlags(itemFlags); return this; } /** * Marks the product's item meta to hide enchantment information. * * @return this factory */ public ItemStackFactory hideEnchants() { return withFlags(ItemFlag.HIDE_ENCHANTS); } /** * Marks the product's item meta to hide all information normally shown by Minecraft in its * lore text. * * @return this factory */ public ItemStackFactory hideAll() { return withFlags( ItemFlag.HIDE_ENCHANTS, ItemFlag.HIDE_DESTROYS, ItemFlag.HIDE_ATTRIBUTES, ItemFlag.HIDE_PLACED_ON, ItemFlag.HIDE_POTION_EFFECTS, ItemFlag.HIDE_UNBREAKABLE ); } /** * Marks the product's item meta to hide enchantment information and adds a dummy enchantment * to make the item glow without enchantment data in the lore text. * * @return this factory */ public ItemStackFactory glow() { enchantUnsafe(Enchantment.WATER_WORKER, 1); return withFlags(ItemFlag.HIDE_ENCHANTS); } public ItemStack produce() { final ItemStack product = new ItemStack(base); if (materialData != null) { product.setData(materialData); } if (displayName != null || lore != null) { final ItemMeta finalMeta = (meta == null ? 
product.getItemMeta() : meta); if (lore != null) { finalMeta.setLore(lore); } if (displayName != null) { finalMeta.setDisplayName(displayName); } product.setItemMeta(finalMeta); } return product; } @Nonnull public ItemStack getBase() { return this.base; } public String getDisplayName() { return this.displayName; } public List<String> getLore() { return this.lore; } public MaterialData getMaterialData() { return this.materialData; } }
bukkit/src/main/java/li/l1t/common/util/inventory/ItemStackFactory.java
/* * Copyright (c) 2013 - 2015 xxyy (Philipp Nowak; [email protected]). All rights reserved. * * Any usage, including, but not limited to, compiling, running, redistributing, printing, * copying and reverse-engineering is strictly prohibited without explicit written permission * from the original author and may result in legal steps being taken. * * See the included LICENSE file (core/src/main/resources) or email [email protected] for details. */ package li.l1t.common.util.inventory; import com.google.common.base.Preconditions; import org.apache.commons.lang.Validate; import org.bukkit.Color; import org.bukkit.DyeColor; import org.bukkit.Material; import org.bukkit.enchantments.Enchantment; import org.bukkit.inventory.ItemFlag; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import org.bukkit.inventory.meta.LeatherArmorMeta; import org.bukkit.inventory.meta.SkullMeta; import org.bukkit.material.MaterialData; import org.bukkit.material.Wool; import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; /** * This factory helps with creating {@link org.bukkit.inventory.ItemStack}s. Kept in this package * instead of {@link li.l1t.common.inventory} for historic reasons. * * @author <a href="http://xxyy.github.io/">xxyy</a> * @since 30/01/14 */ @SuppressWarnings("UnusedDeclaration") public class ItemStackFactory { private final ItemStack base; private String displayName; private List<String> lore; private MaterialData materialData; private ItemMeta meta; /** * Creates a factory from a base {@link org.bukkit.inventory.ItemStack}. 
* * @param source the item stack to use as base for this factory */ public ItemStackFactory(ItemStack source) { base = source; materialData = source.getData(); meta = source.getItemMeta(); //returns new meta if unset if (source.hasItemMeta()) { if (meta.hasDisplayName()) { displayName = meta.getDisplayName(); } if (meta.hasLore()) { lore = meta.getLore(); } } } /** * Creates a factory from a {@link org.bukkit.Material}. * The resulting stack will have an amount of 1. * * @param material the material of the product */ public ItemStackFactory(Material material) { base = new ItemStack(material); } /** * @param newAmount the new amount of the product * @return this factory */ public ItemStackFactory amount(int newAmount) { base.setAmount(newAmount); return this; } /** * @param displayName the new display name of the product * @return this factory */ public ItemStackFactory displayName(String displayName) { this.displayName = displayName; return this; } /** * Sets the display name of this factory if the resulting stack would not have a custom display name. * * @param defaultDisplayName the display name to set * @return this factory */ public ItemStackFactory defaultDisplayName(String defaultDisplayName) { if (!(base.hasItemMeta() && base.getItemMeta().hasDisplayName()) || displayName == null) { return displayName(defaultDisplayName); } return this; } /** * Sets the resulting item stack's lore, overriding any previous values. * * @param lore the new lore * @return this factory */ public ItemStackFactory lore(List<String> lore) { this.lore = lore; return this; } /** * Appends a collection of strings to the resulting item stack's lore, treating every element as a separate line. * If this factory was constructed with a template item stack, this method will append to its existing lore, if any. 
* * @param loreToAppend the lines to add to the lore * @return this factory */ public ItemStackFactory appendLore(Collection<String> loreToAppend) { if (this.lore == null) { return lore(loreToAppend instanceof List ? (List<String>) loreToAppend : new ArrayList<>(loreToAppend)); } this.lore.addAll(loreToAppend); return this; } /** * Adds a string to the lore of the product. If given a simple string, it will be added as * new line. If given a String containing newlines, it will split the input by {@code \n} * and add each result String to the lore. If the factory was constructed with a template item * stack, this will be appended to its existing lore, if any. * * @param whatToAdd the input string * @return this factory */ public ItemStackFactory lore(String whatToAdd) { if (lore == null) { lore = new LinkedList<>(); } Collections.addAll(lore, whatToAdd.split("\n")); return this; } /** * Adds an enchantment to the product. * * @param enchantment the enchantment to apply * @param level the level of the enchantment * @return this factory */ public ItemStackFactory enchant(Enchantment enchantment, int level) { base.addEnchantment(enchantment, level); return this; } /** * Adds an enchantment to the product, but without checking level and type restrictions. 
* * @param enchantment the enchantment to apply * @param level the level of the enchantment * @return this factory */ public ItemStackFactory enchantUnsafe(Enchantment enchantment, int level) { base.addUnsafeEnchantment(enchantment, level); return this; } /** * @param newData the new {@link org.bukkit.material.MaterialData} for the product * @return this factory */ public ItemStackFactory materialData(MaterialData newData) { materialData = newData; return this; } /** * @param newData the future legacy byte data value for the product * @return this factory */ @Deprecated @SuppressWarnings("deprecation") public ItemStackFactory legacyData(byte newData) { MaterialData data = base.getData(); data.setData(newData); materialData(data); return this; } /** * Sets the color of the wool product. * * @param color the new color of the product * @return this factory * @throws IllegalArgumentException if the base stack is not of material WOOL */ public ItemStackFactory woolColor(DyeColor color) { Preconditions.checkArgument(base.getType() == Material.WOOL, "material of base stack must be WOOL (is: %s)", base.getType()); materialData = new Wool(color); base.setDurability(materialData.toItemStack().getDurability()); return this; } /** * Sets the color of the leather armor product. * * @param color the new color of the product * @return this factory * @throws IllegalArgumentException if the base stack is not of type leather armor */ public ItemStackFactory leatherArmorColor(Color color) { Preconditions.checkArgument(meta instanceof LeatherArmorMeta, "Base stack must be leather armor (is: %s)", meta.getClass()); ((LeatherArmorMeta) meta).setColor(color); return this; } /** * Sets the owner of the skull product. 
* * @param ownerName the new skull owner name * @return this factory * @throws IllegalArgumentException if the base stack is not of material SKULL_ITEM */ public ItemStackFactory skullOwner(String ownerName) { Validate.isTrue(base.getType() == Material.SKULL_ITEM, "Material of base stack must be SKULL_ITEM (" + base.getType() + ')'); ((SkullMeta) meta).setOwner(ownerName); base.setDurability((short) 3); return this; } /** * Adds given item flags to the item meta of the result. * * @param itemFlags the flags to add * @return this factory */ public ItemStackFactory withFlags(ItemFlag... itemFlags) { meta.addItemFlags(itemFlags); return this; } /** * Marks the product's item meta to hide enchantment information. * * @return this factory */ public ItemStackFactory hideEnchants() { return withFlags(ItemFlag.HIDE_ENCHANTS); } /** * Marks the product's item meta to hide all information normally shown by Minecraft in its * lore text. * * @return this factory */ public ItemStackFactory hideAll() { return withFlags( ItemFlag.HIDE_ENCHANTS, ItemFlag.HIDE_DESTROYS, ItemFlag.HIDE_ATTRIBUTES, ItemFlag.HIDE_PLACED_ON, ItemFlag.HIDE_POTION_EFFECTS, ItemFlag.HIDE_UNBREAKABLE ); } /** * Marks the product's item meta to hide enchantment information and adds a dummy enchantment * to make the item glow without enchantment data in the lore text. * * @return this factory */ public ItemStackFactory glow() { enchantUnsafe(Enchantment.WATER_WORKER, 1); return withFlags(ItemFlag.HIDE_ENCHANTS); } public ItemStack produce() { final ItemStack product = new ItemStack(base); if (materialData != null) { product.setData(materialData); } if (displayName != null || lore != null) { final ItemMeta finalMeta = (meta == null ? 
product.getItemMeta() : meta); if (lore != null) { finalMeta.setLore(lore); } if (displayName != null) { finalMeta.setDisplayName(displayName); } product.setItemMeta(finalMeta); } return product; } @Nonnull public ItemStack getBase() { return this.base; } public String getDisplayName() { return this.displayName; } public List<String> getLore() { return this.lore; } public MaterialData getMaterialData() { return this.materialData; } }
Fix ISF lore method being incompatible with Windows CRLF database support, basically
bukkit/src/main/java/li/l1t/common/util/inventory/ItemStackFactory.java
Fix ISF lore method being incompatible with Windows CRLF
Java
epl-1.0
cdc46cc5afb88ac93a921d5cc55d0920c76e93a0
0
jtrfp/terminal-recall,jtrfp/terminal-recall,jtrfp/terminal-recall
package org.jtrfp.trcl.math; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; public class Vect3D { public static void subtract(double[] l, double[] r, double[] dest) { dest[0]=l[0]-r[0]; dest[1]=l[1]-r[1]; dest[2]=l[2]-r[2]; } public static double[] scalarMultiply(double [] src, double scalar, double[] dest) { dest[0]=src[0]*scalar; dest[1]=src[1]*scalar; dest[2]=src[2]*scalar; return dest; } public static double[] normalize(double[] src){ return normalize(src,new double[3]); } public static double[] normalize(double[] src, double [] dest) { final double norm = norm(src); dest[0]=src[0]/norm; dest[1]=src[1]/norm; dest[2]=src[2]/norm; return dest; } private static double norm(double[] src) { final double x=src[0]; final double y=src[1]; final double z=src[2]; return Math.sqrt(x*x+y*y+z*z); } public static double [] add(double[] l, double rx, double ry, double rz, double[] dest) { dest[0]=l[0]+rx; dest[1]=l[1]+ry; dest[2]=l[2]+rz; return dest; } public static double[] add(double[] l, double[] r, double[] dest) { dest[0]=l[0]+r[0]; dest[1]=l[1]+r[1]; dest[2]=l[2]+r[2]; return dest; } public static double distance(double[] l, double[] r) { final double dx=l[0]-r[0]; final double dy=l[1]-r[1]; final double dz=l[2]-r[2]; return Math.sqrt(dx*dx + dy*dy + dz*dz); } public static double[] negate(double[] in) { in[0]=-in[0]; in[1]=-in[1]; in[2]=-in[2]; return in; } public static double [] cross(double [] l, double [] r, double []dest){ dest[0]=l[1]*r[2]-l[2]*r[1]; dest[1]=l[2]*r[0]-l[0]*r[2]; dest[2]=l[0]*r[1]-l[1]*r[0]; return dest; } }//end Vect3D
src/main/java/org/jtrfp/trcl/math/Vect3D.java
package org.jtrfp.trcl.math; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; public class Vect3D { public static void subtract(double[] l, double[] r, double[] dest) { dest[0]=l[0]-r[0]; dest[1]=l[1]-r[1]; dest[2]=l[2]-r[2]; } public static double[] scalarMultiply(double [] src, double scalar, double[] dest) { dest[0]=src[0]*scalar; dest[1]=src[1]*scalar; dest[2]=src[2]*scalar; return dest; } public static double[] normalize(double[] src){ return normalize(src,new double[3]); } public static double[] normalize(double[] src, double [] dest) { final double norm = norm(src); dest[0]=src[0]/norm; dest[1]=src[1]/norm; dest[2]=src[2]/norm; return dest; } private static double norm(double[] src) { final double x=src[0]; final double y=src[1]; final double z=src[2]; return Math.sqrt(x*x+y*y+z*z); } public static double [] add(double[] l, double rx, double ry, double rz, double[] dest) { dest[0]=l[0]+rx; dest[1]=l[1]+ry; dest[2]=l[2]+rz; return dest; } public static double[] add(double[] l, double[] r, double[] dest) { dest[0]=l[0]+r[0]; dest[1]=l[1]+r[1]; dest[2]=l[2]+r[2]; return dest; } public static double distance(double[] l, double[] r) { final double dx=l[0]-r[0]; final double dy=l[1]-r[1]; final double dz=l[2]-r[2]; return Math.sqrt(dx*dx + dy*dy + dz*dz); } public static double[] negate(double[] in) { in[0]=-in[0]; in[1]=-in[1]; in[2]=-in[2]; return in; } }//end Vect3D
Added cross product.
src/main/java/org/jtrfp/trcl/math/Vect3D.java
Added cross product.
Java
agpl-3.0
e64f4fe258068b093e48d7e0036edb94a3025476
0
duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot
/* * Skybot, a multipurpose discord bot * Copyright (C) 2017 Duncan "duncte123" Sterken & Ramid "ramidzkh" Khan & Sanduhr32 * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package ml.duncte123.skybot; import ml.duncte123.skybot.commands.essentials.eval.EvalCommand; import ml.duncte123.skybot.objects.guild.GuildSettings; import ml.duncte123.skybot.parsers.CommandParser; import ml.duncte123.skybot.utils.*; import net.dv8tion.jda.bot.sharding.ShardManager; import net.dv8tion.jda.core.JDA; import net.dv8tion.jda.core.Permission; import net.dv8tion.jda.core.entities.Guild; import net.dv8tion.jda.core.entities.Message; import net.dv8tion.jda.core.entities.TextChannel; import net.dv8tion.jda.core.events.ReadyEvent; import net.dv8tion.jda.core.events.ShutdownEvent; import net.dv8tion.jda.core.events.guild.GuildJoinEvent; import net.dv8tion.jda.core.events.guild.GuildLeaveEvent; import net.dv8tion.jda.core.events.guild.member.GuildMemberJoinEvent; import net.dv8tion.jda.core.events.guild.voice.GuildVoiceLeaveEvent; import net.dv8tion.jda.core.events.guild.voice.GuildVoiceMoveEvent; import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent; import net.dv8tion.jda.core.hooks.ListenerAdapter; import org.apache.commons.lang3.time.DateUtils; import org.slf4j.event.Level; import java.sql.SQLException; import java.util.*; import java.util.concurrent.TimeUnit; 
import java.util.regex.Pattern; public class BotListener extends ListenerAdapter { /** * This is the command parser */ private static CommandParser parser = new CommandParser(); /** * This filter helps us to fiter out swearing */ private BadWordFilter filter = new BadWordFilter(); /** * When a command gets ran, it'll be stored in here */ private static Map<Guild, TextChannel> lastGuildChannel = new HashMap<>(); /** * This timer is for checking unbans */ public Timer unbanTimer = new Timer(); /** * This tells us if the {@link #unbanTimer} is running */ public boolean unbanTimerRunning = false; /** * This timer is for checking new quotes */ public Timer settingsUpdateTimer = new Timer(); /** * This tells us if the {@link #settingsUpdateTimer} is running */ public boolean settingsUpdateTimerRunning = false; /** * Listen for messages send to the bot * * @param event The corresponding {@link GuildMessageReceivedEvent} */ @Override public void onGuildMessageReceived(GuildMessageReceivedEvent event) { //We only want to respond to members/users if (event.getAuthor().isFake() || event.getAuthor().isBot() || event.getMember() == null) { return; } GuildSettings settings = GuildSettingsUtils.getGuild(event.getGuild()); if (event.getMessage().getContent().equals(Settings.prefix + "shutdown") && Arrays.asList(Settings.wbkxwkZPaG4ni5lm8laY).contains(event.getAuthor().getId())) { AirUtils.log(Level.INFO, "Initialising shutdown!!!"); ShardManager manager = event.getJDA().asBot().getShardManager(); for (JDA shard : manager.getShards()) { AirUtils.log(Level.INFO, "Shard " + shard.getShardInfo().getShardId() + " has been shut down"); shard.shutdown(); } //Kill other things ((EvalCommand) AirUtils.commandManager.getCommand("eval")).shutdown(); if (unbanTimerRunning) { this.unbanTimer.cancel(); this.unbanTimer.purge(); } if (settingsUpdateTimerRunning) { this.settingsUpdateTimer.cancel(); this.settingsUpdateTimer.purge(); } try { AirUtils.db.getConnManager().getConnection().close(); } 
catch (SQLException e) { } System.exit(0); return; } Permission[] adminPerms = { Permission.MESSAGE_MANAGE }; if (event.getGuild().getSelfMember().hasPermission(adminPerms) && AirUtils.guildSettings.get(event.getGuild().getId()).isEnableSwearFilter()) { if (!event.getMember().hasPermission(adminPerms)) { Message messageToCheck = event.getMessage(); if (filter.filterText(messageToCheck.getRawContent())) { messageToCheck.delete().reason("Blocked for bad swearing: " + messageToCheck.getContent()).queue(); event.getChannel().sendMessage("Hello there, " + event.getAuthor().getAsMention() + " please do not use cursive language within this Discord.").queue( m -> m.delete().queueAfter(10, TimeUnit.SECONDS)); return; } } } //If the topic contains -commands ignore it if (event.getChannel().getTopic() != null && event.getChannel().getTopic().contains("-commands")) { return; } if (!event.getMessage().getRawContent().startsWith(Settings.prefix) && !event.getMessage().getRawContent().startsWith(settings.getCustomPrefix())) { return; } else if (event.getMessage().getMentionedUsers().contains(event.getJDA().getSelfUser()) && event.getChannel().canTalk()) { if (!event.getMessage().getRawContent().startsWith(event.getJDA().getSelfUser().getAsMention())) { event.getChannel().sendMessage("Hey <@" + event.getAuthor().getId() + ">, try `" + Settings.prefix + "help` for a list of commands. 
If it doesn't work scream at _duncte123#1245_").queue(); return; } } // run the a command lastGuildChannel.put(event.getGuild(), event.getChannel()); String rw = event.getMessage().getRawContent(); if (!Settings.prefix.equals(settings.getCustomPrefix())) { rw = rw.replaceFirst( Pattern.quote(settings.getCustomPrefix()), Settings.prefix); } AirUtils.commandManager.runCommand(parser.parse(rw.replaceFirst("<@" + event.getJDA().getSelfUser().getId() + "> ", Settings.prefix) , event )); } /** * When the bot is ready to go * * @param event The corresponding {@link ReadyEvent} */ @Override public void onReady(ReadyEvent event){ AirUtils.log(Level.INFO, "Logged in as " + String.format("%#s", event.getJDA().getSelfUser()) + " (Shard #" + event.getJDA().getShardInfo().getShardId() + ")"); //Start the timers if they have not been started yet if (!unbanTimerRunning && AirUtils.nonsqlite) { AirUtils.log(Level.INFO, "Starting the unban timer."); //Register the timer for the auto unbans //I moved the timer here to make sure that every running jar has this only once TimerTask unbanTask = new TimerTask() { @Override public void run() { AirUtils.checkUnbans(event.getJDA().asBot().getShardManager()); } }; unbanTimer.schedule(unbanTask, DateUtils.MILLIS_PER_MINUTE * 10, DateUtils.MILLIS_PER_MINUTE * 10); unbanTimerRunning = true; } if (!settingsUpdateTimerRunning && AirUtils.nonsqlite) { AirUtils.log(Level.INFO, "Starting the settings timer."); //This handles the updating from the setting and quotes TimerTask settingsTask = new TimerTask() { @Override public void run() { GuildSettingsUtils.loadAllSettings(); } }; settingsUpdateTimer.schedule(settingsTask, DateUtils.MILLIS_PER_HOUR, DateUtils.MILLIS_PER_HOUR); settingsUpdateTimerRunning = true; } } /** * This will fire when a new member joins * * @param event The corresponding {@link GuildMemberJoinEvent} */ @Override public void onGuildMemberJoin(GuildMemberJoinEvent event) { /* {{USER_MENTION}} = mention user {{USER_NAME}} = return 
username {{GUILD_NAME}} = the name of the guild {{GUILD_USER_COUNT}} = member count {{GUILD_OWNER_MENTION}} = mention the guild owner {{GUILD_OWNER_NAME}} = return the name form the owner */ GuildSettings settings = GuildSettingsUtils.getGuild(event.getGuild()); if (settings.isEnableJoinMessage()) { TextChannel publicChannel = AirUtils.getPublicChannel(event.getGuild()); String msg = settings.getCustomJoinMessage() .replaceAll("\\{\\{USER_MENTION}}", event.getUser().getAsMention()) .replaceAll("\\{\\{USER_NAME}}", event.getUser().getName()) .replaceAll("\\{\\{GUILD_NAME}}", event.getGuild().getName()) .replaceAll("\\{\\{GUILD_USER_COUNT}}", event.getGuild().getMemberCache().size() + ""); publicChannel.sendMessage(msg).queue(); } } /** * This will fire when the bot joins a guild and we check if we are allowed to join this guild * * @param event The corresponding {@link GuildJoinEvent} */ @Override public void onGuildJoin(GuildJoinEvent event) { //if 60 of a guild is bots, we'll leave it double[] botToUserRatio = AirUtils.getBotRatio(event.getGuild()); if (botToUserRatio[1] > 60) { AirUtils.getPublicChannel(event.getGuild()).sendMessage("Hey " + event.getGuild().getOwner().getAsMention() + ", " + botToUserRatio[1] + "% of this guild are bots (" + event.getGuild().getMemberCache().size() + " is the total btw). " + "I'm outta here").queue( message -> message.getGuild().leave().queue() ); AirUtils.log(Settings.defaultName + "GuildJoin", Level.INFO, "Joining guild: " + event.getGuild().getName() + ", and leaving it after. 
BOT ALERT"); return; } AirUtils.log(Settings.defaultName + "GuildJoin", Level.INFO, "Joining guild: " + event.getGuild().getName() + "."); GuildSettingsUtils.registerNewGuild(event.getGuild()); AirUtils.updateGuildCount(event.getJDA(), event.getJDA().asBot().getShardManager().getGuildCache().size()); } @Override public void onGuildLeave(GuildLeaveEvent event) { GuildSettingsUtils.deleteGuild(event.getGuild()); AirUtils.updateGuildCount(event.getJDA(), event.getJDA().asBot().getShardManager().getGuildCache().size()); } /** * This will fire when a member leaves a channel in a guild, we check if the channel is empty and if it is we leave it * * @param event {@link GuildVoiceLeaveEvent} */ @Override public void onGuildVoiceLeave(GuildVoiceLeaveEvent event) { if (!event.getVoiceState().getMember().getUser().getId().equals(event.getJDA().getSelfUser().getId()) && event.getGuild().getAudioManager().isConnected()) { if (!event.getChannelLeft().getId().equals(event.getGuild().getAudioManager().getConnectedChannel().getId())) { return; } if (event.getChannelLeft().getMembers().size() <= 1) { AirUtils.audioUtils.getMusicManager(event.getGuild()).player.stopTrack(); AirUtils.audioUtils.getMusicManager(event.getGuild()).player.setPaused(false); AirUtils.audioUtils.getMusicManager(event.getGuild()).scheduler.queue.clear(); lastGuildChannel.get(event.getGuild()).sendMessage(EmbedUtils.embedMessage("Leaving voice channel because all the members have left it.")).queue(); if (event.getGuild().getAudioManager().isConnected()) { event.getGuild().getAudioManager().closeAudioConnection(); event.getGuild().getAudioManager().setSendingHandler(null); } } } } /** * This will fire when a member moves from channel, if a member moves we will check if our channel is empty * * @param event {@link GuildVoiceMoveEvent} */ @Override public void onGuildVoiceMove(GuildVoiceMoveEvent event) { if (!event.getVoiceState().getMember().getUser().getId().equals(event.getJDA().getSelfUser().getId()) && 
event.getGuild().getAudioManager().isConnected()) { if (event.getChannelLeft() != null) { if (!event.getChannelLeft().getId().equals(event.getGuild().getAudioManager().getConnectedChannel().getId())) { return; } if (event.getChannelLeft().getMembers().size() <= 1) { AirUtils.audioUtils.getMusicManager(event.getGuild()).player.stopTrack(); AirUtils.audioUtils.getMusicManager(event.getGuild()).player.setPaused(false); AirUtils.audioUtils.getMusicManager(event.getGuild()).scheduler.queue.clear(); lastGuildChannel.get(event.getGuild()).sendMessage(EmbedUtils.embedMessage("Leaving voice channel because all the members have left it.")).queue(); if (event.getGuild().getAudioManager().isConnected()) { event.getGuild().getAudioManager().closeAudioConnection(); event.getGuild().getAudioManager().setSendingHandler(null); } } } if (event.getChannelJoined() != null) { if (!event.getChannelJoined().getId().equals(event.getGuild().getAudioManager().getConnectedChannel().getId())) { return; } if (event.getChannelJoined().getMembers().size() <= 1) { AirUtils.audioUtils.getMusicManager(event.getGuild()).player.stopTrack(); AirUtils.audioUtils.getMusicManager(event.getGuild()).player.setPaused(false); AirUtils.audioUtils.getMusicManager(event.getGuild()).scheduler.queue.clear(); lastGuildChannel.get(event.getGuild()).sendMessage(EmbedUtils.embedMessage("Leaving voice channel because all the members have left it.")).queue(); if (event.getGuild().getAudioManager().isConnected()) { event.getGuild().getAudioManager().setSendingHandler(null); event.getGuild().getAudioManager().closeAudioConnection(); } } } } } }
src/main/java/ml/duncte123/skybot/BotListener.java
/* * Skybot, a multipurpose discord bot * Copyright (C) 2017 Duncan "duncte123" Sterken & Ramid "ramidzkh" Khan & Sanduhr32 * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package ml.duncte123.skybot; import ml.duncte123.skybot.commands.essentials.eval.EvalCommand; import ml.duncte123.skybot.objects.guild.GuildSettings; import ml.duncte123.skybot.parsers.CommandParser; import ml.duncte123.skybot.utils.*; import net.dv8tion.jda.bot.sharding.ShardManager; import net.dv8tion.jda.core.JDA; import net.dv8tion.jda.core.Permission; import net.dv8tion.jda.core.entities.Guild; import net.dv8tion.jda.core.entities.Message; import net.dv8tion.jda.core.entities.TextChannel; import net.dv8tion.jda.core.events.ReadyEvent; import net.dv8tion.jda.core.events.ShutdownEvent; import net.dv8tion.jda.core.events.guild.GuildJoinEvent; import net.dv8tion.jda.core.events.guild.GuildLeaveEvent; import net.dv8tion.jda.core.events.guild.member.GuildMemberJoinEvent; import net.dv8tion.jda.core.events.guild.voice.GuildVoiceLeaveEvent; import net.dv8tion.jda.core.events.guild.voice.GuildVoiceMoveEvent; import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent; import net.dv8tion.jda.core.hooks.ListenerAdapter; import org.apache.commons.lang3.time.DateUtils; import org.slf4j.event.Level; import java.sql.SQLException; import java.util.*; import java.util.concurrent.TimeUnit; 
import java.util.regex.Pattern; public class BotListener extends ListenerAdapter { /** * This is the command parser */ private static CommandParser parser = new CommandParser(); /** * This filter helps us to fiter out swearing */ private BadWordFilter filter = new BadWordFilter(); /** * When a command gets ran, it'll be stored in here */ private static Map<Guild, TextChannel> lastGuildChannel = new HashMap<>(); /** * This timer is for checking unbans */ public Timer unbanTimer = new Timer(); /** * This tells us if the {@link #unbanTimer} is running */ public boolean unbanTimerRunning = false; /** * This timer is for checking new quotes */ public Timer settingsUpdateTimer = new Timer(); /** * This tells us if the {@link #settingsUpdateTimer} is running */ public boolean settingsUpdateTimerRunning = false; /** * Listen for messages send to the bot * * @param event The corresponding {@link GuildMessageReceivedEvent} */ @Override public void onGuildMessageReceived(GuildMessageReceivedEvent event) { //We only want to respond to members/users if (event.getAuthor().isFake() || event.getAuthor().isBot() || event.getMember() == null) { return; } GuildSettings settings = GuildSettingsUtils.getGuild(event.getGuild()); if (event.getMessage().getContent().equals(Settings.prefix + "shutdown") && Arrays.asList(Settings.wbkxwkZPaG4ni5lm8laY).contains(event.getAuthor().getId())) { AirUtils.log(Level.INFO, "Initialising shutdown!!!"); ShardManager manager = event.getJDA().asBot().getShardManager(); for (JDA shard : manager.getShards()) { AirUtils.log(Level.INFO, "Shard " + shard.getShardInfo().getShardId() + " has been shut down"); shard.shutdown(); } //Kill other things ((EvalCommand) AirUtils.commandManager.getCommand("eval")).shutdown(); if (unbanTimerRunning) { this.unbanTimer.cancel(); this.unbanTimer.purge(); } if (settingsUpdateTimerRunning) { this.settingsUpdateTimer.cancel(); this.settingsUpdateTimer.purge(); } try { AirUtils.db.getConnManager().getConnection().close(); } 
catch (SQLException e) { } System.exit(0); return; } Permission[] adminPerms = { Permission.MESSAGE_MANAGE }; if (event.getGuild().getSelfMember().hasPermission(adminPerms) && AirUtils.guildSettings.get(event.getGuild().getId()).isEnableSwearFilter()) { if (!event.getMember().hasPermission(adminPerms)) { Message messageToCheck = event.getMessage(); if (filter.filterText(messageToCheck.getRawContent())) { messageToCheck.delete().reason("Blocked for bad swearing: " + messageToCheck.getContent()).queue(); event.getChannel().sendMessage("Hello there, " + event.getAuthor().getAsMention() + " please do not use cursive language within this Discord.").queue( m -> m.delete().queueAfter(10, TimeUnit.SECONDS)); return; } } } //If the topic contains -commands ignore it if (event.getChannel().getTopic() != null && event.getChannel().getTopic().contains("-commands")) { return; } if (!event.getMessage().getRawContent().startsWith(Settings.prefix) && !event.getMessage().getRawContent().startsWith(settings.getCustomPrefix())) { return; } else if (event.getMessage().getMentionedUsers().contains(event.getJDA().getSelfUser()) && event.getChannel().canTalk()) { if (!event.getMessage().getRawContent().startsWith(event.getJDA().getSelfUser().getAsMention())) { event.getChannel().sendMessage("Hey <@" + event.getAuthor().getId() + ">, try `" + Settings.prefix + "help` for a list of commands. 
If it doesn't work scream at _duncte123#1245_").queue(); return; } } // run the a command lastGuildChannel.put(event.getGuild(), event.getChannel()); String rw = event.getMessage().getRawContent(); if (!Settings.prefix.equals(settings.getCustomPrefix())) { rw = rw.replaceFirst( Pattern.quote(settings.getCustomPrefix()), Settings.prefix); } AirUtils.commandManager.runCommand(parser.parse(rw.replaceFirst("<@" + event.getJDA().getSelfUser().getId() + "> ", Settings.prefix) , event )); } /** * When the bot is ready to go * * @param event The corresponding {@link ReadyEvent} */ @Override public void onReady(ReadyEvent event){ AirUtils.log(Level.INFO, "Logged in as " + String.format("%#s", event.getJDA().getSelfUser()) + " (Shard #" + event.getJDA().getShardInfo().getShardId() + ")"); //Start the timers if they have not been started yet if (!unbanTimerRunning && AirUtils.nonsqlite) { AirUtils.log(Level.INFO, "Starting the unban timer."); //Register the timer for the auto unbans //I moved the timer here to make sure that every running jar has this only once TimerTask unbanTask = new TimerTask() { @Override public void run() { AirUtils.checkUnbans(event.getJDA().asBot().getShardManager()); } }; unbanTimer.schedule(unbanTask, DateUtils.MILLIS_PER_MINUTE * 10, DateUtils.MILLIS_PER_MINUTE * 10); unbanTimerRunning = true; } if (!settingsUpdateTimerRunning && AirUtils.nonsqlite) { AirUtils.log(Level.INFO, "Starting the settings timer."); //This handles the updating from the setting and quotes TimerTask settingsTask = new TimerTask() { @Override public void run() { GuildSettingsUtils.loadAllSettings(); } }; settingsUpdateTimer.schedule(settingsTask, DateUtils.MILLIS_PER_HOUR, DateUtils.MILLIS_PER_HOUR); settingsUpdateTimerRunning = true; } } @Override public void onShutdown(ShutdownEvent event) { } /** * This will fire when a new member joins * * @param event The corresponding {@link GuildMemberJoinEvent} */ @Override public void onGuildMemberJoin(GuildMemberJoinEvent event) { 
/* {{USER_MENTION}} = mention user {{USER_NAME}} = return username {{GUILD_NAME}} = the name of the guild {{GUILD_USER_COUNT}} = member count {{GUILD_OWNER_MENTION}} = mention the guild owner {{GUILD_OWNER_NAME}} = return the name form the owner */ GuildSettings settings = GuildSettingsUtils.getGuild(event.getGuild()); if (settings.isEnableJoinMessage()) { TextChannel publicChannel = AirUtils.getPublicChannel(event.getGuild()); String msg = settings.getCustomJoinMessage() .replaceAll("\\{\\{USER_MENTION}}", event.getUser().getAsMention()) .replaceAll("\\{\\{USER_NAME}}", event.getUser().getName()) .replaceAll("\\{\\{GUILD_NAME}}", event.getGuild().getName()) .replaceAll("\\{\\{GUILD_USER_COUNT}}", event.getGuild().getMemberCache().size() + ""); publicChannel.sendMessage(msg).queue(); } } /** * This will fire when the bot joins a guild and we check if we are allowed to join this guild * * @param event The corresponding {@link GuildJoinEvent} */ @Override public void onGuildJoin(GuildJoinEvent event) { //if 60 of a guild is bots, we'll leave it double[] botToUserRatio = AirUtils.getBotRatio(event.getGuild()); if (botToUserRatio[1] > 60) { AirUtils.getPublicChannel(event.getGuild()).sendMessage("Hey " + event.getGuild().getOwner().getAsMention() + ", " + botToUserRatio[1] + "% of this guild are bots (" + event.getGuild().getMemberCache().size() + " is the total btw). " + "I'm outta here").queue( message -> message.getGuild().leave().queue() ); AirUtils.log(Settings.defaultName + "GuildJoin", Level.INFO, "Joining guild: " + event.getGuild().getName() + ", and leaving it after. 
BOT ALERT"); return; } AirUtils.log(Settings.defaultName + "GuildJoin", Level.INFO, "Joining guild: " + event.getGuild().getName() + "."); GuildSettingsUtils.registerNewGuild(event.getGuild()); AirUtils.updateGuildCount(event.getJDA(), event.getJDA().asBot().getShardManager().getGuildCache().size()); } @Override public void onGuildLeave(GuildLeaveEvent event) { GuildSettingsUtils.deleteGuild(event.getGuild()); AirUtils.updateGuildCount(event.getJDA(), event.getJDA().asBot().getShardManager().getGuildCache().size()); } /** * This will fire when a member leaves a channel in a guild, we check if the channel is empty and if it is we leave it * * @param event {@link GuildVoiceLeaveEvent} */ @Override public void onGuildVoiceLeave(GuildVoiceLeaveEvent event) { if (!event.getVoiceState().getMember().getUser().getId().equals(event.getJDA().getSelfUser().getId()) && event.getGuild().getAudioManager().isConnected()) { if (!event.getChannelLeft().getId().equals(event.getGuild().getAudioManager().getConnectedChannel().getId())) { return; } if (event.getChannelLeft().getMembers().size() <= 1) { AirUtils.audioUtils.getMusicManager(event.getGuild()).player.stopTrack(); AirUtils.audioUtils.getMusicManager(event.getGuild()).player.setPaused(false); AirUtils.audioUtils.getMusicManager(event.getGuild()).scheduler.queue.clear(); lastGuildChannel.get(event.getGuild()).sendMessage(EmbedUtils.embedMessage("Leaving voice channel because all the members have left it.")).queue(); if (event.getGuild().getAudioManager().isConnected()) { event.getGuild().getAudioManager().closeAudioConnection(); event.getGuild().getAudioManager().setSendingHandler(null); } } } } /** * This will fire when a member moves from channel, if a member moves we will check if our channel is empty * * @param event {@link GuildVoiceMoveEvent} */ @Override public void onGuildVoiceMove(GuildVoiceMoveEvent event) { if (!event.getVoiceState().getMember().getUser().getId().equals(event.getJDA().getSelfUser().getId()) && 
event.getGuild().getAudioManager().isConnected()) { if (event.getChannelLeft() != null) { if (!event.getChannelLeft().getId().equals(event.getGuild().getAudioManager().getConnectedChannel().getId())) { return; } if (event.getChannelLeft().getMembers().size() <= 1) { AirUtils.audioUtils.getMusicManager(event.getGuild()).player.stopTrack(); AirUtils.audioUtils.getMusicManager(event.getGuild()).player.setPaused(false); AirUtils.audioUtils.getMusicManager(event.getGuild()).scheduler.queue.clear(); lastGuildChannel.get(event.getGuild()).sendMessage(EmbedUtils.embedMessage("Leaving voice channel because all the members have left it.")).queue(); if (event.getGuild().getAudioManager().isConnected()) { event.getGuild().getAudioManager().closeAudioConnection(); event.getGuild().getAudioManager().setSendingHandler(null); } } } if (event.getChannelJoined() != null) { if (!event.getChannelJoined().getId().equals(event.getGuild().getAudioManager().getConnectedChannel().getId())) { return; } if (event.getChannelJoined().getMembers().size() <= 1) { AirUtils.audioUtils.getMusicManager(event.getGuild()).player.stopTrack(); AirUtils.audioUtils.getMusicManager(event.getGuild()).player.setPaused(false); AirUtils.audioUtils.getMusicManager(event.getGuild()).scheduler.queue.clear(); lastGuildChannel.get(event.getGuild()).sendMessage(EmbedUtils.embedMessage("Leaving voice channel because all the members have left it.")).queue(); if (event.getGuild().getAudioManager().isConnected()) { event.getGuild().getAudioManager().setSendingHandler(null); event.getGuild().getAudioManager().closeAudioConnection(); } } } } } }
Remove unused method
src/main/java/ml/duncte123/skybot/BotListener.java
Remove unused method
Java
agpl-3.0
30eb9fec6ad467b8cb1a46de9a4a5aee66b1827e
0
opencadc/caom2,opencadc/caom2,opencadc/caom2,opencadc/caom2
/* ************************************************************************ **** C A N A D I A N A S T R O N O M Y D A T A C E N T R E ***** * * (c) 2017. (c) 2017. * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 * All rights reserved Tous droits reserves * * NRC disclaims any warranties Le CNRC denie toute garantie * expressed, implied, or statu- enoncee, implicite ou legale, * tory, of any kind with respect de quelque nature que se soit, * to the software, including concernant le logiciel, y com- * without limitation any war- pris sans restriction toute * ranty of merchantability or garantie de valeur marchande * fitness for a particular pur- ou de pertinence pour un usage * pose. NRC shall not be liable particulier. Le CNRC ne * in any event for any damages, pourra en aucun cas etre tenu * whether direct or indirect, responsable de tout dommage, * special or general, consequen- direct ou indirect, particul- * tial or incidental, arising ier ou general, accessoire ou * from the use of the software. fortuit, resultant de l'utili- * sation du logiciel. * **** C A N A D I A N A S T R O N O M Y D A T A C E N T R E ***** ************************************************************************ */ package ca.nrc.cadc.caom2.artifact.resolvers; import ca.nrc.cadc.caom2.artifact.resolvers.util.ResolverUtil; import ca.nrc.cadc.net.StorageResolver; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import org.apache.log4j.Logger; /** * This class can convert a GEMINI URI into a URL. 
* * @author jeevesh */ public class GeminiResolver implements StorageResolver { public static final String SCHEME = "gemini"; public static final String ARCHIVE = "Gemini"; public static final String FILE_URI = "file"; public static final String PREVIEW_URI = "preview"; private static final Logger log = Logger.getLogger(GeminiResolver.class); private static final String BASE_URL = "https://archive.gemini.edu"; private static final String JPEG_SUFFIX = ".jpg"; public GeminiResolver() { } @Override public URL toURL(URI uri) { ResolverUtil.validate(uri, SCHEME); String urlStr = ""; try { String path = getPath(uri); urlStr = BASE_URL + path; URL url = null; if (urlStr != null) { url = new URL(urlStr); } log.debug(uri + " --> " + url); return url; } catch (MalformedURLException ex) { throw new RuntimeException("BUG: could not generate URL from uri " + urlStr, ex); } } private String getPath(URI uri) { String[] path = uri.getSchemeSpecificPart().split("/"); if (path.length != 2) { throw new IllegalArgumentException("Malformed URI. Expected 2 path components, found " + path.length); } String archive = path[0]; if (!(archive.equals(ARCHIVE))) { throw new IllegalArgumentException("Invalid URI. Expected archive: " + ARCHIVE + ", actual archive: " + archive); } String fileName = path[1]; String fileType = FILE_URI; if (fileName.endsWith(JPEG_SUFFIX)) { fileName = fileName.substring(0, fileName.length() - JPEG_SUFFIX.length()) + ".fits"; fileType = PREVIEW_URI; } StringBuilder sb = new StringBuilder(); sb.append("/"); sb.append(fileType); sb.append("/"); sb.append(fileName); return sb.toString(); } @Override public String getScheme() { return SCHEME; } }
caom2-artifact-resolvers/src/main/java/ca/nrc/cadc/caom2/artifact/resolvers/GeminiResolver.java
/* ************************************************************************ **** C A N A D I A N A S T R O N O M Y D A T A C E N T R E ***** * * (c) 2017. (c) 2017. * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 * All rights reserved Tous droits reserves * * NRC disclaims any warranties Le CNRC denie toute garantie * expressed, implied, or statu- enoncee, implicite ou legale, * tory, of any kind with respect de quelque nature que se soit, * to the software, including concernant le logiciel, y com- * without limitation any war- pris sans restriction toute * ranty of merchantability or garantie de valeur marchande * fitness for a particular pur- ou de pertinence pour un usage * pose. NRC shall not be liable particulier. Le CNRC ne * in any event for any damages, pourra en aucun cas etre tenu * whether direct or indirect, responsable de tout dommage, * special or general, consequen- direct ou indirect, particul- * tial or incidental, arising ier ou general, accessoire ou * from the use of the software. fortuit, resultant de l'utili- * sation du logiciel. * **** C A N A D I A N A S T R O N O M Y D A T A C E N T R E ***** ************************************************************************ */ package ca.nrc.cadc.caom2.artifact.resolvers; import ca.nrc.cadc.caom2.artifact.resolvers.util.ResolverUtil; import ca.nrc.cadc.net.StorageResolver; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import org.apache.log4j.Logger; /** * This class can convert a GEMINI URI into a URL. 
* * @author jeevesh */ public class GeminiResolver implements StorageResolver { public static final String SCHEME = "gemini"; public static final String ARCHIVE = "GEM"; public static final String FILE_URI = "file"; public static final String PREVIEW_URI = "preview"; private static final Logger log = Logger.getLogger(GeminiResolver.class); private static final String BASE_URL = "https://archive.gemini.edu"; private static final String JPEG_SUFFIX = ".jpg"; public GeminiResolver() { } @Override public URL toURL(URI uri) { ResolverUtil.validate(uri, SCHEME); String urlStr = ""; try { String path = getPath(uri); urlStr = BASE_URL + path; URL url = null; if (urlStr != null) { url = new URL(urlStr); } log.debug(uri + " --> " + url); return url; } catch (MalformedURLException ex) { throw new RuntimeException("BUG: could not generate URL from uri " + urlStr, ex); } } private String getPath(URI uri) { String[] path = uri.getSchemeSpecificPart().split("/"); if (path.length != 2) { throw new IllegalArgumentException("Malformed URI. Expected 2 path components, found " + path.length); } String archive = path[0]; if (!(archive.equals(ARCHIVE))) { throw new IllegalArgumentException("Invalid URI. Expected archive: " + ARCHIVE + ", actual archive: " + archive); } String fileName = path[1]; String fileType = FILE_URI; if (fileName.endsWith(JPEG_SUFFIX)) { fileName = fileName.substring(0, fileName.length() - JPEG_SUFFIX.length()) + ".fits"; fileType = PREVIEW_URI; } StringBuilder sb = new StringBuilder(); sb.append("/"); sb.append(fileType); sb.append("/"); sb.append(fileName); return sb.toString(); } @Override public String getScheme() { return SCHEME; } }
Update Gemini archive name in GeminiResolver
caom2-artifact-resolvers/src/main/java/ca/nrc/cadc/caom2/artifact/resolvers/GeminiResolver.java
Update Gemini archive name in GeminiResolver
Java
agpl-3.0
8f32047f832dafb94c6c0502f041aeff324b8066
0
shunwang/sql-layer-1,relateiq/sql-layer,wfxiang08/sql-layer-1,jaytaylor/sql-layer,relateiq/sql-layer,jaytaylor/sql-layer,shunwang/sql-layer-1,qiuyesuifeng/sql-layer,qiuyesuifeng/sql-layer,ngaut/sql-layer,ngaut/sql-layer,ngaut/sql-layer,shunwang/sql-layer-1,wfxiang08/sql-layer-1,wfxiang08/sql-layer-1,relateiq/sql-layer,ngaut/sql-layer,qiuyesuifeng/sql-layer,jaytaylor/sql-layer,wfxiang08/sql-layer-1,shunwang/sql-layer-1,jaytaylor/sql-layer,qiuyesuifeng/sql-layer,relateiq/sql-layer
/** * END USER LICENSE AGREEMENT (“EULA”) * * READ THIS AGREEMENT CAREFULLY (date: 9/13/2011): * http://www.akiban.com/licensing/20110913 * * BY INSTALLING OR USING ALL OR ANY PORTION OF THE SOFTWARE, YOU ARE ACCEPTING * ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. YOU AGREE THAT THIS * AGREEMENT IS ENFORCEABLE LIKE ANY WRITTEN AGREEMENT SIGNED BY YOU. * * IF YOU HAVE PAID A LICENSE FEE FOR USE OF THE SOFTWARE AND DO NOT AGREE TO * THESE TERMS, YOU MAY RETURN THE SOFTWARE FOR A FULL REFUND PROVIDED YOU (A) DO * NOT USE THE SOFTWARE AND (B) RETURN THE SOFTWARE WITHIN THIRTY (30) DAYS OF * YOUR INITIAL PURCHASE. * * IF YOU WISH TO USE THE SOFTWARE AS AN EMPLOYEE, CONTRACTOR, OR AGENT OF A * CORPORATION, PARTNERSHIP OR SIMILAR ENTITY, THEN YOU MUST BE AUTHORIZED TO SIGN * FOR AND BIND THE ENTITY IN ORDER TO ACCEPT THE TERMS OF THIS AGREEMENT. THE * LICENSES GRANTED UNDER THIS AGREEMENT ARE EXPRESSLY CONDITIONED UPON ACCEPTANCE * BY SUCH AUTHORIZED PERSONNEL. * * IF YOU HAVE ENTERED INTO A SEPARATE WRITTEN LICENSE AGREEMENT WITH AKIBAN FOR * USE OF THE SOFTWARE, THE TERMS AND CONDITIONS OF SUCH OTHER AGREEMENT SHALL * PREVAIL OVER ANY CONFLICTING TERMS OR CONDITIONS IN THIS AGREEMENT. */ package com.akiban.sql.optimizer.rule; import com.akiban.sql.optimizer.plan.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; /** Move WHERE clauses closer to their table origin. * This rule runs after joins of various sorts has been laid out, but * before while they are still in data-flow order. * * Note: <i>prepone</i>, while not an American or British English * word, is the transparent opposite of <i>postpone</i>. */ // TODO: Something similar is needed to handle moving HAVING // conditions on the group by fields across the aggregation boundary // and WHERE conditions on subqueries (views) into the subquery // itself. These need to run earlier to affect indexing. Not sure how // to integrate all these. 
Maybe move everything earlier on and then // recognize joins of such filtered tables as Joinable. public class SelectPreponer extends BaseRule { private static final Logger logger = LoggerFactory.getLogger(SelectPreponer.class); @Override protected Logger getLogger() { return logger; } @Override public void apply(PlanContext plan) { TableOriginFinder finder = new TableOriginFinder(); finder.find(plan.getPlan()); Preponer preponer = new Preponer(); for (PlanNode origin : finder.getOrigins()) { preponer.addOrigin(origin); } preponer.moveDeferred(); } /** Find all the places where data starts, such as * <code>IndexScan</code> and <code><i>XxxLookup</i></code>. */ static class TableOriginFinder implements PlanVisitor, ExpressionVisitor { List<PlanNode> origins = new ArrayList<PlanNode>(); public void find(PlanNode root) { root.accept(this); } public List<PlanNode> getOrigins() { return origins; } @Override public boolean visitEnter(PlanNode n) { return visit(n); } @Override public boolean visitLeave(PlanNode n) { return true; } @Override public boolean visit(PlanNode n) { if (n instanceof IndexScan) { origins.add(n); } else if (n instanceof TableLoader) { if (n instanceof BasePlanWithInput) { PlanNode input = ((BasePlanWithInput)n).getInput(); if (!((input instanceof TableLoader) || (input instanceof IndexScan))) { // Will put input in, so don't bother putting both in. origins.add(n); } } else { origins.add(n); } } return true; } @Override public boolean visitEnter(ExpressionNode n) { return visit(n); } @Override public boolean visitLeave(ExpressionNode n) { return true; } @Override public boolean visit(ExpressionNode n) { return true; } } /** Holds the state of a single side of a loop, which usually * means a group with its in-group joins. */ static class Loop { Map<TableSource,PlanNode> loaders; // Lookup operators. Map<ExpressionNode,PlanNode> indexColumns; // Individual columns of IndexScan. 
List<PlanNode> flattens; // Flatten & Product operators that do in-group join. Map<PlanNode,Set<TableSource>> flattened; // Tables that participate in those. public Loop() { loaders = new HashMap<TableSource,PlanNode>(); } public void setIndex(IndexScan index) { indexColumns = new HashMap<ExpressionNode,PlanNode>(); for (ExpressionNode column : index.getColumns()) { if (column != null) { indexColumns.put(column, index); } } } public void addLoader(PlanNode loader) { for (TableSource table : ((TableLoader)loader).getTables()) { loaders.put(table, loader); } } /** Add a within-group join: Flatten or Product. */ public Set<TableSource> addFlattenOrProduct(PlanNode join) { if (flattens == null) flattens = new ArrayList<PlanNode>(); flattens.add(join); // Might be able to place multi-table conditions after a flatten join, // so record what is available. if (flattened == null) flattened = new HashMap<PlanNode,Set<TableSource>>(); Set<TableSource> tables = new HashSet<TableSource>(loaders.keySet()); flattened.put(join, tables); return tables; } public void addFlatten(Flatten flatten) { // Limit to tables that are inner joined (and on the outer // side of outer joins.) Set<TableSource> inner = flatten.getInnerJoinedTables(); loaders.keySet().retainAll(inner); if (indexColumns != null) { Iterator<ExpressionNode> iter = indexColumns.keySet().iterator(); while (iter.hasNext()) { ExpressionNode expr = iter.next(); if (expr.isColumn() && !inner.contains(((ColumnExpression)expr).getTable())) iter.remove(); } } // A Flatten can get more tables than directly feed it when in a Product. // Really, it's the Lookup_Nested that gets them, but the // sources don't advertize that, since only one node is // allowed to introduce a table. addFlattenOrProduct(flatten).addAll(inner); } /** Merge another loop into this one. Although * <code>Product</code> starts with separate lookup operators, * it's a single loop for purposes of nesting. 
*/ public Loop merge(Loop other, PlanNode before) { loaders.putAll(other.loaders); if (indexColumns == null) indexColumns = other.indexColumns; else if (other.indexColumns != null) indexColumns.putAll(other.indexColumns); if (flattens == null) flattens = other.flattens; else if (other.flattens != null) { int i = -1; if (before != null) i = flattens.indexOf(before); if (i < 0) i = flattens.size(); for (PlanNode flatten : other.flattens) { if (flatten == before) break; flattens.add(i++, flatten); } } if (flattened == null) flattened = other.flattened; else if (other.flattened != null) { for (Map.Entry<PlanNode,Set<TableSource>> entry : other.flattened.entrySet()) { Set<TableSource> existing = flattened.get(entry.getKey()); if (existing != null) existing.addAll(entry.getValue()); else flattened.put(entry.getKey(), entry.getValue()); } } return this; } /** Does this loop have any interesting state? */ public boolean isEmpty() { return ((flattens == null) || (loaders.isEmpty() && ((indexColumns == null) || indexColumns.isEmpty()))); } /** Does this loop consist solely of an index? */ public boolean indexOnly() { return (loaders.isEmpty() && !((indexColumns == null) || indexColumns.isEmpty())); } } /** Move conditions as follows: * * Starting with index scans and lookup operators, trace * downstream, adding tables from additional such operators. When * we come to a <code>Product</code>, merge with any other * streams. When we come to a <code>MapJoin</code>, note the * traversal of its loops, which corresponds to bindings being * available to inner loops. 
* * When we finally come to a <code>Select</code>, move conditions from it down to * earlier operators:<ul> * <li>If the condition only uses columns from an index, right after the scan.</li> * <li>If the condition uses columns from a single table, right * after that table is looked up.</li> * <li>If the condition uses multiple tables in a single group, when they are joined * together by <code>Flatten</code> or <code>Product</code></li> * <li>Tables from outer loops using <code>MapJoin</code>, which are available to * the inner loop, can be ignored in the above.</li></ul> * * In general, nested loop handling needs to be deferred until all * the loops are recorded. */ static class Preponer { Map<Product,Loop> products; Map<Select,SelectConditions> selects; public Preponer() { } /** Starting at the given node, trace downstream until get to * some conditions or something we can't handle. */ public void addOrigin(PlanNode node) { Loop loop = new Loop(); boolean newLoop = true, hasMaps = false, hasProducts = false; PlanNode prev = null; if (node instanceof IndexScan) { loop.setIndex((IndexScan)node); prev = node; node = node.getOutput(); } while (node instanceof TableLoader) { loop.addLoader(node); prev = node; node = node.getOutput(); } while (true) { if (node instanceof Flatten) { // A Flatten takes a single stream of lookups. loop.addFlatten((Flatten)node); } else if (node instanceof Product) { Product product = (Product)node; if (newLoop) { // Always inner join at present, so no filtering // of sources. loop.addFlattenOrProduct(product); // A Product takes multiple streams, so we may // have seen this one before. 
if (products == null) products = new HashMap<Product,Loop>(); Loop oloop = products.get(product); if (oloop != null) { loop = oloop.merge(loop, product); newLoop = false; } else { products.put(product, loop); } } hasProducts = true; } else if (node instanceof MapJoin) { MapJoin map = (MapJoin)node; switch (map.getJoinType()) { case INNER: break; case LEFT: case SEMI: if (prev == map.getInner()) return; break; default: return; } hasMaps = true; } else if (node instanceof Select) { Select select = (Select)node; if (!select.getConditions().isEmpty()) { SelectConditions selectConditions = null; boolean newSelect = false; if (selects != null) selectConditions = selects.get(select); if (selectConditions == null) { selectConditions = new SelectConditions(select); newSelect = true; } if (!loop.isEmpty()) { // Try once right away to get single table conditions. selectConditions.moveConditions(loop); } if (select.getConditions().isEmpty()) { if (!newSelect) selects.remove(select); } else { if (hasMaps && newLoop) { selectConditions.addLoop(loop); } if (hasProducts || hasMaps) { // Need to defer until have all the contributors // to the Map joins. Enable reuse for // Product. if (selects == null) selects = new HashMap<Select,SelectConditions>(); selects.put(select, selectConditions); } } } } else break; prev = node; node = node.getOutput(); } } public void moveDeferred() { if (selects != null) { for (SelectConditions swm : selects.values()) { if (swm.hasLoops()) { swm.moveConditions(null); } } } } } /** Holds what is known about inputs to a Select, which may come * from multiple <code>MapJoin</code> loops. */ static class SelectConditions { Select select; ConditionDependencyAnalyzer dependencies; // The loops that are joined up to feed the Select, added in visitor // order, meaning that tables from an earlier loop should be available as // bound variables to later / deeper ones. 
List<Loop> loops; public SelectConditions(Select select) { this.select = select; dependencies = new ConditionDependencyAnalyzer(select); } public void addLoop(Loop loop) { if (loops == null) loops = new ArrayList<Loop>(); loops.add(loop); } public boolean hasLoops() { return (loops != null); } /** Try to move conditions from <code>Select</code>. * @param loop If non-null, have a straight path to these * conditions and know where tables came from. See what can * be moved back there. */ public void moveConditions(Loop loop) { Iterator<ConditionExpression> iter = select.getConditions().iterator(); while (iter.hasNext()) { ConditionExpression condition = iter.next(); ColumnSource singleTable = dependencies.analyze(condition); PlanNode moveTo = canMove(loop, singleTable); if ((moveTo != null) && (moveTo != select.getInput())) { moveCondition(condition, moveTo); iter.remove(); } } } /** Return where this condition can move. */ // TODO: Could move earlier after subset of joins by breaking apart Flatten. public PlanNode canMove(Loop loop, ColumnSource singleTable) { Set<TableSource> outerTables = null; if (loop == null) { // If the condition only references a single table, no // need to check outer bindings; it's wherever it is. if (singleTable == null) outerTables = new HashSet<TableSource>(); // Several nested loops: find the shallowest one that has everything. loop = findLoop(outerTables); if (loop == null) return null; } if (loop.indexColumns != null) { // Can check the index column(s) before it's used for lookup. PlanNode loader = getSingleIndexLoader(loop, outerTables); if (loader != null) return loader; } Set<ColumnSource> allTables = dependencies.getReferencedTables(); if ((singleTable == null) && (outerTables != null)) { // Might still narrow down to a single table within this loop. 
allTables.removeAll(outerTables); if (allTables.size() == 1) singleTable = allTables.iterator().next(); } if (singleTable != null) { return loop.loaders.get(singleTable); } if ((loop.flattens != null) && !allTables.isEmpty()) { flattens: for (PlanNode flatten : loop.flattens) { // Find the first (deepest) flatten that has all the tables we need. Set<TableSource> tables = loop.flattened.get(flatten); for (ColumnSource table : allTables) { if (!tables.contains(table)) continue flattens; } return flatten; } } return null; } /** Find the first loop that has enough to evaluate the condition. */ public Loop findLoop(Set<TableSource> outerTables) { for (Loop loop : loops) { if (loop.indexOnly()) { // If the map loop is just an index, have to // look at individual columns. Set<TableSource> maybeOuterTables = null; if (outerTables != null) // Even though index only has some columns, can exclude whole // tables for purposes of deeper loops. maybeOuterTables = new HashSet<TableSource>(); boolean allFound = true; for (ColumnExpression column : dependencies.getReferencedColumns()) { if (outerTables != null) { if (outerTables.contains(column.getTable())) continue; } if (loop.indexColumns.containsKey(column)) { if (maybeOuterTables != null) maybeOuterTables.add((TableSource)column.getTable()); } else { allFound = false; } } if (allFound) return loop; if (maybeOuterTables != null) outerTables.addAll(maybeOuterTables); } else { boolean allFound = true; for (ColumnSource referencedTable : dependencies.getReferencedTables()) { if (outerTables != null) { if (outerTables.contains(referencedTable)) continue; } if (!loop.loaders.containsKey(referencedTable)) { allFound = false; break; } } if (allFound) return loop; if (outerTables != null) // Not moving to this loop; its tables are then available. outerTables.addAll(loop.loaders.keySet()); } } return null; } /** If all the referenced columns come from the same index, return it. 
*/ public PlanNode getSingleIndexLoader(Loop loop, Set<TableSource> outerTables) { PlanNode single = null; for (ColumnExpression column : dependencies.getReferencedColumns()) { if (outerTables != null) { if (outerTables.contains(column.getTable())) continue; } PlanNode loader = loop.indexColumns.get(column); if (loader == null) return null; if (single == null) single = loader; else if (single != loader) return null; } return single; } /** Move the given condition to a Select that is right after the given node. */ public void moveCondition(ConditionExpression condition, PlanNode before) { Select select = null; PlanWithInput after = before.getOutput(); if (after instanceof Select) select = (Select)after; else { select = new Select(before, new ConditionList(1)); after.replaceInput(before, select); } select.getConditions().add(condition); } } }
src/main/java/com/akiban/sql/optimizer/rule/SelectPreponer.java
/** * END USER LICENSE AGREEMENT (“EULA”) * * READ THIS AGREEMENT CAREFULLY (date: 9/13/2011): * http://www.akiban.com/licensing/20110913 * * BY INSTALLING OR USING ALL OR ANY PORTION OF THE SOFTWARE, YOU ARE ACCEPTING * ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. YOU AGREE THAT THIS * AGREEMENT IS ENFORCEABLE LIKE ANY WRITTEN AGREEMENT SIGNED BY YOU. * * IF YOU HAVE PAID A LICENSE FEE FOR USE OF THE SOFTWARE AND DO NOT AGREE TO * THESE TERMS, YOU MAY RETURN THE SOFTWARE FOR A FULL REFUND PROVIDED YOU (A) DO * NOT USE THE SOFTWARE AND (B) RETURN THE SOFTWARE WITHIN THIRTY (30) DAYS OF * YOUR INITIAL PURCHASE. * * IF YOU WISH TO USE THE SOFTWARE AS AN EMPLOYEE, CONTRACTOR, OR AGENT OF A * CORPORATION, PARTNERSHIP OR SIMILAR ENTITY, THEN YOU MUST BE AUTHORIZED TO SIGN * FOR AND BIND THE ENTITY IN ORDER TO ACCEPT THE TERMS OF THIS AGREEMENT. THE * LICENSES GRANTED UNDER THIS AGREEMENT ARE EXPRESSLY CONDITIONED UPON ACCEPTANCE * BY SUCH AUTHORIZED PERSONNEL. * * IF YOU HAVE ENTERED INTO A SEPARATE WRITTEN LICENSE AGREEMENT WITH AKIBAN FOR * USE OF THE SOFTWARE, THE TERMS AND CONDITIONS OF SUCH OTHER AGREEMENT SHALL * PREVAIL OVER ANY CONFLICTING TERMS OR CONDITIONS IN THIS AGREEMENT. */ package com.akiban.sql.optimizer.rule; import com.akiban.sql.optimizer.plan.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; /** Move WHERE clauses closer to their table origin. * This rule runs after flattening has been laid out. * * Note: <i>prepone</i>, while not an American or British English * word, is the transparent opposite of <i>postpone</i>. */ // TODO: Something similar is needed to handle moving HAVING // conditions on the group by fields across the aggregation boundary // and WHERE conditions on subqueries (views) into the subquery // itself. These need to run earlier to affect indexing. Not sure how // to integrate all these. Maybe move everything earlier on and then // recognize joins of such filtered tables as Joinable. 
public class SelectPreponer extends BaseRule { private static final Logger logger = LoggerFactory.getLogger(SelectPreponer.class); @Override protected Logger getLogger() { return logger; } @Override public void apply(PlanContext plan) { TableOriginFinder finder = new TableOriginFinder(); finder.find(plan.getPlan()); Preponer preponer = new Preponer(); for (PlanNode origin : finder.getOrigins()) { preponer.addOrigin(origin); } preponer.moveDeferred(); } /** Find all the places where data starts, such as <code>IndexScan</code> and <code><i>XxxLookup</i></code>. */ static class TableOriginFinder implements PlanVisitor, ExpressionVisitor { List<PlanNode> origins = new ArrayList<PlanNode>(); public void find(PlanNode root) { root.accept(this); } public List<PlanNode> getOrigins() { return origins; } @Override public boolean visitEnter(PlanNode n) { return visit(n); } @Override public boolean visitLeave(PlanNode n) { return true; } @Override public boolean visit(PlanNode n) { if (n instanceof IndexScan) { origins.add(n); } else if (n instanceof TableLoader) { if (n instanceof BasePlanWithInput) { PlanNode input = ((BasePlanWithInput)n).getInput(); if (!((input instanceof TableLoader) || (input instanceof IndexScan))) { // Will put input in, so don't bother putting both in. origins.add(n); } } else { origins.add(n); } } return true; } @Override public boolean visitEnter(ExpressionNode n) { return visit(n); } @Override public boolean visitLeave(ExpressionNode n) { return true; } @Override public boolean visit(ExpressionNode n) { return true; } } /** Holds the state of a single side of a loop, which usually means a group. */ static class Loop { Map<TableSource,PlanNode> loaders; // Lookup operators. Map<ExpressionNode,PlanNode> indexColumns; // Individual columns of IndexScan. List<PlanNode> flattens; // Flatten & Product operators that do in-group join. Map<PlanNode,Set<TableSource>> flattened; // Tables that participate in those. 
public Loop() { loaders = new HashMap<TableSource,PlanNode>(); } public void setIndex(IndexScan index) { indexColumns = new HashMap<ExpressionNode,PlanNode>(); for (ExpressionNode column : index.getColumns()) { if (column != null) { indexColumns.put(column, index); } } } public void addLoader(PlanNode loader) { for (TableSource table : ((TableLoader)loader).getTables()) { loaders.put(table, loader); } } /** Add a within-group join: Flatten or Product. */ public Set<TableSource> addFlattenOrProduct(PlanNode join) { if (flattens == null) flattens = new ArrayList<PlanNode>(); flattens.add(join); // Might be able to place multi-table conditions after a join. if (flattened == null) flattened = new HashMap<PlanNode,Set<TableSource>>(); Set<TableSource> tables = new HashSet<TableSource>(loaders.keySet()); flattened.put(join, tables); return tables; } public void addFlatten(Flatten flatten) { // Limit to tables that are inner joined (and on the outer // side of outer joins.) Set<TableSource> inner = flatten.getInnerJoinedTables(); loaders.keySet().retainAll(inner); if (indexColumns != null) { Iterator<ExpressionNode> iter = indexColumns.keySet().iterator(); while (iter.hasNext()) { ExpressionNode expr = iter.next(); if (expr.isColumn() && !inner.contains(((ColumnExpression)expr).getTable())) iter.remove(); } } // A Flatten can get more tables than directly feed it when in a Product. addFlattenOrProduct(flatten).addAll(inner); } /** Merge another loop into this one. Although <code>Product</code> starts * with separate lookup operators, it's a single loop for purposes of nesting. 
*/ public Loop merge(Loop other, PlanNode before) { loaders.putAll(other.loaders); if (indexColumns == null) indexColumns = other.indexColumns; else if (other.indexColumns != null) indexColumns.putAll(other.indexColumns); if (flattens == null) flattens = other.flattens; else if (other.flattens != null) { int i = -1; if (before != null) i = flattens.indexOf(before); if (i < 0) i = flattens.size(); for (PlanNode flatten : other.flattens) { if (flatten == before) break; flattens.add(i++, flatten); } } if (flattened == null) flattened = other.flattened; else if (other.flattened != null) { for (Map.Entry<PlanNode,Set<TableSource>> entry : other.flattened.entrySet()) { Set<TableSource> existing = flattened.get(entry.getKey()); if (existing != null) existing.addAll(entry.getValue()); else flattened.put(entry.getKey(), entry.getValue()); } } return this; } /** Does this loop have any interesting state? */ public boolean isEmpty() { return ((flattens == null) || (loaders.isEmpty() && ((indexColumns == null) || indexColumns.isEmpty()))); } /** Does this loop consist solely of an index? */ public boolean indexOnly() { return (loaders.isEmpty() && !((indexColumns == null) || indexColumns.isEmpty())); } } /** Move conditions as follows: * * Starting with index scans and lookup operators, trace * downstream, adding tables from additional such operators. When * we come to a <code>Product</code>, merge with any other * streams. When we come to a <code>MapJoin</code>, note the * traversal of its loops, which corresponds to bindings being * available to inner loops. 
* * When we finally come to a <code>Select</code>, move conditions from it down to * earlier operators:<ul> * <li>If the condition only uses columns from an index, right after the scan.</li> * <li>If the condition uses columns from a single table, right * after that table is looked up.</li> * <li>If the condition uses multiple tables in a single group, when they are joined * together by <code>Flatten</code> or <code>Product</code></li> * <li>Tables from outer loops using <code>MapJoin</code>, which are available to * the inner loop, can be ignored in the above.</li></ul> * * In general, nested loop handling needs to be deferred until all * the loops are recorded. */ static class Preponer { Map<Product,Loop> products; Map<Select,SelectConditions> selects; public Preponer() { } /** Starting at the given node, trace downstream until get to * some conditions or something we can't handle. */ public void addOrigin(PlanNode node) { Loop loop = new Loop(); boolean newLoop = true, hasMaps = false, hasProducts = false; PlanNode prev = null; if (node instanceof IndexScan) { loop.setIndex((IndexScan)node); prev = node; node = node.getOutput(); } while (node instanceof TableLoader) { loop.addLoader(node); prev = node; node = node.getOutput(); } while (true) { if (node instanceof Flatten) { // A Flatten takes a single stream of lookups. loop.addFlatten((Flatten)node); } else if (node instanceof Product) { Product product = (Product)node; if (newLoop) { // Always inner join at present, so no filtering // of sources. loop.addFlattenOrProduct(product); // A Product takes multiple streams, so we may // have seen this one before. 
if (products == null) products = new HashMap<Product,Loop>(); Loop oloop = products.get(product); if (oloop != null) { loop = oloop.merge(loop, product); newLoop = false; } else { products.put(product, loop); } } hasProducts = true; } else if (node instanceof MapJoin) { MapJoin map = (MapJoin)node; switch (map.getJoinType()) { case INNER: break; case LEFT: case SEMI: if (prev == map.getInner()) return; break; default: return; } hasMaps = true; } else if (node instanceof Select) { Select select = (Select)node; if (!select.getConditions().isEmpty()) { SelectConditions selectConditions = null; boolean newSelect = false; if (selects != null) selectConditions = selects.get(select); if (selectConditions == null) { selectConditions = new SelectConditions(select); newSelect = true; } if (!loop.isEmpty()) { // Try once right away to get single table conditions. selectConditions.moveConditions(loop); } if (select.getConditions().isEmpty()) { if (!newSelect) selects.remove(select); } else { if (hasMaps && newLoop) { selectConditions.addBranch(loop); } if (hasProducts || hasMaps) { // Need to defer until have all the contributors // to the Map joins. Enable reuse for // Product. if (selects == null) selects = new HashMap<Select,SelectConditions>(); selects.put(select, selectConditions); } } } } else break; prev = node; node = node.getOutput(); } } public void moveDeferred() { if (selects != null) { for (SelectConditions swm : selects.values()) { if (swm.hasLoops()) { swm.moveConditions(null); } } } } } /** Holds what is known about inputs to a Select, which may come * from multiple <code>MapJoin</code> loops. */ static class SelectConditions { Select select; ConditionDependencyAnalyzer dependencies; // The loops that are joined up to feed the Select, added in visitor // order, meaning that tables from an earlier loop should be available as // bound variables to later / deeper ones. 
List<Loop> loops; public SelectConditions(Select select) { this.select = select; dependencies = new ConditionDependencyAnalyzer(select); } public void addBranch(Loop loop) { if (loops == null) loops = new ArrayList<Loop>(); loops.add(loop); } public boolean hasLoops() { return (loops != null); } /** Try to move conditions from <code>Select</code>. * @param loop If non-null, have a straight path to these * conditions and know where tables came from. See what can * be moved back there. */ public void moveConditions(Loop loop) { Iterator<ConditionExpression> iter = select.getConditions().iterator(); while (iter.hasNext()) { ConditionExpression condition = iter.next(); ColumnSource singleTable = dependencies.analyze(condition); PlanNode moveTo = canMove(loop, singleTable); if ((moveTo != null) && (moveTo != select.getInput())) { moveCondition(condition, moveTo); iter.remove(); } } } /** Return where this condition can move. */ // TODO: Could move earlier after subset of joins by breaking apart Flatten. public PlanNode canMove(Loop loop, ColumnSource singleTable) { Set<TableSource> outerTables = null; if (loop == null) { // If the condition only references a single table, no // need to check outer bindings; it's wherever it is. if (singleTable == null) outerTables = new HashSet<TableSource>(); // Several joined loops: find the shallowest one that has everything. loop = findLoop(outerTables); if (loop == null) return null; } if (loop.indexColumns != null) { // Can check the index column before it's used for lookup. PlanNode loader = getSingleIndexLoader(loop, outerTables); if (loader != null) return loader; } Set<ColumnSource> allTables = dependencies.getReferencedTables(); if ((singleTable == null) && (outerTables != null)) { // Might still narrow down to a single table within this loop. 
allTables.removeAll(outerTables); if (allTables.size() == 1) singleTable = allTables.iterator().next(); } if (singleTable != null) { return loop.loaders.get(singleTable); } if ((loop.flattens != null) && !allTables.isEmpty()) { flattens: for (PlanNode flatten : loop.flattens) { // Find the first (deepest) flatten that has all the tables we need. Set<TableSource> tables = loop.flattened.get(flatten); for (ColumnSource table : allTables) { if (!tables.contains(table)) continue flattens; } return flatten; } } return null; } /** Find the first loop that has enough to evaluate the condition. */ public Loop findLoop(Set<TableSource> outerTables) { for (Loop loop : loops) { if (loop.indexOnly()) { // If the map loop is just an index, have to // look at individual columns. Set<TableSource> maybeOuterTables = null; if (outerTables != null) // Even though index only has some columns, can exclude whole // tables for purposes of deeper loops. maybeOuterTables = new HashSet<TableSource>(); boolean allFound = true; for (ColumnExpression column : dependencies.getReferencedColumns()) { if (outerTables != null) { if (outerTables.contains(column.getTable())) continue; } if (loop.indexColumns.containsKey(column)) { if (maybeOuterTables != null) maybeOuterTables.add((TableSource)column.getTable()); } else { allFound = false; } } if (allFound) return loop; if (maybeOuterTables != null) outerTables.addAll(maybeOuterTables); } else { boolean allFound = true; for (ColumnSource referencedTable : dependencies.getReferencedTables()) { if (outerTables != null) { if (outerTables.contains(referencedTable)) continue; } if (!loop.loaders.containsKey(referencedTable)) { allFound = false; break; } } if (allFound) return loop; if (outerTables != null) // Not moving to this loop; its tables are then available. outerTables.addAll(loop.loaders.keySet()); } } return null; } /** If all the referenced columns come from the same index, return it. 
*/ public PlanNode getSingleIndexLoader(Loop loop, Set<TableSource> outerTables) { PlanNode single = null; for (ColumnExpression column : dependencies.getReferencedColumns()) { if (outerTables != null) { if (outerTables.contains(column.getTable())) continue; } PlanNode loader = loop.indexColumns.get(column); if (loader == null) return null; if (single == null) single = loader; else if (single != loader) return null; } return single; } /** Move the given condition to a Select that is right after the given node. */ public void moveCondition(ConditionExpression condition, PlanNode before) { Select select = null; PlanWithInput after = before.getOutput(); if (after instanceof Select) select = (Select)after; else { select = new Select(before, new ConditionList(1)); after.replaceInput(before, select); } select.getConditions().add(condition); } } }
Clean up comments some more.
src/main/java/com/akiban/sql/optimizer/rule/SelectPreponer.java
Clean up comments some more.
Java
lgpl-2.1
2e58ba2dc8d41bf2e02f39cf0d210e5a455f3909
0
simoc/mapyrus,simoc/mapyrus,simoc/mapyrus
/* * This file is part of Mapyrus, software for plotting maps. * Copyright (C) 2003, 2004 Simon Chenery. * * Mapyrus is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * Mapyrus is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Mapyrus; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ /* * @(#) $Id$ */ package org.mapyrus; import java.awt.image.BufferedImage; import java.io.*; import java.net.ServerSocket; import java.net.Socket; import java.util.HashSet; import java.util.Iterator; import java.util.logging.ConsoleHandler; import java.util.logging.Level; import java.util.logging.Logger; import org.mapyrus.logging.SingleLineFormatter; /** * Main class for Mapyrus, a program for generating plots of points, * lines and polygons to various output formats. * Runs as either an interpreter for files given on the command * line, as a library embedded in a Java application, or as an HTTP server. * * An interpreter is not thread-safe. * * When all work with an interpreter is complete call the close() method * to flush and close any output page and datasets still in use. */ public class Mapyrus { private static final String OUT_OF_MEMORY_MESSAGE = "Out of memory. Use Java -Xmx option to increase memory\navailable to Mapyrus. For example, java -Xmx256m -classpath ...\n"; private Interpreter mInterpreter; private ContextStack mContext; /** * Create new interpreter. 
*/ public Mapyrus() { mInterpreter = new Interpreter(); mContext = new ContextStack(); } /** * Create string reader from string array. * @param s string to create reader from. * @return string reader. */ private StringReader makeStringReader(String []s) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < s.length; i++) { sb.append(s[i]); sb.append(Constants.LINE_SEPARATOR); } StringReader retval = new StringReader(sb.toString()); return(retval); } /** * Read, parse and execute commands. * Can be called repeatedly to interpret many files. * Graphics state and variables are retained between calls. * An interpreter cannot be used again if it throws an exception. * @param commands lines of commands to interpret. * @param stdout stream to write stdout of interpreter into. * @throws IOException if reading or writing files fails. * @throws MapyrusException if there is an error interpreting commands. */ public void interpret(String []commands, PrintStream stdout) throws IOException, MapyrusException { /* * Convert commands into a reader that can be parsed one * character at a time. */ StringReader sr = makeStringReader(commands); FileOrURL f = new FileOrURL(sr, "commands"); ColorDatabase.load(); mInterpreter.interpret(mContext, f, stdout); } /** * Set output page for Mapyrus interpreter. * This enables Mapyrus to draw into a buffer that an application * later displays in a window. * @param image buffered image to use as initial output page. * @param extras extras settings for output page. * @throws IOException * @throws MapyrusException if there is an error accessing image. */ public void setPage(BufferedImage image, String extras) throws IOException, MapyrusException { if (extras == null) extras = ""; mContext.setOutputFormat(image, extras); } /** * Flush any pending output to an output file and close output file. * Close any dataset being accessed. 
* @throws IOException * @throws MapyrusException */ public void close() throws IOException, MapyrusException { try { if (mContext != null) mContext.closeContextStack(); } catch (IOException e) { throw e; } catch (MapyrusException e) { throw e; } finally { /* * Always clears fields so we only attempt a close once. */ mContext = null; mInterpreter = null; } } /** * Show software version number and usage message, then exit. */ private static void printUsageAndExit() { String []usage = { "Usage:", "java [-Dvariable=value] ... -classpath " + Constants.PROGRAM_NAME.toLowerCase() + ".jar org.mapyrus.Mapyrus", " [options] filename ...", "", Constants.PROGRAM_NAME + " reads each file or URL in turn.", "If filename is '-' then standard input is read.", "", "Variables and configuration are passed to " + Constants.PROGRAM_NAME + " using the", "Java -D option.", "", "Options:", " -s <port> starts " + Constants.PROGRAM_NAME + " as a self-contained HTTP server on the", " given port. Refer to manual for detailed instructions.", " -e <commands> runs given commands instead of reading commands from a file", " -v print version information and exit", " -h print this message" }; String []license = { Constants.PROGRAM_NAME + " comes with ABSOLUTELY NO WARRANTY, not even for MERCHANTABILITY or", "FITNESS FOR A PARTICULAR PURPOSE. You may redistribute copies of " + Constants.PROGRAM_NAME, "under the terms of the GNU Lesser General Public License. For more", "information about these matters, see the file named COPYING." }; for (int i = 0; i < usage.length; i++) { System.out.println(usage[i]); } System.out.println(""); for (int i = 0; i < license.length; i++) { System.out.println(license[i]); } System.out.println(""); System.out.println("Report bugs to <" + Constants.PROGRAM_NAME.toLowerCase() + "@chenery.id.au>."); System.exit(1); } /** * Parse and interpret commands from a file. Trap any exceptions. * @param context is context to use during interpretation. 
* @param f open file or URL to read. * @param interpreter interpreter in which to run commands. * @param closeFile if set to true file is closed after we finish reading it. * @return flag indicating whether interpretation succeeeded. */ private static boolean processFile(ContextStack context, FileOrURL f, Interpreter interpreter, boolean closeFile) { try { interpreter.interpret(context, f, System.out); if (closeFile) f.getReader().close(); } catch (MapyrusException e) { System.err.println(e.getMessage()); return(false); } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); return(false); } catch (OutOfMemoryError e) { /* * Tell user to make more memory available. * Use literal strings, do not look them up in property file as * this may fail if no more memory is available. */ System.err.println(OUT_OF_MEMORY_MESSAGE); e.printStackTrace(); return(false); } return(true); } /* * Initialise global settings, color name lookup tables. */ private static void initialise() { try { ColorDatabase.load(); } catch (IOException e) { System.err.println(e.getMessage()); System.exit(1); } catch (MapyrusException e) { System.err.println(e.getMessage()); System.exit(1); } } /** * Listen on a server socket, accepting and processing HTTP requests. * @param interpreter interpreter to use for * @param port port on which to create socket and listen on. * This function normally runs forever and will only return if server * cannot be started. */ private static void serveHttp(Interpreter interpreter, int port) { ServerSocket serverSocket = null; Pool interpreterPool; HashSet activeThreads; /* * Make pool of interpreters available to threads that * handle HTTP requests. */ interpreterPool = new Pool(); interpreterPool.put(interpreter); for (int i = 1; i < Constants.MAX_HTTP_THREADS; i++) interpreterPool.put(interpreter.clone()); /* * Initialise set of threads that have been started. 
*/ activeThreads = new HashSet(); /* * Create a logger for writing errors and information whilst * running as an HTTP server. */ String className = Mapyrus.class.getName(); Logger logger = Logger.getLogger(className); ConsoleHandler consoleHandler = new ConsoleHandler(); consoleHandler.setFormatter(new SingleLineFormatter()); logger.addHandler(consoleHandler); logger.setUseParentHandlers(false); try { /* * Create socket on given port. If port was 0 then it * is assigned to any free port number. */ serverSocket = new ServerSocket(port); port = serverSocket.getLocalPort(); } catch (IOException e) { System.err.println(MapyrusMessages.get(MapyrusMessages.INIT_HTTP_FAILED) + ": " + e.getMessage()); return; } /* * Log startup configuration information or write it to the terminal. */ String versionMessage = Constants.PROGRAM_NAME + " " + Constants.getVersion() + " " + Constants.getReleaseDate(); String threadMessage = MapyrusMessages.get(MapyrusMessages.HTTP_THREADED_SERVER) + ": " + Constants.MAX_HTTP_THREADS; String acceptingMessage = MapyrusMessages.get(MapyrusMessages.ACCEPTING_HTTP) + ": " + port; logger.config(versionMessage); logger.config(threadMessage); logger.config(acceptingMessage); if (!logger.isLoggable(Level.CONFIG)) { System.out.println(versionMessage); System.out.println(threadMessage); System.out.println(acceptingMessage); } while (true) { Socket socket = null; try { /* * Listen on socket for next client connection. */ socket = serverSocket.accept(); socket.setSoTimeout(Constants.HTTP_SOCKET_TIMEOUT); /* * Take a intepreter to handle this request (waiting * until one becomes available, if necessary). * Then start new thread to handle this request. 
*/ interpreter = (Interpreter)(interpreterPool.get(Constants.HTTP_TIMEOUT)); if (interpreter == null) { throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.HTTP_TIMEOUT)); } HTTPRequest request = new HTTPRequest(socket, interpreter, interpreterPool, logger); activeThreads.add(request); logger.fine(MapyrusMessages.get(MapyrusMessages.STARTED_THREAD) + ": " + request.getName()); /* * Forget about socket, the request thread guarantees that it * will be closed. */ socket = null; request.start(); /* * Join any threads we started that have now finished. */ Iterator iterator = activeThreads.iterator(); while (iterator.hasNext()) { HTTPRequest active = (HTTPRequest)(iterator.next()); if (!active.isAlive()) { /* * Wait for thread to complete, then check if it succeeded. */ active.join(); logger.fine(MapyrusMessages.get(MapyrusMessages.JOINED_THREAD) + ": " + active.getName()); if (!active.getStatus()) logger.severe(active.getName() + ": " + active.getErrorMessage()); iterator.remove(); } } } catch (IOException e) { logger.severe(e.getMessage()); } catch (InterruptedException e) { logger.severe(e.getMessage()); } catch (MapyrusException e) { logger.severe(e.getMessage()); } catch (OutOfMemoryError e) { logger.severe(OUT_OF_MEMORY_MESSAGE); } finally { /* * Ensure that socket is always closed. */ try { if (socket != null) socket.close(); } catch (IOException e) { } } } } /* * Return java version info and it's capabilities. * @return version details. */ private static String getJavaConfiguration() { String vendor = System.getProperty("java.vendor"); if (vendor == null) vendor = "null"; String version = System.getProperty("java.version"); if (version == null) version = "null"; String javaHome = System.getProperty("java.home"); if (javaHome == null) javaHome = "null"; return("Java version " + version + " (" + vendor + ") in " + javaHome); } /** * Parse command line arguments and start processing. * Called when Mapyrus is run as a stand-alone interpreter. 
* @param args command line arguments. */ public static void main(String []args) { FileOrURL f = null; ContextStack context; int i; boolean readingStdin; boolean isHttpServer = false; int argIndex = 0; int port = 0; StringBuffer commandsToExecute = new StringBuffer(); if (args.length == 0) printUsageAndExit(); /* * Parse command line arguments -- these are the files and URLs * to read commands from. */ while (argIndex < args.length && args[argIndex].startsWith("-") && args[argIndex].length() > 1) { String arg = args[argIndex]; if (arg.equals("-h") || arg.equals("--help") || arg.equals("-?")) { /* * Show usage message and quit. */ printUsageAndExit(); } else if (arg.equals("-v") || arg.equals("--version")) { /* * Show version number and quit. */ System.out.println(Constants.PROGRAM_NAME + " " + Constants.getVersion() + " " + Constants.getReleaseDate()); System.out.println(getJavaConfiguration()); System.exit(1); } else if (arg.equals("-s")) { if (argIndex + 1 == args.length) printUsageAndExit(); try { port = Integer.parseInt(args[1]); } catch (NumberFormatException e) { printUsageAndExit(); } argIndex += 2; isHttpServer = true; } else if (arg.equals("-e")) { if (argIndex + 1 == args.length) printUsageAndExit(); /* * Commands to be executed given on command line. */ commandsToExecute.append(args[argIndex + 1]); commandsToExecute.append(Constants.LINE_SEPARATOR); argIndex += 2; } else if (arg.equals("--")) { /* * "--" marks end of options. */ argIndex++; break; } else { /* * Unknown option. */ System.err.println(MapyrusMessages.get(MapyrusMessages.INVALID_OPTION) + ": " + arg); System.exit(1); } } initialise(); context = new ContextStack(); Interpreter interpreter = new Interpreter(); if (commandsToExecute.length() > 0) { /* * Run commands given as a command line argument. 
*/ f = new FileOrURL(new StringReader(commandsToExecute.toString()), "-e"); if (!processFile(context, f, interpreter, false)) System.exit(1); } else { i = argIndex; while (i < args.length) { readingStdin = args[i].equals("-"); if (readingStdin) { /* * Read from standard input. */ f = new FileOrURL(new InputStreamReader(System.in), "standard input"); } else { /* * Read from a file or URL. */ try { f = new FileOrURL(args[i]); } catch (IOException e) { System.err.println(e.getMessage()); System.exit(1); } catch (MapyrusException e) { System.err.println(e.getMessage()); System.exit(1); } } if (!processFile(context, f, interpreter, !readingStdin)) System.exit(1); i++; } } /* * Finish off anything being created in this context. */ try { context.closeContextStack(); } catch (IOException e) { System.err.println(e.getMessage()); System.exit(1); } catch (MapyrusException e) { System.err.println(e.getMessage()); System.exit(1); } /* * If we're running as an HTTP server we are now ready to * accept connections and respond to requests from HTTP clients. */ if (isHttpServer) { serveHttp(interpreter, port); System.exit(1); } System.exit(0); } }
src/org/mapyrus/Mapyrus.java
/* * This file is part of Mapyrus, software for plotting maps. * Copyright (C) 2003, 2004 Simon Chenery. * * Mapyrus is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * Mapyrus is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Mapyrus; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ /* * @(#) $Id$ */ package org.mapyrus; import java.awt.image.BufferedImage; import java.io.*; import java.net.ServerSocket; import java.net.Socket; import java.util.HashSet; import java.util.Iterator; import java.util.logging.ConsoleHandler; import java.util.logging.Level; import java.util.logging.Logger; import org.mapyrus.logging.SingleLineFormatter; /** * Main class for Mapyrus, a program for generating plots of points, * lines and polygons to various output formats. * Runs as either an interpreter for files given on the command * line, as a library embedded in a Java application, or as an HTTP server. * * An interpreter is not thread-safe. * * When all work with an interpreter is complete call the close() method * to flush and close any output page and datasets still in use. */ public class Mapyrus { private static final String OUT_OF_MEMORY_MESSAGE = "Out of memory. Use Java -Xmx option to increase memory\navailable to Mapyrus. For example, java -Xmx256m -classpath ...\n"; private Interpreter mInterpreter; private ContextStack mContext; /** * Create new interpreter. 
*/ public Mapyrus() { mInterpreter = new Interpreter(); mContext = new ContextStack(); } /** * Create string reader from string array. * @param s string to create reader from. * @return string reader. */ private StringReader makeStringReader(String []s) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < s.length; i++) { sb.append(s[i]); sb.append(Constants.LINE_SEPARATOR); } StringReader retval = new StringReader(sb.toString()); return(retval); } /** * Read, parse and execute commands. * Can be called repeatedly to interpret many files. * Graphics state and variables are retained between calls. * An interpreter cannot be used again if it throws an exception. * @param commands lines of commands to interpret. * @param stdout stream to write stdout of interpreter into. * @throws IOException if reading or writing files fails. * @throws MapyrusException if there is an error interpreting commands. */ public void interpret(String []commands, PrintStream stdout) throws IOException, MapyrusException { /* * Convert commands into a reader that can be parsed one * character at a time. */ StringReader sr = makeStringReader(commands); FileOrURL f = new FileOrURL(sr, "commands"); ColorDatabase.load(); mInterpreter.interpret(mContext, f, stdout); } /** * Set output page for Mapyrus interpreter. * This enables Mapyrus to draw into a buffer that an application * later displays in a window. * @param image buffered image to use as initial output page. * @param extras extras settings for output page. * @throws IOException * @throws MapyrusException if there is an error accessing image. */ public void setPage(BufferedImage image, String extras) throws IOException, MapyrusException { if (extras == null) extras = ""; mContext.setOutputFormat(image, extras); } /** * Flush any pending output to an output file and close output file. * Close any dataset being accessed. 
* @throws IOException * @throws MapyrusException */ public void close() throws IOException, MapyrusException { try { if (mContext != null) mContext.closeContextStack(); } catch (IOException e) { throw e; } catch (MapyrusException e) { throw e; } finally { /* * Always clears fields so we only attempt a close once. */ mContext = null; mInterpreter = null; } } /** * Show software version number and usage message, then exit. */ private static void printUsageAndExit() { String []usage = { "Usage:", "java [-Dvariable=value] ... -classpath " + Constants.PROGRAM_NAME.toLowerCase() + ".jar org.mapyrus.Mapyrus", " [options] filename ...", "", Constants.PROGRAM_NAME + " reads each file or URL in turn.", "If filename is '-' then standard input is read.", "", "Variables and configuration are passed to " + Constants.PROGRAM_NAME + " using the", "Java -D option.", "", "Options:", " -s <port> starts " + Constants.PROGRAM_NAME + " as a self-contained HTTP server on the", " given port. Refer to manual for detailed instructions.", " -v print version information and exit", " -h print this message" }; String []license = { Constants.PROGRAM_NAME + " comes with ABSOLUTELY NO WARRANTY, not even for", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "You may redistribute copies of " + Constants.PROGRAM_NAME + " under the terms", "of the GNU Lesser General Public License. For more information", "about these matters, see the file named COPYING." 
}; System.out.println(Constants.PROGRAM_NAME + " version " + Constants.getVersion() + " " + Constants.getReleaseDate() + " Copyright (C) 2003, 2004 Simon Chenery"); System.out.println(""); for (int i = 0; i < usage.length; i++) { System.out.println(usage[i]); } System.out.println(""); for (int i = 0; i < license.length; i++) { System.out.println(license[i]); } System.out.println(""); System.out.println("Report bugs to <" + Constants.PROGRAM_NAME.toLowerCase() + "@chenery.id.au>."); System.exit(1); } /** * Parse and interpret commands from a file. Trap any exceptions. * @param context is context to use during interpretation. * @param f open file or URL to read. * @param interpreter interpreter in which to run commands. * @param closeFile if set to true file is closed after we finish reading it. * @return flag indicating whether interpretation succeeeded. */ private static boolean processFile(ContextStack context, FileOrURL f, Interpreter interpreter, boolean closeFile) { try { interpreter.interpret(context, f, System.out); if (closeFile) f.getReader().close(); } catch (MapyrusException e) { System.err.println(e.getMessage()); return(false); } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); return(false); } catch (OutOfMemoryError e) { /* * Tell user to make more memory available. * Use literal strings, do not look them up in property file as * this may fail if no more memory is available. */ System.err.println(OUT_OF_MEMORY_MESSAGE); e.printStackTrace(); return(false); } return(true); } /* * Initialise global settings, color name lookup tables. */ private static void initialise() { try { ColorDatabase.load(); } catch (IOException e) { System.err.println(e.getMessage()); System.exit(1); } catch (MapyrusException e) { System.err.println(e.getMessage()); System.exit(1); } } /** * Listen on a server socket, accepting and processing HTTP requests. 
* @param interpreter interpreter to use for * @param port port on which to create socket and listen on. * This function normally runs forever and will only return if server * cannot be started. */ private static void serveHttp(Interpreter interpreter, int port) { ServerSocket serverSocket = null; Pool interpreterPool; HashSet activeThreads; /* * Make pool of interpreters available to threads that * handle HTTP requests. */ interpreterPool = new Pool(); interpreterPool.put(interpreter); for (int i = 1; i < Constants.MAX_HTTP_THREADS; i++) interpreterPool.put(interpreter.clone()); /* * Initialise set of threads that have been started. */ activeThreads = new HashSet(); /* * Create a logger for writing errors and information whilst * running as an HTTP server. */ String className = Mapyrus.class.getName(); Logger logger = Logger.getLogger(className); ConsoleHandler consoleHandler = new ConsoleHandler(); consoleHandler.setFormatter(new SingleLineFormatter()); logger.addHandler(consoleHandler); logger.setUseParentHandlers(false); try { /* * Create socket on given port. If port was 0 then it * is assigned to any free port number. */ serverSocket = new ServerSocket(port); port = serverSocket.getLocalPort(); } catch (IOException e) { System.err.println(MapyrusMessages.get(MapyrusMessages.INIT_HTTP_FAILED) + ": " + e.getMessage()); return; } /* * Log startup configuration information or write it to the terminal. 
*/ String versionMessage = Constants.PROGRAM_NAME + " " + Constants.getVersion() + " " + Constants.getReleaseDate(); String threadMessage = MapyrusMessages.get(MapyrusMessages.HTTP_THREADED_SERVER) + ": " + Constants.MAX_HTTP_THREADS; String acceptingMessage = MapyrusMessages.get(MapyrusMessages.ACCEPTING_HTTP) + ": " + port; logger.config(versionMessage); logger.config(threadMessage); logger.config(acceptingMessage); if (!logger.isLoggable(Level.CONFIG)) { System.out.println(versionMessage); System.out.println(threadMessage); System.out.println(acceptingMessage); } while (true) { Socket socket = null; try { /* * Listen on socket for next client connection. */ socket = serverSocket.accept(); socket.setSoTimeout(Constants.HTTP_SOCKET_TIMEOUT); /* * Take a intepreter to handle this request (waiting * until one becomes available, if necessary). * Then start new thread to handle this request. */ interpreter = (Interpreter)(interpreterPool.get(Constants.HTTP_TIMEOUT)); if (interpreter == null) { throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.HTTP_TIMEOUT)); } HTTPRequest request = new HTTPRequest(socket, interpreter, interpreterPool, logger); activeThreads.add(request); logger.fine(MapyrusMessages.get(MapyrusMessages.STARTED_THREAD) + ": " + request.getName()); /* * Forget about socket, the request thread guarantees that it * will be closed. */ socket = null; request.start(); /* * Join any threads we started that have now finished. */ Iterator iterator = activeThreads.iterator(); while (iterator.hasNext()) { HTTPRequest active = (HTTPRequest)(iterator.next()); if (!active.isAlive()) { /* * Wait for thread to complete, then check if it succeeded. 
*/ active.join(); logger.fine(MapyrusMessages.get(MapyrusMessages.JOINED_THREAD) + ": " + active.getName()); if (!active.getStatus()) logger.severe(active.getName() + ": " + active.getErrorMessage()); iterator.remove(); } } } catch (IOException e) { logger.severe(e.getMessage()); } catch (InterruptedException e) { logger.severe(e.getMessage()); } catch (MapyrusException e) { logger.severe(e.getMessage()); } catch (OutOfMemoryError e) { logger.severe(OUT_OF_MEMORY_MESSAGE); } finally { /* * Ensure that socket is always closed. */ try { if (socket != null) socket.close(); } catch (IOException e) { } } } } /* * Return java version info and it's capabilities. * @return version details. */ private static String getJavaConfiguration() { String vendor = System.getProperty("java.vendor"); if (vendor == null) vendor = "null"; String version = System.getProperty("java.version"); if (version == null) version = "null"; String javaHome = System.getProperty("java.home"); if (javaHome == null) javaHome = "null"; return("Java version " + version + " (" + vendor + ") in " + javaHome); } /** * Parse command line arguments and start processing. * Called when Mapyrus is run as a stand-alone interpreter. * @param args command line arguments. */ public static void main(String []args) { FileOrURL f = null; ContextStack context; int i; boolean readingStdin; boolean isHttpServer = false; int argStartIndex = 0; int port = 0; /* * Parse command line arguments -- these are the files and URLs * to read commands from. */ if (args.length == 0 || (args.length == 1 && (args[0].equals("-h") || args[0].equals("--help") || args[0].equals("-?")))) { /* * Show usage message and quit. */ printUsageAndExit(); } else if (args.length == 1 && (args[0].equals("-v") || args[0].equals("--version"))) { /* * Show version number and quit. 
*/ System.out.println(Constants.PROGRAM_NAME + " " + Constants.getVersion() + " " + Constants.getReleaseDate()); System.out.println(getJavaConfiguration()); System.exit(1); } else if (args[0].equals("-s")) { if (args.length < 2) { printUsageAndExit(); } try { port = Integer.parseInt(args[1]); } catch (NumberFormatException e) { printUsageAndExit(); } argStartIndex = 2; isHttpServer = true; } initialise(); context = new ContextStack(); Interpreter interpreter = new Interpreter(); i = argStartIndex; while (i < args.length) { readingStdin = args[i].equals("-"); if (readingStdin) { /* * Read from standard input. */ f = new FileOrURL(new InputStreamReader(System.in), "standard input"); } else { /* * Read from a file or URL. */ try { f = new FileOrURL(args[i]); } catch (IOException e) { System.err.println(e.getMessage()); System.exit(1); } catch (MapyrusException e) { System.err.println(e.getMessage()); System.exit(1); } } if (!processFile(context, f, interpreter, !readingStdin)) System.exit(1); i++; } /* * Finished off anything being created in this context. */ try { context.closeContextStack(); } catch (IOException e) { System.err.println(e.getMessage()); System.exit(1); } catch (MapyrusException e) { System.err.println(e.getMessage()); System.exit(1); } /* * If we're running as an HTTP server we are now ready to * accept connections and respond to requests from HTTP clients. */ if (isHttpServer) { serveHttp(interpreter, port); System.exit(1); } System.exit(0); } }
Add -e option to give Mapyrus commands on command line, like in Perl. Rewrite command line parsing to handle arguments in a loop. Shorten usage message to fit in a 80x25 terminal.
src/org/mapyrus/Mapyrus.java
Add -e option to give Mapyrus commands on command line, like in Perl. Rewrite command line parsing to handle arguments in a loop. Shorten usage message to fit in a 80x25 terminal.
Java
lgpl-2.1
2d35f1358c8eba39ea3d10d2721e66a51904502c
0
MenZil/opencms-core,victos/opencms-core,victos/opencms-core,serrapos/opencms-core,mediaworx/opencms-core,serrapos/opencms-core,sbonoc/opencms-core,serrapos/opencms-core,mediaworx/opencms-core,alkacon/opencms-core,it-tavis/opencms-core,sbonoc/opencms-core,serrapos/opencms-core,it-tavis/opencms-core,alkacon/opencms-core,mediaworx/opencms-core,ggiudetti/opencms-core,mediaworx/opencms-core,sbonoc/opencms-core,alkacon/opencms-core,victos/opencms-core,serrapos/opencms-core,serrapos/opencms-core,MenZil/opencms-core,gallardo/opencms-core,MenZil/opencms-core,sbonoc/opencms-core,gallardo/opencms-core,alkacon/opencms-core,ggiudetti/opencms-core,gallardo/opencms-core,it-tavis/opencms-core,gallardo/opencms-core,it-tavis/opencms-core,serrapos/opencms-core,victos/opencms-core,ggiudetti/opencms-core,MenZil/opencms-core,ggiudetti/opencms-core
/* * File : $Source: /alkacon/cvs/opencms/src/com/opencms/workplace/Attic/CmsAdminProjectNew.java,v $ * Date : $Date: 2001/07/17 07:16:05 $ * Version: $Revision: 1.51 $ * * Copyright (C) 2000 The OpenCms Group * * This File is part of OpenCms - * the Open Source Content Mananagement System * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * For further information about OpenCms, please see the * OpenCms Website: http://www.opencms.com * * You should have received a copy of the GNU General Public License * long with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package com.opencms.workplace; import com.opencms.file.*; import com.opencms.core.*; import com.opencms.util.*; import com.opencms.template.*; import java.util.*; import java.io.*; import javax.servlet.http.*; /** * Template class for displaying OpenCms workplace admin project screens. 
* <P> * * @author Andreas Schouten * @author Michael Emmerich * @author Mario Stanke * @version $Revision: 1.51 $ $Date: 2001/07/17 07:16:05 $ * @see com.opencms.workplace.CmsXmlWpTemplateFile */ public class CmsAdminProjectNew extends CmsWorkplaceDefault implements I_CmsConstants { /** Session key */ private static String C_NEWNAME = "new_project_name"; /** Session key */ private static String C_NEWDESCRIPTION = "new_project_description"; /** Session key */ private static String C_NEWGROUP = "new_project_group"; /** Session key */ private static String C_NEWMANAGERGROUP = "new_project_managergroup"; /** Session key */ private static String C_NEWFOLDER = "new_project_folder"; /** Session key */ private static String C_NEWTYPE = "projecttype"; /** Session key */ private static String C_NEWRESOURCES = "ALLRES"; private static String C_PROJECTNEW_THREAD = "project_new_thread"; /** Check whether some of the resources are redundant because a superfolder has also * been selected. * * @param resources containts the full pathnames of all the resources * @return A vector with the same resources, but the paths in the return value are disjoint */ private void checkRedundancies(Vector resources) { int i, j; if(resources == null) { return ; } Vector redundant = new Vector(); int n = resources.size(); if(n < 2) { // no check needed, because there is only one resource or // no resources selected, return empty Vector return ; } for(i = 0;i < n;i++) { redundant.addElement(new Boolean(false)); } for(i = 0;i < n - 1;i++) { for(j = i + 1;j < n;j++) { if(((String)resources.elementAt(i)).length() < ((String)resources.elementAt(j)).length()) { if(((String)resources.elementAt(j)).startsWith((String)resources.elementAt(i))) { redundant.setElementAt(new Boolean(true), j); } } else { if(((String)resources.elementAt(i)).startsWith((String)resources.elementAt(j))) { redundant.setElementAt(new Boolean(true), i); } } } } for(i = n - 1;i >= 0;i--) { 
if(((Boolean)redundant.elementAt(i)).booleanValue()) { resources.removeElementAt(i); } } } /** * Check if this resource should is writeable. * @param cms The CmsObject * @param res The resource to be checked. * @return True or false. * @exception CmsException if something goes wrong. */ private boolean checkWriteable(CmsObject cms, String resPath) { boolean access = false; int accessflags; try { CmsResource res = cms.readFolder(resPath); accessflags = res.getAccessFlags(); boolean groupAccess = false; Enumeration allGroups = cms.getGroupsOfUser(cms.getRequestContext().currentUser().getName()).elements(); while((!groupAccess) && allGroups.hasMoreElements()) { groupAccess = cms.readGroup(res).equals((CmsGroup)allGroups.nextElement()); } if(((accessflags & C_ACCESS_PUBLIC_WRITE) > 0) || (cms.getRequestContext().isAdmin()) || (cms.readOwner(res).equals(cms.getRequestContext().currentUser()) && (accessflags & C_ACCESS_OWNER_WRITE) > 0) || (groupAccess && (accessflags & C_ACCESS_GROUP_WRITE) > 0)) { access = true; } } catch(CmsException e) { access = false; } return access; } /** * Gets the content of a defined section in a given template file and its subtemplates * with the given parameters. * * @see getContent(CmsObject cms, String templateFile, String elementName, Hashtable parameters) * @param cms CmsObject Object for accessing system resources. * @param templateFile Filename of the template file. * @param elementName Element name of this template in our parent template. * @param parameters Hashtable with all template class parameters. * @param templateSelector template section that should be processed. */ public byte[] getContent(CmsObject cms, String templateFile, String elementName, Hashtable parameters, String templateSelector) throws CmsException { if(I_CmsLogChannels.C_PREPROCESSOR_IS_LOGGING && A_OpenCms.isLogging() && C_DEBUG) { A_OpenCms.log(C_OPENCMS_DEBUG, this.getClassName() + "getting content of element " + ((elementName == null) ? 
"<root>" : elementName)); A_OpenCms.log(C_OPENCMS_DEBUG, this.getClassName() + "template file is: " + templateFile); A_OpenCms.log(C_OPENCMS_DEBUG, this.getClassName() + "selected template section is: " + ((templateSelector == null) ? "<default>" : templateSelector)); } I_CmsSession session = cms.getRequestContext().getSession(true); CmsRequestContext reqCont = cms.getRequestContext(); CmsXmlLanguageFile lang = new CmsXmlLanguageFile(cms); // clear session values on first load String initial = (String)parameters.get(C_PARA_INITIAL); if(initial != null) { // remove all session values session.removeValue(C_NEWNAME); session.removeValue(C_NEWGROUP); session.removeValue(C_NEWDESCRIPTION); session.removeValue(C_NEWMANAGERGROUP); session.removeValue(C_NEWFOLDER); session.removeValue(C_NEWRESOURCES); session.removeValue(C_NEWTYPE); session.removeValue("lasturl"); session.removeValue("newProjectCallingFrom"); reqCont.setCurrentProject(cms.onlineProject().getId()); } String newName, newGroup, newDescription, newManagerGroup, newFolder, projectType; String newType = new String(); String action = new String(); action = (String)parameters.get("action"); CmsXmlTemplateFile xmlTemplateDocument = getOwnTemplateFile(cms, templateFile, elementName, parameters, templateSelector); //look if we come from the explorer view String fileToGo = (String)parameters.get("file"); if (fileToGo == null){ fileToGo = (String)session.getValue("newProjectCallingFrom"); } String lasturl = (String)parameters.get("lasturl"); if (lasturl == null){ lasturl = (String)session.getValue("lasturl"); } newName = (String)parameters.get(C_PROJECTNEW_NAME); if(newName == null) { newName = (String)session.getValue(C_NEWNAME); } String errorTemplateAddOn = ""; if (fileToGo != null){ // this is from the explorer view if((!cms.getRequestContext().isProjectManager()) && (!cms.isAdmin())){ // user has no rights to create a project return startProcessing(cms, xmlTemplateDocument, elementName,parameters, "norigths"); } 
errorTemplateAddOn = "explorer"; session.putValue("newProjectCallingFrom", fileToGo); xmlTemplateDocument.setData("pathCorrection",""); xmlTemplateDocument.setData("backButton",lasturl); xmlTemplateDocument.setData("myUrl","resource_to_project.html"); xmlTemplateDocument.setData("dontDoIt", " //"); // we have to put the file in the box and set the projectname xmlTemplateDocument.setData("doThis","addFolder(document.PROJECTNEW.new_ressources,'"+fileToGo+"');"); if (newName == null){ newName = getProjectName(cms,fileToGo); } }else{ // this is from the administration view xmlTemplateDocument.setData("pathCorrection","../../"); xmlTemplateDocument.setData("backButton","../../../action/administration_content_top.html?sender=/system/workplace/administration/project/"); xmlTemplateDocument.setData("myUrl","index.html"); xmlTemplateDocument.setData("dontDoIt", ""); xmlTemplateDocument.setData("doThis",""); } xmlTemplateDocument.setData("onlineId", "" + cms.onlineProject().getId()); newGroup = (String)parameters.get(C_PROJECTNEW_GROUP); newDescription = (String)parameters.get(C_PROJECTNEW_DESCRIPTION); newManagerGroup = (String)parameters.get(C_PROJECTNEW_MANAGERGROUP); String allResources = (String)parameters.get(C_NEWRESOURCES); newType = (String)parameters.get(C_NEWTYPE); // if there are still values in the session (like after an error), use them if(newGroup == null) { newGroup = (String)session.getValue(C_NEWGROUP); } if(newDescription == null) { newDescription = (String)session.getValue(C_NEWDESCRIPTION); } if(newManagerGroup == null) { newManagerGroup = (String)session.getValue(C_NEWMANAGERGROUP); } if(allResources == null) { allResources = (String)session.getValue(C_NEWRESOURCES); } if(newName == null) { newName = ""; } if(newGroup == null) { newGroup = ""; } if(newDescription == null) { newDescription = ""; } if(newManagerGroup == null) { newManagerGroup = ""; } if(allResources == null) { allResources = ""; } if(newType == null || "".equals(newType)) { projectType = 
""+I_CmsConstants.C_PROJECT_TYPE_NORMAL; newType = ""; } else { projectType = ""+I_CmsConstants.C_PROJECT_TYPE_TEMPORARY; } // first we look if the thread is allready running if((action != null) && ("working".equals(action))) { // still working? /* Thread doProjectNew = (Thread)session.getValue(C_PROJECTNEW_THREAD); if(doProjectNew.isAlive()) { String time = (String)parameters.get("time"); int wert = Integer.parseInt(time); wert += 20; xmlTemplateDocument.setData("time", "" + wert); return startProcessing(cms, xmlTemplateDocument, elementName, parameters, "wait"); } else { */ // thread has come to an end, was there an error? //String errordetails = (String)session.getValue(C_SESSION_THREAD_ERROR); //if(errordetails == null) { // project ready; clear the session session.removeValue(C_NEWNAME); session.removeValue(C_NEWGROUP); session.removeValue(C_NEWDESCRIPTION); session.removeValue(C_NEWMANAGERGROUP); session.removeValue(C_NEWFOLDER); session.removeValue(C_NEWTYPE); session.removeValue("lasturl"); session.removeValue("newProjectCallingFrom"); return startProcessing(cms, xmlTemplateDocument, elementName, parameters, "done"); /*} else { // get errorpage: xmlTemplateDocument.setData(C_NEWNAME, newName); xmlTemplateDocument.setData(C_NEWDESCRIPTION, newDescription); xmlTemplateDocument.setData(C_NEWTYPE, newType); xmlTemplateDocument.setData("details", errordetails); //session.removeValue(C_SESSION_THREAD_ERROR); return startProcessing(cms, xmlTemplateDocument, elementName, parameters, "errornewproject"+errorTemplateAddOn); } } */ } if(parameters.get("submitform") != null) { // the form has just been submitted, store the data in the session session.putValue(C_NEWNAME, newName); session.putValue(C_NEWGROUP, newGroup); session.putValue(C_NEWDESCRIPTION, newDescription); session.putValue(C_NEWMANAGERGROUP, newManagerGroup); session.putValue(C_NEWTYPE, newType); if(newName.equals("") || newGroup.equals("") || newManagerGroup.equals("") || allResources.equals("")) { 
templateSelector = "datamissing"+errorTemplateAddOn; } else { session.putValue(C_NEWRESOURCES, allResources); // all the required data has been entered, display 'Please wait' templateSelector = "wait"; action = "start"; } } // is the wait-page showing? if("start".equals(action)) { // YES: get the stored data newName = (String)session.getValue(C_NEWNAME); newGroup = (String)session.getValue(C_NEWGROUP); newDescription = (String)session.getValue(C_NEWDESCRIPTION); newManagerGroup = (String)session.getValue(C_NEWMANAGERGROUP); allResources = (String)session.getValue(C_NEWRESOURCES); newType = (String)session.getValue(C_NEWTYPE); // create new Project try { // append the /content/bodys/, /pics/ and /download/ path to the list of all resources String picspath = getConfigFile(cms).getPicGalleryPath(); String downloadpath = getConfigFile(cms).getDownGalleryPath(); //String contentpath = ";"+C_CONTENTPATH; // only append the /content/bodys/ for the folders of the new project // and the folders /content/default_bodies/, /content/internal/, /content/templates/ String contentpath = ";"+C_CONTENTDEFAULTBODIESPATH+ ";"+C_CONTENTINTERNALPATH+ ";"+C_CONTENTTEMPLATEPATH; /* the existing contentbodys path is added with copyResourceToProject Vector addResources = parseResources(allResources); for (int j = 0; j < addResources.size(); j++){ String foldername = (String)addResources.get(j); if(!(foldername.startsWith(C_CONTENTPATH) || foldername.startsWith(picspath)|| foldername.startsWith(downloadpath)|| foldername.startsWith("/system/"))){ contentpath = contentpath+";/content/bodys"+foldername; } } */ allResources = allResources + ";" + picspath + ";" + downloadpath; // 'allResurces' has the "form res1;res2;...resk;" // this is because the simpler 'getParameterValues' method doesn't work with Silverstream Vector folders = parseResources(allResources); int numRes = folders.size(); for(int i = 0;i < numRes;i++) { // modify the foldername if nescessary (the root folder is always given // 
as a nice name) if(lang.getLanguageValue("title.rootfolder").equals(folders.elementAt(i))) { folders.setElementAt("/", i); } } checkRedundancies(folders); numRes = folders.size(); // could have been changed // finally create the project CmsProject project = cms.createProject(newName, newDescription, newGroup, newManagerGroup, projectType); // change the current project reqCont.setCurrentProject(project.getId()); // start the thread for: copy the resources to the project // first clear the session entry if necessary //if(session.getValue(C_SESSION_THREAD_ERROR) != null) { // session.removeValue(C_SESSION_THREAD_ERROR); //} //Thread doProjectNew = new CmsAdminNewProjectThread(cms, folders, session); //doProjectNew.start(); //session.putValue(C_PROJECTNEW_THREAD, doProjectNew); //xmlTemplateDocument.setData("time", "10"); templateSelector = "wait"; // copy the resources to the actual project try { // copy the resources to the actual project for(int i = 0;i < folders.size();i++) { cms.copyResourceToProject((String)folders.elementAt(i)); } } catch(CmsException e) { if(I_CmsLogChannels.C_PREPROCESSOR_IS_LOGGING && A_OpenCms.isLogging() ) { A_OpenCms.log(A_OpenCms.C_OPENCMS_CRITICAL, e.getMessage()); } throw e; } }catch(CmsException exc) { xmlTemplateDocument.setData("details", Utils.getStackTrace(exc)); templateSelector = "errornewproject"+errorTemplateAddOn; } } // after an error the form data is retrieved and filled into the template xmlTemplateDocument.setData(C_NEWNAME, newName); xmlTemplateDocument.setData(C_NEWDESCRIPTION, newDescription); xmlTemplateDocument.setData(C_NEWTYPE, newType); // Now load the template file and start the processing return startProcessing(cms, xmlTemplateDocument, elementName, parameters, templateSelector); } /** * Gets all groups, that may work for a project. * <P> * The given vectors <code>names</code> and <code>values</code> will * be filled with the appropriate information to be used for building * a select box. 
* * @param cms CmsObject Object for accessing system resources. * @param names Vector to be filled with the appropriate values in this method. * @param values Vector to be filled with the appropriate values in this method. * @param parameters Hashtable containing all user parameters <em>(not used here)</em>. * @return Index representing the current value in the vectors. * @exception CmsException */ public Integer getGroups(CmsObject cms, CmsXmlLanguageFile lang, Vector names, Vector values, Hashtable parameters) throws CmsException { // get all groups Vector groups = cms.getGroups(); int retValue = -1; String defaultGroup = C_GROUP_USERS; I_CmsSession session = cms.getRequestContext().getSession(true); String enteredGroup = (String)session.getValue(C_NEWGROUP); if(enteredGroup != null && !enteredGroup.equals("")) { // if an error has occurred before, take the previous entry of the user defaultGroup = enteredGroup; } // fill the names and values int n = 0; for(int z = 0;z < groups.size();z++) { if(((CmsGroup)groups.elementAt(z)).getProjectCoWorker()) { String name = ((CmsGroup)groups.elementAt(z)).getName(); if(defaultGroup.equals(name)) { retValue = n; } names.addElement(name); values.addElement(name); n++; // count the number of ProjectCoWorkers } } return new Integer(retValue); } /** * Gets all groups, that may manage a project. * <P> * The given vectors <code>names</code> and <code>values</code> will * be filled with the appropriate information to be used for building * a select box. * * @param cms CmsObject Object for accessing system resources. * @param names Vector to be filled with the appropriate values in this method. * @param values Vector to be filled with the appropriate values in this method. * @param parameters Hashtable containing all user parameters <em>(not used here)</em>. * @return Index representing the current value in the vectors. 
* @exception CmsException */ public Integer getManagerGroups(CmsObject cms, CmsXmlLanguageFile lang, Vector names, Vector values, Hashtable parameters) throws CmsException { // get all groups Vector groups = cms.getGroups(); int retValue = -1; String defaultGroup = C_GROUP_PROJECTLEADER; I_CmsSession session = cms.getRequestContext().getSession(true); String enteredGroup = (String)session.getValue(C_NEWMANAGERGROUP); if(enteredGroup != null && !enteredGroup.equals("")) { // if an error has occurred before, take the previous entry of the user defaultGroup = enteredGroup; } // fill the names and values int n = 0; for(int z = 0;z < groups.size();z++) { if(((CmsGroup)groups.elementAt(z)).getProjectmanager()) { String name = ((CmsGroup)groups.elementAt(z)).getName(); if(defaultGroup.equals(name)) { retValue = n; } names.addElement(name); values.addElement(name); n++; // count the number of project managers } } return new Integer(retValue); } public Integer getSelectedResources(CmsObject cms, CmsXmlLanguageFile lang, Vector names, Vector values, Hashtable parameters) throws CmsException { I_CmsSession session = cms.getRequestContext().getSession(true); String[] newProjectResources = (String[])session.getValue(C_NEWRESOURCES); if(newProjectResources != null) { for(int i = 0;i < newProjectResources.length;i++) { names.addElement(newProjectResources[i]); values.addElement(newProjectResources[i]); } } // no current folder, set index to -1 return new Integer(-1); } /** * Indicates if the results of this class are cacheable. * * @param cms CmsObject Object for accessing system resources * @param templateFile Filename of the template file * @param elementName Element name of this template in our parent template. * @param parameters Hashtable with all template class parameters. * @param templateSelector template section that should be processed. * @return <EM>true</EM> if cacheable, <EM>false</EM> otherwise. 
*/ public boolean isCacheable(CmsObject cms, String templateFile, String elementName, Hashtable parameters, String templateSelector) { return false; } /** Parse the string which holds all resources * * @param resources containts the full pathnames of all the resources, separated by semicolons * @return A vector with the same resources */ private Vector parseResources(String resources) { Vector ret = new Vector(); if(resources != null) { StringTokenizer resTokenizer = new StringTokenizer(resources, ";"); while(resTokenizer.hasMoreElements()) { String path = (String)resTokenizer.nextElement(); ret.addElement(path); } } return ret; } /** gets the projectname for a contexgenerated project * * @param cms the cmsObject * @param resource the name of the resource * @return A vector with the same resources */ private String getProjectName(CmsObject cms, String resource) { String ret = resource; if (ret.endsWith("/")){ ret = ret.substring(0, ret.length()-1); } ret = ret.substring(ret.lastIndexOf('/')+1); if (ret.length() > 14){ ret = ret.substring(0,13); } try{ Vector allProjects = cms.getAllAccessibleProjects(); Vector theNames = new Vector(); // count all projects starting with the same name int count = 0; for (int i = 0; i < allProjects.size(); i++){ String currProject = ((CmsProject)allProjects.elementAt(i)).getName(); if (currProject.startsWith(ret)){ count++; theNames.addElement(currProject); } } if ((count > 0) && (count < 99)){ // get the highest version nummber int version = 1; for (int i = 0; i<theNames.size(); i++){ int currVersion = 0; try{ currVersion = Integer.parseInt(((String)theNames.elementAt(i)).substring(ret.length()+1)); }catch(Exception e){ } if ((currVersion > version)&& (currVersion < 100)){ version = currVersion; } } if (version < 99){ ret = ret + "_" + (version + 1); } } }catch(CmsException e){ } return ret; } }
src/com/opencms/workplace/CmsAdminProjectNew.java
/* * File : $Source: /alkacon/cvs/opencms/src/com/opencms/workplace/Attic/CmsAdminProjectNew.java,v $ * Date : $Date: 2001/07/09 08:09:21 $ * Version: $Revision: 1.50 $ * * Copyright (C) 2000 The OpenCms Group * * This File is part of OpenCms - * the Open Source Content Mananagement System * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * For further information about OpenCms, please see the * OpenCms Website: http://www.opencms.com * * You should have received a copy of the GNU General Public License * long with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package com.opencms.workplace; import com.opencms.file.*; import com.opencms.core.*; import com.opencms.util.*; import com.opencms.template.*; import java.util.*; import java.io.*; import javax.servlet.http.*; /** * Template class for displaying OpenCms workplace admin project screens. 
* <P> * * @author Andreas Schouten * @author Michael Emmerich * @author Mario Stanke * @version $Revision: 1.50 $ $Date: 2001/07/09 08:09:21 $ * @see com.opencms.workplace.CmsXmlWpTemplateFile */ public class CmsAdminProjectNew extends CmsWorkplaceDefault implements I_CmsConstants { /** Session key */ private static String C_NEWNAME = "new_project_name"; /** Session key */ private static String C_NEWDESCRIPTION = "new_project_description"; /** Session key */ private static String C_NEWGROUP = "new_project_group"; /** Session key */ private static String C_NEWMANAGERGROUP = "new_project_managergroup"; /** Session key */ private static String C_NEWFOLDER = "new_project_folder"; /** Session key */ private static String C_NEWTYPE = "projecttype"; /** Session key */ private static String C_NEWRESOURCES = "ALLRES"; private static String C_PROJECTNEW_THREAD = "project_new_thread"; /** Check whether some of the resources are redundant because a superfolder has also * been selected. * * @param resources containts the full pathnames of all the resources * @return A vector with the same resources, but the paths in the return value are disjoint */ private void checkRedundancies(Vector resources) { int i, j; if(resources == null) { return ; } Vector redundant = new Vector(); int n = resources.size(); if(n < 2) { // no check needed, because there is only one resource or // no resources selected, return empty Vector return ; } for(i = 0;i < n;i++) { redundant.addElement(new Boolean(false)); } for(i = 0;i < n - 1;i++) { for(j = i + 1;j < n;j++) { if(((String)resources.elementAt(i)).length() < ((String)resources.elementAt(j)).length()) { if(((String)resources.elementAt(j)).startsWith((String)resources.elementAt(i))) { redundant.setElementAt(new Boolean(true), j); } } else { if(((String)resources.elementAt(i)).startsWith((String)resources.elementAt(j))) { redundant.setElementAt(new Boolean(true), i); } } } } for(i = n - 1;i >= 0;i--) { 
if(((Boolean)redundant.elementAt(i)).booleanValue()) { resources.removeElementAt(i); } } } /** * Check if this resource should is writeable. * @param cms The CmsObject * @param res The resource to be checked. * @return True or false. * @exception CmsException if something goes wrong. */ private boolean checkWriteable(CmsObject cms, String resPath) { boolean access = false; int accessflags; try { CmsResource res = cms.readFolder(resPath); accessflags = res.getAccessFlags(); boolean groupAccess = false; Enumeration allGroups = cms.getGroupsOfUser(cms.getRequestContext().currentUser().getName()).elements(); while((!groupAccess) && allGroups.hasMoreElements()) { groupAccess = cms.readGroup(res).equals((CmsGroup)allGroups.nextElement()); } if(((accessflags & C_ACCESS_PUBLIC_WRITE) > 0) || (cms.getRequestContext().isAdmin()) || (cms.readOwner(res).equals(cms.getRequestContext().currentUser()) && (accessflags & C_ACCESS_OWNER_WRITE) > 0) || (groupAccess && (accessflags & C_ACCESS_GROUP_WRITE) > 0)) { access = true; } } catch(CmsException e) { access = false; } return access; } /** * Gets the content of a defined section in a given template file and its subtemplates * with the given parameters. * * @see getContent(CmsObject cms, String templateFile, String elementName, Hashtable parameters) * @param cms CmsObject Object for accessing system resources. * @param templateFile Filename of the template file. * @param elementName Element name of this template in our parent template. * @param parameters Hashtable with all template class parameters. * @param templateSelector template section that should be processed. */ public byte[] getContent(CmsObject cms, String templateFile, String elementName, Hashtable parameters, String templateSelector) throws CmsException { if(I_CmsLogChannels.C_PREPROCESSOR_IS_LOGGING && A_OpenCms.isLogging() && C_DEBUG) { A_OpenCms.log(C_OPENCMS_DEBUG, this.getClassName() + "getting content of element " + ((elementName == null) ? 
"<root>" : elementName)); A_OpenCms.log(C_OPENCMS_DEBUG, this.getClassName() + "template file is: " + templateFile); A_OpenCms.log(C_OPENCMS_DEBUG, this.getClassName() + "selected template section is: " + ((templateSelector == null) ? "<default>" : templateSelector)); } I_CmsSession session = cms.getRequestContext().getSession(true); CmsRequestContext reqCont = cms.getRequestContext(); CmsXmlLanguageFile lang = new CmsXmlLanguageFile(cms); // clear session values on first load String initial = (String)parameters.get(C_PARA_INITIAL); if(initial != null) { // remove all session values session.removeValue(C_NEWNAME); session.removeValue(C_NEWGROUP); session.removeValue(C_NEWDESCRIPTION); session.removeValue(C_NEWMANAGERGROUP); session.removeValue(C_NEWFOLDER); session.removeValue(C_NEWRESOURCES); session.removeValue(C_NEWTYPE); session.removeValue("lasturl"); session.removeValue("newProjectCallingFrom"); reqCont.setCurrentProject(cms.onlineProject().getId()); } String newName, newGroup, newDescription, newManagerGroup, newFolder, projectType; String newType = new String(); String action = new String(); action = (String)parameters.get("action"); CmsXmlTemplateFile xmlTemplateDocument = getOwnTemplateFile(cms, templateFile, elementName, parameters, templateSelector); //look if we come from the explorer view String fileToGo = (String)parameters.get("file"); if (fileToGo == null){ fileToGo = (String)session.getValue("newProjectCallingFrom"); } String lasturl = (String)parameters.get("lasturl"); if (lasturl == null){ lasturl = (String)session.getValue("lasturl"); } newName = (String)parameters.get(C_PROJECTNEW_NAME); if(newName == null) { newName = (String)session.getValue(C_NEWNAME); } String errorTemplateAddOn = ""; if (fileToGo != null){ // this is from the explorer view if((!cms.getRequestContext().isProjectManager()) && (!cms.isAdmin())){ // user has no rights to create a project return startProcessing(cms, xmlTemplateDocument, elementName,parameters, "norigths"); } 
errorTemplateAddOn = "explorer"; session.putValue("newProjectCallingFrom", fileToGo); xmlTemplateDocument.setData("pathCorrection",""); xmlTemplateDocument.setData("backButton",lasturl); xmlTemplateDocument.setData("myUrl","resource_to_project.html"); xmlTemplateDocument.setData("dontDoIt", " //"); // we have to put the file in the box and set the projectname xmlTemplateDocument.setData("doThis","addFolder(document.PROJECTNEW.new_ressources,'"+fileToGo+"');"); if (newName == null){ newName = getProjectName(cms,fileToGo); } }else{ // this is from the administration view xmlTemplateDocument.setData("pathCorrection","../../"); xmlTemplateDocument.setData("backButton","../../../action/administration_content_top.html?sender=/system/workplace/administration/project/"); xmlTemplateDocument.setData("myUrl","index.html"); xmlTemplateDocument.setData("dontDoIt", ""); xmlTemplateDocument.setData("doThis",""); } xmlTemplateDocument.setData("onlineId", "" + cms.onlineProject().getId()); newGroup = (String)parameters.get(C_PROJECTNEW_GROUP); newDescription = (String)parameters.get(C_PROJECTNEW_DESCRIPTION); newManagerGroup = (String)parameters.get(C_PROJECTNEW_MANAGERGROUP); String allResources = (String)parameters.get(C_NEWRESOURCES); newType = (String)parameters.get(C_NEWTYPE); // if there are still values in the session (like after an error), use them if(newGroup == null) { newGroup = (String)session.getValue(C_NEWGROUP); } if(newDescription == null) { newDescription = (String)session.getValue(C_NEWDESCRIPTION); } if(newManagerGroup == null) { newManagerGroup = (String)session.getValue(C_NEWMANAGERGROUP); } if(allResources == null) { allResources = (String)session.getValue(C_NEWRESOURCES); } if(newName == null) { newName = ""; } if(newGroup == null) { newGroup = ""; } if(newDescription == null) { newDescription = ""; } if(newManagerGroup == null) { newManagerGroup = ""; } if(allResources == null) { allResources = ""; } if(newType == null || "".equals(newType)) { projectType = 
""+I_CmsConstants.C_PROJECT_TYPE_NORMAL; newType = ""; } else { projectType = ""+I_CmsConstants.C_PROJECT_TYPE_TEMPORARY; } // first we look if the thread is allready running if((action != null) && ("working".equals(action))) { // still working? /* Thread doProjectNew = (Thread)session.getValue(C_PROJECTNEW_THREAD); if(doProjectNew.isAlive()) { String time = (String)parameters.get("time"); int wert = Integer.parseInt(time); wert += 20; xmlTemplateDocument.setData("time", "" + wert); return startProcessing(cms, xmlTemplateDocument, elementName, parameters, "wait"); } else { */ // thread has come to an end, was there an error? //String errordetails = (String)session.getValue(C_SESSION_THREAD_ERROR); //if(errordetails == null) { // project ready; clear the session session.removeValue(C_NEWNAME); session.removeValue(C_NEWGROUP); session.removeValue(C_NEWDESCRIPTION); session.removeValue(C_NEWMANAGERGROUP); session.removeValue(C_NEWFOLDER); session.removeValue(C_NEWTYPE); session.removeValue("lasturl"); session.removeValue("newProjectCallingFrom"); long startTime = ((Long)session.getValue("startTime")).longValue(); session.removeValue("startTime"); long stopTime = System.currentTimeMillis(); if(I_CmsLogChannels.C_PREPROCESSOR_IS_LOGGING && A_OpenCms.isLogging()) { A_OpenCms.log(I_CmsLogChannels.C_OPENCMS_INFO, "[CmsAdminProjectNew] createProject time: "+(stopTime-startTime)); } return startProcessing(cms, xmlTemplateDocument, elementName, parameters, "done"); /*} else { // get errorpage: xmlTemplateDocument.setData(C_NEWNAME, newName); xmlTemplateDocument.setData(C_NEWDESCRIPTION, newDescription); xmlTemplateDocument.setData(C_NEWTYPE, newType); xmlTemplateDocument.setData("details", errordetails); //session.removeValue(C_SESSION_THREAD_ERROR); return startProcessing(cms, xmlTemplateDocument, elementName, parameters, "errornewproject"+errorTemplateAddOn); } } */ } if(parameters.get("submitform") != null) { // the form has just been submitted, store the data in the 
session session.putValue(C_NEWNAME, newName); session.putValue(C_NEWGROUP, newGroup); session.putValue(C_NEWDESCRIPTION, newDescription); session.putValue(C_NEWMANAGERGROUP, newManagerGroup); session.putValue(C_NEWTYPE, newType); if(newName.equals("") || newGroup.equals("") || newManagerGroup.equals("") || allResources.equals("")) { templateSelector = "datamissing"+errorTemplateAddOn; } else { session.putValue(C_NEWRESOURCES, allResources); // all the required data has been entered, display 'Please wait' templateSelector = "wait"; action = "start"; } } // is the wait-page showing? if("start".equals(action)) { session.putValue("startTime", new Long(System.currentTimeMillis())); // YES: get the stored data newName = (String)session.getValue(C_NEWNAME); newGroup = (String)session.getValue(C_NEWGROUP); newDescription = (String)session.getValue(C_NEWDESCRIPTION); newManagerGroup = (String)session.getValue(C_NEWMANAGERGROUP); allResources = (String)session.getValue(C_NEWRESOURCES); newType = (String)session.getValue(C_NEWTYPE); // create new Project try { // append the /content/bodys/, /pics/ and /download/ path to the list of all resources String picspath = getConfigFile(cms).getPicGalleryPath(); String downloadpath = getConfigFile(cms).getDownGalleryPath(); //String contentpath = ";"+C_CONTENTPATH; // only append the /content/bodys/ for the folders of the new project // and the folders /content/default_bodies/, /content/internal/, /content/templates/ String contentpath = ";"+C_CONTENTDEFAULTBODIESPATH+ ";"+C_CONTENTINTERNALPATH+ ";"+C_CONTENTTEMPLATEPATH; /* the existing contentbodys path is added with copyResourceToProject Vector addResources = parseResources(allResources); for (int j = 0; j < addResources.size(); j++){ String foldername = (String)addResources.get(j); if(!(foldername.startsWith(C_CONTENTPATH) || foldername.startsWith(picspath)|| foldername.startsWith(downloadpath)|| foldername.startsWith("/system/"))){ contentpath = 
contentpath+";/content/bodys"+foldername; } } */ allResources = allResources + contentpath + ";" + picspath + ";" + downloadpath; // 'allResurces' has the "form res1;res2;...resk;" // this is because the simpler 'getParameterValues' method doesn't work with Silverstream Vector folders = parseResources(allResources); int numRes = folders.size(); for(int i = 0;i < numRes;i++) { // modify the foldername if nescessary (the root folder is always given // as a nice name) if(lang.getLanguageValue("title.rootfolder").equals(folders.elementAt(i))) { folders.setElementAt("/", i); } } checkRedundancies(folders); numRes = folders.size(); // could have been changed // finally create the project CmsProject project = cms.createProject(newName, newDescription, newGroup, newManagerGroup, projectType); // change the current project reqCont.setCurrentProject(project.getId()); // start the thread for: copy the resources to the project // first clear the session entry if necessary //if(session.getValue(C_SESSION_THREAD_ERROR) != null) { // session.removeValue(C_SESSION_THREAD_ERROR); //} //Thread doProjectNew = new CmsAdminNewProjectThread(cms, folders, session); //doProjectNew.start(); //session.putValue(C_PROJECTNEW_THREAD, doProjectNew); //xmlTemplateDocument.setData("time", "10"); templateSelector = "wait"; // copy the resources to the actual project try { // copy the resources to the actual project for(int i = 0;i < folders.size();i++) { cms.copyResourceToProject((String)folders.elementAt(i)); } } catch(CmsException e) { if(I_CmsLogChannels.C_PREPROCESSOR_IS_LOGGING && A_OpenCms.isLogging() ) { A_OpenCms.log(A_OpenCms.C_OPENCMS_CRITICAL, e.getMessage()); } throw e; } }catch(CmsException exc) { xmlTemplateDocument.setData("details", Utils.getStackTrace(exc)); templateSelector = "errornewproject"+errorTemplateAddOn; } } // after an error the form data is retrieved and filled into the template xmlTemplateDocument.setData(C_NEWNAME, newName); 
xmlTemplateDocument.setData(C_NEWDESCRIPTION, newDescription); xmlTemplateDocument.setData(C_NEWTYPE, newType); // Now load the template file and start the processing return startProcessing(cms, xmlTemplateDocument, elementName, parameters, templateSelector); } /** * Gets all groups, that may work for a project. * <P> * The given vectors <code>names</code> and <code>values</code> will * be filled with the appropriate information to be used for building * a select box. * * @param cms CmsObject Object for accessing system resources. * @param names Vector to be filled with the appropriate values in this method. * @param values Vector to be filled with the appropriate values in this method. * @param parameters Hashtable containing all user parameters <em>(not used here)</em>. * @return Index representing the current value in the vectors. * @exception CmsException */ public Integer getGroups(CmsObject cms, CmsXmlLanguageFile lang, Vector names, Vector values, Hashtable parameters) throws CmsException { // get all groups Vector groups = cms.getGroups(); int retValue = -1; String defaultGroup = C_GROUP_USERS; I_CmsSession session = cms.getRequestContext().getSession(true); String enteredGroup = (String)session.getValue(C_NEWGROUP); if(enteredGroup != null && !enteredGroup.equals("")) { // if an error has occurred before, take the previous entry of the user defaultGroup = enteredGroup; } // fill the names and values int n = 0; for(int z = 0;z < groups.size();z++) { if(((CmsGroup)groups.elementAt(z)).getProjectCoWorker()) { String name = ((CmsGroup)groups.elementAt(z)).getName(); if(defaultGroup.equals(name)) { retValue = n; } names.addElement(name); values.addElement(name); n++; // count the number of ProjectCoWorkers } } return new Integer(retValue); } /** * Gets all groups, that may manage a project. * <P> * The given vectors <code>names</code> and <code>values</code> will * be filled with the appropriate information to be used for building * a select box. 
* * @param cms CmsObject Object for accessing system resources. * @param names Vector to be filled with the appropriate values in this method. * @param values Vector to be filled with the appropriate values in this method. * @param parameters Hashtable containing all user parameters <em>(not used here)</em>. * @return Index representing the current value in the vectors. * @exception CmsException */ public Integer getManagerGroups(CmsObject cms, CmsXmlLanguageFile lang, Vector names, Vector values, Hashtable parameters) throws CmsException { // get all groups Vector groups = cms.getGroups(); int retValue = -1; String defaultGroup = C_GROUP_PROJECTLEADER; I_CmsSession session = cms.getRequestContext().getSession(true); String enteredGroup = (String)session.getValue(C_NEWMANAGERGROUP); if(enteredGroup != null && !enteredGroup.equals("")) { // if an error has occurred before, take the previous entry of the user defaultGroup = enteredGroup; } // fill the names and values int n = 0; for(int z = 0;z < groups.size();z++) { if(((CmsGroup)groups.elementAt(z)).getProjectmanager()) { String name = ((CmsGroup)groups.elementAt(z)).getName(); if(defaultGroup.equals(name)) { retValue = n; } names.addElement(name); values.addElement(name); n++; // count the number of project managers } } return new Integer(retValue); } public Integer getSelectedResources(CmsObject cms, CmsXmlLanguageFile lang, Vector names, Vector values, Hashtable parameters) throws CmsException { I_CmsSession session = cms.getRequestContext().getSession(true); String[] newProjectResources = (String[])session.getValue(C_NEWRESOURCES); if(newProjectResources != null) { for(int i = 0;i < newProjectResources.length;i++) { names.addElement(newProjectResources[i]); values.addElement(newProjectResources[i]); } } // no current folder, set index to -1 return new Integer(-1); } /** * Indicates if the results of this class are cacheable. 
* * @param cms CmsObject Object for accessing system resources * @param templateFile Filename of the template file * @param elementName Element name of this template in our parent template. * @param parameters Hashtable with all template class parameters. * @param templateSelector template section that should be processed. * @return <EM>true</EM> if cacheable, <EM>false</EM> otherwise. */ public boolean isCacheable(CmsObject cms, String templateFile, String elementName, Hashtable parameters, String templateSelector) { return false; } /** Parse the string which holds all resources * * @param resources containts the full pathnames of all the resources, separated by semicolons * @return A vector with the same resources */ private Vector parseResources(String resources) { Vector ret = new Vector(); if(resources != null) { StringTokenizer resTokenizer = new StringTokenizer(resources, ";"); while(resTokenizer.hasMoreElements()) { String path = (String)resTokenizer.nextElement(); ret.addElement(path); } } return ret; } /** gets the projectname for a contexgenerated project * * @param cms the cmsObject * @param resource the name of the resource * @return A vector with the same resources */ private String getProjectName(CmsObject cms, String resource) { String ret = resource; if (ret.endsWith("/")){ ret = ret.substring(0, ret.length()-1); } ret = ret.substring(ret.lastIndexOf('/')+1); if (ret.length() > 14){ ret = ret.substring(0,13); } try{ Vector allProjects = cms.getAllAccessibleProjects(); Vector theNames = new Vector(); // count all projects starting with the same name int count = 0; for (int i = 0; i < allProjects.size(); i++){ String currProject = ((CmsProject)allProjects.elementAt(i)).getName(); if (currProject.startsWith(ret)){ count++; theNames.addElement(currProject); } } if ((count > 0) && (count < 99)){ // get the highest version nummber int version = 1; for (int i = 0; i<theNames.size(); i++){ int currVersion = 0; try{ currVersion = 
Integer.parseInt(((String)theNames.elementAt(i)).substring(ret.length()+1)); }catch(Exception e){ } if ((currVersion > version)&& (currVersion < 100)){ version = currVersion; } } if (version < 99){ ret = ret + "_" + (version + 1); } } }catch(CmsException e){ } return ret; } }
only folders in /content/bodys/ are added automatically
src/com/opencms/workplace/CmsAdminProjectNew.java
only folders in /content/bodys/ are added automatically
Java
lgpl-2.1
867c628da8daea9c01835bde459838ece3c17045
0
levants/lightmare
package org.lightmare.ejb; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import javax.ejb.Stateless; import javax.persistence.EntityManagerFactory; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.ConnectionData; import org.lightmare.cache.ConnectionSemaphore; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.MetaData; import org.lightmare.config.Configuration; import org.lightmare.ejb.handlers.BeanHandler; import org.lightmare.ejb.handlers.BeanHandlerFactory; import org.lightmare.ejb.handlers.BeanLocalHandlerFactory; import org.lightmare.ejb.handlers.RestHandler; import org.lightmare.ejb.handlers.RestHandlerFactory; import org.lightmare.libraries.LibraryLoader; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.RpcUtils; import org.lightmare.utils.StringUtils; import org.lightmare.utils.reflect.MetaUtils; /** * Connector class for get EJB beans or call remote procedure in this bean (RPC) * by interface class * * @author Levan * @since 0.0.15-SNAPSHOT */ public class EjbConnector { /** * Gets {@link MetaData} from {@link MetaContainer} if it is not locked or * waits while {@link MetaData#isInProgress()} is true * * @param beanName * @return {@link MetaData} * @throws IOException */ private MetaData getMeta(String beanName) throws IOException { MetaData metaData = MetaContainer.getSyncMetaData(beanName); return metaData; } /** * Gets connection for {@link javax.ejb.Stateless} bean {@link Class} from * cache * * @param unitName * @return {@link EntityManagerFactory} * @throws IOException */ private void setEntityManagerFactory(ConnectionData connection) throws IOException { if (connection.getEmf() == null) { String unitName = connection.getUnitName(); if (StringUtils.valid(unitName)) { 
ConnectionSemaphore semaphore = ConnectionContainer .getConnection(unitName); connection.setConnection(semaphore); } } } /** * Gets connections for {@link Stateless} bean {@link Class} from cache * * @param unitName * @return {@link EntityManagerFactory} * @throws IOException */ private void setEntityManagerFactories(MetaData metaData) throws IOException { Collection<ConnectionData> connections = metaData.getConnections(); if (CollectionUtils.valid(connections)) { for (ConnectionData connection : connections) { setEntityManagerFactory(connection); } } } /** * Instantiates bean by class * * @param metaData * @return <code>T</code> Bean instance * @throws IOException */ private <T> T getBeanInstance(MetaData metaData) throws IOException { Class<? extends T> beanClass = ObjectUtils .cast(metaData.getBeanClass()); T beanInstance = MetaUtils.instantiate(beanClass); return beanInstance; } /** * Creates {@link InvocationHandler} implementation for server mode * * @param metaData * @return {@link InvocationHandler} * @throws IOException */ private <T> BeanHandler getBeanHandler(MetaData metaData) throws IOException { T beanInstance = getBeanInstance(metaData); // Caches EnriryManagerFactory instances in MetaData if they are not // cached yet setEntityManagerFactories(metaData); // Initializes BeanHandler instance and caches it in MetaData if it was // not cached yet BeanHandler handler = BeanHandlerFactory.get(metaData, beanInstance); return handler; } /** * Instantiates bean with {@link Proxy} utility * * @param interfaces * @param handler * @return <code>T</code> implementation of bean interface */ private <T> T instatiateBean(Class<T>[] interfaces, InvocationHandler handler, ClassLoader loader) { if (loader == null) { loader = LibraryLoader.getContextClassLoader(); } else { LibraryLoader.loadCurrentLibraries(loader); } Object instance = Proxy.newProxyInstance(loader, interfaces, handler); T beanInstance = ObjectUtils.cast(instance); return beanInstance; } /** * 
Instantiates bean with {@link Proxy} utility * * @param interfaceClass * @param handler * @return <code>T</code> implementation of bean interface */ private <T> T instatiateBean(Class<T> interfaceClass, InvocationHandler handler, ClassLoader loader) { Class<T>[] interfaceArray = ObjectUtils .cast(new Class<?>[] { interfaceClass }); T beanInstance = instatiateBean(interfaceArray, handler, loader); return beanInstance; } /** * Initializes and caches all interfaces for bean class from passed * {@link MetaData} instance if it is not already cached * * @param metaData * @return {@link Class}[] */ private Class<?>[] setInterfaces(MetaData metaData) { Class<?>[] interfaceClasses = metaData.getInterfaceClasses(); if (CollectionUtils.invalid(interfaceClasses)) { List<Class<?>> interfacesList = new ArrayList<Class<?>>(); Class<?>[] interfaces = metaData.getLocalInterfaces(); if (CollectionUtils.valid(interfaces)) { interfacesList.addAll(Arrays.asList(interfaces)); } interfaces = metaData.getRemoteInterfaces(); if (CollectionUtils.valid(interfaces)) { interfacesList.addAll(Arrays.asList(interfaces)); } int size = interfacesList.size(); interfaceClasses = interfacesList.toArray(new Class[size]); metaData.setInterfaceClasses(interfaceClasses); } return interfaceClasses; } /** * Creates appropriate bean {@link Proxy} instance by passed * {@link MetaData} parameter * * @param metaData * @param rpcArgs * @return <code>T</code> implementation of bean interface * @throws IOException */ public <T> T connectToBean(MetaData metaData) throws IOException { InvocationHandler handler = getBeanHandler(metaData); Class<?>[] interfaces = setInterfaces(metaData); Class<T>[] typedInterfaces = ObjectUtils.cast(interfaces); ClassLoader loader = metaData.getLoader(); T beanInstance = instatiateBean(typedInterfaces, handler, loader); return beanInstance; } /** * Creates custom implementation of bean {@link Class} by class name and its * {@link Proxy} interface {@link Class} instance * * @param 
interfaceClass * @return <code>T</code> implementation of bean interface * @throws IOException */ public <T> T connectToBean(String beanName, Class<T> interfaceClass, Object... rpcArgs) throws IOException { T beanInstance; InvocationHandler handler; ClassLoader loader; if (Configuration.isServer()) { MetaData metaData = getMeta(beanName); setInterfaces(metaData); handler = getBeanHandler(metaData); loader = metaData.getLoader(); } else { if (rpcArgs.length == RpcUtils.RPC_ARGS_LENGTH) { handler = BeanLocalHandlerFactory.get(rpcArgs); loader = null; } else { throw new IOException(RpcUtils.RPC_ARGS_ERROR); } } beanInstance = instatiateBean(interfaceClass, handler, loader); return beanInstance; } /** * Creates custom implementation of bean {@link Class} by class name and its * {@link Proxy} interface name * * @param beanName * @param interfaceName * @param rpcArgs * @return <code>T</code> implementation of bean interface * @throws IOException */ public <T> T connectToBean(String beanName, String interfaceName, Object... rpcArgs) throws IOException { T beanInstance; MetaData metaData = getMeta(beanName); ClassLoader loader = metaData.getLoader(); Class<?> classForName = MetaUtils.classForName(interfaceName, Boolean.FALSE, loader); Class<T> interfaceClass = ObjectUtils.cast(classForName); beanInstance = connectToBean(beanName, interfaceClass, rpcArgs); return beanInstance; } /** * Creates {@link RestHandler} instance for invoking bean methods by REST * services * * @param metaData * @return {@link RestHandler} * @throws IOException */ public <T> RestHandler<T> createRestHandler(MetaData metaData) throws IOException { RestHandler<T> restHandler; BeanHandler handler = getBeanHandler(metaData); Class<T> beanClass = ObjectUtils.cast(metaData.getBeanClass()); T beanInstance = MetaUtils.instantiate(beanClass); restHandler = RestHandlerFactory.get(handler, beanInstance); return restHandler; } }
src/main/java/org/lightmare/ejb/EjbConnector.java
package org.lightmare.ejb; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import javax.ejb.Stateless; import javax.persistence.EntityManagerFactory; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.ConnectionData; import org.lightmare.cache.ConnectionSemaphore; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.MetaData; import org.lightmare.config.Configuration; import org.lightmare.ejb.handlers.BeanHandler; import org.lightmare.ejb.handlers.BeanHandlerFactory; import org.lightmare.ejb.handlers.BeanLocalHandlerFactory; import org.lightmare.ejb.handlers.RestHandler; import org.lightmare.ejb.handlers.RestHandlerFactory; import org.lightmare.libraries.LibraryLoader; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.RpcUtils; import org.lightmare.utils.StringUtils; import org.lightmare.utils.reflect.MetaUtils; /** * Connector class for get EJB beans or call remote procedure in this bean (RPC) * by interface class * * @author Levan * @since 0.0.15-SNAPSHOT */ public class EjbConnector { /** * Gets {@link MetaData} from {@link MetaContainer} if it is not locked or * waits while {@link MetaData#isInProgress()} is true * * @param beanName * @return {@link MetaData} * @throws IOException */ private MetaData getMeta(String beanName) throws IOException { MetaData metaData = MetaContainer.getSyncMetaData(beanName); return metaData; } /** * Gets connection for {@link javax.ejb.Stateless} bean {@link Class} from * cache * * @param unitName * @return {@link EntityManagerFactory} * @throws IOException */ private void setEntityManagerFactory(ConnectionData connection) throws IOException { if (connection.getEmf() == null) { String unitName = connection.getUnitName(); if (StringUtils.valid(unitName)) { 
ConnectionSemaphore semaphore = ConnectionContainer .getConnection(unitName); connection.setConnection(semaphore); } } } /** * Gets connections for {@link Stateless} bean {@link Class} from cache * * @param unitName * @return {@link EntityManagerFactory} * @throws IOException */ private void setEntityManagerFactories(MetaData metaData) throws IOException { Collection<ConnectionData> connections = metaData.getConnections(); if (CollectionUtils.valid(connections)) { for (ConnectionData connection : connections) { setEntityManagerFactory(connection); } } } /** * Instantiates bean by class * * @param metaData * @return <code>T</code> Bean instance * @throws IOException */ private <T> T getBeanInstance(MetaData metaData) throws IOException { Class<? extends T> beanClass = ObjectUtils .cast(metaData.getBeanClass()); T beanInstance = MetaUtils.instantiate(beanClass); return beanInstance; } /** * Creates {@link InvocationHandler} implementation for server mode * * @param metaData * @return {@link InvocationHandler} * @throws IOException */ private <T> BeanHandler getBeanHandler(MetaData metaData) throws IOException { T beanInstance = getBeanInstance(metaData); // Caches EnriryManagerFactory instances in MetaData if they are not // cached yet setEntityManagerFactories(metaData); // Initializes BeanHandler instance and caches it in MetaData if it was // not cached yet BeanHandler handler = BeanHandlerFactory.get(metaData, beanInstance); return handler; } /** * Instantiates bean with {@link Proxy} utility * * @param interfaces * @param handler * @return <code>T</code> implementation of bean interface */ private <T> T instatiateBean(Class<T>[] interfaces, InvocationHandler handler, ClassLoader loader) { if (loader == null) { loader = LibraryLoader.getContextClassLoader(); } else { LibraryLoader.loadCurrentLibraries(loader); } Object instance = Proxy.newProxyInstance(loader, interfaces, handler); T beanInstance = ObjectUtils.cast(instance); return beanInstance; } /** * 
Instantiates bean with {@link Proxy} utility * * @param interfaceClass * @param handler * @return <code>T</code> implementation of bean interface */ private <T> T instatiateBean(Class<T> interfaceClass, InvocationHandler handler, ClassLoader loader) { Class<T>[] interfaceArray = ObjectUtils .cast(new Class<?>[] { interfaceClass }); T beanInstance = instatiateBean(interfaceArray, handler, loader); return beanInstance; } /** * Initializes and caches all interfaces for bean class from passed * {@link MetaData} instance if it is not already cached * * @param metaData * @return {@link Class}[] */ private Class<?>[] setInterfaces(MetaData metaData) { Class<?>[] interfaceClasses = metaData.getInterfaceClasses(); if (CollectionUtils.invalid(interfaceClasses)) { List<Class<?>> interfacesList = new ArrayList<Class<?>>(); Class<?>[] interfaces = metaData.getLocalInterfaces(); if (CollectionUtils.valid(interfaces)) { interfacesList.addAll(Arrays.asList(interfaces)); } interfaces = metaData.getRemoteInterfaces(); if (CollectionUtils.valid(interfaces)) { interfacesList.addAll(Arrays.asList(interfaces)); } int size = interfacesList.size(); interfaceClasses = interfacesList.toArray(new Class[size]); metaData.setInterfaceClasses(interfaceClasses); } return interfaceClasses; } /** * Creates appropriate bean {@link Proxy} instance by passed * {@link MetaData} parameter * * @param metaData * @param rpcArgs * @return <code>T</code> implementation of bean interface * @throws IOException */ public <T> T connectToBean(MetaData metaData) throws IOException { InvocationHandler handler = getBeanHandler(metaData); Class<?>[] interfaces = setInterfaces(metaData); Class<T>[] typedInterfaces = ObjectUtils.cast(interfaces); ClassLoader loader = metaData.getLoader(); T beanInstance = instatiateBean(typedInterfaces, handler, loader); return beanInstance; } /** * Creates custom implementation of bean {@link Class} by class name and its * {@link Proxy} interface {@link Class} instance * * @param 
interfaceClass * @return <code>T</code> implementation of bean interface * @throws IOException */ public <T> T connectToBean(String beanName, Class<T> interfaceClass, Object... rpcArgs) throws IOException { T beanInstance; InvocationHandler handler; ClassLoader loader; if (Configuration.isServer()) { MetaData metaData = getMeta(beanName); setInterfaces(metaData); handler = getBeanHandler(metaData); loader = metaData.getLoader(); } else { if (rpcArgs.length == RpcUtils.RPC_ARGS_LENGTH) { handler = BeanLocalHandlerFactory.get(rpcArgs); loader = null; } else { throw new IOException(RpcUtils.RPC_ARGS_ERROR); } } beanInstance = instatiateBean(interfaceClass, handler, loader); return beanInstance; } /** * Creates custom implementation of bean {@link Class} by class name and its * {@link Proxy} interface name * * @param beanName * @param interfaceName * @param rpcArgs * @return <code>T</code> implementation of bean interface * @throws IOException */ public <T> T connectToBean(String beanName, String interfaceName, Object... rpcArgs) throws IOException { T beanInstance; MetaData metaData = getMeta(beanName); ClassLoader loader = metaData.getLoader(); Class<?> classForName = MetaUtils.classForName(interfaceName, Boolean.FALSE, loader); Class<T> interfaceClass = ObjectUtils.cast(classForName); beanInstance = connectToBean(beanName, interfaceClass, rpcArgs); return beanInstance; } /** * Creates {@link RestHandler} instance for invoking bean methods by REST * services * * @param metaData * @return {@link RestHandler} * @throws IOException */ public <T> RestHandler<T> createRestHandler(MetaData metaData) throws IOException { RestHandler<T> restHandler; BeanHandler handler = getBeanHandler(metaData); Class<T> beanClass = ObjectUtils.cast(metaData.getBeanClass()); T beanInstance = MetaUtils.instantiate(beanClass); restHandler = RestHandlerFactory.get(handler, beanInstance); return restHandler; } }
improved code / comments at utility classes
src/main/java/org/lightmare/ejb/EjbConnector.java
improved code / comments at utility classes
Java
lgpl-2.1
373b275d6647de20273e648f8f5165a87daeb31a
0
bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,nuxeo-archives/nuxeo-features,bjalon/nuxeo-features,bjalon/nuxeo-features,deadcyclo/nuxeo-features,bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,nuxeo-archives/nuxeo-features,bjalon/nuxeo-features,deadcyclo/nuxeo-features,bjalon/nuxeo-features,deadcyclo/nuxeo-features
/* * (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * Nuxeo - initial API and implementation * * $Id: JOOoConvertPluginImpl.java 18651 2007-05-13 20:28:53Z sfermigier $ */ package org.nuxeo.ecm.webapp.action; import java.io.IOException; import java.io.InputStream; import java.util.List; import javax.ejb.Remove; import javax.faces.context.FacesContext; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.RequestParameter; import org.jboss.seam.annotations.Transactional; import org.jboss.seam.annotations.WebRemote; import org.nuxeo.ecm.core.api.Blob; import org.nuxeo.ecm.core.api.ClientException; import org.nuxeo.ecm.core.api.CoreSession; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.core.api.IdRef; import org.nuxeo.ecm.core.api.model.PropertyException; import org.nuxeo.ecm.platform.mimetype.interfaces.MimetypeEntry; import org.nuxeo.ecm.platform.mimetype.interfaces.MimetypeRegistry; import org.nuxeo.ecm.platform.transform.api.TransformException; import org.nuxeo.ecm.platform.transform.api.TransformServiceDelegate; import org.nuxeo.ecm.platform.transform.interfaces.TransformDocument; import org.nuxeo.ecm.platform.transform.interfaces.TransformServiceCommon; import 
org.nuxeo.ecm.platform.ui.web.api.NavigationContext; import org.nuxeo.runtime.api.Framework; /** * @author <a href="mailto:[email protected]">Florent BONNET</a> */ @Name("conversionActions") @Transactional public class ConversionActionBean implements ConversionAction { private static final Log log = LogFactory.getLog(ConversionActionBean.class); @In(create = true, required = false) transient CoreSession documentManager; @In(create = true) transient NavigationContext navigationContext; @RequestParameter private String docRef; @RequestParameter private String fileFieldFullName; @RequestParameter private String filename; @Remove public void destroy() { log.debug("Removing Seam action listener..."); } public String display() { return "view_file"; } private DocumentModel getDocument() throws ClientException { if (docRef == null) { return navigationContext.getCurrentDocument(); } else { return documentManager.getDocument(new IdRef(docRef)); } } private String getMimetypeFromDocument(String propertyName) throws PropertyException, ClientException { Blob blob = (Blob) getDocument().getPropertyValue(propertyName); return blob.getMimeType(); } public boolean isExportableToPDF(Blob blob) { boolean isSupported = false; try { if (blob != null) { String mimetype = blob.getMimeType(); TransformServiceCommon nxt = TransformServiceDelegate.getRemoteTransformService(); isSupported = nxt.isMimetypeSupportedByPlugin("any2pdf", mimetype); } } catch (Exception e) { log.error("error asking the any2pdf plugin whether pdf conversion " + " is supported: " + e.getMessage()); } return isSupported; } @WebRemote public boolean isFileExportableToPDF(String fieldName) { boolean isSupported = false; try { String mimetype = getMimetypeFromDocument(fieldName); TransformServiceCommon nxt = TransformServiceDelegate.getRemoteTransformService(); isSupported = nxt.isMimetypeSupportedByPlugin("any2pdf", mimetype); } catch (TransformException e) { log.error("error asking the any2pdf plugin whether " + 
fieldName + " is supported: ",e); } catch (Exception e) { log.error(e); } return isSupported; } @WebRemote public String generatePdfFile() { try { if (fileFieldFullName == null) { return null; } Blob blob = (Blob) getDocument().getPropertyValue(fileFieldFullName); TransformServiceCommon nxt = Framework.getService(TransformServiceCommon.class); List<TransformDocument> resultingDocs = nxt.transform("any2pdf", null, blob); String name; if (filename == null || filename.equals("")) { name = "file"; } else { name = filename; } // add pdf extension int pos = name.lastIndexOf("."); if (pos <= 0) { name += ".pdf"; } else { String sub = name.substring(pos + 1); name = name.replace(sub, "pdf"); } if (resultingDocs.size() == 0) { log.error("Transform service didn't return any resulting documents which is not normal."); return "pdf_generation_error"; } // converting the result into byte[] to be able to put it in the // response InputStream inputStream = resultingDocs.get(0).getBlob().getStream(); int length = inputStream.available(); byte[] array = new byte[length]; int offset = 0; int n; do { n = inputStream.read(array, offset, length - offset); } while (n != -1); String headerContent = "attachment; filename=\"" + name + "\";"; writeResponse("Content-Disposition", headerContent, "application/pdf", array); return null; } catch (Exception e) { log.error("PDF generation error for file " + filename, e); } return "pdf_generation_error"; } /** * @deprecated use LiveEditBootstrapHelper.isCurrentDocumentLiveEditable() * instead */ @Deprecated @WebRemote public boolean isFileOnlineEditable(String fieldName) { try { boolean isOnlineEditable; String mimetype = getMimetypeFromDocument(fieldName); MimetypeRegistry mimeTypeService = Framework.getService(MimetypeRegistry.class); MimetypeEntry mimetypeEntry = mimeTypeService.getMimetypeEntryByMimeType(mimetype); if (mimetypeEntry == null) { isOnlineEditable = false; } else { isOnlineEditable = mimetypeEntry.isOnlineEditable(); } return 
isOnlineEditable; } catch (Exception e) { log.error("error getting the mimetype entry for " + fieldName + ": " + e.getMessage()); return false; } } /** * Simply sends what to be downloaded or shown at screen via * HttpServletResponse. * * @param header * @param headerContent * @param contentType * @param value * @throws IOException */ private void writeResponse(String header, String headerContent, String contentType, byte[] value) throws IOException { FacesContext context = FacesContext.getCurrentInstance(); HttpServletResponse response = (HttpServletResponse) context.getExternalContext().getResponse(); response.setHeader(header, headerContent); response.setContentType(contentType); response.getOutputStream().write(value); context.responseComplete(); } public void initialize() { log.info("initializing FileViewAction"); } }
nuxeo-platform-webapp-core/src/main/java/org/nuxeo/ecm/webapp/action/ConversionActionBean.java
/* * (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * Nuxeo - initial API and implementation * * $Id: JOOoConvertPluginImpl.java 18651 2007-05-13 20:28:53Z sfermigier $ */ package org.nuxeo.ecm.webapp.action; import java.io.IOException; import java.io.InputStream; import java.util.List; import javax.ejb.Remove; import javax.faces.context.FacesContext; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.RequestParameter; import org.jboss.seam.annotations.Transactional; import org.jboss.seam.annotations.WebRemote; import org.nuxeo.ecm.core.api.Blob; import org.nuxeo.ecm.core.api.ClientException; import org.nuxeo.ecm.core.api.CoreSession; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.core.api.IdRef; import org.nuxeo.ecm.core.api.model.PropertyException; import org.nuxeo.ecm.platform.mimetype.interfaces.MimetypeEntry; import org.nuxeo.ecm.platform.mimetype.interfaces.MimetypeRegistry; import org.nuxeo.ecm.platform.transform.api.TransformServiceDelegate; import org.nuxeo.ecm.platform.transform.interfaces.TransformDocument; import org.nuxeo.ecm.platform.transform.interfaces.TransformServiceCommon; import org.nuxeo.ecm.platform.ui.web.api.NavigationContext; import org.nuxeo.runtime.api.Framework; /** * @author <a 
href="mailto:[email protected]">Florent BONNET</a> */ @Name("conversionActions") @Transactional public class ConversionActionBean implements ConversionAction { private static final Log log = LogFactory.getLog(ConversionActionBean.class); @In(create = true, required = false) transient CoreSession documentManager; @In(create = true) transient NavigationContext navigationContext; @RequestParameter private String docRef; @RequestParameter private String fileFieldFullName; @RequestParameter private String filename; @Remove public void destroy() { log.debug("Removing Seam action listener..."); } public String display() { return "view_file"; } private DocumentModel getDocument() throws ClientException { if (docRef == null) { return navigationContext.getCurrentDocument(); } else { return documentManager.getDocument(new IdRef(docRef)); } } private String getMimetypeFromDocument(String propertyName) throws PropertyException, ClientException { Blob blob = (Blob) getDocument().getPropertyValue(propertyName); return blob.getMimeType(); } public boolean isExportableToPDF(Blob blob) { boolean isSupported = false; try { if (blob != null) { String mimetype = blob.getMimeType(); TransformServiceCommon nxt = TransformServiceDelegate.getRemoteTransformService(); isSupported = nxt.isMimetypeSupportedByPlugin("any2pdf", mimetype); } } catch (Exception e) { log.error("error asking the any2pdf plugin whether pdf conversion " + " is supported: " + e.getMessage()); } return isSupported; } @WebRemote public boolean isFileExportableToPDF(String fieldName) { boolean isSupported = false; try { String mimetype = getMimetypeFromDocument(fieldName); TransformServiceCommon nxt = TransformServiceDelegate.getRemoteTransformService(); isSupported = nxt.isMimetypeSupportedByPlugin("any2pdf", mimetype); } catch (Exception e) { log.error("error asking the any2pdf plugin whether " + fieldName + " is supported: " + e.getMessage()); } return isSupported; } @WebRemote public String generatePdfFile() { try { 
if (fileFieldFullName == null) { return null; } Blob blob = (Blob) getDocument().getPropertyValue(fileFieldFullName); TransformServiceCommon nxt = Framework.getService(TransformServiceCommon.class); List<TransformDocument> resultingDocs = nxt.transform("any2pdf", null, blob); String name; if (filename == null || filename.equals("")) { name = "file"; } else { name = filename; } // add pdf extension int pos = name.lastIndexOf("."); if (pos <= 0) { name += ".pdf"; } else { String sub = name.substring(pos + 1); name = name.replace(sub, "pdf"); } if (resultingDocs.size() == 0) { log.error("Transform service didn't return any resulting documents which is not normal."); return "pdf_generation_error"; } // converting the result into byte[] to be able to put it in the // response InputStream inputStream = resultingDocs.get(0).getBlob().getStream(); int length = inputStream.available(); byte[] array = new byte[length]; int offset = 0; int n; do { n = inputStream.read(array, offset, length - offset); } while (n != -1); String headerContent = "attachment; filename=\"" + name + "\";"; writeResponse("Content-Disposition", headerContent, "application/pdf", array); return null; } catch (Exception e) { log.error("PDF generation error for file " + filename, e); } return "pdf_generation_error"; } /** * @deprecated use LiveEditBootstrapHelper.isCurrentDocumentLiveEditable() * instead */ @Deprecated @WebRemote public boolean isFileOnlineEditable(String fieldName) { try { boolean isOnlineEditable; String mimetype = getMimetypeFromDocument(fieldName); MimetypeRegistry mimeTypeService = Framework.getService(MimetypeRegistry.class); MimetypeEntry mimetypeEntry = mimeTypeService.getMimetypeEntryByMimeType(mimetype); if (mimetypeEntry == null) { isOnlineEditable = false; } else { isOnlineEditable = mimetypeEntry.isOnlineEditable(); } return isOnlineEditable; } catch (Exception e) { log.error("error getting the mimetype entry for " + fieldName + ": " + e.getMessage()); return false; } } /** * 
Simply sends what to be downloaded or shown at screen via * HttpServletResponse. * * @param header * @param headerContent * @param contentType * @param value * @throws IOException */ private void writeResponse(String header, String headerContent, String contentType, byte[] value) throws IOException { FacesContext context = FacesContext.getCurrentInstance(); HttpServletResponse response = (HttpServletResponse) context.getExternalContext().getResponse(); response.setHeader(header, headerContent); response.setContentType(contentType); response.getOutputStream().write(value); context.responseComplete(); } public void initialize() { log.info("initializing FileViewAction"); } }
NXP-2305 missing catch
nuxeo-platform-webapp-core/src/main/java/org/nuxeo/ecm/webapp/action/ConversionActionBean.java
NXP-2305 missing catch
Java
lgpl-2.1
095aac2000077509494a1385a755d6e378e3dda4
0
exedio/copernica,exedio/copernica,exedio/copernica
package com.exedio.cope.lib; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import com.exedio.cope.lib.collision.CollisionItem1; import com.exedio.cope.lib.collision.CollisionItem2; import com.exedio.cope.lib.hierarchy.FirstSub; import com.exedio.cope.lib.hierarchy.Super; /** * An abstract test class for tests creating/using some persistent data. */ public abstract class DatabaseLibTest extends AbstractLibTest { public static final Type[] types = new Type[] { ItemWithSingleUnique.TYPE, ItemWithSingleUniqueReadOnly.TYPE, ItemWithSingleUniqueNotNull.TYPE, ItemWithDoubleUnique.TYPE, EmptyItem.TYPE, EmptyItem2.TYPE, ItemWithManyAttributes.TYPE, StringItem.TYPE, MediaItem.TYPE, SumItem.TYPE, QualifiedItem.TYPE, QualifiedEmptyQualifier.TYPE, PointerItem2.TYPE, PointerItem.TYPE, Super.TYPE, FirstSub.TYPE, CollisionItem1.TYPE, CollisionItem2.TYPE, }; private static boolean createdDatabase = false; private static boolean registeredDropDatabaseHook = false; private static Object lock = new Object(); private static void createDatabase() { synchronized(lock) { if(!createdDatabase) { Database.theInstance.createDatabase(); createdDatabase = true; } } } private void dropDatabase() { synchronized(lock) { if(!registeredDropDatabaseHook) { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable(){ public void run() { Database.theInstance.dropDatabase(); } })); registeredDropDatabaseHook = true; } } } protected void setUp() throws Exception { super.setUp(); createDatabase(); Database.theInstance.checkEmptyTables(); } protected void tearDown() throws Exception { Database.theInstance.checkEmptyTables(); dropDatabase(); super.tearDown(); } protected InputStream stream(byte[] data) { return new ByteArrayInputStream(data); } protected void assertData(final byte[] expectedData, final InputStream actualData) { try { final byte[] actualDataArray = new byte[2*expectedData.length]; final int actualLength = actualData.read(actualDataArray); 
actualData.close(); assertEquals(expectedData.length, actualLength); for(int i = 0; i<actualLength; i++) assertEquals(expectedData[i], actualDataArray[i]); } catch(IOException e) { throw new SystemException(e); } } protected void assertMediaMime(final ItemWithManyAttributes item, final String mimeMajor, final String mimeMinor, final byte[] data, final String url) { try { item.setSomeMediaData(new ByteArrayInputStream(data), mimeMajor, mimeMinor); } catch(IOException e) { throw new SystemException(e); } final String prefix = "/medias/ItemWithManyAttributes/someMedia/"; final String pkString = String.valueOf(Search.pk2id(item.pk)); final String expectedURL = prefix+pkString+'.'+url; final String expectedURLSomeVariant = prefix+"SomeVariant/"+pkString+'.'+url; //System.out.println(expectedURL); //System.out.println(item.getSomeMediaURL()); assertEquals(expectedURL, item.getSomeMediaURL()); assertEquals(expectedURLSomeVariant, item.getSomeMediaURLSomeVariant()); //System.out.println(expectedURLSomeVariant); //System.out.println(item.getSomeMediaURL()); assertData(data, item.getSomeMediaData()); assertEquals(mimeMajor, item.getSomeMediaMimeMajor()); assertEquals(mimeMinor, item.getSomeMediaMimeMinor()); } protected void assertNotEquals(final Item item1, final Item item2) { assertFalse(item1.equals(item2)); assertFalse(item2.equals(item1)); assertFalse(item1.getID().equals(item2.getID())); assertFalse(item1.hashCode()==item2.hashCode()); } protected void assertID(final int id, final Item item) { assertTrue(item.getID()+"/"+id, item.getID().endsWith("."+id)); } protected void assertDelete(final Item item) throws IntegrityViolationException { assertTrue(!item.isDeleted()); item.delete(); assertTrue(item.isDeleted()); } public static void initializeExampleSystem() { try { { final ItemWithSingleUnique item1 = new ItemWithSingleUnique(); item1.setUniqueString("item1"); final ItemWithSingleUnique item2 = new ItemWithSingleUnique(); item2.setUniqueString("item2"); } new 
ItemWithSingleUniqueReadOnly("item1"); new ItemWithSingleUniqueReadOnly("item2"); new ItemWithSingleUniqueNotNull("item1"); new ItemWithSingleUniqueNotNull("item2"); new ItemWithDoubleUnique("string1", 1); new ItemWithDoubleUnique("string1", 2); new ItemWithDoubleUnique("string2", 1); new ItemWithDoubleUnique("string2", 2); final EmptyItem emptyItem1 = new EmptyItem(); final EmptyItem emptyItem2 = new EmptyItem(); final EmptyItem emptyItem3 = new EmptyItem(); new EmptyItem2(); new ItemWithManyAttributes("someString1", 5, 6l, 2.2, true, emptyItem1, ItemWithManyAttributes.SomeEnumeration.enumValue1); new ItemWithManyAttributes("someString2", 6, 7l, 2.3, true, emptyItem2, ItemWithManyAttributes.SomeEnumeration.enumValue2); new ItemWithManyAttributes("someString3", 7, 8l, 2.4, false, emptyItem2, ItemWithManyAttributes.SomeEnumeration.enumValue2); { final StringItem item1 = new StringItem(); final StringItem item2 = new StringItem(); final StringItem item3 = new StringItem(); item1.setAny("any1"); item1.setMin4("min4"); item1.setMax4("max4"); item1.setMin4Max8("min4max8"); item2.setAny("any1"); item2.setMin4("min4"); item2.setMax4("max4"); item2.setMin4Max8("m4x8"); } new MediaItem(); new MediaItem(); new MediaItem(); new SumItem(1, 2, 3); new SumItem(4, 5, 6); { final PointerItem2 item2a = new PointerItem2("hallo"); final PointerItem2 item2b = new PointerItem2("bello"); new PointerItem("bello", item2a); new PointerItem("collo", item2b); } new FirstSub(1); new FirstSub(2); new CollisionItem1(emptyItem1); new CollisionItem1(emptyItem2); new CollisionItem2(emptyItem1); new CollisionItem2(emptyItem2); } catch(ConstraintViolationException e) { throw new SystemException(e); } } public static void main(String[] args) { Database.theInstance.tearDownDatabase(); } }
lib/testsrc/com/exedio/cope/lib/DatabaseLibTest.java
package com.exedio.cope.lib;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import com.exedio.cope.lib.collision.CollisionItem1;
import com.exedio.cope.lib.collision.CollisionItem2;
import com.exedio.cope.lib.hierarchy.FirstSub;
import com.exedio.cope.lib.hierarchy.Super;

/**
 * An abstract test class for tests creating/using some persistent data.
 * Creates the database schema at most once per JVM and registers a shutdown
 * hook dropping it again; every test starts and must end with empty tables.
 */
public abstract class DatabaseLibTest extends AbstractLibTest
{
	/** All persistent types covered by the database tests. */
	public static final Type[] types = new Type[]
	{
		ItemWithSingleUnique.TYPE,
		ItemWithSingleUniqueReadOnly.TYPE,
		ItemWithSingleUniqueNotNull.TYPE,
		ItemWithDoubleUnique.TYPE,
		EmptyItem.TYPE,
		EmptyItem2.TYPE,
		ItemWithManyAttributes.TYPE,
		StringItem.TYPE,
		MediaItem.TYPE,
		SumItem.TYPE,
		QualifiedItem.TYPE,
		QualifiedEmptyQualifier.TYPE,
		PointerItem2.TYPE,
		PointerItem.TYPE,
		Super.TYPE,
		FirstSub.TYPE,
		CollisionItem1.TYPE,
		CollisionItem2.TYPE,
	};

	private static boolean createdDatabase = false;
	private static boolean registeredDropDatabaseHook = false;
	// final: the monitor object guarding the two flags above must never be replaced
	private static final Object lock = new Object();

	/**
	 * Creates the database schema, at most once per JVM.
	 */
	private static void createDatabase()
	{
		synchronized(lock)
		{
			if(!createdDatabase)
			{
				Database.theInstance.createDatabase();
				createdDatabase = true;
			}
		}
	}

	/**
	 * Registers (at most once) a shutdown hook dropping the database when the
	 * JVM terminates; the schema itself stays alive between individual tests.
	 */
	private void dropDatabase()
	{
		synchronized(lock)
		{
			if(!registeredDropDatabaseHook)
			{
				Runtime.getRuntime().addShutdownHook(new Thread(new Runnable()
				{
					public void run()
					{
						Database.theInstance.dropDatabase();
					}
				}));
				registeredDropDatabaseHook = true;
			}
		}
	}

	protected void setUp() throws Exception
	{
		super.setUp();
		createDatabase();
		Database.theInstance.checkEmptyTables();
	}

	protected void tearDown() throws Exception
	{
		// each test must leave the tables as empty as it found them
		Database.theInstance.checkEmptyTables();
		dropDatabase();
		super.tearDown();
	}

	/** Wraps the given bytes into a fresh in-memory stream. */
	protected InputStream stream(byte[] data)
	{
		return new ByteArrayInputStream(data);
	}

	/**
	 * Asserts that the stream yields exactly the expected bytes, then closes it.
	 * Wraps any IOException into an unchecked SystemException.
	 */
	protected void assertData(final byte[] expectedData, final InputStream actualData)
	{
		try
		{
			final byte[] actualDataArray = new byte[2*expectedData.length];
			// BUGFIX: a single InputStream.read(byte[]) call may legally return
			// fewer bytes than are available; loop until end-of-stream (or the
			// buffer is full) before comparing lengths.
			int actualLength = 0;
			int bytesRead;
			while(actualLength<actualDataArray.length &&
					(bytesRead = actualData.read(actualDataArray, actualLength, actualDataArray.length-actualLength))>=0)
				actualLength += bytesRead;
			actualData.close();
			assertEquals(expectedData.length, actualLength);
			for(int i = 0; i<actualLength; i++)
				assertEquals(expectedData[i], actualDataArray[i]);
		}
		catch(IOException e)
		{
			throw new SystemException(e);
		}
	}

	/**
	 * Sets the given media data on the item and asserts that URLs, data and
	 * mime type are all reported back consistently.
	 */
	protected void assertMediaMime(final ItemWithManyAttributes item,
									final String mimeMajor,
									final String mimeMinor,
									final byte[] data,
									final String url)
	{
		try
		{
			item.setSomeMediaData(new ByteArrayInputStream(data), mimeMajor, mimeMinor);
		}
		catch(IOException e)
		{
			throw new SystemException(e);
		}
		final String prefix = "/medias/ItemWithManyAttributes/someMedia/";
		final String pkString = String.valueOf(Search.pk2id(item.pk));
		final String expectedURL = prefix+pkString+'.'+url;
		final String expectedURLSomeVariant = prefix+"SomeVariant/"+pkString+'.'+url;
		assertEquals(expectedURL, item.getSomeMediaURL());
		assertEquals(expectedURLSomeVariant, item.getSomeMediaURLSomeVariant());
		assertData(data, item.getSomeMediaData());
		assertEquals(mimeMajor, item.getSomeMediaMimeMajor());
		assertEquals(mimeMinor, item.getSomeMediaMimeMinor());
	}

	/** Asserts that the two items differ in equality, ID and hash code. */
	protected void assertNotEquals(final Item item1, final Item item2)
	{
		assertFalse(item1.equals(item2));
		assertFalse(item2.equals(item1));
		assertFalse(item1.getID().equals(item2.getID()));
		assertFalse(item1.hashCode()==item2.hashCode());
	}

	/** Asserts that the item's ID ends with the given numeric suffix. */
	protected void assertID(final int id, final Item item)
	{
		assertTrue(item.getID()+"/"+id, item.getID().endsWith("."+id));
	}

	/** Deletes the item and asserts the deleted flag before and after. */
	protected void assertDelete(final Item item) throws IntegrityViolationException
	{
		assertTrue(!item.isDeleted());
		item.delete();
		assertTrue(item.isDeleted());
	}

	/**
	 * Populates the database with a small example object graph of pointer items.
	 */
	public static void initializeExampleSystem()
	{
		try
		{
			final PointerItem2 item2a = new PointerItem2("hallo");
			final PointerItem2 item2b = new PointerItem2("bello");
			new PointerItem("bello", item2a);
			new PointerItem("collo", item2b);
		}
		catch(NotNullViolationException e)
		{
			// the hard-coded example data above is expected to be valid
			throw new SystemException(e);
		}
	}

	/** Command-line entry point tearing down the database schema. */
	public static void main(String[] args)
	{
		Database.theInstance.tearDownDatabase();
	}
}
much larger example system git-svn-id: 9dbc6da3594b32e13bcf3b3752e372ea5bc7c2cc@987 e7d4fc99-c606-0410-b9bf-843393a9eab7
lib/testsrc/com/exedio/cope/lib/DatabaseLibTest.java
much larger example system
Java
apache-2.0
a8234bd4015ea9d1f968a610db38f5aa466825fa
0
MyRealityCoding/rbcgj-2016,MyRealityCoding/rbcgj-2016,MyRealityCoding/rbcgj-2016
package tv.rocketbeans.rbcgj.core;

import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.maps.MapLayer;
import com.badlogic.gdx.maps.MapLayers;
import com.badlogic.gdx.maps.MapObject;
import com.badlogic.gdx.maps.MapProperties;
import com.badlogic.gdx.maps.tiled.TiledMap;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.maps.tiled.renderers.OrthogonalTiledMapRenderer;
import com.badlogic.gdx.math.Vector2;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import aurelienribon.tweenengine.Tween;
import aurelienribon.tweenengine.TweenEquation;
import aurelienribon.tweenengine.TweenEquations;
import box2dLight.PointLight;
import tv.rocketbeans.rbcgj.GameConfig;
import tv.rocketbeans.rbcgj.assets.AssetManager;
import tv.rocketbeans.rbcgj.core.tmx.Tmx;
import tv.rocketbeans.rbcgj.graphics.FX;
import tv.rocketbeans.rbcgj.graphics.LightingManager;
import tv.rocketbeans.rbcgj.tweens.GameObjectTween;
import tv.rocketbeans.rbcgj.tweens.SharedTweenManager;

/**
 * Loads Tiled (TMX) levels and renders their background/foreground tile
 * layers. On each load the previous level is torn down (static lights, spawned
 * game objects, map renderer, music), collision data is rebuilt, map-defined
 * objects (point lights, player spawn, NPCs, crumbs) are materialized and the
 * screen fades in.
 */
public class LevelManager {

    static {
        // Register the accessor once so GameObject properties can be tweened.
        Tween.registerAccessor(GameObject.class, new GameObjectTween());
    }

    private OrthogonalTiledMapRenderer mapRenderer;

    private MapLayers layers;

    private LightingManager lightingManager;

    // Lights created from map objects; removed again on the next loadLevel().
    private List<PointLight> staticLights;

    private CollisionDetector collisions;

    // Player spawn position, snapped to the cell grid in updateObjects().
    private Vector2 spawn = new Vector2();

    private Music music;

    private MapActionHandler actionHandler;

    // Set by loadLevel(); cleared again at the end of renderForeground().
    private boolean initialized;

    // Objects spawned from the map (NPCs, crumbs); removed from the world on reload.
    private Set<GameObject> gameObjects;

    private GameWorld world;

    public LevelManager(LightingManager lightingManager, GameWorld world, MapActionHandler handler, CollisionDetector collisions) {
        this.lightingManager = lightingManager;
        staticLights = new ArrayList<PointLight>();
        this.collisions = collisions;
        this.actionHandler = handler;
        this.world = world;
        this.gameObjects = new HashSet<GameObject>();
    }

    /**
     * Replaces the currently loaded level with the given one: clears old
     * lights/objects, loads map and music, repositions the player at the
     * map's spawn point and fades the screen in.
     */
    public void loadLevel(Levels levels, GameObject player) {
        initialized = true;
        for (PointLight light : staticLights) {
            light.remove(true);
        }
        for (GameObject object : gameObjects) {
            world.remove(object);
        }
        gameObjects.clear();
        staticLights.clear();
        lightingManager.setAmbientLight(levels.getAmbientColor());
        if (mapRenderer != null) {
            // dispose the previous renderer before replacing it to avoid leaking GPU resources
            mapRenderer.dispose();
        }
        TiledMap map = AssetManager.getMap(levels.getMaps());
        collisions.updateCollisions(map);
        layers = map.getLayers();
        mapRenderer = new OrthogonalTiledMapRenderer(map);
        updateObjects();
        actionHandler.load(map);
        player.setPosition(spawn.x, spawn.y);
        if (music != null) {
            // stop looping before stopping the old track, then switch to the new one
            music.setLooping(false);
            music.stop();
        }
        music = AssetManager.getMusic(levels.getMusics());
        music.setLooping(true);
        music.play();
        FX.getInstance().setFadeColor(Color.BLACK);
        FX.getInstance().fadeIn(2.5f);
    }

    /** @return true only between loadLevel() and the next renderForeground() call */
    public boolean isNowInitialized() {
        return initialized;
    }

    /** Renders tile layer 1 (foreground) and clears the initialized flag. */
    public void renderForeground(OrthographicCamera camera) {
        if (mapRenderer != null) {
            mapRenderer.getBatch().begin();
            mapRenderer.setView(camera);
            mapRenderer.renderTileLayer((TiledMapTileLayer) layers.get(1));
            mapRenderer.getBatch().end();
        }
        initialized = false;
    }

    /** Renders tile layer 0 (background). */
    public void renderBackground(OrthographicCamera camera) {
        if (mapRenderer != null) {
            mapRenderer.setView(camera);
            mapRenderer.getBatch().begin();
            mapRenderer.renderTileLayer((TiledMapTileLayer) layers.get(0));
            mapRenderer.getBatch().end();
        }
    }

    /**
     * Scans all map objects of the freshly loaded map and materializes them:
     * point lights, the player spawn point, NPCs and animated crumbs.
     * Positions are snapped to the cell grid defined by GameConfig.CELL_SCALE.
     */
    private void updateObjects() {
        TiledMap map = mapRenderer.getMap();
        for (MapLayer layer : map.getLayers()) {
            for (MapObject object : layer.getObjects()) {
                MapProperties properties = object.getProperties();
                if (properties.get(Tmx.TYPE) == null) {
                    continue;
                }
                if (properties.get(Tmx.TYPE).equals(Tmx.LIGHT)) {
                    // color components and radius are stored as strings in the TMX file
                    Float r = Float.valueOf((String)properties.get(Tmx.RED));
                    Float g = Float.valueOf((String)properties.get(Tmx.GREEN));
                    Float b = Float.valueOf((String)properties.get(Tmx.BLUE));
                    Float a = Float.valueOf((String)properties.get(Tmx.ALPHA));
                    Float radius = Float.valueOf((String)properties.get(Tmx.RADIUS));
                    // center the light within its cell
                    Float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    Float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    PointLight light = lightingManager.addPointLight(radius, new Color(r, g, b, a), x, y);
                    staticLights.add(light);
                } else if (properties.get(Tmx.TYPE).equals(Tmx.SPAWN)) {
                    float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    // Normalize spawn position
                    int xIndex = (int)Math.floor(x / GameConfig.CELL_SCALE);
                    int yIndex = (int)Math.floor(y / GameConfig.CELL_SCALE);
                    spawn.x = xIndex * GameConfig.CELL_SCALE;
                    spawn.y = yIndex * GameConfig.CELL_SCALE;
                } else if (properties.get(Tmx.TYPE).equals(Tmx.NPC)) {
                    float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    // Tmx.NUT names the nut variety; mapped to a type id below
                    String type = (String)properties.get(Tmx.NUT);
                    // Normalize spawn position
                    x = (int)Math.floor(x / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    y = (int)Math.floor(y / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    GameObject npc = world.addObject();
                    npc.setDimensions(GameConfig.CELL_SCALE, GameConfig.CELL_SCALE);
                    npc.setPosition(x, y);
                    npc.setType(getNPCType(type));
                    gameObjects.add(npc);
                } else if (properties.get(Tmx.TYPE).equals(Tmx.CRUMB)) {
                    float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    // Normalize spawn position
                    x = (int)Math.floor(x / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    y = (int)Math.floor(y / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    GameObject crumb = world.addObject();
                    crumb.setDimensions(GameConfig.CELL_SCALE, GameConfig.CELL_SCALE);
                    crumb.setPosition(x, y);
                    crumb.setType(GameObjectType.CRUMB);
                    gameObjects.add(crumb);
                    // Animate crumbs: bob the crumb up and down forever with a random phase offset
                    Tween.to(crumb, GameObjectTween.OFFSET_Y, 0.5f).delay(1f * (float)Math.random()).ease(TweenEquations.easeOutQuad).target(GameConfig.CELL_SCALE / 4f)
                            .repeatYoyo(Tween.INFINITY, 0.5f).start(SharedTweenManager.getInstance());
                }
            }
        }
    }

    /**
     * Maps a nut name from the map file to a GameObjectType constant
     * (presumably int constants — TODO confirm); returns -1 for unknown names.
     */
    private int getNPCType(String type) {
        if (type.equals(Tmx.RAISIN)) {
            return GameObjectType.RUISIN;
        } else if (type.equals(Tmx.ALMOND)) {
            return GameObjectType.ALMOND;
        } else if (type.equals(Tmx.BRAZIL)) {
            return GameObjectType.BRAZIL;
        } else if (type.equals(Tmx.CASHEW)) {
            return GameObjectType.CASHEW;
        }
        return -1;
    }
}
core/src/tv/rocketbeans/rbcgj/core/LevelManager.java
package tv.rocketbeans.rbcgj.core;

import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.maps.MapLayer;
import com.badlogic.gdx.maps.MapLayers;
import com.badlogic.gdx.maps.MapObject;
import com.badlogic.gdx.maps.MapProperties;
import com.badlogic.gdx.maps.tiled.TiledMap;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.maps.tiled.renderers.OrthogonalTiledMapRenderer;
import com.badlogic.gdx.math.Vector2;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import box2dLight.PointLight;
import tv.rocketbeans.rbcgj.GameConfig;
import tv.rocketbeans.rbcgj.assets.AssetManager;
import tv.rocketbeans.rbcgj.core.tmx.Tmx;
import tv.rocketbeans.rbcgj.graphics.FX;
import tv.rocketbeans.rbcgj.graphics.LightingManager;

/**
 * Loads Tiled (TMX) levels and renders their background/foreground tile
 * layers. On each load the previous level is torn down (static lights,
 * spawned game objects, map renderer, music), collision data is rebuilt and
 * map-defined objects (point lights, player spawn, NPCs, crumbs) are created.
 */
public class LevelManager {

    private OrthogonalTiledMapRenderer mapRenderer;

    private MapLayers layers;

    private LightingManager lightingManager;

    // Lights created from map objects; removed again on the next loadLevel().
    private List<PointLight> staticLights;

    private CollisionDetector collisions;

    // Player spawn position, snapped to the cell grid in updateObjects().
    private Vector2 spawn = new Vector2();

    private Music music;

    private MapActionHandler actionHandler;

    // Set by loadLevel(); cleared again at the end of renderForeground().
    private boolean initialized;

    // Objects spawned from the map (NPCs, crumbs); removed from the world on reload.
    private Set<GameObject> gameObjects;

    private GameWorld world;

    public LevelManager(LightingManager lightingManager, GameWorld world, MapActionHandler handler, CollisionDetector collisions) {
        this.lightingManager = lightingManager;
        staticLights = new ArrayList<PointLight>();
        this.collisions = collisions;
        this.actionHandler = handler;
        this.world = world;
        this.gameObjects = new HashSet<GameObject>();
    }

    /**
     * Replaces the currently loaded level with the given one: clears old
     * lights/objects, loads map and music, repositions the player at the
     * map's spawn point and fades the screen in.
     */
    public void loadLevel(Levels levels, GameObject player) {
        initialized = true;
        for (PointLight light : staticLights) {
            light.remove(true);
        }
        for (GameObject object : gameObjects) {
            world.remove(object);
        }
        gameObjects.clear();
        staticLights.clear();
        lightingManager.setAmbientLight(levels.getAmbientColor());
        if (mapRenderer != null) {
            // dispose the previous renderer before replacing it to avoid leaking GPU resources
            mapRenderer.dispose();
        }
        TiledMap map = AssetManager.getMap(levels.getMaps());
        collisions.updateCollisions(map);
        layers = map.getLayers();
        mapRenderer = new OrthogonalTiledMapRenderer(map);
        updateObjects();
        actionHandler.load(map);
        player.setPosition(spawn.x, spawn.y);
        if (music != null) {
            // stop the previous track before switching to the new one
            music.stop();
        }
        music = AssetManager.getMusic(levels.getMusics());
        music.setLooping(true);
        music.play();
        FX.getInstance().setFadeColor(Color.BLACK);
        FX.getInstance().fadeIn(2.5f);
    }

    /** @return true only between loadLevel() and the next renderForeground() call */
    public boolean isNowInitialized() {
        return initialized;
    }

    /** Renders tile layer 1 (foreground) and clears the initialized flag. */
    public void renderForeground(OrthographicCamera camera) {
        if (mapRenderer != null) {
            mapRenderer.getBatch().begin();
            mapRenderer.setView(camera);
            mapRenderer.renderTileLayer((TiledMapTileLayer) layers.get(1));
            mapRenderer.getBatch().end();
        }
        initialized = false;
    }

    /** Renders tile layer 0 (background). */
    public void renderBackground(OrthographicCamera camera) {
        if (mapRenderer != null) {
            mapRenderer.setView(camera);
            mapRenderer.getBatch().begin();
            mapRenderer.renderTileLayer((TiledMapTileLayer) layers.get(0));
            mapRenderer.getBatch().end();
        }
    }

    /**
     * Scans all map objects of the freshly loaded map and materializes them:
     * point lights, the player spawn point, NPCs and crumbs. Positions are
     * snapped to the cell grid defined by GameConfig.CELL_SCALE.
     */
    private void updateObjects() {
        TiledMap map = mapRenderer.getMap();
        for (MapLayer layer : map.getLayers()) {
            for (MapObject object : layer.getObjects()) {
                MapProperties properties = object.getProperties();
                if (properties.get(Tmx.TYPE) == null) {
                    continue;
                }
                if (properties.get(Tmx.TYPE).equals(Tmx.LIGHT)) {
                    // color components and radius are stored as strings in the TMX file
                    Float r = Float.valueOf((String)properties.get(Tmx.RED));
                    Float g = Float.valueOf((String)properties.get(Tmx.GREEN));
                    Float b = Float.valueOf((String)properties.get(Tmx.BLUE));
                    Float a = Float.valueOf((String)properties.get(Tmx.ALPHA));
                    Float radius = Float.valueOf((String)properties.get(Tmx.RADIUS));
                    // center the light within its cell
                    Float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    Float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    PointLight light = lightingManager.addPointLight(radius, new Color(r, g, b, a), x, y);
                    staticLights.add(light);
                } else if (properties.get(Tmx.TYPE).equals(Tmx.SPAWN)) {
                    float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    // Normalize spawn position
                    int xIndex = (int)Math.floor(x / GameConfig.CELL_SCALE);
                    int yIndex = (int)Math.floor(y / GameConfig.CELL_SCALE);
                    spawn.x = xIndex * GameConfig.CELL_SCALE;
                    spawn.y = yIndex * GameConfig.CELL_SCALE;
                } else if (properties.get(Tmx.TYPE).equals(Tmx.NPC)) {
                    float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    // Tmx.NUT names the nut variety; mapped to a type id below
                    String type = (String)properties.get(Tmx.NUT);
                    // Normalize spawn position
                    x = (int)Math.floor(x / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    y = (int)Math.floor(y / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    GameObject npc = world.addObject();
                    npc.setDimensions(GameConfig.CELL_SCALE, GameConfig.CELL_SCALE);
                    npc.setPosition(x, y);
                    npc.setType(getNPCType(type));
                    gameObjects.add(npc);
                } else if (properties.get(Tmx.TYPE).equals(Tmx.CRUMB)) {
                    float x = (Float)properties.get(Tmx.X) + GameConfig.CELL_SCALE / 2f;
                    float y = (Float)properties.get(Tmx.Y) + GameConfig.CELL_SCALE / 2f;
                    // Normalize spawn position
                    x = (int)Math.floor(x / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    y = (int)Math.floor(y / GameConfig.CELL_SCALE) * GameConfig.CELL_SCALE;
                    GameObject crumb = world.addObject();
                    crumb.setDimensions(GameConfig.CELL_SCALE, GameConfig.CELL_SCALE);
                    crumb.setPosition(x, y);
                    crumb.setType(GameObjectType.CRUMB);
                    gameObjects.add(crumb);
                }
            }
        }
    }

    /**
     * Maps a nut name from the map file to a GameObjectType constant
     * (presumably int constants — TODO confirm); returns -1 for unknown names.
     */
    private int getNPCType(String type) {
        if (type.equals(Tmx.RAISIN)) {
            return GameObjectType.RUISIN;
        } else if (type.equals(Tmx.ALMOND)) {
            return GameObjectType.ALMOND;
        } else if (type.equals(Tmx.BRAZIL)) {
            return GameObjectType.BRAZIL;
        } else if (type.equals(Tmx.CASHEW)) {
            return GameObjectType.CASHEW;
        }
        return -1;
    }
}
Animate breadcrumbs
core/src/tv/rocketbeans/rbcgj/core/LevelManager.java
Animate breadcrumbs
Java
apache-2.0
685f074aa8cf3f7e1a05c4cbcc39372f5d9bdf00
0
cdapio/netty-http
/*
 * Copyright 2016 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.cdap.http.internal;

import io.netty.util.concurrent.AbstractEventExecutor;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.EventExecutorGroup;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.OrderedEventExecutor;
import io.netty.util.concurrent.ScheduledFuture;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.UnstableApi;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * {@link EventExecutorGroup} which will preserve {@link Runnable} execution order but makes no guarantees about what
 * {@link EventExecutor} (and therefore {@link Thread}) will be used to execute the {@link Runnable}s.
 *
 * <p>The {@link EventExecutorGroup#next()} for the wrapped {@link EventExecutorGroup} must <strong>NOT</strong> return
 * executors of type {@link OrderedEventExecutor}.
 *
 * NOTE: This class is copied from the netty project to fix the netty bug #8230. This class should be removed
 * after the fix goes in the netty library
 */
@UnstableApi
public final class NonStickyEventExecutorGroup implements EventExecutorGroup {
    private final EventExecutorGroup group;
    private final int maxTaskExecutePerRun;

    /**
     * Creates a new instance. Be aware that the given {@link EventExecutorGroup} <strong>MUST NOT</strong> contain
     * any {@link OrderedEventExecutor}s.
     */
    public NonStickyEventExecutorGroup(EventExecutorGroup group) {
        this(group, 1024);
    }

    /**
     * Creates a new instance. Be aware that the given {@link EventExecutorGroup} <strong>MUST NOT</strong> contain
     * any {@link OrderedEventExecutor}s.
     */
    public NonStickyEventExecutorGroup(EventExecutorGroup group, int maxTaskExecutePerRun) {
        this.group = verify(group);
        this.maxTaskExecutePerRun = ObjectUtil.checkPositive(maxTaskExecutePerRun, "maxTaskExecutePerRun");
    }

    // Rejects groups containing OrderedEventExecutors: wrapping an already-ordered
    // executor would defeat the purpose of this class.
    private static EventExecutorGroup verify(EventExecutorGroup group) {
        Iterator<EventExecutor> executors = ObjectUtil.checkNotNull(group, "group").iterator();
        while (executors.hasNext()) {
            EventExecutor executor = executors.next();
            if (executor instanceof OrderedEventExecutor) {
                throw new IllegalArgumentException("EventExecutorGroup " + group
                        + " contains OrderedEventExecutors: " + executor);
            }
        }
        return group;
    }

    // Wraps a plain executor into an ordering facade (see inner class below).
    private NonStickyOrderedEventExecutor newExecutor(EventExecutor executor) {
        return new NonStickyOrderedEventExecutor(executor, maxTaskExecutePerRun);
    }

    // ---- Plain delegation to the wrapped group ------------------------------

    @Override
    public boolean isShuttingDown() {
        return group.isShuttingDown();
    }

    @Override
    public Future<?> shutdownGracefully() {
        return group.shutdownGracefully();
    }

    @Override
    public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
        return group.shutdownGracefully(quietPeriod, timeout, unit);
    }

    @Override
    public Future<?> terminationFuture() {
        return group.terminationFuture();
    }

    @SuppressWarnings("deprecation")
    @Override
    public void shutdown() {
        group.shutdown();
    }

    @SuppressWarnings("deprecation")
    @Override
    public List<Runnable> shutdownNow() {
        return group.shutdownNow();
    }

    @Override
    public EventExecutor next() {
        // Each call returns a fresh ordering wrapper around whichever executor
        // the underlying group hands out.
        return newExecutor(group.next());
    }

    @Override
    public Iterator<EventExecutor> iterator() {
        final Iterator<EventExecutor> itr = group.iterator();
        return new Iterator<EventExecutor>() {
            @Override
            public boolean hasNext() {
                return itr.hasNext();
            }

            @Override
            public EventExecutor next() {
                return newExecutor(itr.next());
            }

            @Override
            public void remove() {
                itr.remove();
            }
        };
    }

    @Override
    public Future<?> submit(Runnable task) {
        return group.submit(task);
    }

    @Override
    public <T> Future<T> submit(Runnable task, T result) {
        return group.submit(task, result);
    }

    @Override
    public <T> Future<T> submit(Callable<T> task) {
        return group.submit(task);
    }

    @Override
    public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
        return group.schedule(command, delay, unit);
    }

    @Override
    public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) {
        return group.schedule(callable, delay, unit);
    }

    @Override
    public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) {
        return group.scheduleAtFixedRate(command, initialDelay, period, unit);
    }

    @Override
    public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) {
        return group.scheduleWithFixedDelay(command, initialDelay, delay, unit);
    }

    @Override
    public boolean isShutdown() {
        return group.isShutdown();
    }

    @Override
    public boolean isTerminated() {
        return group.isTerminated();
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
        return group.awaitTermination(timeout, unit);
    }

    @Override
    public <T> List<java.util.concurrent.Future<T>> invokeAll(
            Collection<? extends Callable<T>> tasks) throws InterruptedException {
        return group.invokeAll(tasks);
    }

    @Override
    public <T> List<java.util.concurrent.Future<T>> invokeAll(
            Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) throws InterruptedException {
        return group.invokeAll(tasks, timeout, unit);
    }

    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks) throws InterruptedException, ExecutionException {
        return group.invokeAny(tasks);
    }

    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        return group.invokeAny(tasks, timeout, unit);
    }

    @Override
    public void execute(Runnable command) {
        group.execute(command);
    }

    /**
     * Wraps a single (unordered) {@link EventExecutor} and serializes all tasks
     * submitted to it through an MPSC queue, so tasks run in submission order
     * even though the backing thread may change between drain passes.
     * State machine: NONE -> SUBMITTED (drain scheduled) -> RUNNING (draining).
     */
    private static final class NonStickyOrderedEventExecutor extends AbstractEventExecutor implements Runnable,
            OrderedEventExecutor {
        private final EventExecutor executor;
        private final Queue<Runnable> tasks = PlatformDependent.newMpscQueue();

        // No drain pass pending or running.
        private static final int NONE = 0;
        // A drain pass has been handed to the backing executor but not started.
        private static final int SUBMITTED = 1;
        // A drain pass is currently executing tasks.
        private static final int RUNNING = 2;

        private final AtomicInteger state = new AtomicInteger();
        private final int maxTaskExecutePerRun;

        NonStickyOrderedEventExecutor(EventExecutor executor, int maxTaskExecutePerRun) {
            super(executor);
            this.executor = executor;
            this.maxTaskExecutePerRun = maxTaskExecutePerRun;
        }

        @Override
        public void run() {
            if (!state.compareAndSet(SUBMITTED, RUNNING)) {
                return;
            }
            for (;;) {
                int i = 0;
                try {
                    // Drain at most maxTaskExecutePerRun tasks per pass so one
                    // busy wrapper cannot monopolize the backing executor.
                    for (; i < maxTaskExecutePerRun; i++) {
                        Runnable task = tasks.poll();
                        if (task == null) {
                            break;
                        }
                        safeExecute(task);
                    }
                } finally {
                    if (i == maxTaskExecutePerRun) {
                        try {
                            state.set(SUBMITTED);
                            executor.execute(this);
                            return; // done
                        } catch (Throwable ignore) {
                            // Reset the state back to running as we will keep on executing tasks.
                            state.set(RUNNING);
                            // if an error happened we should just ignore it and let the loop run again as there is not
                            // much else we can do. Most likely this was triggered by a full task queue. In this case
                            // we just will run more tasks and try again later.
                        }
                    } else {
                        state.set(NONE);
                        // After setting the state to NONE, look at the tasks queue one more time.
                        // If it is empty, then we can return from this method.
                        // Otherwise, it means the producer thread has called execute(Runnable)
                        // and enqueued a task in between the tasks.poll() above and the state.set(NONE) here.
                        // There are two possible scenarios when this happen
                        //
                        // 1. The producer thread sees state == NONE, hence the compareAndSet(NONE, SUBMITTED)
                        //    is successfully setting the state to SUBMITTED. This mean the producer
                        //    will call / has called executor.execute(this). In this case, we can just return.
                        // 2. The producer thread don't see the state change, hence the compareAndSet(NONE, SUBMITTED)
                        //    returns false. In this case, the producer thread won't call executor.execute.
                        //    In this case, we need to change the state to RUNNING and keeps running.
                        //
                        // The above cases can be distinguished by performing a
                        // compareAndSet(NONE, RUNNING). If it returns "false", it is case 1; otherwise it is case 2.
                        if (tasks.isEmpty() || !state.compareAndSet(NONE, RUNNING)) {
                            return; // done
                        }
                    }
                }
            }
        }

        @Override
        public boolean inEventLoop(Thread thread) {
            // No sticky thread: tasks may run on any thread of the backing group.
            return false;
        }

        @Override
        public boolean inEventLoop() {
            return false;
        }

        @Override
        public boolean isShuttingDown() {
            // NOTE(review): delegates to isShutdown() rather than isShuttingDown() —
            // confirm this matches upstream Netty before changing.
            return executor.isShutdown();
        }

        @Override
        public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
            return executor.shutdownGracefully(quietPeriod, timeout, unit);
        }

        @Override
        public Future<?> terminationFuture() {
            return executor.terminationFuture();
        }

        @Override
        public void shutdown() {
            executor.shutdown();
        }

        @Override
        public boolean isShutdown() {
            return executor.isShutdown();
        }

        @Override
        public boolean isTerminated() {
            return executor.isTerminated();
        }

        @Override
        public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
            return executor.awaitTermination(timeout, unit);
        }

        @Override
        public void execute(Runnable command) {
            if (!tasks.offer(command)) {
                throw new RejectedExecutionException();
            }
            if (state.compareAndSet(NONE, SUBMITTED)) {
                // Actually it could happen that the runnable was picked up in between but we not care to much and just
                // execute ourself. At worst this will be a NOOP when run() is called.
                try {
                    executor.execute(this);
                } catch (Throwable e) {
                    // Not reset the state as some other Runnable may be added to the queue already in the meantime.
                    tasks.remove(command);
                    PlatformDependent.throwException(e);
                }
            }
        }
    }
}
src/main/java/io/cdap/http/internal/NonStickyEventExecutorGroup.java
/* * Copyright 2016 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.cdap.http.internal; import io.netty.util.concurrent.AbstractEventExecutor; import io.netty.util.concurrent.EventExecutor; import io.netty.util.concurrent.EventExecutorGroup; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.OrderedEventExecutor; import io.netty.util.concurrent.ScheduledFuture; import io.netty.util.internal.ObjectUtil; import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.UnstableApi; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; /** * {@link EventExecutorGroup} which will preserve {@link Runnable} execution order but makes no guarantees about what * {@link EventExecutor} (and therefore {@link Thread}) will be used to execute the {@link Runnable}s. * * <p>The {@link EventExecutorGroup#next()} for the wrapped {@link EventExecutorGroup} must <strong>NOT</strong> return * executors of type {@link OrderedEventExecutor}. * * NOTE: This class is copied from the netty project to fix the netty bug #8230. 
This class should be removed * after the fix goes in the netty library */ @UnstableApi public final class NonStickyEventExecutorGroup implements EventExecutorGroup { private final EventExecutorGroup group; private final int maxTaskExecutePerRun; /** * Creates a new instance. Be aware that the given {@link EventExecutorGroup} <strong>MUST NOT</strong> contain * any {@link OrderedEventExecutor}s. */ public NonStickyEventExecutorGroup(EventExecutorGroup group) { this(group, 1024); } /** * Creates a new instance. Be aware that the given {@link EventExecutorGroup} <strong>MUST NOT</strong> contain * any {@link OrderedEventExecutor}s. */ public NonStickyEventExecutorGroup(EventExecutorGroup group, int maxTaskExecutePerRun) { this.group = verify(group); this.maxTaskExecutePerRun = ObjectUtil.checkPositive(maxTaskExecutePerRun, "maxTaskExecutePerRun"); } private static EventExecutorGroup verify(EventExecutorGroup group) { Iterator<EventExecutor> executors = ObjectUtil.checkNotNull(group, "group").iterator(); while (executors.hasNext()) { EventExecutor executor = executors.next(); if (executor instanceof OrderedEventExecutor) { throw new IllegalArgumentException("EventExecutorGroup " + group + " contains OrderedEventExecutors: " + executor); } } return group; } private NonStickyOrderedEventExecutor newExecutor(EventExecutor executor) { return new NonStickyOrderedEventExecutor(executor, maxTaskExecutePerRun); } @Override public boolean isShuttingDown() { return group.isShuttingDown(); } @Override public Future<?> shutdownGracefully() { return group.shutdownGracefully(); } @Override public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) { return group.shutdownGracefully(quietPeriod, timeout, unit); } @Override public Future<?> terminationFuture() { return group.terminationFuture(); } @SuppressWarnings("deprecation") @Override public void shutdown() { group.shutdown(); } @SuppressWarnings("deprecation") @Override public List<Runnable> shutdownNow() 
{ return group.shutdownNow(); } @Override public EventExecutor next() { return newExecutor(group.next()); } @Override public Iterator<EventExecutor> iterator() { final Iterator<EventExecutor> itr = group.iterator(); return new Iterator<EventExecutor>() { @Override public boolean hasNext() { return itr.hasNext(); } @Override public EventExecutor next() { return newExecutor(itr.next()); } @Override public void remove() { itr.remove(); } }; } @Override public Future<?> submit(Runnable task) { return group.submit(task); } @Override public <T> Future<T> submit(Runnable task, T result) { return group.submit(task, result); } @Override public <T> Future<T> submit(Callable<T> task) { return group.submit(task); } @Override public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) { return group.schedule(command, delay, unit); } @Override public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) { return group.schedule(callable, delay, unit); } @Override public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) { return group.scheduleAtFixedRate(command, initialDelay, period, unit); } @Override public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) { return group.scheduleWithFixedDelay(command, initialDelay, delay, unit); } @Override public boolean isShutdown() { return group.isShutdown(); } @Override public boolean isTerminated() { return group.isTerminated(); } @Override public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { return group.awaitTermination(timeout, unit); } @Override public <T> List<java.util.concurrent.Future<T>> invokeAll( Collection<? extends Callable<T>> tasks) throws InterruptedException { return group.invokeAll(tasks); } @Override public <T> List<java.util.concurrent.Future<T>> invokeAll( Collection<? 
extends Callable<T>> tasks, long timeout, TimeUnit unit) throws InterruptedException { return group.invokeAll(tasks, timeout, unit); } @Override public <T> T invokeAny(Collection<? extends Callable<T>> tasks) throws InterruptedException, ExecutionException { return group.invokeAny(tasks); } @Override public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { return group.invokeAny(tasks, timeout, unit); } @Override public void execute(Runnable command) { group.execute(command); } private static final class NonStickyOrderedEventExecutor extends AbstractEventExecutor implements Runnable, OrderedEventExecutor { private final EventExecutor executor; private final Queue<Runnable> tasks = PlatformDependent.newMpscQueue(); private static final int NONE = 0; private static final int SUBMITTED = 1; private static final int RUNNING = 2; private final AtomicInteger state = new AtomicInteger(); private final int maxTaskExecutePerRun; NonStickyOrderedEventExecutor(EventExecutor executor, int maxTaskExecutePerRun) { super(executor); this.executor = executor; this.maxTaskExecutePerRun = maxTaskExecutePerRun; } @Override public void run() { if (!state.compareAndSet(SUBMITTED, RUNNING)) { return; } for (;;) { int i = 0; try { for (; i < maxTaskExecutePerRun; i++) { Runnable task = tasks.poll(); if (task == null) { break; } safeExecute(task); } } finally { if (i == maxTaskExecutePerRun) { try { state.set(SUBMITTED); executor.execute(this); return; // done } catch (Throwable ignore) { // Reset the state back to running as we will keep on executing tasks. state.set(RUNNING); // if an error happened we should just ignore it and let the loop run again as there is not // much else we can do. Most likely this was triggered by a full task queue. In this case // we just will run more tasks and try again later. 
} } else { state.set(NONE); // After setting the state to NONE, look at the tasks queue one more time. // If it is empty, then we can return from this method. // Otherwise, it means the producer thread has called execute(Runnable) // and enqueued a task in between the tasks.poll() above and the state.set(NONE) here. // There are two possible scenarios when this happen // // 1. The producer thread sees state == NONE, hence the compareAndSet(NONE, SUBMITTED) // is successfully setting the state to SUBMITTED. This mean the producer // will call / has called executor.execute(this). In this case, we can just return. // 2. The producer thread don't see the state change, hence the compareAndSet(NONE, SUBMITTED) // returns false. In this case, the producer thread won't call executor.execute. // In this case, we need to change the state to RUNNING and keeps running. // // The above cases can be distinguished by performing a // compareAndSet(NONE, RUNNING). If it returns "false", it is case 1; otherwise it is case 2. 
if (tasks.peek() == null || !state.compareAndSet(NONE, RUNNING)) { return; // done } } } } } @Override public boolean inEventLoop(Thread thread) { return false; } @Override public boolean inEventLoop() { return false; } @Override public boolean isShuttingDown() { return executor.isShutdown(); } @Override public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) { return executor.shutdownGracefully(quietPeriod, timeout, unit); } @Override public Future<?> terminationFuture() { return executor.terminationFuture(); } @Override public void shutdown() { executor.shutdown(); } @Override public boolean isShutdown() { return executor.isShutdown(); } @Override public boolean isTerminated() { return executor.isTerminated(); } @Override public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { return executor.awaitTermination(timeout, unit); } @Override public void execute(Runnable command) { if (!tasks.offer(command)) { throw new RejectedExecutionException(); } if (state.compareAndSet(NONE, SUBMITTED)) { // Actually it could happen that the runnable was picked up in between but we not care to much and just // execute ourself. At worst this will be a NOOP when run() is called. try { executor.execute(this); } catch (Throwable e) { // Not reset the state as some other Runnable may be added to the queue already in the meantime. tasks.remove(command); PlatformDependent.throwException(e); } } } } }
Using isEmpty() instead of peek() to check if task queue is empty, since peek() implementation operates in a similar way to poll like a consumer which can lead to a multi-consumer scenario on a multi-producer-single-consumer queue, therefore getting into an infinite loop issue inside MPSC queue implementation.
src/main/java/io/cdap/http/internal/NonStickyEventExecutorGroup.java
Using isEmpty() instead of peek() to check if task queue is empty, since peek() implementation operates in a similar way to poll like a consumer which can lead to a multi-consumer scenario on a multi-producer-single-consumer queue, therefore getting into an infinite loop issue inside MPSC queue implementation.
Java
apache-2.0
4d0db0feebfe7f4a2ce5f1c68089699a00d18a17
0
tateshitah/jspwiki,tateshitah/jspwiki,tateshitah/jspwiki,tateshitah/jspwiki
/* JSPWiki - a JSP-based WikiWiki clone. Copyright (C) 2001 Janne Jalkanen ([email protected]) This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.ecyrd.jspwiki.providers; import java.lang.ref.SoftReference; import java.util.Properties; import java.util.Collection; import java.util.HashMap; import java.util.TreeSet; import java.util.Date; import java.util.ArrayList; import java.util.List; import java.util.Iterator; import java.io.IOException; import org.apache.log4j.Category; import com.ecyrd.jspwiki.*; import com.ecyrd.jspwiki.util.ClassUtil; /** * Provides a caching page provider. This class rests on top of a * real provider class and provides a cache to speed things up. Only * if the cache copy of the page text has expired, we fetch it from * the provider. * <p> * This class also detects if someone has modified the page * externally, not through JSPWiki routines, and throws the proper * RepositoryModifiedException. * * Heavily based on ideas by Chris Brooking. * * @author Janne Jalkanen * @since 1.6.4 * @see RepositoryModifiedException */ // FIXME: Keeps a list of all WikiPages in memory - should cache them too. // FIXME: Synchronization is a bit inconsistent in places. 
public class CachingProvider implements WikiPageProvider { private static final Category log = Category.getInstance(CachingProvider.class); private WikiPageProvider m_provider; private HashMap m_cache = new HashMap(); private long m_cacheMisses = 0; private long m_cacheHits = 0; private long m_milliSecondsBetweenChecks = 5000; public void initialize( Properties properties ) throws NoRequiredPropertyException, IOException { log.debug("Initing CachingProvider"); String classname = WikiEngine.getRequiredProperty( properties, PageManager.PROP_PAGEPROVIDER ); try { Class providerclass = ClassUtil.findClass( "com.ecyrd.jspwiki.providers", classname ); m_provider = (WikiPageProvider)providerclass.newInstance(); log.debug("Initializing real provider class "+m_provider); m_provider.initialize( properties ); } catch( ClassNotFoundException e ) { log.error("Unable to locate provider class "+classname,e); throw new IllegalArgumentException("no provider class"); } catch( InstantiationException e ) { log.error("Unable to create provider class "+classname,e); throw new IllegalArgumentException("faulty provider class"); } catch( IllegalAccessException e ) { log.error("Illegal access to provider class "+classname,e); throw new IllegalArgumentException("illegal provider class"); } } public boolean pageExists( String page ) { CacheItem item = (CacheItem)m_cache.get( page ); if( checkIfPageChanged( item ) ) { try { revalidatePage( item.m_page ); } catch( ProviderException e ) {} // FIXME: Should do something! return m_provider.pageExists( page ); } // // A null item means that the page either does not // exist, or has not yet been cached; a non-null // means that the page does exist. // if( item != null ) { return true; } // // We could add the page to the cache here as well, // but in order to understand whether that is a // good thing or not we would need to analyze // the JSPWiki calling patterns extensively. 
Presumably // it would be a good thing if pageExists() is called // many times before the first getPageText() is called, // and the whole page is cached. // return m_provider.pageExists( page ); } /** * @throws RepositoryModifiedException If the page has been externally modified. */ public String getPageText( String page, int version ) throws ProviderException { String result = null; if( version == WikiPageProvider.LATEST_VERSION ) { if( pageExists( page ) ) { result = getTextFromCache( page ); } } else { CacheItem item = (CacheItem)m_cache.get( page ); // // Or is this the latest version fetched by version number? // if( item != null && item.m_page.getVersion() == version ) { result = getTextFromCache( page ); } else { result = m_provider.getPageText( page, version ); } } return result; } /** * Returns true, if the page has been changed outside of JSPWiki. */ private boolean checkIfPageChanged( CacheItem item ) { if( item == null ) return false; long currentTime = System.currentTimeMillis(); if( currentTime - item.m_lastChecked > m_milliSecondsBetweenChecks ) { // log.debug("Consistency check: has page "+item.m_page.getName()+" been changed?"); try { WikiPage cached = item.m_page; WikiPage current = m_provider.getPageInfo( cached.getName(), LATEST_VERSION ); // // Page has been deleted. // if( current == null ) { log.debug("Page "+cached.getName()+" has been removed externally."); return true; } item.m_lastChecked = currentTime; long epsilon = 1000L; // FIXME: This should be adjusted according to provider granularity. 
Date curDate = current.getLastModified(); Date cacDate = cached.getLastModified(); // log.debug("cached date = "+cacDate+", current date = "+curDate); if( curDate != null && cacDate != null && curDate.getTime() - cacDate.getTime() > epsilon ) { log.debug("Page "+current.getName()+" has been externally modified, refreshing contents."); return true; } } catch( ProviderException e ) { log.error("While checking cache, got error: ",e); } } return false; } /** * Removes the page from cache, and attempts to reload all information. */ private synchronized void revalidatePage( WikiPage page ) throws ProviderException { m_cache.remove( page.getName() ); addPage( page.getName(), null ); // If fetch fails, we want info to go directly to user } /** * @throws RepositoryModifiedException If the page has been externally modified. */ private String getTextFromCache( String page ) throws ProviderException { CacheItem item; synchronized(this) { item = (CacheItem)m_cache.get( page ); } // // Check if page has been changed externally. If it has, then // we need to refresh all of the information. // if( checkIfPageChanged( item ) ) { revalidatePage( item.m_page ); throw new RepositoryModifiedException( page ); } if( item == null ) { // Page has never been seen. 
// log.debug("Page "+page+" never seen."); String text = m_provider.getPageText( page, WikiPageProvider.LATEST_VERSION ); addPage( page, text ); m_cacheMisses++; return text; } else { String text = (String)item.m_text.get(); if( text == null ) { // Oops, expired already // log.debug("Page "+page+" expired."); text = m_provider.getPageText( page, WikiPageProvider.LATEST_VERSION ); item.m_text = new SoftReference( text ); m_cacheMisses++; return text; } // log.debug("Page "+page+" found in cache."); m_cacheHits++; return text; } } public void putPageText( WikiPage page, String text ) throws ProviderException { synchronized(this) { m_provider.putPageText( page, text ); revalidatePage( page ); } } // FIXME: This MUST be cached somehow. private boolean m_gotall = false; public Collection getAllPages() throws ProviderException { Collection all; if( m_gotall == false ) { all = m_provider.getAllPages(); // Make sure that all pages are in the cache. // FIXME: This has the unfortunate side effect of clearing // the cache. synchronized(this) { for( Iterator i = all.iterator(); i.hasNext(); ) { CacheItem item = new CacheItem(); item.m_page = (WikiPage) i.next(); item.m_text = new SoftReference( null ); m_cache.put( item.m_page.getName(), item ); } m_gotall = true; } } else { all = new ArrayList(); for( Iterator i = m_cache.values().iterator(); i.hasNext(); ) { all.add( ((CacheItem)i.next()).m_page ); } } return all; } // Null text for no page // Returns null if no page could be found. 
private synchronized CacheItem addPage( String pageName, String text ) throws ProviderException { CacheItem item = null; WikiPage newpage = m_provider.getPageInfo( pageName, WikiPageProvider.LATEST_VERSION ); if( newpage != null ) { item = new CacheItem(); item.m_page = newpage; item.m_text = new SoftReference( text ); m_cache.put( pageName, item ); } return item; } public Collection getAllChangedSince( Date date ) { return m_provider.getAllChangedSince( date ); } public int getPageCount() throws ProviderException { return m_provider.getPageCount(); } public Collection findPages( QueryItem[] query ) { TreeSet res = new TreeSet( new SearchResultComparator() ); SearchMatcher matcher = new SearchMatcher( query ); Collection allPages = null; try { allPages = getAllPages(); } catch( ProviderException pe ) { log.error( "Unable to retrieve page list", pe ); return( null ); } Iterator it = allPages.iterator(); while( it.hasNext() ) { try { WikiPage page = (WikiPage) it.next(); String pageName = page.getName(); String pageContent = getTextFromCache( pageName ); SearchResult comparison = matcher.matchPageContent( pageName, pageContent ); if( comparison != null ) { res.add( comparison ); } } catch( RepositoryModifiedException rme ) { // FIXME: What to do in this case??? } catch( ProviderException pe ) { log.error( "Unable to retrieve page from cache", pe ); } catch( IOException ioe ) { log.error( "Failed to search page", ioe ); } } return( res ); } public WikiPage getPageInfo( String page, int version ) throws ProviderException { CacheItem item = (CacheItem)m_cache.get( page ); int latestcached = (item != null) ? item.m_page.getVersion() : Integer.MIN_VALUE; if( version == WikiPageProvider.LATEST_VERSION || version == latestcached ) { if( item == null ) { item = addPage( page, null ); if( item == null ) { return null; } } return item.m_page; } else { // We do not cache old versions. 
return m_provider.getPageInfo( page, version ); } } public List getVersionHistory( String page ) throws ProviderException { return m_provider.getVersionHistory( page ); } public synchronized String getProviderInfo() { int cachedPages = 0; long totalSize = 0; for( Iterator i = m_cache.values().iterator(); i.hasNext(); ) { CacheItem item = (CacheItem) i.next(); String text = (String) item.m_text.get(); if( text != null ) { cachedPages++; totalSize += text.length()*2; } } totalSize = (totalSize+512)/1024L; return("Real provider: "+m_provider.getClass().getName()+ "<br />Cache misses: "+m_cacheMisses+ "<br />Cache hits: "+m_cacheHits+ "<br />Cached pages: "+cachedPages+ "<br />Total cache size (kBytes): "+totalSize+ "<br />Cache consistency checks: "+m_milliSecondsBetweenChecks+"ms"); } public void deleteVersion( String pageName, int version ) throws ProviderException { // // Luckily, this is such a rare operation it is okay // to synchronize against the whole thing. // synchronized( this ) { CacheItem item = (CacheItem)m_cache.get( pageName ); int latestcached = (item != null) ? item.m_page.getVersion() : Integer.MIN_VALUE; // // If we have this version cached, remove from cache. // if( version == WikiPageProvider.LATEST_VERSION || version == latestcached ) { m_cache.remove( pageName ); } m_provider.deleteVersion( pageName, version ); } } public void deletePage( String pageName ) throws ProviderException { // // See note in deleteVersion(). // synchronized(this) { m_cache.remove( pageName ); m_provider.deletePage( pageName ); } } /** * Returns the actual used provider. * @since 2.0 */ public WikiPageProvider getRealProvider() { return m_provider; } private class CacheItem { WikiPage m_page; long m_lastChecked = 0L; SoftReference m_text; } }
src/com/ecyrd/jspwiki/providers/CachingProvider.java
/* JSPWiki - a JSP-based WikiWiki clone. Copyright (C) 2001 Janne Jalkanen ([email protected]) This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.ecyrd.jspwiki.providers; import java.lang.ref.SoftReference; import java.util.Properties; import java.util.Collection; import java.util.HashMap; import java.util.TreeSet; import java.util.Date; import java.util.ArrayList; import java.util.List; import java.util.Iterator; import java.io.IOException; import org.apache.log4j.Category; import com.ecyrd.jspwiki.*; /** * Provides a caching page provider. This class rests on top of a * real provider class and provides a cache to speed things up. Only * if the cache copy of the page text has expired, we fetch it from * the provider. * <p> * This class also detects if someone has modified the page * externally, not through JSPWiki routines, and throws the proper * RepositoryModifiedException. * * Heavily based on ideas by Chris Brooking. * * @author Janne Jalkanen * @since 1.6.4 * @see RepositoryModifiedException */ // FIXME: Keeps a list of all WikiPages in memory - should cache them too. // FIXME: Synchronization is a bit inconsistent in places. 
public class CachingProvider implements WikiPageProvider { private static final Category log = Category.getInstance(CachingProvider.class); private WikiPageProvider m_provider; private HashMap m_cache = new HashMap(); private long m_cacheMisses = 0; private long m_cacheHits = 0; private long m_milliSecondsBetweenChecks = 5000; public void initialize( Properties properties ) throws NoRequiredPropertyException, IOException { log.debug("Initing CachingProvider"); String classname = WikiEngine.getRequiredProperty( properties, PageManager.PROP_PAGEPROVIDER ); try { Class providerclass = WikiEngine.findWikiClass( classname, "com.ecyrd.jspwiki.providers" ); m_provider = (WikiPageProvider)providerclass.newInstance(); log.debug("Initializing real provider class "+m_provider); m_provider.initialize( properties ); } catch( ClassNotFoundException e ) { log.error("Unable to locate provider class "+classname,e); throw new IllegalArgumentException("no provider class"); } catch( InstantiationException e ) { log.error("Unable to create provider class "+classname,e); throw new IllegalArgumentException("faulty provider class"); } catch( IllegalAccessException e ) { log.error("Illegal access to provider class "+classname,e); throw new IllegalArgumentException("illegal provider class"); } } public boolean pageExists( String page ) { CacheItem item = (CacheItem)m_cache.get( page ); if( checkIfPageChanged( item ) ) { try { revalidatePage( item.m_page ); } catch( ProviderException e ) {} // FIXME: Should do something! return m_provider.pageExists( page ); } // // A null item means that the page either does not // exist, or has not yet been cached; a non-null // means that the page does exist. // if( item != null ) { return true; } // // We could add the page to the cache here as well, // but in order to understand whether that is a // good thing or not we would need to analyze // the JSPWiki calling patterns extensively. 
Presumably // it would be a good thing if pageExists() is called // many times before the first getPageText() is called, // and the whole page is cached. // return m_provider.pageExists( page ); } /** * @throws RepositoryModifiedException If the page has been externally modified. */ public String getPageText( String page, int version ) throws ProviderException { String result = null; if( version == WikiPageProvider.LATEST_VERSION ) { if( pageExists( page ) ) { result = getTextFromCache( page ); } } else { CacheItem item = (CacheItem)m_cache.get( page ); // // Or is this the latest version fetched by version number? // if( item != null && item.m_page.getVersion() == version ) { result = getTextFromCache( page ); } else { result = m_provider.getPageText( page, version ); } } return result; } /** * Returns true, if the page has been changed outside of JSPWiki. */ private boolean checkIfPageChanged( CacheItem item ) { if( item == null ) return false; long currentTime = System.currentTimeMillis(); if( currentTime - item.m_lastChecked > m_milliSecondsBetweenChecks ) { // log.debug("Consistency check: has page "+item.m_page.getName()+" been changed?"); try { WikiPage cached = item.m_page; WikiPage current = m_provider.getPageInfo( cached.getName(), LATEST_VERSION ); // // Page has been deleted. // if( current == null ) { log.debug("Page "+cached.getName()+" has been removed externally."); return true; } item.m_lastChecked = currentTime; long epsilon = 1000L; // FIXME: This should be adjusted according to provider granularity. 
Date curDate = current.getLastModified(); Date cacDate = cached.getLastModified(); // log.debug("cached date = "+cacDate+", current date = "+curDate); if( curDate != null && cacDate != null && curDate.getTime() - cacDate.getTime() > epsilon ) { log.debug("Page "+current.getName()+" has been externally modified, refreshing contents."); return true; } } catch( ProviderException e ) { log.error("While checking cache, got error: ",e); } } return false; } /** * Removes the page from cache, and attempts to reload all information. */ private synchronized void revalidatePage( WikiPage page ) throws ProviderException { m_cache.remove( page.getName() ); addPage( page.getName(), null ); // If fetch fails, we want info to go directly to user } /** * @throws RepositoryModifiedException If the page has been externally modified. */ private String getTextFromCache( String page ) throws ProviderException { CacheItem item; synchronized(this) { item = (CacheItem)m_cache.get( page ); } // // Check if page has been changed externally. If it has, then // we need to refresh all of the information. // if( checkIfPageChanged( item ) ) { revalidatePage( item.m_page ); throw new RepositoryModifiedException( page ); } if( item == null ) { // Page has never been seen. 
// log.debug("Page "+page+" never seen."); String text = m_provider.getPageText( page, WikiPageProvider.LATEST_VERSION ); addPage( page, text ); m_cacheMisses++; return text; } else { String text = (String)item.m_text.get(); if( text == null ) { // Oops, expired already // log.debug("Page "+page+" expired."); text = m_provider.getPageText( page, WikiPageProvider.LATEST_VERSION ); item.m_text = new SoftReference( text ); m_cacheMisses++; return text; } // log.debug("Page "+page+" found in cache."); m_cacheHits++; return text; } } public void putPageText( WikiPage page, String text ) throws ProviderException { synchronized(this) { m_provider.putPageText( page, text ); revalidatePage( page ); } } // FIXME: This MUST be cached somehow. private boolean m_gotall = false; public Collection getAllPages() throws ProviderException { Collection all; if( m_gotall == false ) { all = m_provider.getAllPages(); // Make sure that all pages are in the cache. // FIXME: This has the unfortunate side effect of clearing // the cache. synchronized(this) { for( Iterator i = all.iterator(); i.hasNext(); ) { CacheItem item = new CacheItem(); item.m_page = (WikiPage) i.next(); item.m_text = new SoftReference( null ); m_cache.put( item.m_page.getName(), item ); } m_gotall = true; } } else { all = new ArrayList(); for( Iterator i = m_cache.values().iterator(); i.hasNext(); ) { all.add( ((CacheItem)i.next()).m_page ); } } return all; } // Null text for no page // Returns null if no page could be found. 
private synchronized CacheItem addPage( String pageName, String text ) throws ProviderException { CacheItem item = null; WikiPage newpage = m_provider.getPageInfo( pageName, WikiPageProvider.LATEST_VERSION ); if( newpage != null ) { item = new CacheItem(); item.m_page = newpage; item.m_text = new SoftReference( text ); m_cache.put( pageName, item ); } return item; } public Collection getAllChangedSince( Date date ) { return m_provider.getAllChangedSince( date ); } public int getPageCount() throws ProviderException { return m_provider.getPageCount(); } public Collection findPages( QueryItem[] query ) { TreeSet res = new TreeSet( new SearchResultComparator() ); SearchMatcher matcher = new SearchMatcher( query ); Collection allPages = null; try { allPages = getAllPages(); } catch( ProviderException pe ) { log.error( "Unable to retrieve page list", pe ); return( null ); } Iterator it = allPages.iterator(); while( it.hasNext() ) { try { WikiPage page = (WikiPage) it.next(); String pageName = page.getName(); String pageContent = getTextFromCache( pageName ); SearchResult comparison = matcher.matchPageContent( pageName, pageContent ); if( comparison != null ) { res.add( comparison ); } } catch( RepositoryModifiedException rme ) { // FIXME: What to do in this case??? } catch( ProviderException pe ) { log.error( "Unable to retrieve page from cache", pe ); } catch( IOException ioe ) { log.error( "Failed to search page", ioe ); } } return( res ); } public WikiPage getPageInfo( String page, int version ) throws ProviderException { CacheItem item = (CacheItem)m_cache.get( page ); int latestcached = (item != null) ? item.m_page.getVersion() : Integer.MIN_VALUE; if( version == WikiPageProvider.LATEST_VERSION || version == latestcached ) { if( item == null ) { item = addPage( page, null ); if( item == null ) { return null; } } return item.m_page; } else { // We do not cache old versions. 
return m_provider.getPageInfo( page, version ); } } public List getVersionHistory( String page ) throws ProviderException { return m_provider.getVersionHistory( page ); } public synchronized String getProviderInfo() { int cachedPages = 0; long totalSize = 0; for( Iterator i = m_cache.values().iterator(); i.hasNext(); ) { CacheItem item = (CacheItem) i.next(); String text = (String) item.m_text.get(); if( text != null ) { cachedPages++; totalSize += text.length()*2; } } totalSize = (totalSize+512)/1024L; return("Real provider: "+m_provider.getClass().getName()+ "<br />Cache misses: "+m_cacheMisses+ "<br />Cache hits: "+m_cacheHits+ "<br />Cached pages: "+cachedPages+ "<br />Total cache size (kBytes): "+totalSize+ "<br />Cache consistency checks: "+m_milliSecondsBetweenChecks+"ms"); } public void deleteVersion( String pageName, int version ) throws ProviderException { // // Luckily, this is such a rare operation it is okay // to synchronize against the whole thing. // synchronized( this ) { CacheItem item = (CacheItem)m_cache.get( pageName ); int latestcached = (item != null) ? item.m_page.getVersion() : Integer.MIN_VALUE; // // If we have this version cached, remove from cache. // if( version == WikiPageProvider.LATEST_VERSION || version == latestcached ) { m_cache.remove( pageName ); } m_provider.deleteVersion( pageName, version ); } } public void deletePage( String pageName ) throws ProviderException { // // See note in deleteVersion(). // synchronized(this) { m_cache.remove( pageName ); m_provider.deletePage( pageName ); } } /** * Returns the actual used provider. * @since 2.0 */ public WikiPageProvider getRealProvider() { return m_provider; } private class CacheItem { WikiPage m_page; long m_lastChecked = 0L; SoftReference m_text; } }
Now uses ClassUtil instead of WikiEngine.findWikiClass() git-svn-id: 6c0206e3b9edd104850923da33ebd73b435d374d@622896 13f79535-47bb-0310-9956-ffa450edef68
src/com/ecyrd/jspwiki/providers/CachingProvider.java
Now uses ClassUtil instead of WikiEngine.findWikiClass()
Java
apache-2.0
b762fb8520a3b6d826d07c809122e0b60b6533fb
0
jguerinet/MyMartlet,jguerinet/MyMartlet,jguerinet/MyMartlet-Android,jguerinet/MyMartlet,jguerinet/MyMartlet
/* * Copyright 2014-2016 Appvelopers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ca.appvelopers.mcgillmobile.util.storage; import android.content.SharedPreferences; import com.guerinet.utils.prefs.BooleanPreference; import java.util.ArrayList; import ca.appvelopers.mcgillmobile.App; import ca.appvelopers.mcgillmobile.model.Course; import ca.appvelopers.mcgillmobile.model.Place; import ca.appvelopers.mcgillmobile.model.Statement; import ca.appvelopers.mcgillmobile.model.prefs.PasswordPreference; import ca.appvelopers.mcgillmobile.model.prefs.UsernamePreference; import ca.appvelopers.mcgillmobile.util.manager.HomepageManager; /** * Clears objects from internal storage or {@link SharedPreferences} * @author Julien Guerinet * @since 1.0.0 */ public class Clear { /** * Clears all of the user's info * * @param rememberUsernamePref Remember username {@link BooleanPreference} * @param usernamePref {@link UsernamePreference} instance * @param passwordPref {@link PasswordPreference} instance * @param homepageManager {@link HomepageManager} instance */ public static void all(BooleanPreference rememberUsernamePref, UsernamePreference usernamePref, PasswordPreference passwordPref, HomepageManager homepageManager) { //If the user had not chosen to remember their username, clear it if (!rememberUsernamePref.get()) { usernamePref.clear(); } //Password passwordPref.clear(); //Schedule App.setCourses(new ArrayList<Course>()); //Transcript App.setTranscript(null); //Ebill 
App.setEbill(new ArrayList<Statement>()); //User Info App.setUser(null); //HomepageManager homepageManager.clear(); //Default Term App.setDefaultTerm(null); //Wishlist App.setWishlist(new ArrayList<Course>()); //Favorite places App.setFavoritePlaces(new ArrayList<Place>()); //TODO Clear internal storage } }
app/src/main/java/ca/appvelopers/mcgillmobile/util/storage/Clear.java
/* * Copyright 2014-2016 Appvelopers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ca.appvelopers.mcgillmobile.util.storage; import android.content.SharedPreferences; import com.guerinet.utils.prefs.BooleanPreference; import java.util.ArrayList; import ca.appvelopers.mcgillmobile.App; import ca.appvelopers.mcgillmobile.model.Course; import ca.appvelopers.mcgillmobile.model.Place; import ca.appvelopers.mcgillmobile.model.Statement; import ca.appvelopers.mcgillmobile.model.prefs.PasswordPreference; import ca.appvelopers.mcgillmobile.model.prefs.UsernamePreference; import ca.appvelopers.mcgillmobile.util.manager.HomepageManager; /** * Clears objects from internal storage or {@link SharedPreferences} * @author Julien Guerinet * @since 1.0.0 */ public class Clear { /** * Clears all of the user's info * * @param rememberUsernamePref Remember username {@link BooleanPreference} * @param usernamePref {@link UsernamePreference} instance * @param passwordPref {@link PasswordPreference} instance * @param homepageManager {@link HomepageManager} instance */ public static void all(BooleanPreference rememberUsernamePref, UsernamePreference usernamePref, PasswordPreference passwordPref, HomepageManager homepageManager) { //If the user had not chosen to remember their username, clear it if (!rememberUsernamePref.get()) { usernamePref.clear(); } //Password passwordPref.clear(); //Schedule App.setCourses(new ArrayList<Course>()); //Transcript App.setTranscript(null); //Ebill 
App.setEbill(new ArrayList<Statement>()); //User Info App.setUser(null); //HomepageManager homepageManager.clear(); //Default Term App.setDefaultTerm(null); //Wishlist App.setWishlist(new ArrayList<Course>()); //Favorite places App.setFavoritePlaces(new ArrayList<Place>()); } }
Added TODO in Clear class.
app/src/main/java/ca/appvelopers/mcgillmobile/util/storage/Clear.java
Added TODO in Clear class.
Java
apache-2.0
fd270d1a8b7a6c7b189188df661c4a7f7420da5e
0
jasenmoloy/wirelesscontrol
package jasenmoloy.wirelesscontrol.ui; import android.content.Context; import android.content.Intent; import android.support.v7.widget.CardView; import android.util.AttributeSet; import android.view.GestureDetector; import android.view.MotionEvent; import jasenmoloy.wirelesscontrol.data.Constants; import jasenmoloy.wirelesscontrol.data.GeofenceData; /** * Created by jasenmoloy on 4/26/16. */ public class GeofenceCardView extends CardView implements GestureDetector.OnGestureListener { /// ---------------------- /// Class Fields /// ---------------------- /// ---------------------- /// Object Fields /// ---------------------- GestureDetector mGestureDectector; int mPosition; GeofenceData mData; /// ---------------------- /// Getters / Setters /// ---------------------- /// ---------------------- /// Public Methods /// ---------------------- public GeofenceCardView(Context context) { super(context); init(); } public GeofenceCardView(Context context, AttributeSet attrs) { super(context, attrs); init(); } public GeofenceCardView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(); } public void setData(int position, GeofenceData data) { mPosition = position; mData = data; } /// ---------------------- /// Callback Methods /// ---------------------- /** * Handle an user's touch response to open an edit geofence activity. * @param ev * @return */ @Override public boolean onTouchEvent(MotionEvent ev) { mGestureDectector.onTouchEvent(ev); return true; } /** * Intecept all touch events heading to the children to be handled here. 
* @param ev * @return */ @Override public boolean onInterceptTouchEvent(MotionEvent ev) { return true; //JAM Intercept all touch events within this view group } @Override public boolean onDown(MotionEvent e) { return false; } @Override public void onShowPress(MotionEvent e) { } @Override public boolean onSingleTapUp(MotionEvent e) { Context context = getContext(); Intent intent = new Intent(context, EditGeofenceActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT); //Prevents reinstantiation if the activity already exists intent.putExtra(Constants.BROADCAST_EXTRA_KEY_GEOFENCE_ID, mPosition); intent.putExtra(Constants.BROADCAST_EXTRA_KEY_GEODATA, mData); context.startActivity(intent); return true; } @Override public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { return false; } @Override public void onLongPress(MotionEvent e) { } @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { return false; } /// ---------------------- /// Protected Methods /// ---------------------- /// ---------------------- /// Private Methods /// ---------------------- private void init() { mGestureDectector = new GestureDetector(getContext(), this); mGestureDectector.setIsLongpressEnabled(false); } }
app/src/main/java/jasenmoloy/wirelesscontrol/ui/GeofenceCardView.java
package jasenmoloy.wirelesscontrol.ui; import android.content.Context; import android.content.Intent; import android.support.v7.widget.CardView; import android.util.AttributeSet; import android.view.MotionEvent; import jasenmoloy.wirelesscontrol.data.Constants; import jasenmoloy.wirelesscontrol.data.GeofenceData; import jasenmoloy.wirelesscontrol.debug.Debug; /** * Created by jasenmoloy on 4/26/16. */ public class GeofenceCardView extends CardView { /// ---------------------- /// Class Fields /// ---------------------- /// ---------------------- /// Object Fields /// ---------------------- int mPosition; GeofenceData mData; int mLastEvent; /// ---------------------- /// Getters / Setters /// ---------------------- /// ---------------------- /// Public Methods /// ---------------------- public GeofenceCardView(Context context) { super(context); } public GeofenceCardView(Context context, AttributeSet attrs) { super(context, attrs); } public GeofenceCardView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } public void setData(int position, GeofenceData data) { mPosition = position; mData = data; } /// ---------------------- /// Callback Methods /// ---------------------- /** * Handle an user's touch response to open an edit geofence activity. * @param ev * @return */ @Override public boolean onTouchEvent(MotionEvent ev) { Debug.logDebug("GeofenceCardView", "position: " + mPosition + " ev.getAction(): " + ev.getAction()); int action = ev.getAction(); switch(action) { case MotionEvent.ACTION_UP: //If the user's last action was pressing down, then we're attempting to "tap" // the button rather than scroll. 
if(mLastEvent == MotionEvent.ACTION_DOWN) { Context context = getContext(); Intent intent = new Intent(context, EditGeofenceActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT); //Prevents reinstantiation if the activity already exists intent.putExtra(Constants.BROADCAST_EXTRA_KEY_GEOFENCE_ID, mPosition); intent.putExtra(Constants.BROADCAST_EXTRA_KEY_GEODATA, mData); context.startActivity(intent); } break; } mLastEvent = action; return true; } /** * Intecept all touch events heading to the children to be handled here. * @param ev * @return */ @Override public boolean onInterceptTouchEvent(MotionEvent ev) { return true; //JAM Intercept all touch events within this view group } /// ---------------------- /// Protected Methods /// ---------------------- /// ---------------------- /// Private Methods /// ---------------------- }
Improve method for determining tap vs. scroll on GeofenceCardView using GestureDetector
app/src/main/java/jasenmoloy/wirelesscontrol/ui/GeofenceCardView.java
Improve method for determining tap vs. scroll on GeofenceCardView using GestureDetector
Java
apache-2.0
2d1a7b75955e17b1a6b596622e2a74ec460827ef
0
robertmilowski/generator-jhipster,ziogiugno/generator-jhipster,gzsombor/generator-jhipster,atomfrede/generator-jhipster,deepu105/generator-jhipster,vivekmore/generator-jhipster,danielpetisme/generator-jhipster,sohibegit/generator-jhipster,xetys/generator-jhipster,jkutner/generator-jhipster,pascalgrimaud/generator-jhipster,jhipster/generator-jhipster,nkolosnjaji/generator-jhipster,cbornet/generator-jhipster,ctamisier/generator-jhipster,danielpetisme/generator-jhipster,gzsombor/generator-jhipster,robertmilowski/generator-jhipster,pascalgrimaud/generator-jhipster,eosimosu/generator-jhipster,maniacneron/generator-jhipster,dalbelap/generator-jhipster,siliconharborlabs/generator-jhipster,baskeboler/generator-jhipster,dynamicguy/generator-jhipster,siliconharborlabs/generator-jhipster,xetys/generator-jhipster,gmarziou/generator-jhipster,JulienMrgrd/generator-jhipster,dalbelap/generator-jhipster,ziogiugno/generator-jhipster,liseri/generator-jhipster,jhipster/generator-jhipster,gmarziou/generator-jhipster,siliconharborlabs/generator-jhipster,sendilkumarn/generator-jhipster,PierreBesson/generator-jhipster,rifatdover/generator-jhipster,mosoft521/generator-jhipster,gzsombor/generator-jhipster,siliconharborlabs/generator-jhipster,dynamicguy/generator-jhipster,dalbelap/generator-jhipster,rifatdover/generator-jhipster,siliconharborlabs/generator-jhipster,yongli82/generator-jhipster,ziogiugno/generator-jhipster,jkutner/generator-jhipster,vivekmore/generator-jhipster,dalbelap/generator-jhipster,stevehouel/generator-jhipster,liseri/generator-jhipster,PierreBesson/generator-jhipster,yongli82/generator-jhipster,dynamicguy/generator-jhipster,dimeros/generator-jhipster,ramzimaalej/generator-jhipster,sohibegit/generator-jhipster,wmarques/generator-jhipster,baskeboler/generator-jhipster,mraible/generator-jhipster,nkolosnjaji/generator-jhipster,duderoot/generator-jhipster,mraible/generator-jhipster,sendilkumarn/generator-jhipster,sendilkumarn/generator-jhipster,dynamicguy/generator-jhipster,
liseri/generator-jhipster,mraible/generator-jhipster,stevehouel/generator-jhipster,xetys/generator-jhipster,mosoft521/generator-jhipster,rkohel/generator-jhipster,nkolosnjaji/generator-jhipster,PierreBesson/generator-jhipster,yongli82/generator-jhipster,rkohel/generator-jhipster,erikkemperman/generator-jhipster,duderoot/generator-jhipster,eosimosu/generator-jhipster,deepu105/generator-jhipster,rkohel/generator-jhipster,JulienMrgrd/generator-jhipster,erikkemperman/generator-jhipster,maniacneron/generator-jhipster,wmarques/generator-jhipster,liseri/generator-jhipster,wmarques/generator-jhipster,atomfrede/generator-jhipster,JulienMrgrd/generator-jhipster,Tcharl/generator-jhipster,mosoft521/generator-jhipster,sohibegit/generator-jhipster,maniacneron/generator-jhipster,pascalgrimaud/generator-jhipster,ziogiugno/generator-jhipster,stevehouel/generator-jhipster,cbornet/generator-jhipster,stevehouel/generator-jhipster,dalbelap/generator-jhipster,gmarziou/generator-jhipster,jhipster/generator-jhipster,rkohel/generator-jhipster,jkutner/generator-jhipster,liseri/generator-jhipster,rifatdover/generator-jhipster,eosimosu/generator-jhipster,pascalgrimaud/generator-jhipster,erikkemperman/generator-jhipster,gzsombor/generator-jhipster,cbornet/generator-jhipster,maniacneron/generator-jhipster,duderoot/generator-jhipster,jhipster/generator-jhipster,mraible/generator-jhipster,nkolosnjaji/generator-jhipster,hdurix/generator-jhipster,danielpetisme/generator-jhipster,sendilkumarn/generator-jhipster,atomfrede/generator-jhipster,jhipster/generator-jhipster,hdurix/generator-jhipster,pascalgrimaud/generator-jhipster,baskeboler/generator-jhipster,ctamisier/generator-jhipster,lrkwz/generator-jhipster,ramzimaalej/generator-jhipster,wmarques/generator-jhipster,danielpetisme/generator-jhipster,ruddell/generator-jhipster,ruddell/generator-jhipster,cbornet/generator-jhipster,gmarziou/generator-jhipster,lrkwz/generator-jhipster,hdurix/generator-jhipster,dimeros/generator-jhipster,Tcharl/generator-jh
ipster,PierreBesson/generator-jhipster,maniacneron/generator-jhipster,baskeboler/generator-jhipster,ctamisier/generator-jhipster,ctamisier/generator-jhipster,robertmilowski/generator-jhipster,ruddell/generator-jhipster,stevehouel/generator-jhipster,jkutner/generator-jhipster,JulienMrgrd/generator-jhipster,dimeros/generator-jhipster,vivekmore/generator-jhipster,lrkwz/generator-jhipster,deepu105/generator-jhipster,sendilkumarn/generator-jhipster,mraible/generator-jhipster,Tcharl/generator-jhipster,vivekmore/generator-jhipster,eosimosu/generator-jhipster,rkohel/generator-jhipster,robertmilowski/generator-jhipster,lrkwz/generator-jhipster,gzsombor/generator-jhipster,yongli82/generator-jhipster,jkutner/generator-jhipster,eosimosu/generator-jhipster,PierreBesson/generator-jhipster,deepu105/generator-jhipster,ctamisier/generator-jhipster,mosoft521/generator-jhipster,ruddell/generator-jhipster,hdurix/generator-jhipster,atomfrede/generator-jhipster,dimeros/generator-jhipster,danielpetisme/generator-jhipster,nkolosnjaji/generator-jhipster,duderoot/generator-jhipster,sohibegit/generator-jhipster,mosoft521/generator-jhipster,lrkwz/generator-jhipster,baskeboler/generator-jhipster,sohibegit/generator-jhipster,JulienMrgrd/generator-jhipster,duderoot/generator-jhipster,cbornet/generator-jhipster,atomfrede/generator-jhipster,hdurix/generator-jhipster,ziogiugno/generator-jhipster,Tcharl/generator-jhipster,dimeros/generator-jhipster,erikkemperman/generator-jhipster,vivekmore/generator-jhipster,wmarques/generator-jhipster,yongli82/generator-jhipster,robertmilowski/generator-jhipster,deepu105/generator-jhipster,erikkemperman/generator-jhipster,ruddell/generator-jhipster,xetys/generator-jhipster,ramzimaalej/generator-jhipster,gmarziou/generator-jhipster,Tcharl/generator-jhipster
package <%=packageName%>.domain; import com.fasterxml.jackson.annotation.JsonIgnore;<% if (hibernateCache != 'no' && databaseType == 'sql') { %> import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy;<% } %> import org.hibernate.validator.constraints.Email; <% if (databaseType == 'nosql') { %>import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; <% } %><% if (databaseType == 'sql') { %> import javax.persistence.*;<% } %> import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import java.io.Serializable; import java.util.Set; /** * A user. */ <% if (databaseType == 'sql') { %>@Entity @Table(name = "T_USER")<% } %><% if (hibernateCache != 'no' && databaseType == 'sql') { %> @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)<% } %><% if (databaseType == 'nosql') { %> @Document(collection = "T_USER")<% } %> public class User extends AbstractAuditingEntity implements Serializable { @NotNull @Size(min = 0, max = 50) @Id<% if (databaseType == 'sql') { %> @Column(length = 50)<% } %> private String login; @JsonIgnore @Size(min = 0, max = 100)<% if (databaseType == 'sql') { %> @Column(length = 100)<% } %> private String password; @Size(min = 0, max = 50)<% if (databaseType == 'sql') { %> @Column(name = "first_name", length = 50)<% } %><% if (databaseType == 'nosql') { %> @Field("first_name")<% } %> private String firstName; @Size(min = 0, max = 50)<% if (databaseType == 'sql') { %> @Column(name = "last_name", length = 50)<% } %><% if (databaseType == 'nosql') { %> @Field("last_name")<% } %> private String lastName; @Email @Size(min = 0, max = 100)<% if (databaseType == 'sql') { %> @Column(length = 100)<% } %> private String email; @NotNull private boolean activated = false; @Size(min = 2, max = 5)<% if (databaseType == 'sql') { %> @Column(name = "lang_key", length = 5)<% } 
%><% if (databaseType == 'nosql') { %> @Field("lang_key")<% } %> private String langKey; @Size(min = 0, max = 20)<% if (databaseType == 'sql') { %> @Column(name = "activation_key")<% } %><% if (databaseType == 'nosql') { %> @Field("activation_key")<% } %> private String activationKey; @JsonIgnore<% if (databaseType == 'sql') { %> @ManyToMany @JoinTable( name = "T_USER_AUTHORITY", joinColumns = {@JoinColumn(name = "login", referencedColumnName = "login")}, inverseJoinColumns = {@JoinColumn(name = "name", referencedColumnName = "name")})<% } %><% if (hibernateCache != 'no' && databaseType == 'sql') { %> @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)<% } %> private Set<Authority> authorities; <% if (databaseType == 'sql') { %>@JsonIgnore @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, mappedBy = "user")<% } %><% if (hibernateCache != 'no' && databaseType == 'sql') { %> @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)<% } %> private Set<PersistentToken> persistentTokens; public String getLogin() { return login; } public void setLogin(String login) { this.login = login; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public boolean getActivated() { return activated; } public void setActivated(boolean activated) { this.activated = activated; } public String getActivationKey() { return activationKey; } public void setActivationKey(String activationKey) { this.activationKey = activationKey; } public String getLangKey() { return langKey; } public void setLangKey(String langKey) { this.langKey = langKey; } public 
Set<Authority> getAuthorities() { return authorities; } public void setAuthorities(Set<Authority> authorities) { this.authorities = authorities; } <% if (databaseType == 'sql') { %> public Set<PersistentToken> getPersistentTokens() { return persistentTokens; } public void setPersistentTokens(Set<PersistentToken> persistentTokens) { this.persistentTokens = persistentTokens; }<% } %> @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } User user = (User) o; if (!login.equals(user.login)) { return false; } return true; } @Override public int hashCode() { return login.hashCode(); } @Override public String toString() { return "User{" + "login='" + login + '\'' + ", password='" + password + '\'' + ", firstName='" + firstName + '\'' + ", lastName='" + lastName + '\'' + ", email='" + email + '\'' + ", activated='" + activated + '\'' + ", langKey='" + langKey + '\'' + ", activationKey='" + activationKey + '\'' + "}"; } }
app/templates/src/main/java/package/domain/_User.java
package <%=packageName%>.domain; import com.fasterxml.jackson.annotation.JsonIgnore;<% if (hibernateCache != 'no' && databaseType == 'sql') { %> import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy;<% } %> import org.hibernate.validator.constraints.Email; <% if (databaseType == 'nosql') { %>import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; <% } %><% if (databaseType == 'sql') { %> import javax.persistence.*;<% } %> import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import java.io.Serializable; import java.util.Set; /** * A user. */ <% if (databaseType == 'sql') { %>@Entity @Table(name = "T_USER")<% } %><% if (hibernateCache != 'no' && databaseType == 'sql') { %> @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)<% } %><% if (databaseType == 'nosql') { %> @Document(collection = "T_USER")<% } %> public class User extends AbstractAuditingEntity implements Serializable { @NotNull @Size(min = 0, max = 50) @Id<% if (databaseType == 'sql') { %> @Column(length = 50)<% } %> private String login; @JsonIgnore @Size(min = 0, max = 100)<% if (databaseType == 'sql') { %> @Column(length = 100)<% } %> private String password; @Size(min = 0, max = 50)<% if (databaseType == 'sql') { %> @Column(name = "first_name", length = 50)<% } %><% if (databaseType == 'nosql') { %> @Field("first_name")<% } %> private String firstName; @Size(min = 0, max = 50)<% if (databaseType == 'sql') { %> @Column(name = "last_name", length = 50)<% } %><% if (databaseType == 'nosql') { %> @Field("last_name")<% } %> private String lastName; @Email @Size(min = 0, max = 100)<% if (databaseType == 'sql') { %> @Column(length = 100)<% } %> private String email; @NotNull private boolean activated = false; @Size(min = 2, max = 5)<% if (databaseType == 'sql') { %> @Column(name = "lang_key")<% } %><% if 
(databaseType == 'nosql') { %> @Field("lang_key")<% } %> private String langKey; @Size(min = 0, max = 20)<% if (databaseType == 'sql') { %> @Column(name = "activation_key")<% } %><% if (databaseType == 'nosql') { %> @Field("activation_key")<% } %> private String activationKey; @JsonIgnore<% if (databaseType == 'sql') { %> @ManyToMany @JoinTable( name = "T_USER_AUTHORITY", joinColumns = {@JoinColumn(name = "login", referencedColumnName = "login")}, inverseJoinColumns = {@JoinColumn(name = "name", referencedColumnName = "name")})<% } %><% if (hibernateCache != 'no' && databaseType == 'sql') { %> @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)<% } %> private Set<Authority> authorities; <% if (databaseType == 'sql') { %>@JsonIgnore @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, mappedBy = "user")<% } %><% if (hibernateCache != 'no' && databaseType == 'sql') { %> @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)<% } %> private Set<PersistentToken> persistentTokens; public String getLogin() { return login; } public void setLogin(String login) { this.login = login; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public boolean getActivated() { return activated; } public void setActivated(boolean activated) { this.activated = activated; } public String getActivationKey() { return activationKey; } public void setActivationKey(String activationKey) { this.activationKey = activationKey; } public String getLangKey() { return langKey; } public void setLangKey(String langKey) { this.langKey = langKey; } public Set<Authority> 
getAuthorities() { return authorities; } public void setAuthorities(Set<Authority> authorities) { this.authorities = authorities; } <% if (databaseType == 'sql') { %> public Set<PersistentToken> getPersistentTokens() { return persistentTokens; } public void setPersistentTokens(Set<PersistentToken> persistentTokens) { this.persistentTokens = persistentTokens; }<% } %> @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } User user = (User) o; if (!login.equals(user.login)) { return false; } return true; } @Override public int hashCode() { return login.hashCode(); } @Override public String toString() { return "User{" + "login='" + login + '\'' + ", password='" + password + '\'' + ", firstName='" + firstName + '\'' + ", lastName='" + lastName + '\'' + ", email='" + email + '\'' + ", activated='" + activated + '\'' + ", langKey='" + langKey + '\'' + ", activationKey='" + activationKey + '\'' + "}"; } }
Set column length = 5 to match liquibase and constraint validation
app/templates/src/main/java/package/domain/_User.java
Set column length = 5 to match liquibase and constraint validation
Java
apache-2.0
0d10424a7f577c9f7c654e87f05e83ad36373e35
0
likaiwalkman/cassandra,ptuckey/cassandra,sbtourist/cassandra,yangzhe1991/cassandra,pcmanus/cassandra,blerer/cassandra,whitepages/cassandra,jasonstack/cassandra,phact/cassandra,DICL/cassandra,joesiewert/cassandra,bmel/cassandra,instaclustr/cassandra,Instagram/cassandra,Stratio/stratio-cassandra,carlyeks/cassandra,weideng1/cassandra,wreda/cassandra,bpupadhyaya/cassandra,kgreav/cassandra,emolsson/cassandra,blambov/cassandra,juiceblender/cassandra,yanbit/cassandra,asias/cassandra,ptnapoleon/cassandra,bcoverston/cassandra,bcoverston/cassandra,sedulam/CASSANDRA-12201,mkjellman/cassandra,a-buck/cassandra,jrwest/cassandra,krummas/cassandra,leomrocha/cassandra,RyanMagnusson/cassandra,adelapena/cassandra,kangkot/stratio-cassandra,HidemotoNakada/cassandra-udf,mkjellman/cassandra,beobal/cassandra,guanxi55nba/db-improvement,thelastpickle/cassandra,taigetco/cassandra_read,clohfink/cassandra,chbatey/cassandra-1,iamaleksey/cassandra,pthomaid/cassandra,guard163/cassandra,beobal/cassandra,yhnishi/cassandra,mambocab/cassandra,mike-tr-adamson/cassandra,spodkowinski/cassandra,Jaumo/cassandra,bpupadhyaya/cassandra,blambov/cassandra,iamaleksey/cassandra,EnigmaCurry/cassandra,nvoron23/cassandra,adejanovski/cassandra,tjake/cassandra,swps/cassandra,juiceblender/cassandra,Stratio/cassandra,mheffner/cassandra-1,kgreav/cassandra,jasonwee/cassandra,jkni/cassandra,scylladb/scylla-tools-java,pauloricardomg/cassandra,DavidHerzogTU-Berlin/cassandra,rmarchei/cassandra,aureagle/cassandra,jasobrown/cassandra,helena/cassandra,nitsanw/cassandra,kangkot/stratio-cassandra,mambocab/cassandra,yonglehou/cassandra,aboudreault/cassandra,cooldoger/cassandra,sedulam/CASSANDRA-12201,sharvanath/cassandra,sayanh/ViewMaintenanceCassandra,tommystendahl/cassandra,mgmuscari/cassandra-cdh4,rmarchei/cassandra,Imran-C/cassandra,josh-mckenzie/cassandra,scaledata/cassandra,sayanh/ViewMaintenanceCassandra,vramaswamy456/cassandra,lalithsuresh/cassandra-c3,belliottsmith/cassandra,sivikt/cassandra,aweisberg/cassandra,a-buck/cass
andra,sayanh/ViewMaintenanceSupport,mshuler/cassandra,sayanh/ViewMaintenanceSupport,snazy/cassandra,AtwooTM/cassandra,nakomis/cassandra,strapdata/cassandra,macintoshio/cassandra,JeremiahDJordan/cassandra,instaclustr/cassandra,shawnkumar/cstargraph,GabrielNicolasAvellaneda/cassandra,ifesdjeen/cassandra,chaordic/cassandra,yukim/cassandra,apache/cassandra,pauloricardomg/cassandra,dprguiuc/Cassandra-Wasef,dprguiuc/Cassandra-Wasef,blambov/cassandra,tommystendahl/cassandra,nutbunnies/cassandra,nvoron23/cassandra,strapdata/cassandra,clohfink/cassandra,project-zerus/cassandra,aboudreault/cassandra,iburmistrov/Cassandra,michaelmior/cassandra,ibmsoe/cassandra,driftx/cassandra,mkjellman/cassandra,fengshao0907/Cassandra-Research,codefollower/Cassandra-Research,pauloricardomg/cassandra,vramaswamy456/cassandra,ollie314/cassandra,mike-tr-adamson/cassandra,jeromatron/cassandra,jasonwee/cassandra,ptuckey/cassandra,mike-tr-adamson/cassandra,mashuai/Cassandra-Research,pallavi510/cassandra,Stratio/stratio-cassandra,pcn/cassandra-1,modempachev4/kassandra,tongjixianing/projects,Instagram/cassandra,josh-mckenzie/cassandra,michaelmior/cassandra,jasobrown/cassandra,blambov/cassandra,tongjixianing/projects,kgreav/cassandra,thelastpickle/cassandra,pofallon/cassandra,ifesdjeen/cassandra,guanxi55nba/key-value-store,Instagram/cassandra,yonglehou/cassandra,boneill42/cassandra,DavidHerzogTU-Berlin/cassandraToRun,thelastpickle/cassandra,Stratio/stratio-cassandra,emolsson/cassandra,Imran-C/cassandra,knifewine/cassandra,adelapena/cassandra,iamaleksey/cassandra,pthomaid/cassandra,leomrocha/cassandra,mshuler/cassandra,beobal/cassandra,jsanda/cassandra,sluk3r/cassandra,yukim/cassandra,GabrielNicolasAvellaneda/cassandra,ifesdjeen/cassandra,ollie314/cassandra,RyanMagnusson/cassandra,nlalevee/cassandra,asias/cassandra,jrwest/cassandra,jasonwee/cassandra,gdusbabek/cassandra,matthewtt/cassandra_read,Bj0rnen/cassandra,kangkot/stratio-cassandra,guard163/cassandra,carlyeks/cassandra,snazy/cassandra,bcoverston/c
assandra,christian-esken/cassandra,ejankan/cassandra,wreda/cassandra,vramaswamy456/cassandra,pcmanus/cassandra,whitepages/cassandra,aboudreault/cassandra,regispl/cassandra,likaiwalkman/cassandra,pcn/cassandra-1,nlalevee/cassandra,weipinghe/cassandra,newrelic-forks/cassandra,stef1927/cassandra,macintoshio/cassandra,christian-esken/cassandra,krummas/cassandra,taigetco/cassandra_read,asias/cassandra,clohfink/cassandra,josh-mckenzie/cassandra,fengshao0907/cassandra-1,ifesdjeen/cassandra,carlyeks/cassandra,mashuai/Cassandra-Research,jasobrown/cassandra,sbtourist/cassandra,guanxi55nba/key-value-store,kangkot/stratio-cassandra,guanxi55nba/key-value-store,jeromatron/cassandra,jsanda/cassandra,dongjiaqiang/cassandra,rdio/cassandra,qinjin/mdtc-cassandra,WorksApplications/cassandra,sluk3r/cassandra,pcmanus/cassandra,dkua/cassandra,knifewine/cassandra,Imran-C/cassandra,gdusbabek/cassandra,aweisberg/cassandra,pkdevbox/cassandra,hengxin/cassandra,JeremiahDJordan/cassandra,driftx/cassandra,jkni/cassandra,yonglehou/cassandra,scylladb/scylla-tools-java,thobbs/cassandra,yangzhe1991/cassandra,ptnapoleon/cassandra,yangzhe1991/cassandra,dprguiuc/Cassandra-Wasef,modempachev4/kassandra,ibmsoe/cassandra,krummas/cassandra,boneill42/cassandra,joesiewert/cassandra,juiceblender/cassandra,clohfink/cassandra,Jollyplum/cassandra,xiongzheng/Cassandra-Research,hhorii/cassandra,project-zerus/cassandra,ptnapoleon/cassandra,WorksApplications/cassandra,dkua/cassandra,vaibhi9/cassandra,cooldoger/cassandra,pofallon/cassandra,mt0803/cassandra,Bj0rnen/cassandra,helena/cassandra,nutbunnies/cassandra,DavidHerzogTU-Berlin/cassandraToRun,DikangGu/cassandra,dongjiaqiang/cassandra,Stratio/cassandra,belliottsmith/cassandra,LatencyUtils/cassandra-stress2,mgmuscari/cassandra-cdh4,blerer/cassandra,aarushi12002/cassandra,hengxin/cassandra,rmarchei/cassandra,nvoron23/cassandra,pallavi510/cassandra,sriki77/cassandra,mkjellman/cassandra,chaordic/cassandra,stef1927/cassandra,aureagle/cassandra,mheffner/cassandra-1,weiden
g1/cassandra,krummas/cassandra,caidongyun/cassandra,matthewtt/cassandra_read,spodkowinski/cassandra,shawnkumar/cstargraph,pcn/cassandra-1,whitepages/cassandra,scylladb/scylla-tools-java,driftx/cassandra,bmel/cassandra,ben-manes/cassandra,codefollower/Cassandra-Research,wreda/cassandra,ben-manes/cassandra,blerer/cassandra,yhnishi/cassandra,WorksApplications/cassandra,bdeggleston/cassandra,aweisberg/cassandra,stef1927/cassandra,yanbit/cassandra,aweisberg/cassandra,bcoverston/apache-hosted-cassandra,aarushi12002/cassandra,snazy/cassandra,rdio/cassandra,belliottsmith/cassandra,gdusbabek/cassandra,ejankan/cassandra,sharvanath/cassandra,michaelsembwever/cassandra,jeromatron/cassandra,DICL/cassandra,juiceblender/cassandra,stef1927/cassandra,Bj0rnen/cassandra,guard163/cassandra,michaelsembwever/cassandra,DikangGu/cassandra,ejankan/cassandra,leomrocha/cassandra,adelapena/cassandra,iburmistrov/Cassandra,bdeggleston/cassandra,ibmsoe/cassandra,mt0803/cassandra,darach/cassandra,sriki77/cassandra,mt0803/cassandra,guanxi55nba/db-improvement,phact/cassandra,helena/cassandra,AtwooTM/cassandra,pkdevbox/cassandra,beobal/cassandra,pthomaid/cassandra,strapdata/cassandra,exoscale/cassandra,jbellis/cassandra,cooldoger/cassandra,instaclustr/cassandra,weipinghe/cassandra,ollie314/cassandra,pauloricardomg/cassandra,macintoshio/cassandra,yhnishi/cassandra,xiongzheng/Cassandra-Research,jeffjirsa/cassandra,shawnkumar/cstargraph,WorksApplications/cassandra,guanxi55nba/db-improvement,hengxin/cassandra,miguel0afd/cassandra-cqlMod,mashuai/Cassandra-Research,exoscale/cassandra,snazy/cassandra,jasonstack/cassandra,kangkot/stratio-cassandra,nakomis/cassandra,tjake/cassandra,scaledata/cassandra,nitsanw/cassandra,iburmistrov/Cassandra,Instagram/cassandra,vaibhi9/cassandra,jrwest/cassandra,michaelsembwever/cassandra,adelapena/cassandra,yukim/cassandra,tjake/cassandra,jasonstack/cassandra,jrwest/cassandra,fengshao0907/Cassandra-Research,scylladb/scylla-tools-java,sriki77/cassandra,ptuckey/cassandra,jeffji
rsa/cassandra,nakomis/cassandra,vaibhi9/cassandra,instaclustr/cassandra,modempachev4/kassandra,thelastpickle/cassandra,lalithsuresh/cassandra-c3,caidongyun/cassandra,knifewine/cassandra,bmel/cassandra,project-zerus/cassandra,caidongyun/cassandra,josh-mckenzie/cassandra,sivikt/cassandra,emolsson/cassandra,rackerlabs/cloudmetrics-cassandra,pkdevbox/cassandra,rogerchina/cassandra,tongjixianing/projects,scaledata/cassandra,michaelmior/cassandra,nitsanw/cassandra,apache/cassandra,rdio/cassandra,phact/cassandra,iamaleksey/cassandra,yukim/cassandra,dongjiaqiang/cassandra,mgmuscari/cassandra-cdh4,szhou1234/cassandra,blerer/cassandra,jbellis/cassandra,EnigmaCurry/cassandra,Jaumo/cassandra,sbtourist/cassandra,tommystendahl/cassandra,qinjin/mdtc-cassandra,mike-tr-adamson/cassandra,Stratio/stratio-cassandra,codefollower/Cassandra-Research,Jollyplum/cassandra,taigetco/cassandra_read,driftx/cassandra,JeremiahDJordan/cassandra,MasahikoSawada/cassandra,AtwooTM/cassandra,newrelic-forks/cassandra,tommystendahl/cassandra,GabrielNicolasAvellaneda/cassandra,fengshao0907/cassandra-1,exoscale/cassandra,regispl/cassandra,LatencyUtils/cassandra-stress2,fengshao0907/Cassandra-Research,apache/cassandra,Stratio/stratio-cassandra,xiongzheng/Cassandra-Research,mshuler/cassandra,Stratio/cassandra,sluk3r/cassandra,bdeggleston/cassandra,heiko-braun/cassandra,DavidHerzogTU-Berlin/cassandraToRun,sayanh/ViewMaintenanceCassandra,matthewtt/cassandra_read,heiko-braun/cassandra,RyanMagnusson/cassandra,LatencyUtils/cassandra-stress2,Jollyplum/cassandra,bcoverston/apache-hosted-cassandra,miguel0afd/cassandra-cqlMod,fengshao0907/cassandra-1,szhou1234/cassandra,sharvanath/cassandra,jasobrown/cassandra,miguel0afd/cassandra-cqlMod,regispl/cassandra,rackerlabs/cloudmetrics-cassandra,lalithsuresh/cassandra-c3,nlalevee/cassandra,swps/cassandra,apache/cassandra,newrelic-forks/cassandra,darach/cassandra,chbatey/cassandra-1,bcoverston/apache-hosted-cassandra,darach/cassandra,sivikt/cassandra,DikangGu/cassandra,chbate
y/cassandra-1,szhou1234/cassandra,weideng1/cassandra,jsanda/cassandra,hhorii/cassandra,szhou1234/cassandra,tjake/cassandra,DavidHerzogTU-Berlin/cassandra,heiko-braun/cassandra,mshuler/cassandra,boneill42/cassandra,kgreav/cassandra,mambocab/cassandra,bcoverston/cassandra,chaordic/cassandra,MasahikoSawada/cassandra,rackerlabs/cloudmetrics-cassandra,michaelsembwever/cassandra,hhorii/cassandra,jkni/cassandra,nutbunnies/cassandra,spodkowinski/cassandra,EnigmaCurry/cassandra,aureagle/cassandra,likaiwalkman/cassandra,a-buck/cassandra,thobbs/cassandra,swps/cassandra,mheffner/cassandra-1,jbellis/cassandra,qinjin/mdtc-cassandra,christian-esken/cassandra,strapdata/cassandra,weipinghe/cassandra,rogerchina/cassandra,HidemotoNakada/cassandra-udf,sedulam/CASSANDRA-12201,ben-manes/cassandra,belliottsmith/cassandra,rogerchina/cassandra,adejanovski/cassandra,DICL/cassandra,yanbit/cassandra,bpupadhyaya/cassandra,aarushi12002/cassandra,bdeggleston/cassandra,HidemotoNakada/cassandra-udf,DavidHerzogTU-Berlin/cassandra,jeffjirsa/cassandra,MasahikoSawada/cassandra,spodkowinski/cassandra,Jaumo/cassandra,joesiewert/cassandra,pofallon/cassandra,thobbs/cassandra,adejanovski/cassandra,cooldoger/cassandra,pallavi510/cassandra,dkua/cassandra,jeffjirsa/cassandra
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db; import java.io.File; import java.io.IOException; import java.lang.management.ManagementFactory; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; import javax.management.*; import com.google.common.collect.AbstractIterator; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import org.cliffc.high_scale_lib.NonBlockingHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.cache.IRowCacheEntry; import org.apache.cassandra.cache.RowCacheKey; import org.apache.cassandra.cache.RowCacheSentinel; import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor; import org.apache.cassandra.concurrent.NamedThreadFactory; import org.apache.cassandra.concurrent.StageManager; import org.apache.cassandra.config.*; import org.apache.cassandra.db.columniterator.OnDiskAtomIterator; import org.apache.cassandra.db.commitlog.CommitLog; import 
org.apache.cassandra.db.commitlog.ReplayPosition; import org.apache.cassandra.db.compaction.AbstractCompactionStrategy; import org.apache.cassandra.db.compaction.CompactionManager; import org.apache.cassandra.db.compaction.LeveledCompactionStrategy; import org.apache.cassandra.db.compaction.OperationType; import org.apache.cassandra.db.filter.ExtendedFilter; import org.apache.cassandra.db.filter.IFilter; import org.apache.cassandra.db.filter.QueryFilter; import org.apache.cassandra.db.filter.QueryPath; import org.apache.cassandra.db.index.SecondaryIndex; import org.apache.cassandra.db.index.SecondaryIndexManager; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.dht.*; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.io.compress.CompressionParameters; import org.apache.cassandra.io.sstable.*; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.metrics.ColumnFamilyMetrics; import org.apache.cassandra.service.CacheService; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.thrift.IndexExpression; import org.apache.cassandra.utils.*; import static org.apache.cassandra.config.CFMetaData.Caching; public class ColumnFamilyStore implements ColumnFamilyStoreMBean { private static final Logger logger = LoggerFactory.getLogger(ColumnFamilyStore.class); /* * maybeSwitchMemtable puts Memtable.getSortedContents on the writer executor. When the write is complete, * we turn the writer into an SSTableReader and add it to ssTables where it is available for reads. * * There are two other things that maybeSwitchMemtable does. * First, it puts the Memtable into memtablesPendingFlush, where it stays until the flush is complete * and it's been added as an SSTableReader to ssTables_. Second, it adds an entry to commitLogUpdater * that waits for the flush to complete, then calls onMemtableFlush. 
This allows multiple flushes * to happen simultaneously on multicore systems, while still calling onMF in the correct order, * which is necessary for replay in case of a restart since CommitLog assumes that when onMF is * called, all data up to the given context has been persisted to SSTables. */ private static final ExecutorService flushWriter = new JMXEnabledThreadPoolExecutor(DatabaseDescriptor.getFlushWriters(), StageManager.KEEPALIVE, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(DatabaseDescriptor.getFlushQueueSize()), new NamedThreadFactory("FlushWriter"), "internal"); public static final ExecutorService postFlushExecutor = new JMXEnabledThreadPoolExecutor("MemtablePostFlusher"); static { // (can block if flush queue fills up, so don't put on scheduledTasks) StorageService.optionalTasks.scheduleWithFixedDelay(new MeteredFlusher(), 1000, 1000, TimeUnit.MILLISECONDS); } public final Table table; public final String columnFamily; public final CFMetaData metadata; public final IPartitioner partitioner; private final String mbeanName; private volatile boolean valid = true; /* Memtables and SSTables on disk for this column family */ private final DataTracker data; /* This is used to generate the next index for a SSTable */ private final AtomicInteger fileIndexGenerator = new AtomicInteger(0); public final SecondaryIndexManager indexManager; private static final int INTERN_CUTOFF = 256; public final ConcurrentMap<ByteBuffer, ByteBuffer> internedNames = new NonBlockingHashMap<ByteBuffer, ByteBuffer>(); /* These are locally held copies to be changed from the config during runtime */ private volatile DefaultInteger minCompactionThreshold; private volatile DefaultInteger maxCompactionThreshold; private volatile AbstractCompactionStrategy compactionStrategy; public final Directories directories; /** ratio of in-memory memtable size, to serialized size */ volatile double liveRatio = 1.0; /** ops count last time we computed liveRatio */ private final AtomicLong 
liveRatioComputedAt = new AtomicLong(32); public final ColumnFamilyMetrics metric; public void reload() { // metadata object has been mutated directly. make all the members jibe with new settings. // only update these runtime-modifiable settings if they have not been modified. if (!minCompactionThreshold.isModified()) for (ColumnFamilyStore cfs : concatWithIndexes()) cfs.minCompactionThreshold = new DefaultInteger(metadata.getMinCompactionThreshold()); if (!maxCompactionThreshold.isModified()) for (ColumnFamilyStore cfs : concatWithIndexes()) cfs.maxCompactionThreshold = new DefaultInteger(metadata.getMaxCompactionThreshold()); maybeReloadCompactionStrategy(); indexManager.reload(); // If the CF comparator has changed, we need to change the memtable, // because the old one still aliases the previous comparator. We don't // call forceFlush() because it can skip the switch if the memtable is // clean, which we don't want here. Also, because there can be a race // between the time we acquire the current memtable and we flush it // (another thread can have flushed it first), we attempt the switch // until we know the memtable has the current comparator. try { while (true) { AbstractType comparator = metadata.comparator; Memtable memtable = getMemtableThreadSafe(); if (memtable.initialComparator == comparator) break; Future future = maybeSwitchMemtable(getMemtableThreadSafe(), true); if (future != null) future.get(); } } catch (ExecutionException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new AssertionError(e); } } private void maybeReloadCompactionStrategy() { // Check if there is a need for reloading if (metadata.compactionStrategyClass.equals(compactionStrategy.getClass()) && metadata.compactionStrategyOptions.equals(compactionStrategy.getOptions())) return; // TODO is there a way to avoid locking here? 
CompactionManager.instance.getCompactionLock().lock(); try { compactionStrategy.shutdown(); compactionStrategy = metadata.createCompactionStrategyInstance(this); } finally { CompactionManager.instance.getCompactionLock().unlock(); } } public void setCompactionStrategyClass(String compactionStrategyClass) throws ConfigurationException { metadata.compactionStrategyClass = CFMetaData.createCompactionStrategy(compactionStrategyClass); maybeReloadCompactionStrategy(); } public String getCompactionStrategyClass() { return metadata.compactionStrategyClass.getName(); } public Map<String,String> getCompressionParameters() { return metadata.compressionParameters().asThriftOptions(); } public void setCompressionParameters(Map<String,String> opts) throws ConfigurationException { metadata.compressionParameters = CompressionParameters.create(opts); } private ColumnFamilyStore(Table table, String columnFamilyName, IPartitioner partitioner, int generation, CFMetaData metadata, Directories directories, boolean loadSSTables) { assert metadata != null : "null metadata for " + table + ":" + columnFamilyName; this.table = table; columnFamily = columnFamilyName; this.metadata = metadata; this.minCompactionThreshold = new DefaultInteger(metadata.getMinCompactionThreshold()); this.maxCompactionThreshold = new DefaultInteger(metadata.getMaxCompactionThreshold()); this.partitioner = partitioner; this.directories = directories; this.indexManager = new SecondaryIndexManager(this); this.metric = new ColumnFamilyMetrics(this); fileIndexGenerator.set(generation); Caching caching = metadata.getCaching(); if (logger.isDebugEnabled()) logger.debug("Starting CFS {}", columnFamily); // scan for sstables corresponding to this cf and load them data = new DataTracker(this); if (loadSSTables) { Directories.SSTableLister sstableFiles = directories.sstableLister().skipTemporary(true); Collection<SSTableReader> sstables = SSTableReader.batchOpen(sstableFiles.list().entrySet(), metadata, this.partitioner); 
// Filter non-compacted sstables, remove compacted ones Set<Integer> compactedSSTables = new HashSet<Integer>(); for (SSTableReader sstable : sstables) compactedSSTables.addAll(sstable.getAncestors()); Set<SSTableReader> liveSSTables = new HashSet<SSTableReader>(); for (SSTableReader sstable : sstables) { if (compactedSSTables.contains(sstable.descriptor.generation)) sstable.releaseReference(); // this amount to deleting the sstable else liveSSTables.add(sstable); } data.addInitialSSTables(liveSSTables); } if (caching == Caching.ALL || caching == Caching.KEYS_ONLY) CacheService.instance.keyCache.loadSaved(this); // compaction strategy should be created after the CFS has been prepared this.compactionStrategy = metadata.createCompactionStrategyInstance(this); // create the private ColumnFamilyStores for the secondary column indexes for (ColumnDefinition info : metadata.getColumn_metadata().values()) { if (info.getIndexType() != null) indexManager.addIndexedColumn(info); } // register the mbean String type = this.partitioner instanceof LocalPartitioner ? "IndexColumnFamilies" : "ColumnFamilies"; mbeanName = "org.apache.cassandra.db:type=" + type + ",keyspace=" + this.table.name + ",columnfamily=" + columnFamily; try { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); ObjectName nameObj = new ObjectName(mbeanName); mbs.registerMBean(this, nameObj); } catch (Exception e) { throw new RuntimeException(e); } } /** call when dropping or renaming a CF. Performs mbean housekeeping and invalidates CFS to other operations */ public void invalidate() { try { valid = false; unregisterMBean(); data.unreferenceSSTables(); indexManager.invalidate(); } catch (Exception e) { // this shouldn't block anything. logger.warn("Failed unregistering mbean: " + mbeanName, e); } } /** * Removes every SSTable in the directory from the DataTracker's view. * @param directory the unreadable directory, possibly with SSTables in it, but not necessarily. 
*/ void maybeRemoveUnreadableSSTables(File directory) { data.removeUnreadableSSTables(directory); } void unregisterMBean() throws MalformedObjectNameException, InstanceNotFoundException, MBeanRegistrationException { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); ObjectName nameObj = new ObjectName(mbeanName); if (mbs.isRegistered(nameObj)) mbs.unregisterMBean(nameObj); // unregister metrics metric.release(); } public long getMinRowSize() { return metric.minRowSize.value(); } public long getMaxRowSize() { return metric.maxRowSize.value(); } public long getMeanRowSize() { return metric.meanRowSize.value(); } public int getMeanColumns() { return data.getMeanColumns(); } public static ColumnFamilyStore createColumnFamilyStore(Table table, String columnFamily, boolean loadSSTables) { return createColumnFamilyStore(table, columnFamily, StorageService.getPartitioner(), Schema.instance.getCFMetaData(table.name, columnFamily), loadSSTables); } public static ColumnFamilyStore createColumnFamilyStore(Table table, String columnFamily, IPartitioner partitioner, CFMetaData metadata) { return createColumnFamilyStore(table, columnFamily, partitioner, metadata, true); } private static synchronized ColumnFamilyStore createColumnFamilyStore(Table table, String columnFamily, IPartitioner partitioner, CFMetaData metadata, boolean loadSSTables) { // get the max generation number, to prevent generation conflicts Directories directories = Directories.create(table.name, columnFamily); Directories.SSTableLister lister = directories.sstableLister().includeBackups(true); List<Integer> generations = new ArrayList<Integer>(); for (Map.Entry<Descriptor, Set<Component>> entry : lister.list().entrySet()) { Descriptor desc = entry.getKey(); generations.add(desc.generation); if (!desc.isCompatible()) throw new RuntimeException(String.format("Can't open incompatible SSTable! 
Current version %s, found file: %s", Descriptor.Version.CURRENT, desc)); } Collections.sort(generations); int value = (generations.size() > 0) ? (generations.get(generations.size() - 1)) : 0; return new ColumnFamilyStore(table, columnFamily, partitioner, value, metadata, directories, loadSSTables); } /** * Removes unnecessary files from the cf directory at startup: these include temp files, orphans, zero-length files * and compacted sstables. Files that cannot be recognized will be ignored. */ public static void scrubDataDirectories(String table, String columnFamily) { logger.debug("Removing compacted SSTable files from {} (see http://wiki.apache.org/cassandra/MemtableSSTable)", columnFamily); Directories directories = Directories.create(table, columnFamily); for (Map.Entry<Descriptor,Set<Component>> sstableFiles : directories.sstableLister().list().entrySet()) { Descriptor desc = sstableFiles.getKey(); Set<Component> components = sstableFiles.getValue(); if (components.contains(Component.COMPACTED_MARKER) || desc.temporary) { SSTable.delete(desc, components); continue; } File dataFile = new File(desc.filenameFor(Component.DATA)); if (components.contains(Component.DATA) && dataFile.length() > 0) // everything appears to be in order... moving on. continue; // missing the DATA file! all components are orphaned logger.warn("Removing orphans for {}: {}", desc, components); for (Component component : components) { FileUtils.deleteWithConfirm(desc.filenameFor(component)); } } // cleanup incomplete saved caches Pattern tmpCacheFilePattern = Pattern.compile(table + "-" + columnFamily + "-(Key|Row)Cache.*\\.tmp$"); File dir = new File(DatabaseDescriptor.getSavedCachesLocation()); if (dir.exists()) { assert dir.isDirectory(); for (File file : dir.listFiles()) if (tmpCacheFilePattern.matcher(file.getName()).matches()) if (!file.delete()) logger.warn("could not delete " + file.getAbsolutePath()); } // also clean out any index leftovers. 
CFMetaData cfm = Schema.instance.getCFMetaData(table, columnFamily); if (cfm != null) // secondary indexes aren't stored in DD. { for (ColumnDefinition def : cfm.getColumn_metadata().values()) scrubDataDirectories(table, cfm.indexColumnFamilyName(def)); } } // must be called after all sstables are loaded since row cache merges all row versions public void initRowCache() { if (!isRowCacheEnabled()) return; long start = System.currentTimeMillis(); int cachedRowsRead = CacheService.instance.rowCache.loadSaved(this); if (cachedRowsRead > 0) logger.info(String.format("completed loading (%d ms; %d keys) row cache for %s.%s", System.currentTimeMillis() - start, cachedRowsRead, table.name, columnFamily)); } /** * See #{@code StorageService.loadNewSSTables(String, String)} for more info * * @param ksName The keyspace name * @param cfName The columnFamily name */ public static synchronized void loadNewSSTables(String ksName, String cfName) { /** ks/cf existence checks will be done by open and getCFS methods for us */ Table table = Table.open(ksName); table.getColumnFamilyStore(cfName).loadNewSSTables(); } /** * #{@inheritDoc} */ public synchronized void loadNewSSTables() { logger.info("Loading new SSTables for " + table.name + "/" + columnFamily + "..."); Set<Descriptor> currentDescriptors = new HashSet<Descriptor>(); for (SSTableReader sstable : data.getView().sstables) currentDescriptors.add(sstable.descriptor); Set<SSTableReader> newSSTables = new HashSet<SSTableReader>(); Directories.SSTableLister lister = directories.sstableLister().skipTemporary(true); for (Map.Entry<Descriptor, Set<Component>> entry : lister.list().entrySet()) { Descriptor descriptor = entry.getKey(); if (currentDescriptors.contains(descriptor)) continue; // old (initialized) SSTable found, skipping if (descriptor.temporary) // in the process of being written continue; if (!descriptor.isCompatible()) throw new RuntimeException(String.format("Can't open incompatible SSTable! 
Current version %s, found file: %s", Descriptor.Version.CURRENT, descriptor)); Descriptor newDescriptor = new Descriptor(descriptor.version, descriptor.directory, descriptor.ksname, descriptor.cfname, fileIndexGenerator.incrementAndGet(), false); logger.info("Renaming new SSTable {} to {}", descriptor, newDescriptor); SSTableWriter.rename(descriptor, newDescriptor, entry.getValue()); SSTableReader reader; try { reader = SSTableReader.open(newDescriptor, entry.getValue(), metadata, partitioner); } catch (IOException e) { SSTableReader.logOpenException(entry.getKey(), e); continue; } newSSTables.add(reader); } if (newSSTables.isEmpty()) { logger.info("No new SSTables were found for " + table.name + "/" + columnFamily); return; } logger.info("Loading new SSTables and building secondary indexes for " + table.name + "/" + columnFamily + ": " + newSSTables); SSTableReader.acquireReferences(newSSTables); data.addSSTables(newSSTables); try { indexManager.maybeBuildSecondaryIndexes(newSSTables, indexManager.allIndexesNames()); } finally { SSTableReader.releaseReferences(newSSTables); } logger.info("Done loading load new SSTables for " + table.name + "/" + columnFamily); } public static void rebuildSecondaryIndex(String ksName, String cfName, String... 
idxNames) { ColumnFamilyStore cfs = Table.open(ksName).getColumnFamilyStore(cfName); Set<String> indexes = new HashSet<String>(Arrays.asList(idxNames)); Collection<SSTableReader> sstables = cfs.getSSTables(); try { cfs.indexManager.setIndexRemoved(indexes); SSTableReader.acquireReferences(sstables); logger.info(String.format("User Requested secondary index re-build for %s/%s indexes", ksName, cfName)); cfs.indexManager.maybeBuildSecondaryIndexes(sstables, indexes); cfs.indexManager.setIndexBuilt(indexes); } finally { SSTableReader.releaseReferences(sstables); } } /** * @return the name of the column family */ public String getColumnFamilyName() { return columnFamily; } public String getTempSSTablePath(File directory) { return getTempSSTablePath(directory, Descriptor.Version.CURRENT); } private String getTempSSTablePath(File directory, Descriptor.Version version) { Descriptor desc = new Descriptor(version, directory, table.name, columnFamily, fileIndexGenerator.incrementAndGet(), true); return desc.filenameFor(Component.DATA); } /** flush the given memtable and swap in a new one for its CFS, if it hasn't been frozen already. threadsafe. */ public Future<?> maybeSwitchMemtable(Memtable oldMemtable, final boolean writeCommitLog) { if (oldMemtable.isFrozen()) { logger.debug("memtable is already frozen; another thread must be flushing it"); return null; } /* * If we can get the writelock, that means no new updates can come in and * all ongoing updates to memtables have completed. We can get the tail * of the log and use it as the starting position for log replay on recovery. * * This is why we Table.switchLock needs to be global instead of per-Table: * we need to schedule discardCompletedSegments calls in the same order as their * contexts (commitlog position) were read, even though the flush executor * is multithreaded. 
*/ Table.switchLock.writeLock().lock(); try { if (oldMemtable.isFrozen()) { logger.debug("memtable is already frozen; another thread must be flushing it"); return null; } assert getMemtableThreadSafe() == oldMemtable; final Future<ReplayPosition> ctx = writeCommitLog ? CommitLog.instance.getContext() : Futures.immediateFuture(ReplayPosition.NONE); // submit the memtable for any indexed sub-cfses, and our own. final List<ColumnFamilyStore> icc = new ArrayList<ColumnFamilyStore>(); // don't assume that this.memtable is dirty; forceFlush can bring us here during index build even if it is not for (ColumnFamilyStore cfs : concatWithIndexes()) { Memtable mt = cfs.getMemtableThreadSafe(); if (!mt.isClean() && !mt.isFrozen()) { // We need to freeze indexes too because they can be concurrently flushed too (#3547) mt.freeze(); icc.add(cfs); } } final CountDownLatch latch = new CountDownLatch(icc.size()); for (ColumnFamilyStore cfs : icc) { Memtable memtable = cfs.data.switchMemtable(); logger.info("Enqueuing flush of {}", memtable); memtable.flushAndSignal(latch, flushWriter, ctx); } if (metric.memtableSwitchCount.count() == Long.MAX_VALUE) metric.memtableSwitchCount.clear(); metric.memtableSwitchCount.inc(); // when all the memtables have been written, including for indexes, mark the flush in the commitlog header. // a second executor makes sure the onMemtableFlushes get called in the right order, // while keeping the wait-for-flush (future.get) out of anything latency-sensitive. 
return postFlushExecutor.submit(new WrappedRunnable() { public void runMayThrow() throws InterruptedException, ExecutionException { latch.await(); if (!icc.isEmpty()) { //only valid when memtables exist for (SecondaryIndex index : indexManager.getIndexesNotBackedByCfs()) { // flush any non-cfs backed indexes logger.info("Flushing SecondaryIndex {}", index); index.forceBlockingFlush(); } } if (writeCommitLog) { // if we're not writing to the commit log, we are replaying the log, so marking // the log header with "you can discard anything written before the context" is not valid CommitLog.instance.discardCompletedSegments(metadata.cfId, ctx.get()); } } }); } finally { Table.switchLock.writeLock().unlock(); } } public Future<?> forceFlush() { // during index build, 2ary index memtables can be dirty even if parent is not. if so, // we want flushLargestMemtables to flush the 2ary index ones too. boolean clean = true; for (ColumnFamilyStore cfs : concatWithIndexes()) clean &= cfs.getMemtableThreadSafe().isClean(); if (clean) { logger.debug("forceFlush requested but everything is clean in {}", columnFamily); return null; } return maybeSwitchMemtable(getMemtableThreadSafe(), true); } public void forceBlockingFlush() throws ExecutionException, InterruptedException { Future<?> future = forceFlush(); if (future != null) future.get(); } public void updateRowCache(DecoratedKey key, ColumnFamily columnFamily) { if (metadata.cfId == null) return; // secondary index RowCacheKey cacheKey = new RowCacheKey(metadata.cfId, key); // always invalidate a copying cache value if (CacheService.instance.rowCache.isPutCopying()) { invalidateCachedRow(cacheKey); return; } // invalidate a normal cache value if it's a sentinel, so the read will retry (and include the new update) IRowCacheEntry cachedRow = getCachedRowInternal(cacheKey); if (cachedRow != null) { if (cachedRow instanceof RowCacheSentinel) invalidateCachedRow(cacheKey); else // columnFamily is what is written in the commit log. 
Because of the PeriodicCommitLog, this can be done in concurrency // with this. So columnFamily shouldn't be modified and if it contains super columns, neither should they. So for super // columns, we must make sure to clone them when adding to the cache. That's what addAllWithSCCopy does (see #3957) ((ColumnFamily) cachedRow).addAllWithSCCopy(columnFamily, HeapAllocator.instance); } } /** * Insert/Update the column family for this key. * Caller is responsible for acquiring Table.flusherLock! * param @ lock - lock that needs to be used. * param @ key - key for update/insert * param @ columnFamily - columnFamily changes */ public void apply(DecoratedKey key, ColumnFamily columnFamily, SecondaryIndexManager.Updater indexer) { long start = System.nanoTime(); Memtable mt = getMemtableThreadSafe(); mt.put(key, columnFamily, indexer); updateRowCache(key, columnFamily); metric.writeLatency.addNano(System.nanoTime() - start); // recompute liveRatio, if we have doubled the number of ops since last calculated while (true) { long last = liveRatioComputedAt.get(); long operations = metric.writeLatency.latency.count(); if (operations < 2 * last) break; if (liveRatioComputedAt.compareAndSet(last, operations)) { logger.debug("computing liveRatio of {} at {} ops", this, operations); mt.updateLiveRatio(); } } } public static ColumnFamily removeDeletedCF(ColumnFamily cf, int gcBefore) { cf.maybeResetDeletionTimes(gcBefore); return cf.getColumnCount() == 0 && !cf.isMarkedForDelete() ? null : cf; } public static ColumnFamily removeDeleted(ColumnFamily cf, int gcBefore) { return removeDeleted(cf, gcBefore, SecondaryIndexManager.nullUpdater); } /* This is complicated because we need to preserve deleted columns, supercolumns, and columnfamilies until they have been deleted for at least GC_GRACE_IN_SECONDS. But, we do not need to preserve their contents; just the object itself as a "tombstone" that can be used to repair other replicas that do not know about the deletion. 
*/ public static ColumnFamily removeDeleted(ColumnFamily cf, int gcBefore, SecondaryIndexManager.Updater indexer) { if (cf == null) { return null; } removeDeletedColumnsOnly(cf, gcBefore, indexer); return removeDeletedCF(cf, gcBefore); } private static void removeDeletedColumnsOnly(ColumnFamily cf, int gcBefore, SecondaryIndexManager.Updater indexer) { if (cf.isSuper()) removeDeletedSuper(cf, gcBefore); else removeDeletedStandard(cf, gcBefore, indexer); } public static void removeDeletedColumnsOnly(ColumnFamily cf, int gcBefore) { removeDeletedColumnsOnly(cf, gcBefore, SecondaryIndexManager.nullUpdater); } private static void removeDeletedStandard(ColumnFamily cf, int gcBefore, SecondaryIndexManager.Updater indexer) { Iterator<IColumn> iter = cf.iterator(); while (iter.hasNext()) { IColumn c = iter.next(); // remove columns if // (a) the column itself is gcable or // (b) the column is shadowed by a CF tombstone if (c.getLocalDeletionTime() < gcBefore || cf.deletionInfo().isDeleted(c)) { iter.remove(); indexer.remove(c); } } } private static void removeDeletedSuper(ColumnFamily cf, int gcBefore) { // TODO assume deletion means "most are deleted?" and add to clone, instead of remove from original? 
// this could be improved by having compaction, or possibly even removeDeleted, r/m the tombstone // once gcBefore has passed, so if new stuff is added in it doesn't used the wrong algorithm forever Iterator<IColumn> iter = cf.iterator(); while (iter.hasNext()) { SuperColumn c = (SuperColumn)iter.next(); Iterator<IColumn> subIter = c.getSubColumns().iterator(); while (subIter.hasNext()) { IColumn subColumn = subIter.next(); // remove subcolumns if // (a) the subcolumn itself is gcable or // (b) the supercolumn is shadowed by the CF and the column is not newer // (b) the subcolumn is shadowed by the supercolumn if (subColumn.getLocalDeletionTime() < gcBefore || cf.deletionInfo().isDeleted(c.name(), subColumn.timestamp()) || c.deletionInfo().isDeleted(subColumn)) { subIter.remove(); } } c.maybeResetDeletionTimes(gcBefore); if (c.getSubColumns().isEmpty() && !c.isMarkedForDelete()) { iter.remove(); } } } /** * @param sstables * @return sstables whose key range overlaps with that of the given sstables, not including itself. * (The given sstables may or may not overlap with each other.) */ public Set<SSTableReader> getOverlappingSSTables(Collection<SSTableReader> sstables) { logger.debug("Checking for sstables overlapping {}", sstables); // a normal compaction won't ever have an empty sstables list, but we create a skeleton // compaction controller for streaming, and that passes an empty list. if (sstables.isEmpty()) return ImmutableSet.of(); DataTracker.SSTableIntervalTree tree = data.getView().intervalTree; Set<SSTableReader> results = null; for (SSTableReader sstable : sstables) { Set<SSTableReader> overlaps = ImmutableSet.copyOf(tree.search(Interval.<RowPosition, SSTableReader>create(sstable.first, sstable.last))); assert overlaps.contains(sstable); results = results == null ? 
overlaps : Sets.union(results, overlaps).immutableCopy(); } results = Sets.difference(results, ImmutableSet.copyOf(sstables)); return results; } /* * Called after a BinaryMemtable flushes its in-memory data, or we add a file * via bootstrap. This information is cached in the ColumnFamilyStore. * This is useful for reads because the ColumnFamilyStore first looks in * the in-memory store and the into the disk to find the key. If invoked * during recoveryMode the onMemtableFlush() need not be invoked. * * param @ filename - filename just flushed to disk */ public void addSSTable(SSTableReader sstable) { assert sstable.getColumnFamilyName().equals(columnFamily); addSSTables(Arrays.asList(sstable)); } public void addSSTables(Collection<SSTableReader> sstables) { data.addSSTables(sstables); CompactionManager.instance.submitBackground(this); } /** * Calculate expected file size of SSTable after compaction. * * If operation type is {@code CLEANUP} and we're not dealing with an index sstable, * then we calculate expected file size with checking token range to be eliminated. * * Otherwise, we just add up all the files' size, which is the worst case file * size for compaction of all the list of files given. 
* * @param sstables SSTables to calculate expected compacted file size * @param operation Operation type * @return Expected file size of SSTable after compaction */ public long getExpectedCompactedFileSize(Iterable<SSTableReader> sstables, OperationType operation) { if (operation != OperationType.CLEANUP || isIndex()) { return SSTable.getTotalBytes(sstables); } // cleanup size estimation only counts bytes for keys local to this node long expectedFileSize = 0; Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(table.name); for (SSTableReader sstable : sstables) { List<Pair<Long, Long>> positions = sstable.getPositionsForRanges(ranges); for (Pair<Long, Long> position : positions) expectedFileSize += position.right - position.left; } return expectedFileSize; } /* * Find the maximum size file in the list . */ public SSTableReader getMaxSizeFile(Iterable<SSTableReader> sstables) { long maxSize = 0L; SSTableReader maxFile = null; for (SSTableReader sstable : sstables) { if (sstable.onDiskLength() > maxSize) { maxSize = sstable.onDiskLength(); maxFile = sstable; } } return maxFile; } public void forceCleanup(CounterId.OneShotRenewer renewer) throws ExecutionException, InterruptedException { CompactionManager.instance.performCleanup(ColumnFamilyStore.this, renewer); } public void scrub() throws ExecutionException, InterruptedException { snapshotWithoutFlush("pre-scrub-" + System.currentTimeMillis()); CompactionManager.instance.performScrub(ColumnFamilyStore.this); } public void sstablesRewrite() throws ExecutionException, InterruptedException { CompactionManager.instance.performSSTableRewrite(ColumnFamilyStore.this); } public void markCompacted(Collection<SSTableReader> sstables, OperationType compactionType) { assert !sstables.isEmpty(); data.markCompacted(sstables, compactionType); } public void replaceCompactedSSTables(Collection<SSTableReader> sstables, Iterable<SSTableReader> replacements, OperationType compactionType) { 
data.replaceCompactedSSTables(sstables, replacements, compactionType); } void replaceFlushed(Memtable memtable, SSTableReader sstable) { data.replaceFlushed(memtable, sstable); if (sstable != null) CompactionManager.instance.submitBackground(this); } public boolean isValid() { return valid; } public long getMemtableColumnsCount() { return metric.memtableColumnsCount.value(); } public long getMemtableDataSize() { return metric.memtableDataSize.value(); } public long getTotalMemtableLiveSize() { return getMemtableDataSize() + indexManager.getTotalLiveSize(); } public int getMemtableSwitchCount() { return (int) metric.memtableSwitchCount.count(); } /** * get the current memtable in a threadsafe fashion. note that simply "return memtable_" is * incorrect; you need to lock to introduce a thread safe happens-before ordering. * * do NOT use this method to do either a put or get on the memtable object, since it could be * flushed in the meantime (and its executor terminated). * * also do NOT make this method public or it will really get impossible to reason about these things. * @return */ private Memtable getMemtableThreadSafe() { return data.getMemtable(); } /** * Package protected for access from the CompactionManager. 
*/ public DataTracker getDataTracker() { return data; } public Collection<SSTableReader> getSSTables() { return data.getSSTables(); } public Set<SSTableReader> getUncompactingSSTables() { return data.getUncompactingSSTables(); } public long[] getRecentSSTablesPerReadHistogram() { return metric.recentSSTablesPerRead.getBuckets(true); } public long[] getSSTablesPerReadHistogram() { return metric.sstablesPerRead.getBuckets(false); } public long getReadCount() { return metric.readLatency.latency.count(); } public double getRecentReadLatencyMicros() { return metric.readLatency.getRecentLatency(); } public long[] getLifetimeReadLatencyHistogramMicros() { return metric.readLatency.totalLatencyHistogram.getBuckets(false); } public long[] getRecentReadLatencyHistogramMicros() { return metric.readLatency.recentLatencyHistogram.getBuckets(true); } public long getTotalReadLatencyMicros() { return metric.readLatency.totalLatency.count(); } public int getPendingTasks() { return metric.pendingTasks.value(); } public long getWriteCount() { return metric.writeLatency.latency.count(); } public long getTotalWriteLatencyMicros() { return metric.writeLatency.totalLatency.count(); } public double getRecentWriteLatencyMicros() { return metric.writeLatency.getRecentLatency(); } public long[] getLifetimeWriteLatencyHistogramMicros() { return metric.writeLatency.totalLatencyHistogram.getBuckets(false); } public long[] getRecentWriteLatencyHistogramMicros() { return metric.writeLatency.recentLatencyHistogram.getBuckets(true); } public ColumnFamily getColumnFamily(DecoratedKey key, QueryPath path, ByteBuffer start, ByteBuffer finish, boolean reversed, int limit) { return getColumnFamily(QueryFilter.getSliceFilter(key, path, start, finish, reversed, limit)); } /** * get a list of columns starting from a given column, in a specified order. * only the latest version of a column is returned. 
* @return null if there is no data and no tombstones; otherwise a ColumnFamily */ public ColumnFamily getColumnFamily(QueryFilter filter) { return getColumnFamily(filter, gcBefore()); } public int gcBefore() { return (int) (System.currentTimeMillis() / 1000) - metadata.getGcGraceSeconds(); } /** * fetch the row given by filter.key if it is in the cache; if not, read it from disk and cache it * @param cfId the column family to read the row from * @param filter the columns being queried. Note that we still cache entire rows, but if a row is uncached * and we race to cache it, only the winner will read the entire row * @return the entire row for filter.key, if present in the cache (or we can cache it), or just the column * specified by filter otherwise */ private ColumnFamily getThroughCache(UUID cfId, QueryFilter filter) { assert isRowCacheEnabled() : String.format("Row cache is not enabled on column family [" + getColumnFamilyName() + "]"); RowCacheKey key = new RowCacheKey(cfId, filter.key); // attempt a sentinel-read-cache sequence. if a write invalidates our sentinel, we'll return our // (now potentially obsolete) data, but won't cache it. 
see CASSANDRA-3862 IRowCacheEntry cached = CacheService.instance.rowCache.get(key); if (cached != null) { if (cached instanceof RowCacheSentinel) { // Some other read is trying to cache the value, just do a normal non-caching read return getTopLevelColumns(filter, Integer.MIN_VALUE, false); } return (ColumnFamily) cached; } RowCacheSentinel sentinel = new RowCacheSentinel(); boolean sentinelSuccess = CacheService.instance.rowCache.putIfAbsent(key, sentinel); try { ColumnFamily data = getTopLevelColumns(QueryFilter.getIdentityFilter(filter.key, new QueryPath(columnFamily)), Integer.MIN_VALUE, true); if (sentinelSuccess && data != null) CacheService.instance.rowCache.replace(key, sentinel, data); return data; } finally { if (sentinelSuccess && data == null) CacheService.instance.rowCache.remove(key); } } ColumnFamily getColumnFamily(QueryFilter filter, int gcBefore) { assert columnFamily.equals(filter.getColumnFamilyName()) : filter.getColumnFamilyName(); logger.debug("Executing single-partition query"); ColumnFamily result = null; long start = System.nanoTime(); try { if (!isRowCacheEnabled()) { ColumnFamily cf = getTopLevelColumns(filter, gcBefore, false); if (cf == null) return null; // TODO this is necessary because when we collate supercolumns together, we don't check // their subcolumns for relevance, so we need to do a second prune post facto here. result = cf.isSuper() ? removeDeleted(cf, gcBefore) : removeDeletedCF(cf, gcBefore); } else { UUID cfId = Schema.instance.getId(table.name, columnFamily); if (cfId == null) { logger.trace("no id found for {}.{}", table.name, columnFamily); return null; } ColumnFamily cached = getThroughCache(cfId, filter); if (cached == null) { logger.trace("cached row is empty"); return null; } result = filterColumnFamily(cached, filter, gcBefore); } } finally { metric.readLatency.addNano(System.nanoTime() - start); } logger.debug("Read {} cells", result == null ? 
0 : result.getColumnCount());
    return result;
}

/**
 * Filter a cached row, which will not be modified by the filter, but may be modified by throwing out
 * tombstones that are no longer relevant.
 * The returned column family won't be thread safe.
 */
ColumnFamily filterColumnFamily(ColumnFamily cached, QueryFilter filter, int gcBefore)
{
    // Shallow clone: the cached row itself stays untouched; only the clone is collated into.
    ColumnFamily cf = cached.cloneMeShallow(ArrayBackedSortedColumns.factory(), filter.filter.isReversed());
    OnDiskAtomIterator ci = filter.getMemtableColumnIterator(cached, null);
    filter.collateOnDiskAtom(cf, Collections.singletonList(ci), gcBefore);
    // TODO this is necessary because when we collate supercolumns together, we don't check
    // their subcolumns for relevance, so we need to do a second prune post facto here.
    return cf.isSuper() ? removeDeleted(cf, gcBefore) : removeDeletedCF(cf, gcBefore);
}

/**
 * Get the current view and acquires references on all its sstables.
 * This is a bit tricky because we must ensure that between the time we
 * get the current view and the time we acquire the references the set of
 * sstables hasn't changed. Otherwise we could get a view for which an
 * sstable has been deleted in the meantime.
 *
 * At the end of this method, a reference on all the sstables of the
 * returned view will have been acquired and must thus be released when
 * appropriate.
 */
private DataTracker.View markCurrentViewReferenced()
{
    // Retry loop: acquireReferences fails if any sstable in the view was released
    // (compacted away) between getView() and the acquire, in which case we re-read
    // the (by then updated) view and try again.
    while (true)
    {
        DataTracker.View currentView = data.getView();
        if (SSTableReader.acquireReferences(currentView.sstables))
            return currentView;
    }
}

/**
 * Get the current sstables, acquiring references on all of them.
 * The caller is in charge of releasing the references on the sstables.
 *
 * See markCurrentViewReferenced() above.
 */
public Collection<SSTableReader> markCurrentSSTablesReferenced()
{
    return markCurrentViewReferenced().sstables;
}

/**
 * @return a ViewFragment containing the sstables and memtables that may need to be merged
 * for the given @param key, according to the interval tree
 */
public ViewFragment markReferenced(DecoratedKey key)
{
    assert !key.isMinimum();
    DataTracker.View view;
    List<SSTableReader> sstables;
    // Same acquire-or-retry pattern as markCurrentViewReferenced(): if any sstable
    // returned by the interval-tree search was released concurrently, re-read the view.
    while (true)
    {
        view = data.getView();
        sstables = view.intervalTree.search(key);
        if (SSTableReader.acquireReferences(sstables))
            break;
        // retry w/ new view
    }
    return new ViewFragment(sstables, Iterables.concat(Collections.singleton(view.memtable), view.memtablesPendingFlush));
}

/**
 * @return a ViewFragment containing the sstables and memtables that may need to be merged
 * for rows between @param startWith and @param stopAt, inclusive, according to the interval tree
 */
public ViewFragment markReferenced(RowPosition startWith, RowPosition stopAt)
{
    DataTracker.View view;
    List<SSTableReader> sstables;
    while (true)
    {
        view = data.getView();

        // startAt == minimum is ok, but stopAt == minimum is confusing because all IntervalTree deals with
        // is Comparable, so it won't know to special-case that. However max() should not be called if the
        // intervalTree is empty, so checking that first
        //
        if (view.intervalTree.isEmpty())
        {
            sstables = Collections.emptyList();
            break;
        }

        RowPosition stopInTree = stopAt.isMinimum() ?
view.intervalTree.max() : stopAt; sstables = view.intervalTree.search(Interval.<RowPosition, SSTableReader>create(startWith, stopInTree)); if (SSTableReader.acquireReferences(sstables)) break; // retry w/ new view } return new ViewFragment(sstables, Iterables.concat(Collections.singleton(view.memtable), view.memtablesPendingFlush)); } public List<String> getSSTablesForKey(String key) { DecoratedKey dk = new DecoratedKey(partitioner.getToken(ByteBuffer.wrap(key.getBytes())), ByteBuffer.wrap(key.getBytes())); ViewFragment view = markReferenced(dk); try { List<String> files = new ArrayList<String>(); for (SSTableReader sstr : view.sstables) { // check if the key actually exists in this sstable, without updating cache and stats if (sstr.getPosition(dk, SSTableReader.Operator.EQ, false) != null) files.add(sstr.getFilename()); } return files; } finally { SSTableReader.releaseReferences(view.sstables); } } public ColumnFamily getTopLevelColumns(QueryFilter filter, int gcBefore, boolean forCache) { CollationController controller = new CollationController(this, forCache, filter, gcBefore); ColumnFamily columns = controller.getTopLevelColumns(); metric.updateSSTableIterated(controller.getSstablesIterated()); return columns; } public static abstract class AbstractScanIterator extends AbstractIterator<Row> implements CloseableIterator<Row> { public boolean needsFiltering() { return true; } } /** * Iterate over a range of rows and columns from memtables/sstables. * * @param superColumn optional SuperColumn to slice subcolumns of; null to slice top-level columns * @param range Either a Bounds, which includes start key, or a Range, which does not. 
* @param columnFilter description of the columns we're interested in for each row */ public AbstractScanIterator getSequentialIterator(ByteBuffer superColumn, final AbstractBounds<RowPosition> range, IFilter columnFilter) { assert !(range instanceof Range) || !((Range)range).isWrapAround() || range.right.isMinimum() : range; final RowPosition startWith = range.left; final RowPosition stopAt = range.right; QueryFilter filter = new QueryFilter(null, new QueryPath(columnFamily, superColumn, null), columnFilter); final ViewFragment view = markReferenced(startWith, stopAt); try { final CloseableIterator<Row> iterator = RowIteratorFactory.getIterator(view.memtables, view.sstables, startWith, stopAt, filter, this); final int gcBefore = (int)(System.currentTimeMillis() / 1000) - metadata.getGcGraceSeconds(); return new AbstractScanIterator() { protected Row computeNext() { // pull a row out of the iterator if (!iterator.hasNext()) return endOfData(); Row current = iterator.next(); DecoratedKey key = current.key; if (!stopAt.isMinimum() && stopAt.compareTo(key) < 0) return endOfData(); // skipping outside of assigned range if (!range.contains(key)) return computeNext(); logger.trace("scanned {}", key); // TODO this is necessary because when we collate supercolumns together, we don't check // their subcolumns for relevance, so we need to do a second prune post facto here. return current.cf != null && current.cf.isSuper() ? new Row(current.key, removeDeleted(current.cf, gcBefore)) : current; } public void close() throws IOException { SSTableReader.releaseReferences(view.sstables); iterator.close(); } }; } catch (RuntimeException e) { // In case getIterator() throws, otherwise the iteror close method releases the references. 
SSTableReader.releaseReferences(view.sstables); throw e; } } public List<Row> getRangeSlice(ByteBuffer superColumn, final AbstractBounds<RowPosition> range, int maxResults, IFilter columnFilter, List<IndexExpression> rowFilter) { return getRangeSlice(superColumn, range, maxResults, columnFilter, rowFilter, false, false); } public List<Row> getRangeSlice(ByteBuffer superColumn, final AbstractBounds<RowPosition> range, int maxResults, IFilter columnFilter, List<IndexExpression> rowFilter, boolean maxIsColumns, boolean isPaging) { logger.debug("Executing seq scan for {}..{}", range.left, range.right); return filter(getSequentialIterator(superColumn, range, columnFilter), ExtendedFilter.create(this, columnFilter, rowFilter, maxResults, maxIsColumns, isPaging)); } public List<Row> search(List<IndexExpression> clause, AbstractBounds<RowPosition> range, int maxResults, IFilter dataFilter) { return search(clause, range, maxResults, dataFilter, false); } public List<Row> search(List<IndexExpression> clause, AbstractBounds<RowPosition> range, int maxResults, IFilter dataFilter, boolean maxIsColumns) { logger.debug("Executing indexed scan for {}..{}", range.left, range.right); return indexManager.search(clause, range, maxResults, dataFilter, maxIsColumns); } public List<Row> filter(AbstractScanIterator rowIterator, ExtendedFilter filter) { logger.trace("Filtering {} for rows matching {}", rowIterator, filter); List<Row> rows = new ArrayList<Row>(); int columnsCount = 0; try { while (rowIterator.hasNext() && rows.size() < filter.maxRows() && columnsCount < filter.maxColumns()) { // get the raw columns requested, and additional columns for the expressions if necessary Row rawRow = rowIterator.next(); ColumnFamily data = rawRow.cf; if (rowIterator.needsFiltering()) { // roughtly IFilter extraFilter = filter.getExtraFilter(data); if (extraFilter != null) { QueryPath path = new QueryPath(columnFamily); ColumnFamily cf = filter.cfs.getColumnFamily(new QueryFilter(rawRow.key, path, 
extraFilter)); if (cf != null) data.addAll(cf, HeapAllocator.instance); } if (!filter.isSatisfiedBy(data, null)) continue; logger.trace("{} satisfies all filter expressions", data); // cut the resultset back to what was requested, if necessary data = filter.prune(data); } rows.add(new Row(rawRow.key, data)); if (data != null) columnsCount += filter.lastCounted(data); // Update the underlying filter to avoid querying more columns per slice than necessary and to handle paging filter.updateFilter(columnsCount); } return rows; } finally { try { rowIterator.close(); } catch (IOException e) { throw new RuntimeException(e); } } } public AbstractType<?> getComparator() { return metadata.comparator; } public void snapshotWithoutFlush(String snapshotName) { for (ColumnFamilyStore cfs : concatWithIndexes()) { DataTracker.View currentView = cfs.markCurrentViewReferenced(); try { for (SSTableReader ssTable : currentView.sstables) { File snapshotDirectory = Directories.getSnapshotDirectory(ssTable.descriptor, snapshotName); ssTable.createLinks(snapshotDirectory.getPath()); // hard links if (logger.isDebugEnabled()) logger.debug("Snapshot for " + table + " keyspace data file " + ssTable.getFilename() + " created in " + snapshotDirectory); } if (cfs.compactionStrategy instanceof LeveledCompactionStrategy) cfs.directories.snapshotLeveledManifest(snapshotName); } finally { SSTableReader.releaseReferences(currentView.sstables); } } } public List<SSTableReader> getSnapshotSSTableReader(String tag) throws IOException { Map<Descriptor, Set<Component>> snapshots = directories.sstableLister().snapshots(tag).list(); List<SSTableReader> readers = new ArrayList<SSTableReader>(snapshots.size()); for (Map.Entry<Descriptor, Set<Component>> entries : snapshots.entrySet()) readers.add(SSTableReader.open(entries.getKey(), entries.getValue(), metadata, partitioner)); return readers; } /** * Take a snap shot of this columnfamily store. 
* * @param snapshotName the name of the associated with the snapshot */ public void snapshot(String snapshotName) { try { forceBlockingFlush(); } catch (ExecutionException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new AssertionError(e); } snapshotWithoutFlush(snapshotName); } public boolean snapshotExists(String snapshotName) { return directories.snapshotExists(snapshotName); } public void clearSnapshot(String snapshotName) { directories.clearSnapshot(snapshotName); } public boolean hasUnreclaimedSpace() { return getLiveDiskSpaceUsed() < getTotalDiskSpaceUsed(); } public long getTotalDiskSpaceUsed() { return metric.totalDiskSpaceUsed.count(); } public long getLiveDiskSpaceUsed() { return metric.liveDiskSpaceUsed.count(); } public int getLiveSSTableCount() { return metric.liveSSTableCount.value(); } /** * @return the cached row for @param key if it is already present in the cache. * That is, unlike getThroughCache, it will not readAndCache the row if it is not present, nor * are these calls counted in cache statistics. * * Note that this WILL cause deserialization of a SerializingCache row, so if all you * need to know is whether a row is present or not, use containsCachedRow instead. */ public ColumnFamily getRawCachedRow(DecoratedKey key) { if (metadata.cfId == null) return null; // secondary index IRowCacheEntry cached = getCachedRowInternal(new RowCacheKey(metadata.cfId, key)); return cached == null || cached instanceof RowCacheSentinel ? null : (ColumnFamily) cached; } private IRowCacheEntry getCachedRowInternal(RowCacheKey key) { return CacheService.instance.rowCache.getCapacity() == 0 ? 
null : CacheService.instance.rowCache.getInternal(key); } /** * @return true if @param key is contained in the row cache */ public boolean containsCachedRow(DecoratedKey key) { return CacheService.instance.rowCache.getCapacity() != 0 && CacheService.instance.rowCache.containsKey(new RowCacheKey(metadata.cfId, key)); } public void invalidateCachedRow(RowCacheKey key) { CacheService.instance.rowCache.remove(key); } public void invalidateCachedRow(DecoratedKey key) { UUID cfId = Schema.instance.getId(table.name, this.columnFamily); if (cfId == null) return; // secondary index invalidateCachedRow(new RowCacheKey(cfId, key)); } public void forceMajorCompaction() throws InterruptedException, ExecutionException { CompactionManager.instance.performMaximal(this); } public static Iterable<ColumnFamilyStore> all() { List<Iterable<ColumnFamilyStore>> stores = new ArrayList<Iterable<ColumnFamilyStore>>(Schema.instance.getTables().size()); for (Table table : Table.all()) { stores.add(table.getColumnFamilyStores()); } return Iterables.concat(stores); } public static List<ColumnFamilyStore> allUserDefined() { List<ColumnFamilyStore> cfses = new ArrayList<ColumnFamilyStore>(); for (Table table : Sets.difference(ImmutableSet.copyOf(Table.all()), ImmutableSet.of(Table.open(Table.SYSTEM_KS)))) cfses.addAll(table.getColumnFamilyStores()); return cfses; } public Iterable<DecoratedKey> keySamples(Range<Token> range) { Collection<SSTableReader> sstables = getSSTables(); Iterable<DecoratedKey>[] samples = new Iterable[sstables.size()]; int i = 0; for (SSTableReader sstable: sstables) { samples[i++] = sstable.getKeySamples(range); } return Iterables.concat(samples); } /** * For testing. No effort is made to clear historical or even the current memtables, nor for * thread safety. All we do is wipe the sstable containers clean, while leaving the actual * data files present on disk. (This allows tests to easily call loadNewSSTables on them.) 
*/ public void clearUnsafe() { for (ColumnFamilyStore cfs : concatWithIndexes()) cfs.data.init(); } /** * Waits for flushes started BEFORE THIS METHOD IS CALLED to finish. * Does NOT guarantee that no flush is active when it returns. */ private void waitForActiveFlushes() { Future<?> future; Table.switchLock.writeLock().lock(); try { future = postFlushExecutor.submit(new Runnable() { public void run() { } }); } finally { Table.switchLock.writeLock().unlock(); } try { future.get(); } catch (InterruptedException e) { throw new AssertionError(e); } catch (ExecutionException e) { throw new AssertionError(e); } } /** * Truncate practically deletes the entire column family's data * @return a Future to the delete operation. Call the future's get() to make * sure the column family has been deleted */ public Future<?> truncate() throws ExecutionException, InterruptedException { // We have two goals here: // - truncate should delete everything written before truncate was invoked // - but not delete anything that isn't part of the snapshot we create. // We accomplish this by first flushing manually, then snapshotting, and // recording the timestamp IN BETWEEN those actions. Any sstables created // with this timestamp or greater time, will not be marked for delete. // // Bonus complication: since we store replay position in sstable metadata, // truncating those sstables means we will replay any CL segments from the // beginning if we restart before they are discarded for normal reasons // post-truncate. So we need to (a) force a new segment so the currently // active one can be discarded, and (b) flush *all* CFs so that unflushed // data in others don't keep any pre-truncate CL segments alive. // // Bonus bonus: simply forceFlush of all the CF is not enough, because if // for a given column family the memtable is clean, forceFlush will return // immediately, even though there could be a memtable being flushed at the same // time. 
So to guarantee that all segments can be cleaned out, we need to // "waitForActiveFlushes" after the new segment has been created. logger.debug("truncating {}", columnFamily); if (DatabaseDescriptor.isAutoSnapshot()) { // flush the CF being truncated before forcing the new segment forceBlockingFlush(); } else { // just nuke the memtable data w/o writing to disk first Table.switchLock.writeLock().lock(); try { for (ColumnFamilyStore cfs : concatWithIndexes()) { Memtable mt = cfs.getMemtableThreadSafe(); if (!mt.isClean() && !mt.isFrozen()) { mt.cfs.data.renewMemtable(); } } } finally { Table.switchLock.writeLock().unlock(); } } KSMetaData ksm = Schema.instance.getKSMetaData(this.table.name); if (ksm.durableWrites) { CommitLog.instance.forceNewSegment(); Future<ReplayPosition> position = CommitLog.instance.getContext(); // now flush everyone else. re-flushing ourselves is not necessary, but harmless for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) cfs.forceFlush(); waitForActiveFlushes(); // if everything was clean, flush won't have called discard CommitLog.instance.discardCompletedSegments(metadata.cfId, position.get()); } // sleep a little to make sure that our truncatedAt comes after any sstable // that was part of the flushed we forced; otherwise on a tie, it won't get deleted. 
try { long starttime = System.currentTimeMillis(); while ((System.currentTimeMillis() - starttime) < 1) { Thread.sleep(1); } } catch (InterruptedException e) { throw new AssertionError(e); } long truncatedAt = System.currentTimeMillis(); if (DatabaseDescriptor.isAutoSnapshot()) snapshot(Table.getTimestampedSnapshotName(columnFamily)); return CompactionManager.instance.submitTruncate(this, truncatedAt); } public long getBloomFilterFalsePositives() { return metric.bloomFilterFalsePositives.value(); } public long getRecentBloomFilterFalsePositives() { return metric.recentBloomFilterFalsePositives.value(); } public double getBloomFilterFalseRatio() { return metric.bloomFilterFalseRatio.value(); } public double getRecentBloomFilterFalseRatio() { return metric.recentBloomFilterFalseRatio.value(); } public long getBloomFilterDiskSpaceUsed() { return metric.bloomFilterDiskSpaceUsed.value(); } @Override public String toString() { return "CFS(" + "Keyspace='" + table.name + '\'' + ", ColumnFamily='" + columnFamily + '\'' + ')'; } public void disableAutoCompaction() { minCompactionThreshold.set(0); maxCompactionThreshold.set(0); } public void enableAutoCompaction() { minCompactionThreshold.reset(); maxCompactionThreshold.reset(); } /* JMX getters and setters for the Default<T>s. 
- get/set minCompactionThreshold - get/set maxCompactionThreshold - get memsize - get memops - get/set memtime */ public AbstractCompactionStrategy getCompactionStrategy() { return compactionStrategy; } public void setCompactionThresholds(int minThreshold, int maxThreshold) { validateCompactionThresholds(minThreshold, maxThreshold); minCompactionThreshold.set(minThreshold); maxCompactionThreshold.set(maxThreshold); // this is called as part of CompactionStrategy constructor; avoid circular dependency by checking for null if (compactionStrategy != null) CompactionManager.instance.submitBackground(this); } public int getMinimumCompactionThreshold() { return minCompactionThreshold.value(); } public void setMinimumCompactionThreshold(int minCompactionThreshold) { validateCompactionThresholds(minCompactionThreshold, maxCompactionThreshold.value()); this.minCompactionThreshold.set(minCompactionThreshold); } public int getMaximumCompactionThreshold() { return maxCompactionThreshold.value(); } public void setMaximumCompactionThreshold(int maxCompactionThreshold) { validateCompactionThresholds(minCompactionThreshold.value(), maxCompactionThreshold); this.maxCompactionThreshold.set(maxCompactionThreshold); } private void validateCompactionThresholds(int minThreshold, int maxThreshold) { if (minThreshold > maxThreshold && maxThreshold != 0) throw new RuntimeException(String.format("The min_compaction_threshold cannot be larger than the max_compaction_threshold. " + "Min is '%d', Max is '%d'.", minThreshold, maxThreshold)); } public boolean isCompactionDisabled() { return getMinimumCompactionThreshold() <= 0 || getMaximumCompactionThreshold() <= 0; } // End JMX get/set. 
/** Estimated number of keys in this CF, as reported by the data tracker. */
public long estimateKeys()
{
    return data.estimatedKeys();
}

public long[] getEstimatedRowSizeHistogram()
{
    return metric.estimatedRowSizeHistogram.value();
}

public long[] getEstimatedColumnCountHistogram()
{
    return metric.estimatedColumnCountHistogram.value();
}

public double getCompressionRatio()
{
    return metric.compressionRatio.value();
}

/** true if this CFS contains secondary index data */
public boolean isIndex()
{
    return partitioner instanceof LocalPartitioner;
}

/**
 * Returns the canonical interned copy of {@code name}, cloning and caching it on
 * first sight so that equal column names share one ByteBuffer.
 */
private ByteBuffer intern(ByteBuffer name)
{
    ByteBuffer internedName = internedNames.get(name);
    if (internedName == null)
    {
        internedName = ByteBufferUtil.clone(name);
        // putIfAbsent: another thread may have interned the same name concurrently.
        ByteBuffer concurrentName = internedNames.putIfAbsent(internedName, internedName);
        if (concurrentName != null)
            internedName = concurrentName;
    }
    return internedName;
}

/** Interns while the intern table is below INTERN_CUTOFF; otherwise just clones via the allocator. */
public ByteBuffer internOrCopy(ByteBuffer name, Allocator allocator)
{
    if (internedNames.size() >= INTERN_CUTOFF)
        return allocator.clone(name);

    return intern(name);
}

/** Interns while below INTERN_CUTOFF; returns null once the table is full. */
public ByteBuffer maybeIntern(ByteBuffer name)
{
    if (internedNames.size() >= INTERN_CUTOFF)
        return null;

    return intern(name);
}

/**
 * Creates a writer for the output of compacting {@code sstables}, carrying over
 * the replay position, max timestamp and ancestor generations into its metadata.
 */
public SSTableWriter createCompactionWriter(long estimatedRows, File location, Collection<SSTableReader> sstables)
{
    ReplayPosition rp = ReplayPosition.getReplayPosition(sstables);
    SSTableMetadata.Collector sstableMetadataCollector = SSTableMetadata.createCollector().replayPosition(rp);

    // Get the max timestamp of the precompacted sstables
    // and adds generation of live ancestors
    for (SSTableReader sstable : sstables)
    {
        sstableMetadataCollector.updateMaxTimestamp(sstable.getMaxTimestamp());
        sstableMetadataCollector.addAncestor(sstable.descriptor.generation);
        for (Integer i : sstable.getAncestors())
        {
            // only record ancestors whose data file still exists on disk
            if (new File(sstable.descriptor.withGeneration(i).filenameFor(Component.DATA)).exists())
                sstableMetadataCollector.addAncestor(i);
        }
    }
    return new SSTableWriter(getTempSSTablePath(location), estimatedRows, metadata, partitioner, sstableMetadataCollector);
}

/** This CFS plus the CF stores backing its secondary indexes. */
public Iterable<ColumnFamilyStore> concatWithIndexes()
{
    return Iterables.concat(indexManager.getIndexesBackedByCfs(), Collections.singleton(this));
}

public Set<Memtable> getMemtablesPendingFlush()
{
    return data.getMemtablesPendingFlush();
}

public List<String> getBuiltIndexes()
{
    return indexManager.getBuiltIndexes();
}

/** Size of level 0 when using leveled compaction; 0 otherwise. */
public int getUnleveledSSTables()
{
    return this.compactionStrategy instanceof LeveledCompactionStrategy
           ? ((LeveledCompactionStrategy) this.compactionStrategy).getLevelSize(0)
           : 0;
}

/** Per-level sstable counts when using leveled compaction; null otherwise. */
public int[] getSSTableCountPerLevel()
{
    return compactionStrategy instanceof LeveledCompactionStrategy
           ? ((LeveledCompactionStrategy) compactionStrategy).getAllLevelSize()
           : null;
}

/** Immutable pairing of the sstables and memtables making up one consistent read view. */
public static class ViewFragment
{
    public final List<SSTableReader> sstables;
    public final Iterable<Memtable> memtables;

    public ViewFragment(List<SSTableReader> sstables, Iterable<Memtable> memtables)
    {
        this.sstables = sstables;
        this.memtables = memtables;
    }
}

/**
 * Returns the creation time of the oldest memtable not fully flushed yet.
 */
public long oldestUnflushedMemtable()
{
    DataTracker.View view = data.getView();
    long oldest = view.memtable.creationTime();
    for (Memtable memtable : view.memtablesPendingFlush)
        oldest = Math.min(oldest, memtable.creationTime());
    return oldest;
}

/** true when there are no sstables, the current memtable has no ops, and nothing is pending flush. */
public boolean isEmpty()
{
    DataTracker.View view = data.getView();
    return view.sstables.isEmpty() && view.memtable.getOperations() == 0 && view.memtablesPendingFlush.isEmpty();
}

private boolean isRowCacheEnabled()
{
    // disabled when caching is NONE/KEYS_ONLY or the global row cache has zero capacity
    return !(metadata.getCaching() == Caching.NONE
             || metadata.getCaching() == Caching.KEYS_ONLY
             || CacheService.instance.rowCache.getCapacity() == 0);
}

/**
 * Discard all SSTables that were created before given timestamp. Caller is responsible to obtain compactionLock.
 *
 * @param truncatedAt The timestamp of the truncation
 * (all SSTables before that timestamp are going be marked as compacted)
 */
public void discardSSTables(long truncatedAt)
{
    List<SSTableReader> truncatedSSTables = new ArrayList<SSTableReader>();

    for (SSTableReader sstable : getSSTables())
    {
        if (!sstable.newSince(truncatedAt))
            truncatedSSTables.add(sstable);
    }

    if (!truncatedSSTables.isEmpty())
        markCompacted(truncatedSSTables, OperationType.UNKNOWN);
}
}
src/java/org/apache/cassandra/db/ColumnFamilyStore.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db;

import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
import javax.management.*;

import com.google.common.collect.AbstractIterator;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Futures;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.cache.IRowCacheEntry;
import org.apache.cassandra.cache.RowCacheKey;
import org.apache.cassandra.cache.RowCacheSentinel;
import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.commitlog.CommitLog;
import org.apache.cassandra.db.commitlog.ReplayPosition;
import org.apache.cassandra.db.compaction.AbstractCompactionStrategy;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.compaction.LeveledCompactionStrategy;
import org.apache.cassandra.db.compaction.OperationType;
import org.apache.cassandra.db.filter.ExtendedFilter;
import org.apache.cassandra.db.filter.IFilter;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.db.filter.QueryPath;
import org.apache.cassandra.db.index.SecondaryIndex;
import org.apache.cassandra.db.index.SecondaryIndexManager;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.dht.*;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.compress.CompressionParameters;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.metrics.ColumnFamilyMetrics;
import org.apache.cassandra.service.CacheService;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.IndexExpression;
import org.apache.cassandra.utils.*;

import static org.apache.cassandra.config.CFMetaData.Caching;

public class ColumnFamilyStore implements ColumnFamilyStoreMBean
{
    private static final Logger logger = LoggerFactory.getLogger(ColumnFamilyStore.class);

    /*
     * maybeSwitchMemtable puts Memtable.getSortedContents on the writer executor. When the write is complete,
     * we turn the writer into an SSTableReader and add it to ssTables where it is available for reads.
     *
     * There are two other things that maybeSwitchMemtable does.
     * First, it puts the Memtable into memtablesPendingFlush, where it stays until the flush is complete
     * and it's been added as an SSTableReader to ssTables_. Second, it adds an entry to commitLogUpdater
     * that waits for the flush to complete, then calls onMemtableFlush. This allows multiple flushes
     * to happen simultaneously on multicore systems, while still calling onMF in the correct order,
     * which is necessary for replay in case of a restart since CommitLog assumes that when onMF is
     * called, all data up to the given context has been persisted to SSTables.
     */
    private static final ExecutorService flushWriter
            = new JMXEnabledThreadPoolExecutor(DatabaseDescriptor.getFlushWriters(),
                                               StageManager.KEEPALIVE,
                                               TimeUnit.SECONDS,
                                               new LinkedBlockingQueue<Runnable>(DatabaseDescriptor.getFlushQueueSize()),
                                               new NamedThreadFactory("FlushWriter"),
                                               "internal");

    public static final ExecutorService postFlushExecutor = new JMXEnabledThreadPoolExecutor("MemtablePostFlusher");

    static
    {
        // (can block if flush queue fills up, so don't put on scheduledTasks)
        StorageService.optionalTasks.scheduleWithFixedDelay(new MeteredFlusher(), 1000, 1000, TimeUnit.MILLISECONDS);
    }

    public final Table table;
    public final String columnFamily;
    public final CFMetaData metadata;
    public final IPartitioner partitioner;
    private final String mbeanName;
    // flipped to false by invalidate() when the CF is dropped/renamed
    private volatile boolean valid = true;

    /* Memtables and SSTables on disk for this column family */
    private final DataTracker data;

    /* This is used to generate the next index for a SSTable */
    private final AtomicInteger fileIndexGenerator = new AtomicInteger(0);

    public final SecondaryIndexManager indexManager;

    // maximum number of distinct column names kept in the intern table
    private static final int INTERN_CUTOFF = 256;
    public final ConcurrentMap<ByteBuffer, ByteBuffer> internedNames = new NonBlockingHashMap<ByteBuffer, ByteBuffer>();

    /* These are locally held copies to be changed from the config during runtime */
    private volatile DefaultInteger minCompactionThreshold;
    private volatile DefaultInteger maxCompactionThreshold;
    private volatile AbstractCompactionStrategy compactionStrategy;

    public final Directories directories;

    /** ratio of in-memory memtable size, to serialized size */
    volatile double liveRatio = 1.0;
    /** ops count last time we computed liveRatio */
    private final AtomicLong
liveRatioComputedAt = new AtomicLong(32);

    public final ColumnFamilyMetrics metric;

    /**
     * Re-applies runtime-modifiable settings after the CFMetaData object has been
     * mutated in place, and switches the memtable if its comparator is stale.
     */
    public void reload()
    {
        // metadata object has been mutated directly. make all the members jibe with new settings.

        // only update these runtime-modifiable settings if they have not been modified.
        if (!minCompactionThreshold.isModified())
            for (ColumnFamilyStore cfs : concatWithIndexes())
                cfs.minCompactionThreshold = new DefaultInteger(metadata.getMinCompactionThreshold());
        if (!maxCompactionThreshold.isModified())
            for (ColumnFamilyStore cfs : concatWithIndexes())
                cfs.maxCompactionThreshold = new DefaultInteger(metadata.getMaxCompactionThreshold());

        maybeReloadCompactionStrategy();

        indexManager.reload();

        // If the CF comparator has changed, we need to change the memtable,
        // because the old one still aliases the previous comparator. We don't
        // call forceFlush() because it can skip the switch if the memtable is
        // clean, which we don't want here. Also, because there can be a race
        // between the time we acquire the current memtable and we flush it
        // (another thread can have flushed it first), we attempt the switch
        // until we know the memtable has the current comparator.
        try
        {
            while (true)
            {
                AbstractType comparator = metadata.comparator;
                Memtable memtable = getMemtableThreadSafe();
                if (memtable.initialComparator == comparator)
                    break;

                Future future = maybeSwitchMemtable(getMemtableThreadSafe(), true);
                if (future != null)
                    future.get();
            }
        }
        catch (ExecutionException e)
        {
            throw new RuntimeException(e);
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
    }

    /**
     * Swaps in a new compaction strategy instance when the class or options in
     * metadata no longer match the live strategy; no-op otherwise.
     */
    private void maybeReloadCompactionStrategy()
    {
        // Check if there is a need for reloading
        if (metadata.compactionStrategyClass.equals(compactionStrategy.getClass()) && metadata.compactionStrategyOptions.equals(compactionStrategy.getOptions()))
            return;

        // TODO is there a way to avoid locking here?
        CompactionManager.instance.getCompactionLock().lock();
        try
        {
            compactionStrategy.shutdown();
            compactionStrategy = metadata.createCompactionStrategyInstance(this);
        }
        finally
        {
            CompactionManager.instance.getCompactionLock().unlock();
        }
    }

    public void setCompactionStrategyClass(String compactionStrategyClass) throws ConfigurationException
    {
        metadata.compactionStrategyClass = CFMetaData.createCompactionStrategy(compactionStrategyClass);
        maybeReloadCompactionStrategy();
    }

    public String getCompactionStrategyClass()
    {
        return metadata.compactionStrategyClass.getName();
    }

    public Map<String,String> getCompressionParameters()
    {
        return metadata.compressionParameters().asThriftOptions();
    }

    public void setCompressionParameters(Map<String,String> opts) throws ConfigurationException
    {
        metadata.compressionParameters = CompressionParameters.create(opts);
    }

    /**
     * Private constructor; use the createColumnFamilyStore factories. Registers
     * the MBean, loads sstables from disk (optionally) and builds index stores.
     */
    private ColumnFamilyStore(Table table,
                              String columnFamilyName,
                              IPartitioner partitioner,
                              int generation,
                              CFMetaData metadata,
                              Directories directories,
                              boolean loadSSTables)
    {
        assert metadata != null : "null metadata for " + table + ":" + columnFamilyName;

        this.table = table;
        columnFamily = columnFamilyName;
        this.metadata = metadata;
        this.minCompactionThreshold = new DefaultInteger(metadata.getMinCompactionThreshold());
        this.maxCompactionThreshold = new DefaultInteger(metadata.getMaxCompactionThreshold());
        this.partitioner = partitioner;
        this.directories = directories;
        this.indexManager = new SecondaryIndexManager(this);
        this.metric = new ColumnFamilyMetrics(this);
        fileIndexGenerator.set(generation);

        Caching caching = metadata.getCaching();

        if (logger.isDebugEnabled())
            logger.debug("Starting CFS {}", columnFamily);

        // scan for sstables corresponding to this cf and load them
        data = new DataTracker(this);

        if (loadSSTables)
        {
            Directories.SSTableLister sstableFiles = directories.sstableLister().skipTemporary(true);
            Collection<SSTableReader> sstables = SSTableReader.batchOpen(sstableFiles.list().entrySet(), metadata, this.partitioner);
// Filter non-compacted sstables, remove compacted ones
            Set<Integer> compactedSSTables = new HashSet<Integer>();
            for (SSTableReader sstable : sstables)
                compactedSSTables.addAll(sstable.getAncestors());

            Set<SSTableReader> liveSSTables = new HashSet<SSTableReader>();
            for (SSTableReader sstable : sstables)
            {
                if (compactedSSTables.contains(sstable.descriptor.generation))
                    sstable.releaseReference(); // this amount to deleting the sstable
                else
                    liveSSTables.add(sstable);
            }
            data.addInitialSSTables(liveSSTables);
        }

        if (caching == Caching.ALL || caching == Caching.KEYS_ONLY)
            CacheService.instance.keyCache.loadSaved(this);

        // compaction strategy should be created after the CFS has been prepared
        this.compactionStrategy = metadata.createCompactionStrategyInstance(this);

        // create the private ColumnFamilyStores for the secondary column indexes
        for (ColumnDefinition info : metadata.getColumn_metadata().values())
        {
            if (info.getIndexType() != null)
                indexManager.addIndexedColumn(info);
        }

        // register the mbean
        String type = this.partitioner instanceof LocalPartitioner ? "IndexColumnFamilies" : "ColumnFamilies";
        mbeanName = "org.apache.cassandra.db:type=" + type + ",keyspace=" + this.table.name + ",columnfamily=" + columnFamily;
        try
        {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            ObjectName nameObj = new ObjectName(mbeanName);
            mbs.registerMBean(this, nameObj);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    /** call when dropping or renaming a CF. Performs mbean housekeeping and invalidates CFS to other operations */
    public void invalidate()
    {
        try
        {
            valid = false;
            unregisterMBean();
            data.unreferenceSSTables();
            indexManager.invalidate();
        }
        catch (Exception e)
        {
            // this shouldn't block anything.
            logger.warn("Failed unregistering mbean: " + mbeanName, e);
        }
    }

    /**
     * Removes every SSTable in the directory from the DataTracker's view.
     * @param directory the unreadable directory, possibly with SSTables in it, but not necessarily.
     */
    void maybeRemoveUnreadableSSTables(File directory)
    {
        data.removeUnreadableSSTables(directory);
    }

    /** Unregisters the JMX MBean (if registered) and releases this CF's metrics. */
    void unregisterMBean() throws MalformedObjectNameException, InstanceNotFoundException, MBeanRegistrationException
    {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        ObjectName nameObj = new ObjectName(mbeanName);
        if (mbs.isRegistered(nameObj))
            mbs.unregisterMBean(nameObj);

        // unregister metrics
        metric.release();
    }

    public long getMinRowSize()
    {
        return metric.minRowSize.value();
    }

    public long getMaxRowSize()
    {
        return metric.maxRowSize.value();
    }

    public long getMeanRowSize()
    {
        return metric.meanRowSize.value();
    }

    public int getMeanColumns()
    {
        return data.getMeanColumns();
    }

    public static ColumnFamilyStore createColumnFamilyStore(Table table, String columnFamily, boolean loadSSTables)
    {
        return createColumnFamilyStore(table, columnFamily, StorageService.getPartitioner(), Schema.instance.getCFMetaData(table.name, columnFamily), loadSSTables);
    }

    public static ColumnFamilyStore createColumnFamilyStore(Table table, String columnFamily, IPartitioner partitioner, CFMetaData metadata)
    {
        return createColumnFamilyStore(table, columnFamily, partitioner, metadata, true);
    }

    /**
     * Factory that scans the data directories for the highest existing sstable
     * generation before constructing the store, to prevent generation conflicts.
     */
    private static synchronized ColumnFamilyStore createColumnFamilyStore(Table table,
                                                                          String columnFamily,
                                                                          IPartitioner partitioner,
                                                                          CFMetaData metadata,
                                                                          boolean loadSSTables)
    {
        // get the max generation number, to prevent generation conflicts
        Directories directories = Directories.create(table.name, columnFamily);
        Directories.SSTableLister lister = directories.sstableLister().includeBackups(true);
        List<Integer> generations = new ArrayList<Integer>();
        for (Map.Entry<Descriptor, Set<Component>> entry : lister.list().entrySet())
        {
            Descriptor desc = entry.getKey();
            generations.add(desc.generation);
            if (!desc.isCompatible())
                throw new RuntimeException(String.format("Can't open incompatible SSTable! Current version %s, found file: %s", Descriptor.Version.CURRENT, desc));
        }
        Collections.sort(generations);
        int value = (generations.size() > 0) ? (generations.get(generations.size() - 1)) : 0;

        return new ColumnFamilyStore(table, columnFamily, partitioner, value, metadata, directories, loadSSTables);
    }

    /**
     * Removes unnecessary files from the cf directory at startup: these include temp files, orphans, zero-length files
     * and compacted sstables. Files that cannot be recognized will be ignored.
     */
    public static void scrubDataDirectories(String table, String columnFamily)
    {
        logger.debug("Removing compacted SSTable files from {} (see http://wiki.apache.org/cassandra/MemtableSSTable)", columnFamily);

        Directories directories = Directories.create(table, columnFamily);
        for (Map.Entry<Descriptor,Set<Component>> sstableFiles : directories.sstableLister().list().entrySet())
        {
            Descriptor desc = sstableFiles.getKey();
            Set<Component> components = sstableFiles.getValue();

            if (components.contains(Component.COMPACTED_MARKER) || desc.temporary)
            {
                SSTable.delete(desc, components);
                continue;
            }

            File dataFile = new File(desc.filenameFor(Component.DATA));
            if (components.contains(Component.DATA) && dataFile.length() > 0)
                // everything appears to be in order... moving on.
                continue;

            // missing the DATA file! all components are orphaned
            logger.warn("Removing orphans for {}: {}", desc, components);
            for (Component component : components)
            {
                FileUtils.deleteWithConfirm(desc.filenameFor(component));
            }
        }

        // cleanup incomplete saved caches
        Pattern tmpCacheFilePattern = Pattern.compile(table + "-" + columnFamily + "-(Key|Row)Cache.*\\.tmp$");
        File dir = new File(DatabaseDescriptor.getSavedCachesLocation());

        if (dir.exists())
        {
            assert dir.isDirectory();
            for (File file : dir.listFiles())
                if (tmpCacheFilePattern.matcher(file.getName()).matches())
                    if (!file.delete())
                        logger.warn("could not delete " + file.getAbsolutePath());
        }

        // also clean out any index leftovers.
CFMetaData cfm = Schema.instance.getCFMetaData(table, columnFamily);
        if (cfm != null) // secondary indexes aren't stored in DD.
        {
            for (ColumnDefinition def : cfm.getColumn_metadata().values())
                scrubDataDirectories(table, cfm.indexColumnFamilyName(def));
        }
    }

    // must be called after all sstables are loaded since row cache merges all row versions
    public void initRowCache()
    {
        if (!isRowCacheEnabled())
            return;

        long start = System.currentTimeMillis();

        int cachedRowsRead = CacheService.instance.rowCache.loadSaved(this);
        if (cachedRowsRead > 0)
            logger.info(String.format("completed loading (%d ms; %d keys) row cache for %s.%s",
                                      System.currentTimeMillis() - start,
                                      cachedRowsRead,
                                      table.name,
                                      columnFamily));
    }

    /**
     * See #{@code StorageService.loadNewSSTables(String, String)} for more info
     *
     * @param ksName The keyspace name
     * @param cfName The columnFamily name
     */
    public static synchronized void loadNewSSTables(String ksName, String cfName)
    {
        /** ks/cf existence checks will be done by open and getCFS methods for us */
        Table table = Table.open(ksName);
        table.getColumnFamilyStore(cfName).loadNewSSTables();
    }

    /**
     * #{@inheritDoc}
     */
    public synchronized void loadNewSSTables()
    {
        logger.info("Loading new SSTables for " + table.name + "/" + columnFamily + "...");

        // snapshot the descriptors already known to the view so we only pick up new files
        Set<Descriptor> currentDescriptors = new HashSet<Descriptor>();
        for (SSTableReader sstable : data.getView().sstables)
            currentDescriptors.add(sstable.descriptor);
        Set<SSTableReader> newSSTables = new HashSet<SSTableReader>();

        Directories.SSTableLister lister = directories.sstableLister().skipTemporary(true);
        for (Map.Entry<Descriptor, Set<Component>> entry : lister.list().entrySet())
        {
            Descriptor descriptor = entry.getKey();

            if (currentDescriptors.contains(descriptor))
                continue; // old (initialized) SSTable found, skipping
            if (descriptor.temporary) // in the process of being written
                continue;

            if (!descriptor.isCompatible())
                throw new RuntimeException(String.format("Can't open incompatible SSTable! Current version %s, found file: %s",
                                                         Descriptor.Version.CURRENT,
                                                         descriptor));

            // rename the file into our generation sequence before opening it
            Descriptor newDescriptor = new Descriptor(descriptor.version,
                                                      descriptor.directory,
                                                      descriptor.ksname,
                                                      descriptor.cfname,
                                                      fileIndexGenerator.incrementAndGet(),
                                                      false);
            logger.info("Renaming new SSTable {} to {}", descriptor, newDescriptor);
            SSTableWriter.rename(descriptor, newDescriptor, entry.getValue());

            SSTableReader reader;
            try
            {
                reader = SSTableReader.open(newDescriptor, entry.getValue(), metadata, partitioner);
            }
            catch (IOException e)
            {
                // unreadable sstables are logged and skipped, not fatal
                SSTableReader.logOpenException(entry.getKey(), e);
                continue;
            }
            newSSTables.add(reader);
        }

        if (newSSTables.isEmpty())
        {
            logger.info("No new SSTables were found for " + table.name + "/" + columnFamily);
            return;
        }

        logger.info("Loading new SSTables and building secondary indexes for " + table.name + "/" + columnFamily + ": " + newSSTables);
        SSTableReader.acquireReferences(newSSTables);
        data.addSSTables(newSSTables);
        try
        {
            indexManager.maybeBuildSecondaryIndexes(newSSTables, indexManager.allIndexesNames());
        }
        finally
        {
            SSTableReader.releaseReferences(newSSTables);
        }

        logger.info("Done loading load new SSTables for " + table.name + "/" + columnFamily);
    }

    public static void rebuildSecondaryIndex(String ksName, String cfName, String...
idxNames)
    {
        ColumnFamilyStore cfs = Table.open(ksName).getColumnFamilyStore(cfName);
        Set<String> indexes = new HashSet<String>(Arrays.asList(idxNames));

        Collection<SSTableReader> sstables = cfs.getSSTables();
        try
        {
            // mark removed first so the rebuild below starts from a clean state
            cfs.indexManager.setIndexRemoved(indexes);
            SSTableReader.acquireReferences(sstables);
            logger.info(String.format("User Requested secondary index re-build for %s/%s indexes", ksName, cfName));
            cfs.indexManager.maybeBuildSecondaryIndexes(sstables, indexes);
            cfs.indexManager.setIndexBuilt(indexes);
        }
        finally
        {
            SSTableReader.releaseReferences(sstables);
        }
    }

    /**
     * @return the name of the column family
     */
    public String getColumnFamilyName()
    {
        return columnFamily;
    }

    public String getTempSSTablePath(File directory)
    {
        return getTempSSTablePath(directory, Descriptor.Version.CURRENT);
    }

    private String getTempSSTablePath(File directory, Descriptor.Version version)
    {
        Descriptor desc = new Descriptor(version,
                                         directory,
                                         table.name,
                                         columnFamily,
                                         fileIndexGenerator.incrementAndGet(),
                                         true);
        return desc.filenameFor(Component.DATA);
    }

    /** flush the given memtable and swap in a new one for its CFS, if it hasn't been frozen already. threadsafe. */
    public Future<?> maybeSwitchMemtable(Memtable oldMemtable, final boolean writeCommitLog)
    {
        if (oldMemtable.isFrozen())
        {
            logger.debug("memtable is already frozen; another thread must be flushing it");
            return null;
        }

        /*
         * If we can get the writelock, that means no new updates can come in and
         * all ongoing updates to memtables have completed. We can get the tail
         * of the log and use it as the starting position for log replay on recovery.
         *
         * This is why we Table.switchLock needs to be global instead of per-Table:
         * we need to schedule discardCompletedSegments calls in the same order as their
         * contexts (commitlog position) were read, even though the flush executor
         * is multithreaded.
         */
        Table.switchLock.writeLock().lock();
        try
        {
            // re-check under the lock: another thread may have frozen it meanwhile
            if (oldMemtable.isFrozen())
            {
                logger.debug("memtable is already frozen; another thread must be flushing it");
                return null;
            }

            assert getMemtableThreadSafe() == oldMemtable;

            final Future<ReplayPosition> ctx = writeCommitLog ? CommitLog.instance.getContext() : Futures.immediateFuture(ReplayPosition.NONE);

            // submit the memtable for any indexed sub-cfses, and our own.
            final List<ColumnFamilyStore> icc = new ArrayList<ColumnFamilyStore>();
            // don't assume that this.memtable is dirty; forceFlush can bring us here during index build even if it is not
            for (ColumnFamilyStore cfs : concatWithIndexes())
            {
                Memtable mt = cfs.getMemtableThreadSafe();
                if (!mt.isClean() && !mt.isFrozen())
                {
                    // We need to freeze indexes too because they can be concurrently flushed too (#3547)
                    mt.freeze();
                    icc.add(cfs);
                }
            }

            final CountDownLatch latch = new CountDownLatch(icc.size());
            for (ColumnFamilyStore cfs : icc)
            {
                Memtable memtable = cfs.data.switchMemtable();
                logger.info("Enqueuing flush of {}", memtable);
                memtable.flushAndSignal(latch, flushWriter, ctx);
            }

            // avoid overflow of the JMX switch counter
            if (metric.memtableSwitchCount.count() == Long.MAX_VALUE)
                metric.memtableSwitchCount.clear();
            metric.memtableSwitchCount.inc();

            // when all the memtables have been written, including for indexes, mark the flush in the commitlog header.
            // a second executor makes sure the onMemtableFlushes get called in the right order,
            // while keeping the wait-for-flush (future.get) out of anything latency-sensitive.
            return postFlushExecutor.submit(new WrappedRunnable()
            {
                public void runMayThrow() throws InterruptedException, ExecutionException
                {
                    latch.await();

                    if (!icc.isEmpty())
                    {
                        //only valid when memtables exist

                        for (SecondaryIndex index : indexManager.getIndexesNotBackedByCfs())
                        {
                            // flush any non-cfs backed indexes
                            logger.info("Flushing SecondaryIndex {}", index);
                            index.forceBlockingFlush();
                        }
                    }

                    if (writeCommitLog)
                    {
                        // if we're not writing to the commit log, we are replaying the log, so marking
                        // the log header with "you can discard anything written before the context" is not valid
                        CommitLog.instance.discardCompletedSegments(metadata.cfId, ctx.get());
                    }
                }
            });
        }
        finally
        {
            Table.switchLock.writeLock().unlock();
        }
    }

    public Future<?> forceFlush()
    {
        // during index build, 2ary index memtables can be dirty even if parent is not. if so,
        // we want flushLargestMemtables to flush the 2ary index ones too.
        boolean clean = true;
        for (ColumnFamilyStore cfs : concatWithIndexes())
            clean &= cfs.getMemtableThreadSafe().isClean();

        if (clean)
        {
            logger.debug("forceFlush requested but everything is clean in {}", columnFamily);
            return null;
        }

        return maybeSwitchMemtable(getMemtableThreadSafe(), true);
    }

    public void forceBlockingFlush() throws ExecutionException, InterruptedException
    {
        Future<?> future = forceFlush();
        if (future != null)
            future.get();
    }

    /** Keeps the row cache coherent with a freshly-applied update for {@code key}. */
    public void updateRowCache(DecoratedKey key, ColumnFamily columnFamily)
    {
        if (metadata.cfId == null)
            return; // secondary index

        RowCacheKey cacheKey = new RowCacheKey(metadata.cfId, key);

        // always invalidate a copying cache value
        if (CacheService.instance.rowCache.isPutCopying())
        {
            invalidateCachedRow(cacheKey);
            return;
        }

        // invalidate a normal cache value if it's a sentinel, so the read will retry (and include the new update)
        IRowCacheEntry cachedRow = getCachedRowInternal(cacheKey);
        if (cachedRow != null)
        {
            if (cachedRow instanceof RowCacheSentinel)
                invalidateCachedRow(cacheKey);
            else
                // columnFamily is what is written in the commit log. Because of the PeriodicCommitLog, this can be done in concurrency
                // with this. So columnFamily shouldn't be modified and if it contains super columns, neither should they. So for super
                // columns, we must make sure to clone them when adding to the cache. That's what addAllWithSCCopy does (see #3957)
                ((ColumnFamily) cachedRow).addAllWithSCCopy(columnFamily, HeapAllocator.instance);
        }
    }

    /**
     * Insert/Update the column family for this key.
     * Caller is responsible for acquiring Table.flusherLock!
     * param @ lock - lock that needs to be used.
     * param @ key - key for update/insert
     * param @ columnFamily - columnFamily changes
     */
    public void apply(DecoratedKey key, ColumnFamily columnFamily, SecondaryIndexManager.Updater indexer)
    {
        long start = System.nanoTime();

        Memtable mt = getMemtableThreadSafe();
        mt.put(key, columnFamily, indexer);
        updateRowCache(key, columnFamily);
        metric.writeLatency.addNano(System.nanoTime() - start);

        // recompute liveRatio, if we have doubled the number of ops since last calculated
        while (true)
        {
            long last = liveRatioComputedAt.get();
            long operations = metric.writeLatency.latency.count();
            if (operations < 2 * last)
                break;
            // CAS guards against concurrent writers recomputing simultaneously
            if (liveRatioComputedAt.compareAndSet(last, operations))
            {
                logger.debug("computing liveRatio of {} at {} ops", this, operations);
                mt.updateLiveRatio();
            }
        }
    }

    /** Resets gcable deletion times and drops the CF entirely when it is empty and not a tombstone. */
    public static ColumnFamily removeDeletedCF(ColumnFamily cf, int gcBefore)
    {
        cf.maybeResetDeletionTimes(gcBefore);
        return cf.getColumnCount() == 0 && !cf.isMarkedForDelete() ? null : cf;
    }

    public static ColumnFamily removeDeleted(ColumnFamily cf, int gcBefore)
    {
        return removeDeleted(cf, gcBefore, SecondaryIndexManager.nullUpdater);
    }

    /*
     This is complicated because we need to preserve deleted columns, supercolumns, and columnfamilies
     until they have been deleted for at least GC_GRACE_IN_SECONDS. But, we do not need to preserve
     their contents; just the object itself as a "tombstone" that can be used to repair other
     replicas that do not know about the deletion.
 */
public static ColumnFamily removeDeleted(ColumnFamily cf, int gcBefore, SecondaryIndexManager.Updater indexer)
{
    if (cf == null)
    {
        return null;
    }

    // First drop gcable columns/subcolumns, then decide whether the whole CF object can go away.
    removeDeletedColumnsOnly(cf, gcBefore, indexer);
    return removeDeletedCF(cf, gcBefore);
}

// Dispatches to the super- or standard-column variant of column-level tombstone GC.
// Note: the indexer is only consulted on the standard path; super columns are not indexed here.
private static void removeDeletedColumnsOnly(ColumnFamily cf, int gcBefore, SecondaryIndexManager.Updater indexer)
{
    if (cf.isSuper())
        removeDeletedSuper(cf, gcBefore);
    else
        removeDeletedStandard(cf, gcBefore, indexer);
}

// Convenience overload for callers with no secondary indexes to maintain.
public static void removeDeletedColumnsOnly(ColumnFamily cf, int gcBefore)
{
    removeDeletedColumnsOnly(cf, gcBefore, SecondaryIndexManager.nullUpdater);
}

private static void removeDeletedStandard(ColumnFamily cf, int gcBefore, SecondaryIndexManager.Updater indexer)
{
    Iterator<IColumn> iter = cf.iterator();
    while (iter.hasNext())
    {
        IColumn c = iter.next();
        // remove columns if
        // (a) the column itself is gcable or
        // (b) the column is shadowed by a CF tombstone
        if (c.getLocalDeletionTime() < gcBefore || cf.deletionInfo().isDeleted(c))
        {
            iter.remove();
            // keep the secondary index in sync with the removal
            indexer.remove(c);
        }
    }
}

private static void removeDeletedSuper(ColumnFamily cf, int gcBefore)
{
    // TODO assume deletion means "most are deleted?" and add to clone, instead of remove from original?
    // this could be improved by having compaction, or possibly even removeDeleted, r/m the tombstone
    // once gcBefore has passed, so if new stuff is added in it doesn't use the wrong algorithm forever
    Iterator<IColumn> iter = cf.iterator();
    while (iter.hasNext())
    {
        SuperColumn c = (SuperColumn)iter.next();
        Iterator<IColumn> subIter = c.getSubColumns().iterator();
        while (subIter.hasNext())
        {
            IColumn subColumn = subIter.next();
            // remove subcolumns if
            // (a) the subcolumn itself is gcable or
            // (b) the supercolumn is shadowed by the CF tombstone and the subcolumn is not newer, or
            // (c) the subcolumn is shadowed by the supercolumn's own tombstone
            if (subColumn.getLocalDeletionTime() < gcBefore
                || cf.deletionInfo().isDeleted(c.name(), subColumn.timestamp())
                || c.deletionInfo().isDeleted(subColumn))
            {
                subIter.remove();
            }
        }
        // forget the supercolumn's deletion markers once they are old enough to GC
        c.maybeResetDeletionTimes(gcBefore);
        // an empty, non-tombstone supercolumn carries no information; drop it entirely
        if (c.getSubColumns().isEmpty() && !c.isMarkedForDelete())
        {
            iter.remove();
        }
    }
}

/**
 * @param sstables
 * @return sstables whose key range overlaps with that of the given sstables, not including itself.
 * (The given sstables may or may not overlap with each other.)
 */
public Set<SSTableReader> getOverlappingSSTables(Collection<SSTableReader> sstables)
{
    logger.debug("Checking for sstables overlapping {}", sstables);

    // a normal compaction won't ever have an empty sstables list, but we create a skeleton
    // compaction controller for streaming, and that passes an empty list.
    if (sstables.isEmpty())
        return ImmutableSet.of();

    DataTracker.SSTableIntervalTree tree = data.getView().intervalTree;

    Set<SSTableReader> results = null;
    for (SSTableReader sstable : sstables)
    {
        // every sstable overlaps itself, so the search result must at least contain it
        Set<SSTableReader> overlaps = ImmutableSet.copyOf(tree.search(Interval.<RowPosition, SSTableReader>create(sstable.first, sstable.last)));
        assert overlaps.contains(sstable);
        results = results == null ?
                  overlaps : Sets.union(results, overlaps).immutableCopy();
    }
    // exclude the inputs themselves from the overlap set
    results = Sets.difference(results, ImmutableSet.copyOf(sstables));

    return results;
}

/*
 * Called after a BinaryMemtable flushes its in-memory data, or we add a file
 * via bootstrap. This information is cached in the ColumnFamilyStore.
 * This is useful for reads because the ColumnFamilyStore first looks in
 * the in-memory store and then into the disk to find the key. If invoked
 * during recoveryMode the onMemtableFlush() need not be invoked.
 *
 * param @ sstable - the reader for the file just flushed to disk
 */
public void addSSTable(SSTableReader sstable)
{
    assert sstable.getColumnFamilyName().equals(columnFamily);
    addSSTables(Arrays.asList(sstable));
}

// Registers new sstables with the tracker, then asks the compaction manager to
// check whether a background compaction is now warranted.
public void addSSTables(Collection<SSTableReader> sstables)
{
    data.addSSTables(sstables);
    CompactionManager.instance.submitBackground(this);
}

/**
 * Calculate expected file size of SSTable after compaction.
 *
 * If operation type is {@code CLEANUP} and we're not dealing with an index sstable,
 * then we calculate expected file size with checking token range to be eliminated.
 *
 * Otherwise, we just add up all the files' size, which is the worst case file
 * size for compaction of all the list of files given.
 *
 * @param sstables SSTables to calculate expected compacted file size
 * @param operation Operation type
 * @return Expected file size of SSTable after compaction
 */
public long getExpectedCompactedFileSize(Iterable<SSTableReader> sstables, OperationType operation)
{
    if (operation != OperationType.CLEANUP || isIndex())
    {
        return SSTable.getTotalBytes(sstables);
    }

    // cleanup size estimation only counts bytes for keys local to this node
    long expectedFileSize = 0;
    Collection<Range<Token>> ranges = StorageService.instance.getLocalRanges(table.name);
    for (SSTableReader sstable : sstables)
    {
        // on-disk [start, end) byte spans covered by locally-owned token ranges
        List<Pair<Long, Long>> positions = sstable.getPositionsForRanges(ranges);
        for (Pair<Long, Long> position : positions)
            expectedFileSize += position.right - position.left;
    }
    return expectedFileSize;
}

/*
 *  Find the maximum size file in the list; returns null for an empty input.
 */
public SSTableReader getMaxSizeFile(Iterable<SSTableReader> sstables)
{
    long maxSize = 0L;
    SSTableReader maxFile = null;
    for (SSTableReader sstable : sstables)
    {
        if (sstable.onDiskLength() > maxSize)
        {
            maxSize = sstable.onDiskLength();
            maxFile = sstable;
        }
    }
    return maxFile;
}

public void forceCleanup(CounterId.OneShotRenewer renewer) throws ExecutionException, InterruptedException
{
    CompactionManager.instance.performCleanup(ColumnFamilyStore.this, renewer);
}

public void scrub() throws ExecutionException, InterruptedException
{
    // snapshot first so the pre-scrub state can be recovered if scrubbing goes wrong
    snapshotWithoutFlush("pre-scrub-" + System.currentTimeMillis());
    CompactionManager.instance.performScrub(ColumnFamilyStore.this);
}

public void sstablesRewrite() throws ExecutionException, InterruptedException
{
    CompactionManager.instance.performSSTableRewrite(ColumnFamilyStore.this);
}

public void markCompacted(Collection<SSTableReader> sstables, OperationType compactionType)
{
    assert !sstables.isEmpty();
    data.markCompacted(sstables, compactionType);
}

// Atomically swaps the compacted sstables for their replacements in the tracker.
public void replaceCompactedSSTables(Collection<SSTableReader> sstables, Iterable<SSTableReader> replacements, OperationType compactionType)
{
data.replaceCompactedSSTables(sstables, replacements, compactionType); } void replaceFlushed(Memtable memtable, SSTableReader sstable) { data.replaceFlushed(memtable, sstable); if (sstable != null) CompactionManager.instance.submitBackground(this); } public boolean isValid() { return valid; } public long getMemtableColumnsCount() { return metric.memtableColumnsCount.value(); } public long getMemtableDataSize() { return metric.memtableDataSize.value(); } public long getTotalMemtableLiveSize() { return getMemtableDataSize() + indexManager.getTotalLiveSize(); } public int getMemtableSwitchCount() { return (int) metric.memtableSwitchCount.count(); } /** * get the current memtable in a threadsafe fashion. note that simply "return memtable_" is * incorrect; you need to lock to introduce a thread safe happens-before ordering. * * do NOT use this method to do either a put or get on the memtable object, since it could be * flushed in the meantime (and its executor terminated). * * also do NOT make this method public or it will really get impossible to reason about these things. * @return */ private Memtable getMemtableThreadSafe() { return data.getMemtable(); } /** * Package protected for access from the CompactionManager. 
*/ public DataTracker getDataTracker() { return data; } public Collection<SSTableReader> getSSTables() { return data.getSSTables(); } public Set<SSTableReader> getUncompactingSSTables() { return data.getUncompactingSSTables(); } public long[] getRecentSSTablesPerReadHistogram() { return metric.recentSSTablesPerRead.getBuckets(true); } public long[] getSSTablesPerReadHistogram() { return metric.sstablesPerRead.getBuckets(false); } public long getReadCount() { return metric.readLatency.latency.count(); } public double getRecentReadLatencyMicros() { return metric.readLatency.getRecentLatency(); } public long[] getLifetimeReadLatencyHistogramMicros() { return metric.readLatency.totalLatencyHistogram.getBuckets(false); } public long[] getRecentReadLatencyHistogramMicros() { return metric.readLatency.recentLatencyHistogram.getBuckets(true); } public long getTotalReadLatencyMicros() { return metric.readLatency.totalLatency.count(); } public int getPendingTasks() { return metric.pendingTasks.value(); } public long getWriteCount() { return metric.writeLatency.latency.count(); } public long getTotalWriteLatencyMicros() { return metric.writeLatency.totalLatency.count(); } public double getRecentWriteLatencyMicros() { return metric.writeLatency.getRecentLatency(); } public long[] getLifetimeWriteLatencyHistogramMicros() { return metric.writeLatency.totalLatencyHistogram.getBuckets(false); } public long[] getRecentWriteLatencyHistogramMicros() { return metric.writeLatency.recentLatencyHistogram.getBuckets(true); } public ColumnFamily getColumnFamily(DecoratedKey key, QueryPath path, ByteBuffer start, ByteBuffer finish, boolean reversed, int limit) { return getColumnFamily(QueryFilter.getSliceFilter(key, path, start, finish, reversed, limit)); } /** * get a list of columns starting from a given column, in a specified order. * only the latest version of a column is returned. 
* @return null if there is no data and no tombstones; otherwise a ColumnFamily */ public ColumnFamily getColumnFamily(QueryFilter filter) { return getColumnFamily(filter, gcBefore()); } public int gcBefore() { return (int) (System.currentTimeMillis() / 1000) - metadata.getGcGraceSeconds(); } /** * fetch the row given by filter.key if it is in the cache; if not, read it from disk and cache it * @param cfId the column family to read the row from * @param filter the columns being queried. Note that we still cache entire rows, but if a row is uncached * and we race to cache it, only the winner will read the entire row * @return the entire row for filter.key, if present in the cache (or we can cache it), or just the column * specified by filter otherwise */ private ColumnFamily getThroughCache(UUID cfId, QueryFilter filter) { assert isRowCacheEnabled() : String.format("Row cache is not enabled on column family [" + getColumnFamilyName() + "]"); RowCacheKey key = new RowCacheKey(cfId, filter.key); // attempt a sentinel-read-cache sequence. if a write invalidates our sentinel, we'll return our // (now potentially obsolete) data, but won't cache it. 
see CASSANDRA-3862 IRowCacheEntry cached = CacheService.instance.rowCache.get(key); if (cached != null) { if (cached instanceof RowCacheSentinel) { // Some other read is trying to cache the value, just do a normal non-caching read return getTopLevelColumns(filter, Integer.MIN_VALUE, false); } return (ColumnFamily) cached; } RowCacheSentinel sentinel = new RowCacheSentinel(); boolean sentinelSuccess = CacheService.instance.rowCache.putIfAbsent(key, sentinel); try { ColumnFamily data = getTopLevelColumns(QueryFilter.getIdentityFilter(filter.key, new QueryPath(columnFamily)), Integer.MIN_VALUE, true); if (sentinelSuccess && data != null) CacheService.instance.rowCache.replace(key, sentinel, data); return data; } finally { if (sentinelSuccess && data == null) CacheService.instance.rowCache.remove(key); } } ColumnFamily getColumnFamily(QueryFilter filter, int gcBefore) { assert columnFamily.equals(filter.getColumnFamilyName()) : filter.getColumnFamilyName(); logger.debug("Executing single-partition query"); ColumnFamily result = null; long start = System.nanoTime(); try { if (!isRowCacheEnabled()) { ColumnFamily cf = getTopLevelColumns(filter, gcBefore, false); if (cf == null) return null; // TODO this is necessary because when we collate supercolumns together, we don't check // their subcolumns for relevance, so we need to do a second prune post facto here. result = cf.isSuper() ? removeDeleted(cf, gcBefore) : removeDeletedCF(cf, gcBefore); } else { UUID cfId = Schema.instance.getId(table.name, columnFamily); if (cfId == null) { logger.trace("no id found for {}.{}", table.name, columnFamily); return null; } ColumnFamily cached = getThroughCache(cfId, filter); if (cached == null) { logger.trace("cached row is empty"); return null; } result = filterColumnFamily(cached, filter, gcBefore); } } finally { metric.readLatency.addNano(System.nanoTime() - start); } logger.debug("Read {} cells", result == null ? 
0 : result.getColumnCount()); return result; } /** * Filter a cached row, which will not be modified by the filter, but may be modified by throwing out * tombstones that are no longer relevant. * The returned column family won't be thread safe. */ ColumnFamily filterColumnFamily(ColumnFamily cached, QueryFilter filter, int gcBefore) { ColumnFamily cf = cached.cloneMeShallow(ArrayBackedSortedColumns.factory(), filter.filter.isReversed()); OnDiskAtomIterator ci = filter.getMemtableColumnIterator(cached, null); filter.collateOnDiskAtom(cf, Collections.singletonList(ci), gcBefore); // TODO this is necessary because when we collate supercolumns together, we don't check // their subcolumns for relevance, so we need to do a second prune post facto here. return cf.isSuper() ? removeDeleted(cf, gcBefore) : removeDeletedCF(cf, gcBefore); } /** * Get the current view and acquires references on all its sstables. * This is a bit tricky because we must ensure that between the time we * get the current view and the time we acquire the references the set of * sstables hasn't changed. Otherwise we could get a view for which an * sstable have been deleted in the meantime. * * At the end of this method, a reference on all the sstables of the * returned view will have been acquired and must thus be released when * appropriate. */ private DataTracker.View markCurrentViewReferenced() { while (true) { DataTracker.View currentView = data.getView(); if (SSTableReader.acquireReferences(currentView.sstables)) return currentView; } } /** * Get the current sstables, acquiring references on all of them. * The caller is in charge of releasing the references on the sstables. * * See markCurrentViewReferenced() above. 
 */
public Collection<SSTableReader> markCurrentSSTablesReferenced()
{
    return markCurrentViewReferenced().sstables;
}

/**
 * @return a ViewFragment containing the sstables and memtables that may need to be merged
 * for the given @param key, according to the interval tree
 */
public ViewFragment markReferenced(DecoratedKey key)
{
    assert !key.isMinimum();
    DataTracker.View view;
    List<SSTableReader> sstables;
    while (true)
    {
        view = data.getView();
        sstables = view.intervalTree.search(key);
        if (SSTableReader.acquireReferences(sstables))
            break;
        // retry w/ new view
    }
    return new ViewFragment(sstables, Iterables.concat(Collections.singleton(view.memtable), view.memtablesPendingFlush));
}

/**
 * @return a ViewFragment containing the sstables and memtables that may need to be merged
 * for rows between @param startWith and @param stopAt, inclusive, according to the interval tree
 */
public ViewFragment markReferenced(RowPosition startWith, RowPosition stopAt)
{
    DataTracker.View view;
    List<SSTableReader> sstables;
    while (true)
    {
        view = data.getView();
        // startAt == minimum is ok, but stopAt == minimum is confusing because all IntervalTree deals with
        // is Comparable, so it won't know to special-case that. However max() should not be called if the
        // intervalTree is empty, so check that first
        if (view.intervalTree.isEmpty())
        {
            sstables = Collections.emptyList();
            break;
        }

        RowPosition stopInTree = stopAt.isMinimum() ? view.intervalTree.max() : stopAt;
        sstables = view.intervalTree.search(Interval.<RowPosition, SSTableReader>create(startWith, stopInTree));
        if (SSTableReader.acquireReferences(sstables))
            break;
        // retry w/ new view
    }
    return new ViewFragment(sstables, Iterables.concat(Collections.singleton(view.memtable), view.memtablesPendingFlush));
}

// Lists the data files that may contain the given key (JMX/debug helper).
public List<String> getSSTablesForKey(String key)
{
    // NOTE(review): key.getBytes() uses the platform default charset — presumably keys are
    // expected to be UTF-8 here; confirm before changing.
    DecoratedKey dk = new DecoratedKey(partitioner.getToken(ByteBuffer.wrap(key.getBytes())), ByteBuffer.wrap(key.getBytes()));
    ViewFragment view = markReferenced(dk);
    try
    {
        List<String> files = new ArrayList<String>();
        for (SSTableReader sstr : view.sstables)
        {
            // check if the key actually exists in this sstable, without updating cache and stats
            if (sstr.getPosition(dk, SSTableReader.Operator.EQ, false) != null)
                files.add(sstr.getFilename());
        }
        return files;
    }
    finally
    {
        SSTableReader.releaseReferences(view.sstables);
    }
}

// Collates the row for the given filter across memtables and sstables,
// recording how many sstables were touched in the read metrics.
public ColumnFamily getTopLevelColumns(QueryFilter filter, int gcBefore, boolean forCache)
{
    CollationController controller = new CollationController(this, forCache, filter, gcBefore);
    ColumnFamily columns = controller.getTopLevelColumns();
    metric.updateSSTableIterated(controller.getSstablesIterated());
    return columns;
}

public static abstract class AbstractScanIterator extends AbstractIterator<Row> implements CloseableIterator<Row>
{
    // Subclasses that already filter rows themselves may override this to return false.
    public boolean needsFiltering()
    {
        return true;
    }
}

/**
 * Iterate over a range of rows and columns from memtables/sstables.
 *
 * @param superColumn optional SuperColumn to slice subcolumns of; null to slice top-level columns
 * @param range Either a Bounds, which includes start key, or a Range, which does not.
* @param columnFilter description of the columns we're interested in for each row */ public AbstractScanIterator getSequentialIterator(ByteBuffer superColumn, final AbstractBounds<RowPosition> range, IFilter columnFilter) { assert !(range instanceof Range) || !((Range)range).isWrapAround() || range.right.isMinimum() : range; final RowPosition startWith = range.left; final RowPosition stopAt = range.right; QueryFilter filter = new QueryFilter(null, new QueryPath(columnFamily, superColumn, null), columnFilter); final ViewFragment view = markReferenced(startWith, stopAt); try { final CloseableIterator<Row> iterator = RowIteratorFactory.getIterator(view.memtables, view.sstables, startWith, stopAt, filter, this); final int gcBefore = (int)(System.currentTimeMillis() / 1000) - metadata.getGcGraceSeconds(); return new AbstractScanIterator() { protected Row computeNext() { // pull a row out of the iterator if (!iterator.hasNext()) return endOfData(); Row current = iterator.next(); DecoratedKey key = current.key; if (!stopAt.isMinimum() && stopAt.compareTo(key) < 0) return endOfData(); // skipping outside of assigned range if (!range.contains(key)) return computeNext(); logger.trace("scanned {}", key); // TODO this is necessary because when we collate supercolumns together, we don't check // their subcolumns for relevance, so we need to do a second prune post facto here. return current.cf != null && current.cf.isSuper() ? new Row(current.key, removeDeleted(current.cf, gcBefore)) : current; } public void close() throws IOException { SSTableReader.releaseReferences(view.sstables); iterator.close(); } }; } catch (RuntimeException e) { // In case getIterator() throws, otherwise the iteror close method releases the references. 
SSTableReader.releaseReferences(view.sstables); throw e; } } public List<Row> getRangeSlice(ByteBuffer superColumn, final AbstractBounds<RowPosition> range, int maxResults, IFilter columnFilter, List<IndexExpression> rowFilter) { return getRangeSlice(superColumn, range, maxResults, columnFilter, rowFilter, false, false); } public List<Row> getRangeSlice(ByteBuffer superColumn, final AbstractBounds<RowPosition> range, int maxResults, IFilter columnFilter, List<IndexExpression> rowFilter, boolean maxIsColumns, boolean isPaging) { logger.debug("Executing seq scan"); return filter(getSequentialIterator(superColumn, range, columnFilter), ExtendedFilter.create(this, columnFilter, rowFilter, maxResults, maxIsColumns, isPaging)); } public List<Row> search(List<IndexExpression> clause, AbstractBounds<RowPosition> range, int maxResults, IFilter dataFilter) { return search(clause, range, maxResults, dataFilter, false); } public List<Row> search(List<IndexExpression> clause, AbstractBounds<RowPosition> range, int maxResults, IFilter dataFilter, boolean maxIsColumns) { logger.debug("Executing indexed scan"); return indexManager.search(clause, range, maxResults, dataFilter, maxIsColumns); } public List<Row> filter(AbstractScanIterator rowIterator, ExtendedFilter filter) { logger.trace("Filtering {} for rows matching {}", rowIterator, filter); List<Row> rows = new ArrayList<Row>(); int columnsCount = 0; try { while (rowIterator.hasNext() && rows.size() < filter.maxRows() && columnsCount < filter.maxColumns()) { // get the raw columns requested, and additional columns for the expressions if necessary Row rawRow = rowIterator.next(); ColumnFamily data = rawRow.cf; if (rowIterator.needsFiltering()) { // roughtly IFilter extraFilter = filter.getExtraFilter(data); if (extraFilter != null) { QueryPath path = new QueryPath(columnFamily); ColumnFamily cf = filter.cfs.getColumnFamily(new QueryFilter(rawRow.key, path, extraFilter)); if (cf != null) data.addAll(cf, HeapAllocator.instance); 
} if (!filter.isSatisfiedBy(data, null)) continue; logger.trace("{} satisfies all filter expressions", data); // cut the resultset back to what was requested, if necessary data = filter.prune(data); } rows.add(new Row(rawRow.key, data)); if (data != null) columnsCount += filter.lastCounted(data); // Update the underlying filter to avoid querying more columns per slice than necessary and to handle paging filter.updateFilter(columnsCount); } return rows; } finally { try { rowIterator.close(); } catch (IOException e) { throw new RuntimeException(e); } } } public AbstractType<?> getComparator() { return metadata.comparator; } public void snapshotWithoutFlush(String snapshotName) { for (ColumnFamilyStore cfs : concatWithIndexes()) { DataTracker.View currentView = cfs.markCurrentViewReferenced(); try { for (SSTableReader ssTable : currentView.sstables) { File snapshotDirectory = Directories.getSnapshotDirectory(ssTable.descriptor, snapshotName); ssTable.createLinks(snapshotDirectory.getPath()); // hard links if (logger.isDebugEnabled()) logger.debug("Snapshot for " + table + " keyspace data file " + ssTable.getFilename() + " created in " + snapshotDirectory); } if (cfs.compactionStrategy instanceof LeveledCompactionStrategy) cfs.directories.snapshotLeveledManifest(snapshotName); } finally { SSTableReader.releaseReferences(currentView.sstables); } } } public List<SSTableReader> getSnapshotSSTableReader(String tag) throws IOException { Map<Descriptor, Set<Component>> snapshots = directories.sstableLister().snapshots(tag).list(); List<SSTableReader> readers = new ArrayList<SSTableReader>(snapshots.size()); for (Map.Entry<Descriptor, Set<Component>> entries : snapshots.entrySet()) readers.add(SSTableReader.open(entries.getKey(), entries.getValue(), metadata, partitioner)); return readers; } /** * Take a snap shot of this columnfamily store. 
* * @param snapshotName the name of the associated with the snapshot */ public void snapshot(String snapshotName) { try { forceBlockingFlush(); } catch (ExecutionException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new AssertionError(e); } snapshotWithoutFlush(snapshotName); } public boolean snapshotExists(String snapshotName) { return directories.snapshotExists(snapshotName); } public void clearSnapshot(String snapshotName) { directories.clearSnapshot(snapshotName); } public boolean hasUnreclaimedSpace() { return getLiveDiskSpaceUsed() < getTotalDiskSpaceUsed(); } public long getTotalDiskSpaceUsed() { return metric.totalDiskSpaceUsed.count(); } public long getLiveDiskSpaceUsed() { return metric.liveDiskSpaceUsed.count(); } public int getLiveSSTableCount() { return metric.liveSSTableCount.value(); } /** * @return the cached row for @param key if it is already present in the cache. * That is, unlike getThroughCache, it will not readAndCache the row if it is not present, nor * are these calls counted in cache statistics. * * Note that this WILL cause deserialization of a SerializingCache row, so if all you * need to know is whether a row is present or not, use containsCachedRow instead. */ public ColumnFamily getRawCachedRow(DecoratedKey key) { if (metadata.cfId == null) return null; // secondary index IRowCacheEntry cached = getCachedRowInternal(new RowCacheKey(metadata.cfId, key)); return cached == null || cached instanceof RowCacheSentinel ? null : (ColumnFamily) cached; } private IRowCacheEntry getCachedRowInternal(RowCacheKey key) { return CacheService.instance.rowCache.getCapacity() == 0 ? 
null : CacheService.instance.rowCache.getInternal(key); } /** * @return true if @param key is contained in the row cache */ public boolean containsCachedRow(DecoratedKey key) { return CacheService.instance.rowCache.getCapacity() != 0 && CacheService.instance.rowCache.containsKey(new RowCacheKey(metadata.cfId, key)); } public void invalidateCachedRow(RowCacheKey key) { CacheService.instance.rowCache.remove(key); } public void invalidateCachedRow(DecoratedKey key) { UUID cfId = Schema.instance.getId(table.name, this.columnFamily); if (cfId == null) return; // secondary index invalidateCachedRow(new RowCacheKey(cfId, key)); } public void forceMajorCompaction() throws InterruptedException, ExecutionException { CompactionManager.instance.performMaximal(this); } public static Iterable<ColumnFamilyStore> all() { List<Iterable<ColumnFamilyStore>> stores = new ArrayList<Iterable<ColumnFamilyStore>>(Schema.instance.getTables().size()); for (Table table : Table.all()) { stores.add(table.getColumnFamilyStores()); } return Iterables.concat(stores); } public static List<ColumnFamilyStore> allUserDefined() { List<ColumnFamilyStore> cfses = new ArrayList<ColumnFamilyStore>(); for (Table table : Sets.difference(ImmutableSet.copyOf(Table.all()), ImmutableSet.of(Table.open(Table.SYSTEM_KS)))) cfses.addAll(table.getColumnFamilyStores()); return cfses; } public Iterable<DecoratedKey> keySamples(Range<Token> range) { Collection<SSTableReader> sstables = getSSTables(); Iterable<DecoratedKey>[] samples = new Iterable[sstables.size()]; int i = 0; for (SSTableReader sstable: sstables) { samples[i++] = sstable.getKeySamples(range); } return Iterables.concat(samples); } /** * For testing. No effort is made to clear historical or even the current memtables, nor for * thread safety. All we do is wipe the sstable containers clean, while leaving the actual * data files present on disk. (This allows tests to easily call loadNewSSTables on them.) 
*/ public void clearUnsafe() { for (ColumnFamilyStore cfs : concatWithIndexes()) cfs.data.init(); } /** * Waits for flushes started BEFORE THIS METHOD IS CALLED to finish. * Does NOT guarantee that no flush is active when it returns. */ private void waitForActiveFlushes() { Future<?> future; Table.switchLock.writeLock().lock(); try { future = postFlushExecutor.submit(new Runnable() { public void run() { } }); } finally { Table.switchLock.writeLock().unlock(); } try { future.get(); } catch (InterruptedException e) { throw new AssertionError(e); } catch (ExecutionException e) { throw new AssertionError(e); } } /** * Truncate practically deletes the entire column family's data * @return a Future to the delete operation. Call the future's get() to make * sure the column family has been deleted */ public Future<?> truncate() throws ExecutionException, InterruptedException { // We have two goals here: // - truncate should delete everything written before truncate was invoked // - but not delete anything that isn't part of the snapshot we create. // We accomplish this by first flushing manually, then snapshotting, and // recording the timestamp IN BETWEEN those actions. Any sstables created // with this timestamp or greater time, will not be marked for delete. // // Bonus complication: since we store replay position in sstable metadata, // truncating those sstables means we will replay any CL segments from the // beginning if we restart before they are discarded for normal reasons // post-truncate. So we need to (a) force a new segment so the currently // active one can be discarded, and (b) flush *all* CFs so that unflushed // data in others don't keep any pre-truncate CL segments alive. // // Bonus bonus: simply forceFlush of all the CF is not enough, because if // for a given column family the memtable is clean, forceFlush will return // immediately, even though there could be a memtable being flushed at the same // time. 
So to guarantee that all segments can be cleaned out, we need to // "waitForActiveFlushes" after the new segment has been created. logger.debug("truncating {}", columnFamily); if (DatabaseDescriptor.isAutoSnapshot()) { // flush the CF being truncated before forcing the new segment forceBlockingFlush(); } else { // just nuke the memtable data w/o writing to disk first Table.switchLock.writeLock().lock(); try { for (ColumnFamilyStore cfs : concatWithIndexes()) { Memtable mt = cfs.getMemtableThreadSafe(); if (!mt.isClean() && !mt.isFrozen()) { mt.cfs.data.renewMemtable(); } } } finally { Table.switchLock.writeLock().unlock(); } } KSMetaData ksm = Schema.instance.getKSMetaData(this.table.name); if (ksm.durableWrites) { CommitLog.instance.forceNewSegment(); Future<ReplayPosition> position = CommitLog.instance.getContext(); // now flush everyone else. re-flushing ourselves is not necessary, but harmless for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) cfs.forceFlush(); waitForActiveFlushes(); // if everything was clean, flush won't have called discard CommitLog.instance.discardCompletedSegments(metadata.cfId, position.get()); } // sleep a little to make sure that our truncatedAt comes after any sstable // that was part of the flushed we forced; otherwise on a tie, it won't get deleted. 
try { long starttime = System.currentTimeMillis(); while ((System.currentTimeMillis() - starttime) < 1) { Thread.sleep(1); } } catch (InterruptedException e) { throw new AssertionError(e); } long truncatedAt = System.currentTimeMillis(); if (DatabaseDescriptor.isAutoSnapshot()) snapshot(Table.getTimestampedSnapshotName(columnFamily)); return CompactionManager.instance.submitTruncate(this, truncatedAt); } public long getBloomFilterFalsePositives() { return metric.bloomFilterFalsePositives.value(); } public long getRecentBloomFilterFalsePositives() { return metric.recentBloomFilterFalsePositives.value(); } public double getBloomFilterFalseRatio() { return metric.bloomFilterFalseRatio.value(); } public double getRecentBloomFilterFalseRatio() { return metric.recentBloomFilterFalseRatio.value(); } public long getBloomFilterDiskSpaceUsed() { return metric.bloomFilterDiskSpaceUsed.value(); } @Override public String toString() { return "CFS(" + "Keyspace='" + table.name + '\'' + ", ColumnFamily='" + columnFamily + '\'' + ')'; } public void disableAutoCompaction() { minCompactionThreshold.set(0); maxCompactionThreshold.set(0); } public void enableAutoCompaction() { minCompactionThreshold.reset(); maxCompactionThreshold.reset(); } /* JMX getters and setters for the Default<T>s. 
 - get/set minCompactionThreshold
 - get/set maxCompactionThreshold
 - get memsize
 - get memops
 - get/set memtime
*/

public AbstractCompactionStrategy getCompactionStrategy()
{
    return compactionStrategy;
}

/**
 * Sets both thresholds together, validating the pair first so min can never
 * exceed max (except max == 0, which disables compaction).
 */
public void setCompactionThresholds(int minThreshold, int maxThreshold)
{
    validateCompactionThresholds(minThreshold, maxThreshold);

    minCompactionThreshold.set(minThreshold);
    maxCompactionThreshold.set(maxThreshold);

    // this is called as part of CompactionStrategy constructor; avoid circular dependency by checking for null
    if (compactionStrategy != null)
        CompactionManager.instance.submitBackground(this);
}

public int getMinimumCompactionThreshold()
{
    return minCompactionThreshold.value();
}

public void setMinimumCompactionThreshold(int minCompactionThreshold)
{
    // validate against the current max before applying
    validateCompactionThresholds(minCompactionThreshold, maxCompactionThreshold.value());
    this.minCompactionThreshold.set(minCompactionThreshold);
}

public int getMaximumCompactionThreshold()
{
    return maxCompactionThreshold.value();
}

public void setMaximumCompactionThreshold(int maxCompactionThreshold)
{
    // validate against the current min before applying
    validateCompactionThresholds(minCompactionThreshold.value(), maxCompactionThreshold);
    this.maxCompactionThreshold.set(maxCompactionThreshold);
}

// Rejects min > max; max == 0 is exempt because it means compaction is disabled.
private void validateCompactionThresholds(int minThreshold, int maxThreshold)
{
    if (minThreshold > maxThreshold && maxThreshold != 0)
        throw new RuntimeException(String.format("The min_compaction_threshold cannot be larger than the max_compaction_threshold. " +
                                                 "Min is '%d', Max is '%d'.", minThreshold, maxThreshold));
}

public boolean isCompactionDisabled()
{
    return getMinimumCompactionThreshold() <= 0 || getMaximumCompactionThreshold() <= 0;
}
// End JMX get/set.
public long estimateKeys() { return data.estimatedKeys(); } public long[] getEstimatedRowSizeHistogram() { return metric.estimatedRowSizeHistogram.value(); } public long[] getEstimatedColumnCountHistogram() { return metric.estimatedColumnCountHistogram.value(); } public double getCompressionRatio() { return metric.compressionRatio.value(); } /** true if this CFS contains secondary index data */ public boolean isIndex() { return partitioner instanceof LocalPartitioner; } private ByteBuffer intern(ByteBuffer name) { ByteBuffer internedName = internedNames.get(name); if (internedName == null) { internedName = ByteBufferUtil.clone(name); ByteBuffer concurrentName = internedNames.putIfAbsent(internedName, internedName); if (concurrentName != null) internedName = concurrentName; } return internedName; } public ByteBuffer internOrCopy(ByteBuffer name, Allocator allocator) { if (internedNames.size() >= INTERN_CUTOFF) return allocator.clone(name); return intern(name); } public ByteBuffer maybeIntern(ByteBuffer name) { if (internedNames.size() >= INTERN_CUTOFF) return null; return intern(name); } public SSTableWriter createCompactionWriter(long estimatedRows, File location, Collection<SSTableReader> sstables) { ReplayPosition rp = ReplayPosition.getReplayPosition(sstables); SSTableMetadata.Collector sstableMetadataCollector = SSTableMetadata.createCollector().replayPosition(rp); // Get the max timestamp of the precompacted sstables // and adds generation of live ancestors for (SSTableReader sstable : sstables) { sstableMetadataCollector.updateMaxTimestamp(sstable.getMaxTimestamp()); sstableMetadataCollector.addAncestor(sstable.descriptor.generation); for (Integer i : sstable.getAncestors()) { if (new File(sstable.descriptor.withGeneration(i).filenameFor(Component.DATA)).exists()) sstableMetadataCollector.addAncestor(i); } } return new SSTableWriter(getTempSSTablePath(location), estimatedRows, metadata, partitioner, sstableMetadataCollector); } public 
Iterable<ColumnFamilyStore> concatWithIndexes() { return Iterables.concat(indexManager.getIndexesBackedByCfs(), Collections.singleton(this)); } public Set<Memtable> getMemtablesPendingFlush() { return data.getMemtablesPendingFlush(); } public List<String> getBuiltIndexes() { return indexManager.getBuiltIndexes(); } public int getUnleveledSSTables() { return this.compactionStrategy instanceof LeveledCompactionStrategy ? ((LeveledCompactionStrategy) this.compactionStrategy).getLevelSize(0) : 0; } public int[] getSSTableCountPerLevel() { return compactionStrategy instanceof LeveledCompactionStrategy ? ((LeveledCompactionStrategy) compactionStrategy).getAllLevelSize() : null; } public static class ViewFragment { public final List<SSTableReader> sstables; public final Iterable<Memtable> memtables; public ViewFragment(List<SSTableReader> sstables, Iterable<Memtable> memtables) { this.sstables = sstables; this.memtables = memtables; } } /** * Returns the creation time of the oldest memtable not fully flushed yet. */ public long oldestUnflushedMemtable() { DataTracker.View view = data.getView(); long oldest = view.memtable.creationTime(); for (Memtable memtable : view.memtablesPendingFlush) oldest = Math.min(oldest, memtable.creationTime()); return oldest; } public boolean isEmpty() { DataTracker.View view = data.getView(); return view.sstables.isEmpty() && view.memtable.getOperations() == 0 && view.memtablesPendingFlush.isEmpty(); } private boolean isRowCacheEnabled() { return !(metadata.getCaching() == Caching.NONE || metadata.getCaching() == Caching.KEYS_ONLY || CacheService.instance.rowCache.getCapacity() == 0); } /** * Discard all SSTables that were created before given timestamp. Caller is responsible to obtain compactionLock. 
* * @param truncatedAt The timestamp of the truncation * (all SSTables before that timestamp are going be marked as compacted) */ public void discardSSTables(long truncatedAt) { List<SSTableReader> truncatedSSTables = new ArrayList<SSTableReader>(); for (SSTableReader sstable : getSSTables()) { if (!sstable.newSince(truncatedAt)) truncatedSSTables.add(sstable); } if (!truncatedSSTables.isEmpty()) markCompacted(truncatedSSTables, OperationType.UNKNOWN); } }
include range bounds in debug logging of range and index scans
src/java/org/apache/cassandra/db/ColumnFamilyStore.java
include range bounds in debug logging of range and index scans
Java
apache-2.0
ee8dbe522468d2a196a56eab405ea528872cacb8
0
phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida
package ca.corefacility.bioinformatics.irida.security.permissions; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.repository.CrudRepository; import org.springframework.security.core.Authentication; import ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.model.user.Role; /** * Generic super-class for permission types to extend from. * * @author Franklin Bristow <[email protected]> * * @param <DomainObjectType> * the type of domain object that this permission is evaluating. */ public abstract class BasePermission<DomainObjectType> { private static final Logger logger = LoggerFactory.getLogger(BasePermission.class); /** * Get the implementation-specific permission provided. * * @return the permission provided by the permission class. */ public abstract String getPermissionProvided(); /** * This method is called by {@link BasePermission} to evaluate the custom * permissions provided by implementing classes. * * @param authentication * the authenticated user. * @param targetDomainObject * the object that the user is attempting to access. * @return true if permitted, false otherwise. */ protected abstract boolean customPermissionAllowed(Authentication authentication, DomainObjectType targetDomainObject); /** * The type of object to be loaded from the database. */ private Class<DomainObjectType> domainObjectType; /** * The repository to load objects with. */ private CrudRepository<DomainObjectType, Long> repository; /** * Constructor with handles on the type of repository and type of domain * object. * * @param domainObjectType * the domain object type managed by this permission. * @param repositoryId * the identifier of the repository to load from the spring * application context. 
*/ protected BasePermission(Class<DomainObjectType> domainObjectType, CrudRepository<DomainObjectType, Long> repository) { this.repository = repository; this.domainObjectType = domainObjectType; } /** * Evaluates the permission of a single object. * @param authentication The Authentication object. * @param targetDomainObject The target domain object to evaluate permission (assumes this is not a collection). * @return True if permission is allowed on this object, false otherwise. * @throws EntityNotFoundException If the object does not exist. */ @SuppressWarnings("unchecked") private boolean customPermissionAllowedSingleObject(Authentication authentication, Object targetDomainObject) { DomainObjectType domainObject; if (targetDomainObject instanceof Long) { Long id = (Long)targetDomainObject; logger.trace("Trying to find domain object by id [" + id + "]"); domainObject = repository.findOne((Long) id); if (domainObject == null) { throw new EntityNotFoundException("Could not find entity with id [" + id + "]"); } } else if (domainObjectType.isAssignableFrom(targetDomainObject.getClass())) { // reflection replacement for instanceof domainObject = (DomainObjectType) targetDomainObject; } else { throw new IllegalArgumentException("Parameter to " + getClass().getName() + " must be of type Long or " + domainObjectType.getName() + "."); } return customPermissionAllowed(authentication, domainObject); } /** * Tests permission for a collection of objects. * @param authentication The Authentication object. * @param targetDomainObjects The collection of domain objects to check for permission. * @return True if permission is allowed for every object in the collection, false otherwise. * @throws EntityNotFoundException If one of the objects in the collection does not exist. 
*/ private boolean customPermissionAllowedCollection(Authentication authentication, Collection<?> targetDomainObjects) { boolean permitted = true; for (Object domainObjectInCollection : targetDomainObjects) { permitted &= customPermissionAllowedSingleObject(authentication, domainObjectInCollection); } return permitted; } /** * Is the authenticated user allowed to perform some action on the target * domain object? * * @param authentication * the authenticated user. * @param targetDomainObject * the object the user is requesting to perform an action on. * @return true if the action is allowed, false otherwise. */ public final boolean isAllowed(Authentication authentication, Object targetDomainObject) { // fast pass for administrators -- administrators are allowed to access // everything. if (authentication.getAuthorities().contains(Role.ROLE_ADMIN)) { return true; } if (targetDomainObject instanceof Collection<?>) { return customPermissionAllowedCollection(authentication, (Collection<?>)targetDomainObject); } else { return customPermissionAllowedSingleObject(authentication, targetDomainObject); } } }
src/main/java/ca/corefacility/bioinformatics/irida/security/permissions/BasePermission.java
package ca.corefacility.bioinformatics.irida.security.permissions; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.repository.CrudRepository; import org.springframework.security.core.Authentication; import ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.model.user.Role; /** * Generic super-class for permission types to extend from. * * @author Franklin Bristow <[email protected]> * * @param <DomainObjectType> * the type of domain object that this permission is evaluating. */ public abstract class BasePermission<DomainObjectType> { private static final Logger logger = LoggerFactory.getLogger(BasePermission.class); /** * Get the implementation-specific permission provided. * * @return the permission provided by the permission class. */ public abstract String getPermissionProvided(); /** * This method is called by {@link BasePermission} to evaluate the custom * permissions provided by implementing classes. * * @param authentication * the authenticated user. * @param targetDomainObject * the object that the user is attempting to access. * @return true if permitted, false otherwise. */ protected abstract boolean customPermissionAllowed(Authentication authentication, DomainObjectType targetDomainObject); /** * The type of object to be loaded from the database. */ private Class<DomainObjectType> domainObjectType; /** * The repository to load objects with. */ private CrudRepository<DomainObjectType, Long> repository; /** * Constructor with handles on the type of repository and type of domain * object. * * @param domainObjectType * the domain object type managed by this permission. * @param repositoryId * the identifier of the repository to load from the spring * application context. 
*/ protected BasePermission(Class<DomainObjectType> domainObjectType, CrudRepository<DomainObjectType, Long> repository) { this.repository = repository; this.domainObjectType = domainObjectType; } /** * Given a Long id, find the corresponding DomainObject. * @param id The Long id to search for. * @return A DomainObject of the particular type corresponding to this id. * @throws EntityNotFoundException If the DomainObject could not be found. */ private DomainObjectType findDomainObjectByLongId(Long id) { logger.trace("Trying to find domain object by id [" + id + "]"); DomainObjectType domainObject = repository.findOne((Long) id); if (domainObject == null) { throw new EntityNotFoundException("Could not find entity with id [" + id + "]"); } return domainObject; } /** * Is the authenticated user allowed to perform some action on the target * domain object? * * @param authentication * the authenticated user. * @param targetDomainObject * the object the user is requesting to perform an action on. * @return true if the action is allowed, false otherwise. */ @SuppressWarnings("unchecked") public final boolean isAllowed(Authentication authentication, Object targetDomainObject) { // fast pass for administrators -- administrators are allowed to access // everything. 
if (authentication.getAuthorities().contains(Role.ROLE_ADMIN)) { return true; } // load the domain object (if necessary) so that the subclass can // evaluate access DomainObjectType domainObject; if (targetDomainObject instanceof Long) { domainObject = findDomainObjectByLongId((Long)targetDomainObject); } else if (domainObjectType.isAssignableFrom(targetDomainObject.getClass())) { // reflection replacement for instanceof domainObject = (DomainObjectType) targetDomainObject; } else if (targetDomainObject instanceof Collection<?>) { Collection<?> domainObjects = (Collection<?>)targetDomainObject; boolean permitted = true; for (Object domainObjectObject : domainObjects) { if (domainObjectObject instanceof Long) { domainObject = findDomainObjectByLongId((Long)domainObjectObject); } else if (domainObjectType.isAssignableFrom(domainObjectObject.getClass())) { domainObject = (DomainObjectType)domainObjectObject; } else { throw new IllegalArgumentException("Parameter to " + getClass().getName() + " is not a valid Collection, must be of type" + "Collection<" + domainObjectType.getName() + "or Long>."); } permitted &= customPermissionAllowed(authentication, domainObject); } return permitted; } else { throw new IllegalArgumentException("Parameter to " + getClass().getName() + " must be of type Long or " + domainObjectType.getName() + "."); } // pass off any other logic to the implementing permission class. return customPermissionAllowed(authentication, domainObject); } }
Re-factored BasePermission to be much easier to understand
src/main/java/ca/corefacility/bioinformatics/irida/security/permissions/BasePermission.java
Re-factored BasePermission to be much easier to understand
Java
apache-2.0
7da104335cf1ec93118f90b961df907455ffb097
0
FredGithub/libgdx,hyvas/libgdx,MikkelTAndersen/libgdx,sarkanyi/libgdx,samskivert/libgdx,tommyettinger/libgdx,josephknight/libgdx,cypherdare/libgdx,josephknight/libgdx,FredGithub/libgdx,Zomby2D/libgdx,MovingBlocks/libgdx,samskivert/libgdx,MikkelTAndersen/libgdx,hyvas/libgdx,codepoke/libgdx,ttencate/libgdx,sarkanyi/libgdx,josephknight/libgdx,tommyettinger/libgdx,NathanSweet/libgdx,bladecoder/libgdx,NathanSweet/libgdx,ttencate/libgdx,fwolff/libgdx,sarkanyi/libgdx,ttencate/libgdx,MikkelTAndersen/libgdx,sarkanyi/libgdx,hyvas/libgdx,ttencate/libgdx,MovingBlocks/libgdx,stinsonga/libgdx,fwolff/libgdx,stinsonga/libgdx,tommyettinger/libgdx,MikkelTAndersen/libgdx,MovingBlocks/libgdx,cypherdare/libgdx,MovingBlocks/libgdx,libgdx/libgdx,MikkelTAndersen/libgdx,samskivert/libgdx,josephknight/libgdx,FredGithub/libgdx,FredGithub/libgdx,fwolff/libgdx,sarkanyi/libgdx,cypherdare/libgdx,Zomby2D/libgdx,MovingBlocks/libgdx,NathanSweet/libgdx,cypherdare/libgdx,alex-dorokhov/libgdx,Zomby2D/libgdx,sarkanyi/libgdx,fwolff/libgdx,hyvas/libgdx,tommyettinger/libgdx,ttencate/libgdx,hyvas/libgdx,alex-dorokhov/libgdx,ttencate/libgdx,libgdx/libgdx,alex-dorokhov/libgdx,bladecoder/libgdx,MovingBlocks/libgdx,FredGithub/libgdx,Zomby2D/libgdx,hyvas/libgdx,libgdx/libgdx,FredGithub/libgdx,FredGithub/libgdx,samskivert/libgdx,codepoke/libgdx,sarkanyi/libgdx,josephknight/libgdx,fwolff/libgdx,NathanSweet/libgdx,fwolff/libgdx,samskivert/libgdx,cypherdare/libgdx,stinsonga/libgdx,MovingBlocks/libgdx,codepoke/libgdx,codepoke/libgdx,alex-dorokhov/libgdx,MikkelTAndersen/libgdx,stinsonga/libgdx,MikkelTAndersen/libgdx,stinsonga/libgdx,FredGithub/libgdx,samskivert/libgdx,NathanSweet/libgdx,libgdx/libgdx,MovingBlocks/libgdx,alex-dorokhov/libgdx,ttencate/libgdx,josephknight/libgdx,codepoke/libgdx,codepoke/libgdx,codepoke/libgdx,samskivert/libgdx,josephknight/libgdx,ttencate/libgdx,libgdx/libgdx,Zomby2D/libgdx,samskivert/libgdx,sarkanyi/libgdx,hyvas/libgdx,alex-dorokhov/libgdx,hyvas/libgdx,alex-dorokhov/libgdx,josephknight/
libgdx,alex-dorokhov/libgdx,tommyettinger/libgdx,fwolff/libgdx,codepoke/libgdx,fwolff/libgdx,bladecoder/libgdx,MikkelTAndersen/libgdx,bladecoder/libgdx,bladecoder/libgdx
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.scenes.scene2d; import com.badlogic.gdx.Application.ApplicationType; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Graphics; import com.badlogic.gdx.Input; import com.badlogic.gdx.InputAdapter; import com.badlogic.gdx.InputMultiplexer; import com.badlogic.gdx.graphics.Camera; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.OrthographicCamera; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.math.Matrix4; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.scenes.scene2d.InputEvent.Type; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.ui.Table.Debug; import com.badlogic.gdx.scenes.scene2d.utils.FocusListener; import com.badlogic.gdx.scenes.scene2d.utils.FocusListener.FocusEvent; import com.badlogic.gdx.scenes.scene2d.utils.ScissorStack; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.Disposable; import com.badlogic.gdx.utils.Pool.Poolable; import com.badlogic.gdx.utils.Pools; import com.badlogic.gdx.utils.Scaling; 
import com.badlogic.gdx.utils.SnapshotArray; import com.badlogic.gdx.utils.viewport.ScalingViewport; import com.badlogic.gdx.utils.viewport.Viewport; /** A 2D scene graph containing hierarchies of {@link Actor actors}. Stage handles the viewport and distributes input events. * <p> * {@link #setViewport(Viewport)} controls the coordinates used within the stage and sets up the camera used to convert between * stage coordinates and screen coordinates. * <p> * A stage must receive input events so it can distribute them to actors. This is typically done by passing the stage to * {@link Input#setInputProcessor(com.badlogic.gdx.InputProcessor) Gdx.input.setInputProcessor}. An {@link InputMultiplexer} may * be used to handle input events before or after the stage does. If an actor handles an event by returning true from the input * method, then the stage's input method will also return true, causing subsequent InputProcessors to not receive the event. * <p> * The Stage and its constituents (like Actors and Listeners) are not thread-safe and should only be updated and queried from a * single thread (presumably the main render thread). Methods should be reentrant, so you can update Actors and Stages from within * callbacks and handlers. * @author mzechner * @author Nathan Sweet */ public class Stage extends InputAdapter implements Disposable { /** True if any actor has ever had debug enabled. 
*/ static boolean debug; private Viewport viewport; private final Batch batch; private boolean ownsBatch; private Group root; private final Vector2 tempCoords = new Vector2(); private final Actor[] pointerOverActors = new Actor[20]; private final boolean[] pointerTouched = new boolean[20]; private final int[] pointerScreenX = new int[20]; private final int[] pointerScreenY = new int[20]; private int mouseScreenX, mouseScreenY; private Actor mouseOverActor; private Actor keyboardFocus, scrollFocus; private final SnapshotArray<TouchFocus> touchFocuses = new SnapshotArray(true, 4, TouchFocus.class); private boolean actionsRequestRendering = true; private ShapeRenderer debugShapes; private boolean debugInvisible, debugAll, debugUnderMouse, debugParentUnderMouse; private Debug debugTableUnderMouse = Debug.none; private final Color debugColor = new Color(0, 1, 0, 0.85f); /** Creates a stage with a {@link ScalingViewport} set to {@link Scaling#stretch}. The stage will use its own {@link Batch} * which will be disposed when the stage is disposed. */ public Stage () { this(new ScalingViewport(Scaling.stretch, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), new OrthographicCamera()), new SpriteBatch()); ownsBatch = true; } /** Creates a stage with the specified viewport. The stage will use its own {@link Batch} which will be disposed when the stage * is disposed. */ public Stage (Viewport viewport) { this(viewport, new SpriteBatch()); ownsBatch = true; } /** Creates a stage with the specified viewport and batch. This can be used to avoid creating a new batch (which can be * somewhat slow) if multiple stages are used during an application's life time. * @param batch Will not be disposed if {@link #dispose()} is called, handle disposal yourself. 
*/ public Stage (Viewport viewport, Batch batch) { if (viewport == null) throw new IllegalArgumentException("viewport cannot be null."); if (batch == null) throw new IllegalArgumentException("batch cannot be null."); this.viewport = viewport; this.batch = batch; root = new Group(); root.setStage(this); viewport.update(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), true); } public void draw () { Camera camera = viewport.getCamera(); camera.update(); if (!root.isVisible()) return; Batch batch = this.batch; batch.setProjectionMatrix(camera.combined); batch.begin(); root.draw(batch, 1); batch.end(); if (debug) drawDebug(); } private void drawDebug () { if (debugShapes == null) { debugShapes = new ShapeRenderer(); debugShapes.setAutoShapeType(true); } if (debugUnderMouse || debugParentUnderMouse || debugTableUnderMouse != Debug.none) { screenToStageCoordinates(tempCoords.set(Gdx.input.getX(), Gdx.input.getY())); Actor actor = hit(tempCoords.x, tempCoords.y, true); if (actor == null) return; if (debugParentUnderMouse && actor.parent != null) actor = actor.parent; if (debugTableUnderMouse == Debug.none) actor.setDebug(true); else { while (actor != null) { if (actor instanceof Table) break; actor = actor.parent; } if (actor == null) return; ((Table)actor).debug(debugTableUnderMouse); } if (debugAll && actor instanceof Group) ((Group)actor).debugAll(); disableDebug(root, actor); } else { if (debugAll) root.debugAll(); } Gdx.gl.glEnable(GL20.GL_BLEND); debugShapes.setProjectionMatrix(viewport.getCamera().combined); debugShapes.begin(); root.drawDebug(debugShapes); debugShapes.end(); } /** Disables debug on all actors recursively except the specified actor and any children. 
*/ private void disableDebug (Actor actor, Actor except) { if (actor == except) return; actor.setDebug(false); if (actor instanceof Group) { SnapshotArray<Actor> children = ((Group)actor).children; for (int i = 0, n = children.size; i < n; i++) disableDebug(children.get(i), except); } } /** Calls {@link #act(float)} with {@link Graphics#getDeltaTime()}, limited to a minimum of 30fps. */ public void act () { act(Math.min(Gdx.graphics.getDeltaTime(), 1 / 30f)); } /** Calls the {@link Actor#act(float)} method on each actor in the stage. Typically called each frame. This method also fires * enter and exit events. * @param delta Time in seconds since the last frame. */ public void act (float delta) { // Update over actors. Done in act() because actors may change position, which can fire enter/exit without an input event. for (int pointer = 0, n = pointerOverActors.length; pointer < n; pointer++) { Actor overLast = pointerOverActors[pointer]; // Check if pointer is gone. if (!pointerTouched[pointer]) { if (overLast != null) { pointerOverActors[pointer] = null; screenToStageCoordinates(tempCoords.set(pointerScreenX[pointer], pointerScreenY[pointer])); // Exit over last. InputEvent event = Pools.obtain(InputEvent.class); event.setType(InputEvent.Type.exit); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setRelatedActor(overLast); event.setPointer(pointer); overLast.fire(event); Pools.free(event); } continue; } // Update over actor for the pointer. pointerOverActors[pointer] = fireEnterAndExit(overLast, pointerScreenX[pointer], pointerScreenY[pointer], pointer); } // Update over actor for the mouse on the desktop. 
ApplicationType type = Gdx.app.getType(); if (type == ApplicationType.Desktop || type == ApplicationType.Applet || type == ApplicationType.WebGL) mouseOverActor = fireEnterAndExit(mouseOverActor, mouseScreenX, mouseScreenY, -1); root.act(delta); } private Actor fireEnterAndExit (Actor overLast, int screenX, int screenY, int pointer) { // Find the actor under the point. screenToStageCoordinates(tempCoords.set(screenX, screenY)); Actor over = hit(tempCoords.x, tempCoords.y, true); if (over == overLast) return overLast; // Exit overLast. if (overLast != null) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setType(InputEvent.Type.exit); event.setRelatedActor(over); overLast.fire(event); Pools.free(event); } // Enter over. if (over != null) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setType(InputEvent.Type.enter); event.setRelatedActor(overLast); over.fire(event); Pools.free(event); } return over; } /** Applies a touch down event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. 
*/ public boolean touchDown (int screenX, int screenY, int pointer, int button) { if (!isInsideViewport(screenX, screenY)) return false; pointerTouched[pointer] = true; pointerScreenX[pointer] = screenX; pointerScreenY[pointer] = screenY; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setType(Type.touchDown); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setButton(button); Actor target = hit(tempCoords.x, tempCoords.y, true); if (target == null) { if (root.getTouchable() == Touchable.enabled) root.fire(event); } else { target.fire(event); } boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a touch moved event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. Only {@link InputListener listeners} that returned true for touchDown will receive this event. */ public boolean touchDragged (int screenX, int screenY, int pointer) { pointerScreenX[pointer] = screenX; pointerScreenY[pointer] = screenY; mouseScreenX = screenX; mouseScreenY = screenY; if (touchFocuses.size == 0) return false; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setType(Type.touchDragged); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] focuses = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = focuses[i]; if (focus.pointer != pointer) continue; if (!touchFocuses.contains(focus, true)) continue; // Touch focus already gone. 
event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); if (focus.listener.handle(event)) event.handle(); } touchFocuses.end(); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a touch up event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the event. * Only {@link InputListener listeners} that returned true for touchDown will receive this event. */ public boolean touchUp (int screenX, int screenY, int pointer, int button) { pointerTouched[pointer] = false; pointerScreenX[pointer] = screenX; pointerScreenY[pointer] = screenY; if (touchFocuses.size == 0) return false; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setType(Type.touchUp); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setButton(button); SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] focuses = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = focuses[i]; if (focus.pointer != pointer || focus.button != button) continue; if (!touchFocuses.removeValue(focus, true)) continue; // Touch focus already gone. event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); if (focus.listener.handle(event)) event.handle(); Pools.free(focus); } touchFocuses.end(); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a mouse moved event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. This event only occurs on the desktop. 
*/ public boolean mouseMoved (int screenX, int screenY) { if (!isInsideViewport(screenX, screenY)) return false; mouseScreenX = screenX; mouseScreenY = screenY; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(Type.mouseMoved); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); Actor target = hit(tempCoords.x, tempCoords.y, true); if (target == null) target = root; target.fire(event); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a mouse scroll event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. This event only occurs on the desktop. */ public boolean scrolled (int amount) { Actor target = scrollFocus == null ? root : scrollFocus; screenToStageCoordinates(tempCoords.set(mouseScreenX, mouseScreenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.scrolled); event.setScrollAmount(amount); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); target.fire(event); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a key down event to the actor that has {@link Stage#setKeyboardFocus(Actor) keyboard focus}, if any, and returns * true if the event was {@link Event#handle() handled}. */ public boolean keyDown (int keyCode) { Actor target = keyboardFocus == null ? root : keyboardFocus; InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.keyDown); event.setKeyCode(keyCode); target.fire(event); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a key up event to the actor that has {@link Stage#setKeyboardFocus(Actor) keyboard focus}, if any, and returns true * if the event was {@link Event#handle() handled}. */ public boolean keyUp (int keyCode) { Actor target = keyboardFocus == null ? 
root : keyboardFocus; InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.keyUp); event.setKeyCode(keyCode); target.fire(event); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a key typed event to the actor that has {@link Stage#setKeyboardFocus(Actor) keyboard focus}, if any, and returns * true if the event was {@link Event#handle() handled}. */ public boolean keyTyped (char character) { Actor target = keyboardFocus == null ? root : keyboardFocus; InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.keyTyped); event.setCharacter(character); target.fire(event); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Adds the listener to be notified for all touchDragged and touchUp events for the specified pointer and button. The actor * will be used as the {@link Event#getListenerActor() listener actor} and {@link Event#getTarget() target}. */ public void addTouchFocus (EventListener listener, Actor listenerActor, Actor target, int pointer, int button) { TouchFocus focus = Pools.obtain(TouchFocus.class); focus.listenerActor = listenerActor; focus.target = target; focus.listener = listener; focus.pointer = pointer; focus.button = button; touchFocuses.add(focus); } /** Removes the listener from being notified for all touchDragged and touchUp events for the specified pointer and button. Note * the listener may never receive a touchUp event if this method is used. 
*/ public void removeTouchFocus (EventListener listener, Actor listenerActor, Actor target, int pointer, int button) { SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; for (int i = touchFocuses.size - 1; i >= 0; i--) { TouchFocus focus = touchFocuses.get(i); if (focus.listener == listener && focus.listenerActor == listenerActor && focus.target == target && focus.pointer == pointer && focus.button == button) { touchFocuses.removeIndex(i); Pools.free(focus); } } } /** Cancels touch focus for the specified actor. * @see #cancelTouchFocus() */ public void cancelTouchFocus (Actor actor) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.touchUp); event.setStageX(Integer.MIN_VALUE); event.setStageY(Integer.MIN_VALUE); // Cancel all current touch focuses for the specified listener, allowing for concurrent modification, and never cancel the // same focus twice. SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] items = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = items[i]; if (focus.listenerActor != actor) continue; if (!touchFocuses.removeValue(focus, true)) continue; // Touch focus already gone. event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); event.setPointer(focus.pointer); event.setButton(focus.button); focus.listener.handle(event); // Cannot return TouchFocus to pool, as it may still be in use (eg if cancelTouchFocus is called from touchDragged). } touchFocuses.end(); Pools.free(event); } /** Sends a touchUp event to all listeners that are registered to receive touchDragged and touchUp events and removes their * touch focus. This method removes all touch focus listeners, but sends a touchUp event so that the state of the listeners * remains consistent (listeners typically expect to receive touchUp eventually). The location of the touchUp is * {@link Integer#MIN_VALUE}. 
Listeners can use {@link InputEvent#isTouchFocusCancel()} to ignore this event if needed. */ public void cancelTouchFocus () { cancelTouchFocusExcept(null, null); } /** Cancels touch focus for all listeners except the specified listener. * @see #cancelTouchFocus() */ public void cancelTouchFocusExcept (EventListener exceptListener, Actor exceptActor) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.touchUp); event.setStageX(Integer.MIN_VALUE); event.setStageY(Integer.MIN_VALUE); // Cancel all current touch focuses except for the specified listener, allowing for concurrent modification, and never // cancel the same focus twice. SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] items = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = items[i]; if (focus.listener == exceptListener && focus.listenerActor == exceptActor) continue; if (!touchFocuses.removeValue(focus, true)) continue; // Touch focus already gone. event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); event.setPointer(focus.pointer); event.setButton(focus.button); focus.listener.handle(event); // Cannot return TouchFocus to pool, as it may still be in use (eg if cancelTouchFocus is called from touchDragged). } touchFocuses.end(); Pools.free(event); } /** Adds an actor to the root of the stage. * @see Group#addActor(Actor) */ public void addActor (Actor actor) { root.addActor(actor); } /** Adds an action to the root of the stage. * @see Group#addAction(Action) */ public void addAction (Action action) { root.addAction(action); } /** Returns the root's child actors. * @see Group#getChildren() */ public Array<Actor> getActors () { return root.children; } /** Adds a listener to the root. * @see Actor#addListener(EventListener) */ public boolean addListener (EventListener listener) { return root.addListener(listener); } /** Removes a listener from the root. 
* @see Actor#removeListener(EventListener) */ public boolean removeListener (EventListener listener) { return root.removeListener(listener); } /** Adds a capture listener to the root. * @see Actor#addCaptureListener(EventListener) */ public boolean addCaptureListener (EventListener listener) { return root.addCaptureListener(listener); } /** Removes a listener from the root. * @see Actor#removeCaptureListener(EventListener) */ public boolean removeCaptureListener (EventListener listener) { return root.removeCaptureListener(listener); } /** Removes the root's children, actions, and listeners. */ public void clear () { unfocusAll(); root.clear(); } /** Removes the touch, keyboard, and scroll focused actors. */ public void unfocusAll () { setScrollFocus(null); setKeyboardFocus(null); cancelTouchFocus(); } /** Removes the touch, keyboard, and scroll focus for the specified actor and any descendants. */ public void unfocus (Actor actor) { cancelTouchFocus(actor); if (scrollFocus != null && scrollFocus.isDescendantOf(actor)) setScrollFocus(null); if (keyboardFocus != null && keyboardFocus.isDescendantOf(actor)) setKeyboardFocus(null); } /** Sets the actor that will receive key events. * @param actor May be null. * @return true if the unfocus and focus events were not cancelled by a {@link FocusListener}. 
*/ public boolean setKeyboardFocus (Actor actor) { if (keyboardFocus == actor) return true; FocusEvent event = Pools.obtain(FocusEvent.class); event.setStage(this); event.setType(FocusEvent.Type.keyboard); Actor oldKeyboardFocus = keyboardFocus; if (oldKeyboardFocus != null) { event.setFocused(false); event.setRelatedActor(actor); oldKeyboardFocus.fire(event); } boolean success = !event.isCancelled(); if (success) { keyboardFocus = actor; if (actor != null) { event.setFocused(true); event.setRelatedActor(oldKeyboardFocus); actor.fire(event); success = !event.isCancelled(); if (!success) setKeyboardFocus(oldKeyboardFocus); } } Pools.free(event); return success; } /** Gets the actor that will receive key events. * @return May be null. */ public Actor getKeyboardFocus () { return keyboardFocus; } /** Sets the actor that will receive scroll events. * @param actor May be null. * @return true if the unfocus and focus events were not cancelled by a {@link FocusListener}. */ public boolean setScrollFocus (Actor actor) { if (scrollFocus == actor) return true; FocusEvent event = Pools.obtain(FocusEvent.class); event.setStage(this); event.setType(FocusEvent.Type.scroll); Actor oldScrollFocus = scrollFocus; if (oldScrollFocus != null) { event.setFocused(false); event.setRelatedActor(actor); oldScrollFocus.fire(event); } boolean success = !event.isCancelled(); if (success) { scrollFocus = actor; if (actor != null) { event.setFocused(true); event.setRelatedActor(oldScrollFocus); actor.fire(event); success = !event.isCancelled(); if (!success) setScrollFocus(oldScrollFocus); } } Pools.free(event); return success; } /** Gets the actor that will receive scroll events. * @return May be null. */ public Actor getScrollFocus () { return scrollFocus; } public Batch getBatch () { return batch; } public Viewport getViewport () { return viewport; } public void setViewport (Viewport viewport) { this.viewport = viewport; } /** The viewport's world width. 
*/ public float getWidth () { return viewport.getWorldWidth(); } /** The viewport's world height. */ public float getHeight () { return viewport.getWorldHeight(); } /** The viewport's camera. */ public Camera getCamera () { return viewport.getCamera(); } /** Returns the root group which holds all actors in the stage. */ public Group getRoot () { return root; } /** Replaces the root group. Usually this is not necessary but a subclass may be desired in some cases, eg being notified of * {@link Group#childrenChanged()}. */ public void setRoot (Group root) { this.root = root; } /** Returns the {@link Actor} at the specified location in stage coordinates. Hit testing is performed in the order the actors * were inserted into the stage, last inserted actors being tested first. To get stage coordinates from screen coordinates, use * {@link #screenToStageCoordinates(Vector2)}. * @param touchable If true, the hit detection will respect the {@link Actor#setTouchable(Touchable) touchability}. * @return May be null if no actor was hit. */ public Actor hit (float stageX, float stageY, boolean touchable) { root.parentToLocalCoordinates(tempCoords.set(stageX, stageY)); return root.hit(tempCoords.x, tempCoords.y, touchable); } /** Transforms the screen coordinates to stage coordinates. * @param screenCoords Input screen coordinates and output for resulting stage coordinates. */ public Vector2 screenToStageCoordinates (Vector2 screenCoords) { viewport.unproject(screenCoords); return screenCoords; } /** Transforms the stage coordinates to screen coordinates. * @param stageCoords Input stage coordinates and output for resulting screen coordinates. */ public Vector2 stageToScreenCoordinates (Vector2 stageCoords) { viewport.project(stageCoords); stageCoords.y = viewport.getScreenHeight() - stageCoords.y; return stageCoords; } /** Transforms the coordinates to screen coordinates. The coordinates can be anywhere in the stage since the transform matrix * describes how to convert them. 
The transform matrix is typically obtained from {@link Batch#getTransformMatrix()} during * {@link Actor#draw(Batch, float)}. * @see Actor#localToStageCoordinates(Vector2) */ public Vector2 toScreenCoordinates (Vector2 coords, Matrix4 transformMatrix) { return viewport.toScreenCoordinates(coords, transformMatrix); } /** Calculates window scissor coordinates from local coordinates using the batch's current transformation matrix. * @see ScissorStack#calculateScissors(Camera, float, float, float, float, Matrix4, Rectangle, Rectangle) */ public void calculateScissors (Rectangle localRect, Rectangle scissorRect) { viewport.calculateScissors(batch.getTransformMatrix(), localRect, scissorRect); Matrix4 transformMatrix; if (debugShapes != null && debugShapes.isDrawing()) transformMatrix = debugShapes.getTransformMatrix(); else transformMatrix = batch.getTransformMatrix(); viewport.calculateScissors(transformMatrix, localRect, scissorRect); } /** If true, any actions executed during a call to {@link #act()}) will result in a call to {@link Graphics#requestRendering()} * . Widgets that animate or otherwise require additional rendering may check this setting before calling * {@link Graphics#requestRendering()}. Default is true. */ public void setActionsRequestRendering (boolean actionsRequestRendering) { this.actionsRequestRendering = actionsRequestRendering; } public boolean getActionsRequestRendering () { return actionsRequestRendering; } /** The default color that can be used by actors to draw debug lines. */ public Color getDebugColor () { return debugColor; } /** If true, debug lines are shown for actors even when {@link Actor#isVisible()} is false. */ public void setDebugInvisible (boolean debugInvisible) { this.debugInvisible = debugInvisible; } /** If true, debug lines are shown for all actors. 
*/
	public void setDebugAll (boolean debugAll) {
		if (this.debugAll == debugAll) return;
		this.debugAll = debugAll;
		if (debugAll)
			debug = true;
		else
			root.setDebug(false, true);
	}

	public boolean isDebugAll () {
		return debugAll;
	}

	/** If true, debug is enabled only for the actor under the mouse. Can be combined with {@link #setDebugAll(boolean)}. */
	public void setDebugUnderMouse (boolean debugUnderMouse) {
		if (this.debugUnderMouse == debugUnderMouse) return;
		this.debugUnderMouse = debugUnderMouse;
		if (debugUnderMouse)
			debug = true;
		else
			root.setDebug(false, true);
	}

	/** If true, debug is enabled only for the parent of the actor under the mouse. Can be combined with
	 * {@link #setDebugAll(boolean)}. */
	public void setDebugParentUnderMouse (boolean debugParentUnderMouse) {
		if (this.debugParentUnderMouse == debugParentUnderMouse) return;
		this.debugParentUnderMouse = debugParentUnderMouse;
		if (debugParentUnderMouse)
			debug = true;
		else
			root.setDebug(false, true);
	}

	/** If not {@link Debug#none}, debug is enabled only for the first ascendant of the actor under the mouse that is a table. Can
	 * be combined with {@link #setDebugAll(boolean)}.
	 * @param debugTableUnderMouse May be null for {@link Debug#none}. */
	public void setDebugTableUnderMouse (Debug debugTableUnderMouse) {
		if (debugTableUnderMouse == null) debugTableUnderMouse = Debug.none;
		if (this.debugTableUnderMouse == debugTableUnderMouse) return;
		this.debugTableUnderMouse = debugTableUnderMouse;
		if (debugTableUnderMouse != Debug.none)
			debug = true;
		else
			root.setDebug(false, true);
	}

	/** If true, debug is enabled only for the first ascendant of the actor under the mouse that is a table. Can be combined with
	 * {@link #setDebugAll(boolean)}. */
	public void setDebugTableUnderMouse (boolean debugTableUnderMouse) {
		setDebugTableUnderMouse(debugTableUnderMouse ? Debug.all : Debug.none);
	}

	public void dispose () {
		clear();
		// Only dispose the batch if this stage created it.
		if (ownsBatch) batch.dispose();
	}

	/** Check if screen coordinates are inside the viewport's screen area. */
	protected boolean isInsideViewport (int screenX, int screenY) {
		int x0 = viewport.getScreenX();
		int x1 = x0 + viewport.getScreenWidth();
		int y0 = viewport.getScreenY();
		int y1 = y0 + viewport.getScreenHeight();
		// Flip y to the viewport's y-up convention before the bounds check.
		screenY = Gdx.graphics.getHeight() - screenY;
		return screenX >= x0 && screenX < x1 && screenY >= y0 && screenY < y1;
	}

	/** Internal class for managing touch focus. Public only for GWT.
	 * @author Nathan Sweet */
	public static final class TouchFocus implements Poolable {
		EventListener listener;
		Actor listenerActor, target;
		int pointer, button;

		public void reset () {
			listenerActor = null;
			listener = null;
			target = null;
		}
	}
}
gdx/src/com/badlogic/gdx/scenes/scene2d/Stage.java
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.scenes.scene2d; import com.badlogic.gdx.Application.ApplicationType; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Graphics; import com.badlogic.gdx.Input; import com.badlogic.gdx.InputAdapter; import com.badlogic.gdx.InputMultiplexer; import com.badlogic.gdx.graphics.Camera; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.OrthographicCamera; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.math.Matrix4; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.scenes.scene2d.InputEvent.Type; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.ui.Table.Debug; import com.badlogic.gdx.scenes.scene2d.utils.FocusListener; import com.badlogic.gdx.scenes.scene2d.utils.FocusListener.FocusEvent; import com.badlogic.gdx.scenes.scene2d.utils.ScissorStack; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.Disposable; import com.badlogic.gdx.utils.Pool.Poolable; import com.badlogic.gdx.utils.Pools; import com.badlogic.gdx.utils.Scaling; 
import com.badlogic.gdx.utils.SnapshotArray; import com.badlogic.gdx.utils.viewport.ScalingViewport; import com.badlogic.gdx.utils.viewport.Viewport; /** A 2D scene graph containing hierarchies of {@link Actor actors}. Stage handles the viewport and distributes input events. * <p> * {@link #setViewport(Viewport)} controls the coordinates used within the stage and sets up the camera used to convert between * stage coordinates and screen coordinates. * <p> * A stage must receive input events so it can distribute them to actors. This is typically done by passing the stage to * {@link Input#setInputProcessor(com.badlogic.gdx.InputProcessor) Gdx.input.setInputProcessor}. An {@link InputMultiplexer} may * be used to handle input events before or after the stage does. If an actor handles an event by returning true from the input * method, then the stage's input method will also return true, causing subsequent InputProcessors to not receive the event. * <p> * The Stage and its constituents (like Actors and Listeners) are not thread-safe and should only be updated and queried from a * single thread (presumably the main render thread). Methods should be reentrant, so you can update Actors and Stages from within * callbacks and handlers. * @author mzechner * @author Nathan Sweet */ public class Stage extends InputAdapter implements Disposable { /** True if any actor has ever had debug enabled. 
*/ static boolean debug; private Viewport viewport; private final Batch batch; private boolean ownsBatch; private Group root; private final Vector2 tempCoords = new Vector2(); private final Actor[] pointerOverActors = new Actor[20]; private final boolean[] pointerTouched = new boolean[20]; private final int[] pointerScreenX = new int[20]; private final int[] pointerScreenY = new int[20]; private int mouseScreenX, mouseScreenY; private Actor mouseOverActor; private Actor keyboardFocus, scrollFocus; private final SnapshotArray<TouchFocus> touchFocuses = new SnapshotArray(true, 4, TouchFocus.class); private boolean actionsRequestRendering = true; private ShapeRenderer debugShapes; private boolean debugInvisible, debugAll, debugUnderMouse, debugParentUnderMouse; private Debug debugTableUnderMouse = Debug.none; private final Color debugColor = new Color(0, 1, 0, 0.85f); /** Creates a stage with a {@link ScalingViewport} set to {@link Scaling#stretch}. The stage will use its own {@link Batch} * which will be disposed when the stage is disposed. */ public Stage () { this(new ScalingViewport(Scaling.stretch, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), new OrthographicCamera()), new SpriteBatch()); ownsBatch = true; } /** Creates a stage with the specified viewport. The stage will use its own {@link Batch} which will be disposed when the stage * is disposed. */ public Stage (Viewport viewport) { this(viewport, new SpriteBatch()); ownsBatch = true; } /** Creates a stage with the specified viewport and batch. This can be used to avoid creating a new batch (which can be * somewhat slow) if multiple stages are used during an application's life time. * @param batch Will not be disposed if {@link #dispose()} is called, handle disposal yourself. 
*/ public Stage (Viewport viewport, Batch batch) { if (viewport == null) throw new IllegalArgumentException("viewport cannot be null."); if (batch == null) throw new IllegalArgumentException("batch cannot be null."); this.viewport = viewport; this.batch = batch; root = new Group(); root.setStage(this); viewport.update(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), true); } public void draw () { Camera camera = viewport.getCamera(); camera.update(); if (!root.isVisible()) return; Batch batch = this.batch; batch.setProjectionMatrix(camera.combined); batch.begin(); root.draw(batch, 1); batch.end(); if (debug) drawDebug(); } private void drawDebug () { if (debugShapes == null) { debugShapes = new ShapeRenderer(); debugShapes.setAutoShapeType(true); } if (debugUnderMouse || debugParentUnderMouse || debugTableUnderMouse != Debug.none) { screenToStageCoordinates(tempCoords.set(Gdx.input.getX(), Gdx.input.getY())); Actor actor = hit(tempCoords.x, tempCoords.y, true); if (actor == null) return; if (debugParentUnderMouse && actor.parent != null) actor = actor.parent; if (debugTableUnderMouse == Debug.none) actor.setDebug(true); else { while (actor != null) { if (actor instanceof Table) break; actor = actor.parent; } if (actor == null) return; ((Table)actor).debug(debugTableUnderMouse); } if (debugAll && actor instanceof Group) ((Group)actor).debugAll(); disableDebug(root, actor); } else { if (debugAll) root.debugAll(); } Gdx.gl.glEnable(GL20.GL_BLEND); debugShapes.setProjectionMatrix(viewport.getCamera().combined); debugShapes.begin(); root.drawDebug(debugShapes); debugShapes.end(); } /** Disables debug on all actors recursively except the specified actor and any children. 
*/ private void disableDebug (Actor actor, Actor except) { if (actor == except) return; actor.setDebug(false); if (actor instanceof Group) { SnapshotArray<Actor> children = ((Group)actor).children; for (int i = 0, n = children.size; i < n; i++) disableDebug(children.get(i), except); } } /** Calls {@link #act(float)} with {@link Graphics#getDeltaTime()}, limited to a minimum of 30fps. */ public void act () { act(Math.min(Gdx.graphics.getDeltaTime(), 1 / 30f)); } /** Calls the {@link Actor#act(float)} method on each actor in the stage. Typically called each frame. This method also fires * enter and exit events. * @param delta Time in seconds since the last frame. */ public void act (float delta) { // Update over actors. Done in act() because actors may change position, which can fire enter/exit without an input event. for (int pointer = 0, n = pointerOverActors.length; pointer < n; pointer++) { Actor overLast = pointerOverActors[pointer]; // Check if pointer is gone. if (!pointerTouched[pointer]) { if (overLast != null) { pointerOverActors[pointer] = null; screenToStageCoordinates(tempCoords.set(pointerScreenX[pointer], pointerScreenY[pointer])); // Exit over last. InputEvent event = Pools.obtain(InputEvent.class); event.setType(InputEvent.Type.exit); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setRelatedActor(overLast); event.setPointer(pointer); overLast.fire(event); Pools.free(event); } continue; } // Update over actor for the pointer. pointerOverActors[pointer] = fireEnterAndExit(overLast, pointerScreenX[pointer], pointerScreenY[pointer], pointer); } // Update over actor for the mouse on the desktop. 
ApplicationType type = Gdx.app.getType(); if (type == ApplicationType.Desktop || type == ApplicationType.Applet || type == ApplicationType.WebGL) mouseOverActor = fireEnterAndExit(mouseOverActor, mouseScreenX, mouseScreenY, -1); root.act(delta); } private Actor fireEnterAndExit (Actor overLast, int screenX, int screenY, int pointer) { // Find the actor under the point. screenToStageCoordinates(tempCoords.set(screenX, screenY)); Actor over = hit(tempCoords.x, tempCoords.y, true); if (over == overLast) return overLast; // Exit overLast. if (overLast != null) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setType(InputEvent.Type.exit); event.setRelatedActor(over); overLast.fire(event); Pools.free(event); } // Enter over. if (over != null) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setType(InputEvent.Type.enter); event.setRelatedActor(overLast); over.fire(event); Pools.free(event); } return over; } /** Applies a touch down event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. 
*/ public boolean touchDown (int screenX, int screenY, int pointer, int button) { if (!isInsideViewport(screenX, screenY)) return false; pointerTouched[pointer] = true; pointerScreenX[pointer] = screenX; pointerScreenY[pointer] = screenY; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setType(Type.touchDown); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setButton(button); Actor target = hit(tempCoords.x, tempCoords.y, true); if (target == null) { if (root.getTouchable() == Touchable.enabled) root.fire(event); } else { target.fire(event); } boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a touch moved event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. Only {@link InputListener listeners} that returned true for touchDown will receive this event. */ public boolean touchDragged (int screenX, int screenY, int pointer) { pointerScreenX[pointer] = screenX; pointerScreenY[pointer] = screenY; mouseScreenX = screenX; mouseScreenY = screenY; if (touchFocuses.size == 0) return false; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setType(Type.touchDragged); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] focuses = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = focuses[i]; if (focus.pointer != pointer) continue; if (!touchFocuses.contains(focus, true)) continue; // Touch focus already gone. 
event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); if (focus.listener.handle(event)) event.handle(); } touchFocuses.end(); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a touch up event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the event. * Only {@link InputListener listeners} that returned true for touchDown will receive this event. */ public boolean touchUp (int screenX, int screenY, int pointer, int button) { pointerTouched[pointer] = false; pointerScreenX[pointer] = screenX; pointerScreenY[pointer] = screenY; if (touchFocuses.size == 0) return false; screenToStageCoordinates(tempCoords.set(screenX, screenY)); InputEvent event = Pools.obtain(InputEvent.class); event.setType(Type.touchUp); event.setStage(this); event.setStageX(tempCoords.x); event.setStageY(tempCoords.y); event.setPointer(pointer); event.setButton(button); SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] focuses = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = focuses[i]; if (focus.pointer != pointer || focus.button != button) continue; if (!touchFocuses.removeValue(focus, true)) continue; // Touch focus already gone. event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); if (focus.listener.handle(event)) event.handle(); Pools.free(focus); } touchFocuses.end(); boolean handled = event.isHandled(); Pools.free(event); return handled; } /** Applies a mouse moved event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the * event. This event only occurs on the desktop. 
*/
	public boolean mouseMoved (int screenX, int screenY) {
		if (!isInsideViewport(screenX, screenY)) return false;

		mouseScreenX = screenX;
		mouseScreenY = screenY;
		screenToStageCoordinates(tempCoords.set(screenX, screenY));

		InputEvent event = Pools.obtain(InputEvent.class);
		event.setStage(this);
		event.setType(Type.mouseMoved);
		event.setStageX(tempCoords.x);
		event.setStageY(tempCoords.y);

		// Fall back to the root when nothing is under the cursor.
		Actor target = hit(tempCoords.x, tempCoords.y, true);
		if (target == null) target = root;

		target.fire(event);
		boolean wasHandled = event.isHandled();
		Pools.free(event);
		return wasHandled;
	}

	/** Applies a mouse scroll event to the stage and returns true if an actor in the scene {@link Event#handle() handled} the
	 * event. This event only occurs on the desktop. */
	public boolean scrolled (int amount) {
		Actor target = scrollFocus == null ? root : scrollFocus;

		// Scroll events are dispatched at the last known mouse position.
		screenToStageCoordinates(tempCoords.set(mouseScreenX, mouseScreenY));

		InputEvent event = Pools.obtain(InputEvent.class);
		event.setStage(this);
		event.setType(InputEvent.Type.scrolled);
		event.setScrollAmount(amount);
		event.setStageX(tempCoords.x);
		event.setStageY(tempCoords.y);
		target.fire(event);
		boolean wasHandled = event.isHandled();
		Pools.free(event);
		return wasHandled;
	}

	/** Applies a key down event to the actor that has {@link Stage#setKeyboardFocus(Actor) keyboard focus}, if any, and returns
	 * true if the event was {@link Event#handle() handled}. */
	public boolean keyDown (int keyCode) {
		Actor target = keyboardFocus == null ? root : keyboardFocus;
		InputEvent event = Pools.obtain(InputEvent.class);
		event.setStage(this);
		event.setType(InputEvent.Type.keyDown);
		event.setKeyCode(keyCode);
		target.fire(event);
		boolean wasHandled = event.isHandled();
		Pools.free(event);
		return wasHandled;
	}

	/** Applies a key up event to the actor that has {@link Stage#setKeyboardFocus(Actor) keyboard focus}, if any, and returns true
	 * if the event was {@link Event#handle() handled}. */
	public boolean keyUp (int keyCode) {
		Actor target = keyboardFocus == null ? root : keyboardFocus;
		InputEvent event = Pools.obtain(InputEvent.class);
		event.setStage(this);
		event.setType(InputEvent.Type.keyUp);
		event.setKeyCode(keyCode);
		target.fire(event);
		boolean wasHandled = event.isHandled();
		Pools.free(event);
		return wasHandled;
	}

	/** Applies a key typed event to the actor that has {@link Stage#setKeyboardFocus(Actor) keyboard focus}, if any, and returns
	 * true if the event was {@link Event#handle() handled}. */
	public boolean keyTyped (char character) {
		Actor target = keyboardFocus == null ? root : keyboardFocus;
		InputEvent event = Pools.obtain(InputEvent.class);
		event.setStage(this);
		event.setType(InputEvent.Type.keyTyped);
		event.setCharacter(character);
		target.fire(event);
		boolean wasHandled = event.isHandled();
		Pools.free(event);
		return wasHandled;
	}

	/** Adds the listener to be notified for all touchDragged and touchUp events for the specified pointer and button. The actor
	 * will be used as the {@link Event#getListenerActor() listener actor} and {@link Event#getTarget() target}. */
	public void addTouchFocus (EventListener listener, Actor listenerActor, Actor target, int pointer, int button) {
		TouchFocus focus = Pools.obtain(TouchFocus.class);
		focus.listenerActor = listenerActor;
		focus.target = target;
		focus.listener = listener;
		focus.pointer = pointer;
		focus.button = button;
		touchFocuses.add(focus);
	}

	/** Removes the listener from being notified for all touchDragged and touchUp events for the specified pointer and button. Note
	 * the listener may never receive a touchUp event if this method is used.
*/ public void removeTouchFocus (EventListener listener, Actor listenerActor, Actor target, int pointer, int button) { SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; for (int i = touchFocuses.size - 1; i >= 0; i--) { TouchFocus focus = touchFocuses.get(i); if (focus.listener == listener && focus.listenerActor == listenerActor && focus.target == target && focus.pointer == pointer && focus.button == button) { touchFocuses.removeIndex(i); Pools.free(focus); } } } /** Cancels touch focus for the specified actor. * @see #cancelTouchFocus() */ public void cancelTouchFocus (Actor actor) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.touchUp); event.setStageX(Integer.MIN_VALUE); event.setStageY(Integer.MIN_VALUE); // Cancel all current touch focuses for the specified listener, allowing for concurrent modification, and never cancel the // same focus twice. SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] items = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = items[i]; if (focus.listenerActor != actor) continue; if (!touchFocuses.removeValue(focus, true)) continue; // Touch focus already gone. event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); event.setPointer(focus.pointer); event.setButton(focus.button); focus.listener.handle(event); // Cannot return TouchFocus to pool, as it may still be in use (eg if cancelTouchFocus is called from touchDragged). } touchFocuses.end(); Pools.free(event); } /** Sends a touchUp event to all listeners that are registered to receive touchDragged and touchUp events and removes their * touch focus. This method removes all touch focus listeners, but sends a touchUp event so that the state of the listeners * remains consistent (listeners typically expect to receive touchUp eventually). The location of the touchUp is * {@link Integer#MIN_VALUE}. 
Listeners can use {@link InputEvent#isTouchFocusCancel()} to ignore this event if needed. */ public void cancelTouchFocus () { cancelTouchFocusExcept(null, null); } /** Cancels touch focus for all listeners except the specified listener. * @see #cancelTouchFocus() */ public void cancelTouchFocusExcept (EventListener exceptListener, Actor exceptActor) { InputEvent event = Pools.obtain(InputEvent.class); event.setStage(this); event.setType(InputEvent.Type.touchUp); event.setStageX(Integer.MIN_VALUE); event.setStageY(Integer.MIN_VALUE); // Cancel all current touch focuses except for the specified listener, allowing for concurrent modification, and never // cancel the same focus twice. SnapshotArray<TouchFocus> touchFocuses = this.touchFocuses; TouchFocus[] items = touchFocuses.begin(); for (int i = 0, n = touchFocuses.size; i < n; i++) { TouchFocus focus = items[i]; if (focus.listener == exceptListener && focus.listenerActor == exceptActor) continue; if (!touchFocuses.removeValue(focus, true)) continue; // Touch focus already gone. event.setTarget(focus.target); event.setListenerActor(focus.listenerActor); event.setPointer(focus.pointer); event.setButton(focus.button); focus.listener.handle(event); // Cannot return TouchFocus to pool, as it may still be in use (eg if cancelTouchFocus is called from touchDragged). } touchFocuses.end(); Pools.free(event); } /** Adds an actor to the root of the stage. * @see Group#addActor(Actor) */ public void addActor (Actor actor) { root.addActor(actor); } /** Adds an action to the root of the stage. * @see Group#addAction(Action) */ public void addAction (Action action) { root.addAction(action); } /** Returns the root's child actors. * @see Group#getChildren() */ public Array<Actor> getActors () { return root.children; } /** Adds a listener to the root. * @see Actor#addListener(EventListener) */ public boolean addListener (EventListener listener) { return root.addListener(listener); } /** Removes a listener from the root. 
* @see Actor#removeListener(EventListener) */ public boolean removeListener (EventListener listener) { return root.removeListener(listener); } /** Adds a capture listener to the root. * @see Actor#addCaptureListener(EventListener) */ public boolean addCaptureListener (EventListener listener) { return root.addCaptureListener(listener); } /** Removes a listener from the root. * @see Actor#removeCaptureListener(EventListener) */ public boolean removeCaptureListener (EventListener listener) { return root.removeCaptureListener(listener); } /** Removes the root's children, actions, and listeners. */ public void clear () { unfocusAll(); root.clear(); } /** Removes the touch, keyboard, and scroll focused actors. */ public void unfocusAll () { setScrollFocus(null); setKeyboardFocus(null); cancelTouchFocus(); } /** Removes the touch, keyboard, and scroll focus for the specified actor and any descendants. */ public void unfocus (Actor actor) { cancelTouchFocus(actor); if (scrollFocus != null && scrollFocus.isDescendantOf(actor)) setScrollFocus(null); if (keyboardFocus != null && keyboardFocus.isDescendantOf(actor)) setKeyboardFocus(null); } /** Sets the actor that will receive key events. * @param actor May be null. * @return true if the unfocus and focus events were not cancelled by a {@link FocusListener}. 
*/ public boolean setKeyboardFocus (Actor actor) { if (keyboardFocus == actor) return true; FocusEvent event = Pools.obtain(FocusEvent.class); event.setStage(this); event.setType(FocusEvent.Type.keyboard); Actor oldKeyboardFocus = keyboardFocus; if (oldKeyboardFocus != null) { event.setFocused(false); event.setRelatedActor(actor); oldKeyboardFocus.fire(event); } boolean success = !event.isCancelled(); if (success) { keyboardFocus = actor; if (actor != null) { event.setFocused(true); event.setRelatedActor(oldKeyboardFocus); actor.fire(event); success = !event.isCancelled(); if (!success) setKeyboardFocus(oldKeyboardFocus); } } Pools.free(event); return success; } /** Gets the actor that will receive key events. * @return May be null. */ public Actor getKeyboardFocus () { return keyboardFocus; } /** Sets the actor that will receive scroll events. * @param actor May be null. * @return true if the unfocus and focus events were not cancelled by a {@link FocusListener}. */ public boolean setScrollFocus (Actor actor) { if (scrollFocus == actor) return true; FocusEvent event = Pools.obtain(FocusEvent.class); event.setStage(this); event.setType(FocusEvent.Type.scroll); Actor oldScrollFocus = scrollFocus; if (oldScrollFocus != null) { event.setFocused(false); event.setRelatedActor(actor); oldScrollFocus.fire(event); } boolean success = !event.isCancelled(); if (success) { scrollFocus = actor; if (actor != null) { event.setFocused(true); event.setRelatedActor(oldScrollFocus); actor.fire(event); success = !event.isCancelled(); if (!success) setScrollFocus(oldScrollFocus); } } Pools.free(event); return success; } /** Gets the actor that will receive scroll events. * @return May be null. */ public Actor getScrollFocus () { return scrollFocus; } public Batch getBatch () { return batch; } public Viewport getViewport () { return viewport; } public void setViewport (Viewport viewport) { this.viewport = viewport; } /** The viewport's world width. 
*/ public float getWidth () { return viewport.getWorldWidth(); } /** The viewport's world height. */ public float getHeight () { return viewport.getWorldHeight(); } /** The viewport's camera. */ public Camera getCamera () { return viewport.getCamera(); } /** Returns the root group which holds all actors in the stage. */ public Group getRoot () { return root; } /** Replaces the root group. Usually this is not necessary but a subclass may be desired in some cases, eg being notified of * {@link Group#childrenChanged()}. */ public void setRoot (Group root) { this.root = root; } /** Returns the {@link Actor} at the specified location in stage coordinates. Hit testing is performed in the order the actors * were inserted into the stage, last inserted actors being tested first. To get stage coordinates from screen coordinates, use * {@link #screenToStageCoordinates(Vector2)}. * @param touchable If true, the hit detection will respect the {@link Actor#setTouchable(Touchable) touchability}. * @return May be null if no actor was hit. */ public Actor hit (float stageX, float stageY, boolean touchable) { root.parentToLocalCoordinates(tempCoords.set(stageX, stageY)); return root.hit(tempCoords.x, tempCoords.y, touchable); } /** Transforms the screen coordinates to stage coordinates. * @param screenCoords Input screen coordinates and output for resulting stage coordinates. */ public Vector2 screenToStageCoordinates (Vector2 screenCoords) { viewport.unproject(screenCoords); return screenCoords; } /** Transforms the stage coordinates to screen coordinates. * @param stageCoords Input stage coordinates and output for resulting screen coordinates. */ public Vector2 stageToScreenCoordinates (Vector2 stageCoords) { viewport.project(stageCoords); stageCoords.y = viewport.getScreenHeight() - stageCoords.y; return stageCoords; } /** Transforms the coordinates to screen coordinates. The coordinates can be anywhere in the stage since the transform matrix * describes how to convert them. 
The transform matrix is typically obtained from {@link Batch#getTransformMatrix()} during * {@link Actor#draw(Batch, float)}. * @see Actor#localToStageCoordinates(Vector2) */ public Vector2 toScreenCoordinates (Vector2 coords, Matrix4 transformMatrix) { return viewport.toScreenCoordinates(coords, transformMatrix); } /** Calculates window scissor coordinates from local coordinates using the batch's current transformation matrix. * @see ScissorStack#calculateScissors(Camera, float, float, float, float, Matrix4, Rectangle, Rectangle) */ public void calculateScissors (Rectangle localRect, Rectangle scissorRect) { viewport.calculateScissors(batch.getTransformMatrix(), localRect, scissorRect); Matrix4 transformMatrix; if (debugShapes != null && debugShapes.isDrawing()) transformMatrix = debugShapes.getTransformMatrix(); else transformMatrix = batch.getTransformMatrix(); viewport.calculateScissors(transformMatrix, localRect, scissorRect); } /** If true, any actions executed during a call to {@link #act()}) will result in a call to {@link Graphics#requestRendering()} * . Widgets that animate or otherwise require additional rendering may check this setting before calling * {@link Graphics#requestRendering()}. Default is true. */ public void setActionsRequestRendering (boolean actionsRequestRendering) { this.actionsRequestRendering = actionsRequestRendering; } public boolean getActionsRequestRendering () { return actionsRequestRendering; } /** The default color that can be used by actors to draw debug lines. */ public Color getDebugColor () { return debugColor; } /** If true, debug lines are shown for actors even when {@link Actor#isVisible()} is false. */ public void setDebugInvisible (boolean debugInvisible) { this.debugInvisible = debugInvisible; } /** If true, debug lines are shown for all actors. 
*/ public void setDebugAll (boolean debugAll) { if (this.debugAll == debugAll) return; this.debugAll = debugAll; if (debugAll) debug = true; else root.setDebug(false, true); } /** If true, debug is enabled only for the actor under the mouse. Can be combined with {@link #setDebugAll(boolean)}. */ public void setDebugUnderMouse (boolean debugUnderMouse) { if (this.debugUnderMouse == debugUnderMouse) return; this.debugUnderMouse = debugUnderMouse; if (debugUnderMouse) debug = true; else root.setDebug(false, true); } /** If true, debug is enabled only for the parent of the actor under the mouse. Can be combined with * {@link #setDebugAll(boolean)}. */ public void setDebugParentUnderMouse (boolean debugParentUnderMouse) { if (this.debugParentUnderMouse == debugParentUnderMouse) return; this.debugParentUnderMouse = debugParentUnderMouse; if (debugParentUnderMouse) debug = true; else root.setDebug(false, true); } /** If not {@link Debug#none}, debug is enabled only for the first ascendant of the actor under the mouse that is a table. Can * be combined with {@link #setDebugAll(boolean)}. * @param debugTableUnderMouse May be null for {@link Debug#none}. */ public void setDebugTableUnderMouse (Debug debugTableUnderMouse) { if (debugTableUnderMouse == null) debugTableUnderMouse = Debug.none; if (this.debugTableUnderMouse == debugTableUnderMouse) return; this.debugTableUnderMouse = debugTableUnderMouse; if (debugTableUnderMouse != Debug.none) debug = true; else root.setDebug(false, true); } /** If true, debug is enabled only for the first ascendant of the actor under the mouse that is a table. Can be combined with * {@link #setDebugAll(boolean)}. */ public void setDebugTableUnderMouse (boolean debugTableUnderMouse) { setDebugTableUnderMouse(debugTableUnderMouse ? Debug.all : Debug.none); } public void dispose () { clear(); if (ownsBatch) batch.dispose(); } /** Check if screen coordinates are inside the viewport's screen area. 
*/ protected boolean isInsideViewport (int screenX, int screenY) { int x0 = viewport.getScreenX(); int x1 = x0 + viewport.getScreenWidth(); int y0 = viewport.getScreenY(); int y1 = y0 + viewport.getScreenHeight(); screenY = Gdx.graphics.getHeight() - screenY; return screenX >= x0 && screenX < x1 && screenY >= y0 && screenY < y1; } /** Internal class for managing touch focus. Public only for GWT. * @author Nathan Sweet */ public static final class TouchFocus implements Poolable { EventListener listener; Actor listenerActor, target; int pointer, button; public void reset () { listenerActor = null; listener = null; target = null; } } }
Stage: allow to access/read debugAll Variable (#4663) its already allowed to change the debugAll variable, why not allow to access/read it as well ? (its realy inconvinient to access it otherwise)
gdx/src/com/badlogic/gdx/scenes/scene2d/Stage.java
Stage: allow to access/read debugAll Variable (#4663)
Java
apache-2.0
7c07f62fbc954ab30b948d6f165e648809930bdd
0
yufenghua/zhaoxi,yufenghua/zhaoxi,yufenghua/zhaoxi
package com.ylp.date.security.impl; import org.apache.commons.lang.StringUtils; import com.ylp.date.security.Pmchecker; public class RolePmChecker implements Pmchecker { private static final String ROLE_AUDITOR = "auditor"; private String role; public RolePmChecker(String role) { this.role = role; } public boolean check(String oper, Object obj) { return StringUtils.equals(role, ROLE_AUDITOR); } public boolean check(String oper, Object obj, boolean throwex) { return StringUtils.equals(role, ROLE_AUDITOR); } public boolean check(String oper) { return StringUtils.equals(role, ROLE_AUDITOR); } public boolean check(String oper, boolean throwex) { return StringUtils.equals(role, ROLE_AUDITOR); } }
src/com/ylp/date/security/impl/RolePmChecker.java
package com.ylp.date.security.impl; import com.ylp.date.security.Pmchecker; public class RolePmChecker implements Pmchecker { private String role; public RolePmChecker(String role) { this.role = role; } public boolean check(String oper, Object obj) { return true; } public boolean check(String oper, Object obj, boolean throwex) { return true; } public boolean check(String oper) { return true; } public boolean check(String oper, boolean throwex) { return true; } }
角色权限简单处理
src/com/ylp/date/security/impl/RolePmChecker.java
角色权限简单处理
Java
apache-2.0
9544347e57843718c3caf439b3111610cfd04eea
0
mikosik/smooth-build,mikosik/smooth-build
package org.smoothbuild.db.hashed; import static org.smoothbuild.SmoothConstants.CHARSET; import com.google.common.hash.HashCode; import com.google.common.hash.HashFunction; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; public class Hash { public static Hasher newHasher() { return function().newHasher(); } public static HashCode string(String string) { return function().hashString(string, CHARSET); } public static HashCode integer(int value) { return function().hashInt(value); } public static HashCode bytes(byte[] bytes) { return function().hashBytes(bytes); } public static int size() { return function().bits() / 8; } private static HashFunction function() { return Hashing.sha1(); } }
src/main/java/org/smoothbuild/db/hashed/Hash.java
package org.smoothbuild.db.hashed; import static org.smoothbuild.SmoothConstants.CHARSET; import com.google.common.hash.HashCode; import com.google.common.hash.HashFunction; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; public class Hash { public static Hasher newHasher() { return function().newHasher(); } public static HashCode string(String string) { return Hash.function().hashString(string, CHARSET); } public static HashCode integer(int value) { return Hash.function().hashInt(value); } public static HashCode bytes(byte[] bytes) { return Hash.function().hashBytes(bytes); } public static int size() { return Hash.function().bits() / 8; } private static HashFunction function() { return Hashing.sha1(); } }
refactored Hash class
src/main/java/org/smoothbuild/db/hashed/Hash.java
refactored Hash class
Java
apache-2.0
a150df5288ee73d9bbb90f9317c5713d20f88537
0
lookout/commons-compress,lookout/commons-compress,krosenvold/commons-compress,mohanaraosv/commons-compress,mohanaraosv/commons-compress,krosenvold/commons-compress,apache/commons-compress,mohanaraosv/commons-compress,lookout/commons-compress,apache/commons-compress,krosenvold/commons-compress,apache/commons-compress
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.compressors.snappy; import java.io.IOException; import java.io.InputStream; import org.apache.commons.compress.compressors.CompressorInputStream; /** * CompressorInputStream for the raw Snappy format. * * <p>This implementation uses an internal buffer in order to handle * the back-references that are at the heart of the LZ77 algorithm. * The size of the buffer must be at least as big as the biggest * offset used in the compressed stream. 
The current version of the * Snappy algorithm as defined by Google works on 32k blocks and * doesn't contain offsets bigger than 32k which is the default block * size used by this class.</p> * * @see "http://code.google.com/p/snappy/source/browse/trunk/format_description.txt" * @since 1.7 */ public class SnappyCompressorInputStream extends CompressorInputStream { /** Mask used to determine the type of "tag" is being processed */ private static final int TAG_MASK = 0x03; /** Default block size */ public static final int DEFAULT_BLOCK_SIZE = 32768; /** Buffer to write decompressed bytes to for back-references */ private final byte[] decompressBuf; /** * One behind the index of the last byte in the buffer that was * written */ private int writeIndex; /** * Index of the next byte to be read. */ private int readIndex; /** The actual block size specified */ private final int blockSize; /** The underlying stream to read compressed data from */ private final InputStream in; /** The size of the uncompressed data */ private final int size; /** * Number of uncompressed bytes still to be read. */ private int uncompressedBytesRemaining; // used in no-arg read method private final byte[] oneByte = new byte[1]; private boolean endReached = false; /** * Constructor using the default buffer size of 32k. * * @param is * An InputStream to read compressed data from * * @throws IOException */ public SnappyCompressorInputStream(final InputStream is) throws IOException { this(is, DEFAULT_BLOCK_SIZE); } /** * Constructor using a configurable buffer size. 
* * @param is * An InputStream to read compressed data from * @param blockSize * The block size used in compression * * @throws IOException */ public SnappyCompressorInputStream(final InputStream is, final int blockSize) throws IOException { this.in = is; this.blockSize = blockSize; this.decompressBuf = new byte[blockSize * 3]; this.writeIndex = readIndex = 0; uncompressedBytesRemaining = size = (int) readSize(); } /** {@inheritDoc} */ @Override public int read() throws IOException { return read(oneByte, 0, 1) == -1 ? -1 : (oneByte[0] & 0xFF); } /** {@inheritDoc} */ @Override public void close() throws IOException { in.close(); } /** {@inheritDoc} */ @Override public int available() { return writeIndex - readIndex; } /** * {@inheritDoc} */ @Override public int read(byte[] b, int off, int len) throws IOException { if (endReached) { return -1; } final int avail = available(); if (len > avail) { fill(len - avail); } int readable = Math.min(len, available()); System.arraycopy(decompressBuf, readIndex, b, off, readable); readIndex += readable; if (readIndex > blockSize) { slideBuffer(); } return readable; } /** * Try to fill the buffer with enough bytes to satisfy the current * read request. * * @param len the number of uncompressed bytes to read */ private void fill(int len) throws IOException { if (uncompressedBytesRemaining == 0) { endReached = true; } int readNow = Math.min(len, uncompressedBytesRemaining); while (readNow > 0) { final int b = readOneByte(); int length = 0; long offset = 0; switch (b & TAG_MASK) { case 0x00: length = readLiteralLength(b); if (expandLiteral(length)) { return; } break; case 0x01: /* * These elements can encode lengths between [4..11] bytes and * offsets between [0..2047] bytes. (len-4) occupies three bits * and is stored in bits [2..4] of the tag byte. The offset * occupies 11 bits, of which the upper three are stored in the * upper three bits ([5..7]) of the tag byte, and the lower * eight are stored in a byte following the tag byte. 
*/ length = 4 + ((b >> 2) & 0x07); offset = (b & 0xE0) << 3; offset |= readOneByte(); if (expandCopy(offset, length)) { return; } break; case 0x02: /* * These elements can encode lengths between [1..64] and offsets * from [0..65535]. (len-1) occupies six bits and is stored in * the upper six bits ([2..7]) of the tag byte. The offset is * stored as a little-endian 16-bit integer in the two bytes * following the tag byte. */ length = (b >> 2) + 1; offset = readOneByte(); offset |= readOneByte() << 8; if (expandCopy(offset, length)) { return; } break; case 0x03: /* * These are like the copies with 2-byte offsets (see previous * subsection), except that the offset is stored as a 32-bit * integer instead of a 16-bit integer (and thus will occupy * four bytes). */ length = (b >> 2) + 1; offset = readOneByte(); offset |= readOneByte() << 8; offset |= readOneByte() << 16; offset |= readOneByte() << 24; if (expandCopy(offset, length)) { return; } break; } readNow -= length; uncompressedBytesRemaining -= length; } } /** * Slide buffer. * * <p>Move all bytes of the buffer after the first block down to * the beginning of the buffer.</p> */ private void slideBuffer() { System.arraycopy(decompressBuf, blockSize, decompressBuf, 0, blockSize * 2); writeIndex -= blockSize; readIndex -= blockSize; } /* * For literals up to and including 60 bytes in length, the * upper six bits of the tag byte contain (len-1). The literal * follows immediately thereafter in the bytestream. - For * longer literals, the (len-1) value is stored after the tag * byte, little-endian. The upper six bits of the tag byte * describe how many bytes are used for the length; 60, 61, 62 * or 63 for 1-4 bytes, respectively. The literal itself follows * after the length. 
*/ private int readLiteralLength(int b) throws IOException { int length; switch (b >> 2) { case 60: length = readOneByte(); break; case 61: length = readOneByte(); length |= (readOneByte() << 8); break; case 62: length = readOneByte(); length |= (readOneByte() << 8); length |= (readOneByte() << 16); break; case 63: length = readOneByte(); length |= (readOneByte() << 8); length |= (readOneByte() << 16); length |= (readOneByte() << 24); break; default: length = b >> 2; break; } return length + 1; } /** * Literals are uncompressed data stored directly in the byte stream. * * @param length * The number of bytes to read from the underlying stream * * @throws IOException * If the first byte cannot be read for any reason other than * end of file, or if the input stream has been closed, or if * some other I/O error occurs. * @return True if the decompressed data should be flushed */ private boolean expandLiteral(final int length) throws IOException { int bytesRead = in.read(decompressBuf, writeIndex, length); count(bytesRead); if (length != bytesRead) { throw new IOException("Premature end of stream"); } writeIndex += length; return (writeIndex >= (2 * this.blockSize)); } /** * Copies are references back into previous decompressed data, telling the * decompressor to reuse data it has previously decoded. They encode two * values: The offset, saying how many bytes back from the current position * to read, and the length, how many bytes to copy. Offsets of zero can be * encoded, but are not legal; similarly, it is possible to encode * backreferences that would go past the end of the block (offset > current * decompressed position), which is also nonsensical and thus not allowed. 
* * @param off * The offset from the backward from the end of expanded stream * @param length * The number of bytes to copy * * @throws IOException * An the offset expands past the front of the decompression * buffer * @return True if the decompressed data should be flushed */ private boolean expandCopy(final long off, int length) throws IOException { if (off > blockSize) { throw new IOException("Offset is larger than block size"); } int offset = (int) off; if (offset == 1) { byte lastChar = decompressBuf[writeIndex - 1]; for (int i = 0; i < length; i++) { decompressBuf[writeIndex++] = lastChar; } } else if (length < offset) { System.arraycopy(decompressBuf, writeIndex - offset, decompressBuf, writeIndex, length); writeIndex += length; } else { int fullRotations = length / offset; int pad = length - (offset * fullRotations); while (fullRotations-- != 0) { System.arraycopy(decompressBuf, writeIndex - offset, decompressBuf, writeIndex, offset); writeIndex += offset; } if (pad > 0) { System.arraycopy(decompressBuf, writeIndex - offset, decompressBuf, writeIndex, pad); writeIndex += pad; } } return (writeIndex >= (2 * this.blockSize)); } /** * This helper method reads the next byte of data from the input stream. The * value byte is returned as an <code>int</code> in the range <code>0</code> * to <code>255</code>. If no byte is available because the end of the * stream has been reached, an Exception is thrown. * * @return The next byte of data * @throws IOException * EOF is reached or error reading the stream */ private int readOneByte() throws IOException { int b = in.read(); if (b == -1) { throw new IOException("Premature end of stream"); } count(1); return b & 0xFF; } /** * The stream starts with the uncompressed length (up to a maximum of 2^32 - * 1), stored as a little-endian varint. Varints consist of a series of * bytes, where the lower 7 bits are data and the upper bit is set iff there * are more bytes to be read. 
In other words, an uncompressed length of 64 * would be stored as 0x40, and an uncompressed length of 2097150 (0x1FFFFE) * would be stored as 0xFE 0xFF 0x7F. * * @return The size of the uncompressed data * * @throws IOException * Could not read a byte */ private long readSize() throws IOException { int index = 0; long sz = 0; int b = 0; do { b = readOneByte(); sz |= (b & 0x7f) << (index++ * 7); } while (0 != (b & 0x80)); return sz; } /** * Get the uncompressed size of the stream * * @return the uncompressed size */ public int getSize() { return size; } }
src/main/java/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.compressors.snappy; import java.io.IOException; import java.io.InputStream; import org.apache.commons.compress.compressors.CompressorInputStream; /** * CompressorInputStream for the raw Snappy format. * * <p>This implementation uses an internal buffer in order to handle * the back-references that are at the heart of the LZ77 algorithm. * The size of the buffer must be at least as big as the biggest * offset used in the compressed stream. 
The current version of the * Snappy algorithm as defined by Google works on 32k blocks and * doesn't contain offsets bigger than 32k which is the default block * size used by this class.</p> * * @see "http://code.google.com/p/snappy/source/browse/trunk/format_description.txt" * @since 1.7 */ public class SnappyCompressorInputStream extends CompressorInputStream { /** Mask used to determine the type of "tag" is being processed */ private static final int TAG_MASK = 0x03; /** Default block size */ public static final int DEFAULT_BLOCK_SIZE = 32768; /** Buffer to write decompressed bytes to for back-references */ private final byte[] decompressBuf; /** * One behind the index of the last byte in the buffer that was * written */ private int writeIndex; /** * Index of the next byte to be read. */ private int readIndex; /** The actual block size specified */ private final int blockSize; /** The underlying stream to read compressed data from */ private final InputStream in; /** The size of the uncompressed data */ private final int size; /** * Number of uncompressed bytes still to be read. */ private int uncompressedBytesRemaining; // used in no-arg read method private final byte[] oneByte = new byte[1]; private boolean endReached = false; /** * Constructor using the default buffer size of 32k. * * @param is * An InputStream to read compressed data from * * @throws IOException */ public SnappyCompressorInputStream(final InputStream is) throws IOException { this(is, DEFAULT_BLOCK_SIZE); } /** * Constructor using a configurable buffer size. 
* * @param is * An InputStream to read compressed data from * @param blockSize * The block size used in compression * * @throws IOException */ public SnappyCompressorInputStream(final InputStream is, final int blockSize) throws IOException { this.in = is; this.blockSize = blockSize; this.decompressBuf = new byte[blockSize * 3]; this.writeIndex = readIndex = 0; uncompressedBytesRemaining = size = (int) readSize(); } /** {@inheritDoc} */ @Override public int read() throws IOException { return read(oneByte, 0, 1) == -1 ? -1 : (oneByte[0] & 0xFF); } /** {@inheritDoc} */ @Override public void close() throws IOException { in.close(); } /** {@inheritDoc} */ @Override public int available() { return writeIndex - readIndex; } /** * {@inheritDoc} */ @Override public int read(byte[] b, int off, int len) throws IOException { if (endReached) { return -1; } final int avail = available(); if (len > avail) { fill(len - avail); } int readable = Math.min(len, available()); System.arraycopy(decompressBuf, readIndex, b, off, readable); readIndex += readable; if (readIndex > blockSize) { slideBuffer(); } return readable; } /** * Try to fill the buffer with enough bytes to satisfy the current * read request. * * @param len the number of uncompressed bytes to read */ private void fill(int len) throws IOException { if (uncompressedBytesRemaining == 0) { endReached = true; } int readNow = Math.min(len, uncompressedBytesRemaining); while (readNow > 0) { final int b = readOneByte(); int length = 0; int offset = 0; switch (b & TAG_MASK) { case 0x00: length = readLiteralLength(b); if (expandLiteral(length)) { return; } break; case 0x01: /* * These elements can encode lengths between [4..11] bytes and * offsets between [0..2047] bytes. (len-4) occupies three bits * and is stored in bits [2..4] of the tag byte. The offset * occupies 11 bits, of which the upper three are stored in the * upper three bits ([5..7]) of the tag byte, and the lower * eight are stored in a byte following the tag byte. 
*/ length = 4 + ((b >> 2) & 0x07); offset = (b & 0xE0) << 3; offset |= readOneByte(); if (expandCopy(offset, length)) { return; } break; case 0x02: /* * These elements can encode lengths between [1..64] and offsets * from [0..65535]. (len-1) occupies six bits and is stored in * the upper six bits ([2..7]) of the tag byte. The offset is * stored as a little-endian 16-bit integer in the two bytes * following the tag byte. */ length = (b >> 2) + 1; offset = readOneByte(); offset |= readOneByte() << 8; if (expandCopy(offset, length)) { return; } break; case 0x03: /* * These are like the copies with 2-byte offsets (see previous * subsection), except that the offset is stored as a 32-bit * integer instead of a 16-bit integer (and thus will occupy * four bytes). */ length = (b >> 2) + 1; offset = readOneByte(); offset |= readOneByte() << 8; offset |= readOneByte() << 16; offset |= readOneByte() << 24; if (expandCopy(offset, length)) { return; } break; } readNow -= length; uncompressedBytesRemaining -= length; } } /** * Slide buffer. * * <p>Move all bytes of the buffer after the first block down to * the beginning of the buffer.</p> */ private void slideBuffer() { System.arraycopy(decompressBuf, blockSize, decompressBuf, 0, blockSize * 2); writeIndex -= blockSize; readIndex -= blockSize; } /* * For literals up to and including 60 bytes in length, the * upper six bits of the tag byte contain (len-1). The literal * follows immediately thereafter in the bytestream. - For * longer literals, the (len-1) value is stored after the tag * byte, little-endian. The upper six bits of the tag byte * describe how many bytes are used for the length; 60, 61, 62 * or 63 for 1-4 bytes, respectively. The literal itself follows * after the length. 
*/ private int readLiteralLength(int b) throws IOException { int length; switch (b >> 2) { case 60: length = readOneByte(); break; case 61: length = readOneByte(); length |= (readOneByte() << 8); break; case 62: length = readOneByte(); length |= (readOneByte() << 8); length |= (readOneByte() << 16); break; case 63: length = readOneByte(); length |= (readOneByte() << 8); length |= (readOneByte() << 16); length |= (readOneByte() << 24); break; default: length = b >> 2; break; } return length + 1; } /** * Literals are uncompressed data stored directly in the byte stream. * * @param length * The number of bytes to read from the underlying stream * * @throws IOException * If the first byte cannot be read for any reason other than * end of file, or if the input stream has been closed, or if * some other I/O error occurs. * @return True if the decompressed data should be flushed */ private boolean expandLiteral(final int length) throws IOException { int bytesRead = in.read(decompressBuf, writeIndex, length); count(bytesRead); if (length != bytesRead) { throw new IOException("Premature end of stream"); } writeIndex += length; return (writeIndex >= (2 * this.blockSize)); } /** * Copies are references back into previous decompressed data, telling the * decompressor to reuse data it has previously decoded. They encode two * values: The offset, saying how many bytes back from the current position * to read, and the length, how many bytes to copy. Offsets of zero can be * encoded, but are not legal; similarly, it is possible to encode * backreferences that would go past the end of the block (offset > current * decompressed position), which is also nonsensical and thus not allowed. 
* * @param offset * The offset from the backward from the end of expanded stream * @param length * The number of bytes to copy * * @throws IOException * An the offset expands past the front of the decompression * buffer * @return True if the decompressed data should be flushed */ private boolean expandCopy(final int offset, int length) throws IOException { if (offset > blockSize) { throw new IOException("Offset is larger than block size"); } if (offset == 1) { byte lastChar = decompressBuf[writeIndex - 1]; for (int i = 0; i < length; i++) { decompressBuf[writeIndex++] = lastChar; } } else if (length < offset) { System.arraycopy(decompressBuf, writeIndex - offset, decompressBuf, writeIndex, length); writeIndex += length; } else { int fullRotations = length / offset; int pad = length - (offset * fullRotations); while (fullRotations-- != 0) { System.arraycopy(decompressBuf, writeIndex - offset, decompressBuf, writeIndex, offset); writeIndex += offset; } if (pad > 0) { System.arraycopy(decompressBuf, writeIndex - offset, decompressBuf, writeIndex, pad); writeIndex += pad; } } return (writeIndex >= (2 * this.blockSize)); } /** * This helper method reads the next byte of data from the input stream. The * value byte is returned as an <code>int</code> in the range <code>0</code> * to <code>255</code>. If no byte is available because the end of the * stream has been reached, an Exception is thrown. * * @return The next byte of data * @throws IOException * EOF is reached or error reading the stream */ private int readOneByte() throws IOException { int b = in.read(); if (b == -1) { throw new IOException("Premature end of stream"); } count(1); return b & 0xFF; } /** * The stream starts with the uncompressed length (up to a maximum of 2^32 - * 1), stored as a little-endian varint. Varints consist of a series of * bytes, where the lower 7 bits are data and the upper bit is set iff there * are more bytes to be read. 
In other words, an uncompressed length of 64 * would be stored as 0x40, and an uncompressed length of 2097150 (0x1FFFFE) * would be stored as 0xFE 0xFF 0x7F. * * @return The size of the uncompressed data * * @throws IOException * Could not read a byte */ private long readSize() throws IOException { int index = 0; long sz = 0; int b = 0; do { b = readOneByte(); sz |= (b & 0x7f) << (index++ * 7); } while (0 != (b & 0x80)); return sz; } /** * Get the uncompressed size of the stream * * @return the uncompressed size */ public int getSize() { return size; } }
COMPRESS-147 be extra defensive against overflows - can actually not happen with the current snappy compressors where offset will always be <= 32768 git-svn-id: fb13a56e2874bbe7f090676f40e1dce4dcf67111@1548804 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java
COMPRESS-147 be extra defensive against overflows - can actually not happen with the current snappy compressors where offset will always be <= 32768
Java
apache-2.0
0bca0808e885a384b50e7656ba8c4424394a736b
0
apache/openwebbeans,apache/openwebbeans,apache/openwebbeans
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.webbeans.component.creation; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.lang.reflect.Type; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.decorator.Decorator; import javax.enterprise.context.Dependent; import javax.enterprise.context.NormalScope; import javax.enterprise.inject.Any; import javax.enterprise.inject.Specializes; import javax.enterprise.inject.Typed; import javax.enterprise.inject.spi.Annotated; import javax.enterprise.inject.spi.AnnotatedField; import javax.enterprise.inject.spi.AnnotatedMember; import javax.enterprise.inject.spi.AnnotatedMethod; import javax.enterprise.inject.spi.AnnotatedParameter; import javax.enterprise.inject.spi.AnnotatedType; import javax.enterprise.util.Nonbinding; import javax.inject.Named; import javax.inject.Scope; import javax.interceptor.Interceptor; import org.apache.webbeans.annotation.AnnotationManager; import org.apache.webbeans.annotation.AnyLiteral; import org.apache.webbeans.annotation.DefaultLiteral; import org.apache.webbeans.annotation.NamedLiteral; import 
org.apache.webbeans.component.BeanAttributesImpl; import org.apache.webbeans.config.OWBLogConst; import org.apache.webbeans.config.WebBeansContext; import org.apache.webbeans.container.ExternalScope; import org.apache.webbeans.exception.WebBeansConfigurationException; import org.apache.webbeans.logger.WebBeansLoggerFacade; import org.apache.webbeans.portable.AbstractAnnotated; import org.apache.webbeans.util.AnnotationUtil; import org.apache.webbeans.util.Asserts; import org.apache.webbeans.util.ClassUtil; import org.apache.webbeans.util.WebBeansUtil; /** * Abstract implementation. * * @version $Rev$ $Date$ * * @param <T> bean class info */ public abstract class BeanAttributesBuilder<T, A extends Annotated> { protected A annotated; protected WebBeansContext webBeansContext; protected Set<Type> types = new HashSet<Type>(); protected Set<Annotation> qualifiers = new HashSet<Annotation>(); protected Class<? extends Annotation> scope; protected String name; protected boolean nullable; protected Set<Class<? extends Annotation>> stereotypes; protected Boolean alternative; public static BeanAttributesBuilderFactory forContext(WebBeansContext webBeansContext) { return new BeanAttributesBuilderFactory(webBeansContext); } /** * Creates a bean instance. * * @param annotated */ protected BeanAttributesBuilder(WebBeansContext webBeansContext, A annotated) { this.annotated = annotated; this.webBeansContext = webBeansContext; } public BeanAttributesBuilder<T, A> alternative(final boolean alternative) { this.alternative = alternative; return this; } public BeanAttributesImpl<T> build() { // we need to check the stereotypes first because we might need it to determine the scope stereotypes = defineStereotypes(annotated); defineScope(); if (scope == null) { // this indicates that we shall not use this AnnotatedType to create Beans from it. 
return null; } defineTypes(); defineName(); defineQualifiers(); defineNullable(); defineAlternative(); return new BeanAttributesImpl<T>(types, qualifiers, scope, name, nullable, stereotypes, alternative); } protected A getAnnotated() { return annotated; } /** * {@inheritDoc} */ protected void defineTypes() { Class<?> baseType = ClassUtil.getClass(annotated.getBaseType()); if (baseType.isArray()) { // 3.3.1 types.add(Object.class); types.add(baseType); } else { Typed beanTypes = annotated.getAnnotation(Typed.class); if (beanTypes != null) { Class<?>[] typedTypes = beanTypes.value(); //New api types Set<Type> newTypes = new HashSet<Type>(); for (Class<?> type : typedTypes) { Type foundType = null; for (Type apiType : annotated.getTypeClosure()) { if(ClassUtil.getClazz(apiType) == type) { foundType = apiType; break; } } if(foundType == null) { throw new WebBeansConfigurationException("@Type values must be in bean api types of class: " + baseType); } newTypes.add(foundType); } this.types.addAll(newTypes); this.types.add(Object.class); } else { this.types.addAll(annotated.getTypeClosure()); } Set<String> ignored = webBeansContext.getOpenWebBeansConfiguration().getIgnoredInterfaces(); if (!ignored.isEmpty()) { for (Iterator<Type> i = this.types.iterator(); i.hasNext(); ) { Type t = i.next(); if (t instanceof Class && ignored.contains(((Class<?>) t).getName())) { i.remove(); } } } } } /** * {@inheritDoc} */ protected void defineQualifiers() { HashSet<Class<? extends Annotation>> qualifiedTypes = new HashSet<Class<? extends Annotation>>(); if (annotated.isAnnotationPresent(Specializes.class)) { defineQualifiers(getSuperAnnotated(), qualifiedTypes); } defineQualifiers(annotated, qualifiedTypes); } private void defineQualifiers(Annotated annotated, Set<Class<? 
extends Annotation>> qualifiedTypes) { Annotation[] annotations = AnnotationUtil.asArray(annotated.getAnnotations()); final AnnotationManager annotationManager = webBeansContext.getAnnotationManager(); for (Annotation annotation : annotations) { Class<? extends Annotation> type = annotation.annotationType(); if (annotationManager.isQualifierAnnotation(type)) { Method[] methods = webBeansContext.getSecurityService().doPrivilegedGetDeclaredMethods(type); for (Method method : methods) { Class<?> clazz = method.getReturnType(); if (clazz.isArray() || clazz.isAnnotation()) { if (!AnnotationUtil.hasAnnotation(method.getDeclaredAnnotations(), Nonbinding.class)) { throw new WebBeansConfigurationException("WebBeans definition class : " + method.getDeclaringClass().getName() + " @Qualifier : " + annotation.annotationType().getName() + " must have @NonBinding valued members for its array-valued and annotation valued members"); } } } if (qualifiedTypes.contains(annotation.annotationType()) && !isRepetable(annotated, annotation)) { continue; } else { qualifiedTypes.add(annotation.annotationType()); } if (annotation.annotationType().equals(Named.class) && name != null) { qualifiers.add(new NamedLiteral(name)); } else { qualifiers.add(annotation); } } } // No-binding annotation if (qualifiers.size() == 0 ) { qualifiers.add(DefaultLiteral.INSTANCE); } else if(qualifiers.size() == 1) { // section 2.3.1 // If a bean does not explicitly declare a qualifier other than @Named or @Any, // the bean has exactly one additional qualifier, of type @Default. Annotation annot = qualifiers.iterator().next(); if(annot.annotationType().equals(Named.class) || annot.annotationType().equals(Any.class)) { qualifiers.add(DefaultLiteral.INSTANCE); } } else if (qualifiers.size() == 2) { Iterator<Annotation> qualiIt = qualifiers.iterator(); Class<? extends Annotation> q1 = qualiIt.next().annotationType(); Class<? 
extends Annotation> q2 = qualiIt.next().annotationType(); if (q1.equals(Named.class) && q2.equals(Any.class) || q2.equals(Named.class) && q1.equals(Any.class) ) { qualifiers.add(DefaultLiteral.INSTANCE); } } //Add @Any support if(!hasAnyQualifier()) { qualifiers.add(AnyLiteral.INSTANCE); } } // we don't want to do the getRepeatableMethod() logic *again* if we can but we can need for custom AT private boolean isRepetable(final Annotated annotated, final Annotation annotation) { return AbstractAnnotated.class.isInstance(annotated) ? AbstractAnnotated.class.cast(annotated).getRepeatables().contains(annotation.annotationType()) : webBeansContext.getAnnotationManager().getRepeatableMethod(annotation.annotationType()) != null; } /** * Returns true if any binding exist * * @return true if any binding exist */ private boolean hasAnyQualifier() { return AnnotationUtil.getAnnotation(qualifiers, Any.class) != null; } protected abstract void defineScope(); protected void defineScope(String errorMessage) { defineScope(null, false, errorMessage); } protected void defineScope(Class<?> declaringClass, boolean onlyScopedBeans, String errorMessage) { Annotation[] annotations = AnnotationUtil.asArray(annotated.getAnnotations()); boolean found = false; List<ExternalScope> additionalScopes = webBeansContext.getBeanManagerImpl().getAdditionalScopes(); for (Annotation annotation : annotations) { if (declaringClass != null && AnnotationUtil.getDeclaringClass(annotation, declaringClass) != null && !AnnotationUtil.isDeclaringClass(declaringClass, annotation)) { continue; } Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (!webBeansContext.getBeanManagerImpl().isScope(annotationType)) { continue; } /*Normal scope*/ Annotation var = annotationType.getAnnotation(NormalScope.class); /*Pseudo scope*/ Annotation pseudo = annotationType.getAnnotation(Scope.class); if (var == null && pseudo == null) { // check for additional scopes registered via a CDI Extension for (ExternalScope additionalScope : additionalScopes) { if (annotationType.equals(additionalScope.getScope())) { // create a proxy which implements the given annotation Annotation scopeAnnotation = additionalScope.getScopeAnnotation(); if (additionalScope.isNormal()) { var = scopeAnnotation; } else { pseudo = scopeAnnotation; } } } } if (var != null) { if(pseudo != null) { throw new WebBeansConfigurationException("Not to define both @Scope and @NormalScope on bean : " + ClassUtil.getClass(annotated.getBaseType()).getName()); } if (found) { throw new WebBeansConfigurationException(errorMessage); } found = true; scope = annotation.annotationType(); } else { if(pseudo != null) { if (found) { throw new WebBeansConfigurationException(errorMessage); } found = true; scope = annotation.annotationType(); } } } if (found && annotated.getAnnotation(Interceptor.class) != null && scope != Dependent.class) { throw new WebBeansConfigurationException("An Interceptor must declare any other Scope than @Dependent: " + ClassUtil.getClass(annotated.getBaseType()).getName()); } if (found && annotated.getAnnotation(Decorator.class) != null && scope != Dependent.class) { throw new WebBeansConfigurationException("A Decorator must declare any other Scope than @Dependent: " + ClassUtil.getClass(annotated.getBaseType()).getName()); } if (!found && declaringClass != null && !hasDeclaredNonInheritedScope(declaringClass)) { defineScope(declaringClass.getSuperclass(), onlyScopedBeans, errorMessage); } else if (!found) { defineDefaultScope(errorMessage, onlyScopedBeans); } } private void 
defineDefaultScope(String exceptionMessage, boolean onlyScopedBeans) { if (scope == null) { Set<Class<? extends Annotation>> stereos = stereotypes; if (stereos != null && stereos.size() > 0) { Annotation defined = null; Set<Class<? extends Annotation>> anns = stereotypes; for (Class<? extends Annotation> stero : anns) { boolean containsNormal = AnnotationUtil.hasMetaAnnotation(stero.getDeclaredAnnotations(), NormalScope.class); if (AnnotationUtil.hasMetaAnnotation(stero.getDeclaredAnnotations(), NormalScope.class) || AnnotationUtil.hasMetaAnnotation(stero.getDeclaredAnnotations(), Scope.class)) { Annotation next; if(containsNormal) { next = AnnotationUtil.getMetaAnnotations(stero.getDeclaredAnnotations(), NormalScope.class)[0]; } else { next = AnnotationUtil.getMetaAnnotations(stero.getDeclaredAnnotations(), Scope.class)[0]; } if (defined == null) { defined = next; } else { if (!defined.equals(next)) { throw new WebBeansConfigurationException(exceptionMessage); } } } } if (defined != null) { scope = defined.annotationType(); } else { scope = Dependent.class; } } if (scope == null && (!onlyScopedBeans || annotated.getAnnotation(Interceptor.class) != null || annotated.getAnnotation(Decorator.class) != null)) { // only add a 'default' Dependent scope // * if it's not in a bean-discovery-mode='scoped' module, or // * if it's a Decorator or Interceptor scope = Dependent.class; } } } private boolean hasDeclaredNonInheritedScope(Class<?> type) { return webBeansContext.getAnnotationManager().getDeclaredScopeAnnotation(type) != null; } protected abstract void defineName(); protected void defineName(Annotated annotated, String name) { Annotation[] anns = AnnotationUtil.asArray(annotated.getAnnotations()); Named nameAnnot = null; boolean isDefault = false; for (Annotation ann : anns) { if (ann.annotationType().equals(Named.class)) { nameAnnot = (Named) ann; break; } } if (nameAnnot == null) // no @Named { // Check for stereottype if 
(webBeansContext.getAnnotationManager().hasNamedOnStereoTypes(stereotypes)) { isDefault = true; } } else // yes @Named { if (nameAnnot.value().equals("")) { isDefault = true; } else { this.name = nameAnnot.value(); } } if (isDefault) { this.name = name; } } /** * @return the AnnotatedType of the next non-Specialized superclass */ protected abstract Annotated getSuperAnnotated(); protected abstract void defineNullable(); protected void defineNullable(boolean nullable) { this.nullable = nullable; } /** * {@inheritDoc} */ protected Set<Class<? extends Annotation>> defineStereotypes(Annotated annot) { Set<Class<? extends Annotation>> stereos = null; Annotation[] anns = AnnotationUtil.asArray(annot.getAnnotations()); final AnnotationManager annotationManager = webBeansContext.getAnnotationManager(); if (annotationManager.hasStereoTypeMetaAnnotation(anns)) { Annotation[] steroAnns = annotationManager.getStereotypeMetaAnnotations(anns); for (Annotation stereo : steroAnns) { if (stereos == null) { stereos = new HashSet<Class<? extends Annotation>>(); } stereos.add(stereo.annotationType()); } } return stereos != null ? stereos : Collections.EMPTY_SET; } // these alternatives can be not activated protected void defineAlternative() { alternative = alternative == null || !alternative ? 
WebBeansUtil.isAlternative(annotated, stereotypes) : alternative; } public static class BeanAttributesBuilderFactory { private WebBeansContext webBeansContext; private BeanAttributesBuilderFactory(WebBeansContext webBeansContext) { Asserts.assertNotNull(webBeansContext, Asserts.PARAM_NAME_WEBBEANSCONTEXT); this.webBeansContext = webBeansContext; } public <T> BeanAttributesBuilder<T, AnnotatedType<T>> newBeanAttibutes(AnnotatedType<T> annotatedType) { return newBeanAttibutes(annotatedType, false); } public <T> BeanAttributesBuilder<T, AnnotatedType<T>> newBeanAttibutes(AnnotatedType<T> annotatedType, boolean onlyScopedBeans) { return new AnnotatedTypeBeanAttributesBuilder<T>(webBeansContext, annotatedType, onlyScopedBeans); } public <T> BeanAttributesBuilder<T, AnnotatedField<T>> newBeanAttibutes(AnnotatedField<T> annotatedField) { return new AnnotatedFieldBeanAttributesBuilder<T>(webBeansContext, annotatedField); } public <T> BeanAttributesBuilder<T, AnnotatedMethod<T>> newBeanAttibutes(AnnotatedMethod<T> annotatedMethod) { return new AnnotatedMethodBeanAttributesBuilder<T>(webBeansContext, annotatedMethod); } } private static class AnnotatedTypeBeanAttributesBuilder<C> extends BeanAttributesBuilder<C, AnnotatedType<C>> { private final boolean onlyScopedBeans; public AnnotatedTypeBeanAttributesBuilder(WebBeansContext webBeansContext, AnnotatedType<C> annotated, boolean onlyScopedBeans) { super(webBeansContext, annotated); this.onlyScopedBeans = onlyScopedBeans; } @Override protected void defineScope() { defineScope(getAnnotated().getJavaClass(), onlyScopedBeans, WebBeansLoggerFacade.getTokenString(OWBLogConst.TEXT_MB_IMPL) + getAnnotated().getJavaClass().getName() + WebBeansLoggerFacade.getTokenString(OWBLogConst.TEXT_SAME_SCOPE)); } @Override protected void defineName() { if (getAnnotated().isAnnotationPresent(Specializes.class)) { AnnotatedType<? super C> annotatedToSpecialize = getAnnotated(); do { Class<? 
super C> superclass = annotatedToSpecialize.getJavaClass().getSuperclass(); if (superclass.equals(Object.class)) { throw new WebBeansConfigurationException("@Specialized Class : " + getAnnotated().getJavaClass().getName() + " must not directly extend Object.class"); } annotatedToSpecialize = webBeansContext.getAnnotatedElementFactory().newAnnotatedType(superclass); } while(annotatedToSpecialize.getAnnotation(Specializes.class) != null); defineName(annotatedToSpecialize, WebBeansUtil.getManagedBeanDefaultName(annotatedToSpecialize.getJavaClass().getSimpleName())); } if (name == null) { defineName(getAnnotated(), WebBeansUtil.getManagedBeanDefaultName(getAnnotated().getJavaClass().getSimpleName())); } else { // TODO XXX We have to check stereotypes here, too if (getAnnotated().getJavaClass().isAnnotationPresent(Named.class)) { throw new WebBeansConfigurationException("@Specialized Class : " + getAnnotated().getJavaClass().getName() + " may not explicitly declare a bean name"); } } } @Override protected void defineNullable() { defineNullable(false); } @Override protected AnnotatedType<? super C> getSuperAnnotated() { AnnotatedType<? super C> annotatedType = getAnnotated(); do { Class<? 
super C> superclass = annotatedType.getJavaClass().getSuperclass(); if (superclass == null || superclass.equals(Object.class)) { return null; } annotatedType = webBeansContext.getAnnotatedElementFactory().newAnnotatedType(superclass); } while (annotatedType.getAnnotation(Specializes.class) != null); return annotatedType; } } private static class AnnotatedFieldBeanAttributesBuilder<M> extends AnnotatedMemberBeanAttributesBuilder<M, AnnotatedField<M>> { protected AnnotatedFieldBeanAttributesBuilder(WebBeansContext webBeansContext, AnnotatedField<M> annotated) { super(webBeansContext, annotated); } @Override protected void defineScope() { defineScope("Annotated producer field: " + getAnnotated().getJavaMember() + "must declare default @Scope annotation"); } @Override protected void defineName() { defineName(getAnnotated(), WebBeansUtil.getProducerDefaultName(getAnnotated().getJavaMember().getName())); } @Override protected void defineNullable() { defineNullable(!getAnnotated().getJavaMember().getType().isPrimitive()); } @Override protected AnnotatedField<? super M> getSuperAnnotated() { AnnotatedField<M> thisField = getAnnotated(); for (AnnotatedField<? super M> superField: getSuperType().getFields()) { if (thisField.getJavaMember().getName().equals(superField.getJavaMember().getName()) && thisField.getBaseType().equals(superField.getBaseType())) { return superField; } } return null; } } private static class AnnotatedMethodBeanAttributesBuilder<M> extends AnnotatedMemberBeanAttributesBuilder<M, AnnotatedMethod<M>> { protected AnnotatedMethodBeanAttributesBuilder(WebBeansContext webBeansContext, AnnotatedMethod<M> annotated) { super(webBeansContext, annotated); } @Override protected void defineScope() { defineScope("Annotated producer method : " + getAnnotated().getJavaMember() + "must declare default @Scope annotation"); } @Override protected void defineName() { if (getAnnotated().isAnnotationPresent(Specializes.class)) { AnnotatedMethod<? 
super M> superAnnotated = getSuperAnnotated(); defineName(superAnnotated, WebBeansUtil.getProducerDefaultName(superAnnotated.getJavaMember().getName())); } if (name == null) { defineName(getAnnotated(), WebBeansUtil.getProducerDefaultName(getAnnotated().getJavaMember().getName())); } else { // TODO XXX We have to check stereotypes here, too if (getAnnotated().isAnnotationPresent(Named.class)) { throw new WebBeansConfigurationException("@Specialized Producer method : " + getAnnotated().getJavaMember().getName() + " may not explicitly declare a bean name"); } } } @Override protected void defineNullable() { defineNullable(!getAnnotated().getJavaMember().getReturnType().isPrimitive()); } @Override protected AnnotatedMethod<? super M> getSuperAnnotated() { AnnotatedMethod<M> thisMethod = getAnnotated(); for (AnnotatedMethod<? super M> superMethod: webBeansContext.getAnnotatedElementFactory().getFilteredAnnotatedMethods(getSuperType())) { List<AnnotatedParameter<M>> thisParameters = thisMethod.getParameters(); if (thisMethod.getJavaMember().getName().equals(superMethod.getJavaMember().getName()) && thisMethod.getBaseType().equals(superMethod.getBaseType()) && thisParameters.size() == superMethod.getParameters().size()) { List<AnnotatedParameter<?>> superParameters = (List<AnnotatedParameter<?>>)(List<?>)superMethod.getParameters(); boolean match = true; for (int i = 0; i < thisParameters.size(); i++) { if (!thisParameters.get(i).getBaseType().equals(superParameters.get(i).getBaseType())) { match = false; break; } } if (match) { return superMethod; } } } return null; } } private abstract static class AnnotatedMemberBeanAttributesBuilder<M, A extends AnnotatedMember<M>> extends BeanAttributesBuilder<M, A> { protected AnnotatedMemberBeanAttributesBuilder(WebBeansContext webBeansContext, A annotated) { super(webBeansContext, annotated); } protected AnnotatedType<? super M> getSuperType() { Class<? 
super M> superclass = getAnnotated().getDeclaringType().getJavaClass().getSuperclass(); if (superclass == null) { return null; } return webBeansContext.getAnnotatedElementFactory().getAnnotatedType(superclass); } } }
webbeans-impl/src/main/java/org/apache/webbeans/component/creation/BeanAttributesBuilder.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.webbeans.component.creation; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.lang.reflect.Type; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.decorator.Decorator; import javax.enterprise.context.Dependent; import javax.enterprise.context.NormalScope; import javax.enterprise.inject.Any; import javax.enterprise.inject.Specializes; import javax.enterprise.inject.Typed; import javax.enterprise.inject.spi.Annotated; import javax.enterprise.inject.spi.AnnotatedField; import javax.enterprise.inject.spi.AnnotatedMember; import javax.enterprise.inject.spi.AnnotatedMethod; import javax.enterprise.inject.spi.AnnotatedParameter; import javax.enterprise.inject.spi.AnnotatedType; import javax.enterprise.util.Nonbinding; import javax.inject.Named; import javax.inject.Scope; import javax.interceptor.Interceptor; import org.apache.webbeans.annotation.AnnotationManager; import org.apache.webbeans.annotation.AnyLiteral; import org.apache.webbeans.annotation.DefaultLiteral; import org.apache.webbeans.annotation.NamedLiteral; import 
org.apache.webbeans.component.BeanAttributesImpl; import org.apache.webbeans.config.OWBLogConst; import org.apache.webbeans.config.WebBeansContext; import org.apache.webbeans.container.ExternalScope; import org.apache.webbeans.exception.WebBeansConfigurationException; import org.apache.webbeans.logger.WebBeansLoggerFacade; import org.apache.webbeans.portable.AbstractAnnotated; import org.apache.webbeans.util.AnnotationUtil; import org.apache.webbeans.util.Asserts; import org.apache.webbeans.util.ClassUtil; import org.apache.webbeans.util.WebBeansUtil; /** * Abstract implementation. * * @version $Rev$ $Date$ * * @param <T> bean class info */ public abstract class BeanAttributesBuilder<T, A extends Annotated> { protected A annotated; protected WebBeansContext webBeansContext; protected Set<Type> types = new HashSet<Type>(); protected Set<Annotation> qualifiers = new HashSet<Annotation>(); protected Class<? extends Annotation> scope; protected String name; protected boolean nullable; protected Set<Class<? extends Annotation>> stereotypes; protected Boolean alternative; public static BeanAttributesBuilderFactory forContext(WebBeansContext webBeansContext) { return new BeanAttributesBuilderFactory(webBeansContext); } /** * Creates a bean instance. * * @param annotated */ protected BeanAttributesBuilder(WebBeansContext webBeansContext, A annotated) { this.annotated = annotated; this.webBeansContext = webBeansContext; } public BeanAttributesBuilder<T, A> alternative(final boolean alternative) { this.alternative = alternative; return this; } public BeanAttributesImpl<T> build() { // we need to check the stereotypes first because we might need it to determine the scope stereotypes = defineStereotypes(annotated); defineScope(); if (scope == null) { // this indicates that we shall not use this AnnotatedType to create Beans from it. 
return null; } defineTypes(); defineName(); defineQualifiers(); defineNullable(); defineAlternative(); return new BeanAttributesImpl<T>(types, qualifiers, scope, name, nullable, stereotypes, alternative); } protected A getAnnotated() { return annotated; } /** * {@inheritDoc} */ protected void defineTypes() { Class<?> baseType = ClassUtil.getClass(annotated.getBaseType()); if (baseType.isArray()) { // 3.3.1 types.add(Object.class); types.add(baseType); } else { Typed beanTypes = annotated.getAnnotation(Typed.class); if (beanTypes != null) { Class<?>[] typedTypes = beanTypes.value(); //New api types Set<Type> newTypes = new HashSet<Type>(); for (Class<?> type : typedTypes) { Type foundType = null; for (Type apiType : annotated.getTypeClosure()) { if(ClassUtil.getClazz(apiType) == type) { foundType = apiType; break; } } if(foundType == null) { throw new WebBeansConfigurationException("@Type values must be in bean api types of class: " + baseType); } newTypes.add(foundType); } this.types.addAll(newTypes); this.types.add(Object.class); } else { this.types.addAll(annotated.getTypeClosure()); } Set<String> ignored = webBeansContext.getOpenWebBeansConfiguration().getIgnoredInterfaces(); if (!ignored.isEmpty()) { for (Iterator<Type> i = this.types.iterator(); i.hasNext(); ) { Type t = i.next(); if (t instanceof Class && ignored.contains(((Class<?>) t).getName())) { i.remove(); } } } } } /** * {@inheritDoc} */ protected void defineQualifiers() { HashSet<Class<? extends Annotation>> qualifiedTypes = new HashSet<Class<? extends Annotation>>(); if (annotated.isAnnotationPresent(Specializes.class)) { defineQualifiers(getSuperAnnotated(), qualifiedTypes); } defineQualifiers(annotated, qualifiedTypes); } private void defineQualifiers(Annotated annotated, Set<Class<? 
extends Annotation>> qualifiedTypes) { Annotation[] annotations = AnnotationUtil.asArray(annotated.getAnnotations()); final AnnotationManager annotationManager = webBeansContext.getAnnotationManager(); for (Annotation annotation : annotations) { Class<? extends Annotation> type = annotation.annotationType(); if (annotationManager.isQualifierAnnotation(type)) { Method[] methods = webBeansContext.getSecurityService().doPrivilegedGetDeclaredMethods(type); for (Method method : methods) { Class<?> clazz = method.getReturnType(); if (clazz.isArray() || clazz.isAnnotation()) { if (!AnnotationUtil.hasAnnotation(method.getDeclaredAnnotations(), Nonbinding.class)) { throw new WebBeansConfigurationException("WebBeans definition class : " + method.getDeclaringClass().getName() + " @Qualifier : " + annotation.annotationType().getName() + " must have @NonBinding valued members for its array-valued and annotation valued members"); } } } if (qualifiedTypes.contains(annotation.annotationType()) && !isRepetable(annotated, annotation)) { continue; } else { qualifiedTypes.add(annotation.annotationType()); } if (annotation.annotationType().equals(Named.class) && name != null) { qualifiers.add(new NamedLiteral(name)); } else { qualifiers.add(annotation); } } } // No-binding annotation if (qualifiers.size() == 0 ) { qualifiers.add(DefaultLiteral.INSTANCE); } else if(qualifiers.size() == 1) { // section 2.3.1 // If a bean does not explicitly declare a qualifier other than @Named or @Any, // the bean has exactly one additional qualifier, of type @Default. Annotation annot = qualifiers.iterator().next(); if(annot.annotationType().equals(Named.class) || annot.annotationType().equals(Any.class)) { qualifiers.add(DefaultLiteral.INSTANCE); } } else if (qualifiers.size() == 2) { Iterator<Annotation> qualiIt = qualifiers.iterator(); Class<? extends Annotation> q1 = qualiIt.next().annotationType(); Class<? 
extends Annotation> q2 = qualiIt.next().annotationType(); if (q1.equals(Named.class) && q2.equals(Any.class) || q2.equals(Named.class) && q1.equals(Any.class) ) { qualifiers.add(DefaultLiteral.INSTANCE); } } //Add @Any support if(!hasAnyQualifier()) { qualifiers.add(AnyLiteral.INSTANCE); } } // we don't want to do the getRepeatableMethod() logic *again* if we can but we can need for custom AT private boolean isRepetable(final Annotated annotated, final Annotation annotation) { return AbstractAnnotated.class.isInstance(annotated) ? AbstractAnnotated.class.cast(annotated).getRepeatables().contains(annotation.annotationType()) : webBeansContext.getAnnotationManager().getRepeatableMethod(annotation.annotationType()) != null; } /** * Returns true if any binding exist * * @return true if any binding exist */ private boolean hasAnyQualifier() { return AnnotationUtil.getAnnotation(qualifiers, Any.class) != null; } protected abstract void defineScope(); protected void defineScope(String errorMessage) { defineScope(null, false, errorMessage); } protected void defineScope(Class<?> declaringClass, boolean onlyScopedBeans, String errorMessage) { Annotation[] annotations = AnnotationUtil.asArray(annotated.getAnnotations()); boolean found = false; List<ExternalScope> additionalScopes = webBeansContext.getBeanManagerImpl().getAdditionalScopes(); for (Annotation annotation : annotations) { if (declaringClass != null && AnnotationUtil.getDeclaringClass(annotation, declaringClass) != null && !AnnotationUtil.isDeclaringClass(declaringClass, annotation)) { continue; } Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (!webBeansContext.getBeanManagerImpl().isScope(annotationType)) { continue; } /*Normal scope*/ Annotation var = annotationType.getAnnotation(NormalScope.class); /*Pseudo scope*/ Annotation pseudo = annotationType.getAnnotation(Scope.class); if (var == null && pseudo == null) { // check for additional scopes registered via a CDI Extension for (ExternalScope additionalScope : additionalScopes) { if (annotationType.equals(additionalScope.getScope())) { // create a proxy which implements the given annotation Annotation scopeAnnotation = additionalScope.getScopeAnnotation(); if (additionalScope.isNormal()) { var = scopeAnnotation; } else { pseudo = scopeAnnotation; } } } } if (var != null) { if(pseudo != null) { throw new WebBeansConfigurationException("Not to define both @Scope and @NormalScope on bean : " + ClassUtil.getClass(annotated.getBaseType()).getName()); } if (found) { throw new WebBeansConfigurationException(errorMessage); } found = true; scope = annotation.annotationType(); } else { if(pseudo != null) { if (found) { throw new WebBeansConfigurationException(errorMessage); } found = true; scope = annotation.annotationType(); } } } if (found && annotated.getAnnotation(Interceptor.class) != null && scope != Dependent.class) { throw new WebBeansConfigurationException("An Interceptor must declare any other Scope than @Dependent: " + ClassUtil.getClass(annotated.getBaseType()).getName()); } if (found && annotated.getAnnotation(Decorator.class) != null && scope != Dependent.class) { throw new WebBeansConfigurationException("A Decorator must declare any other Scope than @Dependent: " + ClassUtil.getClass(annotated.getBaseType()).getName()); } if (!found && declaringClass != null && !hasDeclaredNonInheritedScope(declaringClass)) { defineScope(declaringClass.getSuperclass(), onlyScopedBeans, errorMessage); } else if (!found) { defineDefaultScope(errorMessage, onlyScopedBeans); } } private void 
defineDefaultScope(String exceptionMessage, boolean onlyScopedBeans) { if (scope == null) { Set<Class<? extends Annotation>> stereos = stereotypes; if (stereos != null && stereos.size() > 0) { Annotation defined = null; Set<Class<? extends Annotation>> anns = stereotypes; for (Class<? extends Annotation> stero : anns) { boolean containsNormal = AnnotationUtil.hasMetaAnnotation(stero.getDeclaredAnnotations(), NormalScope.class); if (AnnotationUtil.hasMetaAnnotation(stero.getDeclaredAnnotations(), NormalScope.class) || AnnotationUtil.hasMetaAnnotation(stero.getDeclaredAnnotations(), Scope.class)) { Annotation next; if(containsNormal) { next = AnnotationUtil.getMetaAnnotations(stero.getDeclaredAnnotations(), NormalScope.class)[0]; } else { next = AnnotationUtil.getMetaAnnotations(stero.getDeclaredAnnotations(), Scope.class)[0]; } if (defined == null) { defined = next; } else { if (!defined.equals(next)) { throw new WebBeansConfigurationException(exceptionMessage); } } } } if (defined != null) { scope = defined.annotationType(); } else { scope = Dependent.class; } } if (scope == null && (!onlyScopedBeans || annotated.getAnnotation(Interceptor.class) != null || annotated.getAnnotation(Decorator.class) != null)) { // only add a 'default' Dependent scope // * if it's not in a bean-discovery-mode='scoped' module, or // * if it's a Decorator or Interceptor scope = Dependent.class; } } } private boolean hasDeclaredNonInheritedScope(Class<?> type) { return webBeansContext.getAnnotationManager().getDeclaredScopeAnnotation(type) != null; } protected abstract void defineName(); protected void defineName(Annotated annotated, String name) { Annotation[] anns = AnnotationUtil.asArray(annotated.getAnnotations()); Named nameAnnot = null; boolean isDefault = false; for (Annotation ann : anns) { if (ann.annotationType().equals(Named.class)) { nameAnnot = (Named) ann; break; } } if (nameAnnot == null) // no @Named { // Check for stereottype if 
(webBeansContext.getAnnotationManager().hasNamedOnStereoTypes(stereotypes)) { isDefault = true; } } else // yes @Named { if (nameAnnot.value().equals("")) { isDefault = true; } else { this.name = nameAnnot.value(); } } if (isDefault) { this.name = name; } } /** * @return the AnnotatedType of the next non-Specialized superclass */ protected abstract Annotated getSuperAnnotated(); protected abstract void defineNullable(); protected void defineNullable(boolean nullable) { this.nullable = nullable; } /** * {@inheritDoc} */ protected Set<Class<? extends Annotation>> defineStereotypes(Annotated annot) { Set<Class<? extends Annotation>> stereos = null; Annotation[] anns = AnnotationUtil.asArray(annot.getAnnotations()); final AnnotationManager annotationManager = webBeansContext.getAnnotationManager(); if (annotationManager.hasStereoTypeMetaAnnotation(anns)) { Annotation[] steroAnns = annotationManager.getStereotypeMetaAnnotations(anns); for (Annotation stereo : steroAnns) { if (stereos == null) { stereos = new HashSet<Class<? extends Annotation>>(); } stereos.add(stereo.annotationType()); } } return stereos != null ? stereos : Collections.EMPTY_SET; } // these alternatives can be not activated protected void defineAlternative() { alternative = alternative == null || !alternative ? 
WebBeansUtil.isAlternative(annotated, stereotypes) : alternative; } protected abstract Class<?> getType(); public static class BeanAttributesBuilderFactory { private WebBeansContext webBeansContext; private BeanAttributesBuilderFactory(WebBeansContext webBeansContext) { Asserts.assertNotNull(webBeansContext, Asserts.PARAM_NAME_WEBBEANSCONTEXT); this.webBeansContext = webBeansContext; } public <T> BeanAttributesBuilder<T, AnnotatedType<T>> newBeanAttibutes(AnnotatedType<T> annotatedType) { return newBeanAttibutes(annotatedType, false); } public <T> BeanAttributesBuilder<T, AnnotatedType<T>> newBeanAttibutes(AnnotatedType<T> annotatedType, boolean onlyScopedBeans) { return new AnnotatedTypeBeanAttributesBuilder<T>(webBeansContext, annotatedType, onlyScopedBeans); } public <T> BeanAttributesBuilder<T, AnnotatedField<T>> newBeanAttibutes(AnnotatedField<T> annotatedField) { return new AnnotatedFieldBeanAttributesBuilder<T>(webBeansContext, annotatedField); } public <T> BeanAttributesBuilder<T, AnnotatedMethod<T>> newBeanAttibutes(AnnotatedMethod<T> annotatedMethod) { return new AnnotatedMethodBeanAttributesBuilder<T>(webBeansContext, annotatedMethod); } } private static class AnnotatedTypeBeanAttributesBuilder<C> extends BeanAttributesBuilder<C, AnnotatedType<C>> { private final boolean onlyScopedBeans; public AnnotatedTypeBeanAttributesBuilder(WebBeansContext webBeansContext, AnnotatedType<C> annotated, boolean onlyScopedBeans) { super(webBeansContext, annotated); this.onlyScopedBeans = onlyScopedBeans; } @Override protected void defineScope() { defineScope(getAnnotated().getJavaClass(), onlyScopedBeans, WebBeansLoggerFacade.getTokenString(OWBLogConst.TEXT_MB_IMPL) + getAnnotated().getJavaClass().getName() + WebBeansLoggerFacade.getTokenString(OWBLogConst.TEXT_SAME_SCOPE)); } @Override protected void defineName() { if (getAnnotated().isAnnotationPresent(Specializes.class)) { AnnotatedType<? super C> annotatedToSpecialize = getAnnotated(); do { Class<? 
super C> superclass = annotatedToSpecialize.getJavaClass().getSuperclass(); if (superclass.equals(Object.class)) { throw new WebBeansConfigurationException("@Specialized Class : " + getAnnotated().getJavaClass().getName() + " must not directly extend Object.class"); } annotatedToSpecialize = webBeansContext.getAnnotatedElementFactory().newAnnotatedType(superclass); } while(annotatedToSpecialize.getAnnotation(Specializes.class) != null); defineName(annotatedToSpecialize, WebBeansUtil.getManagedBeanDefaultName(annotatedToSpecialize.getJavaClass().getSimpleName())); } if (name == null) { defineName(getAnnotated(), WebBeansUtil.getManagedBeanDefaultName(getAnnotated().getJavaClass().getSimpleName())); } else { // TODO XXX We have to check stereotypes here, too if (getAnnotated().getJavaClass().isAnnotationPresent(Named.class)) { throw new WebBeansConfigurationException("@Specialized Class : " + getAnnotated().getJavaClass().getName() + " may not explicitly declare a bean name"); } } } @Override protected void defineNullable() { defineNullable(false); } @Override protected Class<?> getType() { return annotated.getJavaClass(); } @Override protected AnnotatedType<? super C> getSuperAnnotated() { AnnotatedType<? super C> annotatedType = getAnnotated(); do { Class<? 
super C> superclass = annotatedType.getJavaClass().getSuperclass(); if (superclass == null || superclass.equals(Object.class)) { return null; } annotatedType = webBeansContext.getAnnotatedElementFactory().newAnnotatedType(superclass); } while (annotatedType.getAnnotation(Specializes.class) != null); return annotatedType; } } private static class AnnotatedFieldBeanAttributesBuilder<M> extends AnnotatedMemberBeanAttributesBuilder<M, AnnotatedField<M>> { protected AnnotatedFieldBeanAttributesBuilder(WebBeansContext webBeansContext, AnnotatedField<M> annotated) { super(webBeansContext, annotated); } @Override protected Class<?> getType() { return annotated.getJavaMember().getType(); } @Override protected void defineScope() { defineScope("Annotated producer field: " + getAnnotated().getJavaMember() + "must declare default @Scope annotation"); } @Override protected void defineName() { defineName(getAnnotated(), WebBeansUtil.getProducerDefaultName(getAnnotated().getJavaMember().getName())); } @Override protected void defineNullable() { defineNullable(!getAnnotated().getJavaMember().getType().isPrimitive()); } @Override protected AnnotatedField<? super M> getSuperAnnotated() { AnnotatedField<M> thisField = getAnnotated(); for (AnnotatedField<? 
super M> superField: getSuperType().getFields()) { if (thisField.getJavaMember().getName().equals(superField.getJavaMember().getName()) && thisField.getBaseType().equals(superField.getBaseType())) { return superField; } } return null; } } private static class AnnotatedMethodBeanAttributesBuilder<M> extends AnnotatedMemberBeanAttributesBuilder<M, AnnotatedMethod<M>> { protected AnnotatedMethodBeanAttributesBuilder(WebBeansContext webBeansContext, AnnotatedMethod<M> annotated) { super(webBeansContext, annotated); } @Override protected Class<?> getType() { return annotated.getJavaMember().getReturnType(); } @Override protected void defineScope() { defineScope("Annotated producer method : " + getAnnotated().getJavaMember() + "must declare default @Scope annotation"); } @Override protected void defineName() { if (getAnnotated().isAnnotationPresent(Specializes.class)) { AnnotatedMethod<? super M> superAnnotated = getSuperAnnotated(); defineName(superAnnotated, WebBeansUtil.getProducerDefaultName(superAnnotated.getJavaMember().getName())); } if (name == null) { defineName(getAnnotated(), WebBeansUtil.getProducerDefaultName(getAnnotated().getJavaMember().getName())); } else { // TODO XXX We have to check stereotypes here, too if (getAnnotated().isAnnotationPresent(Named.class)) { throw new WebBeansConfigurationException("@Specialized Producer method : " + getAnnotated().getJavaMember().getName() + " may not explicitly declare a bean name"); } } } @Override protected void defineNullable() { defineNullable(!getAnnotated().getJavaMember().getReturnType().isPrimitive()); } @Override protected AnnotatedMethod<? super M> getSuperAnnotated() { AnnotatedMethod<M> thisMethod = getAnnotated(); for (AnnotatedMethod<? 
super M> superMethod: webBeansContext.getAnnotatedElementFactory().getFilteredAnnotatedMethods(getSuperType())) { List<AnnotatedParameter<M>> thisParameters = thisMethod.getParameters(); if (thisMethod.getJavaMember().getName().equals(superMethod.getJavaMember().getName()) && thisMethod.getBaseType().equals(superMethod.getBaseType()) && thisParameters.size() == superMethod.getParameters().size()) { List<AnnotatedParameter<?>> superParameters = (List<AnnotatedParameter<?>>)(List<?>)superMethod.getParameters(); boolean match = true; for (int i = 0; i < thisParameters.size(); i++) { if (!thisParameters.get(i).getBaseType().equals(superParameters.get(i).getBaseType())) { match = false; break; } } if (match) { return superMethod; } } } return null; } } private abstract static class AnnotatedMemberBeanAttributesBuilder<M, A extends AnnotatedMember<M>> extends BeanAttributesBuilder<M, A> { protected AnnotatedMemberBeanAttributesBuilder(WebBeansContext webBeansContext, A annotated) { super(webBeansContext, annotated); } protected AnnotatedType<? super M> getSuperType() { Class<? super M> superclass = getAnnotated().getDeclaringType().getJavaClass().getSuperclass(); if (superclass == null) { return null; } return webBeansContext.getAnnotatedElementFactory().getAnnotatedType(superclass); } } }
OWB-1182 remove unused getType() method git-svn-id: 6e2e506005f11016269006bf59d22f905406eeba@1798863 13f79535-47bb-0310-9956-ffa450edef68
webbeans-impl/src/main/java/org/apache/webbeans/component/creation/BeanAttributesBuilder.java
OWB-1182 remove unused getType() method
Java
apache-2.0
ba2d181bb6409ff1ab022dc51e6927f9cfc4e7bb
0
devemux86/graphhopper,ammagamma/graphhopper,boldtrn/graphhopper,routexl/graphhopper,devemux86/graphhopper,graphhopper/graphhopper,fbonzon/graphhopper,boldtrn/graphhopper,fbonzon/graphhopper,ammagamma/graphhopper,fbonzon/graphhopper,fbonzon/graphhopper,boldtrn/graphhopper,don-philipe/graphhopper,routexl/graphhopper,don-philipe/graphhopper,routexl/graphhopper,ammagamma/graphhopper,boldtrn/graphhopper,don-philipe/graphhopper,graphhopper/graphhopper,graphhopper/graphhopper,ammagamma/graphhopper,routexl/graphhopper,devemux86/graphhopper,don-philipe/graphhopper,devemux86/graphhopper,graphhopper/graphhopper
/* * Licensed to GraphHopper GmbH under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper GmbH licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.routing.weighting; import com.graphhopper.routing.profiles.DecimalEncodedValue; import com.graphhopper.routing.profiles.TurnCost; import com.graphhopper.routing.util.FlagEncoder; import com.graphhopper.routing.util.HintsMap; import com.graphhopper.storage.IntsRef; import com.graphhopper.storage.TurnCostStorage; import com.graphhopper.util.EdgeIterator; import com.graphhopper.util.EdgeIteratorState; import static com.graphhopper.routing.profiles.TurnCost.EV_SUFFIX; import static com.graphhopper.routing.util.EncodingManager.getKey; /** * Provides methods to retrieve turn costs for a specific turn. 
* * @author Karl Hübner * @author Peter Karich */ public class TurnWeighting implements Weighting { public static final int INFINITE_U_TURN_COSTS = -1; private final DecimalEncodedValue turnCostEnc; private final TurnCostStorage turnCostStorage; private final Weighting superWeighting; private final double uTurnCosts; private final IntsRef tcFlags = TurnCost.createFlags(); public TurnWeighting(Weighting superWeighting, TurnCostStorage turnCostStorage) { this(superWeighting, turnCostStorage, INFINITE_U_TURN_COSTS); } /** * @param superWeighting the weighting that is wrapped by this {@link TurnWeighting} and used to calculate the * edge weights for example * @param turnCostStorage the turn cost storage to be used * @param uTurnCosts the cost of a u-turn in seconds, this value will be applied to all u-turn costs no matter * whether or not turnCostExt contains explicit values for these turns. */ public TurnWeighting(Weighting superWeighting, TurnCostStorage turnCostStorage, double uTurnCosts) { if (turnCostStorage == null) { throw new RuntimeException("No storage set to calculate turn weight"); } FlagEncoder encoder = superWeighting.getFlagEncoder(); String key = getKey(encoder.toString(), EV_SUFFIX); // if null the TurnWeighting can be still useful for edge-based routing this.turnCostEnc = encoder.hasEncodedValue(key) ? encoder.getDecimalEncodedValue(key) : null; this.superWeighting = superWeighting; this.turnCostStorage = turnCostStorage; this.uTurnCosts = uTurnCosts < 0 ? Double.POSITIVE_INFINITY : uTurnCosts; } public double getUTurnCosts() { return uTurnCosts; } @Override public double getMinWeight(double distance) { return superWeighting.getMinWeight(distance); } @Override public double calcWeight(EdgeIteratorState edgeState, boolean reverse, int prevOrNextEdgeId) { double weight = superWeighting.calcWeight(edgeState, reverse, prevOrNextEdgeId); if (!EdgeIterator.Edge.isValid(prevOrNextEdgeId)) return weight; final int origEdgeId = reverse ? 
edgeState.getOrigEdgeLast() : edgeState.getOrigEdgeFirst(); double turnCosts = reverse ? calcTurnWeight(origEdgeId, edgeState.getBaseNode(), prevOrNextEdgeId) : calcTurnWeight(prevOrNextEdgeId, edgeState.getBaseNode(), origEdgeId); return weight + turnCosts; } @Override public long calcMillis(EdgeIteratorState edgeState, boolean reverse, int prevOrNextEdgeId) { long millis = superWeighting.calcMillis(edgeState, reverse, prevOrNextEdgeId); if (!EdgeIterator.Edge.isValid(prevOrNextEdgeId)) return millis; // should we also separate weighting vs. time for turn? E.g. a fast but dangerous turn - is this common? // todo: why no first/last orig edge here as in calcWeight ? final int origEdgeId = edgeState.getEdge(); long turnCostMillis = reverse ? calcTurnMillis(origEdgeId, edgeState.getBaseNode(), prevOrNextEdgeId) : calcTurnMillis(prevOrNextEdgeId, edgeState.getBaseNode(), origEdgeId); return millis + turnCostMillis; } /** * This method calculates the turn weight separately. */ public double calcTurnWeight(int edgeFrom, int nodeVia, int edgeTo) { if (!EdgeIterator.Edge.isValid(edgeFrom) || !EdgeIterator.Edge.isValid(edgeTo)) { return 0; } double tCost = 0; if (turnCostStorage.isUTurn(edgeFrom, edgeTo)) { // note that the u-turn costs overwrite any turn costs set in TurnCostStorage tCost = turnCostStorage.isUTurnAllowed(nodeVia) ? 
uTurnCosts : Double.POSITIVE_INFINITY; } else { if (turnCostEnc != null) tCost = turnCostStorage.get(turnCostEnc, tcFlags, edgeFrom, nodeVia, edgeTo); } return tCost; } public long calcTurnMillis(int inEdge, int viaNode, int outEdge) { return (long) (1000 * calcTurnWeight(inEdge, viaNode, outEdge)); } @Override public FlagEncoder getFlagEncoder() { return superWeighting.getFlagEncoder(); } @Override public boolean matches(HintsMap weightingMap) { // TODO without 'turn' in comparison return superWeighting.matches(weightingMap); } @Override public String toString() { return "turn|" + superWeighting.toString(); } @Override public String getName() { return "turn|" + superWeighting.getName(); } }
core/src/main/java/com/graphhopper/routing/weighting/TurnWeighting.java
/* * Licensed to GraphHopper GmbH under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper GmbH licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.routing.weighting; import com.graphhopper.routing.profiles.DecimalEncodedValue; import com.graphhopper.routing.profiles.TurnCost; import com.graphhopper.routing.util.FlagEncoder; import com.graphhopper.routing.util.HintsMap; import com.graphhopper.storage.IntsRef; import com.graphhopper.storage.TurnCostStorage; import com.graphhopper.util.EdgeIterator; import com.graphhopper.util.EdgeIteratorState; import static com.graphhopper.routing.profiles.TurnCost.EV_SUFFIX; import static com.graphhopper.routing.util.EncodingManager.getKey; /** * Provides methods to retrieve turn costs for a specific turn. 
* * @author Karl Hübner * @author Peter Karich */ public class TurnWeighting implements Weighting { public static final int INFINITE_U_TURN_COSTS = -1; private final DecimalEncodedValue turnCostEnc; private final TurnCostStorage turnCostStorage; private final Weighting superWeighting; private final double uTurnCosts; private final IntsRef tcFlags = TurnCost.createFlags(); public TurnWeighting(Weighting superWeighting, TurnCostStorage turnCostStorage) { this(superWeighting, turnCostStorage, INFINITE_U_TURN_COSTS); } /** * @param superWeighting the weighting that is wrapped by this {@link TurnWeighting} and used to calculate the * edge weights for example * @param turnCostStorage the turn cost storage to be used * @param uTurnCosts the cost of a u-turn in seconds, this value will be applied to all u-turn costs no matter * whether or not turnCostExt contains explicit values for these turns. */ public TurnWeighting(Weighting superWeighting, TurnCostStorage turnCostStorage, double uTurnCosts) { if (turnCostStorage == null) { throw new RuntimeException("No storage set to calculate turn weight"); } FlagEncoder encoder = superWeighting.getFlagEncoder(); String key = getKey(encoder.toString(), EV_SUFFIX); // if null the TurnWeighting can be still useful for edge-based routing this.turnCostEnc = encoder.hasEncodedValue(key) ? encoder.getDecimalEncodedValue(key) : null; this.superWeighting = superWeighting; this.turnCostStorage = turnCostStorage; this.uTurnCosts = uTurnCosts < 0 ? Double.POSITIVE_INFINITY : uTurnCosts; } public double getUTurnCosts() { return uTurnCosts; } @Override public double getMinWeight(double distance) { return superWeighting.getMinWeight(distance); } @Override public double calcWeight(EdgeIteratorState edgeState, boolean reverse, int prevOrNextEdgeId) { double weight = superWeighting.calcWeight(edgeState, reverse, prevOrNextEdgeId); if (!EdgeIterator.Edge.isValid(prevOrNextEdgeId)) return weight; final int origEdgeId = reverse ? 
edgeState.getOrigEdgeLast() : edgeState.getOrigEdgeFirst(); double turnCosts = reverse ? calcTurnWeight(origEdgeId, edgeState.getBaseNode(), prevOrNextEdgeId) : calcTurnWeight(prevOrNextEdgeId, edgeState.getBaseNode(), origEdgeId); return weight + turnCosts; } @Override public long calcMillis(EdgeIteratorState edgeState, boolean reverse, int prevOrNextEdgeId) { long millis = superWeighting.calcMillis(edgeState, reverse, prevOrNextEdgeId); if (!EdgeIterator.Edge.isValid(prevOrNextEdgeId)) return millis; // should we also separate weighting vs. time for turn? E.g. a fast but dangerous turn - is this common? // todo: why no first/last orig edge here as in calcWeight ? final int origEdgeId = edgeState.getEdge(); long turnCostsInSeconds = (long) (reverse ? calcTurnWeight(origEdgeId, edgeState.getBaseNode(), prevOrNextEdgeId) : calcTurnWeight(prevOrNextEdgeId, edgeState.getBaseNode(), origEdgeId)); return millis + 1000 * turnCostsInSeconds; } /** * This method calculates the turn weight separately. */ public double calcTurnWeight(int edgeFrom, int nodeVia, int edgeTo) { if (!EdgeIterator.Edge.isValid(edgeFrom) || !EdgeIterator.Edge.isValid(edgeTo)) { return 0; } double tCost = 0; if (turnCostStorage.isUTurn(edgeFrom, edgeTo)) { // note that the u-turn costs overwrite any turn costs set in TurnCostStorage tCost = turnCostStorage.isUTurnAllowed(nodeVia) ? uTurnCosts : Double.POSITIVE_INFINITY; } else { if (turnCostEnc != null) tCost = turnCostStorage.get(turnCostEnc, tcFlags, edgeFrom, nodeVia, edgeTo); } return tCost; } @Override public FlagEncoder getFlagEncoder() { return superWeighting.getFlagEncoder(); } @Override public boolean matches(HintsMap weightingMap) { // TODO without 'turn' in comparison return superWeighting.matches(weightingMap); } @Override public String toString() { return "turn|" + superWeighting.toString(); } @Override public String getName() { return "turn|" + superWeighting.getName(); } }
Extract TurnWeighting#calcTurnMillis
core/src/main/java/com/graphhopper/routing/weighting/TurnWeighting.java
Extract TurnWeighting#calcTurnMillis
Java
apache-2.0
60b74f424798e960f2f6bf1ea9a8a0f56ff86bc3
0
fishercoder1534/Leetcode,fishercoder1534/Leetcode,fishercoder1534/Leetcode,fishercoder1534/Leetcode,fishercoder1534/Leetcode
package com.fishercoder.solutions; public class _186 { public static class Solution1 { public void reverseWords(char[] s) { // Three steps to reverse // 1, reverse the whole sentence reverse(s, 0, s.length - 1); // 2, reverse each word int start = 0; for (int i = 0; i < s.length; i++) { if (s[i] == ' ') { reverse(s, start, i - 1); start = i + 1; } } // 3, reverse the last word, if there is only one word this will solve the corner case reverse(s, start, s.length - 1); } private void reverse(char[] s, int start, int end) { while (start < end) { char temp = s[start]; s[start++] = s[end]; s[end--] = temp; } } } public static class Solution2 { public void reverseWords(char[] s) { reverse(s, 0, s.length); for (int i = 0; i < s.length; i++) { int start = i; while (i < s.length && s[i] != ' ') { i++; } reverse(s, start, i); } } private void reverse(char[] chars, int start, int end) { int left = start; int right = end - 1; while (left < right) { char tmp = chars[left]; chars[left] = chars[right]; chars[right] = tmp; left++; right--; } } } }
src/main/java/com/fishercoder/solutions/_186.java
package com.fishercoder.solutions; /** * 186. Reverse Words in a String II Given an input string, reverse the string word by word. A word is defined as a sequence of non-space characters. The input string does not contain leading or trailing spaces and the words are always separated by a single space. For example, Given s = "the sky is blue", return "blue is sky the". Could you do it in-place without allocating extra space? */ public class _186 { public static class Solution1 { public void reverseWords(char[] s) { // Three steps to reverse // 1, reverse the whole sentence reverse(s, 0, s.length - 1); // 2, reverse each word int start = 0; for (int i = 0; i < s.length; i++) { if (s[i] == ' ') { reverse(s, start, i - 1); start = i + 1; } } // 3, reverse the last word, if there is only one word this will solve the corner case reverse(s, start, s.length - 1); } private void reverse(char[] s, int start, int end) { while (start < end) { char temp = s[start]; s[start++] = s[end]; s[end--] = temp; } } } }
add a solution for 186
src/main/java/com/fishercoder/solutions/_186.java
add a solution for 186
Java
apache-2.0
6a2f6906c006ab92d6f3e29314419ab3b019ab60
0
yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,yumingjuan/selenium,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,yumingjuan/selenium,yumingjuan/selenium
/* Copyright 2011 WebDriver committers Copyright 2011 Software Freedom Conservancy. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium; import org.junit.Test; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JUnit4TestBase; import org.openqa.selenium.testing.TestUtilities; import org.openqa.selenium.testing.drivers.SauceDriver; import java.util.concurrent.Callable; import java.util.logging.Logger; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.openqa.selenium.TestWaiter.waitFor; import static org.openqa.selenium.testing.Ignore.Driver.ANDROID; import static org.openqa.selenium.testing.Ignore.Driver.CHROME; import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT; import static org.openqa.selenium.testing.Ignore.Driver.IE; import static org.openqa.selenium.testing.Ignore.Driver.IPHONE; import static org.openqa.selenium.testing.Ignore.Driver.OPERA; import static org.openqa.selenium.testing.Ignore.Driver.SELENESE; @Ignore(value = {ANDROID, CHROME, HTMLUNIT, IPHONE, OPERA, SELENESE}, reason = "Not yet implemented.") public class WindowTest extends JUnit4TestBase { private static Logger log = Logger.getLogger(WindowTest.class.getName()); @Test public void testGetsTheSizeOfTheCurrentWindow() { Dimension size = driver.manage().window().getSize(); 
assertThat(size.width, is(greaterThan(0))); assertThat(size.height, is(greaterThan(0))); } @Test public void testSetsTheSizeOfTheCurrentWindow() { WebDriver.Window window = driver.manage().window(); Dimension size = window.getSize(); // resize relative to the initial size, since we don't know what it is Dimension targetSize = new Dimension(size.width - 20, size.height - 20); window.setSize(targetSize); Dimension newSize = window.getSize(); assertEquals(targetSize.width, newSize.width); assertEquals(targetSize.height, newSize.height); } @Ignore(IE) @Test public void testGetsThePositionOfTheCurrentWindow() { Point position = driver.manage().window().getPosition(); assertThat(position.x, is(greaterThanOrEqualTo(0))); assertThat(position.y, is(greaterThanOrEqualTo(0))); } @Test public void testSetsThePositionOfTheCurrentWindow() throws InterruptedException { WebDriver.Window window = driver.manage().window(); Point position = window.getPosition(); // Some Linux window managers start taking liberties wrt window positions when moving the window // off-screen. Therefore, try to stay on-screen. Hopefully you have more than 210 px, // or this may fail. window.setSize(new Dimension(200, 200)); Point targetPosition = new Point(position.x + 10, position.y + 10); window.setPosition(targetPosition); waitFor(xEqual(driver, targetPosition)); waitFor(yEqual(driver, targetPosition)); } @Ignore(value = {ANDROID, CHROME, HTMLUNIT, IE, IPHONE, OPERA, SELENESE}) @Test public void testCanMaximizeTheWindow() throws InterruptedException { if(SauceDriver.shouldUseSauce() && TestUtilities.getEffectivePlatform().is(Platform.LINUX)) { // This test requires a window manager on Linux, and Sauce currently doesn't have one. 
return; } WebDriver.Window window = driver.manage().window(); Dimension targetSize = new Dimension(200, 200); window.setSize(targetSize); waitFor(windowHeightToEqual(driver,targetSize)); waitFor(windowWidthToEqual(driver, targetSize)); Dimension size = window.getSize(); window.maximize(); waitFor(windowWidthToBeGreaterThan(driver, size)); waitFor(windowHeightToBeGreaterThan(driver, size)); } private Callable<Boolean> windowWidthToEqual(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); if(newSize.width == size.width) { return true; } return null; } }; } private Callable<Boolean> windowHeightToEqual(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); if(newSize.height == size.height) { return true; } return null; } }; } private Callable<Boolean> windowWidthToBeGreaterThan(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); log.info("waiting for width, Current dimensions are " + newSize); if(newSize.width != size.width) { return true; } return null; } }; } private Callable<Boolean> windowHeightToBeGreaterThan(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); log.info("waiting for height, Current dimensions are " + newSize); if(newSize.height != size.height) { return true; } return null; } }; } private Callable<Boolean> xEqual(final WebDriver driver, final Point targetPosition) { return new Callable<Boolean>() { public Boolean call() throws Exception { Point newPosition = driver.manage().window().getPosition(); if(newPosition.x == targetPosition.x) { return true; } 
return null; } }; } private Callable<Boolean> yEqual(final WebDriver driver, final Point targetPosition) { return new Callable<Boolean>() { public Boolean call() throws Exception { Point newPosition = driver.manage().window().getPosition(); if(newPosition.y == targetPosition.y) { return true; } return null; } }; } }
java/client/test/org/openqa/selenium/WindowTest.java
/* Copyright 2011 WebDriver committers Copyright 2011 Software Freedom Conservancy. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium; import java.util.concurrent.Callable; import java.util.logging.Logger; import org.junit.Test; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JUnit4TestBase; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.openqa.selenium.testing.Ignore.Driver.ANDROID; import static org.openqa.selenium.testing.Ignore.Driver.CHROME; import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT; import static org.openqa.selenium.testing.Ignore.Driver.IE; import static org.openqa.selenium.testing.Ignore.Driver.IPHONE; import static org.openqa.selenium.testing.Ignore.Driver.OPERA; import static org.openqa.selenium.testing.Ignore.Driver.SELENESE; import static org.openqa.selenium.TestWaiter.waitFor; @Ignore(value = {ANDROID, CHROME, HTMLUNIT, IPHONE, OPERA, SELENESE}, reason = "Not yet implemented.") public class WindowTest extends JUnit4TestBase { private static Logger log = Logger.getLogger(WindowTest.class.getName()); @Test public void testGetsTheSizeOfTheCurrentWindow() { Dimension size = driver.manage().window().getSize(); assertThat(size.width, is(greaterThan(0))); assertThat(size.height, is(greaterThan(0))); } @Test public 
void testSetsTheSizeOfTheCurrentWindow() { WebDriver.Window window = driver.manage().window(); Dimension size = window.getSize(); // resize relative to the initial size, since we don't know what it is Dimension targetSize = new Dimension(size.width - 20, size.height - 20); window.setSize(targetSize); Dimension newSize = window.getSize(); assertEquals(targetSize.width, newSize.width); assertEquals(targetSize.height, newSize.height); } @Ignore(IE) @Test public void testGetsThePositionOfTheCurrentWindow() { Point position = driver.manage().window().getPosition(); assertThat(position.x, is(greaterThanOrEqualTo(0))); assertThat(position.y, is(greaterThanOrEqualTo(0))); } @Test public void testSetsThePositionOfTheCurrentWindow() throws InterruptedException { WebDriver.Window window = driver.manage().window(); Point position = window.getPosition(); // Some Linux window managers start taking liberties wrt window positions when moving the window // off-screen. Therefore, try to stay on-screen. Hopefully you have more than 210 px, // or this may fail. 
window.setSize(new Dimension(200, 200)); Point targetPosition = new Point(position.x + 10, position.y + 10); window.setPosition(targetPosition); waitFor(xEqual(driver, targetPosition)); waitFor(yEqual(driver, targetPosition)); } @Ignore(value = {ANDROID, CHROME, HTMLUNIT, IE, IPHONE, OPERA, SELENESE}) @Test public void testCanMaximizeTheWindow() throws InterruptedException { WebDriver.Window window = driver.manage().window(); Dimension targetSize = new Dimension(200, 200); window.setSize(targetSize); waitFor(windowHeightToEqual(driver,targetSize)); waitFor(windowWidthToEqual(driver, targetSize)); Dimension size = window.getSize(); window.maximize(); waitFor(windowWidthToBeGreaterThan(driver, size)); waitFor(windowHeightToBeGreaterThan(driver, size)); } private Callable<Boolean> windowWidthToEqual(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); if(newSize.width == size.width) { return true; } return null; } }; } private Callable<Boolean> windowHeightToEqual(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); if(newSize.height == size.height) { return true; } return null; } }; } private Callable<Boolean> windowWidthToBeGreaterThan(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); log.info("waiting for width, Current dimensions are " + newSize); if(newSize.width != size.width) { return true; } return null; } }; } private Callable<Boolean> windowHeightToBeGreaterThan(final WebDriver driver, final Dimension size) { return new Callable<Boolean>() { public Boolean call() throws Exception { Dimension newSize = driver.manage().window().getSize(); log.info("waiting for height, Current dimensions 
are " + newSize); if(newSize.height != size.height) { return true; } return null; } }; } private Callable<Boolean> xEqual(final WebDriver driver, final Point targetPosition) { return new Callable<Boolean>() { public Boolean call() throws Exception { Point newPosition = driver.manage().window().getPosition(); if(newPosition.x == targetPosition.x) { return true; } return null; } }; } private Callable<Boolean> yEqual(final WebDriver driver, final Point targetPosition) { return new Callable<Boolean>() { public Boolean call() throws Exception { Point newPosition = driver.manage().window().getPosition(); if(newPosition.y == targetPosition.y) { return true; } return null; } }; } }
JariBakken: Ignore testCanMaximizeTheWindow() on Sauce + Linux. This test requires a window manager to function, and Sauce currently doesn't use one. git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@16473 07704840-8298-11de-bf8c-fd130f914ac9
java/client/test/org/openqa/selenium/WindowTest.java
JariBakken: Ignore testCanMaximizeTheWindow() on Sauce + Linux.
Java
apache-2.0
c5db5d55da4b62cdf7a18c201f33dda661963d5c
0
aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.support.v4.view; import android.content.Context; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.os.Parcel; import android.os.Parcelable; import android.os.SystemClock; import android.support.v4.os.ParcelableCompat; import android.support.v4.os.ParcelableCompatCreatorCallbacks; import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat; import android.support.v4.widget.EdgeEffectCompat; import android.util.AttributeSet; import android.util.FloatMath; import android.util.Log; import android.view.FocusFinder; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.VelocityTracker; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewParent; import android.view.accessibility.AccessibilityEvent; import android.view.animation.Interpolator; import android.widget.Scroller; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; /** * Layout manager that allows the user to flip left and right * through pages of data. 
You supply an implementation of a * {@link PagerAdapter} to generate the pages that the view shows. * * <p>Note this class is currently under early design and * development. The API will likely change in later updates of * the compatibility library, requiring changes to the source code * of apps when they are compiled against the newer version.</p> * * <p>ViewPager is most often used in conjunction with {@link android.app.Fragment}, * which is a convenient way to supply and manage the lifecycle of each page. * There are standard adapters implemented for using fragments with the ViewPager, * which cover the most common use cases. These are * {@link android.support.v4.app.FragmentPagerAdapter}, * {@link android.support.v4.app.FragmentStatePagerAdapter}, * {@link android.support.v13.app.FragmentPagerAdapter}, and * {@link android.support.v13.app.FragmentStatePagerAdapter}; each of these * classes have simple code showing how to build a full user interface * with them. * * <p>Here is a more complicated example of ViewPager, using it in conjuction * with {@link android.app.ActionBar} tabs. You can find other examples of using * ViewPager in the API 4+ Support Demos and API 13+ Support Demos sample code. 
* * {@sample development/samples/Support13Demos/src/com/example/android/supportv13/app/ActionBarTabsPager.java * complete} */ public class ViewPager extends ViewGroup { private static final String TAG = "ViewPager"; private static final boolean DEBUG = false; private static final boolean USE_CACHE = false; private static final int DEFAULT_OFFSCREEN_PAGES = 1; private static final int MAX_SETTLE_DURATION = 600; // ms private static final int MIN_DISTANCE_FOR_FLING = 25; // dips private static final int DEFAULT_GUTTER_SIZE = 16; // dips private static final int[] LAYOUT_ATTRS = new int[] { android.R.attr.layout_gravity }; static class ItemInfo { Object object; int position; boolean scrolling; float widthFactor; float offset; } private static final Comparator<ItemInfo> COMPARATOR = new Comparator<ItemInfo>(){ @Override public int compare(ItemInfo lhs, ItemInfo rhs) { return lhs.position - rhs.position; } }; private static final Interpolator sInterpolator = new Interpolator() { public float getInterpolation(float t) { t -= 1.0f; return t * t * t * t * t + 1.0f; } }; private final ArrayList<ItemInfo> mItems = new ArrayList<ItemInfo>(); private final ItemInfo mTempItem = new ItemInfo(); private final Rect mTempRect = new Rect(); private PagerAdapter mAdapter; private int mCurItem; // Index of currently displayed page. private int mRestoredCurItem = -1; private Parcelable mRestoredAdapterState = null; private ClassLoader mRestoredClassLoader = null; private Scroller mScroller; private PagerObserver mObserver; private int mPageMargin; private Drawable mMarginDrawable; private int mTopPageBounds; private int mBottomPageBounds; // Offsets of the first and last items, if known. // Set during population, used to determine if we are at the beginning // or end of the pager data set during touch scrolling. 
private float mFirstOffset = -Float.MAX_VALUE; private float mLastOffset = Float.MAX_VALUE; private int mChildWidthMeasureSpec; private int mChildHeightMeasureSpec; private boolean mInLayout; private boolean mScrollingCacheEnabled; private boolean mPopulatePending; private int mOffscreenPageLimit = DEFAULT_OFFSCREEN_PAGES; private boolean mIsBeingDragged; private boolean mIsUnableToDrag; private boolean mIgnoreGutter; private int mDefaultGutterSize; private int mGutterSize; private int mTouchSlop; private float mInitialMotionX; /** * Position of the last motion event. */ private float mLastMotionX; private float mLastMotionY; /** * ID of the active pointer. This is used to retain consistency during * drags/flings if multiple pointers are used. */ private int mActivePointerId = INVALID_POINTER; /** * Sentinel value for no current active pointer. * Used by {@link #mActivePointerId}. */ private static final int INVALID_POINTER = -1; /** * Determines speed during touch scrolling */ private VelocityTracker mVelocityTracker; private int mMinimumVelocity; private int mMaximumVelocity; private int mFlingDistance; private int mCloseEnough; private int mSeenPositionMin; private int mSeenPositionMax; // If the pager is at least this close to its final position, complete the scroll // on touch down and let the user interact with the content inside instead of // "catching" the flinging pager. 
private static final int CLOSE_ENOUGH = 2; // dp private boolean mFakeDragging; private long mFakeDragBeginTime; private EdgeEffectCompat mLeftEdge; private EdgeEffectCompat mRightEdge; private boolean mFirstLayout = true; private boolean mNeedCalculatePageOffsets = false; private boolean mCalledSuper; private int mDecorChildCount; private OnPageChangeListener mOnPageChangeListener; private OnPageChangeListener mInternalPageChangeListener; private OnAdapterChangeListener mAdapterChangeListener; private PageTransformer mPageTransformer; private Method mSetChildrenDrawingOrderEnabled; private static final int DRAW_ORDER_DEFAULT = 0; private static final int DRAW_ORDER_FORWARD = 1; private static final int DRAW_ORDER_REVERSE = 2; private int mDrawingOrder; private ArrayList<View> mDrawingOrderedChildren; private static final ViewPositionComparator sPositionComparator = new ViewPositionComparator(); /** * Indicates that the pager is in an idle, settled state. The current page * is fully in view and no animation is in progress. */ public static final int SCROLL_STATE_IDLE = 0; /** * Indicates that the pager is currently being dragged by the user. */ public static final int SCROLL_STATE_DRAGGING = 1; /** * Indicates that the pager is in the process of settling to a final position. */ public static final int SCROLL_STATE_SETTLING = 2; private int mScrollState = SCROLL_STATE_IDLE; /** * Callback interface for responding to changing state of the selected page. */ public interface OnPageChangeListener { /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. * * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param positionOffset Value from [0, 1) indicating the offset from the page at position. 
* @param positionOffsetPixels Value in pixels indicating the offset from position. */ public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels); /** * This method will be invoked when a new page becomes selected. Animation is not * necessarily complete. * * @param position Position index of the new selected page. */ public void onPageSelected(int position); /** * Called when the scroll state changes. Useful for discovering when the user * begins dragging, when the pager is automatically settling to the current page, * or when it is fully stopped/idle. * * @param state The new scroll state. * @see ViewPager#SCROLL_STATE_IDLE * @see ViewPager#SCROLL_STATE_DRAGGING * @see ViewPager#SCROLL_STATE_SETTLING */ public void onPageScrollStateChanged(int state); } /** * Simple implementation of the {@link OnPageChangeListener} interface with stub * implementations of each method. Extend this if you do not intend to override * every method of {@link OnPageChangeListener}. */ public static class SimpleOnPageChangeListener implements OnPageChangeListener { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { // This space for rent } @Override public void onPageSelected(int position) { // This space for rent } @Override public void onPageScrollStateChanged(int state) { // This space for rent } } /** * A PageTransformer is invoked whenever a visible/attached page is scrolled. * This offers an opportunity for the application to apply a custom transformation * to the page views using animation properties. * * <p>As property animation is only supported as of Android 3.0 and forward, * setting a PageTransformer on a ViewPager on earlier platform versions will * be ignored.</p> */ public interface PageTransformer { /** * Apply a property transformation to the given page. 
* * @param page Apply the transformation to this page * @param position Position of page relative to the current front-and-center * position of the pager. 0 is front and center. 1 is one full * page position to the right, and -1 is one page position to the left. */ public void transformPage(View page, float position); } /** * Used internally to monitor when adapters are switched. */ interface OnAdapterChangeListener { public void onAdapterChanged(PagerAdapter oldAdapter, PagerAdapter newAdapter); } /** * Used internally to tag special types of child views that should be added as * pager decorations by default. */ interface Decor {} public ViewPager(Context context) { super(context); initViewPager(); } public ViewPager(Context context, AttributeSet attrs) { super(context, attrs); initViewPager(); } void initViewPager() { setWillNotDraw(false); setDescendantFocusability(FOCUS_AFTER_DESCENDANTS); setFocusable(true); final Context context = getContext(); mScroller = new Scroller(context, sInterpolator); final ViewConfiguration configuration = ViewConfiguration.get(context); mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration); mMinimumVelocity = configuration.getScaledMinimumFlingVelocity(); mMaximumVelocity = configuration.getScaledMaximumFlingVelocity(); mLeftEdge = new EdgeEffectCompat(context); mRightEdge = new EdgeEffectCompat(context); final float density = context.getResources().getDisplayMetrics().density; mFlingDistance = (int) (MIN_DISTANCE_FOR_FLING * density); mCloseEnough = (int) (CLOSE_ENOUGH * density); mDefaultGutterSize = (int) (DEFAULT_GUTTER_SIZE * density); ViewCompat.setAccessibilityDelegate(this, new MyAccessibilityDelegate()); if (ViewCompat.getImportantForAccessibility(this) == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) { ViewCompat.setImportantForAccessibility(this, ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_YES); } } private void setScrollState(int newState) { if (mScrollState == newState) { return; } mScrollState = 
newState; if (newState == SCROLL_STATE_DRAGGING) { mSeenPositionMin = mSeenPositionMax = -1; } if (mPageTransformer != null) { // PageTransformers can do complex things that benefit from hardware layers. enableLayers(newState != SCROLL_STATE_IDLE); } if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrollStateChanged(newState); } } /** * Set a PagerAdapter that will supply views for this pager as needed. * * @param adapter Adapter to use */ public void setAdapter(PagerAdapter adapter) { if (mAdapter != null) { mAdapter.unregisterDataSetObserver(mObserver); mAdapter.startUpdate(this); for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); mAdapter.destroyItem(this, ii.position, ii.object); } mAdapter.finishUpdate(this); mItems.clear(); removeNonDecorViews(); mCurItem = 0; scrollTo(0, 0); } final PagerAdapter oldAdapter = mAdapter; mAdapter = adapter; if (mAdapter != null) { if (mObserver == null) { mObserver = new PagerObserver(); } mAdapter.registerDataSetObserver(mObserver); mPopulatePending = false; mFirstLayout = true; if (mRestoredCurItem >= 0) { mAdapter.restoreState(mRestoredAdapterState, mRestoredClassLoader); setCurrentItemInternal(mRestoredCurItem, false, true); mRestoredCurItem = -1; mRestoredAdapterState = null; mRestoredClassLoader = null; } else { populate(); } } if (mAdapterChangeListener != null && oldAdapter != adapter) { mAdapterChangeListener.onAdapterChanged(oldAdapter, adapter); } } private void removeNonDecorViews() { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) { removeViewAt(i); i--; } } } /** * Retrieve the current adapter supplying pages. 
* * @return The currently registered PagerAdapter */ public PagerAdapter getAdapter() { return mAdapter; } void setOnAdapterChangeListener(OnAdapterChangeListener listener) { mAdapterChangeListener = listener; } private int getClientWidth() { return getWidth() - getPaddingLeft() - getPaddingRight(); } /** * Set the currently selected page. If the ViewPager has already been through its first * layout with its current adapter there will be a smooth animated transition between * the current item and the specified item. * * @param item Item index to select */ public void setCurrentItem(int item) { mPopulatePending = false; setCurrentItemInternal(item, !mFirstLayout, false); } /** * Set the currently selected page. * * @param item Item index to select * @param smoothScroll True to smoothly scroll to the new item, false to transition immediately */ public void setCurrentItem(int item, boolean smoothScroll) { mPopulatePending = false; setCurrentItemInternal(item, smoothScroll, false); } public int getCurrentItem() { return mCurItem; } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always) { setCurrentItemInternal(item, smoothScroll, always, 0); } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always, int velocity) { if (mAdapter == null || mAdapter.getCount() <= 0) { setScrollingCacheEnabled(false); return; } if (!always && mCurItem == item && mItems.size() != 0) { setScrollingCacheEnabled(false); return; } if (item < 0) { item = 0; } else if (item >= mAdapter.getCount()) { item = mAdapter.getCount() - 1; } final int pageLimit = mOffscreenPageLimit; if (item > (mCurItem + pageLimit) || item < (mCurItem - pageLimit)) { // We are doing a jump by more than one page. To avoid // glitches, we want to keep all current pages in the view // until the scroll ends. 
for (int i=0; i<mItems.size(); i++) { mItems.get(i).scrolling = true; } } final boolean dispatchSelected = mCurItem != item; populate(item); final ItemInfo curInfo = infoForPosition(item); int destX = 0; if (curInfo != null) { final int width = getClientWidth(); destX = (int) (width * Math.max(mFirstOffset, Math.min(curInfo.offset, mLastOffset))); } if (smoothScroll) { smoothScrollTo(destX, 0, velocity); if (dispatchSelected && mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(item); } if (dispatchSelected && mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(item); } } else { if (dispatchSelected && mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(item); } if (dispatchSelected && mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(item); } completeScroll(); scrollTo(destX, 0); } } /** * Set a listener that will be invoked whenever the page changes or is incrementally * scrolled. See {@link OnPageChangeListener}. * * @param listener Listener to set */ public void setOnPageChangeListener(OnPageChangeListener listener) { mOnPageChangeListener = listener; } /** * Set a {@link PageTransformer} that will be called for each attached page whenever * the scroll position is changed. This allows the application to apply custom property * transformations to each page, overriding the default sliding look and feel. * * <p><em>Note:</em> Prior to Android 3.0 the property animation APIs did not exist. * As a result, setting a PageTransformer prior to Android 3.0 (API 11) will have no effect.</p> * * @param reverseDrawingOrder true if the supplied PageTransformer requires page views * to be drawn from last to first instead of first to last. 
* @param transformer PageTransformer that will modify each page's animation properties */ public void setPageTransformer(boolean reverseDrawingOrder, PageTransformer transformer) { if (Build.VERSION.SDK_INT >= 11) { final boolean hasTransformer = transformer != null; final boolean needsPopulate = hasTransformer != (mPageTransformer != null); mPageTransformer = transformer; setChildrenDrawingOrderEnabledCompat(hasTransformer); if (hasTransformer) { mDrawingOrder = reverseDrawingOrder ? DRAW_ORDER_REVERSE : DRAW_ORDER_FORWARD; } else { mDrawingOrder = DRAW_ORDER_DEFAULT; } if (needsPopulate) populate(); } } void setChildrenDrawingOrderEnabledCompat(boolean enable) { if (mSetChildrenDrawingOrderEnabled == null) { try { mSetChildrenDrawingOrderEnabled = ViewGroup.class.getDeclaredMethod( "setChildrenDrawingOrderEnabled", new Class[] { Boolean.TYPE }); } catch (NoSuchMethodException e) { Log.e(TAG, "Can't find setChildrenDrawingOrderEnabled", e); } } try { mSetChildrenDrawingOrderEnabled.invoke(this, enable); } catch (Exception e) { Log.e(TAG, "Error changing children drawing order", e); } } @Override protected int getChildDrawingOrder(int childCount, int i) { final int index = mDrawingOrder == DRAW_ORDER_REVERSE ? childCount - 1 - i : i; final int result = ((LayoutParams) mDrawingOrderedChildren.get(index).getLayoutParams()).childIndex; return result; } /** * Set a separate OnPageChangeListener for internal use by the support library. * * @param listener Listener to set * @return The old listener that was set, if any. */ OnPageChangeListener setInternalPageChangeListener(OnPageChangeListener listener) { OnPageChangeListener oldListener = mInternalPageChangeListener; mInternalPageChangeListener = listener; return oldListener; } /** * Returns the number of pages that will be retained to either side of the * current page in the view hierarchy in an idle state. Defaults to 1. 
* * @return How many pages will be kept offscreen on either side * @see #setOffscreenPageLimit(int) */ public int getOffscreenPageLimit() { return mOffscreenPageLimit; } /** * Set the number of pages that should be retained to either side of the * current page in the view hierarchy in an idle state. Pages beyond this * limit will be recreated from the adapter when needed. * * <p>This is offered as an optimization. If you know in advance the number * of pages you will need to support or have lazy-loading mechanisms in place * on your pages, tweaking this setting can have benefits in perceived smoothness * of paging animations and interaction. If you have a small number of pages (3-4) * that you can keep active all at once, less time will be spent in layout for * newly created view subtrees as the user pages back and forth.</p> * * <p>You should keep this limit low, especially if your pages have complex layouts. * This setting defaults to 1.</p> * * @param limit How many pages will be kept offscreen in an idle state. */ public void setOffscreenPageLimit(int limit) { if (limit < DEFAULT_OFFSCREEN_PAGES) { Log.w(TAG, "Requested offscreen page limit " + limit + " too small; defaulting to " + DEFAULT_OFFSCREEN_PAGES); limit = DEFAULT_OFFSCREEN_PAGES; } if (limit != mOffscreenPageLimit) { mOffscreenPageLimit = limit; populate(); } } /** * Set the margin between pages. * * @param marginPixels Distance between adjacent pages in pixels * @see #getPageMargin() * @see #setPageMarginDrawable(Drawable) * @see #setPageMarginDrawable(int) */ public void setPageMargin(int marginPixels) { final int oldMargin = mPageMargin; mPageMargin = marginPixels; final int width = getWidth(); recomputeScrollPosition(width, width, marginPixels, oldMargin); requestLayout(); } /** * Return the margin between pages. * * @return The size of the margin in pixels */ public int getPageMargin() { return mPageMargin; } /** * Set a drawable that will be used to fill the margin between pages. 
     *
     * @param d Drawable to display between pages
     */
    public void setPageMarginDrawable(Drawable d) {
        mMarginDrawable = d;
        if (d != null) refreshDrawableState();
        // Skip the draw pass entirely when there is no margin drawable to paint.
        setWillNotDraw(d == null);
        invalidate();
    }

    /**
     * Set a drawable that will be used to fill the margin between pages.
     *
     * @param resId Resource ID of a drawable to display between pages
     */
    public void setPageMarginDrawable(int resId) {
        setPageMarginDrawable(getContext().getResources().getDrawable(resId));
    }

    @Override
    protected boolean verifyDrawable(Drawable who) {
        // The margin drawable is owned by this view, so treat it as verified.
        return super.verifyDrawable(who) || who == mMarginDrawable;
    }

    @Override
    protected void drawableStateChanged() {
        super.drawableStateChanged();
        final Drawable d = mMarginDrawable;
        if (d != null && d.isStateful()) {
            d.setState(getDrawableState());
        }
    }

    // We want the duration of the page snap animation to be influenced by the distance that
    // the screen has to travel, however, we don't want this duration to be effected in a
    // purely linear fashion. Instead, we use this method to moderate the effect that the distance
    // of travel has on the overall snap duration.
    float distanceInfluenceForSnapDuration(float f) {
        f -= 0.5f; // center the values about 0.
        f *= 0.3f * Math.PI / 2.0f;
        return (float) Math.sin(f);
    }

    /**
     * Like {@link View#scrollBy}, but scroll smoothly instead of immediately.
     *
     * @param x the number of pixels to scroll by on the X axis
     * @param y the number of pixels to scroll by on the Y axis
     */
    void smoothScrollTo(int x, int y) {
        smoothScrollTo(x, y, 0);
    }

    /**
     * Like {@link View#scrollBy}, but scroll smoothly instead of immediately.
     *
     * @param x the number of pixels to scroll by on the X axis
     * @param y the number of pixels to scroll by on the Y axis
     * @param velocity the velocity associated with a fling, if applicable. (0 otherwise)
     */
    void smoothScrollTo(int x, int y, int velocity) {
        if (getChildCount() == 0) {
            // Nothing to do.
            setScrollingCacheEnabled(false);
            return;
        }
        int sx = getScrollX();
        int sy = getScrollY();
        int dx = x - sx;
        int dy = y - sy;
        if (dx == 0 && dy == 0) {
            // Already at the target; settle immediately without animating.
            completeScroll();
            populate();
            setScrollState(SCROLL_STATE_IDLE);
            return;
        }

        setScrollingCacheEnabled(true);
        setScrollState(SCROLL_STATE_SETTLING);

        final int width = getClientWidth();
        final int halfWidth = width / 2;
        final float distanceRatio = Math.min(1f, 1.0f * Math.abs(dx) / width);
        final float distance = halfWidth + halfWidth *
                distanceInfluenceForSnapDuration(distanceRatio);

        int duration = 0;
        velocity = Math.abs(velocity);
        if (velocity > 0) {
            duration = 4 * Math.round(1000 * Math.abs(distance / velocity));
        } else {
            // No fling velocity: base the duration on how many pages we travel.
            final float pageWidth = width * mAdapter.getPageWidth(mCurItem);
            final float pageDelta = (float) Math.abs(dx) / (pageWidth + mPageMargin);
            duration = (int) ((pageDelta + 1) * 100);
        }
        duration = Math.min(duration, MAX_SETTLE_DURATION);

        mScroller.startScroll(sx, sy, dx, dy, duration);
        ViewCompat.postInvalidateOnAnimation(this);
    }

    // Creates the ItemInfo record for the page at the given adapter position and inserts it
    // at the given index in mItems (or appends when the index is out of range).
    ItemInfo addNewItem(int position, int index) {
        ItemInfo ii = new ItemInfo();
        ii.position = position;
        ii.object = mAdapter.instantiateItem(this, position);
        ii.widthFactor = mAdapter.getPageWidth(position);
        if (index < 0 || index >= mItems.size()) {
            mItems.add(ii);
        } else {
            mItems.add(index, ii);
        }
        return ii;
    }

    void dataSetChanged() {
        // This method only gets called if our observer is attached, so mAdapter is non-null.
        // Repopulate if we currently hold fewer pages than the retention window allows.
        boolean needPopulate = mItems.size() < mOffscreenPageLimit * 2 + 1 &&
                mItems.size() < mAdapter.getCount();
        int newCurrItem = mCurItem;

        boolean isUpdating = false;
        for (int i = 0; i < mItems.size(); i++) {
            final ItemInfo ii = mItems.get(i);
            final int newPos = mAdapter.getItemPosition(ii.object);

            if (newPos == PagerAdapter.POSITION_UNCHANGED) {
                continue;
            }

            if (newPos == PagerAdapter.POSITION_NONE) {
                // Page was removed by the adapter; drop its record and destroy the item.
                mItems.remove(i);
                i--;

                if (!isUpdating) {
                    mAdapter.startUpdate(this);
                    isUpdating = true;
                }

                mAdapter.destroyItem(this, ii.position, ii.object);
                needPopulate = true;

                if (mCurItem == ii.position) {
                    // Keep the current item in the valid range
                    newCurrItem = Math.max(0, Math.min(mCurItem, mAdapter.getCount() - 1));
                    needPopulate = true;
                }
                continue;
            }

            if (ii.position != newPos) {
                if (ii.position == mCurItem) {
                    // Our current item changed position. Follow it.
                    newCurrItem = newPos;
                }

                ii.position = newPos;
                needPopulate = true;
            }
        }

        if (isUpdating) {
            mAdapter.finishUpdate(this);
        }

        Collections.sort(mItems, COMPARATOR);

        if (needPopulate) {
            // Reset our known page widths; populate will recompute them.
            final int childCount = getChildCount();
            for (int i = 0; i < childCount; i++) {
                final View child = getChildAt(i);
                final LayoutParams lp = (LayoutParams) child.getLayoutParams();
                if (!lp.isDecor) {
                    lp.widthFactor = 0.f;
                }
            }

            setCurrentItemInternal(newCurrItem, false, true);
            requestLayout();
        }
    }

    // Rebuilds the set of live pages around the current item.
    void populate() {
        populate(mCurItem);
    }

    // Instantiates/destroys pages so that the current item plus the offscreen window
    // (and at least the visible width) is populated, then recomputes page offsets.
    void populate(int newCurrentItem) {
        ItemInfo oldCurInfo = null;
        if (mCurItem != newCurrentItem) {
            oldCurInfo = infoForPosition(mCurItem);
            mCurItem = newCurrentItem;
        }

        if (mAdapter == null) {
            return;
        }

        // Bail now if we are waiting to populate.  This is to hold off
        // on creating views from the time the user releases their finger to
        // fling to a new position until we have finished the scroll to
        // that position, avoiding glitches from happening at that point.
        if (mPopulatePending) {
            if (DEBUG) Log.i(TAG, "populate is pending, skipping for now...");
            return;
        }

        // Also, don't populate until we are attached to a window.  This is to
        // avoid trying to populate before we have restored our view hierarchy
        // state and conflicting with what is restored.
        if (getWindowToken() == null) {
            return;
        }

        mAdapter.startUpdate(this);

        final int pageLimit = mOffscreenPageLimit;
        final int startPos = Math.max(0, mCurItem - pageLimit);
        final int N = mAdapter.getCount();
        final int endPos = Math.min(N-1, mCurItem + pageLimit);

        // Locate the currently focused item or add it if needed.
        int curIndex = -1;
        ItemInfo curItem = null;
        for (curIndex = 0; curIndex < mItems.size(); curIndex++) {
            final ItemInfo ii = mItems.get(curIndex);
            if (ii.position >= mCurItem) {
                if (ii.position == mCurItem) curItem = ii;
                break;
            }
        }

        if (curItem == null && N > 0) {
            curItem = addNewItem(mCurItem, curIndex);
        }

        // Fill 3x the available width or up to the number of offscreen
        // pages requested to either side, whichever is larger.
        // If we have no current item we have no work to do.
        if (curItem != null) {
            float extraWidthLeft = 0.f;
            int itemIndex = curIndex - 1;
            ItemInfo ii = itemIndex >= 0 ? mItems.get(itemIndex) : null;
            final float leftWidthNeeded = 2.f - curItem.widthFactor +
                    (float) getPaddingLeft() / (float) getClientWidth();
            // Walk left from the current page, keeping pages inside the window
            // and destroying populated pages beyond it.
            for (int pos = mCurItem - 1; pos >= 0; pos--) {
                if (extraWidthLeft >= leftWidthNeeded && pos < startPos) {
                    if (ii == null) {
                        break;
                    }
                    if (pos == ii.position && !ii.scrolling) {
                        mItems.remove(itemIndex);
                        mAdapter.destroyItem(this, pos, ii.object);
                        itemIndex--;
                        curIndex--;
                        ii = itemIndex >= 0 ? mItems.get(itemIndex) : null;
                    }
                } else if (ii != null && pos == ii.position) {
                    extraWidthLeft += ii.widthFactor;
                    itemIndex--;
                    ii = itemIndex >= 0 ? mItems.get(itemIndex) : null;
                } else {
                    ii = addNewItem(pos, itemIndex + 1);
                    extraWidthLeft += ii.widthFactor;
                    curIndex++;
                    ii = itemIndex >= 0 ? mItems.get(itemIndex) : null;
                }
            }

            // Symmetric walk to the right of the current page.
            float extraWidthRight = curItem.widthFactor;
            itemIndex = curIndex + 1;
            if (extraWidthRight < 2.f) {
                ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null;
                final float rightWidthNeeded = (float) getPaddingRight() / (float) getClientWidth() +
                        2.f;
                for (int pos = mCurItem + 1; pos < N; pos++) {
                    if (extraWidthRight >= rightWidthNeeded && pos > endPos) {
                        if (ii == null) {
                            break;
                        }
                        if (pos == ii.position && !ii.scrolling) {
                            mItems.remove(itemIndex);
                            mAdapter.destroyItem(this, pos, ii.object);
                            ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null;
                        }
                    } else if (ii != null && pos == ii.position) {
                        extraWidthRight += ii.widthFactor;
                        itemIndex++;
                        ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null;
                    } else {
                        ii = addNewItem(pos, itemIndex);
                        itemIndex++;
                        extraWidthRight += ii.widthFactor;
                        ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null;
                    }
                }
            }

            calculatePageOffsets(curItem, curIndex, oldCurInfo);
        }

        if (DEBUG) {
            Log.i(TAG, "Current page list:");
            for (int i=0; i<mItems.size(); i++) {
                Log.i(TAG, "#" + i + ": page " + mItems.get(i).position);
            }
        }

        mAdapter.setPrimaryItem(this, mCurItem, curItem != null ? curItem.object : null);

        mAdapter.finishUpdate(this);

        // Check width measurement of current pages and drawing sort order.
        // Update LayoutParams as needed.
        final boolean sort = mDrawingOrder != DRAW_ORDER_DEFAULT;
        if (sort) {
            if (mDrawingOrderedChildren == null) {
                mDrawingOrderedChildren = new ArrayList<View>();
            } else {
                mDrawingOrderedChildren.clear();
            }
        }
        final int childCount = getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            lp.childIndex = i;
            if (!lp.isDecor && lp.widthFactor == 0.f) {
                /* 0 means requery the adapter for this, it doesn't have a valid width. */
                final ItemInfo ii = infoForChild(child);
                if (ii != null) {
                    lp.widthFactor = ii.widthFactor;
                    lp.position = ii.position;
                }
            }
            if (sort) mDrawingOrderedChildren.add(child);
        }
        if (sort) {
            Collections.sort(mDrawingOrderedChildren, sPositionComparator);
        }

        if (hasFocus()) {
            // Make sure focus lands on the current page after repopulating.
            View currentFocused = findFocus();
            ItemInfo ii = currentFocused != null ? infoForAnyChild(currentFocused) : null;
            if (ii == null || ii.position != mCurItem) {
                for (int i=0; i<getChildCount(); i++) {
                    View child = getChildAt(i);
                    ii = infoForChild(child);
                    if (ii != null && ii.position == mCurItem) {
                        if (child.requestFocus(FOCUS_FORWARD)) {
                            break;
                        }
                    }
                }
            }
        }
    }

    // Assigns a layout offset (in units of client width) to every ItemInfo in mItems,
    // anchored at curItem and, when the current page just changed, at oldCurInfo.
    // Also updates mFirstOffset/mLastOffset scroll bounds.
    private void calculatePageOffsets(ItemInfo curItem, int curIndex, ItemInfo oldCurInfo) {
        final int N = mAdapter.getCount();
        final int width = getClientWidth();
        final float marginOffset = width > 0 ? (float) mPageMargin / width : 0;
        // Fix up offsets for later layout.
        if (oldCurInfo != null) {
            final int oldCurPosition = oldCurInfo.position;
            // Base offsets off of oldCurInfo.
            if (oldCurPosition < curItem.position) {
                int itemIndex = 0;
                ItemInfo ii = null;
                float offset = oldCurInfo.offset + oldCurInfo.widthFactor + marginOffset;
                for (int pos = oldCurPosition + 1;
                        pos <= curItem.position && itemIndex < mItems.size(); pos++) {
                    ii = mItems.get(itemIndex);
                    while (pos > ii.position && itemIndex < mItems.size() - 1) {
                        itemIndex++;
                        ii = mItems.get(itemIndex);
                    }
                    while (pos < ii.position) {
                        // We don't have an item populated for this,
                        // ask the adapter for an offset.
                        offset += mAdapter.getPageWidth(pos) + marginOffset;
                        pos++;
                    }
                    ii.offset = offset;
                    offset += ii.widthFactor + marginOffset;
                }
            } else if (oldCurPosition > curItem.position) {
                int itemIndex = mItems.size() - 1;
                ItemInfo ii = null;
                float offset = oldCurInfo.offset;
                for (int pos = oldCurPosition - 1;
                        pos >= curItem.position && itemIndex >= 0; pos--) {
                    ii = mItems.get(itemIndex);
                    while (pos < ii.position && itemIndex > 0) {
                        itemIndex--;
                        ii = mItems.get(itemIndex);
                    }
                    while (pos > ii.position) {
                        // We don't have an item populated for this,
                        // ask the adapter for an offset.
                        offset -= mAdapter.getPageWidth(pos) + marginOffset;
                        pos--;
                    }
                    offset -= ii.widthFactor + marginOffset;
                    ii.offset = offset;
                }
            }
        }

        // Base all offsets off of curItem.
        final int itemCount = mItems.size();
        float offset = curItem.offset;
        int pos = curItem.position - 1;
        mFirstOffset = curItem.position == 0 ? curItem.offset : -Float.MAX_VALUE;
        mLastOffset = curItem.position == N - 1 ?
                curItem.offset + curItem.widthFactor - 1 : Float.MAX_VALUE;
        // Previous pages
        for (int i = curIndex - 1; i >= 0; i--, pos--) {
            final ItemInfo ii = mItems.get(i);
            while (pos > ii.position) {
                offset -= mAdapter.getPageWidth(pos--) + marginOffset;
            }
            offset -= ii.widthFactor + marginOffset;
            ii.offset = offset;
            if (ii.position == 0) mFirstOffset = offset;
        }
        offset = curItem.offset + curItem.widthFactor + marginOffset;
        pos = curItem.position + 1;
        // Next pages
        for (int i = curIndex + 1; i < itemCount; i++, pos++) {
            final ItemInfo ii = mItems.get(i);
            while (pos < ii.position) {
                offset += mAdapter.getPageWidth(pos++) + marginOffset;
            }
            if (ii.position == N - 1) {
                mLastOffset = offset + ii.widthFactor - 1;
            }
            ii.offset = offset;
            offset += ii.widthFactor + marginOffset;
        }

        mNeedCalculatePageOffsets = false;
    }

    /**
     * This is the persistent state that is saved by ViewPager.  Only needed
     * if you are creating a subclass of ViewPager that must save its own
     * state, in which case it should implement a subclass of this which
     * contains that state.
     */
    public static class SavedState extends BaseSavedState {
        // Adapter position of the page that was current when state was saved.
        int position;
        // Opaque adapter state, restored via PagerAdapter.restoreState().
        Parcelable adapterState;
        ClassLoader loader;

        public SavedState(Parcelable superState) {
            super(superState);
        }

        @Override
        public void writeToParcel(Parcel out, int flags) {
            super.writeToParcel(out, flags);
            out.writeInt(position);
            out.writeParcelable(adapterState, flags);
        }

        @Override
        public String toString() {
            return "FragmentPager.SavedState{"
                    + Integer.toHexString(System.identityHashCode(this))
                    + " position=" + position + "}";
        }

        public static final Parcelable.Creator<SavedState> CREATOR
                = ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() {
                    @Override
                    public SavedState createFromParcel(Parcel in, ClassLoader loader) {
                        return new SavedState(in, loader);
                    }
                    @Override
                    public SavedState[] newArray(int size) {
                        return new SavedState[size];
                    }
                });

        SavedState(Parcel in, ClassLoader loader) {
            super(in);
            if (loader == null) {
                loader = getClass().getClassLoader();
            }
            position = in.readInt();
            adapterState = in.readParcelable(loader);
            this.loader = loader;
        }
    }

    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        SavedState ss = new SavedState(superState);
        ss.position = mCurItem;
        if (mAdapter != null) {
            ss.adapterState = mAdapter.saveState();
        }
        return ss;
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        if (!(state instanceof SavedState)) {
            super.onRestoreInstanceState(state);
            return;
        }

        SavedState ss = (SavedState)state;
        super.onRestoreInstanceState(ss.getSuperState());

        if (mAdapter != null) {
            mAdapter.restoreState(ss.adapterState, ss.loader);
            setCurrentItemInternal(ss.position, false, true);
        } else {
            // No adapter attached yet; stash the state until one is set.
            mRestoredCurItem = ss.position;
            mRestoredAdapterState = ss.adapterState;
            mRestoredClassLoader = ss.loader;
        }
    }

    @Override
    public void addView(View child, int index, ViewGroup.LayoutParams params) {
        if (!checkLayoutParams(params)) {
            params = generateLayoutParams(params);
        }
        final LayoutParams lp = (LayoutParams) params;
        lp.isDecor |= child instanceof Decor;
        if (mInLayout) {
            if (lp != null && lp.isDecor) {
                throw new IllegalStateException("Cannot add pager decor view during layout");
            }
            lp.needsMeasure = true;
            addViewInLayout(child, index, params);
        } else {
            super.addView(child, index, params);
        }

        if (USE_CACHE) {
            if (child.getVisibility() != GONE) {
                child.setDrawingCacheEnabled(mScrollingCacheEnabled);
            } else {
                child.setDrawingCacheEnabled(false);
            }
        }
    }

    // Returns the ItemInfo whose adapter object owns the given direct child view, or null.
    ItemInfo infoForChild(View child) {
        for (int i=0; i<mItems.size(); i++) {
            ItemInfo ii = mItems.get(i);
            if (mAdapter.isViewFromObject(child, ii.object)) {
                return ii;
            }
        }
        return null;
    }

    // Like infoForChild, but walks up the parent chain so any descendant view matches.
    ItemInfo infoForAnyChild(View child) {
        ViewParent parent;
        while ((parent=child.getParent()) != this) {
            if (parent == null || !(parent instanceof View)) {
                return null;
            }
            child = (View)parent;
        }
        return infoForChild(child);
    }

    // Returns the ItemInfo for the given adapter position, or null if not populated.
    ItemInfo infoForPosition(int position) {
        for (int i = 0; i < mItems.size(); i++) {
            ItemInfo ii = mItems.get(i);
            if (ii.position == position) {
                return ii;
            }
        }
        return null;
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        mFirstLayout = true;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // For simple implementation, or internal size is always 0.
        // We depend on the container to specify the layout size of
        // our view.  We can't really know what it is since we will be
        // adding and removing different arbitrary views and do not
        // want the layout to change as this happens.
        setMeasuredDimension(getDefaultSize(0, widthMeasureSpec),
                getDefaultSize(0, heightMeasureSpec));

        final int measuredWidth = getMeasuredWidth();
        final int maxGutterSize = measuredWidth / 10;
        mGutterSize = Math.min(maxGutterSize, mDefaultGutterSize);

        // Children are just made to fill our space.
        int childWidthSize = measuredWidth - getPaddingLeft() - getPaddingRight();
        int childHeightSize = getMeasuredHeight() - getPaddingTop() - getPaddingBottom();

        /*
         * Make sure all children have been properly measured.
         Decor views first.
         * Right now we cheat and make this less complicated by assuming decor
         * views won't intersect. We will pin to edges based on gravity.
         */
        int size = getChildCount();
        for (int i = 0; i < size; ++i) {
            final View child = getChildAt(i);
            if (child.getVisibility() != GONE) {
                final LayoutParams lp = (LayoutParams) child.getLayoutParams();
                if (lp != null && lp.isDecor) {
                    final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK;
                    final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK;
                    int widthMode = MeasureSpec.AT_MOST;
                    int heightMode = MeasureSpec.AT_MOST;
                    boolean consumeVertical = vgrav == Gravity.TOP || vgrav == Gravity.BOTTOM;
                    boolean consumeHorizontal = hgrav == Gravity.LEFT || hgrav == Gravity.RIGHT;

                    // A decor view pinned to an edge consumes space on that axis and is
                    // measured exactly along the opposite one.
                    if (consumeVertical) {
                        widthMode = MeasureSpec.EXACTLY;
                    } else if (consumeHorizontal) {
                        heightMode = MeasureSpec.EXACTLY;
                    }

                    int widthSize = childWidthSize;
                    int heightSize = childHeightSize;
                    if (lp.width != LayoutParams.WRAP_CONTENT) {
                        widthMode = MeasureSpec.EXACTLY;
                        if (lp.width != LayoutParams.FILL_PARENT) {
                            widthSize = lp.width;
                        }
                    }
                    if (lp.height != LayoutParams.WRAP_CONTENT) {
                        heightMode = MeasureSpec.EXACTLY;
                        if (lp.height != LayoutParams.FILL_PARENT) {
                            heightSize = lp.height;
                        }
                    }
                    final int widthSpec = MeasureSpec.makeMeasureSpec(widthSize, widthMode);
                    final int heightSpec = MeasureSpec.makeMeasureSpec(heightSize, heightMode);
                    child.measure(widthSpec, heightSpec);

                    if (consumeVertical) {
                        childHeightSize -= child.getMeasuredHeight();
                    } else if (consumeHorizontal) {
                        childWidthSize -= child.getMeasuredWidth();
                    }
                }
            }
        }

        mChildWidthMeasureSpec = MeasureSpec.makeMeasureSpec(childWidthSize, MeasureSpec.EXACTLY);
        mChildHeightMeasureSpec = MeasureSpec.makeMeasureSpec(childHeightSize, MeasureSpec.EXACTLY);

        // Make sure we have created all fragments that we need to have shown.
        mInLayout = true;
        populate();
        mInLayout = false;

        // Page views next.
        size = getChildCount();
        for (int i = 0; i < size; ++i) {
            final View child = getChildAt(i);
            if (child.getVisibility() != GONE) {
                if (DEBUG) Log.v(TAG, "Measuring #" + i + " " + child
                        + ": " + mChildWidthMeasureSpec);

                final LayoutParams lp = (LayoutParams) child.getLayoutParams();
                // NOTE(review): if lp were null this branch would still dereference
                // lp.widthFactor below — appears unreachable because generateLayoutParams
                // always supplies a LayoutParams; verify before relying on it.
                if (lp == null || !lp.isDecor) {
                    final int widthSpec = MeasureSpec.makeMeasureSpec(
                            (int) (childWidthSize * lp.widthFactor), MeasureSpec.EXACTLY);
                    child.measure(widthSpec, mChildHeightMeasureSpec);
                }
            }
        }
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);

        // Make sure scroll position is set correctly.
        if (w != oldw) {
            recomputeScrollPosition(w, oldw, mPageMargin, mPageMargin);
        }
    }

    // Re-derives the scroll position after the width or page margin changes so the
    // same page (and relative page offset) stays on screen.
    private void recomputeScrollPosition(int width, int oldWidth, int margin, int oldMargin) {
        if (oldWidth > 0 && !mItems.isEmpty()) {
            final int widthWithMargin = width - getPaddingLeft() - getPaddingRight() + margin;
            final int oldWidthWithMargin = oldWidth - getPaddingLeft() - getPaddingRight()
                    + oldMargin;
            final int xpos = getScrollX();
            final float pageOffset = (float) xpos / oldWidthWithMargin;
            final int newOffsetPixels = (int) (pageOffset * widthWithMargin);

            scrollTo(newOffsetPixels, getScrollY());
            if (!mScroller.isFinished()) {
                // We now return to your regularly scheduled scroll, already in progress.
                final int newDuration = mScroller.getDuration() - mScroller.timePassed();
                ItemInfo targetInfo = infoForPosition(mCurItem);
                mScroller.startScroll(newOffsetPixels, 0,
                        (int) (targetInfo.offset * width), 0, newDuration);
            }
        } else {
            final ItemInfo ii = infoForPosition(mCurItem);
            final float scrollOffset = ii != null ?
                    Math.min(ii.offset, mLastOffset) : 0;
            final int scrollPos = (int) (scrollOffset *
                    (width - getPaddingLeft() - getPaddingRight()));
            if (scrollPos != getScrollX()) {
                completeScroll();
                scrollTo(scrollPos, getScrollY());
            }
        }
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        mInLayout = true;
        populate();
        mInLayout = false;

        final int count = getChildCount();
        int width = r - l;
        int height = b - t;
        int paddingLeft = getPaddingLeft();
        int paddingTop = getPaddingTop();
        int paddingRight = getPaddingRight();
        int paddingBottom = getPaddingBottom();
        final int scrollX = getScrollX();

        int decorCount = 0;

        // First pass - decor views. We need to do this in two passes so that
        // we have the proper offsets for non-decor views later.
        for (int i = 0; i < count; i++) {
            final View child = getChildAt(i);
            if (child.getVisibility() != GONE) {
                final LayoutParams lp = (LayoutParams) child.getLayoutParams();
                int childLeft = 0;
                int childTop = 0;
                if (lp.isDecor) {
                    final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK;
                    final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK;
                    switch (hgrav) {
                        default:
                            childLeft = paddingLeft;
                            break;
                        case Gravity.LEFT:
                            childLeft = paddingLeft;
                            paddingLeft += child.getMeasuredWidth();
                            break;
                        case Gravity.CENTER_HORIZONTAL:
                            childLeft = Math.max((width - child.getMeasuredWidth()) / 2,
                                    paddingLeft);
                            break;
                        case Gravity.RIGHT:
                            childLeft = width - paddingRight - child.getMeasuredWidth();
                            paddingRight += child.getMeasuredWidth();
                            break;
                    }
                    switch (vgrav) {
                        default:
                            childTop = paddingTop;
                            break;
                        case Gravity.TOP:
                            childTop = paddingTop;
                            paddingTop += child.getMeasuredHeight();
                            break;
                        case Gravity.CENTER_VERTICAL:
                            childTop = Math.max((height - child.getMeasuredHeight()) / 2,
                                    paddingTop);
                            break;
                        case Gravity.BOTTOM:
                            childTop = height - paddingBottom - child.getMeasuredHeight();
                            paddingBottom += child.getMeasuredHeight();
                            break;
                    }
                    // Decor views live in scrolled coordinates so they stay on screen.
                    childLeft += scrollX;
                    child.layout(childLeft, childTop,
                            childLeft + child.getMeasuredWidth(),
                            childTop + child.getMeasuredHeight());
                    decorCount++;
                }
            }
        }

        final int childWidth = width - paddingLeft - paddingRight;
        // Page views. Do this once we have the right padding offsets from above.
        for (int i = 0; i < count; i++) {
            final View child = getChildAt(i);
            if (child.getVisibility() != GONE) {
                final LayoutParams lp = (LayoutParams) child.getLayoutParams();
                ItemInfo ii;
                if (!lp.isDecor && (ii = infoForChild(child)) != null) {
                    int loff = (int) (childWidth * ii.offset);
                    int childLeft = paddingLeft + loff;
                    int childTop = paddingTop;
                    if (lp.needsMeasure) {
                        // This was added during layout and needs measurement.
                        // Do it now that we know what we're working with.
                        lp.needsMeasure = false;
                        final int widthSpec = MeasureSpec.makeMeasureSpec(
                                (int) (childWidth * lp.widthFactor),
                                MeasureSpec.EXACTLY);
                        final int heightSpec = MeasureSpec.makeMeasureSpec(
                                (int) (height - paddingTop - paddingBottom),
                                MeasureSpec.EXACTLY);
                        child.measure(widthSpec, heightSpec);
                    }
                    if (DEBUG) Log.v(TAG, "Positioning #" + i + " " + child + " f=" + ii.object
                            + ":" + childLeft + "," + childTop + " " + child.getMeasuredWidth()
                            + "x" + child.getMeasuredHeight());
                    child.layout(childLeft, childTop,
                            childLeft + child.getMeasuredWidth(),
                            childTop + child.getMeasuredHeight());
                }
            }
        }
        mTopPageBounds = paddingTop;
        mBottomPageBounds = height - paddingBottom;
        mDecorChildCount = decorCount;
        mFirstLayout = false;
    }

    @Override
    public void computeScroll() {
        if (!mScroller.isFinished() && mScroller.computeScrollOffset()) {
            int oldX = getScrollX();
            int oldY = getScrollY();
            int x = mScroller.getCurrX();
            int y = mScroller.getCurrY();

            if (oldX != x || oldY != y) {
                scrollTo(x, y);
                if (!pageScrolled(x)) {
                    // No pages to scroll over: stop the animation and reset x.
                    mScroller.abortAnimation();
                    scrollTo(0, y);
                }
            }

            // Keep on drawing until the animation has finished.
            ViewCompat.postInvalidateOnAnimation(this);
            return;
        }

        /* Done with scroll, clean up state. */
        completeScroll();
    }

    // Translates a raw x scroll position into an onPageScrolled() dispatch.
    // Returns false when there are no items to scroll over.
    private boolean pageScrolled(int xpos) {
        if (mItems.size() == 0) {
            mCalledSuper = false;
            onPageScrolled(0, 0, 0);
            if (!mCalledSuper) {
                throw new IllegalStateException(
                        "onPageScrolled did not call superclass implementation");
            }
            return false;
        }
        final ItemInfo ii = infoForCurrentScrollPosition();
        final int width = getClientWidth();
        final int widthWithMargin = width + mPageMargin;
        final float marginOffset = (float) mPageMargin / width;
        final int currentPage = ii.position;
        final float pageOffset = (((float) xpos / width) - ii.offset) /
                (ii.widthFactor + marginOffset);
        final int offsetPixels = (int) (pageOffset * widthWithMargin);

        mCalledSuper = false;
        onPageScrolled(currentPage, pageOffset, offsetPixels);
        if (!mCalledSuper) {
            throw new IllegalStateException(
                    "onPageScrolled did not call superclass implementation");
        }
        return true;
    }

    /**
     * This method will be invoked when the current page is scrolled, either as part
     * of a programmatically initiated smooth scroll or a user initiated touch scroll.
     * If you override this method you must call through to the superclass implementation
     * (e.g. super.onPageScrolled(position, offset, offsetPixels)) before onPageScrolled
     * returns.
     *
     * @param position Position index of the first page currently being displayed.
     *                 Page position+1 will be visible if positionOffset is nonzero.
     * @param offset Value from [0, 1) indicating the offset from the page at position.
     * @param offsetPixels Value in pixels indicating the offset from position.
     */
    protected void onPageScrolled(int position, float offset, int offsetPixels) {
        // Offset any decor views if needed - keep them on-screen at all times.
        if (mDecorChildCount > 0) {
            final int scrollX = getScrollX();
            int paddingLeft = getPaddingLeft();
            int paddingRight = getPaddingRight();
            final int width = getWidth();
            final int childCount = getChildCount();
            for (int i = 0; i < childCount; i++) {
                final View child = getChildAt(i);
                final LayoutParams lp = (LayoutParams) child.getLayoutParams();
                if (!lp.isDecor) continue;

                final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK;
                int childLeft = 0;
                switch (hgrav) {
                    default:
                        childLeft = paddingLeft;
                        break;
                    case Gravity.LEFT:
                        childLeft = paddingLeft;
                        paddingLeft += child.getWidth();
                        break;
                    case Gravity.CENTER_HORIZONTAL:
                        childLeft = Math.max((width - child.getMeasuredWidth()) / 2,
                                paddingLeft);
                        break;
                    case Gravity.RIGHT:
                        childLeft = width - paddingRight - child.getMeasuredWidth();
                        paddingRight += child.getMeasuredWidth();
                        break;
                }
                childLeft += scrollX;

                final int childOffset = childLeft - child.getLeft();
                if (childOffset != 0) {
                    child.offsetLeftAndRight(childOffset);
                }
            }
        }

        // Track the range of page positions seen during this interaction.
        if (mSeenPositionMin < 0 || position < mSeenPositionMin) {
            mSeenPositionMin = position;
        }
        if (mSeenPositionMax < 0 || FloatMath.ceil(position + offset) > mSeenPositionMax) {
            mSeenPositionMax = position + 1;
        }

        if (mOnPageChangeListener != null) {
            mOnPageChangeListener.onPageScrolled(position, offset, offsetPixels);
        }
        if (mInternalPageChangeListener != null) {
            mInternalPageChangeListener.onPageScrolled(position, offset, offsetPixels);
        }

        if (mPageTransformer != null) {
            final int scrollX = getScrollX();
            final int childCount = getChildCount();
            for (int i = 0; i < childCount; i++) {
                final View child = getChildAt(i);
                final LayoutParams lp = (LayoutParams) child.getLayoutParams();

                if (lp.isDecor) continue;

                final float transformPos = (float) (child.getLeft() - scrollX) / getClientWidth();
                mPageTransformer.transformPage(child, transformPos);
            }
        }

        // Flag checked by pageScrolled() to enforce the call-through contract above.
        mCalledSuper = true;
    }

    // Finishes any in-flight settle animation and repopulates if a scroll completed.
    private void completeScroll() {
        boolean needPopulate = mScrollState == SCROLL_STATE_SETTLING;
        if (needPopulate) {
            // Done with scroll, no
            // longer want to cache view drawing.
            setScrollingCacheEnabled(false);
            mScroller.abortAnimation();
            int oldX = getScrollX();
            int oldY = getScrollY();
            int x = mScroller.getCurrX();
            int y = mScroller.getCurrY();
            if (oldX != x || oldY != y) {
                scrollTo(x, y);
            }
            setScrollState(SCROLL_STATE_IDLE);
        }
        mPopulatePending = false;
        for (int i=0; i<mItems.size(); i++) {
            ItemInfo ii = mItems.get(i);
            if (ii.scrolling) {
                needPopulate = true;
                ii.scrolling = false;
            }
        }
        if (needPopulate) {
            populate();
        }
    }

    // True when the drag started in the left/right gutter and is moving further into it.
    private boolean isGutterDrag(float x, float dx) {
        return (x < mGutterSize && dx > 0) || (x > getWidth() - mGutterSize && dx < 0);
    }

    // Toggles hardware layers on all children while a page transform animation runs.
    private void enableLayers(boolean enable) {
        final int childCount = getChildCount();
        for (int i = 0; i < childCount; i++) {
            final int layerType = enable ?
                    ViewCompat.LAYER_TYPE_HARDWARE : ViewCompat.LAYER_TYPE_NONE;
            ViewCompat.setLayerType(getChildAt(i), layerType, null);
        }
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        /*
         * This method JUST determines whether we want to intercept the motion.
         * If we return true, onMotionEvent will be called and we do the actual
         * scrolling there.
         */

        final int action = ev.getAction() & MotionEventCompat.ACTION_MASK;

        // Always take care of the touch gesture being complete.
        if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
            // Release the drag.
            if (DEBUG) Log.v(TAG, "Intercept done!");
            mIsBeingDragged = false;
            mIsUnableToDrag = false;
            mActivePointerId = INVALID_POINTER;
            if (mVelocityTracker != null) {
                mVelocityTracker.recycle();
                mVelocityTracker = null;
            }
            return false;
        }

        // Nothing more to do here if we have decided whether or not we
        // are dragging.
        if (action != MotionEvent.ACTION_DOWN) {
            if (mIsBeingDragged) {
                if (DEBUG) Log.v(TAG, "Intercept returning true!");
                return true;
            }
            if (mIsUnableToDrag) {
                if (DEBUG) Log.v(TAG, "Intercept returning false!");
                return false;
            }
        }

        switch (action) {
            case MotionEvent.ACTION_MOVE: {
                /*
                 * mIsBeingDragged == false, otherwise the shortcut would have caught it. Check
                 * whether the user has moved far enough from his original down touch.
                 */

                /*
                 * Locally do absolute value. mLastMotionY is set to the y value
                 * of the down event.
                 */
                final int activePointerId = mActivePointerId;
                if (activePointerId == INVALID_POINTER) {
                    // If we don't have a valid id, the touch down wasn't on content.
                    break;
                }

                final int pointerIndex = MotionEventCompat.findPointerIndex(ev, activePointerId);
                final float x = MotionEventCompat.getX(ev, pointerIndex);
                final float dx = x - mLastMotionX;
                final float xDiff = Math.abs(dx);
                final float y = MotionEventCompat.getY(ev, pointerIndex);
                final float yDiff = Math.abs(y - mLastMotionY);
                if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff);

                if (dx != 0 && !isGutterDrag(mLastMotionX, dx) &&
                        canScroll(this, false, (int) dx, (int) x, (int) y)) {
                    // Nested view has scrollable area under this point. Let it be handled there.
                    mInitialMotionX = mLastMotionX = x;
                    mLastMotionY = y;
                    mIsUnableToDrag = true;
                    return false;
                }
                if (xDiff > mTouchSlop && xDiff > yDiff) {
                    if (DEBUG) Log.v(TAG, "Starting drag!");
                    mIsBeingDragged = true;
                    setScrollState(SCROLL_STATE_DRAGGING);
                    mLastMotionX = dx > 0 ? mInitialMotionX + mTouchSlop :
                            mInitialMotionX - mTouchSlop;
                    setScrollingCacheEnabled(true);
                } else {
                    if (yDiff > mTouchSlop) {
                        // The finger has moved enough in the vertical
                        // direction to be counted as a drag...  abort
                        // any attempt to drag horizontally, to work correctly
                        // with children that have scrolling containers.
                        if (DEBUG) Log.v(TAG, "Starting unable to drag!");
                        mIsUnableToDrag = true;
                    }
                }
                if (mIsBeingDragged) {
                    // Scroll to follow the motion event
                    if (performDrag(x)) {
                        ViewCompat.postInvalidateOnAnimation(this);
                    }
                }
                break;
            }

            case MotionEvent.ACTION_DOWN: {
                /*
                 * Remember location of down touch.
                 * ACTION_DOWN always refers to pointer index 0.
                 */
                mLastMotionX = mInitialMotionX = ev.getX();
                mLastMotionY = ev.getY();
                mActivePointerId = MotionEventCompat.getPointerId(ev, 0);
                mIsUnableToDrag = false;

                mScroller.computeScrollOffset();
                if (mScrollState == SCROLL_STATE_SETTLING &&
                        Math.abs(mScroller.getFinalX() - mScroller.getCurrX()) > mCloseEnough) {
                    // Let the user 'catch' the pager as it animates.
                    mScroller.abortAnimation();
                    mPopulatePending = false;
                    populate();
                    mIsBeingDragged = true;
                    setScrollState(SCROLL_STATE_DRAGGING);
                } else {
                    completeScroll();
                    mIsBeingDragged = false;
                }

                if (DEBUG) Log.v(TAG, "Down at " + mLastMotionX + "," + mLastMotionY
                        + " mIsBeingDragged=" + mIsBeingDragged
                        + "mIsUnableToDrag=" + mIsUnableToDrag);
                break;
            }

            case MotionEventCompat.ACTION_POINTER_UP:
                onSecondaryPointerUp(ev);
                break;
        }

        if (mVelocityTracker == null) {
            mVelocityTracker = VelocityTracker.obtain();
        }
        mVelocityTracker.addMovement(ev);

        /*
         * The only time we want to intercept motion events is if we are in the
         * drag mode.
         */
        return mIsBeingDragged;
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if (mFakeDragging) {
            // A fake drag is in progress already, ignore this real one
            // but still eat the touch events.
            // (It is likely that the user is multi-touching the screen.)
            return true;
        }

        if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getEdgeFlags() != 0) {
            // Don't handle edge touches immediately -- they may actually belong to one of our
            // descendants.
            return false;
        }

        if (mAdapter == null || mAdapter.getCount() == 0) {
            // Nothing to present or scroll; nothing to touch.
return false; } if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); final int action = ev.getAction(); boolean needsInvalidate = false; switch (action & MotionEventCompat.ACTION_MASK) { case MotionEvent.ACTION_DOWN: { mScroller.abortAnimation(); mPopulatePending = false; populate(); mIsBeingDragged = true; setScrollState(SCROLL_STATE_DRAGGING); // Remember where the motion event started mLastMotionX = mInitialMotionX = ev.getX(); mActivePointerId = MotionEventCompat.getPointerId(ev, 0); break; } case MotionEvent.ACTION_MOVE: if (!mIsBeingDragged) { final int pointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, pointerIndex); final float xDiff = Math.abs(x - mLastMotionX); final float y = MotionEventCompat.getY(ev, pointerIndex); final float yDiff = Math.abs(y - mLastMotionY); if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff); if (xDiff > mTouchSlop && xDiff > yDiff) { if (DEBUG) Log.v(TAG, "Starting drag!"); mIsBeingDragged = true; mLastMotionX = x - mInitialMotionX > 0 ? mInitialMotionX + mTouchSlop : mInitialMotionX - mTouchSlop; setScrollState(SCROLL_STATE_DRAGGING); setScrollingCacheEnabled(true); } } // Not else! Note that mIsBeingDragged can be set above. 
if (mIsBeingDragged) { // Scroll to follow the motion event final int activePointerIndex = MotionEventCompat.findPointerIndex( ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, activePointerIndex); needsInvalidate |= performDrag(x); } break; case MotionEvent.ACTION_UP: if (mIsBeingDragged) { final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int) VelocityTrackerCompat.getXVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; final int width = getClientWidth(); final int scrollX = getScrollX(); final ItemInfo ii = infoForCurrentScrollPosition(); final int currentPage = ii.position; final float pageOffset = (((float) scrollX / width) - ii.offset) / ii.widthFactor; final int activePointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, activePointerIndex); final int totalDelta = (int) (x - mInitialMotionX); int nextPage = determineTargetPage(currentPage, pageOffset, initialVelocity, totalDelta); setCurrentItemInternal(nextPage, true, true, initialVelocity); mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease(); } break; case MotionEvent.ACTION_CANCEL: if (mIsBeingDragged) { setCurrentItemInternal(mCurItem, true, true); mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease(); } break; case MotionEventCompat.ACTION_POINTER_DOWN: { final int index = MotionEventCompat.getActionIndex(ev); final float x = MotionEventCompat.getX(ev, index); mLastMotionX = x; mActivePointerId = MotionEventCompat.getPointerId(ev, index); break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); mLastMotionX = MotionEventCompat.getX(ev, MotionEventCompat.findPointerIndex(ev, mActivePointerId)); break; } if (needsInvalidate) { ViewCompat.postInvalidateOnAnimation(this); } return 
true; } private boolean performDrag(float x) { boolean needsInvalidate = false; final float deltaX = mLastMotionX - x; mLastMotionX = x; float oldScrollX = getScrollX(); float scrollX = oldScrollX + deltaX; final int width = getClientWidth(); float leftBound = width * mFirstOffset; float rightBound = width * mLastOffset; boolean leftAbsolute = true; boolean rightAbsolute = true; final ItemInfo firstItem = mItems.get(0); final ItemInfo lastItem = mItems.get(mItems.size() - 1); if (firstItem.position != 0) { leftAbsolute = false; leftBound = firstItem.offset * width; } if (lastItem.position != mAdapter.getCount() - 1) { rightAbsolute = false; rightBound = lastItem.offset * width; } if (scrollX < leftBound) { if (leftAbsolute) { float over = leftBound - scrollX; needsInvalidate = mLeftEdge.onPull(Math.abs(over) / width); } scrollX = leftBound; } else if (scrollX > rightBound) { if (rightAbsolute) { float over = scrollX - rightBound; needsInvalidate = mRightEdge.onPull(Math.abs(over) / width); } scrollX = rightBound; } // Don't lose the rounded component mLastMotionX += scrollX - (int) scrollX; scrollTo((int) scrollX, getScrollY()); pageScrolled((int) scrollX); return needsInvalidate; } /** * @return Info about the page at the current scroll position. * This can be synthetic for a missing middle page; the 'object' field can be null. */ private ItemInfo infoForCurrentScrollPosition() { final int width = getClientWidth(); final float scrollOffset = width > 0 ? (float) getScrollX() / width : 0; final float marginOffset = width > 0 ? (float) mPageMargin / width : 0; int lastPos = -1; float lastOffset = 0.f; float lastWidth = 0.f; boolean first = true; ItemInfo lastItem = null; for (int i = 0; i < mItems.size(); i++) { ItemInfo ii = mItems.get(i); float offset; if (!first && ii.position != lastPos + 1) { // Create a synthetic item for a missing page. 
ii = mTempItem;
                // mTempItem is reused in place; rewind i so the real item is revisited next pass.
                ii.offset = lastOffset + lastWidth + marginOffset;
                ii.position = lastPos + 1;
                ii.widthFactor = mAdapter.getPageWidth(ii.position);
                i--;
            }
            offset = ii.offset;

            final float leftBound = offset;
            final float rightBound = offset + ii.widthFactor + marginOffset;
            if (first || scrollOffset >= leftBound) {
                if (scrollOffset < rightBound || i == mItems.size() - 1) {
                    return ii;
                }
            } else {
                return lastItem;
            }
            first = false;
            lastPos = ii.position;
            lastOffset = offset;
            lastWidth = ii.widthFactor;
            lastItem = ii;
        }

        return lastItem;
    }

    /**
     * Chooses which page to settle on after a drag/fling ends.
     *
     * @param currentPage page at the current scroll position
     * @param pageOffset  fractional offset [0,1) within the current page
     * @param velocity    x fling velocity in pixels/second (sign gives direction)
     * @param deltaX      total horizontal distance dragged, in pixels
     * @return the page index to settle on, clamped to the populated item range
     */
    private int determineTargetPage(int currentPage, float pageOffset, int velocity, int deltaX) {
        int targetPage;
        if (Math.abs(deltaX) > mFlingDistance && Math.abs(velocity) > mMinimumVelocity) {
            // A real fling: go one page in the fling direction.
            targetPage = velocity > 0 ? currentPage : currentPage + 1;
        } else if (mSeenPositionMin >= 0 && mSeenPositionMin < currentPage && pageOffset < 0.5f) {
            targetPage = currentPage + 1;
        } else if (mSeenPositionMax >= 0 && mSeenPositionMax > currentPage + 1
                && pageOffset >= 0.5f) {
            targetPage = currentPage - 1;
        } else {
            // Slow release: round to the nearest page.
            targetPage = (int) (currentPage + pageOffset + 0.5f);
        }

        if (mItems.size() > 0) {
            final ItemInfo firstItem = mItems.get(0);
            final ItemInfo lastItem = mItems.get(mItems.size() - 1);

            // Only let the user target pages we have items for
            targetPage = Math.max(firstItem.position, Math.min(targetPage, lastItem.position));
        }

        return targetPage;
    }

    @Override
    public void draw(Canvas canvas) {
        super.draw(canvas);
        boolean needsInvalidate = false;

        final int overScrollMode = ViewCompat.getOverScrollMode(this);
        if (overScrollMode == ViewCompat.OVER_SCROLL_ALWAYS
                || (overScrollMode == ViewCompat.OVER_SCROLL_IF_CONTENT_SCROLLS
                        && mAdapter != null && mAdapter.getCount() > 1)) {
            if (!mLeftEdge.isFinished()) {
                // Rotate the canvas so the vertical edge-glow draws along the left edge.
                final int restoreCount = canvas.save();
                final int height = getHeight() - getPaddingTop() - getPaddingBottom();
                final int width = getWidth();

                canvas.rotate(270);
                canvas.translate(-height + getPaddingTop(), mFirstOffset * width);
                mLeftEdge.setSize(height,
width);
                needsInvalidate |= mLeftEdge.draw(canvas);
                canvas.restoreToCount(restoreCount);
            }
            if (!mRightEdge.isFinished()) {
                // Same trick rotated the other way for the right edge glow.
                final int restoreCount = canvas.save();
                final int width = getWidth();
                final int height = getHeight() - getPaddingTop() - getPaddingBottom();

                canvas.rotate(90);
                canvas.translate(-getPaddingTop(), -(mLastOffset + 1) * width);
                mRightEdge.setSize(height, width);
                needsInvalidate |= mRightEdge.draw(canvas);
                canvas.restoreToCount(restoreCount);
            }
        } else {
            mLeftEdge.finish();
            mRightEdge.finish();
        }

        if (needsInvalidate) {
            // Keep animating
            ViewCompat.postInvalidateOnAnimation(this);
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        // Draw the margin drawable between pages if needed.
        if (mPageMargin > 0 && mMarginDrawable != null && mItems.size() > 0 && mAdapter != null) {
            final int scrollX = getScrollX();
            final int width = getWidth();

            final float marginOffset = (float) mPageMargin / width;
            int itemIndex = 0;
            ItemInfo ii = mItems.get(0);
            float offset = ii.offset;
            final int itemCount = mItems.size();
            final int firstPos = ii.position;
            final int lastPos = mItems.get(itemCount - 1).position;
            for (int pos = firstPos; pos < lastPos; pos++) {
                // Advance to the ItemInfo covering this position, if we have one.
                while (pos > ii.position && itemIndex < itemCount) {
                    ii = mItems.get(++itemIndex);
                }

                float drawAt;
                if (pos == ii.position) {
                    drawAt = (ii.offset + ii.widthFactor) * width;
                    offset = ii.offset + ii.widthFactor + marginOffset;
                } else {
                    // No live item for this position: extrapolate from the adapter's page width.
                    float widthFactor = mAdapter.getPageWidth(pos);
                    drawAt = (offset + widthFactor) * width;
                    offset += widthFactor + marginOffset;
                }

                if (drawAt + mPageMargin > scrollX) {
                    mMarginDrawable.setBounds((int) drawAt, mTopPageBounds,
                            (int) (drawAt + mPageMargin + 0.5f), mBottomPageBounds);
                    mMarginDrawable.draw(canvas);
                }

                if (drawAt > scrollX + width) {
                    break; // No more visible, no sense in continuing
                }
            }
        }
    }

    /**
     * Start a fake drag of the pager.
*
     * <p>A fake drag can be useful if you want to synchronize the motion of the ViewPager
     * with the touch scrolling of another view, while still letting the ViewPager
     * control the snapping motion and fling behavior. (e.g. parallax-scrolling tabs.)
     * Call {@link #fakeDragBy(float)} to simulate the actual drag motion. Call
     * {@link #endFakeDrag()} to complete the fake drag and fling as necessary.
     *
     * <p>During a fake drag the ViewPager will ignore all touch events. If a real drag
     * is already in progress, this method will return false.
     *
     * @return true if the fake drag began successfully, false if it could not be started.
     *
     * @see #fakeDragBy(float)
     * @see #endFakeDrag()
     */
    public boolean beginFakeDrag() {
        if (mIsBeingDragged) {
            // A real drag owns the pager right now.
            return false;
        }
        mFakeDragging = true;
        setScrollState(SCROLL_STATE_DRAGGING);
        mInitialMotionX = mLastMotionX = 0;
        if (mVelocityTracker == null) {
            mVelocityTracker = VelocityTracker.obtain();
        } else {
            mVelocityTracker.clear();
        }
        // Feed the tracker a synthetic down so later fake moves produce a velocity.
        final long time = SystemClock.uptimeMillis();
        final MotionEvent ev = MotionEvent.obtain(time, time, MotionEvent.ACTION_DOWN, 0, 0, 0);
        mVelocityTracker.addMovement(ev);
        ev.recycle();
        mFakeDragBeginTime = time;
        return true;
    }

    /**
     * End a fake drag of the pager.
     *
     * @see #beginFakeDrag()
     * @see #fakeDragBy(float)
     */
    public void endFakeDrag() {
        if (!mFakeDragging) {
            throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first.");
        }

        // Same settle logic as a real ACTION_UP: pick a target page from velocity + distance.
        final VelocityTracker velocityTracker = mVelocityTracker;
        velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity);
        int initialVelocity = (int) VelocityTrackerCompat.getXVelocity(
                velocityTracker, mActivePointerId);
        mPopulatePending = true;
        final int width = getClientWidth();
        final int scrollX = getScrollX();
        final ItemInfo ii = infoForCurrentScrollPosition();
        final int currentPage = ii.position;
        final float pageOffset = (((float) scrollX / width) - ii.offset) / ii.widthFactor;
        final int totalDelta = (int) (mLastMotionX - mInitialMotionX);
        int nextPage = determineTargetPage(currentPage, pageOffset, initialVelocity,
                totalDelta);
        setCurrentItemInternal(nextPage, true, true, initialVelocity);
        endDrag();

        mFakeDragging = false;
    }

    /**
     * Fake drag by an offset in pixels. You must have called {@link #beginFakeDrag()} first.
     *
     * @param xOffset Offset in pixels to drag by.
     * @see #beginFakeDrag()
     * @see #endFakeDrag()
     */
    public void fakeDragBy(float xOffset) {
        if (!mFakeDragging) {
            throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first.");
        }

        mLastMotionX += xOffset;

        float oldScrollX = getScrollX();
        float scrollX = oldScrollX - xOffset;
        final int width = getClientWidth();

        float leftBound = width * mFirstOffset;
        float rightBound = width * mLastOffset;

        final ItemInfo firstItem = mItems.get(0);
        final ItemInfo lastItem = mItems.get(mItems.size() - 1);
        if (firstItem.position != 0) {
            leftBound = firstItem.offset * width;
        }
        if (lastItem.position != mAdapter.getCount() - 1) {
            rightBound = lastItem.offset * width;
        }

        // Clamp without edge glow (unlike performDrag) -- this is a programmatic drag.
        if (scrollX < leftBound) {
            scrollX = leftBound;
        } else if (scrollX > rightBound) {
            scrollX = rightBound;
        }
        // Don't lose the rounded component
        mLastMotionX += scrollX - (int) scrollX;
        scrollTo((int) scrollX, getScrollY());
        pageScrolled((int) scrollX);

        // Synthesize an event for the VelocityTracker.
final long time = SystemClock.uptimeMillis();
        final MotionEvent ev = MotionEvent.obtain(mFakeDragBeginTime, time, MotionEvent.ACTION_MOVE,
                mLastMotionX, 0, 0);
        mVelocityTracker.addMovement(ev);
        ev.recycle();
    }

    /**
     * Returns true if a fake drag is in progress.
     *
     * @return true if currently in a fake drag, false otherwise.
     *
     * @see #beginFakeDrag()
     * @see #fakeDragBy(float)
     * @see #endFakeDrag()
     */
    public boolean isFakeDragging() {
        return mFakeDragging;
    }

    /**
     * Hands drag tracking over to a remaining finger when the active pointer lifts,
     * so a multi-touch drag continues smoothly.
     */
    private void onSecondaryPointerUp(MotionEvent ev) {
        final int pointerIndex = MotionEventCompat.getActionIndex(ev);
        final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex);
        if (pointerId == mActivePointerId) {
            // This was our active pointer going up. Choose a new
            // active pointer and adjust accordingly.
            final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
            mLastMotionX = MotionEventCompat.getX(ev, newPointerIndex);
            mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex);
            if (mVelocityTracker != null) {
                mVelocityTracker.clear();
            }
        }
    }

    // Resets all drag-related state and releases the velocity tracker back to its pool.
    private void endDrag() {
        mIsBeingDragged = false;
        mIsUnableToDrag = false;

        if (mVelocityTracker != null) {
            mVelocityTracker.recycle();
            mVelocityTracker = null;
        }
    }

    // Toggles the children's drawing cache during scrolls; a no-op unless USE_CACHE is set.
    private void setScrollingCacheEnabled(boolean enabled) {
        if (mScrollingCacheEnabled != enabled) {
            mScrollingCacheEnabled = enabled;
            if (USE_CACHE) {
                final int size = getChildCount();
                for (int i = 0; i < size; ++i) {
                    final View child = getChildAt(i);
                    if (child.getVisibility() != GONE) {
                        child.setDrawingCacheEnabled(enabled);
                    }
                }
            }
        }
    }

    /**
     * Tests scrollability within child views of v given a delta of dx.
     *
     * @param v View to test for horizontal scrollability
     * @param checkV Whether the view v passed should itself be checked for scrollability (true),
     *               or just its children (false).
     * @param dx Delta scrolled in pixels
     * @param x X coordinate of the active touch point
     * @param y Y coordinate of the active touch point
     * @return true if child views of v can be scrolled by delta of dx.
*/
    protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) {
        if (v instanceof ViewGroup) {
            final ViewGroup group = (ViewGroup) v;
            final int scrollX = v.getScrollX();
            final int scrollY = v.getScrollY();
            final int count = group.getChildCount();
            // Count backwards - let topmost views consume scroll distance first.
            for (int i = count - 1; i >= 0; i--) {
                // TODO: Add versioned support here for transformed views.
                // This will not work for transformed views in Honeycomb+
                final View child = group.getChildAt(i);
                // Only recurse into children that actually contain the touch point.
                if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight()
                        && y + scrollY >= child.getTop() && y + scrollY < child.getBottom()
                        && canScroll(child, true, dx, x + scrollX - child.getLeft(),
                                y + scrollY - child.getTop())) {
                    return true;
                }
            }
        }

        return checkV && ViewCompat.canScrollHorizontally(v, -dx);
    }

    @Override
    public boolean dispatchKeyEvent(KeyEvent event) {
        // Let the focused view and/or our descendants get the key first
        return super.dispatchKeyEvent(event) || executeKeyEvent(event);
    }

    /**
     * You can call this function yourself to have the scroll view perform
     * scrolling from a key event, just as if the event had been dispatched to
     * it by the view hierarchy.
     *
     * @param event The key event to execute.
     * @return Return true if the event was handled, else false.
     */
    public boolean executeKeyEvent(KeyEvent event) {
        boolean handled = false;
        if (event.getAction() == KeyEvent.ACTION_DOWN) {
            switch (event.getKeyCode()) {
                case KeyEvent.KEYCODE_DPAD_LEFT:
                    handled = arrowScroll(FOCUS_LEFT);
                    break;
                case KeyEvent.KEYCODE_DPAD_RIGHT:
                    handled = arrowScroll(FOCUS_RIGHT);
                    break;
                case KeyEvent.KEYCODE_TAB:
                    if (Build.VERSION.SDK_INT >= 11) {
                        // The focus finder had a bug handling FOCUS_FORWARD and FOCUS_BACKWARD
                        // before Android 3.0. Ignore the tab key on those devices.
if (KeyEventCompat.hasNoModifiers(event)) {
                            handled = arrowScroll(FOCUS_FORWARD);
                        } else if (KeyEventCompat.hasModifiers(event, KeyEvent.META_SHIFT_ON)) {
                            // Shift+Tab moves focus backward.
                            handled = arrowScroll(FOCUS_BACKWARD);
                        }
                    }
                    break;
            }
        }
        return handled;
    }

    /**
     * Moves focus (or pages) in the given direction in response to an arrow/tab key.
     *
     * @param direction one of {@code FOCUS_LEFT}, {@code FOCUS_RIGHT},
     *                  {@code FOCUS_FORWARD} or {@code FOCUS_BACKWARD}
     * @return true if focus moved or a page change was triggered
     */
    public boolean arrowScroll(int direction) {
        View currentFocused = findFocus();
        if (currentFocused == this) currentFocused = null;

        boolean handled = false;

        View nextFocused = FocusFinder.getInstance().findNextFocus(this, currentFocused,
                direction);
        if (nextFocused != null && nextFocused != currentFocused) {
            if (direction == View.FOCUS_LEFT) {
                // If there is nothing to the left, or this is causing us to
                // jump to the right, then what we really want to do is page left.
                final int nextLeft = getChildRectInPagerCoordinates(mTempRect, nextFocused).left;
                final int currLeft = getChildRectInPagerCoordinates(mTempRect, currentFocused).left;
                if (currentFocused != null && nextLeft >= currLeft) {
                    handled = pageLeft();
                } else {
                    handled = nextFocused.requestFocus();
                }
            } else if (direction == View.FOCUS_RIGHT) {
                // If there is nothing to the right, or this is causing us to
                // jump to the left, then what we really want to do is page right.
                final int nextLeft = getChildRectInPagerCoordinates(mTempRect, nextFocused).left;
                final int currLeft = getChildRectInPagerCoordinates(mTempRect, currentFocused).left;
                if (currentFocused != null && nextLeft <= currLeft) {
                    handled = pageRight();
                } else {
                    handled = nextFocused.requestFocus();
                }
            }
        } else if (direction == FOCUS_LEFT || direction == FOCUS_BACKWARD) {
            // Trying to move left and nothing there; try to page.
            handled = pageLeft();
        } else if (direction == FOCUS_RIGHT || direction == FOCUS_FORWARD) {
            // Trying to move right and nothing there; try to page.
handled = pageRight();
        }
        if (handled) {
            playSoundEffect(SoundEffectConstants.getContantForFocusDirection(direction));
        }
        return handled;
    }

    /**
     * Computes {@code child}'s bounding rect translated into this pager's coordinate space,
     * walking up through intermediate parent ViewGroups.
     *
     * @param outRect rect to fill (allocated if null)
     * @param child   the descendant view, or null for an empty rect
     * @return {@code outRect} for convenience
     */
    private Rect getChildRectInPagerCoordinates(Rect outRect, View child) {
        if (outRect == null) {
            outRect = new Rect();
        }
        if (child == null) {
            outRect.set(0, 0, 0, 0);
            return outRect;
        }
        outRect.left = child.getLeft();
        outRect.right = child.getRight();
        outRect.top = child.getTop();
        outRect.bottom = child.getBottom();

        ViewParent parent = child.getParent();
        while (parent instanceof ViewGroup && parent != this) {
            final ViewGroup group = (ViewGroup) parent;
            outRect.left += group.getLeft();
            outRect.right += group.getRight();
            outRect.top += group.getTop();
            outRect.bottom += group.getBottom();

            parent = group.getParent();
        }
        return outRect;
    }

    // Moves one page to the left, if possible. Returns true on success.
    boolean pageLeft() {
        if (mCurItem > 0) {
            setCurrentItem(mCurItem-1, true);
            return true;
        }
        return false;
    }

    // Moves one page to the right, if possible. Returns true on success.
    boolean pageRight() {
        if (mAdapter != null && mCurItem < (mAdapter.getCount()-1)) {
            setCurrentItem(mCurItem+1, true);
            return true;
        }
        return false;
    }

    /**
     * We only want the current page that is being shown to be focusable.
     */
    @Override
    public void addFocusables(ArrayList<View> views, int direction, int focusableMode) {
        final int focusableCount = views.size();

        final int descendantFocusability = getDescendantFocusability();

        if (descendantFocusability != FOCUS_BLOCK_DESCENDANTS) {
            for (int i = 0; i < getChildCount(); i++) {
                final View child = getChildAt(i);
                if (child.getVisibility() == VISIBLE) {
                    ItemInfo ii = infoForChild(child);
                    if (ii != null && ii.position == mCurItem) {
                        child.addFocusables(views, direction, focusableMode);
                    }
                }
            }
        }

        // we add ourselves (if focusable) in all cases except for when we are
        // FOCUS_AFTER_DESCENDANTS and there are some descendants focusable.  this is
        // to avoid the focus search finding layouts when a more precise search
        // among the focusable children would be more interesting.
if (
            descendantFocusability != FOCUS_AFTER_DESCENDANTS ||
                // No focusable descendants
                (focusableCount == views.size())) {
            // Note that we can't call the superclass here, because it will
            // add all views in.  So we need to do the same thing View does.
            if (!isFocusable()) {
                return;
            }
            if ((focusableMode & FOCUSABLES_TOUCH_MODE) == FOCUSABLES_TOUCH_MODE &&
                    isInTouchMode() && !isFocusableInTouchMode()) {
                return;
            }
            if (views != null) {
                views.add(this);
            }
        }
    }

    /**
     * We only want the current page that is being shown to be touchable.
     */
    @Override
    public void addTouchables(ArrayList<View> views) {
        // Note that we don't call super.addTouchables(), which means that
        // we don't call View.addTouchables().  This is okay because a ViewPager
        // is itself not touchable.
        for (int i = 0; i < getChildCount(); i++) {
            final View child = getChildAt(i);
            if (child.getVisibility() == VISIBLE) {
                ItemInfo ii = infoForChild(child);
                if (ii != null && ii.position == mCurItem) {
                    child.addTouchables(views);
                }
            }
        }
    }

    /**
     * We only want the current page that is being shown to be focusable.
     */
    @Override
    protected boolean onRequestFocusInDescendants(int direction,
            Rect previouslyFocusedRect) {
        int index;
        int increment;
        int end;
        int count = getChildCount();
        // Walk children forward or backward depending on the requested focus direction.
        if ((direction & FOCUS_FORWARD) != 0) {
            index = 0;
            increment = 1;
            end = count;
        } else {
            index = count - 1;
            increment = -1;
            end = -1;
        }
        for (int i = index; i != end; i += increment) {
            View child = getChildAt(i);
            if (child.getVisibility() == VISIBLE) {
                ItemInfo ii = infoForChild(child);
                if (ii != null && ii.position == mCurItem) {
                    if (child.requestFocus(direction, previouslyFocusedRect)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    @Override
    public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
        // ViewPagers should only report accessibility info for the current page,
        // otherwise things get very confusing.

        // TODO: Should this note something about the paging container?

        final int childCount = getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            if (child.getVisibility() == VISIBLE) {
                final ItemInfo ii = infoForChild(child);
                if (ii != null && ii.position == mCurItem &&
                        child.dispatchPopulateAccessibilityEvent(event)) {
                    return true;
                }
            }
        }

        return false;
    }

    @Override
    protected ViewGroup.LayoutParams generateDefaultLayoutParams() {
        return new LayoutParams();
    }

    @Override
    protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) {
        return generateDefaultLayoutParams();
    }

    @Override
    protected boolean checkLayoutParams(ViewGroup.LayoutParams p) {
        return p instanceof LayoutParams && super.checkLayoutParams(p);
    }

    @Override
    public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) {
        return new LayoutParams(getContext(), attrs);
    }

    // Accessibility delegate reporting ViewPager class info and paging scroll actions.
    class MyAccessibilityDelegate extends AccessibilityDelegateCompat {

        @Override
        public void onInitializeAccessibilityEvent(View host, AccessibilityEvent event) {
            super.onInitializeAccessibilityEvent(host, event);
            event.setClassName(ViewPager.class.getName());
        }

        @Override
        public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfoCompat info) {
            super.onInitializeAccessibilityNodeInfo(host, info);
            info.setClassName(ViewPager.class.getName());
            info.setScrollable(mAdapter != null && mAdapter.getCount() > 1);
            // Advertise forward/backward scroll actions only where a page exists.
            if (mAdapter != null && mCurItem >= 0 && mCurItem < mAdapter.getCount() - 1) {
                info.addAction(AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD);
            }
            if (mAdapter != null && mCurItem > 0 && mCurItem < mAdapter.getCount()) {
                info.addAction(AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD);
            }
        }

        @Override
        public boolean performAccessibilityAction(View host, int action, Bundle args) {
            if (super.performAccessibilityAction(host, action, args)) {
                return true;
            }
            switch (action) {
                case AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD: {
                    if (mAdapter != null && mCurItem >= 0 && mCurItem < mAdapter.getCount() - 1) {
                        setCurrentItem(mCurItem +
1);
                        return true;
                    }
                } return false;
                case AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD: {
                    if (mAdapter != null && mCurItem > 0 && mCurItem < mAdapter.getCount()) {
                        setCurrentItem(mCurItem - 1);
                        return true;
                    }
                } return false;
            }
            return false;
        }
    }

    // Forwards adapter data-set changes into the pager's dataSetChanged() handling.
    private class PagerObserver extends DataSetObserver {
        @Override
        public void onChanged() {
            dataSetChanged();
        }
        @Override
        public void onInvalidated() {
            dataSetChanged();
        }
    }

    /**
     * Layout parameters that should be supplied for views added to a
     * ViewPager.
     */
    public static class LayoutParams extends ViewGroup.LayoutParams {
        /**
         * true if this view is a decoration on the pager itself and not
         * a view supplied by the adapter.
         */
        public boolean isDecor;

        /**
         * Gravity setting for use on decor views only:
         * Where to position the view page within the overall ViewPager
         * container; constants are defined in {@link android.view.Gravity}.
         */
        public int gravity;

        /**
         * Width as a 0-1 multiplier of the measured pager width
         */
        float widthFactor = 0.f;

        /**
         * true if this view was added during layout and needs to be measured
         * before being positioned.
         */
        boolean needsMeasure;

        /**
         * Adapter position this view is for if !isDecor
         */
        int position;

        /**
         * Current child index within the ViewPager that this view occupies
         */
        int childIndex;

        public LayoutParams() {
            super(FILL_PARENT, FILL_PARENT);
        }

        public LayoutParams(Context context, AttributeSet attrs) {
            super(context, attrs);

            final TypedArray a = context.obtainStyledAttributes(attrs, LAYOUT_ATTRS);
            gravity = a.getInteger(0, Gravity.TOP);
            a.recycle();
        }
    }

    /**
     * Orders children so decor views sort after adapter pages, then by adapter position.
     * Used when a custom drawing order is in effect.
     */
    static class ViewPositionComparator implements Comparator<View> {
        @Override
        public int compare(View lhs, View rhs) {
            final LayoutParams llp = (LayoutParams) lhs.getLayoutParams();
            final LayoutParams rlp = (LayoutParams) rhs.getLayoutParams();
            if (llp.isDecor != rlp.isDecor) {
                return llp.isDecor ? 1 : -1;
            }
            return llp.position - rlp.position;
        }
    }
}
v4/java/android/support/v4/view/ViewPager.java
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.support.v4.view; import android.content.Context; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.os.Parcel; import android.os.Parcelable; import android.os.SystemClock; import android.support.v4.os.ParcelableCompat; import android.support.v4.os.ParcelableCompatCreatorCallbacks; import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat; import android.support.v4.widget.EdgeEffectCompat; import android.util.AttributeSet; import android.util.FloatMath; import android.util.Log; import android.view.FocusFinder; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.VelocityTracker; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewParent; import android.view.accessibility.AccessibilityEvent; import android.view.animation.Interpolator; import android.widget.Scroller; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; /** * Layout manager that allows the user to flip left and right * through pages of data. 
You supply an implementation of a * {@link PagerAdapter} to generate the pages that the view shows. * * <p>Note this class is currently under early design and * development. The API will likely change in later updates of * the compatibility library, requiring changes to the source code * of apps when they are compiled against the newer version.</p> * * <p>ViewPager is most often used in conjunction with {@link android.app.Fragment}, * which is a convenient way to supply and manage the lifecycle of each page. * There are standard adapters implemented for using fragments with the ViewPager, * which cover the most common use cases. These are * {@link android.support.v4.app.FragmentPagerAdapter}, * {@link android.support.v4.app.FragmentStatePagerAdapter}, * {@link android.support.v13.app.FragmentPagerAdapter}, and * {@link android.support.v13.app.FragmentStatePagerAdapter}; each of these * classes have simple code showing how to build a full user interface * with them. * * <p>Here is a more complicated example of ViewPager, using it in conjuction * with {@link android.app.ActionBar} tabs. You can find other examples of using * ViewPager in the API 4+ Support Demos and API 13+ Support Demos sample code. 
* * {@sample development/samples/Support13Demos/src/com/example/android/supportv13/app/ActionBarTabsPager.java * complete} */ public class ViewPager extends ViewGroup { private static final String TAG = "ViewPager"; private static final boolean DEBUG = false; private static final boolean USE_CACHE = false; private static final int DEFAULT_OFFSCREEN_PAGES = 1; private static final int MAX_SETTLE_DURATION = 600; // ms private static final int MIN_DISTANCE_FOR_FLING = 25; // dips private static final int DEFAULT_GUTTER_SIZE = 16; // dips private static final int[] LAYOUT_ATTRS = new int[] { android.R.attr.layout_gravity }; static class ItemInfo { Object object; int position; boolean scrolling; float widthFactor; float offset; } private static final Comparator<ItemInfo> COMPARATOR = new Comparator<ItemInfo>(){ @Override public int compare(ItemInfo lhs, ItemInfo rhs) { return lhs.position - rhs.position; } }; private static final Interpolator sInterpolator = new Interpolator() { public float getInterpolation(float t) { t -= 1.0f; return t * t * t * t * t + 1.0f; } }; private final ArrayList<ItemInfo> mItems = new ArrayList<ItemInfo>(); private final ItemInfo mTempItem = new ItemInfo(); private final Rect mTempRect = new Rect(); private PagerAdapter mAdapter; private int mCurItem; // Index of currently displayed page. private int mRestoredCurItem = -1; private Parcelable mRestoredAdapterState = null; private ClassLoader mRestoredClassLoader = null; private Scroller mScroller; private PagerObserver mObserver; private int mPageMargin; private Drawable mMarginDrawable; private int mTopPageBounds; private int mBottomPageBounds; // Offsets of the first and last items, if known. // Set during population, used to determine if we are at the beginning // or end of the pager data set during touch scrolling. 
private float mFirstOffset = -Float.MAX_VALUE; private float mLastOffset = Float.MAX_VALUE; private int mChildWidthMeasureSpec; private int mChildHeightMeasureSpec; private boolean mInLayout; private boolean mScrollingCacheEnabled; private boolean mPopulatePending; private int mOffscreenPageLimit = DEFAULT_OFFSCREEN_PAGES; private boolean mIsBeingDragged; private boolean mIsUnableToDrag; private boolean mIgnoreGutter; private int mDefaultGutterSize; private int mGutterSize; private int mTouchSlop; private float mInitialMotionX; /** * Position of the last motion event. */ private float mLastMotionX; private float mLastMotionY; /** * ID of the active pointer. This is used to retain consistency during * drags/flings if multiple pointers are used. */ private int mActivePointerId = INVALID_POINTER; /** * Sentinel value for no current active pointer. * Used by {@link #mActivePointerId}. */ private static final int INVALID_POINTER = -1; /** * Determines speed during touch scrolling */ private VelocityTracker mVelocityTracker; private int mMinimumVelocity; private int mMaximumVelocity; private int mFlingDistance; private int mCloseEnough; private int mSeenPositionMin; private int mSeenPositionMax; // If the pager is at least this close to its final position, complete the scroll // on touch down and let the user interact with the content inside instead of // "catching" the flinging pager. 
    private static final int CLOSE_ENOUGH = 2; // dp

    private boolean mFakeDragging;
    private long mFakeDragBeginTime;

    // Edge glow effects drawn when the user over-scrolls past either end.
    private EdgeEffectCompat mLeftEdge;
    private EdgeEffectCompat mRightEdge;

    private boolean mFirstLayout = true;
    private boolean mNeedCalculatePageOffsets = false;
    private boolean mCalledSuper;
    private int mDecorChildCount;

    private OnPageChangeListener mOnPageChangeListener;
    private OnPageChangeListener mInternalPageChangeListener;
    private OnAdapterChangeListener mAdapterChangeListener;
    private PageTransformer mPageTransformer;
    // Lazily-resolved reflective handle; see setChildrenDrawingOrderEnabledCompat().
    private Method mSetChildrenDrawingOrderEnabled;

    // Child drawing order used while a PageTransformer is installed.
    private static final int DRAW_ORDER_DEFAULT = 0;
    private static final int DRAW_ORDER_FORWARD = 1;
    private static final int DRAW_ORDER_REVERSE = 2;
    private int mDrawingOrder;
    private ArrayList<View> mDrawingOrderedChildren;
    private static final ViewPositionComparator sPositionComparator = new ViewPositionComparator();

    /**
     * Indicates that the pager is in an idle, settled state. The current page
     * is fully in view and no animation is in progress.
     */
    public static final int SCROLL_STATE_IDLE = 0;

    /**
     * Indicates that the pager is currently being dragged by the user.
     */
    public static final int SCROLL_STATE_DRAGGING = 1;

    /**
     * Indicates that the pager is in the process of settling to a final position.
     */
    public static final int SCROLL_STATE_SETTLING = 2;

    private int mScrollState = SCROLL_STATE_IDLE;

    /**
     * Callback interface for responding to changing state of the selected page.
     */
    public interface OnPageChangeListener {

        /**
         * This method will be invoked when the current page is scrolled, either as part
         * of a programmatically initiated smooth scroll or a user initiated touch scroll.
         *
         * @param position Position index of the first page currently being displayed.
         *                 Page position+1 will be visible if positionOffset is nonzero.
         * @param positionOffset Value from [0, 1) indicating the offset from the page at position.
         * @param positionOffsetPixels Value in pixels indicating the offset from position.
         */
        public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels);

        /**
         * This method will be invoked when a new page becomes selected. Animation is not
         * necessarily complete.
         *
         * @param position Position index of the new selected page.
         */
        public void onPageSelected(int position);

        /**
         * Called when the scroll state changes. Useful for discovering when the user
         * begins dragging, when the pager is automatically settling to the current page,
         * or when it is fully stopped/idle.
         *
         * @param state The new scroll state.
         * @see ViewPager#SCROLL_STATE_IDLE
         * @see ViewPager#SCROLL_STATE_DRAGGING
         * @see ViewPager#SCROLL_STATE_SETTLING
         */
        public void onPageScrollStateChanged(int state);
    }

    /**
     * Simple implementation of the {@link OnPageChangeListener} interface with stub
     * implementations of each method. Extend this if you do not intend to override
     * every method of {@link OnPageChangeListener}.
     */
    public static class SimpleOnPageChangeListener implements OnPageChangeListener {
        @Override
        public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            // This space for rent
        }

        @Override
        public void onPageSelected(int position) {
            // This space for rent
        }

        @Override
        public void onPageScrollStateChanged(int state) {
            // This space for rent
        }
    }

    /**
     * A PageTransformer is invoked whenever a visible/attached page is scrolled.
     * This offers an opportunity for the application to apply a custom transformation
     * to the page views using animation properties.
     *
     * <p>As property animation is only supported as of Android 3.0 and forward,
     * setting a PageTransformer on a ViewPager on earlier platform versions will
     * be ignored.</p>
     */
    public interface PageTransformer {
        /**
         * Apply a property transformation to the given page.
         *
         * @param page Apply the transformation to this page
         * @param position Position of page relative to the current front-and-center
         *                 position of the pager. 0 is front and center. 1 is one full
         *                 page position to the right, and -1 is one page position to the left.
         */
        public void transformPage(View page, float position);
    }

    /**
     * Used internally to monitor when adapters are switched.
     */
    interface OnAdapterChangeListener {
        public void onAdapterChanged(PagerAdapter oldAdapter, PagerAdapter newAdapter);
    }

    /**
     * Used internally to tag special types of child views that should be added as
     * pager decorations by default.
     */
    interface Decor {}

    public ViewPager(Context context) {
        super(context);
        initViewPager();
    }

    public ViewPager(Context context, AttributeSet attrs) {
        super(context, attrs);
        initViewPager();
    }

    // One-time setup shared by both constructors: scroller, touch thresholds,
    // edge-glow effects and the accessibility delegate.
    void initViewPager() {
        setWillNotDraw(false);
        setDescendantFocusability(FOCUS_AFTER_DESCENDANTS);
        setFocusable(true);
        final Context context = getContext();
        mScroller = new Scroller(context, sInterpolator);
        final ViewConfiguration configuration = ViewConfiguration.get(context);
        mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration);
        mMinimumVelocity = configuration.getScaledMinimumFlingVelocity();
        mMaximumVelocity = configuration.getScaledMaximumFlingVelocity();
        mLeftEdge = new EdgeEffectCompat(context);
        mRightEdge = new EdgeEffectCompat(context);

        // Convert the dip-valued constants into pixels for this screen density.
        final float density = context.getResources().getDisplayMetrics().density;
        mFlingDistance = (int) (MIN_DISTANCE_FOR_FLING * density);
        mCloseEnough = (int) (CLOSE_ENOUGH * density);
        mDefaultGutterSize = (int) (DEFAULT_GUTTER_SIZE * density);

        ViewCompat.setAccessibilityDelegate(this, new MyAccessibilityDelegate());

        if (ViewCompat.getImportantForAccessibility(this)
                == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
            ViewCompat.setImportantForAccessibility(this,
                    ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_YES);
        }
    }

    // Transitions between IDLE/DRAGGING/SETTLING and notifies the page-change listener.
    private void setScrollState(int newState) {
        if (mScrollState == newState) {
            return;
        }

        mScrollState = newState;
        if (newState == SCROLL_STATE_DRAGGING) {
            // A new drag is starting; forget the position range seen so far.
            mSeenPositionMin = mSeenPositionMax = -1;
        }
        if (mPageTransformer != null) {
            // PageTransformers can do complex things that benefit from hardware layers.
            enableLayers(newState != SCROLL_STATE_IDLE);
        }
        if (mOnPageChangeListener != null) {
            mOnPageChangeListener.onPageScrollStateChanged(newState);
        }
    }

    /**
     * Set a PagerAdapter that will supply views for this pager as needed.
     *
     * @param adapter Adapter to use
     */
    public void setAdapter(PagerAdapter adapter) {
        if (mAdapter != null) {
            // Tear down the old adapter: destroy every live page, drop its views
            // (decor views are kept), and reset the scroll position.
            mAdapter.unregisterDataSetObserver(mObserver);
            mAdapter.startUpdate(this);
            for (int i = 0; i < mItems.size(); i++) {
                final ItemInfo ii = mItems.get(i);
                mAdapter.destroyItem(this, ii.position, ii.object);
            }
            mAdapter.finishUpdate(this);
            mItems.clear();
            removeNonDecorViews();
            mCurItem = 0;
            scrollTo(0, 0);
        }

        final PagerAdapter oldAdapter = mAdapter;
        mAdapter = adapter;

        if (mAdapter != null) {
            if (mObserver == null) {
                mObserver = new PagerObserver();
            }
            mAdapter.registerDataSetObserver(mObserver);
            mPopulatePending = false;
            mFirstLayout = true;
            if (mRestoredCurItem >= 0) {
                // Apply state captured by onRestoreInstanceState() before an adapter existed.
                mAdapter.restoreState(mRestoredAdapterState, mRestoredClassLoader);
                setCurrentItemInternal(mRestoredCurItem, false, true);
                mRestoredCurItem = -1;
                mRestoredAdapterState = null;
                mRestoredClassLoader = null;
            } else {
                populate();
            }
        }

        if (mAdapterChangeListener != null && oldAdapter != adapter) {
            mAdapterChangeListener.onAdapterChanged(oldAdapter, adapter);
        }
    }

    // Removes every child that is not a decor view; used when swapping adapters.
    private void removeNonDecorViews() {
        for (int i = 0; i < getChildCount(); i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            if (!lp.isDecor) {
                removeViewAt(i);
                i--;    // Compensate for the removal shifting later children down.
            }
        }
    }

    /**
     * Retrieve the current adapter supplying pages.
     *
     * @return The currently registered PagerAdapter
     */
    public PagerAdapter getAdapter() {
        return mAdapter;
    }

    void setOnAdapterChangeListener(OnAdapterChangeListener listener) {
        mAdapterChangeListener = listener;
    }

    /**
     * Set the currently selected page.
     * If the ViewPager has already been through its first
     * layout with its current adapter there will be a smooth animated transition between
     * the current item and the specified item.
     *
     * @param item Item index to select
     */
    public void setCurrentItem(int item) {
        mPopulatePending = false;
        setCurrentItemInternal(item, !mFirstLayout, false);
    }

    /**
     * Set the currently selected page.
     *
     * @param item Item index to select
     * @param smoothScroll True to smoothly scroll to the new item, false to transition immediately
     */
    public void setCurrentItem(int item, boolean smoothScroll) {
        mPopulatePending = false;
        setCurrentItemInternal(item, smoothScroll, false);
    }

    public int getCurrentItem() {
        return mCurItem;
    }

    void setCurrentItemInternal(int item, boolean smoothScroll, boolean always) {
        setCurrentItemInternal(item, smoothScroll, always, 0);
    }

    // Core page-selection path: clamps the target into the adapter range,
    // repopulates pages around it, then either animates or jumps to the
    // destination scroll position, dispatching onPageSelected as needed.
    void setCurrentItemInternal(int item, boolean smoothScroll, boolean always, int velocity) {
        if (mAdapter == null || mAdapter.getCount() <= 0) {
            setScrollingCacheEnabled(false);
            return;
        }
        if (!always && mCurItem == item && mItems.size() != 0) {
            // Already on the requested page and not forced: nothing to do.
            setScrollingCacheEnabled(false);
            return;
        }

        // Clamp the requested index into [0, count - 1].
        if (item < 0) {
            item = 0;
        } else if (item >= mAdapter.getCount()) {
            item = mAdapter.getCount() - 1;
        }
        final int pageLimit = mOffscreenPageLimit;
        if (item > (mCurItem + pageLimit) || item < (mCurItem - pageLimit)) {
            // We are doing a jump by more than one page.  To avoid
            // glitches, we want to keep all current pages in the view
            // until the scroll ends.
            for (int i = 0; i < mItems.size(); i++) {
                mItems.get(i).scrolling = true;
            }
        }
        final boolean dispatchSelected = mCurItem != item;
        populate(item);
        final ItemInfo curInfo = infoForPosition(item);
        int destX = 0;
        if (curInfo != null) {
            final int width = getWidth();
            // Clamp the page offset into [mFirstOffset, mLastOffset] and convert to pixels.
            destX = (int) (width * Math.max(mFirstOffset, Math.min(curInfo.offset, mLastOffset)));
        }
        if (smoothScroll) {
            smoothScrollTo(destX, 0, velocity);
            if (dispatchSelected && mOnPageChangeListener != null) {
                mOnPageChangeListener.onPageSelected(item);
            }
            if (dispatchSelected && mInternalPageChangeListener != null) {
                mInternalPageChangeListener.onPageSelected(item);
            }
        } else {
            if (dispatchSelected && mOnPageChangeListener != null) {
                mOnPageChangeListener.onPageSelected(item);
            }
            if (dispatchSelected && mInternalPageChangeListener != null) {
                mInternalPageChangeListener.onPageSelected(item);
            }
            completeScroll();
            scrollTo(destX, 0);
        }
    }

    /**
     * Set a listener that will be invoked whenever the page changes or is incrementally
     * scrolled. See {@link OnPageChangeListener}.
     *
     * @param listener Listener to set
     */
    public void setOnPageChangeListener(OnPageChangeListener listener) {
        mOnPageChangeListener = listener;
    }

    /**
     * Set a {@link PageTransformer} that will be called for each attached page whenever
     * the scroll position is changed. This allows the application to apply custom property
     * transformations to each page, overriding the default sliding look and feel.
     *
     * <p><em>Note:</em> Prior to Android 3.0 the property animation APIs did not exist.
     * As a result, setting a PageTransformer prior to Android 3.0 (API 11) will have no effect.</p>
     *
     * @param reverseDrawingOrder true if the supplied PageTransformer requires page views
     *                            to be drawn from last to first instead of first to last.
* @param transformer PageTransformer that will modify each page's animation properties */ public void setPageTransformer(boolean reverseDrawingOrder, PageTransformer transformer) { if (Build.VERSION.SDK_INT >= 11) { final boolean hasTransformer = transformer != null; final boolean needsPopulate = hasTransformer != (mPageTransformer != null); mPageTransformer = transformer; setChildrenDrawingOrderEnabledCompat(hasTransformer); if (hasTransformer) { mDrawingOrder = reverseDrawingOrder ? DRAW_ORDER_REVERSE : DRAW_ORDER_FORWARD; } else { mDrawingOrder = DRAW_ORDER_DEFAULT; } if (needsPopulate) populate(); } } void setChildrenDrawingOrderEnabledCompat(boolean enable) { if (mSetChildrenDrawingOrderEnabled == null) { try { mSetChildrenDrawingOrderEnabled = ViewGroup.class.getDeclaredMethod( "setChildrenDrawingOrderEnabled", new Class[] { Boolean.TYPE }); } catch (NoSuchMethodException e) { Log.e(TAG, "Can't find setChildrenDrawingOrderEnabled", e); } } try { mSetChildrenDrawingOrderEnabled.invoke(this, enable); } catch (Exception e) { Log.e(TAG, "Error changing children drawing order", e); } } @Override protected int getChildDrawingOrder(int childCount, int i) { final int index = mDrawingOrder == DRAW_ORDER_REVERSE ? childCount - 1 - i : i; final int result = ((LayoutParams) mDrawingOrderedChildren.get(index).getLayoutParams()).childIndex; return result; } /** * Set a separate OnPageChangeListener for internal use by the support library. * * @param listener Listener to set * @return The old listener that was set, if any. */ OnPageChangeListener setInternalPageChangeListener(OnPageChangeListener listener) { OnPageChangeListener oldListener = mInternalPageChangeListener; mInternalPageChangeListener = listener; return oldListener; } /** * Returns the number of pages that will be retained to either side of the * current page in the view hierarchy in an idle state. Defaults to 1. 
* * @return How many pages will be kept offscreen on either side * @see #setOffscreenPageLimit(int) */ public int getOffscreenPageLimit() { return mOffscreenPageLimit; } /** * Set the number of pages that should be retained to either side of the * current page in the view hierarchy in an idle state. Pages beyond this * limit will be recreated from the adapter when needed. * * <p>This is offered as an optimization. If you know in advance the number * of pages you will need to support or have lazy-loading mechanisms in place * on your pages, tweaking this setting can have benefits in perceived smoothness * of paging animations and interaction. If you have a small number of pages (3-4) * that you can keep active all at once, less time will be spent in layout for * newly created view subtrees as the user pages back and forth.</p> * * <p>You should keep this limit low, especially if your pages have complex layouts. * This setting defaults to 1.</p> * * @param limit How many pages will be kept offscreen in an idle state. */ public void setOffscreenPageLimit(int limit) { if (limit < DEFAULT_OFFSCREEN_PAGES) { Log.w(TAG, "Requested offscreen page limit " + limit + " too small; defaulting to " + DEFAULT_OFFSCREEN_PAGES); limit = DEFAULT_OFFSCREEN_PAGES; } if (limit != mOffscreenPageLimit) { mOffscreenPageLimit = limit; populate(); } } /** * Set the margin between pages. * * @param marginPixels Distance between adjacent pages in pixels * @see #getPageMargin() * @see #setPageMarginDrawable(Drawable) * @see #setPageMarginDrawable(int) */ public void setPageMargin(int marginPixels) { final int oldMargin = mPageMargin; mPageMargin = marginPixels; final int width = getWidth(); recomputeScrollPosition(width, width, marginPixels, oldMargin); requestLayout(); } /** * Return the margin between pages. * * @return The size of the margin in pixels */ public int getPageMargin() { return mPageMargin; } /** * Set a drawable that will be used to fill the margin between pages. 
* * @param d Drawable to display between pages */ public void setPageMarginDrawable(Drawable d) { mMarginDrawable = d; if (d != null) refreshDrawableState(); setWillNotDraw(d == null); invalidate(); } /** * Set a drawable that will be used to fill the margin between pages. * * @param resId Resource ID of a drawable to display between pages */ public void setPageMarginDrawable(int resId) { setPageMarginDrawable(getContext().getResources().getDrawable(resId)); } @Override protected boolean verifyDrawable(Drawable who) { return super.verifyDrawable(who) || who == mMarginDrawable; } @Override protected void drawableStateChanged() { super.drawableStateChanged(); final Drawable d = mMarginDrawable; if (d != null && d.isStateful()) { d.setState(getDrawableState()); } } // We want the duration of the page snap animation to be influenced by the distance that // the screen has to travel, however, we don't want this duration to be effected in a // purely linear fashion. Instead, we use this method to moderate the effect that the distance // of travel has on the overall snap duration. float distanceInfluenceForSnapDuration(float f) { f -= 0.5f; // center the values about 0. f *= 0.3f * Math.PI / 2.0f; return (float) Math.sin(f); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis */ void smoothScrollTo(int x, int y) { smoothScrollTo(x, y, 0); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis * @param velocity the velocity associated with a fling, if applicable. (0 otherwise) */ void smoothScrollTo(int x, int y, int velocity) { if (getChildCount() == 0) { // Nothing to do. 
setScrollingCacheEnabled(false); return; } int sx = getScrollX(); int sy = getScrollY(); int dx = x - sx; int dy = y - sy; if (dx == 0 && dy == 0) { completeScroll(); populate(); setScrollState(SCROLL_STATE_IDLE); return; } setScrollingCacheEnabled(true); setScrollState(SCROLL_STATE_SETTLING); final int width = getWidth(); final int halfWidth = width / 2; final float distanceRatio = Math.min(1f, 1.0f * Math.abs(dx) / width); final float distance = halfWidth + halfWidth * distanceInfluenceForSnapDuration(distanceRatio); int duration = 0; velocity = Math.abs(velocity); if (velocity > 0) { duration = 4 * Math.round(1000 * Math.abs(distance / velocity)); } else { final float pageWidth = width * mAdapter.getPageWidth(mCurItem); final float pageDelta = (float) Math.abs(dx) / (pageWidth + mPageMargin); duration = (int) ((pageDelta + 1) * 100); } duration = Math.min(duration, MAX_SETTLE_DURATION); mScroller.startScroll(sx, sy, dx, dy, duration); ViewCompat.postInvalidateOnAnimation(this); } ItemInfo addNewItem(int position, int index) { ItemInfo ii = new ItemInfo(); ii.position = position; ii.object = mAdapter.instantiateItem(this, position); ii.widthFactor = mAdapter.getPageWidth(position); if (index < 0 || index >= mItems.size()) { mItems.add(ii); } else { mItems.add(index, ii); } return ii; } void dataSetChanged() { // This method only gets called if our observer is attached, so mAdapter is non-null. 
boolean needPopulate = mItems.size() < mOffscreenPageLimit * 2 + 1 && mItems.size() < mAdapter.getCount(); int newCurrItem = mCurItem; boolean isUpdating = false; for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); final int newPos = mAdapter.getItemPosition(ii.object); if (newPos == PagerAdapter.POSITION_UNCHANGED) { continue; } if (newPos == PagerAdapter.POSITION_NONE) { mItems.remove(i); i--; if (!isUpdating) { mAdapter.startUpdate(this); isUpdating = true; } mAdapter.destroyItem(this, ii.position, ii.object); needPopulate = true; if (mCurItem == ii.position) { // Keep the current item in the valid range newCurrItem = Math.max(0, Math.min(mCurItem, mAdapter.getCount() - 1)); needPopulate = true; } continue; } if (ii.position != newPos) { if (ii.position == mCurItem) { // Our current item changed position. Follow it. newCurrItem = newPos; } ii.position = newPos; needPopulate = true; } } if (isUpdating) { mAdapter.finishUpdate(this); } Collections.sort(mItems, COMPARATOR); if (needPopulate) { // Reset our known page widths; populate will recompute them. final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) { lp.widthFactor = 0.f; } } setCurrentItemInternal(newCurrItem, false, true); requestLayout(); } } void populate() { populate(mCurItem); } void populate(int newCurrentItem) { ItemInfo oldCurInfo = null; if (mCurItem != newCurrentItem) { oldCurInfo = infoForPosition(mCurItem); mCurItem = newCurrentItem; } if (mAdapter == null) { return; } // Bail now if we are waiting to populate. This is to hold off // on creating views from the time the user releases their finger to // fling to a new position until we have finished the scroll to // that position, avoiding glitches from happening at that point. 
if (mPopulatePending) { if (DEBUG) Log.i(TAG, "populate is pending, skipping for now..."); return; } // Also, don't populate until we are attached to a window. This is to // avoid trying to populate before we have restored our view hierarchy // state and conflicting with what is restored. if (getWindowToken() == null) { return; } mAdapter.startUpdate(this); final int pageLimit = mOffscreenPageLimit; final int startPos = Math.max(0, mCurItem - pageLimit); final int N = mAdapter.getCount(); final int endPos = Math.min(N-1, mCurItem + pageLimit); // Locate the currently focused item or add it if needed. int curIndex = -1; ItemInfo curItem = null; for (curIndex = 0; curIndex < mItems.size(); curIndex++) { final ItemInfo ii = mItems.get(curIndex); if (ii.position >= mCurItem) { if (ii.position == mCurItem) curItem = ii; break; } } if (curItem == null && N > 0) { curItem = addNewItem(mCurItem, curIndex); } // Fill 3x the available width or up to the number of offscreen // pages requested to either side, whichever is larger. // If we have no current item we have no work to do. if (curItem != null) { float extraWidthLeft = 0.f; int itemIndex = curIndex - 1; ItemInfo ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; final float leftWidthNeeded = 2.f - curItem.widthFactor; for (int pos = mCurItem - 1; pos >= 0; pos--) { if (extraWidthLeft >= leftWidthNeeded && pos < startPos) { if (ii == null) { break; } if (pos == ii.position && !ii.scrolling) { mItems.remove(itemIndex); mAdapter.destroyItem(this, pos, ii.object); itemIndex--; curIndex--; ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; } } else if (ii != null && pos == ii.position) { extraWidthLeft += ii.widthFactor; itemIndex--; ii = itemIndex >= 0 ? mItems.get(itemIndex) : null; } else { ii = addNewItem(pos, itemIndex + 1); extraWidthLeft += ii.widthFactor; curIndex++; ii = itemIndex >= 0 ? 
mItems.get(itemIndex) : null; } } float extraWidthRight = curItem.widthFactor; itemIndex = curIndex + 1; if (extraWidthRight < 2.f) { ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; for (int pos = mCurItem + 1; pos < N; pos++) { if (extraWidthRight >= 2.f && pos > endPos) { if (ii == null) { break; } if (pos == ii.position && !ii.scrolling) { mItems.remove(itemIndex); mAdapter.destroyItem(this, pos, ii.object); ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; } } else if (ii != null && pos == ii.position) { extraWidthRight += ii.widthFactor; itemIndex++; ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; } else { ii = addNewItem(pos, itemIndex); itemIndex++; extraWidthRight += ii.widthFactor; ii = itemIndex < mItems.size() ? mItems.get(itemIndex) : null; } } } calculatePageOffsets(curItem, curIndex, oldCurInfo); } if (DEBUG) { Log.i(TAG, "Current page list:"); for (int i=0; i<mItems.size(); i++) { Log.i(TAG, "#" + i + ": page " + mItems.get(i).position); } } mAdapter.setPrimaryItem(this, mCurItem, curItem != null ? curItem.object : null); mAdapter.finishUpdate(this); // Check width measurement of current pages and drawing sort order. // Update LayoutParams as needed. final boolean sort = mDrawingOrder != DRAW_ORDER_DEFAULT; if (sort) { if (mDrawingOrderedChildren == null) { mDrawingOrderedChildren = new ArrayList<View>(); } else { mDrawingOrderedChildren.clear(); } } final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); lp.childIndex = i; if (!lp.isDecor && lp.widthFactor == 0.f) { // 0 means requery the adapter for this, it doesn't have a valid width. 
final ItemInfo ii = infoForChild(child); if (ii != null) { lp.widthFactor = ii.widthFactor; lp.position = ii.position; } } if (sort) mDrawingOrderedChildren.add(child); } if (sort) { Collections.sort(mDrawingOrderedChildren, sPositionComparator); } if (hasFocus()) { View currentFocused = findFocus(); ItemInfo ii = currentFocused != null ? infoForAnyChild(currentFocused) : null; if (ii == null || ii.position != mCurItem) { for (int i=0; i<getChildCount(); i++) { View child = getChildAt(i); ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(FOCUS_FORWARD)) { break; } } } } } } private void calculatePageOffsets(ItemInfo curItem, int curIndex, ItemInfo oldCurInfo) { final int N = mAdapter.getCount(); final int width = getWidth(); final float marginOffset = width > 0 ? (float) mPageMargin / width : 0; // Fix up offsets for later layout. if (oldCurInfo != null) { final int oldCurPosition = oldCurInfo.position; // Base offsets off of oldCurInfo. if (oldCurPosition < curItem.position) { int itemIndex = 0; ItemInfo ii = null; float offset = oldCurInfo.offset + oldCurInfo.widthFactor + marginOffset; for (int pos = oldCurPosition + 1; pos <= curItem.position && itemIndex < mItems.size(); pos++) { ii = mItems.get(itemIndex); while (pos > ii.position && itemIndex < mItems.size() - 1) { itemIndex++; ii = mItems.get(itemIndex); } while (pos < ii.position) { // We don't have an item populated for this, // ask the adapter for an offset. 
offset += mAdapter.getPageWidth(pos) + marginOffset; pos++; } ii.offset = offset; offset += ii.widthFactor + marginOffset; } } else if (oldCurPosition > curItem.position) { int itemIndex = mItems.size() - 1; ItemInfo ii = null; float offset = oldCurInfo.offset; for (int pos = oldCurPosition - 1; pos >= curItem.position && itemIndex >= 0; pos--) { ii = mItems.get(itemIndex); while (pos < ii.position && itemIndex > 0) { itemIndex--; ii = mItems.get(itemIndex); } while (pos > ii.position) { // We don't have an item populated for this, // ask the adapter for an offset. offset -= mAdapter.getPageWidth(pos) + marginOffset; pos--; } offset -= ii.widthFactor + marginOffset; ii.offset = offset; } } } // Base all offsets off of curItem. final int itemCount = mItems.size(); float offset = curItem.offset; int pos = curItem.position - 1; mFirstOffset = curItem.position == 0 ? curItem.offset : -Float.MAX_VALUE; mLastOffset = curItem.position == N - 1 ? curItem.offset + curItem.widthFactor - 1 : Float.MAX_VALUE; // Previous pages for (int i = curIndex - 1; i >= 0; i--, pos--) { final ItemInfo ii = mItems.get(i); while (pos > ii.position) { offset -= mAdapter.getPageWidth(pos--) + marginOffset; } offset -= ii.widthFactor + marginOffset; ii.offset = offset; if (ii.position == 0) mFirstOffset = offset; } offset = curItem.offset + curItem.widthFactor + marginOffset; pos = curItem.position + 1; // Next pages for (int i = curIndex + 1; i < itemCount; i++, pos++) { final ItemInfo ii = mItems.get(i); while (pos < ii.position) { offset += mAdapter.getPageWidth(pos++) + marginOffset; } if (ii.position == N - 1) { mLastOffset = offset + ii.widthFactor - 1; } ii.offset = offset; offset += ii.widthFactor + marginOffset; } mNeedCalculatePageOffsets = false; } /** * This is the persistent state that is saved by ViewPager. Only needed * if you are creating a sublass of ViewPager that must save its own * state, in which case it should implement a subclass of this which * contains that state. 
*/ public static class SavedState extends BaseSavedState { int position; Parcelable adapterState; ClassLoader loader; public SavedState(Parcelable superState) { super(superState); } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeInt(position); out.writeParcelable(adapterState, flags); } @Override public String toString() { return "FragmentPager.SavedState{" + Integer.toHexString(System.identityHashCode(this)) + " position=" + position + "}"; } public static final Parcelable.Creator<SavedState> CREATOR = ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() { @Override public SavedState createFromParcel(Parcel in, ClassLoader loader) { return new SavedState(in, loader); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }); SavedState(Parcel in, ClassLoader loader) { super(in); if (loader == null) { loader = getClass().getClassLoader(); } position = in.readInt(); adapterState = in.readParcelable(loader); this.loader = loader; } } @Override public Parcelable onSaveInstanceState() { Parcelable superState = super.onSaveInstanceState(); SavedState ss = new SavedState(superState); ss.position = mCurItem; if (mAdapter != null) { ss.adapterState = mAdapter.saveState(); } return ss; } @Override public void onRestoreInstanceState(Parcelable state) { if (!(state instanceof SavedState)) { super.onRestoreInstanceState(state); return; } SavedState ss = (SavedState)state; super.onRestoreInstanceState(ss.getSuperState()); if (mAdapter != null) { mAdapter.restoreState(ss.adapterState, ss.loader); setCurrentItemInternal(ss.position, false, true); } else { mRestoredCurItem = ss.position; mRestoredAdapterState = ss.adapterState; mRestoredClassLoader = ss.loader; } } @Override public void addView(View child, int index, ViewGroup.LayoutParams params) { if (!checkLayoutParams(params)) { params = generateLayoutParams(params); } final LayoutParams lp = (LayoutParams) params; 
lp.isDecor |= child instanceof Decor; if (mInLayout) { if (lp != null && lp.isDecor) { throw new IllegalStateException("Cannot add pager decor view during layout"); } lp.needsMeasure = true; addViewInLayout(child, index, params); } else { super.addView(child, index, params); } if (USE_CACHE) { if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(mScrollingCacheEnabled); } else { child.setDrawingCacheEnabled(false); } } } ItemInfo infoForChild(View child) { for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (mAdapter.isViewFromObject(child, ii.object)) { return ii; } } return null; } ItemInfo infoForAnyChild(View child) { ViewParent parent; while ((parent=child.getParent()) != this) { if (parent == null || !(parent instanceof View)) { return null; } child = (View)parent; } return infoForChild(child); } ItemInfo infoForPosition(int position) { for (int i = 0; i < mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (ii.position == position) { return ii; } } return null; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { // For simple implementation, or internal size is always 0. // We depend on the container to specify the layout size of // our view. We can't really know what it is since we will be // adding and removing different arbitrary views and do not // want the layout to change as this happens. setMeasuredDimension(getDefaultSize(0, widthMeasureSpec), getDefaultSize(0, heightMeasureSpec)); final int measuredWidth = getMeasuredWidth(); final int maxGutterSize = measuredWidth / 10; mGutterSize = Math.min(maxGutterSize, mDefaultGutterSize); // Children are just made to fill our space. int childWidthSize = measuredWidth - getPaddingLeft() - getPaddingRight(); int childHeightSize = getMeasuredHeight() - getPaddingTop() - getPaddingBottom(); /* * Make sure all children have been properly measured. 
Decor views first. * Right now we cheat and make this less complicated by assuming decor * views won't intersect. We will pin to edges based on gravity. */ int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp != null && lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; int widthMode = MeasureSpec.AT_MOST; int heightMode = MeasureSpec.AT_MOST; boolean consumeVertical = vgrav == Gravity.TOP || vgrav == Gravity.BOTTOM; boolean consumeHorizontal = hgrav == Gravity.LEFT || hgrav == Gravity.RIGHT; if (consumeVertical) { widthMode = MeasureSpec.EXACTLY; } else if (consumeHorizontal) { heightMode = MeasureSpec.EXACTLY; } int widthSize = childWidthSize; int heightSize = childHeightSize; if (lp.width != LayoutParams.WRAP_CONTENT) { widthMode = MeasureSpec.EXACTLY; if (lp.width != LayoutParams.FILL_PARENT) { widthSize = lp.width; } } if (lp.height != LayoutParams.WRAP_CONTENT) { heightMode = MeasureSpec.EXACTLY; if (lp.height != LayoutParams.FILL_PARENT) { heightSize = lp.height; } } final int widthSpec = MeasureSpec.makeMeasureSpec(widthSize, widthMode); final int heightSpec = MeasureSpec.makeMeasureSpec(heightSize, heightMode); child.measure(widthSpec, heightSpec); if (consumeVertical) { childHeightSize -= child.getMeasuredHeight(); } else if (consumeHorizontal) { childWidthSize -= child.getMeasuredWidth(); } } } } mChildWidthMeasureSpec = MeasureSpec.makeMeasureSpec(childWidthSize, MeasureSpec.EXACTLY); mChildHeightMeasureSpec = MeasureSpec.makeMeasureSpec(childHeightSize, MeasureSpec.EXACTLY); // Make sure we have created all fragments that we need to have shown. mInLayout = true; populate(); mInLayout = false; // Page views next. 
    // --- tail of onMeasure(): measure the page (non-decor) children using the
    // child measure specs computed from the space left over by decor views. ---
    size = getChildCount();
    for (int i = 0; i < size; ++i) {
        final View child = getChildAt(i);
        if (child.getVisibility() != GONE) {
            if (DEBUG) Log.v(TAG, "Measuring #" + i + " " + child
                    + ": " + mChildWidthMeasureSpec);

            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            // NOTE(review): if lp were actually null, lp.widthFactor below would
            // NPE; presumably generateLayoutParams() guarantees a non-null lp —
            // confirm before relying on the null branch here.
            if (lp == null || !lp.isDecor) {
                final int widthSpec = MeasureSpec.makeMeasureSpec(
                        (int) (childWidthSize * lp.widthFactor), MeasureSpec.EXACTLY);
                child.measure(widthSpec, mChildHeightMeasureSpec);
            }
        }
    }
}

@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
    super.onSizeChanged(w, h, oldw, oldh);

    // Make sure scroll position is set correctly.
    if (w != oldw) {
        recomputeScrollPosition(w, oldw, mPageMargin, mPageMargin);
    }
}

/**
 * Re-derives the horizontal scroll position after the pager's width or page
 * margin changes, so the same fractional page offset stays on screen.
 */
private void recomputeScrollPosition(int width, int oldWidth, int margin, int oldMargin) {
    if (oldWidth > 0 && !mItems.isEmpty()) {
        // Keep the fractional page position constant across the resize.
        final int widthWithMargin = width + margin;
        final int oldWidthWithMargin = oldWidth + oldMargin;
        final int xpos = getScrollX();
        final float pageOffset = (float) xpos / oldWidthWithMargin;
        final int newOffsetPixels = (int) (pageOffset * widthWithMargin);

        scrollTo(newOffsetPixels, getScrollY());
        if (!mScroller.isFinished()) {
            // We now return to your regularly scheduled scroll, already in progress.
            final int newDuration = mScroller.getDuration() - mScroller.timePassed();
            // NOTE(review): infoForPosition() can return null when mCurItem is
            // not populated; unlike the else-branch below, targetInfo is not
            // null-checked — confirm mCurItem is always populated whenever a
            // scroller animation is in flight.
            ItemInfo targetInfo = infoForPosition(mCurItem);
            mScroller.startScroll(newOffsetPixels, 0,
                    (int) (targetInfo.offset * width), 0, newDuration);
        }
    } else {
        final ItemInfo ii = infoForPosition(mCurItem);
        final float scrollOffset = ii != null ? Math.min(ii.offset, mLastOffset) : 0;
        final int scrollPos = (int) (scrollOffset * width);
        if (scrollPos != getScrollX()) {
            completeScroll();
            scrollTo(scrollPos, getScrollY());
        }
    }
}

@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    mInLayout = true;
    populate();
    mInLayout = false;

    final int count = getChildCount();
    int width = r - l;
    int height = b - t;
    int paddingLeft = getPaddingLeft();
    int paddingTop = getPaddingTop();
    int paddingRight = getPaddingRight();
    int paddingBottom = getPaddingBottom();
    final int scrollX = getScrollX();

    int decorCount = 0;

    // First pass - decor views. We need to do this in two passes so that
    // we have the proper offsets for non-decor views later.
    for (int i = 0; i < count; i++) {
        final View child = getChildAt(i);
        if (child.getVisibility() != GONE) {
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            int childLeft = 0;
            int childTop = 0;
            if (lp.isDecor) {
                // Pin decor to an edge per its gravity, consuming padding so
                // later decor (and pages) stack inside it.
                final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK;
                final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK;
                switch (hgrav) {
                    default:
                        childLeft = paddingLeft;
                        break;
                    case Gravity.LEFT:
                        childLeft = paddingLeft;
                        paddingLeft += child.getMeasuredWidth();
                        break;
                    case Gravity.CENTER_HORIZONTAL:
                        childLeft = Math.max((width - child.getMeasuredWidth()) / 2,
                                paddingLeft);
                        break;
                    case Gravity.RIGHT:
                        childLeft = width - paddingRight - child.getMeasuredWidth();
                        paddingRight += child.getMeasuredWidth();
                        break;
                }
                switch (vgrav) {
                    default:
                        childTop = paddingTop;
                        break;
                    case Gravity.TOP:
                        childTop = paddingTop;
                        paddingTop += child.getMeasuredHeight();
                        break;
                    case Gravity.CENTER_VERTICAL:
                        childTop = Math.max((height - child.getMeasuredHeight()) / 2,
                                paddingTop);
                        break;
                    case Gravity.BOTTOM:
                        childTop = height - paddingBottom - child.getMeasuredHeight();
                        paddingBottom += child.getMeasuredHeight();
                        break;
                }
                // Decor is positioned relative to the visible viewport.
                childLeft += scrollX;
                child.layout(childLeft, childTop,
                        childLeft + child.getMeasuredWidth(),
                        childTop + child.getMeasuredHeight());
                // --- continuation of onLayout() decor pass ---
                decorCount++;
            }
        }
    }

    // Page views. Do this once we have the right padding offsets from above.
    for (int i = 0; i < count; i++) {
        final View child = getChildAt(i);
        if (child.getVisibility() != GONE) {
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            ItemInfo ii;
            if (!lp.isDecor && (ii = infoForChild(child)) != null) {
                // Horizontal placement comes from the page's fractional offset.
                int loff = (int) (width * ii.offset);
                int childLeft = paddingLeft + loff;
                int childTop = paddingTop;
                if (lp.needsMeasure) {
                    // This was added during layout and needs measurement.
                    // Do it now that we know what we're working with.
                    lp.needsMeasure = false;
                    final int widthSpec = MeasureSpec.makeMeasureSpec(
                            (int) ((width - paddingLeft - paddingRight) * lp.widthFactor),
                            MeasureSpec.EXACTLY);
                    final int heightSpec = MeasureSpec.makeMeasureSpec(
                            (int) (height - paddingTop - paddingBottom),
                            MeasureSpec.EXACTLY);
                    child.measure(widthSpec, heightSpec);
                }
                if (DEBUG) Log.v(TAG, "Positioning #" + i + " " + child + " f=" + ii.object
                        + ":" + childLeft + "," + childTop + " " + child.getMeasuredWidth()
                        + "x" + child.getMeasuredHeight());
                child.layout(childLeft, childTop,
                        childLeft + child.getMeasuredWidth(),
                        childTop + child.getMeasuredHeight());
            }
        }
    }
    // Cache vertical page bounds (used when drawing the page margin drawable).
    mTopPageBounds = paddingTop;
    mBottomPageBounds = height - paddingBottom;
    mDecorChildCount = decorCount;
    mFirstLayout = false;
}

@Override
public void computeScroll() {
    if (!mScroller.isFinished() && mScroller.computeScrollOffset()) {
        int oldX = getScrollX();
        int oldY = getScrollY();
        int x = mScroller.getCurrX();
        int y = mScroller.getCurrY();

        if (oldX != x || oldY != y) {
            scrollTo(x, y);
            if (!pageScrolled(x)) {
                // No pages to scroll through; snap back to origin.
                mScroller.abortAnimation();
                scrollTo(0, y);
            }
        }

        // Keep on drawing until the animation has finished.
        ViewCompat.postInvalidateOnAnimation(this);
        return;
    }

    // Done with scroll, clean up state.
    completeScroll();
}

/**
 * Translates a raw x scroll position into page/offset terms and dispatches
 * onPageScrolled. Returns false when there are no items to scroll through.
 * Enforces that overrides of onPageScrolled call through to super
 * (via the mCalledSuper flag).
 */
private boolean pageScrolled(int xpos) {
    if (mItems.size() == 0) {
        mCalledSuper = false;
        onPageScrolled(0, 0, 0);
        if (!mCalledSuper) {
            throw new IllegalStateException(
                    "onPageScrolled did not call superclass implementation");
        }
        return false;
    }
    final ItemInfo ii = infoForCurrentScrollPosition();
    final int width = getWidth();
    final int widthWithMargin = width + mPageMargin;
    final float marginOffset = (float) mPageMargin / width;
    final int currentPage = ii.position;
    final float pageOffset = (((float) xpos / width) - ii.offset) /
            (ii.widthFactor + marginOffset);
    final int offsetPixels = (int) (pageOffset * widthWithMargin);

    mCalledSuper = false;
    onPageScrolled(currentPage, pageOffset, offsetPixels);
    if (!mCalledSuper) {
        throw new IllegalStateException(
                "onPageScrolled did not call superclass implementation");
    }
    return true;
}

/**
 * This method will be invoked when the current page is scrolled, either as part
 * of a programmatically initiated smooth scroll or a user initiated touch scroll.
 * If you override this method you must call through to the superclass implementation
 * (e.g. super.onPageScrolled(position, offset, offsetPixels)) before onPageScrolled
 * returns.
 *
 * @param position Position index of the first page currently being displayed.
 *                 Page position+1 will be visible if positionOffset is nonzero.
 * @param offset Value from [0, 1) indicating the offset from the page at position.
 * @param offsetPixels Value in pixels indicating the offset from position.
 */
protected void onPageScrolled(int position, float offset, int offsetPixels) {
    // Offset any decor views if needed - keep them on-screen at all times.
    // --- body of onPageScrolled(): reposition decor, update listeners/transformer ---
    if (mDecorChildCount > 0) {
        final int scrollX = getScrollX();
        int paddingLeft = getPaddingLeft();
        int paddingRight = getPaddingRight();
        final int width = getWidth();
        final int childCount = getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();
            if (!lp.isDecor) continue;

            // Recompute the decor's pinned position against the current scroll.
            final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK;
            int childLeft = 0;
            switch (hgrav) {
                default:
                    childLeft = paddingLeft;
                    break;
                case Gravity.LEFT:
                    childLeft = paddingLeft;
                    paddingLeft += child.getWidth();
                    break;
                case Gravity.CENTER_HORIZONTAL:
                    childLeft = Math.max((width - child.getMeasuredWidth()) / 2,
                            paddingLeft);
                    break;
                case Gravity.RIGHT:
                    childLeft = width - paddingRight - child.getMeasuredWidth();
                    paddingRight += child.getMeasuredWidth();
                    break;
            }
            childLeft += scrollX;

            final int childOffset = childLeft - child.getLeft();
            if (childOffset != 0) {
                child.offsetLeftAndRight(childOffset);
            }
        }
    }

    // Track the range of positions the user has seen during this drag
    // (consumed by determineTargetPage).
    if (mSeenPositionMin < 0 || position < mSeenPositionMin) {
        mSeenPositionMin = position;
    }
    if (mSeenPositionMax < 0 || FloatMath.ceil(position + offset) > mSeenPositionMax) {
        mSeenPositionMax = position + 1;
    }

    if (mOnPageChangeListener != null) {
        mOnPageChangeListener.onPageScrolled(position, offset, offsetPixels);
    }
    if (mInternalPageChangeListener != null) {
        mInternalPageChangeListener.onPageScrolled(position, offset, offsetPixels);
    }

    if (mPageTransformer != null) {
        final int scrollX = getScrollX();
        final int childCount = getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = getChildAt(i);
            final LayoutParams lp = (LayoutParams) child.getLayoutParams();

            if (lp.isDecor) continue;

            // Position relative to the viewport: 0 = centered, -1/+1 = one page off.
            final float transformPos = (float) (child.getLeft() - scrollX) / getWidth();
            mPageTransformer.transformPage(child, transformPos);
        }
    }

    // Signal to pageScrolled() that super was called.
    mCalledSuper = true;
}

/**
 * Finishes any in-flight scroller animation, snaps to its final position, and
 * repopulates pages if a settle or a page scroll had deferred it.
 */
private void completeScroll() {
    boolean needPopulate = mScrollState == SCROLL_STATE_SETTLING;
    if (needPopulate) {
        // Done with scroll, no longer want to cache view drawing.
        setScrollingCacheEnabled(false);
        mScroller.abortAnimation();
        int oldX = getScrollX();
        int oldY = getScrollY();
        int x = mScroller.getCurrX();
        int y = mScroller.getCurrY();
        if (oldX != x || oldY != y) {
            scrollTo(x, y);
        }
        setScrollState(SCROLL_STATE_IDLE);
    }
    mPopulatePending = false;
    for (int i=0; i<mItems.size(); i++) {
        ItemInfo ii = mItems.get(i);
        if (ii.scrolling) {
            needPopulate = true;
            ii.scrolling = false;
        }
    }
    if (needPopulate) {
        populate();
    }
}

/** True if a horizontal drag starting at x within the edge gutter moves inward. */
private boolean isGutterDrag(float x, float dx) {
    return (x < mGutterSize && dx > 0) || (x > getWidth() - mGutterSize && dx < 0);
}

/** Toggles hardware layers on all children (used while animating). */
private void enableLayers(boolean enable) {
    final int childCount = getChildCount();
    for (int i = 0; i < childCount; i++) {
        final int layerType = enable ?
                ViewCompat.LAYER_TYPE_HARDWARE : ViewCompat.LAYER_TYPE_NONE;
        ViewCompat.setLayerType(getChildAt(i), layerType, null);
    }
}

@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
    /*
     * This method JUST determines whether we want to intercept the motion.
     * If we return true, onMotionEvent will be called and we do the actual
     * scrolling there.
     */

    final int action = ev.getAction() & MotionEventCompat.ACTION_MASK;

    // Always take care of the touch gesture being complete.
    if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
        // Release the drag.
        if (DEBUG) Log.v(TAG, "Intercept done!");
        mIsBeingDragged = false;
        mIsUnableToDrag = false;
        mActivePointerId = INVALID_POINTER;
        if (mVelocityTracker != null) {
            mVelocityTracker.recycle();
            mVelocityTracker = null;
        }
        return false;
    }

    // Nothing more to do here if we have decided whether or not we
    // are dragging.
    // --- body of onInterceptTouchEvent(): decide whether to claim the gesture ---
    if (action != MotionEvent.ACTION_DOWN) {
        if (mIsBeingDragged) {
            if (DEBUG) Log.v(TAG, "Intercept returning true!");
            return true;
        }
        if (mIsUnableToDrag) {
            if (DEBUG) Log.v(TAG, "Intercept returning false!");
            return false;
        }
    }

    switch (action) {
        case MotionEvent.ACTION_MOVE: {
            /*
             * mIsBeingDragged == false, otherwise the shortcut would have caught it. Check
             * whether the user has moved far enough from his original down touch.
             */

            /*
             * Locally do absolute value. mLastMotionY is set to the y value
             * of the down event.
             */
            final int activePointerId = mActivePointerId;
            if (activePointerId == INVALID_POINTER) {
                // If we don't have a valid id, the touch down wasn't on content.
                break;
            }

            final int pointerIndex = MotionEventCompat.findPointerIndex(ev, activePointerId);
            final float x = MotionEventCompat.getX(ev, pointerIndex);
            final float dx = x - mLastMotionX;
            final float xDiff = Math.abs(dx);
            final float y = MotionEventCompat.getY(ev, pointerIndex);
            final float yDiff = Math.abs(y - mLastMotionY);
            if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff);

            if (dx != 0 && !isGutterDrag(mLastMotionX, dx) &&
                    canScroll(this, false, (int) dx, (int) x, (int) y)) {
                // Nested view has scrollable area under this point. Let it be handled there.
                mInitialMotionX = mLastMotionX = x;
                mLastMotionY = y;
                mIsUnableToDrag = true;
                return false;
            }
            if (xDiff > mTouchSlop && xDiff > yDiff) {
                // Mostly-horizontal movement past the slop: start dragging.
                if (DEBUG) Log.v(TAG, "Starting drag!");
                mIsBeingDragged = true;
                setScrollState(SCROLL_STATE_DRAGGING);
                mLastMotionX = dx > 0 ? mInitialMotionX + mTouchSlop :
                        mInitialMotionX - mTouchSlop;
                setScrollingCacheEnabled(true);
            } else {
                if (yDiff > mTouchSlop) {
                    // The finger has moved enough in the vertical
                    // direction to be counted as a drag... abort
                    // any attempt to drag horizontally, to work correctly
                    // with children that have scrolling containers.
                    if (DEBUG) Log.v(TAG, "Starting unable to drag!");
                    mIsUnableToDrag = true;
                }
            }
            if (mIsBeingDragged) {
                // Scroll to follow the motion event
                if (performDrag(x)) {
                    ViewCompat.postInvalidateOnAnimation(this);
                }
            }
            break;
        }

        case MotionEvent.ACTION_DOWN: {
            /*
             * Remember location of down touch.
             * ACTION_DOWN always refers to pointer index 0.
             */
            mLastMotionX = mInitialMotionX = ev.getX();
            mLastMotionY = ev.getY();
            mActivePointerId = MotionEventCompat.getPointerId(ev, 0);
            mIsUnableToDrag = false;

            mScroller.computeScrollOffset();
            if (mScrollState == SCROLL_STATE_SETTLING &&
                    Math.abs(mScroller.getFinalX() - mScroller.getCurrX()) > mCloseEnough) {
                // Let the user 'catch' the pager as it animates.
                mScroller.abortAnimation();
                mPopulatePending = false;
                populate();
                mIsBeingDragged = true;
                setScrollState(SCROLL_STATE_DRAGGING);
            } else {
                completeScroll();
                mIsBeingDragged = false;
            }

            if (DEBUG) Log.v(TAG, "Down at " + mLastMotionX + "," + mLastMotionY
                    + " mIsBeingDragged=" + mIsBeingDragged
                    + "mIsUnableToDrag=" + mIsUnableToDrag);
            break;
        }

        case MotionEventCompat.ACTION_POINTER_UP:
            onSecondaryPointerUp(ev);
            break;
    }

    if (mVelocityTracker == null) {
        mVelocityTracker = VelocityTracker.obtain();
    }
    mVelocityTracker.addMovement(ev);

    /*
     * The only time we want to intercept motion events is if we are in the
     * drag mode.
     */
    return mIsBeingDragged;
}

@Override
public boolean onTouchEvent(MotionEvent ev) {
    if (mFakeDragging) {
        // A fake drag is in progress already, ignore this real one
        // but still eat the touch events.
        // (It is likely that the user is multi-touching the screen.)
        return true;
    }

    if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getEdgeFlags() != 0) {
        // Don't handle edge touches immediately -- they may actually belong to one of our
        // descendants.
        return false;
    }

    if (mAdapter == null || mAdapter.getCount() == 0) {
        // Nothing to present or scroll; nothing to touch.
        // --- body of onTouchEvent(): track drag, fling, and pointer changes ---
        return false;
    }

    if (mVelocityTracker == null) {
        mVelocityTracker = VelocityTracker.obtain();
    }
    mVelocityTracker.addMovement(ev);

    final int action = ev.getAction();
    boolean needsInvalidate = false;

    switch (action & MotionEventCompat.ACTION_MASK) {
        case MotionEvent.ACTION_DOWN: {
            mScroller.abortAnimation();
            mPopulatePending = false;
            populate();
            mIsBeingDragged = true;
            setScrollState(SCROLL_STATE_DRAGGING);

            // Remember where the motion event started
            mLastMotionX = mInitialMotionX = ev.getX();
            mActivePointerId = MotionEventCompat.getPointerId(ev, 0);
            break;
        }
        case MotionEvent.ACTION_MOVE:
            if (!mIsBeingDragged) {
                final int pointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId);
                final float x = MotionEventCompat.getX(ev, pointerIndex);
                final float xDiff = Math.abs(x - mLastMotionX);
                final float y = MotionEventCompat.getY(ev, pointerIndex);
                final float yDiff = Math.abs(y - mLastMotionY);
                if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff);
                if (xDiff > mTouchSlop && xDiff > yDiff) {
                    if (DEBUG) Log.v(TAG, "Starting drag!");
                    mIsBeingDragged = true;
                    mLastMotionX = x - mInitialMotionX > 0 ? mInitialMotionX + mTouchSlop :
                            mInitialMotionX - mTouchSlop;
                    setScrollState(SCROLL_STATE_DRAGGING);
                    setScrollingCacheEnabled(true);
                }
            }
            // Not else! Note that mIsBeingDragged can be set above.
            if (mIsBeingDragged) {
                // Scroll to follow the motion event
                final int activePointerIndex = MotionEventCompat.findPointerIndex(
                        ev, mActivePointerId);
                final float x = MotionEventCompat.getX(ev, activePointerIndex);
                needsInvalidate |= performDrag(x);
            }
            break;
        case MotionEvent.ACTION_UP:
            if (mIsBeingDragged) {
                // Fling: pick the destination page from velocity, offset, and
                // total travel, then settle there.
                final VelocityTracker velocityTracker = mVelocityTracker;
                velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity);
                int initialVelocity = (int) VelocityTrackerCompat.getXVelocity(
                        velocityTracker, mActivePointerId);
                mPopulatePending = true;
                final int width = getWidth();
                final int scrollX = getScrollX();
                final ItemInfo ii = infoForCurrentScrollPosition();
                final int currentPage = ii.position;
                final float pageOffset = (((float) scrollX / width) - ii.offset) / ii.widthFactor;
                final int activePointerIndex =
                        MotionEventCompat.findPointerIndex(ev, mActivePointerId);
                final float x = MotionEventCompat.getX(ev, activePointerIndex);
                final int totalDelta = (int) (x - mInitialMotionX);
                int nextPage = determineTargetPage(currentPage, pageOffset, initialVelocity,
                        totalDelta);
                setCurrentItemInternal(nextPage, true, true, initialVelocity);

                mActivePointerId = INVALID_POINTER;
                endDrag();
                needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease();
            }
            break;
        case MotionEvent.ACTION_CANCEL:
            if (mIsBeingDragged) {
                // Abandon the gesture: snap back to the current item.
                setCurrentItemInternal(mCurItem, true, true);
                mActivePointerId = INVALID_POINTER;
                endDrag();
                needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease();
            }
            break;
        case MotionEventCompat.ACTION_POINTER_DOWN: {
            final int index = MotionEventCompat.getActionIndex(ev);
            final float x = MotionEventCompat.getX(ev, index);
            mLastMotionX = x;
            mActivePointerId = MotionEventCompat.getPointerId(ev, index);
            break;
        }
        case MotionEventCompat.ACTION_POINTER_UP:
            onSecondaryPointerUp(ev);
            mLastMotionX = MotionEventCompat.getX(ev,
                    MotionEventCompat.findPointerIndex(ev, mActivePointerId));
            break;
    }
    if (needsInvalidate) {
        ViewCompat.postInvalidateOnAnimation(this);
    }
    return true;
}
/**
 * Applies a drag to the new pointer x position, clamping the scroll to the
 * first/last page bounds and feeding overscroll into the edge effects.
 * Returns true if an edge effect changed and the view needs invalidation.
 */
private boolean performDrag(float x) {
    boolean needsInvalidate = false;

    final float deltaX = mLastMotionX - x;
    mLastMotionX = x;

    float oldScrollX = getScrollX();
    float scrollX = oldScrollX + deltaX;
    final int width = getWidth();

    float leftBound = width * mFirstOffset;
    float rightBound = width * mLastOffset;
    boolean leftAbsolute = true;
    boolean rightAbsolute = true;

    // If the first/last loaded pages are not the adapter's true ends, the
    // bounds are soft: clamp but show no edge glow.
    final ItemInfo firstItem = mItems.get(0);
    final ItemInfo lastItem = mItems.get(mItems.size() - 1);
    if (firstItem.position != 0) {
        leftAbsolute = false;
        leftBound = firstItem.offset * width;
    }
    if (lastItem.position != mAdapter.getCount() - 1) {
        rightAbsolute = false;
        rightBound = lastItem.offset * width;
    }

    if (scrollX < leftBound) {
        if (leftAbsolute) {
            float over = leftBound - scrollX;
            needsInvalidate = mLeftEdge.onPull(Math.abs(over) / width);
        }
        scrollX = leftBound;
    } else if (scrollX > rightBound) {
        if (rightAbsolute) {
            float over = scrollX - rightBound;
            needsInvalidate = mRightEdge.onPull(Math.abs(over) / width);
        }
        scrollX = rightBound;
    }

    // Don't lose the rounded component
    mLastMotionX += scrollX - (int) scrollX;
    scrollTo((int) scrollX, getScrollY());
    pageScrolled((int) scrollX);

    return needsInvalidate;
}

/**
 * @return Info about the page at the current scroll position.
 *         This can be synthetic for a missing middle page; the 'object' field can be null.
 */
private ItemInfo infoForCurrentScrollPosition() {
    final int width = getWidth();
    final float scrollOffset = width > 0 ? (float) getScrollX() / width : 0;
    final float marginOffset = width > 0 ? (float) mPageMargin / width : 0;
    int lastPos = -1;
    float lastOffset = 0.f;
    float lastWidth = 0.f;
    boolean first = true;

    ItemInfo lastItem = null;
    for (int i = 0; i < mItems.size(); i++) {
        ItemInfo ii = mItems.get(i);
        float offset;
        if (!first && ii.position != lastPos + 1) {
            // Create a synthetic item for a missing page.
            // (mTempItem is reused; i-- so the real item is revisited next pass.)
            ii = mTempItem;
            ii.offset = lastOffset + lastWidth + marginOffset;
            ii.position = lastPos + 1;
            ii.widthFactor = mAdapter.getPageWidth(ii.position);
            i--;
        }
        offset = ii.offset;

        final float leftBound = offset;
        final float rightBound = offset + ii.widthFactor + marginOffset;
        if (first || scrollOffset >= leftBound) {
            if (scrollOffset < rightBound || i == mItems.size() - 1) {
                return ii;
            }
        } else {
            return lastItem;
        }
        first = false;
        lastPos = ii.position;
        lastOffset = offset;
        lastWidth = ii.widthFactor;
        lastItem = ii;
    }

    return lastItem;
}

/**
 * Chooses the page to settle on after a drag/fling, from the fling velocity,
 * the current fractional offset, total finger travel, and the min/max
 * positions seen during the gesture; the result is clamped to loaded items.
 */
private int determineTargetPage(int currentPage, float pageOffset, int velocity, int deltaX) {
    int targetPage;
    if (Math.abs(deltaX) > mFlingDistance && Math.abs(velocity) > mMinimumVelocity) {
        // A genuine fling: positive velocity pages left, negative pages right.
        targetPage = velocity > 0 ? currentPage : currentPage + 1;
    } else if (mSeenPositionMin >= 0 && mSeenPositionMin < currentPage && pageOffset < 0.5f) {
        targetPage = currentPage + 1;
    } else if (mSeenPositionMax >= 0 && mSeenPositionMax > currentPage + 1 &&
            pageOffset >= 0.5f) {
        targetPage = currentPage - 1;
    } else {
        // Otherwise round to the nearest page.
        targetPage = (int) (currentPage + pageOffset + 0.5f);
    }

    if (mItems.size() > 0) {
        final ItemInfo firstItem = mItems.get(0);
        final ItemInfo lastItem = mItems.get(mItems.size() - 1);

        // Only let the user target pages we have items for
        targetPage = Math.max(firstItem.position, Math.min(targetPage, lastItem.position));
    }

    return targetPage;
}

@Override
public void draw(Canvas canvas) {
    super.draw(canvas);
    boolean needsInvalidate = false;

    final int overScrollMode = ViewCompat.getOverScrollMode(this);
    if (overScrollMode == ViewCompat.OVER_SCROLL_ALWAYS ||
            (overScrollMode == ViewCompat.OVER_SCROLL_IF_CONTENT_SCROLLS &&
                    mAdapter != null && mAdapter.getCount() > 1)) {
        if (!mLeftEdge.isFinished()) {
            // Rotate the canvas so the vertical edge effect renders along the
            // left edge.
            final int restoreCount = canvas.save();
            final int height = getHeight() - getPaddingTop() - getPaddingBottom();
            final int width = getWidth();

            canvas.rotate(270);
            canvas.translate(-height + getPaddingTop(), mFirstOffset * width);
            mLeftEdge.setSize(height,
width); needsInvalidate |= mLeftEdge.draw(canvas); canvas.restoreToCount(restoreCount); } if (!mRightEdge.isFinished()) { final int restoreCount = canvas.save(); final int width = getWidth(); final int height = getHeight() - getPaddingTop() - getPaddingBottom(); canvas.rotate(90); canvas.translate(-getPaddingTop(), -(mLastOffset + 1) * width); mRightEdge.setSize(height, width); needsInvalidate |= mRightEdge.draw(canvas); canvas.restoreToCount(restoreCount); } } else { mLeftEdge.finish(); mRightEdge.finish(); } if (needsInvalidate) { // Keep animating ViewCompat.postInvalidateOnAnimation(this); } } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); // Draw the margin drawable between pages if needed. if (mPageMargin > 0 && mMarginDrawable != null && mItems.size() > 0 && mAdapter != null) { final int scrollX = getScrollX(); final int width = getWidth(); final float marginOffset = (float) mPageMargin / width; int itemIndex = 0; ItemInfo ii = mItems.get(0); float offset = ii.offset; final int itemCount = mItems.size(); final int firstPos = ii.position; final int lastPos = mItems.get(itemCount - 1).position; for (int pos = firstPos; pos < lastPos; pos++) { while (pos > ii.position && itemIndex < itemCount) { ii = mItems.get(++itemIndex); } float drawAt; if (pos == ii.position) { drawAt = (ii.offset + ii.widthFactor) * width; offset = ii.offset + ii.widthFactor + marginOffset; } else { float widthFactor = mAdapter.getPageWidth(pos); drawAt = (offset + widthFactor) * width; offset += widthFactor + marginOffset; } if (drawAt + mPageMargin > scrollX) { mMarginDrawable.setBounds((int) drawAt, mTopPageBounds, (int) (drawAt + mPageMargin + 0.5f), mBottomPageBounds); mMarginDrawable.draw(canvas); } if (drawAt > scrollX + width) { break; // No more visible, no sense in continuing } } } } /** * Start a fake drag of the pager. 
 * <p>A fake drag can be useful if you want to synchronize the motion of the ViewPager
 * with the touch scrolling of another view, while still letting the ViewPager
 * control the snapping motion and fling behavior. (e.g. parallax-scrolling tabs.)
 * Call {@link #fakeDragBy(float)} to simulate the actual drag motion. Call
 * {@link #endFakeDrag()} to complete the fake drag and fling as necessary.
 *
 * <p>During a fake drag the ViewPager will ignore all touch events. If a real drag
 * is already in progress, this method will return false.
 *
 * @return true if the fake drag began successfully, false if it could not be started.
 *
 * @see #fakeDragBy(float)
 * @see #endFakeDrag()
 */
public boolean beginFakeDrag() {
    if (mIsBeingDragged) {
        return false;
    }
    mFakeDragging = true;
    setScrollState(SCROLL_STATE_DRAGGING);
    mInitialMotionX = mLastMotionX = 0;
    if (mVelocityTracker == null) {
        mVelocityTracker = VelocityTracker.obtain();
    } else {
        mVelocityTracker.clear();
    }
    // Seed the velocity tracker with a synthetic down event at the origin.
    final long time = SystemClock.uptimeMillis();
    final MotionEvent ev = MotionEvent.obtain(time, time, MotionEvent.ACTION_DOWN, 0, 0, 0);
    mVelocityTracker.addMovement(ev);
    ev.recycle();
    mFakeDragBeginTime = time;
    return true;
}

/**
 * End a fake drag of the pager.
 *
 * @see #beginFakeDrag()
 * @see #fakeDragBy(float)
 */
public void endFakeDrag() {
    if (!mFakeDragging) {
        throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first.");
    }

    // Same settle logic as a real ACTION_UP: compute velocity, pick a target
    // page, and animate to it.
    final VelocityTracker velocityTracker = mVelocityTracker;
    velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity);
    int initialVelocity = (int) VelocityTrackerCompat.getXVelocity(
            velocityTracker, mActivePointerId);
    mPopulatePending = true;
    final int width = getWidth();
    final int scrollX = getScrollX();
    final ItemInfo ii = infoForCurrentScrollPosition();
    final int currentPage = ii.position;
    final float pageOffset = (((float) scrollX / width) - ii.offset) / ii.widthFactor;
    final int totalDelta = (int) (mLastMotionX - mInitialMotionX);
    int nextPage = determineTargetPage(currentPage, pageOffset, initialVelocity,
            totalDelta);
    setCurrentItemInternal(nextPage, true, true, initialVelocity);
    endDrag();

    mFakeDragging = false;
}

/**
 * Fake drag by an offset in pixels. You must have called {@link #beginFakeDrag()} first.
 *
 * @param xOffset Offset in pixels to drag by.
 * @see #beginFakeDrag()
 * @see #endFakeDrag()
 */
public void fakeDragBy(float xOffset) {
    if (!mFakeDragging) {
        throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first.");
    }

    mLastMotionX += xOffset;

    float oldScrollX = getScrollX();
    float scrollX = oldScrollX - xOffset;
    final int width = getWidth();

    // Clamp to the bounds of the first/last loaded pages (no edge glow for
    // fake drags).
    float leftBound = width * mFirstOffset;
    float rightBound = width * mLastOffset;

    final ItemInfo firstItem = mItems.get(0);
    final ItemInfo lastItem = mItems.get(mItems.size() - 1);
    if (firstItem.position != 0) {
        leftBound = firstItem.offset * width;
    }
    if (lastItem.position != mAdapter.getCount() - 1) {
        rightBound = lastItem.offset * width;
    }

    if (scrollX < leftBound) {
        scrollX = leftBound;
    } else if (scrollX > rightBound) {
        scrollX = rightBound;
    }

    // Don't lose the rounded component
    mLastMotionX += scrollX - (int) scrollX;
    scrollTo((int) scrollX, getScrollY());
    pageScrolled((int) scrollX);

    // Synthesize an event for the VelocityTracker.
    final long time = SystemClock.uptimeMillis();
    final MotionEvent ev = MotionEvent.obtain(mFakeDragBeginTime, time, MotionEvent.ACTION_MOVE,
            mLastMotionX, 0, 0);
    mVelocityTracker.addMovement(ev);
    ev.recycle();
}

/**
 * Returns true if a fake drag is in progress.
 *
 * @return true if currently in a fake drag, false otherwise.
 *
 * @see #beginFakeDrag()
 * @see #fakeDragBy(float)
 * @see #endFakeDrag()
 */
public boolean isFakeDragging() {
    return mFakeDragging;
}

/** Hands drag tracking to another pointer when the active one lifts. */
private void onSecondaryPointerUp(MotionEvent ev) {
    final int pointerIndex = MotionEventCompat.getActionIndex(ev);
    final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex);
    if (pointerId == mActivePointerId) {
        // This was our active pointer going up. Choose a new
        // active pointer and adjust accordingly.
        final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
        mLastMotionX = MotionEventCompat.getX(ev, newPointerIndex);
        mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex);
        if (mVelocityTracker != null) {
            mVelocityTracker.clear();
        }
    }
}

/** Resets drag state and releases the velocity tracker. */
private void endDrag() {
    mIsBeingDragged = false;
    mIsUnableToDrag = false;

    if (mVelocityTracker != null) {
        mVelocityTracker.recycle();
        mVelocityTracker = null;
    }
}

/** Enables/disables the drawing cache on all visible children while scrolling. */
private void setScrollingCacheEnabled(boolean enabled) {
    if (mScrollingCacheEnabled != enabled) {
        mScrollingCacheEnabled = enabled;
        if (USE_CACHE) {
            final int size = getChildCount();
            for (int i = 0; i < size; ++i) {
                final View child = getChildAt(i);
                if (child.getVisibility() != GONE) {
                    child.setDrawingCacheEnabled(enabled);
                }
            }
        }
    }
}

/**
 * Tests scrollability within child views of v given a delta of dx.
 *
 * @param v View to test for horizontal scrollability
 * @param checkV Whether the view v passed should itself be checked for scrollability (true),
 *               or just its children (false).
 * @param dx Delta scrolled in pixels
 * @param x X coordinate of the active touch point
 * @param y Y coordinate of the active touch point
 * @return true if child views of v can be scrolled by delta of dx.
 */
protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) {
    if (v instanceof ViewGroup) {
        final ViewGroup group = (ViewGroup) v;
        final int scrollX = v.getScrollX();
        final int scrollY = v.getScrollY();
        final int count = group.getChildCount();
        // Count backwards - let topmost views consume scroll distance first.
        for (int i = count - 1; i >= 0; i--) {
            // TODO: Add versioned support here for transformed views.
            // This will not work for transformed views in Honeycomb+
            final View child = group.getChildAt(i);
            // Recurse only into the child actually under the touch point.
            if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() &&
                    y + scrollY >= child.getTop() && y + scrollY < child.getBottom() &&
                    canScroll(child, true, dx, x + scrollX - child.getLeft(),
                            y + scrollY - child.getTop())) {
                return true;
            }
        }
    }

    return checkV && ViewCompat.canScrollHorizontally(v, -dx);
}

@Override
public boolean dispatchKeyEvent(KeyEvent event) {
    // Let the focused view and/or our descendants get the key first
    return super.dispatchKeyEvent(event) || executeKeyEvent(event);
}

/**
 * You can call this function yourself to have the scroll view perform
 * scrolling from a key event, just as if the event had been dispatched to
 * it by the view hierarchy.
 *
 * @param event The key event to execute.
 * @return Return true if the event was handled, else false.
 */
public boolean executeKeyEvent(KeyEvent event) {
    boolean handled = false;
    if (event.getAction() == KeyEvent.ACTION_DOWN) {
        switch (event.getKeyCode()) {
            case KeyEvent.KEYCODE_DPAD_LEFT:
                handled = arrowScroll(FOCUS_LEFT);
                break;
            case KeyEvent.KEYCODE_DPAD_RIGHT:
                handled = arrowScroll(FOCUS_RIGHT);
                break;
            case KeyEvent.KEYCODE_TAB:
                if (Build.VERSION.SDK_INT >= 11) {
                    // The focus finder had a bug handling FOCUS_FORWARD and FOCUS_BACKWARD
                    // before Android 3.0. Ignore the tab key on those devices.
                    if (KeyEventCompat.hasNoModifiers(event)) {
                        handled = arrowScroll(FOCUS_FORWARD);
                    } else if (KeyEventCompat.hasModifiers(event, KeyEvent.META_SHIFT_ON)) {
                        handled = arrowScroll(FOCUS_BACKWARD);
                    }
                }
                break;
        }
    }
    return handled;
}

/**
 * Moves focus (or pages) in the given direction in response to an arrow/tab
 * key. Pages left/right when no suitable focus target exists in that
 * direction, or when the found target would jump the wrong way.
 */
public boolean arrowScroll(int direction) {
    View currentFocused = findFocus();
    if (currentFocused == this) currentFocused = null;

    boolean handled = false;

    View nextFocused = FocusFinder.getInstance().findNextFocus(this, currentFocused,
            direction);
    if (nextFocused != null && nextFocused != currentFocused) {
        if (direction == View.FOCUS_LEFT) {
            // If there is nothing to the left, or this is causing us to
            // jump to the right, then what we really want to do is page left.
            final int nextLeft = getChildRectInPagerCoordinates(mTempRect, nextFocused).left;
            final int currLeft = getChildRectInPagerCoordinates(mTempRect, currentFocused).left;
            if (currentFocused != null && nextLeft >= currLeft) {
                handled = pageLeft();
            } else {
                handled = nextFocused.requestFocus();
            }
        } else if (direction == View.FOCUS_RIGHT) {
            // If there is nothing to the right, or this is causing us to
            // jump to the left, then what we really want to do is page right.
            final int nextLeft = getChildRectInPagerCoordinates(mTempRect, nextFocused).left;
            final int currLeft = getChildRectInPagerCoordinates(mTempRect, currentFocused).left;
            if (currentFocused != null && nextLeft <= currLeft) {
                handled = pageRight();
            } else {
                handled = nextFocused.requestFocus();
            }
        }
    } else if (direction == FOCUS_LEFT || direction == FOCUS_BACKWARD) {
        // Trying to move left and nothing there; try to page.
        handled = pageLeft();
    } else if (direction == FOCUS_RIGHT || direction == FOCUS_FORWARD) {
        // Trying to move right and nothing there; try to page.
handled = pageRight(); } if (handled) { playSoundEffect(SoundEffectConstants.getContantForFocusDirection(direction)); } return handled; } private Rect getChildRectInPagerCoordinates(Rect outRect, View child) { if (outRect == null) { outRect = new Rect(); } if (child == null) { outRect.set(0, 0, 0, 0); return outRect; } outRect.left = child.getLeft(); outRect.right = child.getRight(); outRect.top = child.getTop(); outRect.bottom = child.getBottom(); ViewParent parent = child.getParent(); while (parent instanceof ViewGroup && parent != this) { final ViewGroup group = (ViewGroup) parent; outRect.left += group.getLeft(); outRect.right += group.getRight(); outRect.top += group.getTop(); outRect.bottom += group.getBottom(); parent = group.getParent(); } return outRect; } boolean pageLeft() { if (mCurItem > 0) { setCurrentItem(mCurItem-1, true); return true; } return false; } boolean pageRight() { if (mAdapter != null && mCurItem < (mAdapter.getCount()-1)) { setCurrentItem(mCurItem+1, true); return true; } return false; } /** * We only want the current page that is being shown to be focusable. */ @Override public void addFocusables(ArrayList<View> views, int direction, int focusableMode) { final int focusableCount = views.size(); final int descendantFocusability = getDescendantFocusability(); if (descendantFocusability != FOCUS_BLOCK_DESCENDANTS) { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addFocusables(views, direction, focusableMode); } } } } // we add ourselves (if focusable) in all cases except for when we are // FOCUS_AFTER_DESCENDANTS and there are some descendants focusable. this is // to avoid the focus search finding layouts when a more precise search // among the focusable children would be more interesting. 
if ( descendantFocusability != FOCUS_AFTER_DESCENDANTS || // No focusable descendants (focusableCount == views.size())) { // Note that we can't call the superclass here, because it will // add all views in. So we need to do the same thing View does. if (!isFocusable()) { return; } if ((focusableMode & FOCUSABLES_TOUCH_MODE) == FOCUSABLES_TOUCH_MODE && isInTouchMode() && !isFocusableInTouchMode()) { return; } if (views != null) { views.add(this); } } } /** * We only want the current page that is being shown to be touchable. */ @Override public void addTouchables(ArrayList<View> views) { // Note that we don't call super.addTouchables(), which means that // we don't call View.addTouchables(). This is okay because a ViewPager // is itself not touchable. for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addTouchables(views); } } } } /** * We only want the current page that is being shown to be focusable. */ @Override protected boolean onRequestFocusInDescendants(int direction, Rect previouslyFocusedRect) { int index; int increment; int end; int count = getChildCount(); if ((direction & FOCUS_FORWARD) != 0) { index = 0; increment = 1; end = count; } else { index = count - 1; increment = -1; end = -1; } for (int i = index; i != end; i += increment) { View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(direction, previouslyFocusedRect)) { return true; } } } } return false; } @Override public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) { // ViewPagers should only report accessibility info for the current page, // otherwise things get very confusing. // TODO: Should this note something about the paging container? 
final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { final ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem && child.dispatchPopulateAccessibilityEvent(event)) { return true; } } } return false; } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return generateDefaultLayoutParams(); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams && super.checkLayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } class MyAccessibilityDelegate extends AccessibilityDelegateCompat { @Override public void onInitializeAccessibilityEvent(View host, AccessibilityEvent event) { super.onInitializeAccessibilityEvent(host, event); event.setClassName(ViewPager.class.getName()); } @Override public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfoCompat info) { super.onInitializeAccessibilityNodeInfo(host, info); info.setClassName(ViewPager.class.getName()); info.setScrollable(mAdapter != null && mAdapter.getCount() > 1); if (mAdapter != null && mCurItem >= 0 && mCurItem < mAdapter.getCount() - 1) { info.addAction(AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD); } if (mAdapter != null && mCurItem > 0 && mCurItem < mAdapter.getCount()) { info.addAction(AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD); } } @Override public boolean performAccessibilityAction(View host, int action, Bundle args) { if (super.performAccessibilityAction(host, action, args)) { return true; } switch (action) { case AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD: { if (mAdapter != null && mCurItem >= 0 && mCurItem < mAdapter.getCount() - 1) { setCurrentItem(mCurItem + 
1); return true; } } return false; case AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD: { if (mAdapter != null && mCurItem > 0 && mCurItem < mAdapter.getCount()) { setCurrentItem(mCurItem - 1); return true; } } return false; } return false; } } private class PagerObserver extends DataSetObserver { @Override public void onChanged() { dataSetChanged(); } @Override public void onInvalidated() { dataSetChanged(); } } /** * Layout parameters that should be supplied for views added to a * ViewPager. */ public static class LayoutParams extends ViewGroup.LayoutParams { /** * true if this view is a decoration on the pager itself and not * a view supplied by the adapter. */ public boolean isDecor; /** * Gravity setting for use on decor views only: * Where to position the view page within the overall ViewPager * container; constants are defined in {@link android.view.Gravity}. */ public int gravity; /** * Width as a 0-1 multiplier of the measured pager width */ float widthFactor = 0.f; /** * true if this view was added during layout and needs to be measured * before being positioned. */ boolean needsMeasure; /** * Adapter position this view is for if !isDecor */ int position; /** * Current child index within the ViewPager that this view occupies */ int childIndex; public LayoutParams() { super(FILL_PARENT, FILL_PARENT); } public LayoutParams(Context context, AttributeSet attrs) { super(context, attrs); final TypedArray a = context.obtainStyledAttributes(attrs, LAYOUT_ATTRS); gravity = a.getInteger(0, Gravity.TOP); a.recycle(); } } static class ViewPositionComparator implements Comparator<View> { @Override public int compare(View lhs, View rhs) { final LayoutParams llp = (LayoutParams) lhs.getLayoutParams(); final LayoutParams rlp = (LayoutParams) rhs.getLayoutParams(); if (llp.isDecor != rlp.isDecor) { return llp.isDecor ? 1 : -1; } return llp.position - rlp.position; } } }
am 6c1b3964: Merge from jb-mr1-aah-dev * commit '6c1b3964742b6b7e58741a0e1ddec294b19c4121': ViewPager: fixed paddingLeft/paddingRight handling
v4/java/android/support/v4/view/ViewPager.java
am 6c1b3964: Merge from jb-mr1-aah-dev
Java
apache-2.0
72c55233b96d7d54bec3a53d20535e5798d4cc46
0
xschildw/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services
package org.sagebionetworks.javadoc.web.services; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.net.URL; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import com.sun.tools.javadoc.Main; /** * Test for the SpringMVCDoclet * @author jmhill * */ public class SpringMVCDocletTest { File sampleSourceFile; File classpathFile; File outputDirectory; @Before public void before(){ // Lookup the test files. sampleSourceFile = findFileOnClasspath("ExampleController.java"); // Find the classpath file generated by the maven-dependency-plugin String propertyValue = System.getProperty("auto.generated.classpath"); if(propertyValue == null){ // this occurs when run in eclipse. propertyValue = "target/gen/auto-generated-classpath.txt"; } System.out.println(propertyValue); classpathFile = new File(propertyValue); assertTrue("Classpath files does not exist: "+classpathFile.getAbsolutePath(), classpathFile.exists()); // Lookup the output directory. propertyValue = System.getProperty("test.javadoc.output.directory"); if(propertyValue == null){ // this occurs when run in eclipse. propertyValue = "target/javadoc"; } outputDirectory = new File(propertyValue); } public static File findFileOnClasspath(String fileName){ URL url = SpringMVCDocletTest.class.getClassLoader().getResource(fileName); assertNotNull("Failed to find: "+fileName+" on the classpath", url); File file = new File(url.getFile().replaceAll("%20", " ")); assertTrue(file.exists()); return file; } @Test public void testGetOutputDirectory(){ File result = SpringMVCDoclet.getOutputDirectory(new String[0][0]); assertNotNull(result); assertTrue(result.exists()); assertTrue(result.isDirectory()); System.out.println(result.getAbsolutePath()); // It should be the current working directory. 
assertEquals(System.getProperty("user.dir"), result.getAbsolutePath()); // Now pass the valid options result = SpringMVCDoclet.getOutputDirectory(new String[][]{new String[]{"-d", outputDirectory.getAbsolutePath()+"/testing123"}}); assertNotNull(result); assertTrue(result.exists()); assertTrue(result.isDirectory()); System.out.println(result.getAbsolutePath()); assertEquals(outputDirectory.getAbsolutePath()+File.separator+"testing123", result.getAbsolutePath()); } @Test public void testMain(){ // Run a sample javadoc int result = Main.execute(SpringMVCDocletTest.class.getClassLoader(), new String[]{ "-authControllerName", "org.sagebionetworks.samples.ExampleController", "-d", outputDirectory.getAbsolutePath(), "-doclet", SpringMVCDoclet.class.getName(), "-classpath", "@"+classpathFile.getAbsolutePath(), "-verbose", sampleSourceFile.getAbsolutePath() }); assertEquals(0, result); } }
lib/lib-javadoc/src/test/java/org/sagebionetworks/javadoc/web/services/SpringMVCDocletTest.java
package org.sagebionetworks.javadoc.web.services; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.net.URL; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import com.sun.tools.javadoc.Main; /** * Test for the SpringMVCDoclet * @author jmhill * */ public class SpringMVCDocletTest { File sampleSourceFile; File classpathFile; File outputDirectory; @Before public void before(){ // Lookup the test files. sampleSourceFile = findFileOnClasspath("ExampleController.java"); // Find the classpath file generated by the maven-dependency-plugin String propertyValue = System.getProperty("auto.generated.classpath"); if(propertyValue == null){ // this occurs when run in eclipse. propertyValue = "target/gen/auto-generated-classpath.txt"; } System.out.println(propertyValue); classpathFile = new File(propertyValue); assertTrue("Classpath files does not exist: "+classpathFile.getAbsolutePath(), classpathFile.exists()); // Lookup the output directory. propertyValue = System.getProperty("test.javadoc.output.directory"); if(propertyValue == null){ // this occurs when run in eclipse. propertyValue = "target/javadoc"; } outputDirectory = new File(propertyValue); } public static File findFileOnClasspath(String fileName){ URL url = SpringMVCDocletTest.class.getClassLoader().getResource(fileName); assertNotNull("Failed to find: "+fileName+" on the classpath", url); File file = new File(url.getFile().replaceAll("%20", " ")); assertTrue(file.exists()); return file; } @Test public void testGetOutputDirectory(){ File result = SpringMVCDoclet.getOutputDirectory(new String[0][0]); assertNotNull(result); assertTrue(result.exists()); assertTrue(result.isDirectory()); System.out.println(result.getAbsolutePath()); // It should be the current working directory. 
assertEquals(System.getProperty("user.dir"), result.getAbsolutePath()); // Now pass the valid options result = SpringMVCDoclet.getOutputDirectory(new String[][]{new String[]{"-d", outputDirectory.getAbsolutePath()+"/testing123"}}); assertNotNull(result); assertTrue(result.exists()); assertTrue(result.isDirectory()); System.out.println(result.getAbsolutePath()); assertEquals(outputDirectory.getAbsolutePath()+File.separator+"testing123", result.getAbsolutePath()); } @Ignore @Test public void testMain(){ // Run a sample javadoc int result = Main.execute(SpringMVCDocletTest.class.getClassLoader(), new String[]{ "-authControllerName", "org.sagebionetworks.samples.ExampleController", "-d", outputDirectory.getAbsolutePath(), "-doclet", SpringMVCDoclet.class.getName(), "-classpath", "@"+classpathFile.getAbsolutePath(), "-verbose", sampleSourceFile.getAbsolutePath() }); assertEquals(0, result); } }
PLFM-2984
lib/lib-javadoc/src/test/java/org/sagebionetworks/javadoc/web/services/SpringMVCDocletTest.java
PLFM-2984
Java
apache-2.0
fd29a779358153351f4471525a6c246abcf678da
0
thomasfischl/eurydome,thomasfischl/eurydome,thomasfischl/eurydome,thomasfischl/eurydome,thomasfischl/eurydome
package com.github.thomasfischl.eurydome.backend.rest; import java.io.IOException; import java.util.List; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import com.github.thomasfischl.eurydome.backend.dal.FileDataStore; import com.github.thomasfischl.eurydome.backend.model.DOFile; import com.mongodb.gridfs.GridFS; import com.mongodb.gridfs.GridFSInputFile; @RestController @RequestMapping(value = "/rest/file") public class FileController { @Inject FileDataStore store; @RequestMapping(method = RequestMethod.POST, value = "/remove/{id}") public void remove(@PathVariable("id") String id) { store.remove(id); } @RequestMapping(method = RequestMethod.GET, value = "/list") public List<DOFile> list() { return store.findAll(); } @RequestMapping(method = RequestMethod.POST, value = "/upload") public void handleFileUpload(@RequestParam("file") MultipartFile file, HttpServletRequest req, HttpServletResponse resp) throws IOException { if (!file.isEmpty()) { try { byte[] bytes = file.getBytes(); GridFS fs = store.getGridFs(); GridFSInputFile f = fs.createFile(bytes); f.setFilename(file.getOriginalFilename()); f.save(); } catch (Exception e) { e.printStackTrace(); // return "You failed to upload " + name + " => " + e.getMessage(); } } else { // return "You failed to upload " + name + " because the file was empty."; } resp.sendRedirect(req.getServletContext().getContextPath() + "/index.html#/app/settings"); } }
eurydome-backend/src/main/java/com/github/thomasfischl/eurydome/backend/rest/FileController.java
package com.github.thomasfischl.eurydome.backend.rest; import java.io.IOException; import java.util.List; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; import com.github.thomasfischl.eurydome.backend.dal.FileDataStore; import com.github.thomasfischl.eurydome.backend.model.DOFile; import com.mongodb.gridfs.GridFS; import com.mongodb.gridfs.GridFSInputFile; @RestController @RequestMapping(value = "/rest/file") public class FileController { @Inject FileDataStore store; @RequestMapping(method = RequestMethod.POST, value = "/remove/{id}") public void remove(@PathVariable("id") String id) { store.remove(id); } @RequestMapping(method = RequestMethod.GET, value = "/list") public List<DOFile> list() { return store.findAll(); } @RequestMapping(method = RequestMethod.POST, value = "/upload") public void handleFileUpload(@RequestParam("file") MultipartFile file, HttpServletRequest req, HttpServletResponse resp) throws IOException { if (!file.isEmpty()) { try { byte[] bytes = file.getBytes(); GridFS fs = store.getGridFs(); GridFSInputFile f = fs.createFile(bytes); f.setFilename(file.getOriginalFilename()); f.save(); } catch (Exception e) { e.printStackTrace(); // return "You failed to upload " + name + " => " + e.getMessage(); } } else { // return "You failed to upload " + name + " because the file was empty."; } resp.sendRedirect(req.getServletContext().getContextPath() + "/#/app/settings"); } }
Fixes #13: File Upload Bug: Improve redirect url
eurydome-backend/src/main/java/com/github/thomasfischl/eurydome/backend/rest/FileController.java
Fixes #13: File Upload Bug: Improve redirect url
Java
apache-2.0
4b34535458e5ccdf5561bda698731e77ec4719fb
0
nestormh/svm_path_planner,nestormh/svm_path_planner
package sibtra.ui.modulos; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; import sibtra.predictivo.Coche; import sibtra.shm.ShmInterface; import sibtra.gps.Ruta; import sibtra.gps.Trayectoria; import sibtra.ui.VentanasMonitoriza; //import sibtra.ui.defs.CalculaRuta; import sibtra.ui.defs.ModificadorTrayectoria; import sibtra.ui.defs.Motor; import sibtra.util.PanelFlow; import sibtra.util.SpinnerDouble; import sibtra.util.SpinnerInt; import sibtra.util.ThreadSupendible; /** * Clase que implementa {@link ModificadorTrayectoria} y que lee de la memoria compartida los datos * suministrados por el algoritmo ACO y desplaza la trayectoria lateralmente en caso de * presentarse un estrechamiento de la carretera * @author jesus * */ public class ModificadorACO implements ModificadorTrayectoria{ VentanasMonitoriza ventanaMonitoriza; PanelModACO panelACO; private Trayectoria trayectoria; String NOMBRE="Modificador ACO"; String DESCRIPCION="Modifica la trayectoria usando la información de bordes de la carretera"; private Motor motor; private ThreadSupendible thCiclico; double distInicio; double longitudTramoDesp; int indiceInicial; int indiceFinal; Coche modCoche; public int umbralDesp = 30; public double gananciaLateral = 0.1; /** * Seteador de la distancia a partir del coche a la que se desea empezar a desplazar * lateralmente la trayectoria * @param distInicio Distancia en metros */ public void setDistInicio(double distInicio) { this.distInicio = distInicio; } /** * Seteador de la longitud deseada para el tramo de la trayectoria que se va a desplazar * @param longitudTramoDesp Longitud en metros */ public void setLongitudTramoDesp(double longitudTramoDesp) { this.longitudTramoDesp = longitudTramoDesp; } /** * Calcula el índice a partir del cual hay que empezara desplazar lateralmente la * trayecoria * @return el índice de la trayectoria */ public int calculaIndiceInicial(){ if (trayectoria != null){ 
trayectoria.situaCoche(modCoche.getX(),modCoche.getY()); int indIni = trayectoria.indiceMasCercano(); this.indiceInicial = trayectoria.indiceHastaLargo(distInicio, indIni); return indiceInicial; }else{ System.out.println("La trayectoria no puede ser null"); return 0; } } /** * Calcula el índice del punto de la trayectoria hasta el cual hay que realizar el desplazamiento * lateral * @return índice de la trayectoria */ public int calculaIndiceFinal(){ if (trayectoria != null){ this.indiceFinal = trayectoria.indiceHastaLargo(longitudTramoDesp, indiceInicial); }else { System.out.println("La trayectoria no puede ser null"); return 0; } return indiceFinal; } @Override public void setTrayectoriaInicial(Trayectoria tra) { this.trayectoria = tra; } @Override public void setMotor(Motor mtr) { motor=mtr; } public String getDescripcion() { return DESCRIPCION; } public String getNombre() { return NOMBRE; } @Override public boolean setVentanaMonitoriza(VentanasMonitoriza ventMonitoriza) { boolean todoBien = false; if(ventMonitoriza != null){ ventanaMonitoriza = ventMonitoriza; todoBien = true; } panelACO = new PanelModACO(); ventMonitoriza.añadePanel(panelACO,"Panel ACO",false,false); //Le decimos que modelo de coche tiene que usar modCoche = motor.getModeloCoche(); thCiclico=new ThreadSupendible() { private long tSig; private long periodoMuestreoMili = 1250; @Override protected void accion() { //apuntamos cual debe ser el instante siguiente tSig = System.currentTimeMillis() + periodoMuestreoMili ; // if (calcular){ accionPeriodica(); // } //esperamos hasta que haya pasado el tiempo convenido while (System.currentTimeMillis() < tSig) { try { Thread.sleep(tSig - System.currentTimeMillis()); } catch (Exception e) {} } } }; thCiclico.setName(getNombre()); return todoBien; } private void accionPeriodica() { int distDerecha = ShmInterface.getAcoRightDist(); // int distDerecha = ShmInterface.getResolucionHoriz()-ShmInterface.getAcoRightDist(); // 
System.out.println(ShmInterface.getResolucionHoriz()); int distIzquierda = ShmInterface.getAcoLeftDist(); double despLateral = 0; if (distIzquierda>umbralDesp){ despLateral = distIzquierda*gananciaLateral; // Cuando el desp es a la izquierda es negativo }else if(distDerecha>umbralDesp){ despLateral = -distDerecha*gananciaLateral; }else{ despLateral = 0; } System.out.println("Dist Izquierda " + distIzquierda + "\\\\\\ Dist Derecha " + distDerecha); //La trayectoria original se le indica al modificadorACO a través del método //setTrayectoriaInicial y no se modifica Trayectoria trAux = new Trayectoria(trayectoria); double despX = 0; double despY = 0; setDistInicio(1); setLongitudTramoDesp(4); calculaIndiceInicial(); calculaIndiceFinal(); // Es necesario situar el coche en la ruta antes de buscar el indice más cercano trayectoria.situaCoche(modCoche.getX(),modCoche.getY()); if(trayectoria.length() != 0){ // for(int i=(trayectoria.indiceMasCercano()+indiceInicial)%trayectoria.length(); // i<(trayectoria.indiceMasCercano()+indiceFinal)%trayectoria.length(); // i=(i+1)%trayectoria.length()){ //Recorremos todos los puntos de la trayectoria for(int i=0;i<trayectoria.length();i++){ //condición que cumplen los puntos de la trayectoria que se encuentran //por delante del coche if(i>(trayectoria.indiceMasCercano())%trayectoria.length() && i<(trayectoria.indiceMasCercano()+40)%trayectoria.length()){ // Se calcula un desplazamiento lateral perpendicular al rumbo de cada punto despY = -Math.cos(trayectoria.rumbo[i])*despLateral; despX = Math.sin(trayectoria.rumbo[i])*despLateral; //Se añade el desplazamiento a las coordenadas del punto trAux.x[i] = trayectoria.x[i] + despX; trAux.y[i] = trayectoria.y[i] + despY; }else{//no modificamos si los puntos no están por delante del coche trAux.x[i] = trayectoria.x[i]; trAux.y[i] = trayectoria.y[i]; } } } // Trayectoria trAux = new Trayectoria(trayectoria,0.1); motor.nuevaTrayectoria(trAux); // return trayectoria; } @Override public void 
terminar() { thCiclico.terminar(); } // public static void main(String[] args) { // CalculaRutaACO cal =new CalculaRutaACO(); // String fichero = "Rutas/Universidad/Parq_16_07_cerr"; // Ruta re; // Trayectoria rutaPruebaRellena; // try { // File file = new File(fichero); // ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file)); // re = (Ruta) ois.readObject(); // ois.close(); // double distMax = 0.5; // rutaPruebaRellena = new Trayectoria(re,distMax); // System.out.println(rutaPruebaRellena.length()); // System.out.println("Abrimos el fichero"); // // } catch (IOException ioe) { // re = new Ruta(); // rutaPruebaRellena = null; // System.err.println("Error al abrir el fichero " + fichero); // System.err.println(ioe.getMessage()); // } catch (ClassNotFoundException cnfe) { // re = new Ruta(); // rutaPruebaRellena = null; // System.err.println("Objeto leído inválido: " + cnfe.getMessage()); // } // cal.setTrayectoriaInicial(rutaPruebaRellena); // while(true){ // cal.getTrayectoriaActual(); // } // } @Override public void actuar() { thCiclico.activar(); } @Override public void parar() { thCiclico.suspender(); } public int getUmbralDesp() { return umbralDesp; } public void setUmbralDesp(int umbralDesp) { this.umbralDesp = umbralDesp; } public double getGananciaLateral() { return gananciaLateral; } public void setGananciaLateral(double gananciaLateral) { this.gananciaLateral = gananciaLateral; } protected class PanelModACO extends PanelFlow { public PanelModACO() { super(); // setLayout(new GridLayout(0,4)); //TODO Definir los tamaños adecuados o poner layout añadeAPanel(new SpinnerDouble(ModificadorACO.this,"setGananciaLateral",0,6,0.1), "Ganancia"); añadeAPanel(new SpinnerInt(ModificadorACO.this,"setUmbralDesp",0,100,1), "Umbral"); } } }
Jade/src/sibtra/ui/modulos/ModificadorACO.java
package sibtra.ui.modulos;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;

import sibtra.predictivo.Coche;
import sibtra.shm.ShmInterface;
import sibtra.gps.Ruta;
import sibtra.gps.Trayectoria;
import sibtra.ui.VentanasMonitoriza;
//import sibtra.ui.defs.CalculaRuta;
import sibtra.ui.defs.ModificadorTrayectoria;
import sibtra.ui.defs.Motor;
import sibtra.util.ThreadSupendible;

/**
 * Implementation of {@link ModificadorTrayectoria} that periodically reads, through the
 * shared-memory interface {@link ShmInterface}, the road-border data produced by the ACO
 * algorithm and displaces the trajectory laterally when the road narrows on one side.
 *
 * The periodic work runs on a suspendible thread created in
 * {@link #setVentanaMonitoriza(VentanasMonitoriza)}; each cycle builds a shifted copy of
 * the original trajectory and hands it to the {@link Motor} via {@code nuevaTrayectoria}.
 *
 * @author jesus
 */
public class ModificadorACO implements ModificadorTrayectoria{

	// Monitoring window handed in by the framework (kept but not otherwise used here).
	VentanasMonitoriza ventanaMonitoriza;
	// Original trajectory, set via setTrayectoriaInicial(); never modified in place —
	// accionPeriodica() works on a copy.
	private Trayectoria trayectoria;
	String NOMBRE="Modificador ACO";
	String DESCRIPCION="Modifica la trayectoria usando la información de bordes de la carretera";
	// Motor that receives the displaced trajectory each cycle.
	private Motor motor;
	// Suspendible worker thread that invokes accionPeriodica() periodically.
	private ThreadSupendible thCiclico;
	// Distance (meters) ahead of the car at which the lateral shift should start.
	double distInicio;
	// Length (meters) of the trajectory stretch to be shifted.
	double longitudTramoDesp;
	// First/last trajectory indices of the stretch to shift (see calculaIndice*()).
	int indiceInicial;
	int indiceFinal;
	// Car model used to locate the vehicle on the trajectory.
	Coche modCoche;
	// Border-distance threshold (shared-memory units) above which a shift is applied.
	public int umbralDesp = 30;
	// Gain converting a border distance into a lateral offset in meters.
	public double gananciaLateral = 0.1;

	/**
	 * Sets the distance ahead of the car at which the lateral displacement of the
	 * trajectory is to begin.
	 * @param distInicio distance in meters
	 */
	public void setDistInicio(double distInicio) {
		this.distInicio = distInicio;
	}

	/**
	 * Sets the desired length of the trajectory stretch that will be displaced.
	 * @param longitudTramoDesp length in meters
	 */
	public void setLongitudTramoDesp(double longitudTramoDesp) {
		this.longitudTramoDesp = longitudTramoDesp;
	}

	/**
	 * Computes the trajectory index from which the lateral displacement must start:
	 * the point {@link #distInicio} meters beyond the index closest to the car.
	 * Side effect: updates {@link #indiceInicial}.
	 * @return the starting index, or 0 when no trajectory has been set
	 */
	public int calculaIndiceInicial(){
		if (trayectoria != null){
			// The car has to be positioned on the route before querying the closest index.
			trayectoria.situaCoche(modCoche.getX(),modCoche.getY());
			int indIni = trayectoria.indiceMasCercano();
			this.indiceInicial = trayectoria.indiceHastaLargo(distInicio, indIni);
			return indiceInicial;
		}else{
			System.out.println("La trayectoria no puede ser null");
			return 0;
		}
	}

	/**
	 * Computes the trajectory index up to which the lateral displacement must be applied:
	 * the point {@link #longitudTramoDesp} meters beyond {@link #indiceInicial}.
	 * Side effect: updates {@link #indiceFinal}.
	 * @return the final index, or 0 when no trajectory has been set
	 */
	public int calculaIndiceFinal(){
		if (trayectoria != null){
			this.indiceFinal = trayectoria.indiceHastaLargo(longitudTramoDesp, indiceInicial);
		}else {
			System.out.println("La trayectoria no puede ser null");
			return 0;
		}
		return indiceFinal;
	}

	/** Stores the (unmodified) reference trajectory that later cycles will copy and shift. */
	@Override
	public void setTrayectoriaInicial(Trayectoria tra) {
		this.trayectoria = tra;
	}

	/** Injects the motor that will receive the displaced trajectories. */
	@Override
	public void setMotor(Motor mtr) {
		motor=mtr;
	}

	public String getDescripcion() {
		return DESCRIPCION;
	}

	public String getNombre() {
		return NOMBRE;
	}

	/**
	 * Wires the module up: remembers the monitoring window, takes the car model from the
	 * motor and creates (without starting) the periodic worker thread.
	 *
	 * NOTE(review): when {@code ventMonitoriza} is null the method still continues and
	 * dereferences {@code motor}; it only reports failure through the return value.
	 *
	 * @return true when a non-null monitoring window was supplied
	 */
	@Override
	public boolean setVentanaMonitoriza(VentanasMonitoriza ventMonitoriza) {
		boolean todoBien = false;
		if(ventMonitoriza != null){
			ventanaMonitoriza = ventMonitoriza;
			todoBien = true;
		}
		// Tell the module which car model it has to use.
		modCoche = motor.getModeloCoche();
		thCiclico=new ThreadSupendible() {
			private long tSig;
			// Sampling period of the periodic action, in milliseconds.
			private long periodoMuestreoMili = 1250;
			@Override
			protected void accion() {
				// Record the instant at which the next cycle is due.
				tSig = System.currentTimeMillis() + periodoMuestreoMili ;
				// if (calcular){
				accionPeriodica();
				// }
				// Sleep until the agreed instant has passed (exceptions deliberately ignored:
				// an early wake-up just re-enters the wait loop).
				while (System.currentTimeMillis() < tSig) {
					try {
						Thread.sleep(tSig - System.currentTimeMillis());
					} catch (Exception e) {}
				}
			}
		};
		thCiclico.setName(getNombre());
		return todoBien;
	}

	/**
	 * One periodic cycle: reads the left/right border distances from shared memory,
	 * derives a signed lateral offset (positive = left, negative = right — see the
	 * sign convention in the code below), copies the reference trajectory and shifts
	 * the points just ahead of the car perpendicular to their heading, then hands the
	 * shifted copy to the motor.
	 */
	private void accionPeriodica() {
		// Right distance is measured from the opposite image edge, hence the subtraction.
		int distDerecha = ShmInterface.getResolucionHoriz()-ShmInterface.getAcoRightDist();
		System.out.println(ShmInterface.getResolucionHoriz());
		int distIzquierda = ShmInterface.getAcoLeftDist();
		double despLateral = 0;
		if (distIzquierda>umbralDesp){
			despLateral = distIzquierda*gananciaLateral;
			// When the displacement is to the left it is negative.
		}else if(distDerecha>umbralDesp){
			despLateral = -distDerecha*gananciaLateral;
		}else{
			despLateral = 0;
		}
		System.out.println("Dist Izquierda " + distIzquierda + "\\\\\\ Dist Derecha " + distDerecha);
		// The original trajectory is the one given through setTrayectoriaInicial();
		// it is never modified — all edits happen on this copy.
		Trayectoria trAux = new Trayectoria(trayectoria);
		double despX = 0;
		double despY = 0;
		setDistInicio(1);
		setLongitudTramoDesp(4);
		calculaIndiceInicial();
		calculaIndiceFinal();
		// The car has to be positioned on the route before querying the closest index.
		trayectoria.situaCoche(modCoche.getX(),modCoche.getY());
		if(trayectoria.length() != 0){
			// for(int i=(trayectoria.indiceMasCercano()+indiceInicial)%trayectoria.length();
			// i<(trayectoria.indiceMasCercano()+indiceFinal)%trayectoria.length();
			// i=(i+1)%trayectoria.length()){
			// Walk every point of the trajectory.
			for(int i=0;i<trayectoria.length();i++){
				// Condition met by the trajectory points that lie ahead of the car
				// (within the next 40 indices, wrapping around the closed route).
				if(i>(trayectoria.indiceMasCercano())%trayectoria.length() &&
						i<(trayectoria.indiceMasCercano()+40)%trayectoria.length()){
					// Lateral displacement perpendicular to the heading of each point.
					despY = -Math.cos(trayectoria.rumbo[i])*despLateral;
					despX = Math.sin(trayectoria.rumbo[i])*despLateral;
					// Add the displacement to the point's coordinates.
					trAux.x[i] = trayectoria.x[i] + despX;
					trAux.y[i] = trayectoria.y[i] + despY;
				}else{
					// Points not ahead of the car are copied unchanged.
					trAux.x[i] = trayectoria.x[i];
					trAux.y[i] = trayectoria.y[i];
				}
			}
		}
		// Trayectoria trAux = new Trayectoria(trayectoria,0.1);
		motor.nuevaTrayectoria(trAux);
		// return trayectoria;
	}

	/** Stops the periodic worker thread for good. */
	@Override
	public void terminar() {
		thCiclico.terminar();
	}

	// public static void main(String[] args) {
	// CalculaRutaACO cal =new CalculaRutaACO();
	// String fichero = "Rutas/Universidad/Parq_16_07_cerr";
	// Ruta re;
	// Trayectoria rutaPruebaRellena;
	// try {
	// File file = new File(fichero);
	// ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file));
	// re = (Ruta) ois.readObject();
	// ois.close();
	// double distMax = 0.5;
	// rutaPruebaRellena = new Trayectoria(re,distMax);
	// System.out.println(rutaPruebaRellena.length());
	// System.out.println("Abrimos el fichero");
	//
	// } catch (IOException ioe) {
	// re = new Ruta();
	// rutaPruebaRellena = null;
	// System.err.println("Error al abrir el fichero " + fichero);
	// System.err.println(ioe.getMessage());
	// } catch (ClassNotFoundException cnfe) {
	// re = new Ruta();
	// rutaPruebaRellena = null;
	// System.err.println("Objeto leído inválido: " + cnfe.getMessage());
	// }
	// cal.setTrayectoriaInicial(rutaPruebaRellena);
	// while(true){
	// cal.getTrayectoriaActual();
	// }
	// }

	/** Resumes (activates) the periodic worker thread. */
	@Override
	public void actuar() {
		thCiclico.activar();
	}

	/** Suspends the periodic worker thread. */
	@Override
	public void parar() {
		thCiclico.suspender();
	}

	public int getUmbralDesp() {
		return umbralDesp;
	}

	public void setUmbralDesp(int umbralDesp) {
		this.umbralDesp = umbralDesp;
	}

	public double getGananciaLateral() {
		return gananciaLateral;
	}

	public void setGananciaLateral(double gananciaLateral) {
		this.gananciaLateral = gananciaLateral;
	}
}
Añadido el panel para el modificador ACO
Jade/src/sibtra/ui/modulos/ModificadorACO.java
Añadido el panel para el modificador ACO
Java
bsd-2-clause
1e1f72acb7966278877b26f48afc41549818d24f
0
HTWProject/Finanzplanung,HTWProject/Finanzplanung
package com.htw.finanzplanung; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.provider.DocumentsContract; import android.text.Html; import android.util.Log; import org.apache.http.NameValuePair; import org.apache.http.message.BasicNameValuePair; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.w3c.dom.Document; import java.util.ArrayList; import java.util.HashMap; import java.util.List; //datum TEXT as strings ("YYYY-MM-DD"). public class Data_Access extends SQLiteOpenHelper{ private static final String DATABASE_NAME = "Finanzplanung.sqlite"; public Data_Access(Context context) { super(context, DATABASE_NAME, null, 1); } public void databaseDelete(Context context){ context.deleteDatabase(DATABASE_NAME); } @Override public void onCreate(SQLiteDatabase db) { db.execSQL("PRAGMA foreign_keys = OFF;"); db.execSQL("CREATE TABLE IF NOT EXISTS user " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " name TEXT," + " email TEXT UNIQUE," + " passwort TEXT NOT NULL," + " loginstatus INTEGER DEFAULT 1 " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS gruppe " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " name TEXT," + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE , " + " status TEXT DEFAULT 'u' " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS settings " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " server TEXT DEFAULT 'http://fomenko.eu/Finanzplanung/'," + " mobile_sync INTEGER DEFAULT 0," + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE , " + " status TEXT DEFAULT 'u' " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS ausgabe " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " datum TEXT," + " was TEXT," + " betrag REAL," + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE, " + " gruppe_id INTEGER 
REFERENCES gruppe(_id) ON UPDATE CASCADE ON DELETE CASCADE , " + " status TEXT DEFAULT 'u' " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS user_ist_mitglied_in_gruppe " + "(" + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE," + " gruppe_id INTEGER REFERENCES gruppe(_id) ON UPDATE CASCADE ON DELETE CASCADE, " + " status TEXT DEFAULT 'u' , " + " PRIMARY KEY(user_id,gruppe_id) " + ")" ); db.execSQL("PRAGMA foreign_keys = ON;"); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("PRAGMA foreign_keys = OFF;"); db.execSQL("DROP TABLE IF EXISTS user"); db.execSQL("DROP TABLE IF EXISTS ausgabe"); db.execSQL("DROP TABLE IF EXISTS gruppe"); db.execSQL("DROP TABLE IF EXISTS settings"); db.execSQL("DROP TABLE IF EXISTS user_ist_mitglied_in_gruppe"); db.execSQL("PRAGMA foreign_keys = ON;"); onCreate(db); } //GruppenVerwalten public List<Gruppe> getMeineGruppen() { Integer user_id = getLoginState(); List<Gruppe> Gruppen = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery("SELECT _id, name FROM gruppe WHERE status != 'd' AND user_id = " + user_id + " ", null); if(c.moveToFirst()){ do{ Gruppen.add(new Gruppe(c.getInt(0), c.getString(1))); }while(c.moveToNext()); } c.close(); db.close(); return Gruppen; } public int deleteGruppe(Integer gruppen_id) { Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); int zwisch = 0; //Log.d("Response Delete: ", "> " + "ready"+user_id); if(user_id.equals(getGruppenMasterID(gruppen_id))){ db.execSQL("UPDATE gruppe SET status = 'd' WHERE _id = " + gruppen_id + " "); //db.execSQL("DELETE FROM gruppe WHERE _id = " + gruppen_id + " "); zwisch = 1; } db.close(); return zwisch; } public int addGruppe(String gruppenname){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("INSERT INTO gruppe (name, user_id) VALUES ('" + gruppenname + "' , " + user_id + ");"); return 0; 
} public Integer getGruppenMasterID(Integer gruppen_id){ SQLiteDatabase db = this.getWritableDatabase(); Integer user_id = -1; Cursor c = db.rawQuery("SELECT user_id FROM gruppe WHERE status != 'd' AND _id = " + gruppen_id + " ;", null); if(c.moveToFirst()){ user_id = c.getInt(0); } c.close(); Log.d("Response MasterID: ", "> " + user_id); return user_id; } //Finanzen public int addGeldausgabe(String datum,String was, Float betrag, Integer gruppen_id){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings ("YYYY-MM-DD"). db.execSQL("INSERT INTO ausgabe (datum, was, betrag, user_id, gruppe_id) " + "VALUES (" + " '" + datum + "', " + " '" + was + "', " + " " + betrag + " , " + " " + user_id + " , " + " " + gruppen_id + " " + ");"); db.close(); return 0; } public Float getGruppenGesamtbetrag(String startdatum, String enddatum, Integer gruppen_id){ SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT sum(betrag) AS Summe " + "FROM ausgabe " + "WHERE status != 'd' AND gruppe_id = " + gruppen_id + " " + "AND ausgabe.datum BETWEEN '" + startdatum + "' AND '" + enddatum + "' " + "GROUP BY gruppe_id ", null ); Float gesamtgeldbetrag = 0f; if(c.moveToFirst()){ gesamtgeldbetrag = c.getFloat(0); } c.close(); db.close(); Log.d("Response betrag: ", "> " + gesamtgeldbetrag); return gesamtgeldbetrag; } public Float getUserGesamtbetrag(String startdatum, String enddatum, Integer gruppen_id, Integer user_id){ SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT sum(betrag) AS Summe " + "FROM ausgabe " + "WHERE status != 'd' AND gruppe_id = " + gruppen_id + " " + "AND user_id = " + user_id + " " + "AND datum BETWEEN '" + startdatum + "' AND '" + enddatum + "' " + "GROUP BY user_id ;", null ); Float gesamtgeldbetrag = 0f; if(c.moveToFirst()){ gesamtgeldbetrag = c.getFloat(0); } c.close(); db.close(); return gesamtgeldbetrag; } public List<Geldausgabe> getUserGeldausgaben(String 
startdatum, String enddatum, Integer gruppen_id, Integer user_id){ List<Geldausgabe> Ausgaben = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT * " + "FROM ausgabe " + "WHERE status != 'd' AND gruppe_id = " + gruppen_id + " " + "AND user_id = " + user_id + " " + "AND ausgabe.datum BETWEEN '" + startdatum + "' AND '" + enddatum + "' ", null ); if(c.moveToFirst()){ do{ Ausgaben.add(new Geldausgabe(c.getInt(0), c.getString(1), c.getString(2), c.getFloat(3))); }while(c.moveToNext()); } c.close(); db.close(); return Ausgaben; } //Gruppenmitglieder public ArrayList<Mitglied> getGruppenMitglieder(Integer gruppe_id){ ArrayList<Mitglied> Mitglieder = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT user._id, user.name " + "FROM user " + "INNER JOIN user_ist_mitglied_in_gruppe " + "ON user._id = user_ist_mitglied_in_gruppe.user_id " + "WHERE user_ist_mitglied_in_gruppe.status != 'd' AND user_ist_mitglied_in_gruppe.gruppe_id = " + gruppe_id + " ", null ); if(c.moveToFirst()){ do{ Mitglieder.add(new Mitglied(c.getInt(0), c.getString(1))); }while(c.moveToNext()); } c.close(); db.close(); return Mitglieder; } //gibt 0 zurück wenn alles gut lief, und gibt -1 zurück wenn der user nicht der eigentümer der Gruppe ist public int deleteMitglied(Integer gruppen_id, Integer mitglied_id){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); Integer zwisch = -1; if(user_id.equals(getGruppenMasterID(gruppen_id))){ db.execSQL("UPDATE user_ist_mitglied_in_gruppe SET status = 'd' WHERE user_id = " + mitglied_id + " AND gruppe_id = " + gruppen_id + " "); //db.execSQL("DELETE FROM user_ist_mitglied_in_gruppe WHERE user_id = " + mitglied_id + " AND gruppe_id = " + gruppen_id + " "); zwisch = 0; } db.close(); return zwisch; } public void addGruppenMitglied(Integer gruppen_id, Integer mitglied_id){ SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings 
("YYYY-MM-DD"). db.execSQL("INSERT INTO user_ist_mitglied_in_gruppe ( user_id, gruppe_id) " + "VALUES (" + " " + mitglied_id + " , " + " " + gruppen_id + " " + ") "); db.close(); } public Integer existUser(String email){ Integer id = -10; SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT _id " + "FROM user " + "WHERE email = '" + email + "' ", null ); if(c.moveToFirst()){ id = c.getInt(0); } c.close(); return id; } public Boolean existUserInGruppe(String email, Integer gruppen_id){ SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT user._id " + "FROM user " + "INNER JOIN user_ist_mitglied_in_gruppe " + "ON user_ist_mitglied_in_gruppe.user_id = user._id " + "WHERE user_ist_mitglied_in_gruppe.status != 'd' AND user.email = '" + email + "' " + "AND user_ist_mitglied_in_gruppe.gruppe_id = '"+gruppen_id+"' ", null ); if(c.moveToFirst()){ c.close(); return true; }else{ c.close(); return false; } } public String addGruppenMitglied(Integer gruppen_id, String email){ Integer user_id = getLoginState(); if(existUser(email).equals(-10)) { String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/usersearch.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", email)); String jsonStr = sh.makeServiceCall(url, ServiceHandler.POST, PHPanfrage); sh.destroy(); Log.d("Responsess: ", "> " + jsonStr); if (jsonStr != null) { try { Log.d("Responsess: ", "> " + "nice"); JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); Log.d("Responsess: ", "> " + "nice2"); if(jsonObj.getString("exception").equals("OK")) { //Log.d("Responsess: ", "> " + "niceOK"); //Log.d("Response mID: ", "> " + jsonObj.getInt("_id")); Integer mitglied_id = jsonObj.getInt("_id"); addUser(mitglied_id, jsonObj.getString("name"), jsonObj.getString("email")); //Log.d("Response email: ", "> " + 
jsonObj.getString("email")); addGruppenMitglied(gruppen_id, mitglied_id); jsonStr = "OK"; }else{ jsonStr = jsonObj.getString("exception"); } } catch (JSONException e) { jsonStr = "ERROR: "; e.printStackTrace(); } } else { jsonStr = "ERROR: NO INTERNET CONNECTION"; } return jsonStr; //return "user existiert nicht"; }else if(existUserInGruppe(email,gruppen_id)){ return "user vorhanden"; } else { addGruppenMitglied(gruppen_id, existUser(email)); return "OK"; } } //Home public ArrayList<Gruppe> getGruppen(){ Integer user_id = getLoginState(); ArrayList<Gruppe> Gruppen = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT gruppe._id, gruppe.name " + "FROM gruppe " + "INNER JOIN user_ist_mitglied_in_gruppe " + "ON gruppe._id = user_ist_mitglied_in_gruppe.gruppe_id " + "WHERE user_ist_mitglied_in_gruppe.status != 'd' AND user_ist_mitglied_in_gruppe.user_id = " + user_id + " ", null ); if(c.moveToFirst()){ do{ Gruppen.add(new Gruppe(c.getInt(0), c.getString(1))); }while(c.moveToNext()); } c.close(); db.close(); return Gruppen; } public void verlasseGruppe(Integer gruppen_id){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("UPDATE user_ist_mitglied_in_gruppe SET status = 'd' WHERE user_id = " + user_id + " AND gruppe_id = " + gruppen_id + " "); //db.execSQL("DELETE FROM user_ist_mitglied_in_gruppe WHERE user_id = " + user_id + " AND gruppe_id = " + gruppen_id + " "); db.close(); } //Startseite public String Login(String email,String passwort){ String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/login.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", email)); PHPanfrage.add(new BasicNameValuePair("password", passwort)); String jsonStr = sh.makeServiceCall(url, ServiceHandler.POST, PHPanfrage); Log.d("Response: ", "> " + 
jsonStr); if (jsonStr != null) { try { JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); if(jsonObj.getString("exception").equals("OK")) { Integer user_id = jsonObj.getInt("_id"); Logout(); setLoginState(user_id); if (getLoginState() != user_id) { addUser(user_id, jsonObj.getString("name"), jsonObj.getString("email"), passwort); } }else{ jsonStr = jsonObj.getString("exception"); } } catch (JSONException e) { jsonStr = "ERROR: "; e.printStackTrace(); } } else { Log.e("ServiceHandler", "Couldn't get any data from the url"); if(!LoginLocal(email, passwort)){ jsonStr = "ERROR: NO INTERNET CONNECTION"; } } sh.destroy(); return jsonStr; } public Boolean LoginLocal(String email, String passwort){ SQLiteDatabase db = this.getWritableDatabase(); Boolean zwisch = false; Cursor c = db.rawQuery( "SELECT _id " + "FROM user " + "WHERE AND email = " + email + " " + "AND passwort = " + passwort + " ", null ); if(c.moveToFirst()){ zwisch = true; setLoginState(c.getInt(0)); } c.close(); db.close(); return zwisch; } public void setLoginState(Integer user_id){ SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("UPDATE user SET loginstatus = 1 WHERE _id = " + user_id + " "); db.close(); } public Integer getLoginState(){ Integer id = null; SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT _id " + "FROM user " + "WHERE loginstatus = 1 ", null ); if(c.moveToFirst()){ do{ id = c.getInt(0); }while(c.moveToNext()); } c.close(); db.close(); return id; } public void Logout(){ SQLiteDatabase db = this.getWritableDatabase(); db.execSQL( "UPDATE user " + "SET loginstatus = 0 " + "WHERE loginstatus = 1 ;" ); db.close(); } public String sendPasswortToEmail(String email){ String jsonStr; String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/sendpassword.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", 
email)); jsonStr = sh.makeServiceCall(url, ServiceHandler.POST,PHPanfrage); if (jsonStr != null) { try { JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); jsonStr = jsonObj.getString("exception"); } catch (JSONException e) { jsonStr = "ERROR: "+stripHtml(jsonStr); e.printStackTrace(); } } else { jsonStr = "ERROR: NO INTERNET CONNECTION"; } sh.destroy(); return jsonStr; } public String stripHtml(String html) { return html.substring(html.indexOf("{"), html.lastIndexOf("}") + 1); } public String stripHtmlForArray(String html) { return html.substring(html.indexOf("["), html.lastIndexOf("]") + 1); } //Registration public String registration(String name, String email, String passwort, String passwortValidation){ String jsonStr; if(passwort.equals(passwortValidation)){ String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/registration.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", email)); PHPanfrage.add(new BasicNameValuePair("name", name)); PHPanfrage.add(new BasicNameValuePair("password", passwort)); jsonStr = sh.makeServiceCall(url, ServiceHandler.POST,PHPanfrage); Log.d("Response: ", "> " + jsonStr); if (jsonStr != null) { try { JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); jsonStr = jsonObj.getString("exception"); } catch (JSONException e) { jsonStr = "ERROR: "+stripHtml(jsonStr); e.printStackTrace(); } } else { //Log.e("ServiceHandler", "Couldn't get any data from the url"); jsonStr = "ERROR: NO INTERNET CONNECTION"; } sh.destroy(); }else{ jsonStr = "ERROR: Passwort stimmt nicht überein"; } return jsonStr; } public int addUser(Integer user_id, String name, String email, String passwort){ SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings ("YYYY-MM-DD"). 
db.execSQL("INSERT INTO user (_id, name, email, passwort) " + "VALUES (" + " " + user_id + " , " + " '" + name + "', " + " '" + email + "', " + " '" + passwort + "' " + ");"); db.execSQL("INSERT INTO settings (user_id) " + "VALUES (" + " " + user_id + " " + ");"); return 0; } public int addUser(Integer mitglied_id, String name, String email){ SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings ("YYYY-MM-DD"). db.execSQL("INSERT INTO user (_id, name, email, passwort, loginstatus) " + "VALUES (" + " '" + mitglied_id + "', " + " '" + name + "', " + " '" + email + "', " + " '00000', " + " 0 " + ");"); return 0; } //Settings public int setNewPasswort(String newpasswort, String oldpasswort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); if(validation(oldpasswort)){ db.execSQL("UPDATE user SET password = '" + newpasswort + "' WHERE _id = " + user_id + ";"); } db.close(); return 0; } public Boolean validation(String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT * " + "FROM user " + "WHERE _id = " + user_id + " " + " AND passwort = '" + passwort + "' ", null ); if(c.moveToFirst()){ c.close(); return true; }else { c.close(); return false; } } public int setNewName(String newName, String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); if(validation(passwort)){ db.execSQL("UPDATE user SET name = '" + newName + "' WHERE _id = " + user_id + ";"); } db.close(); return 0; } public int setNewServer(String newServer, String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); if(validation(passwort)){ db.execSQL("UPDATE settings SET server = '" + newServer + "' WHERE user_id = " + user_id + ";"); } db.close(); return 0; } public String getServer(){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); String server = "nix"; Cursor c = 
db.rawQuery( "SELECT server " + "FROM settings " + "WHERE user_id = " + user_id + " ", null ); if(c.moveToFirst()){ server= c.getString(0); } c.close(); db.close(); return server; } public String getName(){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); String name = "nix"; Cursor c = db.rawQuery( "SELECT name " + "FROM user " + "WHERE _id = " + user_id + " ", null ); if(c.moveToFirst()){ name = c.getString(0); } c.close(); db.close(); return name; } public void setMobileSync(Boolean mobileSync, String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("UPDATE settings SET mobile_sync = " + (mobileSync ? 1 : 0) + " WHERE user_id = " + user_id + ";"); db.close(); } public Boolean getMobileSyncStatus(){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); Boolean zwisch = false; Cursor c = db.rawQuery( "SELECT mobile_sync " + "FROM settings " + "WHERE user_id = " + user_id + " ", null ); if(c.moveToFirst()){ zwisch = c.getInt(0) == 1; } c.close(); db.close(); return zwisch; } }
app/src/main/java/com/htw/finanzplanung/Data_Access.java
package com.htw.finanzplanung; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.provider.DocumentsContract; import android.text.Html; import android.util.Log; import org.apache.http.NameValuePair; import org.apache.http.message.BasicNameValuePair; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.w3c.dom.Document; import java.util.ArrayList; import java.util.HashMap; import java.util.List; //datum TEXT as strings ("YYYY-MM-DD"). public class Data_Access extends SQLiteOpenHelper{ private static final String DATABASE_NAME = "Finanzplanung.sqlite"; public Data_Access(Context context) { super(context, DATABASE_NAME, null, 1); } public void databasefghfg(Context context){ context.deleteDatabase(DATABASE_NAME); } @Override public void onCreate(SQLiteDatabase db) { db.execSQL("PRAGMA foreign_keys = OFF;"); db.execSQL("CREATE TABLE IF NOT EXISTS user " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " name TEXT," + " email TEXT UNIQUE," + " passwort TEXT NOT NULL," + " loginstatus INTEGER DEFAULT 1 " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS gruppe " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " name TEXT," + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE , " + " status TEXT DEFAULT 'u' " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS settings " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " server TEXT DEFAULT 'http://fomenko.eu/Finanzplanung/'," + " mobile_sync INTEGER DEFAULT 0," + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE , " + " status TEXT DEFAULT 'u' " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS ausgabe " + "(" + " _id INTEGER PRIMARY KEY AUTOINCREMENT," + " datum TEXT," + " was TEXT," + " betrag REAL," + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE, " + " gruppe_id INTEGER 
REFERENCES gruppe(_id) ON UPDATE CASCADE ON DELETE CASCADE , " + " status TEXT DEFAULT 'u' " + ")" ); db.execSQL("CREATE TABLE IF NOT EXISTS user_ist_mitglied_in_gruppe " + "(" + " user_id INTEGER REFERENCES user(_id) ON UPDATE CASCADE ON DELETE CASCADE," + " gruppe_id INTEGER REFERENCES gruppe(_id) ON UPDATE CASCADE ON DELETE CASCADE, " + " status TEXT DEFAULT 'u' , " + " PRIMARY KEY(user_id,gruppe_id) " + ")" ); db.execSQL("PRAGMA foreign_keys = ON;"); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("PRAGMA foreign_keys = OFF;"); db.execSQL("DROP TABLE IF EXISTS user"); db.execSQL("DROP TABLE IF EXISTS ausgabe"); db.execSQL("DROP TABLE IF EXISTS gruppe"); db.execSQL("DROP TABLE IF EXISTS settings"); db.execSQL("DROP TABLE IF EXISTS user_ist_mitglied_in_gruppe"); db.execSQL("PRAGMA foreign_keys = ON;"); onCreate(db); } //GruppenVerwalten public List<Gruppe> getMeineGruppen() { Integer user_id = getLoginState(); List<Gruppe> Gruppen = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery("SELECT _id, name FROM gruppe WHERE status != 'd' AND user_id = " + user_id + " ", null); if(c.moveToFirst()){ do{ Gruppen.add(new Gruppe(c.getInt(0), c.getString(1))); }while(c.moveToNext()); } c.close(); db.close(); return Gruppen; } public int deleteGruppe(Integer gruppen_id) { Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); int zwisch = 0; //Log.d("Response Delete: ", "> " + "ready"+user_id); if(user_id.equals(getGruppenMasterID(gruppen_id))){ db.execSQL("UPDATE gruppe SET status = 'd' WHERE _id = " + gruppen_id + " "); //db.execSQL("DELETE FROM gruppe WHERE _id = " + gruppen_id + " "); zwisch = 1; } db.close(); return zwisch; } public int addGruppe(String gruppenname){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("INSERT INTO gruppe (name, user_id) VALUES ('" + gruppenname + "' , " + user_id + ");"); return 0; 
} public Integer getGruppenMasterID(Integer gruppen_id){ SQLiteDatabase db = this.getWritableDatabase(); Integer user_id = -1; Cursor c = db.rawQuery("SELECT user_id FROM gruppe WHERE status != 'd' AND _id = " + gruppen_id + " ;", null); if(c.moveToFirst()){ user_id = c.getInt(0); } c.close(); Log.d("Response MasterID: ", "> " + user_id); return user_id; } //Finanzen public int addGeldausgabe(String datum,String was, Float betrag, Integer gruppen_id){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings ("YYYY-MM-DD"). db.execSQL("INSERT INTO ausgabe (datum, was, betrag, user_id, gruppe_id) " + "VALUES (" + " '" + datum + "', " + " '" + was + "', " + " " + betrag + " , " + " " + user_id + " , " + " " + gruppen_id + " " + ");"); db.close(); return 0; } public Float getGruppenGesamtbetrag(String startdatum, String enddatum, Integer gruppen_id){ SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT sum(betrag) AS Summe " + "FROM ausgabe " + "WHERE status != 'd' AND gruppe_id = " + gruppen_id + " " + "AND ausgabe.datum BETWEEN '" + startdatum + "' AND '" + enddatum + "' " + "GROUP BY gruppe_id ", null ); Float gesamtgeldbetrag = 0f; if(c.moveToFirst()){ gesamtgeldbetrag = c.getFloat(0); } c.close(); db.close(); Log.d("Response betrag: ", "> " + gesamtgeldbetrag); return gesamtgeldbetrag; } public Float getUserGesamtbetrag(String startdatum, String enddatum, Integer gruppen_id, Integer user_id){ SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT sum(betrag) AS Summe " + "FROM ausgabe " + "WHERE status != 'd' AND gruppe_id = " + gruppen_id + " " + "AND user_id = " + user_id + " " + "AND datum BETWEEN '" + startdatum + "' AND '" + enddatum + "' " + "GROUP BY user_id ;", null ); Float gesamtgeldbetrag = 0f; if(c.moveToFirst()){ gesamtgeldbetrag = c.getFloat(0); } c.close(); db.close(); return gesamtgeldbetrag; } public List<Geldausgabe> getUserGeldausgaben(String 
startdatum, String enddatum, Integer gruppen_id, Integer user_id){ List<Geldausgabe> Ausgaben = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT * " + "FROM ausgabe " + "WHERE status != 'd' AND gruppe_id = " + gruppen_id + " " + "AND user_id = " + user_id + " " + "AND ausgabe.datum BETWEEN '" + startdatum + "' AND '" + enddatum + "' ", null ); if(c.moveToFirst()){ do{ Ausgaben.add(new Geldausgabe(c.getInt(0), c.getString(1), c.getString(2), c.getFloat(3))); }while(c.moveToNext()); } c.close(); db.close(); return Ausgaben; } //Gruppenmitglieder public ArrayList<Mitglied> getGruppenMitglieder(Integer gruppe_id){ ArrayList<Mitglied> Mitglieder = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT user._id, user.name " + "FROM user " + "INNER JOIN user_ist_mitglied_in_gruppe " + "ON user._id = user_ist_mitglied_in_gruppe.user_id " + "WHERE user_ist_mitglied_in_gruppe.status != 'd' AND user_ist_mitglied_in_gruppe.gruppe_id = " + gruppe_id + " ", null ); if(c.moveToFirst()){ do{ Mitglieder.add(new Mitglied(c.getInt(0), c.getString(1))); }while(c.moveToNext()); } c.close(); db.close(); return Mitglieder; } //gibt 0 zurück wenn alles gut lief, und gibt -1 zurück wenn der user nicht der eigentümer der Gruppe ist public int deleteMitglied(Integer gruppen_id, Integer mitglied_id){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); Integer zwisch = -1; if(user_id.equals(getGruppenMasterID(gruppen_id))){ db.execSQL("UPDATE user_ist_mitglied_in_gruppe SET status = 'd' WHERE user_id = " + mitglied_id + " AND gruppe_id = " + gruppen_id + " "); //db.execSQL("DELETE FROM user_ist_mitglied_in_gruppe WHERE user_id = " + mitglied_id + " AND gruppe_id = " + gruppen_id + " "); zwisch = 0; } db.close(); return zwisch; } public void addGruppenMitglied(Integer gruppen_id, Integer mitglied_id){ SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings 
("YYYY-MM-DD"). db.execSQL("INSERT INTO user_ist_mitglied_in_gruppe ( user_id, gruppe_id) " + "VALUES (" + " " + mitglied_id + " , " + " " + gruppen_id + " " + ") "); db.close(); } public Integer existUser(String email){ Integer id = -10; SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT _id " + "FROM user " + "WHERE email = '" + email + "' ", null ); if(c.moveToFirst()){ id = c.getInt(0); } c.close(); return id; } public Boolean existUserInGruppe(String email, Integer gruppen_id){ SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT user._id " + "FROM user " + "INNER JOIN user_ist_mitglied_in_gruppe " + "ON user_ist_mitglied_in_gruppe.user_id = user._id " + "WHERE user_ist_mitglied_in_gruppe.status != 'd' AND user.email = '" + email + "' " + "AND user_ist_mitglied_in_gruppe.gruppe_id = '"+gruppen_id+"' ", null ); if(c.moveToFirst()){ c.close(); return true; }else{ c.close(); return false; } } public String addGruppenMitglied(Integer gruppen_id, String email){ Integer user_id = getLoginState(); if(existUser(email).equals(-10)) { String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/usersearch.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", email)); String jsonStr = sh.makeServiceCall(url, ServiceHandler.POST, PHPanfrage); sh.destroy(); Log.d("Responsess: ", "> " + jsonStr); if (jsonStr != null) { try { Log.d("Responsess: ", "> " + "nice"); JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); Log.d("Responsess: ", "> " + "nice2"); if(jsonObj.getString("exception").equals("OK")) { //Log.d("Responsess: ", "> " + "niceOK"); //Log.d("Response mID: ", "> " + jsonObj.getInt("_id")); Integer mitglied_id = jsonObj.getInt("_id"); addUser(mitglied_id, jsonObj.getString("name"), jsonObj.getString("email")); //Log.d("Response email: ", "> " + 
jsonObj.getString("email")); addGruppenMitglied(gruppen_id, mitglied_id); jsonStr = "OK"; }else{ jsonStr = jsonObj.getString("exception"); } } catch (JSONException e) { jsonStr = "ERROR: "; e.printStackTrace(); } } else { jsonStr = "ERROR: NO INTERNET CONNECTION"; } return jsonStr; //return "user existiert nicht"; }else if(existUserInGruppe(email,gruppen_id)){ return "user vorhanden"; } else { addGruppenMitglied(gruppen_id, existUser(email)); return "OK"; } } //Home public ArrayList<Gruppe> getGruppen(){ Integer user_id = getLoginState(); ArrayList<Gruppe> Gruppen = new ArrayList<>(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT gruppe._id, gruppe.name " + "FROM gruppe " + "INNER JOIN user_ist_mitglied_in_gruppe " + "ON gruppe._id = user_ist_mitglied_in_gruppe.gruppe_id " + "WHERE user_ist_mitglied_in_gruppe.status != 'd' AND user_ist_mitglied_in_gruppe.user_id = " + user_id + " ", null ); if(c.moveToFirst()){ do{ Gruppen.add(new Gruppe(c.getInt(0), c.getString(1))); }while(c.moveToNext()); } c.close(); db.close(); return Gruppen; } public void verlasseGruppe(Integer gruppen_id){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("UPDATE user_ist_mitglied_in_gruppe SET status = 'd' WHERE user_id = " + user_id + " AND gruppe_id = " + gruppen_id + " "); //db.execSQL("DELETE FROM user_ist_mitglied_in_gruppe WHERE user_id = " + user_id + " AND gruppe_id = " + gruppen_id + " "); db.close(); } //Startseite public String Login(String email,String passwort){ String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/login.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", email)); PHPanfrage.add(new BasicNameValuePair("password", passwort)); String jsonStr = sh.makeServiceCall(url, ServiceHandler.POST, PHPanfrage); Log.d("Response: ", "> " + 
jsonStr); if (jsonStr != null) { try { JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); if(jsonObj.getString("exception").equals("OK")) { Integer user_id = jsonObj.getInt("_id"); Logout(); setLoginState(user_id); if (getLoginState() != user_id) { addUser(user_id, jsonObj.getString("name"), jsonObj.getString("email"), passwort); } }else{ jsonStr = jsonObj.getString("exception"); } } catch (JSONException e) { jsonStr = "ERROR: "; e.printStackTrace(); } } else { Log.e("ServiceHandler", "Couldn't get any data from the url"); if(!LoginLocal(email, passwort)){ jsonStr = "ERROR: NO INTERNET CONNECTION"; } } sh.destroy(); return jsonStr; } public Boolean LoginLocal(String email, String passwort){ SQLiteDatabase db = this.getWritableDatabase(); Boolean zwisch = false; Cursor c = db.rawQuery( "SELECT _id " + "FROM user " + "WHERE AND email = " + email + " " + "AND passwort = " + passwort + " ", null ); if(c.moveToFirst()){ zwisch = true; setLoginState(c.getInt(0)); } c.close(); db.close(); return zwisch; } public void setLoginState(Integer user_id){ SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("UPDATE user SET loginstatus = 1 WHERE _id = " + user_id + " "); db.close(); } public Integer getLoginState(){ Integer id = null; SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT _id " + "FROM user " + "WHERE loginstatus = 1 ", null ); if(c.moveToFirst()){ do{ id = c.getInt(0); }while(c.moveToNext()); } c.close(); db.close(); return id; } public void Logout(){ SQLiteDatabase db = this.getWritableDatabase(); db.execSQL( "UPDATE user " + "SET loginstatus = 0 " + "WHERE loginstatus = 1 ;" ); db.close(); } public String sendPasswortToEmail(String email){ String jsonStr; String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/sendpassword.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", 
email)); jsonStr = sh.makeServiceCall(url, ServiceHandler.POST,PHPanfrage); if (jsonStr != null) { try { JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); jsonStr = jsonObj.getString("exception"); } catch (JSONException e) { jsonStr = "ERROR: "+stripHtml(jsonStr); e.printStackTrace(); } } else { jsonStr = "ERROR: NO INTERNET CONNECTION"; } sh.destroy(); return jsonStr; } public String stripHtml(String html) { return html.substring(html.indexOf("{"), html.lastIndexOf("}") + 1); } public String stripHtmlForArray(String html) { return html.substring(html.indexOf("["), html.lastIndexOf("]") + 1); } //Registration public String registration(String name, String email, String passwort, String passwortValidation){ String jsonStr; if(passwort.equals(passwortValidation)){ String url = "http://home.htw-berlin.de/~s0539589/Finanzplanung/registration.php"; ServiceHandler sh = new ServiceHandler(); // Making a request to url and getting response List<NameValuePair> PHPanfrage = new ArrayList<>(); PHPanfrage.add(new BasicNameValuePair("email", email)); PHPanfrage.add(new BasicNameValuePair("name", name)); PHPanfrage.add(new BasicNameValuePair("password", passwort)); jsonStr = sh.makeServiceCall(url, ServiceHandler.POST,PHPanfrage); Log.d("Response: ", "> " + jsonStr); if (jsonStr != null) { try { JSONObject jsonObj = new JSONObject(stripHtml(jsonStr)); jsonStr = jsonObj.getString("exception"); } catch (JSONException e) { jsonStr = "ERROR: "+stripHtml(jsonStr); e.printStackTrace(); } } else { //Log.e("ServiceHandler", "Couldn't get any data from the url"); jsonStr = "ERROR: NO INTERNET CONNECTION"; } sh.destroy(); }else{ jsonStr = "ERROR: Passwort stimmt nicht überein"; } return jsonStr; } public int addUser(Integer user_id, String name, String email, String passwort){ SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings ("YYYY-MM-DD"). 
db.execSQL("INSERT INTO user (_id, name, email, passwort) " + "VALUES (" + " " + user_id + " , " + " '" + name + "', " + " '" + email + "', " + " '" + passwort + "' " + ");"); db.execSQL("INSERT INTO settings (user_id) " + "VALUES (" + " " + user_id + " " + ");"); return 0; } public int addUser(Integer mitglied_id, String name, String email){ SQLiteDatabase db = this.getWritableDatabase(); //datum TEXT as strings ("YYYY-MM-DD"). db.execSQL("INSERT INTO user (_id, name, email, passwort, loginstatus) " + "VALUES (" + " '" + mitglied_id + "', " + " '" + name + "', " + " '" + email + "', " + " '00000', " + " 0 " + ");"); return 0; } //Settings public int setNewPasswort(String newpasswort, String oldpasswort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); if(validation(oldpasswort)){ db.execSQL("UPDATE user SET password = '" + newpasswort + "' WHERE _id = " + user_id + ";"); } db.close(); return 0; } public Boolean validation(String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); Cursor c = db.rawQuery( "SELECT * " + "FROM user " + "WHERE _id = " + user_id + " " + " AND passwort = '" + passwort + "' ", null ); if(c.moveToFirst()){ c.close(); return true; }else { c.close(); return false; } } public int setNewName(String newName, String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); if(validation(passwort)){ db.execSQL("UPDATE user SET name = '" + newName + "' WHERE _id = " + user_id + ";"); } db.close(); return 0; } public int setNewServer(String newServer, String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); if(validation(passwort)){ db.execSQL("UPDATE settings SET server = '" + newServer + "' WHERE user_id = " + user_id + ";"); } db.close(); return 0; } public String getServer(){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); String server = "nix"; Cursor c = 
db.rawQuery( "SELECT server " + "FROM settings " + "WHERE user_id = " + user_id + " ", null ); if(c.moveToFirst()){ server= c.getString(0); } c.close(); db.close(); return server; } public String getName(){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); String name = "nix"; Cursor c = db.rawQuery( "SELECT name " + "FROM user " + "WHERE _id = " + user_id + " ", null ); if(c.moveToFirst()){ name = c.getString(0); } c.close(); db.close(); return name; } public void setMobileSync(Boolean mobileSync, String passwort){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("UPDATE settings SET mobile_sync = " + (mobileSync ? 1 : 0) + " WHERE user_id = " + user_id + ";"); db.close(); } public Boolean getMobileSyncStatus(){ Integer user_id = getLoginState(); SQLiteDatabase db = this.getWritableDatabase(); Boolean zwisch = false; Cursor c = db.rawQuery( "SELECT mobile_sync " + "FROM settings " + "WHERE user_id = " + user_id + " ", null ); if(c.moveToFirst()){ zwisch = c.getInt(0) == 1; } c.close(); db.close(); return zwisch; } }
dem Synchronisieren mit der Externen Datenbank sehr nahe
app/src/main/java/com/htw/finanzplanung/Data_Access.java
dem Synchronisieren mit der Externen Datenbank sehr nahe
Java
bsd-3-clause
90006e558460034b01666eefe39c8616a1686ce9
0
agmip/quadui
package org.agmip.ui.quadui; import com.rits.cloning.Cloner; import java.io.File; import java.io.FileInputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Scanner; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.zip.GZIPInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.agmip.dome.DomeUtil; import org.agmip.dome.Engine; import org.agmip.translators.csv.AlnkInput; import org.agmip.translators.csv.DomeInput; import org.agmip.util.MapUtil; import org.apache.pivot.util.concurrent.Task; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ApplyDomeTask extends Task<HashMap> { private static Logger log = LoggerFactory.getLogger(ApplyDomeTask.class); private final HashMap<String, HashMap<String, Object>> ovlDomes = new HashMap<String, HashMap<String, Object>>(); private final HashMap<String, HashMap<String, Object>> stgDomes = new HashMap<String, HashMap<String, Object>>(); private HashMap<String, Object> linkDomes = new HashMap<String, Object>(); private HashMap<String, String> ovlLinks = new HashMap<String, String>(); private HashMap<String, String> stgLinks = new HashMap<String, String>(); private final HashMap<String, String> ovlNewDomeIdMap = new HashMap<String, String>(); private final HashMap<String, String> stgNewDomeIdMap = new HashMap<String, String>(); // private HashMap<String, ArrayList<String>> wthLinks = new HashMap<String, ArrayList<String>>(); // private HashMap<String, ArrayList<String>> soilLinks = new HashMap<String, ArrayList<String>>(); private HashMap source; private String mode; private boolean autoApply; private int thrPoolSize; public ApplyDomeTask(String linkFile, String fieldFile, String strategyFile, String mode, HashMap m, boolean autoApply) { this.source = m; 
this.mode = mode; this.autoApply = autoApply; // Setup the domes here. loadDomeLinkFile(linkFile); log.debug("link csv: {}", ovlLinks); if (mode.equals("strategy")) { loadDomeFile(strategyFile, stgDomes); } loadDomeFile(fieldFile, ovlDomes); thrPoolSize = Runtime.getRuntime().availableProcessors(); } public ApplyDomeTask(String linkFile, String fieldFile, String strategyFile, String mode, HashMap m, boolean autoApply, int thrPoolSize) { this(linkFile, fieldFile, strategyFile, mode, m, autoApply); this.thrPoolSize = thrPoolSize; } private void loadDomeLinkFile(String fileName) { String fileNameTest = fileName.toUpperCase(); log.debug("Loading LINK file: {}", fileName); linkDomes = null; try { if (fileNameTest.endsWith(".CSV")) { log.debug("Entering single ACMO CSV file DOME handling"); AlnkInput reader = new AlnkInput(); linkDomes = (HashMap<String, Object>) reader.readFile(fileName); } else if (fileNameTest.endsWith(".ALNK")) { log.debug("Entering single ALNK file DOME handling"); AlnkInput reader = new AlnkInput(); linkDomes = (HashMap<String, Object>) reader.readFile(fileName); } if (linkDomes != null) { log.debug("link info: {}", linkDomes.toString()); try { if (!linkDomes.isEmpty()) { if (linkDomes.containsKey("link_overlay")) { ovlLinks = (HashMap<String, String>) linkDomes.get("link_overlay"); } if (linkDomes.containsKey("link_stragty")) { stgLinks = (HashMap<String, String>) linkDomes.get("link_stragty"); } } } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } private String getLinkIds(String domeType, HashMap entry) { String exname = MapUtil.getValueOr(entry, "exname", ""); String wst_id = MapUtil.getValueOr(entry, "wst_id", ""); String soil_id = 
MapUtil.getValueOr(entry, "soil_id", ""); String linkIdsExp = getLinkIds(domeType, "EXNAME", exname); String linkIdsWst = getLinkIds(domeType, "WST_ID", wst_id); String linkIdsSoil = getLinkIds(domeType, "SOIL_ID", soil_id); String ret = ""; if (!linkIdsExp.equals("")) { ret += linkIdsExp + "|"; } if (!linkIdsWst.equals("")) { ret += linkIdsWst + "|"; } if (!linkIdsSoil.equals("")) { ret += linkIdsSoil; } if (ret.endsWith("|")) { ret = ret.substring(0, ret.length() - 1); } return ret; } private String getLinkIds(String domeType, String idType, String id) { HashMap<String, String> links; if (domeType.equals("strategy")) { links = stgLinks; } else if (domeType.equals("overlay")) { links = ovlLinks; } else { log.error("Non reconized DOME type has been deceted for {} ", id); return ""; } if (links.isEmpty() || id.equals("")) { return ""; } String linkIds = ""; ArrayList<String> altLinkIds = new ArrayList(); altLinkIds.add(idType + "_ALL"); if (id.matches("[^_]+_\\d+$")) { altLinkIds.add(idType + "_" + id.replaceAll("_\\d+$", "")); altLinkIds.add(idType + "_" + id + "__1"); } else if (id.matches(".+_\\d+__\\d+$")) { altLinkIds.add(idType + "_" + id.replaceAll("__\\d+$", "")); altLinkIds.add(idType + "_" + id.replaceAll("_\\d+__\\d+$", "")); } altLinkIds.add(idType + "_" + id); for (String linkId : altLinkIds) { if (links.containsKey(linkId)) { linkIds += links.get(linkId) + "|"; } } if (linkIds.endsWith("|")) { linkIds = linkIds.substring(0, linkIds.length() - 1); } return linkIds; } private String reviseDomeIds(HashMap entry, String domeIds, String domeType) { HashMap<String, HashMap<String, Object>> domes; HashMap<String, String> domeClimIdMap; if (domeType.equals("strategy")) { domes = stgDomes; domeClimIdMap = ovlNewDomeIdMap; } else if (domeType.equals("overlay")) { domes = ovlDomes; domeClimIdMap = stgNewDomeIdMap; } else { return domeIds; } StringBuilder newDomeIds = new StringBuilder(); for (String domeId : domeIds.split("[|]")) { String[] metas = 
domeId.split("-"); if (metas.length < 7) { if (domeClimIdMap.containsKey(domeId)) { domeId = domeClimIdMap.get(domeId); } else { String climId = ""; HashMap<String, Object> dome = MapUtil.getObjectOr(domes, domeId, new HashMap()); // Only auto-fix the clim_id for seasonal strategy DOME if (!domeType.equals("overlay")) { climId = MapUtil.getValueOr(entry, "clim_id", "").toUpperCase(); if (!dome.isEmpty()) { ArrayList<HashMap<String, String>> rules = DomeUtil.getRules(dome); for (HashMap<String, String> rule : rules) { String var = MapUtil.getValueOr(rule, "variable", "").toLowerCase(); if (var.equals("clim_id")) { climId = MapUtil.getValueOr(rule, "args", climId).toUpperCase(); } } } } StringBuilder newDomeId = new StringBuilder(); for (int i = 0; i < metas.length - 1; i++) { newDomeId.append(metas[i]).append("-"); } newDomeId.append(climId).append("-").append(metas[metas.length - 1]); domeClimIdMap.put(domeId, newDomeId.toString()); domeId = newDomeId.toString(); DomeUtil.updateMetaInfo(dome, domeId); } } newDomeIds.append(domeId).append("|"); } if (newDomeIds.charAt(newDomeIds.length() - 1) == '|') { newDomeIds.deleteCharAt(newDomeIds.length() - 1); } return newDomeIds.toString(); } private void loadDomeFile(String fileName, HashMap<String, HashMap<String, Object>> domes) { String fileNameTest = fileName.toUpperCase(); log.info("Loading DOME file: {}", fileName); if (fileNameTest.endsWith(".ZIP")) { log.debug("Entering Zip file handling"); ZipFile z; try { z = new ZipFile(fileName); Enumeration entries = z.entries(); while (entries.hasMoreElements()) { // Do we handle nested zips? Not yet. 
ZipEntry entry = (ZipEntry) entries.nextElement(); File zipFileName = new File(entry.getName()); if (zipFileName.getName().toLowerCase().endsWith(".csv") && !zipFileName.getName().startsWith(".")) { log.debug("Processing file: {}", zipFileName.getName()); DomeInput translator = new DomeInput(); translator.readCSV(z.getInputStream(entry)); HashMap<String, Object> dome = translator.getDome(); log.debug("dome info: {}", dome.toString()); String domeName = DomeUtil.generateDomeName(dome); if (!domeName.equals("----")) { domes.put(domeName, new HashMap<String, Object>(dome)); } } } z.close(); } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } else if (fileNameTest.endsWith(".CSV")) { log.debug("Entering single CSV file DOME handling"); try { DomeInput translator = new DomeInput(); HashMap<String, Object> dome = (HashMap<String, Object>) translator.readFile(fileName); String domeName = DomeUtil.generateDomeName(dome); log.debug("Dome name: {}", domeName); log.debug("Dome layout: {}", dome.toString()); domes.put(domeName, dome); } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } else if (fileNameTest.endsWith(".JSON") || fileNameTest.endsWith(".DOME")) { log.debug("Entering single ACE Binary file DOME handling"); try { ObjectMapper mapper = new ObjectMapper(); String json; if (fileNameTest.endsWith(".JSON")) { json = new Scanner(new FileInputStream(fileName), "UTF-8").useDelimiter("\\A").next(); } else { json = new Scanner(new GZIPInputStream(new FileInputStream(fileName)), "UTF-8").useDelimiter("\\A").next(); } HashMap<String, HashMap<String, Object>> tmp = mapper.readValue(json, new TypeReference<HashMap<String, HashMap<String, Object>>>() { }); // domes.putAll(tmp); for (HashMap dome : tmp.values()) { String 
domeName = DomeUtil.generateDomeName(dome); if (!domeName.equals("----")) { domes.put(domeName, new HashMap<String, Object>(dome)); } } log.debug("Domes layout: {}", domes.toString()); } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } } @Override public HashMap<String, Object> execute() { // First extract all the domes and put them in a HashMap by DOME_NAME // The read the DOME_NAME field of the CSV file // Split the DOME_NAME, and then apply sequentially to the HashMap. // PLEASE NOTE: This can be a massive undertaking if the source map // is really large. Need to find optimization points. HashMap<String, Object> output = new HashMap<String, Object>(); //HashMap<String, ArrayList<HashMap<String, String>>> dome; // Load the dome if (ovlDomes.isEmpty() && stgDomes.isEmpty()) { log.info("No DOME to apply."); HashMap<String, Object> d = new HashMap<String, Object>(); //d.put("domeinfo", new HashMap<String, String>()); d.put("domeoutput", source); return d; } if (autoApply) { HashMap<String, Object> d = new HashMap<String, Object>(); if (ovlDomes.size() > 1) { log.error("Auto-Apply feature only allows one field overlay file per run"); d.put("errors", "Auto-Apply feature only allows one field overlay file per run"); return d; } else if (stgDomes.size() > 1) { log.error("Auto-Apply feature only allows one seasonal strategy file per run"); d.put("errors", "Auto-Apply feature only allows one seasonal strategy file per run"); return d; } } // Flatten the data and apply the dome. 
Engine domeEngine; ArrayList<HashMap<String, Object>> flattenedData = MapUtil.flatPack(source); boolean noExpMode = false; if (flattenedData.isEmpty()) { log.info("No experiment data detected, will try Weather and Soil data only mode"); noExpMode = true; flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils")); flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers")); // flatSoilAndWthData(flattenedData, "soil"); // flatSoilAndWthData(flattenedData, "weather"); if (flattenedData.isEmpty()) { HashMap<String, Object> d = new HashMap<String, Object>(); log.error("No data found from input file, no DOME will be applied for data set {}", source.toString()); d.put("errors", "Loaded raw data is invalid, please check input files"); return d; } } if (mode.equals("strategy")) { log.debug("Domes: {}", stgDomes.toString()); log.debug("Entering Strategy mode!"); if (!noExpMode) { updateWthReferences(updateExpReferences(true)); flattenedData = MapUtil.flatPack(source); } // int cnt = 0; // for (HashMap<String, Object> entry : MapUtil.getRawPackageContents(source, "experiments")) { // // log.debug("Exp at {}: {}, {}", // cnt, // entry.get("wst_id"), // entry.get("clim_id"), // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"), // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id") // ); // cnt++; // } String stgDomeName = ""; if (autoApply) { for (String domeName : stgDomes.keySet()) { stgDomeName = domeName; } log.info("Auto apply seasonal strategy: {}", stgDomeName); } Engine generatorEngine; ArrayList<HashMap<String, Object>> strategyResults = new ArrayList<HashMap<String, Object>>(); for (HashMap<String, Object> entry : flattenedData) { // Remove observed data from input data if apply strategy DOME entry.remove("observed"); if (autoApply) { entry.put("seasonal_strategy", stgDomeName); } String domeName = getLinkIds("strategy", entry); if (domeName.equals("")) { domeName = 
MapUtil.getValueOr(entry, "seasonal_strategy", ""); } else { entry.put("seasonal_strategy", domeName); log.debug("Apply seasonal strategy domes from link csv: {}", domeName); } domeName = reviseDomeIds(entry, domeName, "strategy"); entry.put("seasonal_strategy", domeName); String tmp[] = domeName.split("[|]"); String strategyName; if (tmp.length > 1) { log.warn("Multiple seasonal strategy dome is not supported yet, only the first dome will be applied"); for (int i = 1; i < tmp.length; i++) { setFailedDomeId(entry, "seasonal_dome_failed", tmp[i]); } } strategyName = tmp[0].toUpperCase(); log.info("Apply DOME {} for {}", strategyName, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>")))); log.debug("Looking for ss: {}", strategyName); if (!strategyName.equals("")) { if (stgDomes.containsKey(strategyName)) { log.debug("Found strategyName"); entry.put("dome_applied", "Y"); entry.put("seasonal_dome_applied", "Y"); generatorEngine = new Engine(stgDomes.get(strategyName), true); if (!noExpMode) { // Check if there is no weather or soil data matched with experiment if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) { log.warn("No scenario weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); } if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) { log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); } } ArrayList<HashMap<String, Object>> newEntries = generatorEngine.applyStg(flatSoilAndWthData(entry, noExpMode)); log.debug("New Entries to add: {}", newEntries.size()); strategyResults.addAll(newEntries); } else { log.error("Cannot find strategy: {}", strategyName); setFailedDomeId(entry, "seasonal_dome_failed", strategyName); } } } log.debug("=== FINISHED GENERATION ==="); log.debug("Generated count: {}", strategyResults.size()); ArrayList<HashMap<String, Object>> exp = 
MapUtil.getRawPackageContents(source, "experiments"); exp.clear(); exp.addAll(strategyResults); flattenedData = MapUtil.flatPack(source); if (noExpMode) { flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils")); flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers")); } } if (!noExpMode) { if (mode.equals("strategy")) { updateExpReferences(false); } else { updateWthReferences(updateExpReferences(false)); } flattenedData = MapUtil.flatPack(source); } String ovlDomeName = ""; if (autoApply) { for (String domeName : ovlDomes.keySet()) { ovlDomeName = domeName; } log.info("Auto apply field overlay: {}", ovlDomeName); } int cnt = 0; ArrayList<ApplyDomeRunner> engineRunners = new ArrayList(); ExecutorService executor; if (thrPoolSize > 1) { log.info("Create the thread pool with the size of {} for appling filed overlay DOME", thrPoolSize); executor = Executors.newFixedThreadPool(thrPoolSize); } else if (thrPoolSize == 1) { log.info("Create the single thread pool for appling filed overlay DOME"); executor = Executors.newSingleThreadExecutor(); } else { log.info("Create the cached thread pool with flexible size for appling filed overlay DOME"); executor = Executors.newCachedThreadPool(); } HashMap<String, HashMap<String, ArrayList<HashMap<String, String>>>> soilDomeMap = new HashMap(); HashMap<String, HashMap<String, ArrayList<HashMap<String, String>>>> wthDomeMap = new HashMap(); HashSet<String> soilIds = getSWIdsSet("soils", new String[]{"soil_id"}); HashSet<String> wthIds = getSWIdsSet("weathers", new String[]{"wst_id", "clim_id"}); ArrayList<HashMap> soilDataArr = new ArrayList(); ArrayList<HashMap> wthDataArr = new ArrayList(); ArrayList<ArrayList<Engine>> soilEngines = new ArrayList(); ArrayList<ArrayList<Engine>> wthEngines = new ArrayList(); for (HashMap<String, Object> entry : flattenedData) { log.debug("Exp at {}: {}, {}, {}", cnt, entry.get("wst_id"), ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"), 
((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id")); cnt++; if (autoApply) { entry.put("field_overlay", ovlDomeName); } String domeName = getLinkIds("overlay", entry); if (domeName.equals("")) { domeName = MapUtil.getValueOr(entry, "field_overlay", ""); } else { entry.put("field_overlay", domeName); log.debug("Apply field overlay domes from link csv: {}", domeName); } domeName = reviseDomeIds(entry, domeName, "overlay"); entry.put("field_overlay", domeName); String soilId = MapUtil.getValueOr(entry, "soil_id", ""); String wstId = MapUtil.getValueOr(entry, "wst_id", ""); String climId = MapUtil.getValueOr(entry, "clim_id", ""); ArrayList<Engine> sEngines = new ArrayList(); ArrayList<Engine> wEngines = new ArrayList(); String sDomeIds = ""; String wDomeIds = ""; ArrayList<HashMap<String, String>> sRulesTotal = new ArrayList(); ArrayList<HashMap<String, String>> wRulesTotal = new ArrayList(); if (!domeName.equals("")) { String tmp[] = domeName.split("[|]"); int tmpLength = tmp.length; ArrayList<Engine> engines = new ArrayList(); for (int i = 0; i < tmpLength; i++) { String tmpDomeId = tmp[i].toUpperCase(); log.debug("Apply DOME {} for {}", tmpDomeId, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>")))); log.debug("Looking for dome_name: {}", tmpDomeId); if (ovlDomes.containsKey(tmpDomeId)) { domeEngine = new Engine(ovlDomes.get(tmpDomeId)); entry.put("dome_applied", "Y"); entry.put("field_dome_applied", "Y"); ArrayList<HashMap<String, String>> sRules = domeEngine.extractSoilRules(); if (!sRules.isEmpty()) { if (sDomeIds.equals("")) { sDomeIds = tmpDomeId; } else { sDomeIds += "|" + tmpDomeId; } sEngines.add(new Engine(sRules, tmpDomeId)); sRulesTotal.addAll(sRules); } ArrayList<HashMap<String, String>> wRules = domeEngine.extractWthRules(); if (!wRules.isEmpty()) { if (wDomeIds.equals("")) { wDomeIds = tmpDomeId; } else { wDomeIds += "|" + tmpDomeId; } wEngines.add(new 
Engine(wRules, tmpDomeId)); wRulesTotal.addAll(wRules); } engines.add(domeEngine); } else { log.error("Cannot find overlay: {}", tmpDomeId); setFailedDomeId(entry, "field_dome_failed", tmpDomeId); } } HashMap<String, ArrayList<HashMap<String, String>>> lastAppliedSoilDomes = soilDomeMap.get(soilId); if (lastAppliedSoilDomes == null) { soilDataArr.add(entry); soilEngines.add(sEngines); lastAppliedSoilDomes = new HashMap(); lastAppliedSoilDomes.put(sDomeIds, sRulesTotal); soilDomeMap.put(soilId, lastAppliedSoilDomes); } else if (!lastAppliedSoilDomes.containsKey(sDomeIds)) { boolean isSameRules = false; for (ArrayList<HashMap<String, String>> rules : lastAppliedSoilDomes.values()) { if (rules.equals(sRulesTotal)) { isSameRules = true; break; } } if (!isSameRules) { replicateSoil(entry, soilIds); soilDataArr.add(entry); soilEngines.add(sEngines); lastAppliedSoilDomes.put(sDomeIds, sRulesTotal); } } HashMap<String, ArrayList<HashMap<String, String>>> lastAppliedWthDomes = wthDomeMap.get(wstId+climId); if (lastAppliedWthDomes == null) { wthDataArr.add(entry); wthEngines.add(wEngines); lastAppliedWthDomes = new HashMap(); lastAppliedWthDomes.put(wDomeIds, wRulesTotal); wthDomeMap.put(wstId+climId, lastAppliedWthDomes); } else if (!lastAppliedWthDomes.containsKey(wDomeIds)) { boolean isSameRules = false; for (ArrayList<HashMap<String, String>> rules : lastAppliedWthDomes.values()) { if (rules.equals(wRulesTotal)) { isSameRules = true; break; } } if (!isSameRules) { replicateWth(entry, wthIds); wthDataArr.add(entry); wthEngines.add(wEngines); lastAppliedWthDomes.put(wDomeIds, wRulesTotal); } } engineRunners.add(new ApplyDomeRunner(engines, entry, noExpMode, mode)); } } for (int i = 0; i < soilDataArr.size(); i++) { for (Engine e : soilEngines.get(i)) { e.apply(flatSoilAndWthData(soilDataArr.get(i), noExpMode)); } } for (int i = 0; i < wthDataArr.size(); i++) { for (Engine e : wthEngines.get(i)) { e.apply(flatSoilAndWthData(wthDataArr.get(i), noExpMode)); } } for 
(ApplyDomeRunner engineRunner : engineRunners) { executor.submit(engineRunner); // engine.apply(flatSoilAndWthData(entry, noExpMode)); // ArrayList<String> strategyList = engine.getGenerators(); // if (!strategyList.isEmpty()) { // log.warn("The following DOME commands in the field overlay file are ignored : {}", strategyList.toString()); // } // if (!noExpMode && !mode.equals("strategy")) { // // Check if there is no weather or soil data matched with experiment // if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) { // log.warn("No baseline weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); // } // if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) { // log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); // } // } } executor.shutdown(); while (!executor.isTerminated()) { } // executor = null; if (noExpMode) { output.put("domeoutput", source); } else { output.put("domeoutput", MapUtil.bundle(flattenedData)); } if (ovlDomes != null && !ovlDomes.isEmpty()) { output.put("ovlDomes", ovlDomes); } if (stgDomes != null && !stgDomes.isEmpty()) { output.put("stgDomes", stgDomes); } return output; } // private void flatSoilAndWthData(ArrayList<HashMap<String, Object>> flattenedData, String key) { // ArrayList<HashMap<String, Object>> arr = MapUtil.getRawPackageContents(source, key + "s"); // for (HashMap<String, Object> data : arr) { // HashMap<String, Object> tmp = new HashMap<String, Object>(); // tmp.put(key, data); // flattenedData.add(tmp); // } // } private HashMap<String, Object> flatSoilAndWthData(HashMap<String, Object> data, boolean noExpFlg) { if (!noExpFlg) { return data; } HashMap<String, Object> ret; if (data.containsKey("dailyWeather")) { ret = new HashMap<String, Object>(); ret.put("weather", data); } else if (data.containsKey("soilLayer")) { ret = new HashMap<String, Object>(); ret.put("soil", data); } else { ret = data; } return ret; } private 
void setFailedDomeId(HashMap data, String failKey, String failId) {
        // Records the id of a DOME that could not be applied under failKey,
        // joining multiple failures with a "|" separator.
        // BUG FIX: the null check was inverted -- an existing value was
        // overwritten with only the new id, while a missing value produced the
        // literal string "null|<id>". Now the first failure stores the id and
        // subsequent failures append to it.
        String failIds;
        if ((failIds = (String) data.get(failKey)) == null) {
            data.put(failKey, failId);
        } else {
            data.put(failKey, failIds + "|" + failId);
        }
    }

    /**
     * Scans every experiment and lets each matched DOME update the
     * experiment's weather/soil reference variables via
     * {@code Engine.updateWSRef}. For seasonal strategy DOMEs the WST_ID is
     * additionally widened to the 8-character form by appending the "0XXX"
     * climate suffix when needed.
     *
     * @param isStgDome true to scan seasonal strategy DOMEs (at most one DOME
     *                  is honored per experiment), false for field overlays
     * @return true when any CLIM_ID/WST_ID reference was changed
     */
    private boolean updateExpReferences(boolean isStgDome) {
        ArrayList<HashMap<String, Object>> expArr = MapUtil.getRawPackageContents(source, "experiments");
        boolean isClimIDchanged = false;
        HashMap<String, HashMap<String, Object>> domes;
        String linkid;
        String domeKey;
        int maxDomeNum;
        if (isStgDome) {
            domes = stgDomes;
            linkid = "strategy";
            domeKey = "seasonal_strategy";
            maxDomeNum = 1;
        } else {
            domes = ovlDomes;
            linkid = "overlay";
            domeKey = "field_overlay";
            maxDomeNum = Integer.MAX_VALUE;
        }
        // Pre-scan the seasonal DOME to update reference variables; with
        // autoApply the single loaded DOME name is applied to every experiment.
        String autoDomeName = "";
        if (autoApply) {
            for (String domeName : domes.keySet()) {
                autoDomeName = domeName;
            }
        }
        for (HashMap<String, Object> exp : expArr) {
            // Link-file assignments take precedence over the per-entry field.
            String domeName = getLinkIds(linkid, exp);
            if (domeName.equals("")) {
                if (autoApply) {
                    domeName = autoDomeName;
                } else {
                    domeName = MapUtil.getValueOr(exp, domeKey, "");
                }
            }
            domeName = reviseDomeIds(exp, domeName, linkid);
            if (!domeName.equals("")) {
                String tmp[] = domeName.split("[|]");
                int tmpLength = Math.min(tmp.length, maxDomeNum);
                for (int i = 0; i < tmpLength; i++) {
                    String tmpDomeId = tmp[i].toUpperCase();
                    log.debug("Looking for dome_name: {}", tmpDomeId);
                    if (domes.containsKey(tmpDomeId)) {
                        log.debug("Found DOME {}", tmpDomeId);
                        Engine domeEngine = new Engine(domes.get(tmpDomeId));
                        isClimIDchanged = domeEngine.updateWSRef(exp, isStgDome, mode.equals("strategy"));
                        // Check if the wst_id must switch to the 8-character
                        // version ("0XXX" appears to denote the baseline
                        // climate id -- confirm against the DOME spec).
                        String wst_id = MapUtil.getValueOr(exp, "wst_id", "");
                        if (isStgDome && wst_id.length() < 8) {
                            exp.put("wst_id", wst_id + "0XXX");
                            exp.put("clim_id", "0XXX");
                            isClimIDchanged = true;
                        }
                        log.debug("New exp linkage: {}", exp);
                    }
                }
            }
        }
        return isClimIDchanged;
    }

    private void updateWthReferences(boolean isClimIDchanged) {
ArrayList<HashMap<String, Object>> wthArr = MapUtil.getRawPackageContents(source, "weathers");
        boolean isStrategy = mode.equals("strategy");
        // Baseline weather records whose WST_ID may still be shrunk back to the
        // 4-character site id at the end of this pass, keyed by site id.
        HashMap<String, HashMap> unfixedWths = new HashMap();
        // Site ids that must keep the 8-character WST_ID (multiple baselines found).
        HashSet<String> fixedWths = new HashSet();
        for (HashMap<String, Object> wth : wthArr) {
            String wst_id = MapUtil.getValueOr(wth, "wst_id", "");
            String clim_id = MapUtil.getValueOr(wth, "clim_id", "");
            if (clim_id.equals("")) {
                // Derive the climate id from an 8-character WST_ID; otherwise
                // fall back to "0XXX" (baseline marker -- confirm with DOME spec).
                if (wst_id.length() == 8) {
                    clim_id = wst_id.substring(4, 8);
                } else {
                    clim_id = "0XXX";
                }
            }
            // If the user assigned a CLIM_ID in the DOME, or non-baseline data is
            // found in overlay mode, switch WST_ID to the 8-character version.
            if (isStrategy || isClimIDchanged || !clim_id.startsWith("0")) {
                if (wst_id.length() < 8) {
                    wth.put("wst_id", wst_id + clim_id);
                }
            } else {
                // Temporarily switch every WST_ID in the data set to 8 characters.
                if (wst_id.length() < 8) {
                    wth.put("wst_id", wst_id + clim_id);
                } else {
                    wst_id = wst_id.substring(0, 4);
                }
                // Check if there are multiple baseline records for one site.
                if (unfixedWths.containsKey(wst_id)) {
                    log.warn("There is multiple baseline weather data for site [{}], please choose a particular baseline via field overlay DOME", wst_id);
                    unfixedWths.remove(wst_id);
                    fixedWths.add(wst_id);
                } else {
                    if (!fixedWths.contains(wst_id)) {
                        unfixedWths.put(wst_id, wth);
                    }
                }
            }
        }
        // If no CLIM_ID was provided in overlay mode, switch the unique baseline
        // WST_IDs back to the 4-character site id.
        if (!isStrategy && !unfixedWths.isEmpty()) {
            for (String wst_id : unfixedWths.keySet()) {
                unfixedWths.get(wst_id).put("wst_id", wst_id);
            }
        }
    }

    /**
     * Deep-clones the entry's soil record under a fresh, unused soil id
     * (original id plus "_<n>" suffix), re-points the entry at the clone, and
     * registers the clone in the package's "soils" list and in soilIds.
     */
    private void replicateSoil(HashMap entry, HashSet soilIds) {
        String newSoilId = MapUtil.getValueOr(entry, "soil_id", "");
        HashMap data = MapUtil.getObjectOr(entry, "soil", new HashMap());
        if (data.isEmpty()) {
            // Nothing to replicate when the entry carries no soil data.
            return;
        }
        Cloner cloner = new Cloner();
        HashMap newData = cloner.deepClone(data);
        ArrayList<HashMap<String, Object>> soils = MapUtil.getRawPackageContents(source, "soils");
        // Find the first unused "_<n>" suffix for this soil id.
        int count = 1;
        while (soilIds.contains(newSoilId + "_" + count)) {
            count++;
        }
        newSoilId += "_" + count;
        newData.put("soil_id", newSoilId);
        entry.put("soil_id", newSoilId);
        entry.put("soil", newData);
        soilIds.add(newSoilId);
        soils.add(newData);
    }

    /**
     * Deep-clones the entry's weather record under a fresh, unused station id
     * built as <2-char institute><2-digit counter><clim id> (falling back to a
     * 1-char institute with a 3-digit counter once 99 is exhausted), re-points
     * the entry at the clone, and registers it in "weathers" and wthIds.
     */
    private void replicateWth(HashMap entry, HashSet wthIds) {
        String newWthId = MapUtil.getValueOr(entry, "wst_id", "");
        String climId = MapUtil.getValueOr(entry, "clim_id", "");
        HashMap data = MapUtil.getObjectOr(entry, "weather", new HashMap());
        if (data.isEmpty()) {
            // Nothing to replicate when the entry carries no weather data.
            return;
        }
        Cloner cloner = new Cloner();
        HashMap newData = cloner.deepClone(data);
        ArrayList<HashMap<String, Object>> wths = MapUtil.getRawPackageContents(source, "weathers");
        // Institute prefix: first two characters of the old id, padded with "0".
        String inst;
        if (newWthId.length() > 1) {
            inst = newWthId.substring(0, 2);
        } else {
            inst = newWthId + "0";
        }
        newWthId = inst + "01" + climId;
        int count = 1;
        while (wthIds.contains(newWthId) && count < 99) {
            count++;
            newWthId = String.format("%s%02d%s", inst, count, climId);
        }
        if (count == 99 && wthIds.contains(newWthId)) {
            // Two-digit space exhausted: shrink the institute prefix to one
            // character and continue with a three-digit counter.
            inst = inst.substring(0, 1);
            newWthId = inst + "100" + climId;
            while (wthIds.contains(newWthId)) {
                count++;
                newWthId = String.format("%s%03d%s", inst, count, climId);
            }
        }
        newData.put("wst_id", newWthId);
        entry.put("wst_id", newWthId);
        entry.put("weather", newData);
        wthIds.add(newWthId);
        wths.add(newData);
    }

    /**
     * Builds the set of existing composite ids for the given package section
     * (e.g. "soils" keyed by soil_id, "weathers" keyed by wst_id + clim_id) by
     * concatenating the values of idKeys for every record.
     */
    private HashSet<String> getSWIdsSet(String dataKey, String... idKeys) {
        HashSet<String> ret = new HashSet();
        ArrayList<HashMap<String, Object>> arr = MapUtil.getRawPackageContents(source, dataKey);
        for (HashMap data : arr) {
            StringBuilder sb = new StringBuilder();
            for (String idKey : idKeys) {
                sb.append(MapUtil.getValueOr(data, idKey, ""));
            }
            ret.add(sb.toString());
        }
        return ret;
    }
}
src/main/java/org/agmip/ui/quadui/ApplyDomeTask.java
package org.agmip.ui.quadui; import com.rits.cloning.Cloner; import java.io.File; import java.io.FileInputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Scanner; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.zip.GZIPInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.agmip.dome.DomeUtil; import org.agmip.dome.Engine; import org.agmip.translators.csv.AlnkInput; import org.agmip.translators.csv.DomeInput; import org.agmip.util.MapUtil; import org.apache.pivot.util.concurrent.Task; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ApplyDomeTask extends Task<HashMap> { private static Logger log = LoggerFactory.getLogger(ApplyDomeTask.class); private final HashMap<String, HashMap<String, Object>> ovlDomes = new HashMap<String, HashMap<String, Object>>(); private final HashMap<String, HashMap<String, Object>> stgDomes = new HashMap<String, HashMap<String, Object>>(); private HashMap<String, Object> linkDomes = new HashMap<String, Object>(); private HashMap<String, String> ovlLinks = new HashMap<String, String>(); private HashMap<String, String> stgLinks = new HashMap<String, String>(); private final HashMap<String, String> ovlNewDomeIdMap = new HashMap<String, String>(); private final HashMap<String, String> stgNewDomeIdMap = new HashMap<String, String>(); // private HashMap<String, ArrayList<String>> wthLinks = new HashMap<String, ArrayList<String>>(); // private HashMap<String, ArrayList<String>> soilLinks = new HashMap<String, ArrayList<String>>(); private HashMap source; private String mode; private boolean autoApply; private int thrPoolSize; public ApplyDomeTask(String linkFile, String fieldFile, String strategyFile, String mode, HashMap m, boolean autoApply) { this.source = m; 
this.mode = mode; this.autoApply = autoApply; // Setup the domes here. loadDomeLinkFile(linkFile); log.debug("link csv: {}", ovlLinks); if (mode.equals("strategy")) { loadDomeFile(strategyFile, stgDomes); } loadDomeFile(fieldFile, ovlDomes); thrPoolSize = Runtime.getRuntime().availableProcessors(); } public ApplyDomeTask(String linkFile, String fieldFile, String strategyFile, String mode, HashMap m, boolean autoApply, int thrPoolSize) { this(linkFile, fieldFile, strategyFile, mode, m, autoApply); this.thrPoolSize = thrPoolSize; } private void loadDomeLinkFile(String fileName) { String fileNameTest = fileName.toUpperCase(); log.debug("Loading LINK file: {}", fileName); linkDomes = null; try { if (fileNameTest.endsWith(".CSV")) { log.debug("Entering single ACMO CSV file DOME handling"); AlnkInput reader = new AlnkInput(); linkDomes = (HashMap<String, Object>) reader.readFile(fileName); } else if (fileNameTest.endsWith(".ALNK")) { log.debug("Entering single ALNK file DOME handling"); AlnkInput reader = new AlnkInput(); linkDomes = (HashMap<String, Object>) reader.readFile(fileName); } if (linkDomes != null) { log.debug("link info: {}", linkDomes.toString()); try { if (!linkDomes.isEmpty()) { if (linkDomes.containsKey("link_overlay")) { ovlLinks = (HashMap<String, String>) linkDomes.get("link_overlay"); } if (linkDomes.containsKey("link_stragty")) { stgLinks = (HashMap<String, String>) linkDomes.get("link_stragty"); } } } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } private String getLinkIds(String domeType, HashMap entry) { String exname = MapUtil.getValueOr(entry, "exname", ""); String wst_id = MapUtil.getValueOr(entry, "wst_id", ""); String soil_id = 
MapUtil.getValueOr(entry, "soil_id", "");
        // Collect the link-file DOME ids registered against this entry's
        // experiment name, weather station id and soil id, joined with "|".
        String linkIdsExp = getLinkIds(domeType, "EXNAME", exname);
        String linkIdsWst = getLinkIds(domeType, "WST_ID", wst_id);
        String linkIdsSoil = getLinkIds(domeType, "SOIL_ID", soil_id);
        String ret = "";
        if (!linkIdsExp.equals("")) {
            ret += linkIdsExp + "|";
        }
        if (!linkIdsWst.equals("")) {
            ret += linkIdsWst + "|";
        }
        if (!linkIdsSoil.equals("")) {
            ret += linkIdsSoil;
        }
        if (ret.endsWith("|")) {
            ret = ret.substring(0, ret.length() - 1);
        }
        return ret;
    }

    /**
     * Looks up the link table for the given DOME type and returns the DOME
     * ids mapped to this id, joined with "|". Besides the exact key
     * "&lt;idType&gt;_&lt;id&gt;", the wildcard key "&lt;idType&gt;_ALL" and
     * suffix-stripped variants of numbered/duplicated ids are consulted.
     *
     * @param domeType "strategy" or "overlay"; anything else is rejected
     * @param idType   key prefix, e.g. "EXNAME", "WST_ID" or "SOIL_ID"
     * @param id       the value to look up; "" short-circuits to ""
     * @return the matched DOME ids joined with "|", or "" when none match
     */
    private String getLinkIds(String domeType, String idType, String id) {
        HashMap<String, String> links;
        if (domeType.equals("strategy")) {
            links = stgLinks;
        } else if (domeType.equals("overlay")) {
            links = ovlLinks;
        } else {
            // BUG FIX: error message was garbled ("Non reconized ... deceted").
            log.error("Unrecognized DOME type has been detected for {}", id);
            return "";
        }
        if (links.isEmpty() || id.equals("")) {
            return "";
        }
        String linkIds = "";
        ArrayList<String> altLinkIds = new ArrayList();
        altLinkIds.add(idType + "_ALL");
        if (id.matches("[^_]+_\\d+$")) {
            // Numbered id "BASE_n": also try the bare base and the "__1" form.
            altLinkIds.add(idType + "_" + id.replaceAll("_\\d+$", ""));
            altLinkIds.add(idType + "_" + id + "__1");
        } else if (id.matches(".+_\\d+__\\d+$")) {
            // Duplicated id "BASE_n__m": also try "BASE_n" and the bare base.
            altLinkIds.add(idType + "_" + id.replaceAll("__\\d+$", ""));
            altLinkIds.add(idType + "_" + id.replaceAll("_\\d+__\\d+$", ""));
        }
        altLinkIds.add(idType + "_" + id);
        for (String linkId : altLinkIds) {
            if (links.containsKey(linkId)) {
                linkIds += links.get(linkId) + "|";
            }
        }
        if (linkIds.endsWith("|")) {
            linkIds = linkIds.substring(0, linkIds.length() - 1);
        }
        return linkIds;
    }

    /**
     * Rewrites each DOME id in domeIds to the 7-part form by inserting a
     * climate id segment, caches the mapping, updates the DOME's own meta
     * info, and stores the revised "|"-joined list back into the entry under
     * "seasonal_strategy" or "field_overlay".
     */
    private void reviseDomeIds(HashMap entry, String domeIds, String domeType) {
        HashMap<String, HashMap<String, Object>> domes;
        HashMap<String, String> domeClimIdMap;
        String domeName;
        if (domeType.equals("strategy")) {
            domes = stgDomes;
            // NOTE(review): "strategy" uses ovlNewDomeIdMap while "overlay"
            // uses stgNewDomeIdMap -- these caches look swapped; confirm the
            // intended pairing before relying on the cached rewrites.
            domeClimIdMap = ovlNewDomeIdMap;
            domeName = "seasonal_strategy";
        } else if (domeType.equals("overlay")) {
            domes = ovlDomes;
            domeClimIdMap = stgNewDomeIdMap;
            domeName = "field_overlay";
        } else {
            return;
        }
        StringBuilder newDomeIds = new StringBuilder();
        for
(String domeId : domeIds.split("[|]")) { String[] metas = domeId.split("-"); if (metas.length < 7) { if (domeClimIdMap.containsKey(domeId)) { domeId = domeClimIdMap.get(domeId); } else { String climId = ""; HashMap<String, Object> dome = MapUtil.getObjectOr(domes, domeId, new HashMap()); // Only auto-fix the clim_id for seasonal strategy DOME if (!domeType.equals("overlay")) { climId = MapUtil.getValueOr(entry, "clim_id", "").toUpperCase(); if (!dome.isEmpty()) { ArrayList<HashMap<String, String>> rules = DomeUtil.getRules(dome); for (HashMap<String, String> rule : rules) { String var = MapUtil.getValueOr(rule, "variable", "").toLowerCase(); if (var.equals("clim_id")) { climId = MapUtil.getValueOr(rule, "args", climId).toUpperCase(); } } } } StringBuilder newDomeId = new StringBuilder(); for (int i = 0; i < metas.length - 1; i++) { newDomeId.append(metas[i]).append("-"); } newDomeId.append(climId).append("-").append(metas[metas.length - 1]); domeClimIdMap.put(domeId, newDomeId.toString()); domeId = newDomeId.toString(); DomeUtil.updateMetaInfo(dome, domeId); } } newDomeIds.append(domeId).append("|"); } if (newDomeIds.charAt(newDomeIds.length() - 1) == '|') { newDomeIds.deleteCharAt(newDomeIds.length() - 1); } entry.put(domeName, newDomeIds.toString()); } private void loadDomeFile(String fileName, HashMap<String, HashMap<String, Object>> domes) { String fileNameTest = fileName.toUpperCase(); log.info("Loading DOME file: {}", fileName); if (fileNameTest.endsWith(".ZIP")) { log.debug("Entering Zip file handling"); ZipFile z; try { z = new ZipFile(fileName); Enumeration entries = z.entries(); while (entries.hasMoreElements()) { // Do we handle nested zips? Not yet. 
ZipEntry entry = (ZipEntry) entries.nextElement(); File zipFileName = new File(entry.getName()); if (zipFileName.getName().toLowerCase().endsWith(".csv") && !zipFileName.getName().startsWith(".")) { log.debug("Processing file: {}", zipFileName.getName()); DomeInput translator = new DomeInput(); translator.readCSV(z.getInputStream(entry)); HashMap<String, Object> dome = translator.getDome(); log.debug("dome info: {}", dome.toString()); String domeName = DomeUtil.generateDomeName(dome); if (!domeName.equals("----")) { domes.put(domeName, new HashMap<String, Object>(dome)); } } } z.close(); } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } else if (fileNameTest.endsWith(".CSV")) { log.debug("Entering single CSV file DOME handling"); try { DomeInput translator = new DomeInput(); HashMap<String, Object> dome = (HashMap<String, Object>) translator.readFile(fileName); String domeName = DomeUtil.generateDomeName(dome); log.debug("Dome name: {}", domeName); log.debug("Dome layout: {}", dome.toString()); domes.put(domeName, dome); } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } else if (fileNameTest.endsWith(".JSON") || fileNameTest.endsWith(".DOME")) { log.debug("Entering single ACE Binary file DOME handling"); try { ObjectMapper mapper = new ObjectMapper(); String json; if (fileNameTest.endsWith(".JSON")) { json = new Scanner(new FileInputStream(fileName), "UTF-8").useDelimiter("\\A").next(); } else { json = new Scanner(new GZIPInputStream(new FileInputStream(fileName)), "UTF-8").useDelimiter("\\A").next(); } HashMap<String, HashMap<String, Object>> tmp = mapper.readValue(json, new TypeReference<HashMap<String, HashMap<String, Object>>>() { }); // domes.putAll(tmp); for (HashMap dome : tmp.values()) { String 
domeName = DomeUtil.generateDomeName(dome); if (!domeName.equals("----")) { domes.put(domeName, new HashMap<String, Object>(dome)); } } log.debug("Domes layout: {}", domes.toString()); } catch (Exception ex) { log.error("Error processing DOME file: {}", ex.getMessage()); HashMap<String, Object> d = new HashMap<String, Object>(); d.put("errors", ex.getMessage()); } } } @Override public HashMap<String, Object> execute() { // First extract all the domes and put them in a HashMap by DOME_NAME // The read the DOME_NAME field of the CSV file // Split the DOME_NAME, and then apply sequentially to the HashMap. // PLEASE NOTE: This can be a massive undertaking if the source map // is really large. Need to find optimization points. HashMap<String, Object> output = new HashMap<String, Object>(); //HashMap<String, ArrayList<HashMap<String, String>>> dome; // Load the dome if (ovlDomes.isEmpty() && stgDomes.isEmpty()) { log.info("No DOME to apply."); HashMap<String, Object> d = new HashMap<String, Object>(); //d.put("domeinfo", new HashMap<String, String>()); d.put("domeoutput", source); return d; } if (autoApply) { HashMap<String, Object> d = new HashMap<String, Object>(); if (ovlDomes.size() > 1) { log.error("Auto-Apply feature only allows one field overlay file per run"); d.put("errors", "Auto-Apply feature only allows one field overlay file per run"); return d; } else if (stgDomes.size() > 1) { log.error("Auto-Apply feature only allows one seasonal strategy file per run"); d.put("errors", "Auto-Apply feature only allows one seasonal strategy file per run"); return d; } } // Flatten the data and apply the dome. 
Engine domeEngine; ArrayList<HashMap<String, Object>> flattenedData = MapUtil.flatPack(source); boolean noExpMode = false; if (flattenedData.isEmpty()) { log.info("No experiment data detected, will try Weather and Soil data only mode"); noExpMode = true; flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils")); flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers")); // flatSoilAndWthData(flattenedData, "soil"); // flatSoilAndWthData(flattenedData, "weather"); if (flattenedData.isEmpty()) { HashMap<String, Object> d = new HashMap<String, Object>(); log.error("No data found from input file, no DOME will be applied for data set {}", source.toString()); d.put("errors", "Loaded raw data is invalid, please check input files"); return d; } } if (mode.equals("strategy")) { log.debug("Domes: {}", stgDomes.toString()); log.debug("Entering Strategy mode!"); if (!noExpMode) { updateWthReferences(updateExpReferences(true)); flattenedData = MapUtil.flatPack(source); } // int cnt = 0; // for (HashMap<String, Object> entry : MapUtil.getRawPackageContents(source, "experiments")) { // // log.debug("Exp at {}: {}, {}", // cnt, // entry.get("wst_id"), // entry.get("clim_id"), // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("wst_id"), // ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id") // ); // cnt++; // } String stgDomeName = ""; if (autoApply) { for (String domeName : stgDomes.keySet()) { stgDomeName = domeName; } log.info("Auto apply seasonal strategy: {}", stgDomeName); } Engine generatorEngine; ArrayList<HashMap<String, Object>> strategyResults = new ArrayList<HashMap<String, Object>>(); for (HashMap<String, Object> entry : flattenedData) { // Remove observed data from input data if apply strategy DOME entry.remove("observed"); if (autoApply) { entry.put("seasonal_strategy", stgDomeName); } String domeName = getLinkIds("strategy", entry); if (domeName.equals("")) { domeName = 
MapUtil.getValueOr(entry, "seasonal_strategy", ""); } else { entry.put("seasonal_strategy", domeName); log.debug("Apply seasonal strategy domes from link csv: {}", domeName); } entry.remove("seasonal_strategy"); reviseDomeIds(entry, domeName, "strategy"); domeName = MapUtil.getValueOr(entry, "seasonal_strategy", ""); String tmp[] = domeName.split("[|]"); String strategyName; if (tmp.length > 1) { log.warn("Multiple seasonal strategy dome is not supported yet, only the first dome will be applied"); for (int i = 1; i < tmp.length; i++) { setFailedDomeId(entry, "seasonal_dome_failed", tmp[i]); } } strategyName = tmp[0].toUpperCase(); log.info("Apply DOME {} for {}", strategyName, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>")))); log.debug("Looking for ss: {}", strategyName); if (!strategyName.equals("")) { if (stgDomes.containsKey(strategyName)) { log.debug("Found strategyName"); entry.put("dome_applied", "Y"); entry.put("seasonal_dome_applied", "Y"); generatorEngine = new Engine(stgDomes.get(strategyName), true); if (!noExpMode) { // Check if there is no weather or soil data matched with experiment if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) { log.warn("No scenario weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); } if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) { log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); } } ArrayList<HashMap<String, Object>> newEntries = generatorEngine.applyStg(flatSoilAndWthData(entry, noExpMode)); log.debug("New Entries to add: {}", newEntries.size()); strategyResults.addAll(newEntries); } else { log.error("Cannot find strategy: {}", strategyName); setFailedDomeId(entry, "seasonal_dome_failed", strategyName); } } } log.debug("=== FINISHED GENERATION ==="); log.debug("Generated count: {}", strategyResults.size()); ArrayList<HashMap<String, 
Object>> exp = MapUtil.getRawPackageContents(source, "experiments"); exp.clear(); exp.addAll(strategyResults); flattenedData = MapUtil.flatPack(source); if (noExpMode) { flattenedData.addAll(MapUtil.getRawPackageContents(source, "soils")); flattenedData.addAll(MapUtil.getRawPackageContents(source, "weathers")); } } if (!noExpMode) { if (mode.equals("strategy")) { updateExpReferences(false); } else { updateWthReferences(updateExpReferences(false)); } flattenedData = MapUtil.flatPack(source); } String ovlDomeName = ""; if (autoApply) { for (String domeName : ovlDomes.keySet()) { ovlDomeName = domeName; } log.info("Auto apply field overlay: {}", ovlDomeName); } int cnt = 0; ArrayList<ApplyDomeRunner> engineRunners = new ArrayList(); ExecutorService executor; if (thrPoolSize > 1) { log.info("Create the thread pool with the size of {} for appling filed overlay DOME", thrPoolSize); executor = Executors.newFixedThreadPool(thrPoolSize); } else if (thrPoolSize == 1) { log.info("Create the single thread pool for appling filed overlay DOME"); executor = Executors.newSingleThreadExecutor(); } else { log.info("Create the cached thread pool with flexible size for appling filed overlay DOME"); executor = Executors.newCachedThreadPool(); } HashMap<String, HashMap<String, ArrayList<HashMap<String, String>>>> soilDomeMap = new HashMap(); HashMap<String, HashMap<String, ArrayList<HashMap<String, String>>>> wthDomeMap = new HashMap(); HashSet<String> soilIds = getSWIdsSet("soils", new String[]{"soil_id"}); HashSet<String> wthIds = getSWIdsSet("weathers", new String[]{"wst_id", "clim_id"}); ArrayList<HashMap> soilDataArr = new ArrayList(); ArrayList<HashMap> wthDataArr = new ArrayList(); ArrayList<ArrayList<Engine>> soilEngines = new ArrayList(); ArrayList<ArrayList<Engine>> wthEngines = new ArrayList(); for (HashMap<String, Object> entry : flattenedData) { log.debug("Exp at {}: {}, {}, {}", cnt, entry.get("wst_id"), ((HashMap) MapUtil.getObjectOr(entry, "weather", new 
HashMap())).get("wst_id"), ((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).get("clim_id")); cnt++; if (autoApply) { entry.put("field_overlay", ovlDomeName); } String domeName = getLinkIds("overlay", entry); if (domeName.equals("")) { domeName = MapUtil.getValueOr(entry, "field_overlay", ""); } else { entry.put("field_overlay", domeName); log.debug("Apply field overlay domes from link csv: {}", domeName); } reviseDomeIds(entry, domeName, "overlay"); domeName = MapUtil.getValueOr(entry, "field_overlay", ""); String soilId = MapUtil.getValueOr(entry, "soil_id", ""); String wstId = MapUtil.getValueOr(entry, "wst_id", ""); String climId = MapUtil.getValueOr(entry, "clim_id", ""); ArrayList<Engine> sEngines = new ArrayList(); ArrayList<Engine> wEngines = new ArrayList(); String sDomeIds = ""; String wDomeIds = ""; ArrayList<HashMap<String, String>> sRulesTotal = new ArrayList(); ArrayList<HashMap<String, String>> wRulesTotal = new ArrayList(); if (!domeName.equals("")) { String tmp[] = domeName.split("[|]"); int tmpLength = tmp.length; ArrayList<Engine> engines = new ArrayList(); for (int i = 0; i < tmpLength; i++) { String tmpDomeId = tmp[i].toUpperCase(); log.debug("Apply DOME {} for {}", tmpDomeId, MapUtil.getValueOr(entry, "exname", MapUtil.getValueOr(entry, "soil_id", MapUtil.getValueOr(entry, "wst_id", "<Unknow>")))); log.debug("Looking for dome_name: {}", tmpDomeId); if (ovlDomes.containsKey(tmpDomeId)) { domeEngine = new Engine(ovlDomes.get(tmpDomeId)); entry.put("dome_applied", "Y"); entry.put("field_dome_applied", "Y"); ArrayList<HashMap<String, String>> sRules = domeEngine.extractSoilRules(); if (!sRules.isEmpty()) { if (sDomeIds.equals("")) { sDomeIds = tmpDomeId; } else { sDomeIds += "|" + tmpDomeId; } sEngines.add(new Engine(sRules, tmpDomeId)); sRulesTotal.addAll(sRules); } ArrayList<HashMap<String, String>> wRules = domeEngine.extractWthRules(); if (!wRules.isEmpty()) { if (wDomeIds.equals("")) { wDomeIds = tmpDomeId; } else { wDomeIds += 
"|" + tmpDomeId; } wEngines.add(new Engine(wRules, tmpDomeId)); wRulesTotal.addAll(wRules); } engines.add(domeEngine); } else { log.error("Cannot find overlay: {}", tmpDomeId); setFailedDomeId(entry, "field_dome_failed", tmpDomeId); } } HashMap<String, ArrayList<HashMap<String, String>>> lastAppliedSoilDomes = soilDomeMap.get(soilId); if (lastAppliedSoilDomes == null) { soilDataArr.add(entry); soilEngines.add(sEngines); lastAppliedSoilDomes = new HashMap(); lastAppliedSoilDomes.put(sDomeIds, sRulesTotal); soilDomeMap.put(soilId, lastAppliedSoilDomes); } else if (!lastAppliedSoilDomes.containsKey(sDomeIds)) { boolean isSameRules = false; for (ArrayList<HashMap<String, String>> rules : lastAppliedSoilDomes.values()) { if (rules.equals(sRulesTotal)) { isSameRules = true; break; } } if (!isSameRules) { replicateSoil(entry, soilIds); soilDataArr.add(entry); soilEngines.add(sEngines); lastAppliedSoilDomes.put(sDomeIds, sRulesTotal); } } HashMap<String, ArrayList<HashMap<String, String>>> lastAppliedWthDomes = wthDomeMap.get(wstId+climId); if (lastAppliedWthDomes == null) { wthDataArr.add(entry); wthEngines.add(wEngines); lastAppliedWthDomes = new HashMap(); lastAppliedWthDomes.put(wDomeIds, wRulesTotal); wthDomeMap.put(wstId+climId, lastAppliedWthDomes); } else if (!lastAppliedWthDomes.containsKey(wDomeIds)) { boolean isSameRules = false; for (ArrayList<HashMap<String, String>> rules : lastAppliedWthDomes.values()) { if (rules.equals(wRulesTotal)) { isSameRules = true; break; } } if (!isSameRules) { replicateWth(entry, wthIds); wthDataArr.add(entry); wthEngines.add(wEngines); lastAppliedWthDomes.put(wDomeIds, wRulesTotal); } } engineRunners.add(new ApplyDomeRunner(engines, entry, noExpMode, mode)); } } for (int i = 0; i < soilDataArr.size(); i++) { for (Engine e : soilEngines.get(i)) { e.apply(flatSoilAndWthData(soilDataArr.get(i), noExpMode)); } } for (int i = 0; i < wthDataArr.size(); i++) { for (Engine e : wthEngines.get(i)) { 
e.apply(flatSoilAndWthData(wthDataArr.get(i), noExpMode)); } } for (ApplyDomeRunner engineRunner : engineRunners) { executor.submit(engineRunner); // engine.apply(flatSoilAndWthData(entry, noExpMode)); // ArrayList<String> strategyList = engine.getGenerators(); // if (!strategyList.isEmpty()) { // log.warn("The following DOME commands in the field overlay file are ignored : {}", strategyList.toString()); // } // if (!noExpMode && !mode.equals("strategy")) { // // Check if there is no weather or soil data matched with experiment // if (((HashMap) MapUtil.getObjectOr(entry, "weather", new HashMap())).isEmpty()) { // log.warn("No baseline weather data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); // } // if (((HashMap) MapUtil.getObjectOr(entry, "soil", new HashMap())).isEmpty()) { // log.warn("No soil data found for: [{}]", MapUtil.getValueOr(entry, "exname", "N/A")); // } // } } executor.shutdown(); while (!executor.isTerminated()) { } // executor = null; if (noExpMode) { output.put("domeoutput", source); } else { output.put("domeoutput", MapUtil.bundle(flattenedData)); } if (ovlDomes != null && !ovlDomes.isEmpty()) { output.put("ovlDomes", ovlDomes); } if (stgDomes != null && !stgDomes.isEmpty()) { output.put("stgDomes", stgDomes); } return output; } // private void flatSoilAndWthData(ArrayList<HashMap<String, Object>> flattenedData, String key) { // ArrayList<HashMap<String, Object>> arr = MapUtil.getRawPackageContents(source, key + "s"); // for (HashMap<String, Object> data : arr) { // HashMap<String, Object> tmp = new HashMap<String, Object>(); // tmp.put(key, data); // flattenedData.add(tmp); // } // } private HashMap<String, Object> flatSoilAndWthData(HashMap<String, Object> data, boolean noExpFlg) { if (!noExpFlg) { return data; } HashMap<String, Object> ret; if (data.containsKey("dailyWeather")) { ret = new HashMap<String, Object>(); ret.put("weather", data); } else if (data.containsKey("soilLayer")) { ret = new HashMap<String, Object>(); 
ret.put("soil", data); } else { ret = data; } return ret; } private void setFailedDomeId(HashMap data, String failKey, String failId) { String failIds; if ((failIds = (String) data.get(failKey)) != null) { data.put(failKey, failId); } else { data.put(failKey, failIds + "|" + failId); } } private boolean updateExpReferences(boolean isStgDome) { ArrayList<HashMap<String, Object>> expArr = MapUtil.getRawPackageContents(source, "experiments"); boolean isClimIDchanged = false; HashMap<String, HashMap<String, Object>> domes; String linkid; String domeKey; int maxDomeNum; if (isStgDome) { domes = stgDomes; linkid = "strategy"; domeKey = "seasonal_strategy"; maxDomeNum = 1; } else { domes = ovlDomes; linkid = "overlay"; domeKey = "field_overlay"; maxDomeNum = Integer.MAX_VALUE; } // Pre-scan the seasnal DOME to update reference variables String autoDomeName = ""; if (autoApply) { for (String domeName : domes.keySet()) { autoDomeName = domeName; } } for (HashMap<String, Object> exp : expArr) { String domeName = getLinkIds(linkid, exp); if (domeName.equals("")) { if (autoApply) { domeName = autoDomeName; } else { domeName = MapUtil.getValueOr(exp, domeKey, ""); } } if (!domeName.equals("")) { String tmp[] = domeName.split("[|]"); int tmpLength = Math.min(tmp.length, maxDomeNum); for (int i = 0; i < tmpLength; i++) { String tmpDomeId = tmp[i].toUpperCase(); log.debug("Looking for dome_name: {}", tmpDomeId); if (domes.containsKey(tmpDomeId)) { log.debug("Found DOME {}", tmpDomeId); Engine domeEngine = new Engine(domes.get(tmpDomeId)); isClimIDchanged = domeEngine.updateWSRef(exp, isStgDome, mode.equals("strategy")); // Check if the wst_id is switch to 8-bit long version String wst_id = MapUtil.getValueOr(exp, "wst_id", ""); if (isStgDome && wst_id.length() < 8) { exp.put("wst_id", wst_id + "0XXX"); exp.put("clim_id", "0XXX"); isClimIDchanged = true; } log.debug("New exp linkage: {}", exp); } } } } return isClimIDchanged; } private void updateWthReferences(boolean 
isClimIDchanged) { ArrayList<HashMap<String, Object>> wthArr = MapUtil.getRawPackageContents(source, "weathers"); boolean isStrategy = mode.equals("strategy"); HashMap<String, HashMap> unfixedWths = new HashMap(); HashSet<String> fixedWths = new HashSet(); for (HashMap<String, Object> wth : wthArr) { String wst_id = MapUtil.getValueOr(wth, "wst_id", ""); String clim_id = MapUtil.getValueOr(wth, "clim_id", ""); if (clim_id.equals("")) { if (wst_id.length() == 8) { clim_id = wst_id.substring(4, 8); } else { clim_id = "0XXX"; } } // If user assign CLIM_ID in the DOME, or find non-baseline data in the overlay mode, then switch WST_ID to 8-bit version if (isStrategy || isClimIDchanged || !clim_id.startsWith("0")) { if (wst_id.length() < 8) { wth.put("wst_id", wst_id + clim_id); } } else { // Temporally switch all the WST_ID to 8-bit in the data set if (wst_id.length() < 8) { wth.put("wst_id", wst_id + clim_id); } else { wst_id = wst_id.substring(0, 4); } // Check if there is multiple baseline record for one site if (unfixedWths.containsKey(wst_id)) { log.warn("There is multiple baseline weather data for site [{}], please choose a particular baseline via field overlay DOME", wst_id); unfixedWths.remove(wst_id); fixedWths.add(wst_id); } else { if (!fixedWths.contains(wst_id)) { unfixedWths.put(wst_id, wth); } } } } // If no CLIM_ID provided in the overlay mode, then switch the baseline WST_ID to 4-bit. 
if (!isStrategy && !unfixedWths.isEmpty()) { for (String wst_id : unfixedWths.keySet()) { unfixedWths.get(wst_id).put("wst_id", wst_id); } } } private void replicateSoil(HashMap entry, HashSet soilIds) { String newSoilId = MapUtil.getValueOr(entry, "soil_id", ""); HashMap data = MapUtil.getObjectOr(entry, "soil", new HashMap()); if (data.isEmpty()) { return; } Cloner cloner = new Cloner(); HashMap newData = cloner.deepClone(data); ArrayList<HashMap<String, Object>> soils = MapUtil.getRawPackageContents(source, "soils"); int count = 1; while (soilIds.contains(newSoilId + "_" + count)) { count++; } newSoilId += "_" + count; newData.put("soil_id", newSoilId); entry.put("soil_id", newSoilId); entry.put("soil", newData); soilIds.add(newSoilId); soils.add(newData); } private void replicateWth(HashMap entry, HashSet wthIds) { String newWthId = MapUtil.getValueOr(entry, "wst_id", ""); String climId = MapUtil.getValueOr(entry, "clim_id", ""); HashMap data = MapUtil.getObjectOr(entry, "weather", new HashMap()); if (data.isEmpty()) { return; } Cloner cloner = new Cloner(); HashMap newData = cloner.deepClone(data); ArrayList<HashMap<String, Object>> wths = MapUtil.getRawPackageContents(source, "weathers"); String inst; if (newWthId.length() > 1) { inst = newWthId.substring(0, 2); } else { inst = newWthId + "0"; } newWthId = inst + "01" + climId; int count = 1; while (wthIds.contains(newWthId) && count < 99) { count++; newWthId = String.format("%s%02d%s", inst, count, climId); } if (count == 99 && wthIds.contains(newWthId)) { inst = inst.substring(0, 1); newWthId = inst + "100" + climId; while (wthIds.contains(newWthId)) { count++; newWthId = String.format("%s%03d%s", inst, count, climId); } } newData.put("wst_id", newWthId); entry.put("wst_id", newWthId); entry.put("weather", newData); wthIds.add(newWthId); wths.add(newData); } private HashSet<String> getSWIdsSet(String dataKey, String... 
idKeys) { HashSet<String> ret = new HashSet(); ArrayList<HashMap<String, Object>> arr = MapUtil.getRawPackageContents(source, dataKey); for (HashMap data : arr) { StringBuilder sb = new StringBuilder(); for (String idKey : idKeys) { sb.append(MapUtil.getValueOr(data, idKey, "")); } ret.add(sb.toString()); } return ret; } }
Fix the issue that old style DOME would not get linked correctly
src/main/java/org/agmip/ui/quadui/ApplyDomeTask.java
Fix the issue that old style DOME would not get linked correctly
Java
mit
db44e48e18f117392b7b127674b54b1170a8068c
0
core9-archive/module-static-handler
package io.core9.plugin.statichandler; import io.core9.plugin.filesmanager.FileRepository; import io.core9.plugin.server.handler.Middleware; import io.core9.plugin.server.request.Request; import io.core9.plugin.server.vertx.VertxServer; import java.io.IOException; import java.io.InputStream; import java.util.Map; import net.xeoh.plugins.base.annotations.PluginImplementation; import net.xeoh.plugins.base.annotations.injections.InjectPlugin; import com.google.common.io.ByteStreams; @PluginImplementation public class MongoStaticHandlerImpl implements StaticHandler { @InjectPlugin private VertxServer server; @InjectPlugin private FileRepository repository; @Override public void execute() { server.use("/static/.*", new Middleware() { @Override public void handle(Request request) { String filePath = request.getPath().replaceFirst("/static", ""); try { Map<String,Object> file = repository.getFileContentsByName(request.getVirtualHost(), filePath); if(file == null){ request.getResponse().setStatusCode(404); request.getResponse().setStatusMessage("File not found"); }else{ request.getResponse().putHeader("Content-Type", (String) file.get("ContentType")); request.getResponse().sendBinary(ByteStreams.toByteArray((InputStream) file.get("stream"))); } } catch (IOException e) { request.getResponse().setStatusCode(404); request.getResponse().setStatusMessage("File not found"); } } }); } }
src/impl/java/io/core9/plugin/statichandler/MongoStaticHandlerImpl.java
package io.core9.plugin.statichandler; import io.core9.plugin.filesmanager.FileRepository; import io.core9.plugin.server.handler.Middleware; import io.core9.plugin.server.request.Request; import io.core9.plugin.server.vertx.VertxServer; import java.io.IOException; import java.io.InputStream; import java.util.Map; import net.xeoh.plugins.base.annotations.PluginImplementation; import net.xeoh.plugins.base.annotations.injections.InjectPlugin; import com.google.common.io.ByteStreams; @PluginImplementation public class MongoStaticHandlerImpl implements StaticHandler { @InjectPlugin private VertxServer server; @InjectPlugin private FileRepository repository; @Override public void execute() { server.use("/static/.*", new Middleware() { @Override public void handle(Request request) { String filePath = request.getPath().replaceFirst("/static", ""); try { Map<String,Object> file = repository.getFileContentsByName(request.getVirtualHost(), filePath); request.getResponse().putHeader("Content-Type", (String) file.get("ContentType")); request.getResponse().sendBinary(ByteStreams.toByteArray((InputStream) file.get("stream"))); } catch (IOException e) { request.getResponse().setStatusCode(404); request.getResponse().setStatusMessage("File not found"); } } }); } }
null pointer fix
src/impl/java/io/core9/plugin/statichandler/MongoStaticHandlerImpl.java
null pointer fix
Java
mit
243dfaeeae7aae29877f9f4b0bf8a52f4bb3602d
0
vishesh/sealnote,vishesh/sealnote
package com.twistedplane.sealnote; import android.app.Application; import android.view.ViewConfiguration; import com.twistedplane.sealnote.data.DatabaseHandler; import net.sqlcipher.database.SQLiteDatabase; import java.lang.reflect.Field; public class SealnoteApplication extends Application { public final static String TAG = "SealnoteApplication"; private static DatabaseHandler mDatabase; @Override public void onCreate() { super.onCreate(); // load sqlite-cipher native libraries SQLiteDatabase.loadLibs(this); mDatabase = new DatabaseHandler(this); // Force show overflow button on Action Bar try { ViewConfiguration config = ViewConfiguration.get(this); Field menuKeyField = ViewConfiguration.class.getDeclaredField("sHasPermanentMenuKey"); if (menuKeyField != null) { menuKeyField.setAccessible(true); menuKeyField.setBoolean(config, false); } } catch (Exception e) { // presumably, not relevant } } public static DatabaseHandler getDatabase() { return mDatabase; } }
Sealnote/src/main/java/com/twistedplane/sealnote/SealnoteApplication.java
package com.twistedplane.sealnote; import android.app.Application; import com.twistedplane.sealnote.data.DatabaseHandler; import net.sqlcipher.database.SQLiteDatabase; public class SealnoteApplication extends Application { public final static String TAG = "SealnoteApplication"; private static DatabaseHandler mDatabase; @Override public void onCreate() { super.onCreate(); // load sqlite-cipher native libraries SQLiteDatabase.loadLibs(this); mDatabase = new DatabaseHandler(this); } public static DatabaseHandler getDatabase() { return mDatabase; } }
UI: Always show overflow button on ActionBar Overflow button is not visible on devices with hardware menu button. Feels inconsitent with complete touch experience
Sealnote/src/main/java/com/twistedplane/sealnote/SealnoteApplication.java
UI: Always show overflow button on ActionBar
Java
mit
00055cdc985a262d404ab76eca2a8530f40efacd
0
PauliNiva/Sotechat,PauliNiva/Sotechat,PauliNiva/Sotechat
package sotechat.domainService; import java.util.Date; import java.util.List; import javax.transaction.Transactional; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import sotechat.domain.Message; import sotechat.domain.Conversation; import sotechat.repo.ConversationRepo; import sotechat.repo.MessageRepo; import sotechat.repo.PersonRepo; @Service public class MessageService { private MessageRepo messageRepo; private ConversationRepo conversationRepo; private PersonRepo personRepo; @Autowired public MessageService(MessageRepo pMessageRepo, ConversationRepo pConversationRepo) throws Exception{ this.messageRepo = pMessageRepo; this.conversationRepo = pConversationRepo; } @Transactional public void addMessage(Message message) throws Exception { messageRepo.save(message); } @Transactional public void removeMessage(Long messageId) throws Exception { Message message = messageRepo.findOne(messageId); String conversationId = message.getConversation().getChannelId(); conversationRepo.findOne(conversationId).getMessagesOfConversation() .remove(message); messageRepo.delete(messageId); } public List<Message> messagesOfConversation(String channelId) throws Exception { return messageRepo.findByConversation(channelId); } @Transactional public void removeConversation(String channelId) throws Exception { List<Message> messages= messageRepo.findByConversation(channelId); messageRepo.deleteInBatch(messages); } public void setMessageRepo(MessageRepo pMessageRepo) throws Exception { this.messageRepo = pMessageRepo; } public MessageRepo getMessageRepo() throws Exception { return this.messageRepo; } public void setConversationRepo(final ConversationRepo pConversationRepo) throws Exception { this.conversationRepo = pConversationRepo; } public ConversationRepo getConversationRepo() throws Exception { return this.conversationRepo; } }
src/main/java/sotechat/domainService/MessageService.java
package sotechat.domainService; import java.util.Date; import java.util.List; import javax.transaction.Transactional; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import sotechat.domain.Message; import sotechat.domain.Conversation; import sotechat.repo.ConversationRepo; import sotechat.repo.MessageRepo; import sotechat.repo.PersonRepo; @Service public class MessageService { private MessageRepo messageRepo; private ConversationRepo conversationRepo; private PersonRepo personRepo; @Autowired public MessageService(MessageRepo pMessageRepo, ConversationRepo pConversationRepo) { this.messageRepo = pMessageRepo; this.conversationRepo = pConversationRepo; } @Transactional public void addMessage(Message message) { messageRepo.save(message); } @Transactional public void removeMessage(Long messageId) { Message message = messageRepo.findOne(messageId); String conversationId = message.getConversation().getChannelId(); conversationRepo.findOne(conversationId).getMessagesOfConversation() .remove(message); messageRepo.delete(messageId); } public List<Message> messagesOfConversation(String channelId){ return messageRepo.findByConversation(channelId); } @Transactional public void removeConversation(String channelId){ List<Message> messages= messageRepo.findByConversation(channelId); messageRepo.deleteInBatch(messages); } public void setMessageRepo(MessageRepo pMessageRepo) { this.messageRepo = pMessageRepo; } public MessageRepo getMessageRepo() { return this.messageRepo; } public void setConversationRepo(final ConversationRepo pConversationRepo) { this.conversationRepo = pConversationRepo; } public ConversationRepo getConversationRepo() { return this.conversationRepo; } }
Update MessageService.java
src/main/java/sotechat/domainService/MessageService.java
Update MessageService.java
Java
mit
ce596f3f2a51b609e46f9bd695600605a6274816
0
WycliffeAssociates/translationRecorder,WycliffeAssociates/translationRecorder,WycliffeAssociates/translationRecorder,WycliffeAssociates/translationRecorder,WycliffeAssociates/translationRecorder,WycliffeAssociates/translationRecorder
package wycliffeassociates.recordingapp.AudioVisualization; import android.content.Context; import android.graphics.Canvas; import android.support.v4.view.GestureDetectorCompat; import android.util.AttributeSet; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import wycliffeassociates.recordingapp.AudioInfo; import wycliffeassociates.recordingapp.AudioVisualization.Utils.U; import wycliffeassociates.recordingapp.Playback.Editing.CutOp; /** * A canvas view intended for use as the main waveform */ public class WaveformView extends CanvasView { private byte[] mBuffer; private boolean mDrawingFromBuffer = false; private float[] mSamples; private int mTimeToDraw; private int mMarkerStartLoc; private int mMarkerEndLoc; private ScaleGestureDetector sgd; private CutOp mCut; private boolean mGestures = false; private int mDb; public void setCut(CutOp cut){ mCut = cut; } /** * Sets the location (in time (ms)) for the start marker * @param markerStart start marker location in ms */ public void setMarkerToDrawStart(int markerStart) { this.mMarkerStartLoc = markerStart; } /** * Sets the location (in time (ms)) for the end marker * @param markerEnd end marker location in ms */ public void setMarkerToDrawEnd(int markerEnd) { this.mMarkerEndLoc = markerEnd; } /** * Detects gestures on the main canvas */ class MyGestureListener extends GestureDetector.SimpleOnGestureListener { /** * Detects if the user is scrolling the main waveform horizontally * @param distX refers to how far the user scrolled horizontally * @param distY is ignored for this use as we are only allowing horizontal scrolling * @param event1 not accessed, contains information about the start of the gesture * @param event2 not used, contains information about the end of the gesture * @return must be true for gesture detection */ @Override public boolean onScroll(MotionEvent event1, MotionEvent event2, float distX, float distY) { //Should only perform a 
scroll if the WavPlayer exists, since scrolling performs a seek if (mManager != null && mGestures) { //moves playback by the distance (distX is multiplied so as to scroll at a more //reasonable speed. 3 seems to work well, but is mostly arbitrary. int playbackSectionStart = (int) (distX * 3) + mManager.getLocation(); if(distX > 0) { int skip = mCut.skip(playbackSectionStart); if (skip != -1) { playbackSectionStart = skip + 2; } } else { int skip = mCut.skipReverse(playbackSectionStart); if(skip != Integer.MAX_VALUE){ playbackSectionStart = skip - 2; } } //Ensure scrolling cannot pass an end marker if markers are set. //The seek is to ensure responsiveness; without it the waveform will not scroll //at all if the user slides their finger too far if(SectionMarkers.getEndLocationMs() < playbackSectionStart){ mManager.seekTo(SectionMarkers.getEndLocationMs()); //Same as above but the check is to make sure scrolling will not go before a marker } else if(SectionMarkers.getStartLocationMs() > playbackSectionStart){ mManager.seekTo(SectionMarkers.getStartLocationMs()); } else { mManager.seekTo(playbackSectionStart); } //Redraw in order to display the waveform in the scrolled position mManager.updateUI(); } return true; } } //TODO: scale should adjust userscale in the WavVisualizer class class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener { @Override public boolean onScale(ScaleGestureDetector detector) { System.out.println("scaled"); return true; } } public void disableGestures(){ mGestures = false; } public void enableGestures(){ mGestures = true; } /** * Updates the start position in the marker object. 
If this means both markers are now set, * WavPlayer needs to set start and stop locations * @param startTimeMs time in milliseconds of where to place a start marker */ public void placeStartMarker(int startTimeMs){ SectionMarkers.setStartTime(startTimeMs, getWidth(), mManager.getAdjustedDuration(), mManager); //if both markers are set, then set the start and end markers in WavPlayer if(SectionMarkers.bothSet()){ setWavPlayerSelectionMarkers(); } //draw the placed marker invalidate(); redraw(); } /** * Updates the end position in the marker object. If this means both markers are now set, * WavPlayer needs to set start and end locations * @param endTimeMS time in milliseconds of where to place an end marker */ public void placeEndMarker(int endTimeMS){ SectionMarkers.setEndTime(endTimeMS, getWidth(), mManager.getAdjustedDuration(), mManager); if(SectionMarkers.bothSet()){ setWavPlayerSelectionMarkers(); } invalidate(); redraw(); } /** * Sets the start and end markers in the WavPlayer */ public void setWavPlayerSelectionMarkers(){ mManager.startSectionAt(SectionMarkers.getStartLocationMs()); mManager.stopSectionAt(SectionMarkers.getEndLocationMs()); } /** * Passes a touch event to the scroll and scale gesture detectors, if they exist * @param ev the gesture detected * @return returns true to signify the event was handled */ @Override public boolean onTouchEvent(MotionEvent ev) { if(mDetector!= null) { mDetector.onTouchEvent(ev); } if(sgd != null) { sgd.onTouchEvent(ev); } return true; } public void drawDbLines(Canvas c){ drawDbLines(c, -99); } public void drawDbLines(Canvas c, int db){ int db3 = dBLine(23197); int ndb3 = dBLine(-23197); int db6 = dBLine(16422); int ndb6 = dBLine(-16422); int db12 = dBLine(8230); int ndb12 = dBLine(-8230); int db18 = dBLine(4125); int ndb18 = dBLine(-4125); int db24 = dBLine(2067); int ndb24 = dBLine(-2067); c.drawRect(100, db6, getWidth()-100, db3, mPaintGrid); c.drawRect(100, ndb3, getWidth()-100, ndb6, mPaintGrid); c.drawRect(400, 
db12, getWidth()-400, db6, mPaintGrid); c.drawRect(400, ndb6, getWidth()-400, ndb12, mPaintGrid); c.drawRect(300, db18, getWidth()-300, db12, mPaintGrid); c.drawRect(300, ndb12, getWidth()-300, ndb18, mPaintGrid); c.drawRect(200, db24, getWidth()-200, db18, mPaintGrid); c.drawRect(200, ndb18, getWidth()-200, ndb24, mPaintGrid); c.drawRect(500, 10, getWidth()-500, ndb3, mPaintGrid); c.drawRect(500, db3, getWidth()-500, getHeight()-2, mPaintGrid); c.drawText(Integer.toString(-3), 0, db3, mPaintText); c.drawText(Integer.toString(-3), 0, ndb3, mPaintText); c.drawText(Integer.toString(-6), 0, db6, mPaintText); c.drawText(Integer.toString(-6), 0, ndb6, mPaintText); c.drawText(Integer.toString(-12), 0, db12, mPaintText); c.drawText(Integer.toString(-12), 0, ndb12, mPaintText); c.drawText(Integer.toString(-18), 0, db18, mPaintText); c.drawText(Integer.toString(-18), 0, ndb18, mPaintText); c.drawText(Integer.toString(-24), 0, db24, mPaintText); c.drawText(Integer.toString(-24), 0, ndb24, mPaintText); } private int dBLine(int val){ return (int)(val/ (double)AudioInfo.AMPLITUDE_RANGE * getHeight()/2 + getHeight()/2); } //TODO: make a paint variable for the playback line rather than use one and swap colors /** * Draws the playback line on the canvas passed in * @param canvas the canvas to be drawn to */ public void drawMarker(Canvas canvas){ //positions the playback line 1/8th of the total width from the left of the screen canvas.drawLine((canvas.getWidth() / 8), 0, (canvas.getWidth() / 8), canvas.getHeight(), mPaintPlayback); } /** * Constructs a WaveformView (which is a canvas view, meant for displaying the main waveform) * Sets up gesture detectors for interacting with the main waveform * @param c is the context of the activity running * @param attrs attributes to be passed to the super class */ public WaveformView(Context c, AttributeSet attrs) { super(c, attrs); mDetector = new GestureDetectorCompat(getContext(), new MyGestureListener()); sgd = new 
ScaleGestureDetector(getContext(), new ScaleListener()); init(); } /** * Sets the state of the view to draw waveforms from buffers that will be passed in * This implies that the app is recording * @param b True to draw from a buffer (from the mic), False to draw samples of the Waveform */ public void setDrawingFromBuffer(boolean b){ this.mDrawingFromBuffer = b; } //TODO: make a separate paint variable for the start and end markers, rather than swap colors //TODO: change a constant to match the number of seconds on the screen /** * Draws the start and end markers * @param c */ public void drawSectionMarkers(Canvas c){ //FIXME: need to change this to match number of seconds on the screen instead of constant 10 //compute the number of milliseconds in one pixel float mspp = 1000*10/(float)getWidth(); //offset refers to the location where playback actually starts (at the playback line) int offset = (getWidth() / 8); //compute the position on the screen to draw markers. Marker locations and mTimeToDraw //are both in ms float xLoc1 = offset + (mCut.reverseTimeAdjusted(mMarkerStartLoc) - mCut.reverseTimeAdjusted(mTimeToDraw))/mspp; float xLoc2 = offset + (mCut.reverseTimeAdjusted(mMarkerEndLoc) - mCut.reverseTimeAdjusted(mTimeToDraw))/mspp; c.drawLine(xLoc1, 0, xLoc1, getHeight(), mPaintStartMarker); c.drawLine(xLoc2, 0, xLoc2, getHeight(), mPaintEndMarker); c.drawRect(xLoc1, 0, xLoc2, getHeight(), mPaintHighlight); } /** * Sets the time in playback to draw this frame * This is set so that both the waveform and the markers make use of the same time, * rather than each querying WavPlayer when they get to draw their component. * @param timeMs Current time during playback, in milliseconds */ public void setTimeToDraw(int timeMs){ this.mTimeToDraw = timeMs; } //TODO: remove the semaphore, replace with either synchronous or try to remove concurrency /** * Main draw method that is called when the view is invalidated. * @param canvas The canvas which can be drawn on. 
Provided by Android as onDraw is not * called explicitly. */ @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); //DrawingFromBuffers will draw data received from the microphone during recording if(mDrawingFromBuffer){ drawDbLines(canvas, mDb); drawBuffer(canvas, mBuffer, AudioInfo.BLOCKSIZE); //Samples is a sampled section of the waveform extracted at mTimeToDraw } else if (mSamples != null ){ drawDbLines(canvas); try { drawWaveform(mSamples, canvas); drawMarker(canvas); } catch (Exception e) { e.printStackTrace(); } } //Creates a drawing loop; redraws only will occur if audio is playing redraw(); if(SectionMarkers.shouldDrawMarkers()){ drawSectionMarkers(canvas); } mManager.checkIfShouldStop(); //Determines whether the play or pause button should be rendered //This is done now that there is not a thread dedicated to drawing if(!mManager.isPlaying()){ mManager.enablePlay(); } } /** * Sets a byte buffer to be drawn to the screen * @param buffer a byte buffer containing 16 bit pcm data */ public synchronized void setBuffer(byte[] buffer){ mBuffer = buffer; } public synchronized void setDb(int db){ mDb = db; } //TODO: create a separate paint object for drawing the waveform /** * Draws a waveform from the buffer produced while recording * @param canvas the canvas to draw to * @param buffer the byte buffer containing 16 bit pcm data to draw * @param blocksize the size of a block of audio data; 2 for 16 bit mono PCM */ public synchronized void drawBuffer(Canvas canvas, byte[] buffer, int blocksize){ if (buffer == null || canvas == null) { return; } //convert PCM data in a byte array to a short array Short[] temp = new Short[buffer.length/blocksize]; int index = 0; for(int i = 0; i<buffer.length; i+=blocksize){ byte low = buffer[i]; byte hi = buffer[i + 1]; //PCM data is stored little endian temp[index] = (short)(((hi << 8) & 0x0000FF00) | (low & 0x000000FF)); index++; } int width = canvas.getWidth(); int height = canvas.getHeight(); double xScale = 
width/(index *.999); double yScale = height/65536.0; for(int i = 0; i < temp.length-1; i++){ // canvas.drawLine((int)(xScale*i), (int)((yScale*temp[i])+ height/2), // (int)(xScale*(i+1)), (int)((yScale*temp[i+1]) + height/2), mPaint); canvas.drawLine((int) (xScale * i), (int) U.getValueForScreen(temp[i], height), (int) (xScale * (i + 1)), (int) U.getValueForScreen(temp[i+1], height), mPaintWaveform); } this.postInvalidate(); } /** * Sets sampled waveform data to draw to the screen * @param samples sampled waveform data to draw */ public synchronized void setWaveformDataForPlayback(float[] samples){ this.mSamples = samples; } }
RecordingApp/app/src/main/java/wycliffeassociates/recordingapp/AudioVisualization/WaveformView.java
package wycliffeassociates.recordingapp.AudioVisualization; import android.content.Context; import android.graphics.Canvas; import android.support.v4.view.GestureDetectorCompat; import android.util.AttributeSet; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import wycliffeassociates.recordingapp.AudioInfo; import wycliffeassociates.recordingapp.AudioVisualization.Utils.U; import wycliffeassociates.recordingapp.Playback.Editing.CutOp; /** * A canvas view intended for use as the main waveform */ public class WaveformView extends CanvasView { private byte[] mBuffer; private boolean mDrawingFromBuffer = false; private float[] mSamples; private int mTimeToDraw; private int mMarkerStartLoc; private int mMarkerEndLoc; private ScaleGestureDetector sgd; private CutOp mCut; private boolean mGestures = false; public void setCut(CutOp cut){ mCut = cut; } /** * Sets the location (in time (ms)) for the start marker * @param markerStart start marker location in ms */ public void setMarkerToDrawStart(int markerStart) { this.mMarkerStartLoc = markerStart; } /** * Sets the location (in time (ms)) for the end marker * @param markerEnd end marker location in ms */ public void setMarkerToDrawEnd(int markerEnd) { this.mMarkerEndLoc = markerEnd; } /** * Detects gestures on the main canvas */ class MyGestureListener extends GestureDetector.SimpleOnGestureListener { /** * Detects if the user is scrolling the main waveform horizontally * @param distX refers to how far the user scrolled horizontally * @param distY is ignored for this use as we are only allowing horizontal scrolling * @param event1 not accessed, contains information about the start of the gesture * @param event2 not used, contains information about the end of the gesture * @return must be true for gesture detection */ @Override public boolean onScroll(MotionEvent event1, MotionEvent event2, float distX, float distY) { //Should only perform a scroll if the 
WavPlayer exists, since scrolling performs a seek if (mManager != null && mGestures) { //moves playback by the distance (distX is multiplied so as to scroll at a more //reasonable speed. 3 seems to work well, but is mostly arbitrary. int playbackSectionStart = (int) (distX * 3) + mManager.getLocation(); if(distX > 0) { int skip = mCut.skip(playbackSectionStart); if (skip != -1) { playbackSectionStart = skip + 2; } } else { int skip = mCut.skipReverse(playbackSectionStart); if(skip != Integer.MAX_VALUE){ playbackSectionStart = skip - 2; } } //Ensure scrolling cannot pass an end marker if markers are set. //The seek is to ensure responsiveness; without it the waveform will not scroll //at all if the user slides their finger too far if(SectionMarkers.getEndLocationMs() < playbackSectionStart){ mManager.seekTo(SectionMarkers.getEndLocationMs()); //Same as above but the check is to make sure scrolling will not go before a marker } else if(SectionMarkers.getStartLocationMs() > playbackSectionStart){ mManager.seekTo(SectionMarkers.getStartLocationMs()); } else { mManager.seekTo(playbackSectionStart); } //Redraw in order to display the waveform in the scrolled position mManager.updateUI(); } return true; } } //TODO: scale should adjust userscale in the WavVisualizer class class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener { @Override public boolean onScale(ScaleGestureDetector detector) { System.out.println("scaled"); return true; } } public void disableGestures(){ mGestures = false; } public void enableGestures(){ mGestures = true; } /** * Updates the start position in the marker object. 
If this means both markers are now set, * WavPlayer needs to set start and stop locations * @param startTimeMs time in milliseconds of where to place a start marker */ public void placeStartMarker(int startTimeMs){ SectionMarkers.setStartTime(startTimeMs, getWidth(), mManager.getAdjustedDuration(), mManager); //if both markers are set, then set the start and end markers in WavPlayer if(SectionMarkers.bothSet()){ setWavPlayerSelectionMarkers(); } //draw the placed marker invalidate(); redraw(); } /** * Updates the end position in the marker object. If this means both markers are now set, * WavPlayer needs to set start and end locations * @param endTimeMS time in milliseconds of where to place an end marker */ public void placeEndMarker(int endTimeMS){ SectionMarkers.setEndTime(endTimeMS, getWidth(), mManager.getAdjustedDuration(), mManager); if(SectionMarkers.bothSet()){ setWavPlayerSelectionMarkers(); } invalidate(); redraw(); } /** * Sets the start and end markers in the WavPlayer */ public void setWavPlayerSelectionMarkers(){ mManager.startSectionAt(SectionMarkers.getStartLocationMs()); mManager.stopSectionAt(SectionMarkers.getEndLocationMs()); } /** * Passes a touch event to the scroll and scale gesture detectors, if they exist * @param ev the gesture detected * @return returns true to signify the event was handled */ @Override public boolean onTouchEvent(MotionEvent ev) { if(mDetector!= null) { mDetector.onTouchEvent(ev); } if(sgd != null) { sgd.onTouchEvent(ev); } return true; } public void drawDbLines(Canvas c){ int db3 = dBLine(23197); int ndb3 = dBLine(-23197); c.drawLine(0, db3, getWidth(), db3, mPaintGrid); c.drawLine(0, ndb3, getWidth(), ndb3, mPaintGrid); c.drawText(Integer.toString(-3), 0, db3, mPaintText); c.drawText(Integer.toString(-3), 0, ndb3, mPaintText); int db6 = dBLine(16422); int ndb6 = dBLine(-16422); c.drawLine(0, db6, getWidth(), db6, mPaintGrid); c.drawLine(0, ndb6, getWidth(), ndb6, mPaintGrid); c.drawText(Integer.toString(-6), 0, db6, 
mPaintText); c.drawText(Integer.toString(-6), 0, ndb6, mPaintText); int db12 = dBLine(8230); int ndb12 = dBLine(-8230); c.drawLine(0, db12, getWidth(), db12, mPaintGrid); c.drawLine(0, ndb12, getWidth(), ndb12, mPaintGrid); c.drawText(Integer.toString(-12), 0, db12, mPaintText); c.drawText(Integer.toString(-12), 0, ndb12, mPaintText); int db18 = dBLine(4125); int ndb18 = dBLine(-4125); c.drawLine(0, db18, getWidth(), db18, mPaintGrid); c.drawLine(0, ndb18, getWidth(), ndb18, mPaintGrid); c.drawText(Integer.toString(-18), 0, db18, mPaintText); c.drawText(Integer.toString(-18), 0, ndb18, mPaintText); int db24 = dBLine(2067); int ndb24 = dBLine(-2067); c.drawLine(0, db24, getWidth(), db24, mPaintGrid); c.drawLine(0, ndb24, getWidth(), ndb24, mPaintGrid); c.drawText(Integer.toString(-24), 0, db24, mPaintText); c.drawText(Integer.toString(-24), 0, ndb24, mPaintText); } private int dBLine(int val){ return (int)(val/ (double)AudioInfo.AMPLITUDE_RANGE * getHeight()/2 + getHeight()/2); } //TODO: make a paint variable for the playback line rather than use one and swap colors /** * Draws the playback line on the canvas passed in * @param canvas the canvas to be drawn to */ public void drawMarker(Canvas canvas){ //positions the playback line 1/8th of the total width from the left of the screen canvas.drawLine((canvas.getWidth() / 8), 0, (canvas.getWidth() / 8), canvas.getHeight(), mPaintPlayback); } /** * Constructs a WaveformView (which is a canvas view, meant for displaying the main waveform) * Sets up gesture detectors for interacting with the main waveform * @param c is the context of the activity running * @param attrs attributes to be passed to the super class */ public WaveformView(Context c, AttributeSet attrs) { super(c, attrs); mDetector = new GestureDetectorCompat(getContext(), new MyGestureListener()); sgd = new ScaleGestureDetector(getContext(), new ScaleListener()); init(); } /** * Sets the state of the view to draw waveforms from buffers that will be passed in * 
This implies that the app is recording * @param b True to draw from a buffer (from the mic), False to draw samples of the Waveform */ public void setDrawingFromBuffer(boolean b){ this.mDrawingFromBuffer = b; } //TODO: make a separate paint variable for the start and end markers, rather than swap colors //TODO: change a constant to match the number of seconds on the screen /** * Draws the start and end markers * @param c */ public void drawSectionMarkers(Canvas c){ //FIXME: need to change this to match number of seconds on the screen instead of constant 10 //compute the number of milliseconds in one pixel float mspp = 1000*10/(float)getWidth(); //offset refers to the location where playback actually starts (at the playback line) int offset = (getWidth() / 8); //compute the position on the screen to draw markers. Marker locations and mTimeToDraw //are both in ms float xLoc1 = offset + (mCut.reverseTimeAdjusted(mMarkerStartLoc) - mCut.reverseTimeAdjusted(mTimeToDraw))/mspp; float xLoc2 = offset + (mCut.reverseTimeAdjusted(mMarkerEndLoc) - mCut.reverseTimeAdjusted(mTimeToDraw))/mspp; c.drawLine(xLoc1, 0, xLoc1, getHeight(), mPaintStartMarker); c.drawLine(xLoc2, 0, xLoc2, getHeight(), mPaintEndMarker); c.drawRect(xLoc1, 0, xLoc2, getHeight(), mPaintHighlight); } /** * Sets the time in playback to draw this frame * This is set so that both the waveform and the markers make use of the same time, * rather than each querying WavPlayer when they get to draw their component. * @param timeMs Current time during playback, in milliseconds */ public void setTimeToDraw(int timeMs){ this.mTimeToDraw = timeMs; } //TODO: remove the semaphore, replace with either synchronous or try to remove concurrency /** * Main draw method that is called when the view is invalidated. * @param canvas The canvas which can be drawn on. Provided by Android as onDraw is not * called explicitly. 
*/ @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); drawDbLines(canvas); //DrawingFromBuffers will draw data received from the microphone during recording if(mDrawingFromBuffer){ drawBuffer(canvas, mBuffer, AudioInfo.BLOCKSIZE); //Samples is a sampled section of the waveform extracted at mTimeToDraw } else if (mSamples != null ){ try { drawWaveform(mSamples, canvas); drawMarker(canvas); } catch (Exception e) { e.printStackTrace(); } } //Creates a drawing loop; redraws only will occur if audio is playing redraw(); if(SectionMarkers.shouldDrawMarkers()){ drawSectionMarkers(canvas); } mManager.checkIfShouldStop(); //Determines whether the play or pause button should be rendered //This is done now that there is not a thread dedicated to drawing if(!mManager.isPlaying()){ mManager.enablePlay(); } } /** * Sets a byte buffer to be drawn to the screen * @param buffer a byte buffer containing 16 bit pcm data */ public synchronized void setBuffer(byte[] buffer){ mBuffer = buffer; } //TODO: create a separate paint object for drawing the waveform /** * Draws a waveform from the buffer produced while recording * @param canvas the canvas to draw to * @param buffer the byte buffer containing 16 bit pcm data to draw * @param blocksize the size of a block of audio data; 2 for 16 bit mono PCM */ public synchronized void drawBuffer(Canvas canvas, byte[] buffer, int blocksize){ if (buffer == null || canvas == null) { return; } //convert PCM data in a byte array to a short array Short[] temp = new Short[buffer.length/blocksize]; int index = 0; for(int i = 0; i<buffer.length; i+=blocksize){ byte low = buffer[i]; byte hi = buffer[i + 1]; //PCM data is stored little endian temp[index] = (short)(((hi << 8) & 0x0000FF00) | (low & 0x000000FF)); index++; } int width = canvas.getWidth(); int height = canvas.getHeight(); double xScale = width/(index *.999); double yScale = height/65536.0; for(int i = 0; i < temp.length-1; i++){ // canvas.drawLine((int)(xScale*i), 
(int)((yScale*temp[i])+ height/2), // (int)(xScale*(i+1)), (int)((yScale*temp[i+1]) + height/2), mPaint); canvas.drawLine((int) (xScale * i), (int) U.getValueForScreen(temp[i], height), (int) (xScale * (i + 1)), (int) U.getValueForScreen(temp[i+1], height), mPaintWaveform); } this.postInvalidate(); } /** * Sets sampled waveform data to draw to the screen * @param samples sampled waveform data to draw */ public synchronized void setWaveformDataForPlayback(float[] samples){ this.mSamples = samples; } }
save work
RecordingApp/app/src/main/java/wycliffeassociates/recordingapp/AudioVisualization/WaveformView.java
save work
Java
mit
53c09e710f8c0f6d0c31111910ac325b412128fa
0
dktcoding/mrft,dktcoding/mrft
/* * MIT License * * Copyright (c) 2016-2018 Federico Vera <https://github.com/dktcoding> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.dkt.mrft.utils; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.dnd.DnDConstants; import java.awt.dnd.DropTarget; import java.awt.dnd.DropTargetDragEvent; import java.awt.dnd.DropTargetDropEvent; import java.awt.dnd.DropTargetEvent; import java.awt.dnd.DropTargetListener; import java.awt.event.HierarchyEvent; import java.awt.event.HierarchyListener; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.io.Reader; import java.net.URI; import java.util.ArrayList; import java.util.EventObject; import java.util.List; import java.util.TooManyListenersException; import javax.swing.BorderFactory; import javax.swing.JComponent; import javax.swing.border.Border; /** * This class makes it easy to drag and drop files from the operating * system to a Java program. Any {@link java.awt.Component} can be * dropped onto, but only {@link javax.swing.JComponent}s will indicate * the drop event with a changed border. * <p> * To use this class, construct a new {@code FileDrop} by passing * it the target component and a {@code Listener} to receive notification * when file(s) have been dropped. Here is an example:</p> * <p> * <code><pre> * JPanel myPanel = new JPanel(); * new FileDrop( myPanel, new FileDrop.Listener() * { public void filesDropped( java.io.File[] files ) * { * // handle file drop * ... * } // end filesDropped * }); // end FileDrop.Listener * </pre></code> * </p><p> * You can specify the border that will appear when files are being dragged by * calling the constructor with a {@link javax.swing.border.Border}. Only * {@link JComponent}s will show any indication with a border. 
* </p><p> * You can turn on some debugging features by passing a {@link PrintStream} * object (such as {@link System#out}) into the full constructor. A {@code null} * value will result in no extra debugging information being output. * </p> * * <p>I'm releasing this code into the Public Domain. Enjoy. * </p> * <p><em>Original author: Robert Harder, [email protected]</em></p> * <p>2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added.</p> * * @author Robert Harder * @author [email protected] * @version 1.0.1 */ public class FileDrop { private transient Border normalBorder; private transient DropTargetListener dropListener; /** Discover if the running JVM is modern enough to have drag and drop. */ private static volatile Boolean supportsDnD; // Default border color private static Color defaultBorderColor = new Color(0f, 0f, 1f, 0.25f); /** * Constructs a {@link FileDrop} with a default light-blue border * and, if <var>c</var> is a {@link java.awt.Container}, recursively * sets all elements contained within as drop targets, though only * the top level container will change borders. * * @param c Component on which files will be dropped. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final Component c, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), // Drag border true, // Recursive listener); } // end constructor /** * Constructor with a default border and the option to recursively set drop targets. * If your component is a <tt>java.awt.Container</tt>, then each of its children * components will also listen for drops, though only the parent will change borders. * * @param c Component on which files will be dropped. * @param recursive Recursively set children as drop targets. * @param listener Listens for <tt>filesDropped</tt>. 
* @since 1.0 */ public FileDrop( final Component c, final boolean recursive, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), // Drag border recursive, // Recursive listener); } // end constructor /** * Constructor with a default border and debugging optionally turned on. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param out PrintStream to record debugging info or null for no debugging. * @param c Component on which files will be dropped. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final PrintStream out, final Component c, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), false, // Recursive listener); } // end constructor /** * Constructor with a default border, debugging optionally turned on * and the option to recursively set drop targets. * If your component is a <tt>java.awt.Container</tt>, then each of its children * components will also listen for drops, though only the parent will change borders. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param out PrintStream to record debugging info or null for no debugging. * @param c Component on which files will be dropped. * @param recursive Recursively set children as drop targets. * @param listener Listens for <tt>filesDropped</tt>. 
* @since 1.0 */ public FileDrop( final PrintStream out, final Component c, final boolean recursive, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), // Drag border recursive, // Recursive listener); } // end constructor /** * Constructor with a specified border * * @param c Component on which files will be dropped. * @param dragBorder Border to use on <tt>JComponent</tt> when dragging occurs. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final Component c, final Border dragBorder, final Listener listener) { this(c, // Drop target dragBorder, // Drag border false, // Recursive listener); } // end constructor /** * Constructor with a specified border and debugging optionally turned on. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param out PrintStream to record debugging info or null for no debugging. * @param c Component on which files will be dropped. * @param dragBorder Border to use on <tt>JComponent</tt> when dragging occurs. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final PrintStream out, final Component c, final Border dragBorder, final Listener listener) { this(c, // Drop target dragBorder, // Drag border false, // Recursive listener); } // end constructor /** * Full constructor with a specified border and debugging optionally turned on. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param c Component on which files will be dropped. 
* @param dragBorder Border to use on <tt>JComponent</tt> when dragging occurs. * @param recursive Recursively set children as drop targets. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final Component c, final Border dragBorder, final boolean recursive, final Listener listener) { if (supportsDnD()) { // Make a drop listener dropListener = new DropTargetListener() { @Override public void dragEnter(DropTargetDragEvent evt) { // Is this an acceptable drag event? if (isDragOk(evt)) { // If it's a Swing component, set its border if (c instanceof JComponent) { javax.swing.JComponent jc = (JComponent) c; normalBorder = jc.getBorder(); jc.setBorder(dragBorder); } // end if: JComponent // Acknowledge that it's okay to enter evt.acceptDrag(DnDConstants.ACTION_COPY); } // end if: drag ok else { // Reject the drag event evt.rejectDrag(); } // end else: drag not ok } // end dragEnter @Override public void dragOver(DropTargetDragEvent evt) { // This is called continually as long as the mouse is // over the drag target. } // end dragOver @Override public void drop(DropTargetDropEvent evt) { try { // Get whatever was dropped Transferable tr = evt.getTransferable(); // Is it a file list? if (tr.isDataFlavorSupported(DataFlavor.javaFileListFlavor)) { // Say we'll take it. evt.acceptDrop(DnDConstants.ACTION_COPY); // Get a useful list @SuppressWarnings("unchecked") List<File> fileList = (List<File>) tr.getTransferData(DataFlavor.javaFileListFlavor); final File[] files = fileList.toArray(new File[fileList.size()]); // Alert listener to drop. if (listener != null) { listener.filesDropped(files); } // Mark that drop is completed. evt.getDropTargetContext().dropComplete(true); } // end if: file list else // this section will check for a reader flavor. { // Thanks, Nathan! // BEGIN 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. 
DataFlavor[] flavors = tr.getTransferDataFlavors(); boolean handled = false; for (DataFlavor flavor : flavors) { if (flavor.isRepresentationClassReader()) { // Say we'll take it. evt.acceptDrop(DnDConstants.ACTION_COPY); Reader reader = flavor.getReaderForText(tr); BufferedReader br = new BufferedReader(reader); if (listener != null) { listener.filesDropped(createFileArray(br)); } // Mark that drop is completed. evt.getDropTargetContext().dropComplete(true); handled = true; break; } } if (!handled) { evt.rejectDrop(); } // END 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. } // end else: not a file list } // end try catch (IOException | UnsupportedFlavorException io) { evt.rejectDrop(); } // end catch: UnsupportedFlavorException finally { // If it's a Swing component, reset its border if (c instanceof JComponent) { JComponent jc = (JComponent) c; jc.setBorder(normalBorder); } // end if: JComponent } // end finally } // end drop @Override public void dragExit(DropTargetEvent evt) { // If it's a Swing component, reset its border if (c instanceof JComponent) { JComponent jc = (JComponent) c; jc.setBorder(normalBorder); } // end if: JComponent } // end dragExit @Override public void dropActionChanged(DropTargetDragEvent evt) { // Is this an acceptable drag event? if (isDragOk(evt)) { evt.acceptDrag(DnDConstants.ACTION_COPY); } // end if: drag ok else { evt.rejectDrag(); } // end else: drag not ok } // end dropActionChanged }; // end DropTargetListener // Make the component (and possibly children) drop targets makeDropTarget(c, recursive); } // end if: supports dnd } // end constructor private static boolean supportsDnD() { // Static Boolean if (supportsDnD == null) { try { Class.forName("java.awt.dnd.DnDConstants"); supportsDnD = true; } // end try catch (Exception ex) { supportsDnD = false; } // end catch } // end if: first time through return supportsDnD; } // end supportsDnD // BEGIN 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. 
private static String ZERO_CHAR_STRING = "0"; private static File[] createFileArray(BufferedReader bReader) { try { ArrayList<File> list = new ArrayList<>(10); String line; while ((line = bReader.readLine()) != null) { try { // kde seems to append a 0 char to the end of the reader if (ZERO_CHAR_STRING.equals(line)) { continue; } list.add(new File(new URI(line))); } catch (Exception ex) { } } return list.toArray(new File[list.size()]); } catch (IOException ex) { } return new File[0]; } // END 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. private void makeDropTarget(final Component c, boolean recursive) { // Make drop target final DropTarget dt = new DropTarget(); try { dt.addDropTargetListener(dropListener); } // end try catch (TooManyListenersException e) { } // end catch // Listen for hierarchy changes and remove the drop target when the parent gets cleared out. c.addHierarchyListener(new HierarchyListener() { @Override public void hierarchyChanged(HierarchyEvent evt) { java.awt.Component parent = c.getParent(); if (parent == null) { c.setDropTarget(null); } // end if: null parent else { DropTarget dt = new DropTarget(c, dropListener); } // end else: parent not null } // end hierarchyChanged }); // end hierarchy listener if (c.getParent() != null) { DropTarget dtt = new DropTarget(c, dropListener); } if (recursive && (c instanceof Container)) { // Get the container Container cont = (Container) c; // Get it's components Component[] comps = cont.getComponents(); for (Component comp : comps) { makeDropTarget(comp, recursive); } } // end if: recursively set components as listener } // end dropListener /** Determine if the dragged data is a file list. 
*/ private boolean isDragOk(final DropTargetDragEvent evt) { boolean ok = false; // Get data flavors being dragged DataFlavor[] flavors = evt.getCurrentDataFlavors(); // See if any of the flavors are a file list int i = 0; while (!ok && i < flavors.length) { // BEGIN 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. // Is the flavor a file list? final DataFlavor curFlavor = flavors[i]; if (curFlavor.equals(DataFlavor.javaFileListFlavor) || curFlavor.isRepresentationClassReader()) { ok = true; } // END 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. i++; } // end while: through flavors return ok; } // end isDragOk /** * Removes the drag-and-drop hooks from the component and optionally * from the all children. You should call this if you add and remove * components after you've set up the drag-and-drop. * This will recursively unregister all components contained within * <var>c</var> if <var>c</var> is a {@link java.awt.Container}. * * @param c The component to unregister as a drop target * @return * @since 1.0 */ public static boolean remove(Component c) { return remove(c, true); } // end remove /** * Removes the drag-and-drop hooks from the component and optionally * from the all children. You should call this if you add and remove * components after you've set up the drag-and-drop. * * @param c The component to unregister * @param recursive Recursively unregister components within a container * @return * @since 1.0 */ public static boolean remove(Component c, boolean recursive) { // Make sure we support dnd. 
if (supportsDnD()) { c.setDropTarget(null); if (recursive && (c instanceof Container)) { Component[] comps = ((Container) c).getComponents(); for (Component comp : comps) { remove(comp, recursive); } return true; } // end if: recursive else { return false; } } // end if: supports DnD else { return false; } } // end remove /* ******** I N N E R I N T E R F A C E L I S T E N E R ******** */ /** * Implement this inner interface to listen for when files are dropped. For example * your class declaration may begin like this: * <code><pre> * public class MyClass implements FileDrop.Listener * ... * public void filesDropped( java.io.File[] files ) * { * ... * } // end filesDropped * ... * </pre></code> * * @since 1.1 */ public static interface Listener { /** * This method is called when files have been successfully dropped. * * @param files An array of <tt>File</tt>s that were dropped. * @since 1.0 */ public abstract void filesDropped(File[] files); } // end inner-interface Listener /* ******** I N N E R C L A S S ******** */ /** * This is the event that is passed to the * {@link FileDropListener#filesDropped filesDropped(...)} method in * your {@link FileDropListener} when files are dropped onto * a registered drop target. * * <p>I'm releasing this code into the Public Domain. Enjoy.</p> * * @author Robert Harder * @author [email protected] * @version 1.2 */ public static class Event extends EventObject { private static final long serialVersionUID = 689583726879415L; private File[] files; /** * Constructs an {@link Event} with the array * of files that were dropped and the * {@link FileDrop} that initiated the event. * * @param files The array of files that were dropped * @param source * @source The event source * @since 1.1 */ public Event(File[] files, Object source) { super(source); this.files = files; } // end constructor /** * Returns an array of files that were dropped on a * registered drop target. 
* * @return array of files that were dropped * @since 1.1 */ public File[] getFiles() { return files; } // end getFiles } // end inner class Event /* ******** I N N E R C L A S S ******** */ /** * At last an easy way to encapsulate your custom objects for dragging and dropping * in your Java programs! * When you need to create a {@link java.awt.datatransfer.Transferable} object, * use this class to wrap your object. * For example: * <pre><code> * ... * MyCoolClass myObj = new MyCoolClass(); * Transferable xfer = new TransferableObject( myObj ); * ... * </code></pre> * Or if you need to know when the data was actually dropped, like when you're * moving data out of a list, say, you can use the {@link TransferableObject.Fetcher} * inner class to return your object Just in Time. * For example: * <pre><code> * ... * final MyCoolClass myObj = new MyCoolClass(); * * TransferableObject.Fetcher fetcher = new TransferableObject.Fetcher() * { public Object getObject(){ return myObj; } * }; // end fetcher * * Transferable xfer = new TransferableObject( fetcher ); * ... * </code></pre> * * The {@link java.awt.datatransfer.DataFlavor} associated with * {@link TransferableObject} has the representation class * <tt>net.iharder.dnd.TransferableObject.class</tt> and MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * This data flavor is accessible via the static * {@link #DATA_FLAVOR} property. * * * <p>I'm releasing this code into the Public Domain. Enjoy.</p> * * @author Robert Harder * @author [email protected] * @version 1.2 */ public static class TransferableObject implements Transferable { /** * The MIME type for {@link #DATA_FLAVOR} is * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. 
* * @since 1.1 */ public final static String MIME_TYPE = "application/x-net.iharder.dnd.TransferableObject"; /** * The default {@link java.awt.datatransfer.DataFlavor} for * {@link TransferableObject} has the representation class * <tt>net.iharder.dnd.TransferableObject.class</tt> * and the MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * * @since 1.1 */ public final static DataFlavor DATA_FLAVOR = new DataFlavor(FileDrop.TransferableObject.class, MIME_TYPE); private Fetcher fetcher; private Object data; private DataFlavor customFlavor; /** * Creates a new {@link TransferableObject} that wraps <var>data</var>. * Along with the {@link #DATA_FLAVOR} associated with this class, * this creates a custom data flavor with a representation class * determined from <code>data.getClass()</code> and the MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * * @param data The data to transfer * @since 1.1 */ public TransferableObject(Object data) { this.data = data; this.customFlavor = new DataFlavor(data.getClass(), MIME_TYPE); } // end constructor /** * Creates a new {@link TransferableObject} that will return the * object that is returned by <var>fetcher</var>. * No custom data flavor is set other than the default * {@link #DATA_FLAVOR}. * * @see Fetcher * @param fetcher The {@link Fetcher} that will return the data object * @since 1.1 */ public TransferableObject(Fetcher fetcher) { this.fetcher = fetcher; } // end constructor /** * Creates a new {@link TransferableObject} that will return the * object that is returned by <var>fetcher</var>. * Along with the {@link #DATA_FLAVOR} associated with this class, * this creates a custom data flavor with a representation class <var>dataClass</var> * and the MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. 
* * @see Fetcher * @param dataClass The {@link java.lang.Class} to use in the custom data flavor * @param fetcher The {@link Fetcher} that will return the data object * @since 1.1 */ public TransferableObject(Class<?> dataClass, Fetcher fetcher) { this.fetcher = fetcher; this.customFlavor = new DataFlavor(dataClass, MIME_TYPE); } // end constructor /** * Returns the custom {@link java.awt.datatransfer.DataFlavor} associated * with the encapsulated object or <tt>null</tt> if the {@link Fetcher} * constructor was used without passing a {@link java.lang.Class}. * * @return The custom data flavor for the encapsulated object * @since 1.1 */ public DataFlavor getCustomDataFlavor() { return customFlavor; } // end getCustomDataFlavor /* ******** T R A N S F E R A B L E M E T H O D S ******** */ /** * Returns a two- or three-element array containing first * the custom data flavor, if one was created in the constructors, * second the default {@link #DATA_FLAVOR} associated with * {@link TransferableObject}, and third the * {@link java.awt.datatransfer.DataFlavor.stringFlavor}. * * @return An array of supported data flavors * @since 1.1 */ @Override public DataFlavor[] getTransferDataFlavors() { if (customFlavor != null) { return new DataFlavor[]{customFlavor, DATA_FLAVOR, DataFlavor.stringFlavor }; // end flavors array } else { return new DataFlavor[]{DATA_FLAVOR, DataFlavor.stringFlavor }; // end flavors array } } // end getTransferDataFlavors /** * Returns the data encapsulated in this {@link TransferableObject}. * If the {@link Fetcher} constructor was used, then this is when * the {@link Fetcher#getObject getObject()} method will be called. * If the requested data flavor is not supported, then the * {@link Fetcher#getObject getObject()} method will not be called. 
* * @param flavor The data flavor for the data to return * @return The dropped data * @throws java.io.IOException * @since 1.1 */ @Override public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException, IOException { // Native object if (flavor.equals(DATA_FLAVOR)) { return fetcher == null ? data : fetcher.getObject(); } // String if (flavor.equals(DataFlavor.stringFlavor)) { return fetcher == null ? data.toString() : fetcher.getObject().toString(); } // We can't do anything else throw new UnsupportedFlavorException(flavor); } // end getTransferData /** * Returns <tt>true</tt> if <var>flavor</var> is one of the supported * flavors. Flavors are supported using the <code>equals(...)</code> method. * * @param flavor The data flavor to check * @return Whether or not the flavor is supported * @since 1.1 */ @Override public boolean isDataFlavorSupported(DataFlavor flavor) { // Native object if (flavor.equals(DATA_FLAVOR)) { return true; } return flavor.equals(DataFlavor.stringFlavor); } // end isDataFlavorSupported /* ******** I N N E R I N T E R F A C E F E T C H E R ******** */ /** * Instead of passing your data directly to the {@link TransferableObject} * constructor, you may want to know exactly when your data was received * in case you need to remove it from its source (or do anyting else to it). * When the {@link #getTransferData getTransferData(...)} method is called * on the {@link TransferableObject}, the {@link Fetcher}'s * {@link #getObject getObject()} method will be called. * * @author Robert Harder * @copyright 2001 * @version 1.1 * @since 1.1 */ public static interface Fetcher { /** * Return the object being encapsulated in the * {@link TransferableObject}. * * @return The dropped object * @since 1.1 */ public abstract Object getObject(); } // end inner interface Fetcher } // end class TransferableObject } // end class FileDrop
// ==== src/com/dkt/mrft/utils/FileDrop.java ====
/* * MIT License * * Copyright (c) 2016 Federico Vera <https://github.com/dktcoding> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.dkt.mrft.utils; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.dnd.DnDConstants; import java.awt.dnd.DropTarget; import java.awt.dnd.DropTargetDragEvent; import java.awt.dnd.DropTargetDropEvent; import java.awt.dnd.DropTargetEvent; import java.awt.dnd.DropTargetListener; import java.awt.event.HierarchyEvent; import java.awt.event.HierarchyListener; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.PrintStream; import java.io.Reader; import java.net.URI; import java.util.ArrayList; import java.util.EventObject; import java.util.List; import java.util.TooManyListenersException; import javax.swing.BorderFactory; import javax.swing.JComponent; import javax.swing.border.Border; /** * This class makes it easy to drag and drop files from the operating * system to a Java program. Any {@link java.awt.Component} can be * dropped onto, but only {@link javax.swing.JComponent}s will indicate * the drop event with a changed border. * <p> * To use this class, construct a new {@code FileDrop} by passing * it the target component and a {@code Listener} to receive notification * when file(s) have been dropped. Here is an example:</p> * <p> * <code><pre> * JPanel myPanel = new JPanel(); * new FileDrop( myPanel, new FileDrop.Listener() * { public void filesDropped( java.io.File[] files ) * { * // handle file drop * ... * } // end filesDropped * }); // end FileDrop.Listener * </pre></code> * </p><p> * You can specify the border that will appear when files are being dragged by * calling the constructor with a {@link javax.swing.border.Border}. Only * {@link JComponent}s will show any indication with a border. 
* </p><p> * You can turn on some debugging features by passing a {@link PrintStream} * object (such as {@link System#out}) into the full constructor. A {@code null} * value will result in no extra debugging information being output. * </p> * * <p>I'm releasing this code into the Public Domain. Enjoy. * </p> * <p><em>Original author: Robert Harder, [email protected]</em></p> * <p>2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added.</p> * * @author Robert Harder * @author [email protected] * @version 1.0.1 */ public class FileDrop { private transient Border normalBorder; private transient DropTargetListener dropListener; /** Discover if the running JVM is modern enough to have drag and drop. */ private static volatile Boolean supportsDnD; // Default border color private static Color defaultBorderColor = new Color(0f, 0f, 1f, 0.25f); /** * Constructs a {@link FileDrop} with a default light-blue border * and, if <var>c</var> is a {@link java.awt.Container}, recursively * sets all elements contained within as drop targets, though only * the top level container will change borders. * * @param c Component on which files will be dropped. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final Component c, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), // Drag border true, // Recursive listener); } // end constructor /** * Constructor with a default border and the option to recursively set drop targets. * If your component is a <tt>java.awt.Container</tt>, then each of its children * components will also listen for drops, though only the parent will change borders. * * @param c Component on which files will be dropped. * @param recursive Recursively set children as drop targets. * @param listener Listens for <tt>filesDropped</tt>. 
* @since 1.0 */ public FileDrop( final Component c, final boolean recursive, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), // Drag border recursive, // Recursive listener); } // end constructor /** * Constructor with a default border and debugging optionally turned on. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param out PrintStream to record debugging info or null for no debugging. * @param c Component on which files will be dropped. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final PrintStream out, final Component c, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), false, // Recursive listener); } // end constructor /** * Constructor with a default border, debugging optionally turned on * and the option to recursively set drop targets. * If your component is a <tt>java.awt.Container</tt>, then each of its children * components will also listen for drops, though only the parent will change borders. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param out PrintStream to record debugging info or null for no debugging. * @param c Component on which files will be dropped. * @param recursive Recursively set children as drop targets. * @param listener Listens for <tt>filesDropped</tt>. 
* @since 1.0 */ public FileDrop( final PrintStream out, final Component c, final boolean recursive, final Listener listener) { this(c, // Drop target BorderFactory.createMatteBorder(2, 2, 2, 2, defaultBorderColor), // Drag border recursive, // Recursive listener); } // end constructor /** * Constructor with a specified border * * @param c Component on which files will be dropped. * @param dragBorder Border to use on <tt>JComponent</tt> when dragging occurs. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final Component c, final Border dragBorder, final Listener listener) { this(c, // Drop target dragBorder, // Drag border false, // Recursive listener); } // end constructor /** * Constructor with a specified border and debugging optionally turned on. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param out PrintStream to record debugging info or null for no debugging. * @param c Component on which files will be dropped. * @param dragBorder Border to use on <tt>JComponent</tt> when dragging occurs. * @param listener Listens for <tt>filesDropped</tt>. * @since 1.0 */ public FileDrop( final PrintStream out, final Component c, final Border dragBorder, final Listener listener) { this(c, // Drop target dragBorder, // Drag border false, // Recursive listener); } // end constructor /** * Full constructor with a specified border and debugging optionally turned on. * With Debugging turned on, more status messages will be displayed to * <tt>out</tt>. A common way to use this constructor is with * <tt>System.out</tt> or <tt>System.err</tt>. A <tt>null</tt> value for * the parameter <tt>out</tt> will result in no debugging output. * * @param c Component on which files will be dropped. 
     * @param dragBorder Border to use on <tt>JComponent</tt> when dragging occurs.
     * @param recursive Recursively set children as drop targets.
     * @param listener Listens for <tt>filesDropped</tt>.
     * @since 1.0
     */
    public FileDrop(
            final Component c,
            final Border dragBorder,
            final boolean recursive,
            final Listener listener) {
        if (supportsDnD()) {
            // Build the single listener instance that is attached (directly or
            // recursively) to every registered component by makeDropTarget.
            dropListener = new DropTargetListener() {
                @Override
                public void dragEnter(DropTargetDragEvent evt) {
                    // Is this an acceptable drag event?
                    if (isDragOk(evt)) {
                        // Swing components get a visual cue: remember the
                        // current border and swap in the drag border.
                        if (c instanceof JComponent) {
                            javax.swing.JComponent jc = (JComponent) c;
                            normalBorder = jc.getBorder();
                            jc.setBorder(dragBorder);
                        } // end if: JComponent
                        // Acknowledge that it's okay to enter
                        evt.acceptDrag(DnDConstants.ACTION_COPY);
                    } // end if: drag ok
                    else {
                        // Reject the drag event
                        evt.rejectDrag();
                    } // end else: drag not ok
                } // end dragEnter

                @Override
                public void dragOver(DropTargetDragEvent evt) {
                    // Called continually while the mouse is over the drag
                    // target; no per-move work is needed.
                } // end dragOver

                @Override
                public void drop(DropTargetDropEvent evt) {
                    try {
                        // Get whatever was dropped
                        Transferable tr = evt.getTransferable();

                        // Preferred path: a native file-list flavor
                        // (Windows / macOS drops).
                        if (tr.isDataFlavorSupported(DataFlavor.javaFileListFlavor)) {
                            // Say we'll take it.
                            evt.acceptDrop(DnDConstants.ACTION_COPY);

                            @SuppressWarnings("unchecked")
                            List<File> fileList =
                                    (List<File>) tr.getTransferData(DataFlavor.javaFileListFlavor);
                            final File[] files = fileList.toArray(new File[fileList.size()]);

                            // Alert listener to drop.
                            if (listener != null) {
                                listener.filesDropped(files);
                            }

                            // Mark that drop is completed.
                            evt.getDropTargetContext().dropComplete(true);
                        } // end if: file list
                        else // Fallback: reader-backed flavor (Linux KDE/Gnome
                             // drops deliver newline-separated URIs).
                        {
                            // BEGIN 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added.
                            DataFlavor[] flavors = tr.getTransferDataFlavors();
                            boolean handled = false;
                            for (DataFlavor flavor : flavors) {
                                if (flavor.isRepresentationClassReader()) {
                                    // Say we'll take it.
                                    evt.acceptDrop(DnDConstants.ACTION_COPY);
                                    Reader reader = flavor.getReaderForText(tr);
                                    BufferedReader br = new BufferedReader(reader);
                                    if (listener != null) {
                                        listener.filesDropped(createFileArray(br));
                                    }
                                    // Mark that drop is completed.
                                    evt.getDropTargetContext().dropComplete(true);
                                    handled = true;
                                    break;
                                }
                            }
                            if (!handled) {
                                evt.rejectDrop();
                            }
                            // END 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added.
                        } // end else: not a file list
                    } // end try
                    catch (IOException | UnsupportedFlavorException io) {
                        // Transfer failed mid-drop: refuse it.
                        evt.rejectDrop();
                    } // end catch
                    finally {
                        // Always restore the pre-drag border on Swing targets.
                        if (c instanceof JComponent) {
                            JComponent jc = (JComponent) c;
                            jc.setBorder(normalBorder);
                        } // end if: JComponent
                    } // end finally
                } // end drop

                @Override
                public void dragExit(DropTargetEvent evt) {
                    // Drag left the component: restore the original border.
                    if (c instanceof JComponent) {
                        JComponent jc = (JComponent) c;
                        jc.setBorder(normalBorder);
                    } // end if: JComponent
                } // end dragExit

                @Override
                public void dropActionChanged(DropTargetDragEvent evt) {
                    // Is this an acceptable drag event?
if (isDragOk(evt)) { evt.acceptDrag(DnDConstants.ACTION_COPY); } // end if: drag ok else { evt.rejectDrag(); } // end else: drag not ok } // end dropActionChanged }; // end DropTargetListener // Make the component (and possibly children) drop targets makeDropTarget(c, recursive); } // end if: supports dnd else { //FileDrop: Drag and drop is not supported with this JVM } // end else: does not support DnD } // end constructor private static boolean supportsDnD() { // Static Boolean if (supportsDnD == null) { try { Class.forName("java.awt.dnd.DnDConstants"); supportsDnD = true; } // end try catch (Exception ex) { supportsDnD = false; } // end catch } // end if: first time through return supportsDnD; } // end supportsDnD // BEGIN 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. private static String ZERO_CHAR_STRING = "0"; private static File[] createFileArray(BufferedReader bReader) { try { ArrayList<File> list = new ArrayList<>(10); String line; while ((line = bReader.readLine()) != null) { try { // kde seems to append a 0 char to the end of the reader if (ZERO_CHAR_STRING.equals(line)) { continue; } list.add(new File(new URI(line))); } catch (Exception ex) { } } return list.toArray(new File[list.size()]); } catch (IOException ex) { } return new File[0]; } // END 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. private void makeDropTarget(final Component c, boolean recursive) { // Make drop target final DropTarget dt = new DropTarget(); try { dt.addDropTargetListener(dropListener); } // end try catch (TooManyListenersException e) { } // end catch // Listen for hierarchy changes and remove the drop target when the parent gets cleared out. 
c.addHierarchyListener(new HierarchyListener() { @Override public void hierarchyChanged(HierarchyEvent evt) { java.awt.Component parent = c.getParent(); if (parent == null) { c.setDropTarget(null); } // end if: null parent else { DropTarget dt = new DropTarget(c, dropListener); } // end else: parent not null } // end hierarchyChanged }); // end hierarchy listener if (c.getParent() != null) { DropTarget dtt = new DropTarget(c, dropListener); } if (recursive && (c instanceof Container)) { // Get the container Container cont = (Container) c; // Get it's components Component[] comps = cont.getComponents(); for (Component comp : comps) { makeDropTarget(comp, recursive); } } // end if: recursively set components as listener } // end dropListener /** Determine if the dragged data is a file list. */ private boolean isDragOk(final DropTargetDragEvent evt) { boolean ok = false; // Get data flavors being dragged DataFlavor[] flavors = evt.getCurrentDataFlavors(); // See if any of the flavors are a file list int i = 0; while (!ok && i < flavors.length) { // BEGIN 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. // Is the flavor a file list? final DataFlavor curFlavor = flavors[i]; if (curFlavor.equals(DataFlavor.javaFileListFlavor) || curFlavor.isRepresentationClassReader()) { ok = true; } // END 2007-09-12 Nathan Blomquist -- Linux (KDE/Gnome) support added. i++; } // end while: through flavors return ok; } // end isDragOk /** * Removes the drag-and-drop hooks from the component and optionally * from the all children. You should call this if you add and remove * components after you've set up the drag-and-drop. * This will recursively unregister all components contained within * <var>c</var> if <var>c</var> is a {@link java.awt.Container}. 
* * @param c The component to unregister as a drop target * @return * @since 1.0 */ public static boolean remove(Component c) { return remove(c, true); } // end remove /** * Removes the drag-and-drop hooks from the component and optionally * from the all children. You should call this if you add and remove * components after you've set up the drag-and-drop. * * @param c The component to unregister * @param recursive Recursively unregister components within a container * @return * @since 1.0 */ public static boolean remove(Component c, boolean recursive) { // Make sure we support dnd. if (supportsDnD()) { c.setDropTarget(null); if (recursive && (c instanceof Container)) { Component[] comps = ((Container) c).getComponents(); for (Component comp : comps) { remove(comp, recursive); } return true; } // end if: recursive else { return false; } } // end if: supports DnD else { return false; } } // end remove /* ******** I N N E R I N T E R F A C E L I S T E N E R ******** */ /** * Implement this inner interface to listen for when files are dropped. For example * your class declaration may begin like this: * <code><pre> * public class MyClass implements FileDrop.Listener * ... * public void filesDropped( java.io.File[] files ) * { * ... * } // end filesDropped * ... * </pre></code> * * @since 1.1 */ public static interface Listener { /** * This method is called when files have been successfully dropped. * * @param files An array of <tt>File</tt>s that were dropped. * @since 1.0 */ public abstract void filesDropped(File[] files); } // end inner-interface Listener /* ******** I N N E R C L A S S ******** */ /** * This is the event that is passed to the * {@link FileDropListener#filesDropped filesDropped(...)} method in * your {@link FileDropListener} when files are dropped onto * a registered drop target. * * <p>I'm releasing this code into the Public Domain. 
Enjoy.</p> * * @author Robert Harder * @author [email protected] * @version 1.2 */ public static class Event extends EventObject { private static final long serialVersionUID = 689583726879415L; private File[] files; /** * Constructs an {@link Event} with the array * of files that were dropped and the * {@link FileDrop} that initiated the event. * * @param files The array of files that were dropped * @param source * @source The event source * @since 1.1 */ public Event(File[] files, Object source) { super(source); this.files = files; } // end constructor /** * Returns an array of files that were dropped on a * registered drop target. * * @return array of files that were dropped * @since 1.1 */ public File[] getFiles() { return files; } // end getFiles } // end inner class Event /* ******** I N N E R C L A S S ******** */ /** * At last an easy way to encapsulate your custom objects for dragging and dropping * in your Java programs! * When you need to create a {@link java.awt.datatransfer.Transferable} object, * use this class to wrap your object. * For example: * <pre><code> * ... * MyCoolClass myObj = new MyCoolClass(); * Transferable xfer = new TransferableObject( myObj ); * ... * </code></pre> * Or if you need to know when the data was actually dropped, like when you're * moving data out of a list, say, you can use the {@link TransferableObject.Fetcher} * inner class to return your object Just in Time. * For example: * <pre><code> * ... * final MyCoolClass myObj = new MyCoolClass(); * * TransferableObject.Fetcher fetcher = new TransferableObject.Fetcher() * { public Object getObject(){ return myObj; } * }; // end fetcher * * Transferable xfer = new TransferableObject( fetcher ); * ... * </code></pre> * * The {@link java.awt.datatransfer.DataFlavor} associated with * {@link TransferableObject} has the representation class * <tt>net.iharder.dnd.TransferableObject.class</tt> and MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. 
* This data flavor is accessible via the static * {@link #DATA_FLAVOR} property. * * * <p>I'm releasing this code into the Public Domain. Enjoy.</p> * * @author Robert Harder * @author [email protected] * @version 1.2 */ public static class TransferableObject implements Transferable { /** * The MIME type for {@link #DATA_FLAVOR} is * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * * @since 1.1 */ public final static String MIME_TYPE = "application/x-net.iharder.dnd.TransferableObject"; /** * The default {@link java.awt.datatransfer.DataFlavor} for * {@link TransferableObject} has the representation class * <tt>net.iharder.dnd.TransferableObject.class</tt> * and the MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * * @since 1.1 */ public final static DataFlavor DATA_FLAVOR = new DataFlavor(FileDrop.TransferableObject.class, MIME_TYPE); private Fetcher fetcher; private Object data; private DataFlavor customFlavor; /** * Creates a new {@link TransferableObject} that wraps <var>data</var>. * Along with the {@link #DATA_FLAVOR} associated with this class, * this creates a custom data flavor with a representation class * determined from <code>data.getClass()</code> and the MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * * @param data The data to transfer * @since 1.1 */ public TransferableObject(Object data) { this.data = data; this.customFlavor = new DataFlavor(data.getClass(), MIME_TYPE); } // end constructor /** * Creates a new {@link TransferableObject} that will return the * object that is returned by <var>fetcher</var>. * No custom data flavor is set other than the default * {@link #DATA_FLAVOR}. * * @see Fetcher * @param fetcher The {@link Fetcher} that will return the data object * @since 1.1 */ public TransferableObject(Fetcher fetcher) { this.fetcher = fetcher; } // end constructor /** * Creates a new {@link TransferableObject} that will return the * object that is returned by <var>fetcher</var>. 
* Along with the {@link #DATA_FLAVOR} associated with this class, * this creates a custom data flavor with a representation class <var>dataClass</var> * and the MIME type * <tt>application/x-net.iharder.dnd.TransferableObject</tt>. * * @see Fetcher * @param dataClass The {@link java.lang.Class} to use in the custom data flavor * @param fetcher The {@link Fetcher} that will return the data object * @since 1.1 */ public TransferableObject(Class<?> dataClass, Fetcher fetcher) { this.fetcher = fetcher; this.customFlavor = new DataFlavor(dataClass, MIME_TYPE); } // end constructor /** * Returns the custom {@link java.awt.datatransfer.DataFlavor} associated * with the encapsulated object or <tt>null</tt> if the {@link Fetcher} * constructor was used without passing a {@link java.lang.Class}. * * @return The custom data flavor for the encapsulated object * @since 1.1 */ public DataFlavor getCustomDataFlavor() { return customFlavor; } // end getCustomDataFlavor /* ******** T R A N S F E R A B L E M E T H O D S ******** */ /** * Returns a two- or three-element array containing first * the custom data flavor, if one was created in the constructors, * second the default {@link #DATA_FLAVOR} associated with * {@link TransferableObject}, and third the * {@link java.awt.datatransfer.DataFlavor.stringFlavor}. * * @return An array of supported data flavors * @since 1.1 */ @Override public DataFlavor[] getTransferDataFlavors() { if (customFlavor != null) { return new DataFlavor[]{customFlavor, DATA_FLAVOR, DataFlavor.stringFlavor }; // end flavors array } else { return new DataFlavor[]{DATA_FLAVOR, DataFlavor.stringFlavor }; // end flavors array } } // end getTransferDataFlavors /** * Returns the data encapsulated in this {@link TransferableObject}. * If the {@link Fetcher} constructor was used, then this is when * the {@link Fetcher#getObject getObject()} method will be called. 
* If the requested data flavor is not supported, then the * {@link Fetcher#getObject getObject()} method will not be called. * * @param flavor The data flavor for the data to return * @return The dropped data * @throws java.io.IOException * @since 1.1 */ @Override public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException, IOException { // Native object if (flavor.equals(DATA_FLAVOR)) { return fetcher == null ? data : fetcher.getObject(); } // String if (flavor.equals(DataFlavor.stringFlavor)) { return fetcher == null ? data.toString() : fetcher.getObject().toString(); } // We can't do anything else throw new UnsupportedFlavorException(flavor); } // end getTransferData /** * Returns <tt>true</tt> if <var>flavor</var> is one of the supported * flavors. Flavors are supported using the <code>equals(...)</code> method. * * @param flavor The data flavor to check * @return Whether or not the flavor is supported * @since 1.1 */ @Override public boolean isDataFlavorSupported(DataFlavor flavor) { // Native object if (flavor.equals(DATA_FLAVOR)) { return true; } // String if (flavor.equals(DataFlavor.stringFlavor)) { return true; } // We can't do anything else return false; } // end isDataFlavorSupported /* ******** I N N E R I N T E R F A C E F E T C H E R ******** */ /** * Instead of passing your data directly to the {@link TransferableObject} * constructor, you may want to know exactly when your data was received * in case you need to remove it from its source (or do anyting else to it). * When the {@link #getTransferData getTransferData(...)} method is called * on the {@link TransferableObject}, the {@link Fetcher}'s * {@link #getObject getObject()} method will be called. * * @author Robert Harder * @copyright 2001 * @version 1.1 * @since 1.1 */ public static interface Fetcher { /** * Return the object being encapsulated in the * {@link TransferableObject}. 
* * @return The dropped object * @since 1.1 */ public abstract Object getObject(); } // end inner interface Fetcher } // end class TransferableObject } // end class FileDrop
Remove empty else{} and redundant if
src/com/dkt/mrft/utils/FileDrop.java
Remove empty else{} and redundant if
Java
mit
df8ded297a67264c2e1a09bc34419c66f13cf06f
0
KehxStudios/Atlas,KehxStudios/Atlas,KehxStudios/Atlas,KehxStudios/Atlas
/******************************************************************************* * Copyright 2017 See AUTHORS file. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
******************************************************************************/ package com.kehxstudios.atlas.managers; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import com.kehxstudios.atlas.components.ClickableComponent; import com.kehxstudios.atlas.components.Component; import com.kehxstudios.atlas.tools.DebugTool; import com.kehxstudios.atlas.tools.ErrorTool; import com.kehxstudios.atlas.type.ComponentType; import java.util.ArrayList; /** * Used to control all user input */ public class InputManager extends Manager { // Holds instance of class, create new if not set private static InputManager instance; public static InputManager getInstance() { if (instance == null) { instance = new InputManager(); } return instance; } // HashMap for all ClickableComponents created private HashMap<int, ClickableComponent> clickableComponents; private Vector2 clickedPosition; // Constructor public InputManager() { super(); init(); } // Initalizes the @clickableComponents @Override protected void init() { clickableComponents = new HashMap<int,ClickableComponent>(); clickedPosition = new Vector2(0,0); DebugTool.log("InputManager_setup: Complete"); } // Called to check if any clickableComponents were touched @Override public void tick(float delta) { if (Gdx.input.justTouched() && clickableComponents.size() > 0) { float x = screen.getWidth() - Gdx.input.getX() / screen.getScaleWidth() + screen.getCamera().position.x - screen.getCamera().viewportWidth/2; float y = screen.getHeight() - Gdx.input.getY() / screen.getScaleHeight() + screen.getCamera().position.y - screen.getCamera().viewportHeight/2; clickedPosition.set(x, y); for (ClickableComponent clickable : clickableComponents.values()) { if (clickable.isEnabled()) { if (x > clickable.getPosition().x - clickable.getWidth() / 2 && x < clickable.getPosition().x + clickable.getWidth() / 2 && y > clickable.getPosition().y - clickable.getHeight() / 2 && y < 
clickable.getPosition().y + clickable.getHeight() / 2) { clickable.trigger(); } } } } else { clickedPosition.set(0, 0); } } // Called when loading a new screen @Override protected void loadSettings() { DebugTool.log("InputManager_loadSettings: Complete"); } // Called when unloading the current screen @Override protected void removeSettings() { DebugTool.log("InputManager_removeSettings: Complete"); } @Override public void add(Component component) { if (component.getType() == ComponentType.CLICKABLE) { ClickableComponent clickable = (ClickableComponent)component; if (!clickableComponents.contains(clickable)) { clickableComponents.put(clickable.id, clickable); } else { ErrorTool.log("Failed to add clickable to clickableComponents"); } } } @Override public void remove(Component component) { if (component.getType() == ComponentType.CLICKABLE) { ClickableComponent clickable = (ClickableComponent)component; if (clickableComponents.contains(clickable)) { clickableComponents.values().remove(clickable); } else { ErrorTool.log("Failed to add clickable to clickableComponents"); } } } public Vector2 getClickedPosition() { return clickedPosition; } }
core/src/com/kehxstudios/atlas/managers/InputManager.java
/******************************************************************************* * Copyright 2017 See AUTHORS file. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
******************************************************************************/ package com.kehxstudios.atlas.managers; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import com.kehxstudios.atlas.components.ClickableComponent; import com.kehxstudios.atlas.components.Component; import com.kehxstudios.atlas.tools.DebugTool; import com.kehxstudios.atlas.tools.ErrorTool; import com.kehxstudios.atlas.type.ComponentType; import java.util.ArrayList; /** * Used to control all user input */ public class InputManager extends Manager { // Holds instance of class, create new if not set private static InputManager instance; public static InputManager getInstance() { if (instance == null) { instance = new InputManager(); } return instance; } // ArrayList for all ClickableComponents created private ArrayList<ClickableComponent> clickableComponents; private Vector2 clickedPosition; // Constructor public InputManager() { super(); init(); } // Initalizes the @clickableComponents @Override protected void init() { clickableComponents = new ArrayList<ClickableComponent>(); clickedPosition = new Vector2(0,0); DebugTool.log("InputManager_setup: Complete"); } // Called to check if any clickableComponents were touched @Override public void tick(float delta) { if (Gdx.input.justTouched() && clickableComponents.size() > 0) { float x = screen.getWidth() - Gdx.input.getX() / screen.getScaleWidth() + screen.getCamera().position.x - screen.getCamera().viewportWidth/2; float y = screen.getHeight() - Gdx.input.getY() / screen.getScaleHeight() + screen.getCamera().position.y - screen.getCamera().viewportHeight/2; clickedPosition.set(x, y); for (ClickableComponent clickable : clickableComponents) { if (clickable.isEnabled()) { if (x > clickable.getPosition().x - clickable.getWidth() / 2 && x < clickable.getPosition().x + clickable.getWidth() / 2 && y > clickable.getPosition().y - clickable.getHeight() / 2 && y < clickable.getPosition().y + 
clickable.getHeight() / 2) { clickable.trigger(); } } } } else { clickedPosition.set(0, 0); } } // Called when loading a new screen @Override protected void loadSettings() { DebugTool.log("InputManager_loadSettings: Complete"); } // Called when unloading the current screen @Override protected void removeSettings() { DebugTool.log("InputManager_removeSettings: Complete"); } @Override public void add(Component component) { if (component.getType() == ComponentType.CLICKABLE) { ClickableComponent clickable = (ClickableComponent)component; if (!clickableComponents.contains(clickable)) { clickableComponents.add(clickable); } else { ErrorTool.log("Failed to add clickable to clickableComponents"); } } } @Override public void remove(Component component) { if (component.getType() == ComponentType.CLICKABLE) { ClickableComponent clickable = (ClickableComponent)component; if (clickableComponents.contains(clickable)) { clickableComponents.remove(clickable); } else { ErrorTool.log("Failed to add clickable to clickableComponents"); } } } public Vector2 getClickedPosition() { return clickedPosition; } }
InputManager ECS Update - Patched Entity & Components ECS changes
core/src/com/kehxstudios/atlas/managers/InputManager.java
InputManager ECS Update
Java
epl-1.0
f4ab122c37b41a43ba26be671207bbd0d3b309b9
0
Adonias1/327Project
package client; import java.io.*; import java.net.*; import java.util.ArrayList; import java.util.concurrent.*; import server.TCPServer.ServerSocketThread; class TCPClient{ public static void main(String argv[]) throws Exception{ String serverResponse; // String to hold response from server Socket clientSocket = new Socket("localhost", 4279); // Establishing connection to server DataOutputStream outToServer = new DataOutputStream(clientSocket.getOutputStream()); // Creating a DataOutputStream to send requests to the server BufferedReader inFromServer = new BufferedReader(new InputStreamReader(clientSocket.getInputStream())); // Creating a BufferedReader to receive responses from the server while(true){ for(int i = 1; i <= 5; i++){ // For loop for sending command "1" and receiving Fibonacci number outToServer.writeBytes("1\n"); // Send command "1" to the server serverResponse = inFromServer.readLine(); // Receiving response from server System.out.println("Fibonacci Number from Server: " + serverResponse); // Printing response from server } for(int i = 1; i <= 5; i++){ // For loop for sending command "2" to receive a random number that is larger than the previous one outToServer.writeBytes("2\n"); // Send command "2" to the server serverResponse = inFromServer.readLine(); // Receiving response from server System.out.println("Larger random number from Server: " + serverResponse); // Printing response from server } for(int i = 1; i <= 5; i++){ // For loop for sending command "3" to receive the next prime number outToServer.writeBytes("3\n"); // Send command "3" to the server serverResponse = inFromServer.readLine(); // Receiving response from server System.out.println("Prime Number from Server: " + serverResponse); // Printing response from server } <<<<<<< Updated upstream } //clientSocket.close(); //Close client connection with the server ======= class localThr extends Thread { public localThr(){ ConcurrentlinkedQueue<> requestQueue = new ConcurrentLinkedQueue<>(); 
ConcurrentlinkedQueue<> returnQueue = new ConcurrentLinkedQueue<>(); } public void run(){ } } ////////////////////////////////////////////////////////// class uThr extends Thread { public uThr(Socket sock){ } public void run(){ } } ////////////////////////////////////////////////////////// class runtimeThr extends Thread { public runtimeThr(Socket sock){ } public void run(){ } } ////////////////////////////////////////////////////////// class networkThr extends Thread { public networkThr(Socket sock){ } public void run(){ } } ////////////////////////////////////////////////////////// clientSocket.close(); //Close client connection with the server >>>>>>> Stashed changes } }
src/client/TCPClient.java
package client; import java.io.*; import java.net.*; class TCPClient{ public static void main(String argv[]) throws Exception{ String serverResponse; // String to hold response from server Socket clientSocket = new Socket("localhost", 4279); // Establishing connection to server DataOutputStream outToServer = new DataOutputStream(clientSocket.getOutputStream()); // Creating a DataOutputStream to send requests to the server BufferedReader inFromServer = new BufferedReader(new InputStreamReader(clientSocket.getInputStream())); // Creating a BufferedReader to receive responses from the server while(true){ for(int i = 1; i <= 5; i++){ // For loop for sending command "1" and receiving Fibonacci number outToServer.writeBytes("1\n"); // Send command "1" to the server serverResponse = inFromServer.readLine(); // Receiving response from server System.out.println("Fibonacci Number from Server: " + serverResponse); // Printing response from server } for(int i = 1; i <= 5; i++){ // For loop for sending command "2" to receive a random number that is larger than the previous one outToServer.writeBytes("2\n"); // Send command "2" to the server serverResponse = inFromServer.readLine(); // Receiving response from server System.out.println("Larger random number from Server: " + serverResponse); // Printing response from server } for(int i = 1; i <= 5; i++){ // For loop for sending command "3" to receive the next prime number outToServer.writeBytes("3\n"); // Send command "3" to the server serverResponse = inFromServer.readLine(); // Receiving response from server System.out.println("Prime Number from Server: " + serverResponse); // Printing response from server } } //clientSocket.close(); //Close client connection with the server } }
localhost ahh
src/client/TCPClient.java
localhost
Java
epl-1.0
17c63b28650dbe527244679f96056daf8cfd735f
0
PeterC-DLS/org.eclipse.dataset,jamesmudd/dawnsci,Anthchirp/dawnsci,jonahkichwacoders/dawnsci,belkassaby/dawnsci,DawnScience/dawnsci,Anthchirp/dawnsci,eclipse/dawnsci,jamesmudd/dawnsci,willrogers/dawnsci,colinpalmer/dawnsci,jamesmudd/dawnsci,xen-0/dawnsci,PeterC-DLS/org.eclipse.dataset,jonahkichwacoders/dawnsci,colinpalmer/dawnsci,willrogers/dawnsci
/*- * Copyright 2015 Diamond Light Source Ltd. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.dawnsci.plotting.examples.exercises; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; import org.eclipse.dawnsci.plotting.api.histogram.IImageService; import org.eclipse.dawnsci.plotting.api.histogram.ImageServiceBean; import org.eclipse.dawnsci.plotting.api.trace.IImageTrace; import org.eclipse.dawnsci.plotting.api.trace.ITraceListener; import org.eclipse.dawnsci.plotting.api.trace.TraceEvent; import org.eclipse.dawnsci.plotting.examples.Examples; import org.eclipse.swt.graphics.ImageData; public class Exercise3 extends Exercise2 { private static final String EXPORT_DIR = "C:/tmp/Export_"; private int count; private static int buttonPressedCount; public Exercise3() { super(); } protected void startReorderThread() { count = 0; final File olddir = new File(EXPORT_DIR+buttonPressedCount); if (olddir.exists()) recursiveDelete(olddir); buttonPressedCount++; final File dir = new File(EXPORT_DIR+buttonPressedCount); if (dir.exists()) recursiveDelete(dir); super.startReorderThread(); } protected ITraceListener createTraceListener() { return new ITraceListener.Stub() { @Override public void traceUpdated(TraceEvent evt) { // In reality use a Job queue to do this or the UI goes slow... 
IImageTrace trace = (IImageTrace)evt.getSource(); createThreasholdMask(trace); createImageFile(trace); } }; } protected void createImageFile(IImageTrace trace) { try { final IImageService iservice = Examples.getCurrent().getImageService(); final ImageServiceBean bean = trace.getImageServiceBean().clone(); // Full image and full mask bean.setImage(trace.getData()); bean.setMask(trace.getMask()); final ImageData imdata = iservice.getImageData(bean); final BufferedImage image = iservice.getBufferedImage(imdata); count++; final File dir = new File(EXPORT_DIR+buttonPressedCount); if (!dir.exists()) dir.mkdirs(); final File imageFile = new File(dir, "Image_"+buttonPressedCount+"_"+count+".png"); boolean ok = ImageIO.write(image, "png", imageFile); if (ok) { System.out.println("Written: "+imageFile.getAbsolutePath()); } else { System.out.println("Failed writing: "+imageFile.getAbsolutePath()); } } catch (Exception ne) { ne.printStackTrace(); } } static protected final boolean recursiveDelete(File parent) { if (parent.exists()) { if (parent.isDirectory()) { File[] files = parent.listFiles(); for (int ifile = 0; ifile < files.length; ++ifile) { if (files[ifile].isDirectory()) { recursiveDelete(files[ifile]); } if (files[ifile].exists()) { files[ifile].delete(); } } } return parent.delete(); } return false; } }
org.eclipse.dawnsci.plotting.examples/src/org/eclipse/dawnsci/plotting/examples/exercises/Exercise3.java
/*- * Copyright 2015 Diamond Light Source Ltd. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.dawnsci.plotting.examples.exercises; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; import org.eclipse.dawnsci.plotting.api.histogram.IImageService; import org.eclipse.dawnsci.plotting.api.histogram.ImageServiceBean; import org.eclipse.dawnsci.plotting.api.trace.IImageTrace; import org.eclipse.dawnsci.plotting.api.trace.ITraceListener; import org.eclipse.dawnsci.plotting.api.trace.TraceEvent; import org.eclipse.dawnsci.plotting.examples.Examples; import org.eclipse.swt.graphics.ImageData; public class Exercise3 extends Exercise2 { private static final String EXPORT_DIR = "C:/tmp/Export_"; private int count; private static int buttonPressedCount; public Exercise3() { super(); } protected void startReorderThread() { count = 0; final File olddir = new File(EXPORT_DIR+buttonPressedCount); if (olddir.exists()) recursiveDelete(olddir); buttonPressedCount++; final File dir = new File(EXPORT_DIR+buttonPressedCount); if (dir.exists()) recursiveDelete(dir); super.startReorderThread(); } protected ITraceListener createTraceListener() { return new ITraceListener.Stub() { @Override public void traceUpdated(TraceEvent evt) { // In reality use a Job queue to do this or the UI goes slow... 
IImageTrace trace = (IImageTrace)evt.getSource(); createThreasholdMask(trace); createImageFile(trace); } }; } protected void createImageFile(IImageTrace trace) { try { final IImageService iservice = Examples.getCurrent().getImageService(); final ImageServiceBean bean = trace.getImageServiceBean().clone(); // Full image and full mask bean.setImage(trace.getData()); bean.setMask(trace.getMask()); final ImageData imdata = iservice.getImageData(bean); final BufferedImage image = iservice.getBufferedImage(imdata); count++; final File dir = new File(EXPORT_DIR+buttonPressedCount); if (!dir.exists()) dir.mkdirs(); final File imageFile = new File(dir, "Image_"+buttonPressedCount+"_"+count+".png"); boolean ok = ImageIO.write(image, "png", imageFile); if (ok) { System.out.println("Written: "+imageFile.getAbsolutePath()); } else { System.out.println("Failed writing: "+imageFile.getAbsolutePath()); } // TODO Probably should delete the files between writings. } catch (Exception ne) { ne.printStackTrace(); } } static protected final boolean recursiveDelete(File parent) { if (parent.exists()) { if (parent.isDirectory()) { File[] files = parent.listFiles(); for (int ifile = 0; ifile < files.length; ++ifile) { if (files[ifile].isDirectory()) { recursiveDelete(files[ifile]); } if (files[ifile].exists()) { files[ifile].delete(); } } } return parent.delete(); } return false; } }
Removed comment.
org.eclipse.dawnsci.plotting.examples/src/org/eclipse/dawnsci/plotting/examples/exercises/Exercise3.java
Removed comment.
Java
mpl-2.0
97c015f919900a080887a59e3c9e2ddce61d44d8
0
jamesfeshner/openmrs-module,dcmul/openmrs-core,aboutdata/openmrs-core,maany/openmrs-core,nilusi/Legacy-UI,andyvand/OpenMRS,kristopherschmidt/openmrs-core,sintjuri/openmrs-core,macorrales/openmrs-core,jvena1/openmrs-core,michaelhofer/openmrs-core,jembi/openmrs-core,jcantu1988/openmrs-core,hoquangtruong/TestMylyn,aboutdata/openmrs-core,asifur77/openmrs,jcantu1988/openmrs-core,andyvand/OpenMRS,prisamuel/openmrs-core,foolchan2556/openmrs-core,donaldgavis/openmrs-core,trsorsimoII/openmrs-core,naraink/openmrs-core,prisamuel/openmrs-core,lilo2k/openmrs-core,foolchan2556/openmrs-core,kckc/openmrs-core,kigsmtua/openmrs-core,maekstr/openmrs-core,alexei-grigoriev/openmrs-core,Negatu/openmrs-core,Openmrs-joel/openmrs-core,MitchellBot/openmrs-core,andyvand/OpenMRS,jamesfeshner/openmrs-module,ssmusoke/openmrs-core,aj-jaswanth/openmrs-core,joansmith/openmrs-core,maany/openmrs-core,jamesfeshner/openmrs-module,shiangree/openmrs-core,donaldgavis/openmrs-core,dlahn/openmrs-core,WANeves/openmrs-core,Winbobob/openmrs-core,alexwind26/openmrs-core,shiangree/openmrs-core,kigsmtua/openmrs-core,rbtracker/openmrs-core,siddharthkhabia/openmrs-core,pselle/openmrs-core,sintjuri/openmrs-core,naraink/openmrs-core,MuhammadSafwan/Stop-Button-Ability,nilusi/Legacy-UI,preethi29/openmrs-core,chethandeshpande/openmrs-core,kabariyamilind/openMRSDEV,foolchan2556/openmrs-core,prisamuel/openmrs-core,rbtracker/openmrs-core,kigsmtua/openmrs-core,ldf92/openmrs-core,Openmrs-joel/openmrs-core,aj-jaswanth/openmrs-core,sravanthi17/openmrs-core,michaelhofer/openmrs-core,macorrales/openmrs-core,chethandeshpande/openmrs-core,iLoop2/openmrs-core,alexei-grigoriev/openmrs-core,shiangree/openmrs-core,jvena1/openmrs-core,naraink/openmrs-core,jembi/openmrs-core,spereverziev/openmrs-core,trsorsimoII/openmrs-core,MitchellBot/openmrs-core,milankarunarathne/openmrs-core,lilo2k/openmrs-core,vinayvenu/openmrs-core,chethandeshpande/openmrs-core,milankarunarathne/openmrs-core,jamesfeshner/openmrs-module,aboutdata/openmrs-core,geof
f-wasilwa/openmrs-core,geoff-wasilwa/openmrs-core,joansmith/openmrs-core,jamesfeshner/openmrs-module,koskedk/openmrs-core,hoquangtruong/TestMylyn,donaldgavis/openmrs-core,aj-jaswanth/openmrs-core,asifur77/openmrs,lbl52001/openmrs-core,AbhijitParate/openmrs-core,hoquangtruong/TestMylyn,Ch3ck/openmrs-core,alexei-grigoriev/openmrs-core,MuhammadSafwan/Stop-Button-Ability,Negatu/openmrs-core,ern2/openmrs-core,asifur77/openmrs,Openmrs-joel/openmrs-core,maekstr/openmrs-core,pselle/openmrs-core,WANeves/openmrs-core,shiangree/openmrs-core,aj-jaswanth/openmrs-core,rbtracker/openmrs-core,ldf92/openmrs-core,joansmith/openmrs-core,ssmusoke/openmrs-core,maany/openmrs-core,lilo2k/openmrs-core,kabariyamilind/openMRSDEV,WANeves/openmrs-core,sadhanvejella/openmrs,rbtracker/openmrs-core,jvena1/openmrs-core,Negatu/openmrs-core,aboutdata/openmrs-core,vinayvenu/openmrs-core,aj-jaswanth/openmrs-core,kabariyamilind/openMRSDEV,sravanthi17/openmrs-core,foolchan2556/openmrs-core,michaelhofer/openmrs-core,kigsmtua/openmrs-core,MitchellBot/openmrs-core,kckc/openmrs-core,Negatu/openmrs-core,MitchellBot/openmrs-core,vinayvenu/openmrs-core,AbhijitParate/openmrs-core,sadhanvejella/openmrs,jembi/openmrs-core,kigsmtua/openmrs-core,kckc/openmrs-core,AbhijitParate/openmrs-core,lbl52001/openmrs-core,dcmul/openmrs-core,maekstr/openmrs-core,ern2/openmrs-core,aboutdata/openmrs-core,sadhanvejella/openmrs,alexwind26/openmrs-core,MuhammadSafwan/Stop-Button-Ability,sravanthi17/openmrs-core,Ch3ck/openmrs-core,andyvand/OpenMRS,lbl52001/openmrs-core,geoff-wasilwa/openmrs-core,michaelhofer/openmrs-core,iLoop2/openmrs-core,jembi/openmrs-core,iLoop2/openmrs-core,alexwind26/openmrs-core,hoquangtruong/TestMylyn,donaldgavis/openmrs-core,naraink/openmrs-core,milankarunarathne/openmrs-core,dlahn/openmrs-core,dlahn/openmrs-core,MuhammadSafwan/Stop-Button-Ability,Negatu/openmrs-core,Openmrs-joel/openmrs-core,nilusi/Legacy-UI,chethandeshpande/openmrs-core,kristopherschmidt/openmrs-core,dlahn/openmrs-core,kckc/openmrs-core,i
Loop2/openmrs-core,WANeves/openmrs-core,ern2/openmrs-core,naraink/openmrs-core,siddharthkhabia/openmrs-core,kckc/openmrs-core,nilusi/Legacy-UI,andyvand/OpenMRS,koskedk/openmrs-core,lbl52001/openmrs-core,lbl52001/openmrs-core,shiangree/openmrs-core,pselle/openmrs-core,maekstr/openmrs-core,asifur77/openmrs,joansmith/openmrs-core,shiangree/openmrs-core,kristopherschmidt/openmrs-core,Winbobob/openmrs-core,siddharthkhabia/openmrs-core,lilo2k/openmrs-core,Ch3ck/openmrs-core,WANeves/openmrs-core,nilusi/Legacy-UI,jcantu1988/openmrs-core,Winbobob/openmrs-core,MuhammadSafwan/Stop-Button-Ability,pselle/openmrs-core,asifur77/openmrs,Openmrs-joel/openmrs-core,Winbobob/openmrs-core,sintjuri/openmrs-core,Ch3ck/openmrs-core,Winbobob/openmrs-core,sintjuri/openmrs-core,ldf92/openmrs-core,geoff-wasilwa/openmrs-core,WANeves/openmrs-core,siddharthkhabia/openmrs-core,andyvand/OpenMRS,prisamuel/openmrs-core,siddharthkhabia/openmrs-core,naraink/openmrs-core,AbhijitParate/openmrs-core,lbl52001/openmrs-core,jvena1/openmrs-core,michaelhofer/openmrs-core,ssmusoke/openmrs-core,ssmusoke/openmrs-core,prisamuel/openmrs-core,alexwind26/openmrs-core,alexwind26/openmrs-core,maekstr/openmrs-core,jembi/openmrs-core,MitchellBot/openmrs-core,spereverziev/openmrs-core,Negatu/openmrs-core,kristopherschmidt/openmrs-core,ldf92/openmrs-core,spereverziev/openmrs-core,maany/openmrs-core,hoquangtruong/TestMylyn,koskedk/openmrs-core,foolchan2556/openmrs-core,kabariyamilind/openMRSDEV,pselle/openmrs-core,rbtracker/openmrs-core,lilo2k/openmrs-core,jembi/openmrs-core,trsorsimoII/openmrs-core,MuhammadSafwan/Stop-Button-Ability,dlahn/openmrs-core,aboutdata/openmrs-core,spereverziev/openmrs-core,nilusi/Legacy-UI,lilo2k/openmrs-core,jvena1/openmrs-core,koskedk/openmrs-core,ldf92/openmrs-core,chethandeshpande/openmrs-core,milankarunarathne/openmrs-core,kigsmtua/openmrs-core,kabariyamilind/openMRSDEV,hoquangtruong/TestMylyn,koskedk/openmrs-core,vinayvenu/openmrs-core,geoff-wasilwa/openmrs-core,foolchan2556/openmrs-core,sa
dhanvejella/openmrs,maany/openmrs-core,joansmith/openmrs-core,sintjuri/openmrs-core,alexei-grigoriev/openmrs-core,alexei-grigoriev/openmrs-core,donaldgavis/openmrs-core,trsorsimoII/openmrs-core,jcantu1988/openmrs-core,preethi29/openmrs-core,milankarunarathne/openmrs-core,milankarunarathne/openmrs-core,macorrales/openmrs-core,preethi29/openmrs-core,koskedk/openmrs-core,ssmusoke/openmrs-core,sravanthi17/openmrs-core,macorrales/openmrs-core,sadhanvejella/openmrs,sintjuri/openmrs-core,dcmul/openmrs-core,preethi29/openmrs-core,dcmul/openmrs-core,alexei-grigoriev/openmrs-core,AbhijitParate/openmrs-core,maekstr/openmrs-core,pselle/openmrs-core,AbhijitParate/openmrs-core,vinayvenu/openmrs-core,sadhanvejella/openmrs,prisamuel/openmrs-core,spereverziev/openmrs-core,trsorsimoII/openmrs-core,kristopherschmidt/openmrs-core,ern2/openmrs-core,Winbobob/openmrs-core,macorrales/openmrs-core,sravanthi17/openmrs-core,siddharthkhabia/openmrs-core,Ch3ck/openmrs-core,iLoop2/openmrs-core,ern2/openmrs-core,dcmul/openmrs-core,dcmul/openmrs-core,preethi29/openmrs-core,kckc/openmrs-core,spereverziev/openmrs-core,iLoop2/openmrs-core,jcantu1988/openmrs-core
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.web.dwr; import java.util.List; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Encounter; import org.openmrs.Location; import org.openmrs.api.APIException; import org.openmrs.api.EncounterService; import org.openmrs.api.LocationService; import org.openmrs.api.context.Context; public class DWREncounterService { private static final Log log = LogFactory.getLog(DWREncounterService.class); public Vector findEncounters(String phrase, boolean includeVoided) throws APIException { // List to return // Object type gives ability to return error strings Vector<Object> objectList = new Vector<Object>(); try { EncounterService es = Context.getEncounterService(); List<Encounter> encs = new Vector<Encounter>(); if (phrase == null) { objectList.add("Search phrase cannot be null"); return objectList; } if (phrase.matches("\\d+")) { // user searched on a number. Insert concept with corresponding encounterId Encounter e = es.getEncounter(Integer.valueOf(phrase)); if (e != null) { if (!e.isVoided() || includeVoided == true) encs.add(e); } } if (phrase == null || phrase.equals("")) { //TODO get all concepts for testing purposes? 
} else { encs.addAll(es.getEncountersByPatient(phrase)); } if (encs.size() == 0) { objectList.add("No matches found for <b>" + phrase + "</b>"); } else { objectList = new Vector<Object>(encs.size()); for (Encounter e : encs) { objectList.add(new EncounterListItem(e)); } } } catch (Exception e) { log.error("Error while searching for encounters", e); objectList.add("Error while attempting to find encounter - " + e.getMessage()); } return objectList; } public EncounterListItem getEncounter(Integer encounterId) { EncounterService es = Context.getEncounterService(); Encounter e = es.getEncounter(encounterId); return e == null ? null : new EncounterListItem(e); } @SuppressWarnings("unchecked") public Vector findLocations(String searchValue) { Vector locationList = new Vector(); try { LocationService ls = Context.getLocationService(); List<Location> locations = ls.getLocations(searchValue); locationList = new Vector(locations.size()); for (Location loc : locations) { locationList.add(new LocationListItem(loc)); } } catch (Exception e) { log.error(e); locationList.add("Error while attempting to find locations - " + e.getMessage()); } if (locationList.size() == 0) { locationList.add("No locations found. Please search again."); } return locationList; } @SuppressWarnings("unchecked") public Vector getLocations() { Vector locationList = new Vector(); try { LocationService ls = Context.getLocationService(); List<Location> locations = ls.getAllLocations(); locationList = new Vector(locations.size()); for (Location loc : locations) { locationList.add(new LocationListItem(loc)); } } catch (Exception e) { log.error("Error while attempting to get locations", e); locationList.add("Error while attempting to get locations - " + e.getMessage()); } return locationList; } public LocationListItem getLocation(Integer locationId) { LocationService ls = Context.getLocationService(); Location l = ls.getLocation(locationId); return l == null ? null : new LocationListItem(l); } }
src/web/org/openmrs/web/dwr/DWREncounterService.java
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.web.dwr; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Encounter; import org.openmrs.Location; import org.openmrs.api.APIException; import org.openmrs.api.EncounterService; import org.openmrs.api.LocationService; import org.openmrs.api.context.Context; import java.util.List; import java.util.Vector; public class DWREncounterService { private static final Log log = LogFactory.getLog(DWREncounterService.class); public Vector findEncounters(String phrase, boolean includeVoided) throws APIException { if (includeVoided) throw new APIException("Don't include voided encounters."); // List to return // Object type gives ability to return error strings Vector<Object> objectList = new Vector<Object>(); try { EncounterService es = Context.getEncounterService(); List<Encounter> encs = new Vector<Encounter>(); if (phrase == null) { objectList.add("Search phrase cannot be null"); return objectList; } if (phrase.matches("\\d+")) { // user searched on a number. Insert concept with corresponding encounterId Encounter e = es.getEncounter(Integer.valueOf(phrase)); if (e != null) { if (!e.isVoided() || includeVoided == true) encs.add(e); } } if (phrase == null || phrase.equals("")) { //TODO get all concepts for testing purposes? 
} else { encs.addAll(es.getEncountersByPatient(phrase)); } if (encs.size() == 0) { objectList.add("No matches found for <b>" + phrase + "</b>"); } else { objectList = new Vector<Object>(encs.size()); for (Encounter e : encs) { objectList.add(new EncounterListItem(e)); } } } catch (Exception e) { log.error("Error while searching for encounters", e); objectList.add("Error while attempting to find encounter - " + e.getMessage()); } return objectList; } public EncounterListItem getEncounter(Integer encounterId) { EncounterService es = Context.getEncounterService(); Encounter e = es.getEncounter(encounterId); return e == null ? null : new EncounterListItem(e); } @SuppressWarnings("unchecked") public Vector findLocations(String searchValue) { Vector locationList = new Vector(); try { LocationService ls = Context.getLocationService(); List<Location> locations = ls.getLocations(searchValue); locationList = new Vector(locations.size()); for (Location loc : locations) { locationList.add(new LocationListItem(loc)); } } catch (Exception e) { log.error(e); locationList.add("Error while attempting to find locations - " + e.getMessage()); } if (locationList.size() == 0) { locationList.add("No locations found. Please search again."); } return locationList; } @SuppressWarnings("unchecked") public Vector getLocations() { Vector locationList = new Vector(); try { LocationService ls = Context.getLocationService(); List<Location> locations = ls.getAllLocations(); locationList = new Vector(locations.size()); for (Location loc : locations) { locationList.add(new LocationListItem(loc)); } } catch (Exception e) { log.error("Error while attempting to get locations", e); locationList.add("Error while attempting to get locations - " + e.getMessage()); } return locationList; } public LocationListItem getLocation(Integer locationId) { LocationService ls = Context.getLocationService(); Location l = ls.getLocation(locationId); return l == null ? null : new LocationListItem(l); } }
Error when attempting to search for voided encounters: Don't include voided encounters - TRUNK-190 git-svn-id: ce3478dfdc990238714fcdf4fc6855b7489218cf@14279 5bac5841-c719-aa4e-b3fe-cce5062f897a
src/web/org/openmrs/web/dwr/DWREncounterService.java
Error when attempting to search for voided encounters: Don't include voided encounters - TRUNK-190
Java
lgpl-2.1
ebc446e5007881073d198798f44148a0157b9aee
0
kimrutherford/intermine,JoeCarlson/intermine,joshkh/intermine,JoeCarlson/intermine,kimrutherford/intermine,joshkh/intermine,elsiklab/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,JoeCarlson/intermine,kimrutherford/intermine,joshkh/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,JoeCarlson/intermine,tomck/intermine,elsiklab/intermine,elsiklab/intermine,joshkh/intermine,tomck/intermine,zebrafishmine/intermine,zebrafishmine/intermine,elsiklab/intermine,zebrafishmine/intermine,justincc/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,justincc/intermine,JoeCarlson/intermine,JoeCarlson/intermine,zebrafishmine/intermine,elsiklab/intermine,zebrafishmine/intermine,elsiklab/intermine,drhee/toxoMine,elsiklab/intermine,kimrutherford/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,justincc/intermine,justincc/intermine,justincc/intermine,JoeCarlson/intermine,elsiklab/intermine,justincc/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,joshkh/intermine,kimrutherford/intermine,tomck/intermine,kimrutherford/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,drhee/toxoMine,JoeCarlson/intermine,zebrafishmine/intermine,kimrutherford/intermine,justincc/intermine,zebrafishmine/intermine,zebrafishmine/intermine,drhee/toxoMine,tomck/intermine,justincc/intermine,drhee/toxoMine,elsiklab/intermine,joshkh/intermine,JoeCarlson/intermine,joshkh/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,drhee/toxoMine
package org.modmine.web; /* * Copyright (C) 2002-2010 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeMap; import javax.servlet.ServletContext; import org.apache.log4j.Logger; import org.intermine.model.bio.Chromosome; import org.intermine.model.bio.DatabaseRecord; import org.intermine.model.bio.Experiment; import org.intermine.model.bio.ExpressionLevel; import org.intermine.model.bio.LocatedSequenceFeature; import org.intermine.model.bio.Location; import org.intermine.model.bio.Project; import org.intermine.model.bio.ResultFile; import org.intermine.model.bio.Submission; import org.intermine.objectstore.ObjectStore; import org.intermine.objectstore.ObjectStoreException; import org.intermine.objectstore.query.ConstraintOp; import org.intermine.objectstore.query.ConstraintSet; import org.intermine.objectstore.query.ContainsConstraint; import org.intermine.objectstore.query.Query; import org.intermine.objectstore.query.QueryClass; import org.intermine.objectstore.query.QueryCollectionReference; import org.intermine.objectstore.query.QueryField; import org.intermine.objectstore.query.QueryFunction; import org.intermine.objectstore.query.QueryObjectReference; import org.intermine.objectstore.query.QueryValue; import org.intermine.objectstore.query.Results; import org.intermine.objectstore.query.ResultsRow; import org.intermine.objectstore.query.SimpleConstraint; import 
org.intermine.util.TypeUtil; import org.modmine.web.GBrowseParser.GBrowseTrack; /** * Read modENCODE metadata into objects that simplify display code, cache results. * @author Richard Smith * */ public class MetadataCache { private static final Logger LOG = Logger.getLogger(MetadataCache.class); private static final String NO_FEAT_DESCR_LOG = "Unable to find /WEB-INF/featureTypeDescr.properties, no feature descriptions in webapp!"; private static Map<String, DisplayExperiment> experimentCache = null; private static Map<Integer, Map<String, Long>> submissionFeatureCounts = null; private static Map<Integer, Map<String, Long>> submissionFeatureExpressionLevelCounts = null; private static Map<String, Map<String, Long>> experimentFeatureExpressionLevelCounts = null; private static Map<Integer, Integer> submissionExpressionLevelCounts = null; private static Map<Integer, Integer> submissionIdCache = null; private static Map<Integer, List<GBrowseTrack>> submissionTracksCache = null; private static Map<Integer, Set<ResultFile>> submissionFilesCache = null; private static Map<Integer, Integer> filesPerSubmissionCache = null; private static Map<Integer, List<String>> submissionLocatedFeatureTypes = null; private static Map<Integer, List<String>> submissionUnlocatedFeatureTypes = null; private static Map<Integer, List<String[]>> submissionRepositedCache = null; private static Map<String, String> featDescriptionCache = null; private static long lastTrackCacheRefresh = 0; private static final long TWO_HOUR = 7200000; /** * Fetch experiment details for display. * @param os the production objectStore * @return a list of experiments */ public static synchronized List<DisplayExperiment> getExperiments(ObjectStore os) { if (experimentCache == null) { readExperiments(os); } return new ArrayList<DisplayExperiment>(experimentCache.values()); } /** * Fetch GBrowse tracks per submission fpr display. 
This updates automatically from the GBrowse * server and refreshes periodically (according to threshold). When refreshing another process * is spawned which will update tracks when finished, if GBrowse can't be accessed the current * list of tracks of tracks are preserved. * @return map from submission id to list of GBrowse tracks */ public static synchronized Map<Integer, List<GBrowseTrack>> getGBrowseTracks() { fetchGBrowseTracks(); while (submissionTracksCache == null) { try { MetadataCache.class.wait(); } catch (InterruptedException e) { } } return submissionTracksCache; } /** * Fetch unlocated feature types per submission. * @param os the production objectStore * @return map of unlocated feature types */ public static synchronized Map<Integer, List<String>> getLocatedFeatureTypes(ObjectStore os) { if (submissionLocatedFeatureTypes == null) { readSubmissionLocatedFeature(os); } return submissionLocatedFeatureTypes; } /** * Fetch unlocated feature types per submission. * @param os the production objectStore * @return map of unlocated feature types */ public static synchronized Map<Integer, List<String>> getUnlocatedFeatureTypes(ObjectStore os) { if (submissionUnlocatedFeatureTypes == null) { readUnlocatedFeatureTypes(os); } return submissionUnlocatedFeatureTypes; } /** * Fetch unlocated feature types per submission. * @param os the production objectStore * @param dccId the dccId * @return map of unlocated feature types */ public static synchronized Set<String> getUnlocatedFeatureTypesBySubId(ObjectStore os, Integer dccId) { if (submissionUnlocatedFeatureTypes == null) { readUnlocatedFeatureTypes(os); } Set<String> uf = new HashSet<String>(submissionUnlocatedFeatureTypes.get(dccId)); return uf; } /** * Fetch the collection of ResultFiles per submission. 
* @param os the production objectStore * @return map */ public static synchronized Map<Integer, Set<ResultFile>> getSubmissionFiles(ObjectStore os) { if (submissionFilesCache == null) { readSubmissionCollections(os); } return submissionFilesCache; } /** * Fetch the collection of Expression Level Counts per submission. * @param os the production objectStore * @return map */ public static synchronized Map<Integer, Integer> getSubmissionExpressionLevelCounts(ObjectStore os) { if (submissionExpressionLevelCounts == null) { readSubmissionCollections(os); } return submissionExpressionLevelCounts; } /** * Fetch the collection of Expression Level Counts per submission. * @param os the production objectStore * @return map */ public static synchronized Map<Integer, Map<String, Long>> getSubmissionFeatureExpressionLevelCounts(ObjectStore os) { if (submissionFeatureExpressionLevelCounts == null) { readSubmissionFeatureExpressionLevelCounts(os); } return submissionFeatureExpressionLevelCounts; } /** * Fetch the collection of Expression Level Counts per submission. * @param os the production objectStore * @return map */ public static synchronized Map<String, Map<String, Long>> getExperimentFeatureExpressionLevelCounts(ObjectStore os) { if (experimentFeatureExpressionLevelCounts == null) { readExperimentFeatureExpressionLevelCounts(os); } return experimentFeatureExpressionLevelCounts; } /** * Fetch number of input/output file per submission. 
* @param os the production objectStore * @return map */ public static synchronized Map<Integer, Integer> getFilesPerSubmission(ObjectStore os) { if (submissionFilesCache == null) { readSubmissionCollections(os); } filesPerSubmissionCache = new HashMap<Integer, Integer>(); Iterator<Integer> dccId = submissionFilesCache.keySet().iterator(); while (dccId.hasNext()) { Integer thisSub = dccId.next(); Integer nrFiles = submissionFilesCache.get(thisSub).size(); filesPerSubmissionCache.put(thisSub, nrFiles); } return filesPerSubmissionCache; } /** * Fetch a list of file names for a given submission. * @param os the objectStore * @param dccId the modENCODE submission id * @return a list of file names */ public static synchronized List<ResultFile> getFilesByDccId(ObjectStore os, Integer dccId) { if (submissionFilesCache == null) { readSubmissionCollections(os); } return new ArrayList<ResultFile>(submissionFilesCache.get(dccId)); } /** * Fetch a list of GBrowse tracks for a given submission. * @param dccId the modENCODE submission id * @return a list of file names */ public static synchronized List<GBrowseTrack> getTracksByDccId(Integer dccId) { Map<Integer, List<GBrowseTrack>> tracks = getGBrowseTracks(); if (tracks.get(dccId) != null) { return new ArrayList<GBrowseTrack>(tracks.get(dccId)); } else { return new ArrayList<GBrowseTrack>(); } } /** * Fetch a list of file names for a given submission. * @param servletContext the context * @return a list of file names */ public static synchronized Map<String, String> getFeatTypeDescription(ServletContext servletContext) { if (featDescriptionCache == null) { readFeatTypeDescription(servletContext); } return featDescriptionCache; } /** * Fetch a map from feature type to count for a given submission. 
* @param os the objectStore * @param dccId the modENCODE submission id * @return a map from feature type to count */ public static synchronized Map<String, Long> getSubmissionFeatureCounts(ObjectStore os, Integer dccId) { if (submissionFeatureCounts == null) { readSubmissionFeatureCounts(os); } return submissionFeatureCounts.get(dccId); } // /** // * Fetch the number of expression levels for a given submission. // * @param os the objectStore // * @param dccId the modENCODE submission id // * @return a map from submission to count // */ // public static synchronized Integer getSubmissionExpressionLevelCount(ObjectStore os, // Integer dccId) { // if (submissionExpressionLevelCounts == null) { // getSubmissionExpressionLevelCounts(os); // } // return submissionExpressionLevelCounts.get(dccId); // } // /** // * Fetch the number of expression levels for a given submission. // * @param os the objectStore // * @return a map from submission to count // */ // public static synchronized Map<String, Map<String, Long>> // getExperimentFeatureExpressionLevels(ObjectStore os) { // if (experimentFeatureExpressionLevelCounts == null) { // readExperimentFeatureExpressionLevelCounts(os); // } // return experimentFeatureExpressionLevelCounts; // } /** * Fetch a submission by the modENCODE submission ids * @param os the objectStore * @param dccId the modENCODE submission id * @return the requested submission * @throws ObjectStoreException if error reading database */ public static synchronized Submission getSubmissionByDccId(ObjectStore os, Integer dccId) throws ObjectStoreException { if (submissionIdCache == null) { readSubmissionFeatureCounts(os); } return (Submission) os.getObjectById(submissionIdCache.get(dccId)); } /** * Get experiment information by name * @param os the objectStore * @param name of the experiment to fetch * @return details of the experiment * @throws ObjectStoreException if error reading database */ public static synchronized DisplayExperiment 
getExperimentByName(ObjectStore os, String name) throws ObjectStoreException { if (experimentCache == null) { readExperiments(os); } return experimentCache.get(name); } //====================== private static void fetchGBrowseTracks() { long timeSinceLastRefresh = System.currentTimeMillis() - lastTrackCacheRefresh; if (timeSinceLastRefresh > TWO_HOUR) { readGBrowseTracks(); lastTrackCacheRefresh = System.currentTimeMillis(); } } /** * Set the map of GBrowse tracks. * * @param tracks map of dccId:GBrowse tracks */ public static synchronized void setGBrowseTracks(Map<Integer, List<GBrowseTrack>> tracks) { MetadataCache.class.notifyAll(); submissionTracksCache = tracks; } /** * Method to obtain the map of unlocated feature types by submission id * * @param os the objectStore * @return submissionUnlocatedFeatureTypes */ private static Map<Integer, List<String>> readUnlocatedFeatureTypes(ObjectStore os) { long startTime = System.currentTimeMillis(); try { if (submissionUnlocatedFeatureTypes != null) { return submissionUnlocatedFeatureTypes; } submissionUnlocatedFeatureTypes = new HashMap<Integer, List<String>>(); if (submissionLocatedFeatureTypes == null) { readSubmissionLocatedFeature(os); } if (submissionFeatureCounts == null) { readSubmissionFeatureCounts(os); } for (Integer subId : submissionFeatureCounts.keySet()) { Set<String> allFeatures = submissionFeatureCounts.get(subId).keySet(); Set<String> difference = new HashSet<String>(allFeatures); if (submissionLocatedFeatureTypes.get(subId) != null) { difference.removeAll(submissionLocatedFeatureTypes.get(subId)); } if (!difference.isEmpty()) { List <String> thisUnlocated = new ArrayList<String>(); for (String fType : difference) { thisUnlocated.add(fType); } submissionUnlocatedFeatureTypes.put(subId, thisUnlocated); } } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed unlocated feature cache, took: " + timeTaken + "ms size = " + 
submissionUnlocatedFeatureTypes.size()); return submissionUnlocatedFeatureTypes; } /** * * @param os objectStore * @return map exp-tracks */ public static Map<String, List<GBrowseTrack>> getExperimentGBrowseTracks(ObjectStore os) { Map<String, List<GBrowseTrack>> tracks = new HashMap<String, List<GBrowseTrack>>(); Map<Integer, List<GBrowseTrack>> subTracksMap = getGBrowseTracks(); for (DisplayExperiment exp : getExperiments(os)) { List<GBrowseTrack> expTracks = new ArrayList<GBrowseTrack>(); tracks.put(exp.getName(), expTracks); for (Submission sub : exp.getSubmissions()) { if (subTracksMap.get(sub.getdCCid()) != null){ List<GBrowseTrack> subTracks = subTracksMap.get(sub.getdCCid()); if (subTracks != null) { // check so it is unique // expTracks.addAll(subTracks); addToList(expTracks, subTracks); } else { continue; } } } } return tracks; } /** * adds the elements of a list i to a list l only if they are not yet * there * @param l the receiving list * @param i the donating list */ private static void addToList(List<GBrowseTrack> l, List<GBrowseTrack> i) { Iterator <GBrowseTrack> it = i.iterator(); while (it.hasNext()) { GBrowseTrack thisId = it.next(); if (!l.contains(thisId)) { l.add(thisId); } } } /** * * @param os objectStore * @return map exp-repository entries */ public static Map<String, Set<String[]>> getExperimentRepositoryEntries(ObjectStore os) { Map<String, Set<String[]>> reposited = new HashMap<String, Set<String[]>>(); Map<Integer, List<String[]>> subRepositedMap = getRepositoryEntries(os); for (DisplayExperiment exp : getExperiments(os)) { Set<String[]> expReps = new HashSet<String[]>(); for (Submission sub : exp.getSubmissions()) { List<String[]> subReps = subRepositedMap.get(sub.getdCCid()); if (subReps != null) { expReps.addAll(subReps); } } // for each experiment, we don't to count twice the same repository // entry produced by 2 different submissions. 
Set<String[]> expRepsCleaned = removeDuplications(expReps); reposited.put(exp.getName(), expRepsCleaned); } return reposited; } private static Set<String[]> removeDuplications(Set<String[]> expReps) { // removing the same repository entry coming from different submissions // in the given experiment Set<String> db = new HashSet<String>(); Set<String> acc = new HashSet<String>(); Set<String[]> dup = new HashSet<String[]>(); for (String[] s : expReps) { if (db.contains(s[0]) && acc.contains(s[1])) { // we don't remove place holders if (!s[1].startsWith("To be")) { dup.add(s); } } db.add(s[0]); acc.add(s[1]); } // do the difference between sets and return it Set<String[]> uniques = new HashSet<String[]>(expReps); uniques.removeAll(dup); return uniques; } /** * * @param os objectStore * @return map exp-repository entries */ public static Map<String, Integer> getExperimentExpressionLevels(ObjectStore os) { Map<String, Integer> experimentELevel = new HashMap<String, Integer>(); Map<Integer, Integer> subELevelMap = getSubmissionExpressionLevelCounts(os); for (DisplayExperiment exp : getExperiments(os)) { Integer expCount = 0; for (Submission sub : exp.getSubmissions()) { Integer subCount = subELevelMap.get(sub.getdCCid()); if (subCount != null) { expCount = expCount + subCount; } } // if (expCount > 0) { experimentELevel.put(exp.getName(), expCount); // } } return experimentELevel; } /** * * @param os objectStore * @return map exp-repository entries */ public static Map<String, Map<String, Long>> readExperimentFeatureExpressionLevels(ObjectStore os) { Map<String, Map<String, Long>> expELevels = new HashMap<String, Map<String, Long>>(); //TODO Map<Integer, Map<String, Long>> subELevels = getSubmissionFeatureExpressionLevelCounts(os); for (DisplayExperiment exp : getExperiments(os)) { for (Submission sub : exp.getSubmissions()) { Map <String, Long> subFeat = subELevels.get(sub.getdCCid()); if (subFeat != null) { // get the experiment feature map Map<String, Long> expFeat = 
expELevels.get(exp.getName()); if (expFeat == null) { expELevels.put(exp.getName(), subFeat); } else { for (String feat : subFeat.keySet()) { Long subCount = subFeat.get(feat); Long expCount = subCount; if (expFeat.get(feat) != null) { expCount = expCount + expFeat.get(feat); } expFeat.put(feat, expCount); expCount = Long.valueOf(0); } expELevels.put(exp.getName(), expFeat); } } } } return expELevels; } /** * Fetch a map from project name to experiment. * @param os the production ObjectStore * @return a map from project name to experiment */ public static Map<String, List<DisplayExperiment>> getProjectExperiments(ObjectStore os) { long startTime = System.currentTimeMillis(); Map<String, List<DisplayExperiment>> projectExperiments = new TreeMap<String, List<DisplayExperiment>>(); for (DisplayExperiment exp : getExperiments(os)) { List<DisplayExperiment> exps = projectExperiments.get(exp.getProjectName()); if (exps == null) { exps = new ArrayList<DisplayExperiment>(); projectExperiments.put(exp.getProjectName(), exps); } exps.add(exp); } long totalTime = System.currentTimeMillis() - startTime; LOG.info("Made project map: " + projectExperiments.size() + " took: " + totalTime + " ms."); return projectExperiments; } private static void readExperiments(ObjectStore os) { long startTime = System.currentTimeMillis(); // Map <String, Map<String, Long>> featureCounts = getExperimentFeatureCounts(os); Map <String, Map<String, Long>> featureCounts = getUniqueExperimentFeatureCounts(os); try { Query q = new Query(); QueryClass qcProject = new QueryClass(Project.class); QueryField qcName = new QueryField(qcProject, "name"); q.addFrom(qcProject); q.addToSelect(qcProject); QueryClass qcExperiment = new QueryClass(Experiment.class); q.addFrom(qcExperiment); q.addToSelect(qcExperiment); QueryCollectionReference projExperiments = new QueryCollectionReference(qcProject, "experiments"); ContainsConstraint cc = new ContainsConstraint(projExperiments, ConstraintOp.CONTAINS, qcExperiment); 
q.setConstraint(cc); q.addToOrderBy(qcName); Results results = os.execute(q); experimentCache = new HashMap<String, DisplayExperiment>(); Iterator i = results.iterator(); while (i.hasNext()) { ResultsRow row = (ResultsRow) i.next(); Project project = (Project) row.get(0); Experiment experiment = (Experiment) row.get(1); Map<String, Long> expFeatureCounts = featureCounts.get(experiment.getName()); DisplayExperiment displayExp = new DisplayExperiment(experiment, project, expFeatureCounts, os); experimentCache.put(displayExp.getName(), displayExp); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed experiment cache, took: " + timeTaken + "ms size = " + experimentCache.size()); } /** * The counts are duplicated in the method, see getUniqueExperimentFeatureCounts */ private static Map<String, Map<String, Long>> getExperimentFeatureCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); // NB: example of query (with group by) enwrapping a subquery that gets rids of // duplications Query q = new Query(); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryField qfName = new QueryField(qcExp, "name"); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcExp); q.addToSelect(qcExp); q.addToSelect(qcLsf); q.addToSelect(qfName); q.addToSelect(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); 
q.setConstraint(cs); q.setDistinct(true); Query superQ = new Query(); superQ.addFrom(q); QueryField superQfName = new QueryField(q, qfName); QueryField superQfClass = new QueryField(q, qfClass); superQ.addToSelect(superQfName); superQ.addToSelect(superQfClass); superQ.addToOrderBy(superQfName); superQ.addToOrderBy(superQfClass); superQ.addToGroupBy(superQfName); superQ.addToGroupBy(superQfClass); superQ.addToSelect(new QueryFunction()); superQ.setDistinct(false); Results results = os.execute(superQ); Map<String, Map<String, Long>> featureCounts = new LinkedHashMap<String, Map<String, Long>>(); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); String expName = (String) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); Map<String, Long> expFeatureCounts = featureCounts.get(expName); if (expFeatureCounts == null) { expFeatureCounts = new HashMap<String, Long>(); featureCounts.put(expName, expFeatureCounts); } expFeatureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Read experiment feature counts, took: " + timeTaken + "ms"); return featureCounts; } /** * Method equivalent to getExperimentFeatureCounts but return Unique counts * * @param os * @return Map<String: expName, Map<String: feature type, Long: count>> */ private static Map<String, Map<String, Long>> getUniqueExperimentFeatureCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); Query q = new Query(); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcChr = new QueryClass(Chromosome.class); QueryClass qcLoc = new QueryClass(Location.class); QueryField qfExpName = new QueryField(qcExp, "name"); QueryField qfFT = new QueryField(qcLsf, "class"); QueryField qfChrID = 
new QueryField(qcChr, "primaryIdentifier"); QueryField qfStart = new QueryField(qcLoc, "start"); QueryField qfEnd = new QueryField(qcLoc, "end"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcExp); q.addFrom(qcChr); q.addFrom(qcLoc); q.addToSelect(qfExpName); q.addToSelect(qfFT); q.addToSelect(qfChrID); q.addToSelect(qfStart); q.addToSelect(qfEnd); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryObjectReference chromosome = new QueryObjectReference(qcLsf, "chromosome"); ContainsConstraint ccChr = new ContainsConstraint(chromosome, ConstraintOp.CONTAINS, qcChr); cs.addConstraint(ccChr); QueryObjectReference chromosomeLocation = new QueryObjectReference(qcLsf, "chromosomeLocation"); ContainsConstraint ccChrLoc = new ContainsConstraint(chromosomeLocation, ConstraintOp.CONTAINS, qcLoc); cs.addConstraint(ccChrLoc); q.setConstraint(cs); q.setDistinct(true); Query superQ = new Query(); superQ.addFrom(q); QueryField superQfName = new QueryField(q, qfExpName); QueryField superQfFT = new QueryField(q, qfFT); superQ.addToSelect(superQfName); superQ.addToSelect(superQfFT); superQ.addToOrderBy(superQfName); superQ.addToOrderBy(superQfFT); superQ.addToGroupBy(superQfName); superQ.addToGroupBy(superQfFT); superQ.addToSelect(new QueryFunction()); superQ.setDistinct(false); Results results = os.execute(superQ); Map<String, Map<String, Long>> featureCounts = new LinkedHashMap<String, Map<String, Long>>(); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); String 
expName = (String) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); Map<String, Long> expFeatureCounts = featureCounts.get(expName); if (expFeatureCounts == null) { expFeatureCounts = new HashMap<String, Long>(); featureCounts.put(expName, expFeatureCounts); } expFeatureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Read experiment feature counts, took: " + timeTaken + "ms"); return featureCounts; } /** * Method to find the feature types and the count for each of them within an experiment * * @param expName name of an experiment * @param os * @return featureCounts Map<String: feature type, Long: feature count> */ private static Map<String, Long> getUniqueExperimentFeatureCountsByExpNameSlow( String expName, ObjectStore os) { long startTime = System.currentTimeMillis(); Query q = new Query(); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryField qfExpName = new QueryField(qcExp, "name"); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcExp); q.addToSelect(qfClass); q.addToSelect(qcLsf); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); SimpleConstraint scChr = new SimpleConstraint(qfExpName, ConstraintOp.EQUALS, new QueryValue(expName)); cs.addConstraint(scChr); QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); q.setConstraint(cs); q.setDistinct(true); Results results = os.execute(q); LinkedHashSet<String> 
ftClassSet = new LinkedHashSet<String>(); for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Class ftClass= (Class) row.get(0); ftClassSet.add(TypeUtil.unqualifiedName(ftClass.getName())); } LinkedHashMap<String, LinkedHashSet<LocatedSequenceFeature>> featureMap = new LinkedHashMap<String, LinkedHashSet<LocatedSequenceFeature>>(); for (String ftClassName : ftClassSet) { LinkedHashSet<LocatedSequenceFeature> featureSet = new LinkedHashSet<LocatedSequenceFeature>(); for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Class ftClass= (Class) row.get(0); LocatedSequenceFeature feature = (LocatedSequenceFeature) row.get(1); if (TypeUtil.unqualifiedName(ftClass.getName()).equals(ftClassName)) { featureSet.add(feature); } } featureMap.put(ftClassName, featureSet); } Map<String, Long> featureCounts = new LinkedHashMap<String, Long>(); for (Map.Entry<String, LinkedHashSet<LocatedSequenceFeature>> entry : featureMap .entrySet()) { Map<String, LocatedSequenceFeature> spanMap = new LinkedHashMap<String, LocatedSequenceFeature>(); for (LocatedSequenceFeature feature : entry.getValue()) { String spanString = feature.getChromosome() .getPrimaryIdentifier() + ":" + feature.getChromosomeLocation().getStart() + "-" + feature.getChromosomeLocation().getEnd(); spanMap.put(spanString, feature); } featureCounts.put(entry.getKey(), Long.valueOf(spanMap.size())); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Read experiment feature counts, took: " + timeTaken + "ms"); return featureCounts; } /** * Method equivalent to getUniqueExperimentFeatureCountsByExpNameSlow but much faster * * @param expName name of an experiment * @param os * @return featureCounts Map<String: feature type, Long: feature count> */ private static Map<String, Long> getUniqueExperimentFeatureCountsByExpNameFast( String expName, ObjectStore os) { long startTime = System.currentTimeMillis(); Query 
q = new Query(); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcChr = new QueryClass(Chromosome.class); QueryClass qcLoc = new QueryClass(Location.class); QueryField qfExpName = new QueryField(qcExp, "name"); QueryField qfFT = new QueryField(qcLsf, "class"); QueryField qfChrID = new QueryField(qcChr, "primaryIdentifier"); QueryField qfStart = new QueryField(qcLoc, "start"); QueryField qfEnd = new QueryField(qcLoc, "end"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcExp); q.addFrom(qcChr); q.addFrom(qcLoc); q.addToSelect(qfFT); q.addToSelect(qfChrID); q.addToSelect(qfStart); q.addToSelect(qfEnd); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); SimpleConstraint scExpName = new SimpleConstraint(qfExpName, ConstraintOp.EQUALS, new QueryValue(expName)); cs.addConstraint(scExpName); QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryObjectReference chromosome = new QueryObjectReference(qcLsf, "chromosome"); ContainsConstraint ccChr = new ContainsConstraint(chromosome, ConstraintOp.CONTAINS, qcChr); cs.addConstraint(ccChr); QueryObjectReference chromosomeLocation = new QueryObjectReference(qcLsf, "chromosomeLocation"); ContainsConstraint ccChrLoc = new ContainsConstraint(chromosomeLocation, ConstraintOp.CONTAINS, qcLoc); cs.addConstraint(ccChrLoc); q.setConstraint(cs); q.setDistinct(true); Query superQ = new Query(); superQ.addFrom(q); QueryField superQfFT = new QueryField(q, qfFT); superQ.addToSelect(superQfFT); superQ.addToOrderBy(superQfFT); 
superQ.addToGroupBy(superQfFT); superQ.addToSelect(new QueryFunction()); superQ.setDistinct(false); Results results = os.execute(superQ); Map<String, Long> featureCounts = new LinkedHashMap<String, Long>(); for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Class feat = (Class) row.get(0); Long count = (Long) row.get(1); featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Read experiment feature counts, took: " + timeTaken + "ms"); return featureCounts; } private static void readSubmissionFeatureCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); submissionFeatureCounts = new LinkedHashMap<Integer, Map<String, Long>>(); submissionIdCache = new HashMap<Integer, Integer>(); Query q = new Query(); q.setDistinct(false); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addToSelect(qcSub); q.addToSelect(qfClass); q.addToSelect(new QueryFunction()); q.addToGroupBy(qcSub); q.addToGroupBy(qfClass); q.addToOrderBy(qcSub); q.addToOrderBy(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Submission sub = (Submission) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); submissionIdCache.put(sub.getdCCid(), sub.getId()); Map<String, Long> featureCounts = submissionFeatureCounts.get(sub.getdCCid()); if (featureCounts == null) { 
featureCounts = new HashMap<String, Long>(); submissionFeatureCounts.put(sub.getdCCid(), featureCounts); } featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed submissionFeatureCounts cache, took: " + timeTaken + "ms size = " + submissionFeatureCounts.size()); } private static void readSubmissionFeatureExpressionLevelCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); submissionFeatureExpressionLevelCounts = new LinkedHashMap<Integer, Map<String, Long>>(); //submissionIdCache = new HashMap<Integer, Integer>(); Query q = new Query(); q.setDistinct(false); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcEL = new QueryClass(ExpressionLevel.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcEL); q.addToSelect(qcSub); q.addToSelect(qfClass); q.addToSelect(new QueryFunction()); q.addToGroupBy(qcSub); q.addToGroupBy(qfClass); q.addToOrderBy(qcSub); q.addToOrderBy(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryCollectionReference el = new QueryCollectionReference(qcLsf, "expressionLevels"); ContainsConstraint ccEl = new ContainsConstraint(el, ConstraintOp.CONTAINS, qcEL); cs.addConstraint(ccEl); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Submission sub = (Submission) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); //submissionIdCache.put(sub.getdCCid(), sub.getId()); Map<String, Long> featureCounts = 
submissionFeatureExpressionLevelCounts.get(sub.getdCCid()); if (featureCounts == null) { featureCounts = new HashMap<String, Long>(); submissionFeatureExpressionLevelCounts.put(sub.getdCCid(), featureCounts); } featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed submissionFeatureExpressionLevelCounts cache, took: " + timeTaken + "ms size = " + submissionFeatureExpressionLevelCounts.size() + "<->" + submissionFeatureCounts.size()); LOG.info("submissionFeatureELCounts " + submissionFeatureExpressionLevelCounts); } private static void readExperimentFeatureExpressionLevelCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); experimentFeatureExpressionLevelCounts = new LinkedHashMap<String, Map<String, Long>>(); //submissionIdCache = new HashMap<Integer, Integer>(); Query q = new Query(); q.setDistinct(false); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcEL = new QueryClass(ExpressionLevel.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcExp); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcEL); q.addToSelect(qcExp); q.addToSelect(qfClass); q.addToSelect(new QueryFunction()); q.addToGroupBy(qcExp); q.addToGroupBy(qfClass); q.addToOrderBy(qcExp); q.addToOrderBy(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryCollectionReference el = new QueryCollectionReference(qcLsf, 
"expressionLevels"); ContainsConstraint ccEl = new ContainsConstraint(el, ConstraintOp.CONTAINS, qcEL); cs.addConstraint(ccEl); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Experiment exp = (Experiment) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); //submissionIdCache.put(sub.getdCCid(), sub.getId()); Map<String, Long> featureCounts = experimentFeatureExpressionLevelCounts.get(exp.getName()); if (featureCounts == null) { featureCounts = new HashMap<String, Long>(); experimentFeatureExpressionLevelCounts.put(exp.getName(), featureCounts); } featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed experimentFeatureExpressionLevelCounts cache, took: " + timeTaken + "ms size = " + experimentFeatureExpressionLevelCounts.size()); LOG.info("experimentFeatureELCounts " + experimentFeatureExpressionLevelCounts); } private static void readSubmissionCollections(ObjectStore os) { // long startTime = System.currentTimeMillis(); try { Query q = new Query(); QueryClass qcSubmission = new QueryClass(Submission.class); QueryField qfDCCid = new QueryField(qcSubmission, "DCCid"); q.addFrom(qcSubmission); q.addToSelect(qcSubmission); q.addToOrderBy(qfDCCid); submissionFilesCache = new HashMap<Integer, Set<ResultFile>>(); submissionExpressionLevelCounts = new HashMap<Integer, Integer>(); Results results = os.executeSingleton(q); // for submission, get result files and expression level count Iterator i = results.iterator(); while (i.hasNext()) { Submission sub = (Submission) i.next(); Set<ResultFile> files = sub.getResultFiles(); submissionFilesCache.put(sub.getdCCid(), files); Set<ExpressionLevel> el = sub.getExpressionLevels(); submissionExpressionLevelCounts.put(sub.getdCCid(), el.size()); } } catch (Exception err) 
{ err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed submission collections caches, took: " + timeTaken + "ms size: files = " + submissionFilesCache.size() + ", expression levels = " + submissionExpressionLevelCounts.size()); } private static void readSubmissionFiles(ObjectStore os) { // long startTime = System.currentTimeMillis(); try { Query q = new Query(); QueryClass qcSubmission = new QueryClass(Submission.class); QueryField qfDCCid = new QueryField(qcSubmission, "DCCid"); q.addFrom(qcSubmission); q.addToSelect(qcSubmission); q.addToOrderBy(qfDCCid); submissionFilesCache = new HashMap<Integer, Set<ResultFile>>(); Results results = os.executeSingleton(q); // for each project, get its labs Iterator i = results.iterator(); while (i.hasNext()) { Submission sub = (Submission) i.next(); Set<ResultFile> files = sub.getResultFiles(); submissionFilesCache.put(sub.getdCCid(), files); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed file names cache, took: " + timeTaken + "ms size = " + submissionFilesCache.size()); } private static void readSubmissionLocatedFeature(ObjectStore os) { long startTime = System.currentTimeMillis(); submissionLocatedFeatureTypes = new LinkedHashMap<Integer, List<String>>(); Query q = new Query(); q.setDistinct(true); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcLoc = new QueryClass(Location.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcLoc); q.addToSelect(qcSub); q.addToSelect(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryObjectReference location 
= new QueryObjectReference(qcLsf, "chromosomeLocation"); ContainsConstraint ccLocs = new ContainsConstraint(location, ConstraintOp.CONTAINS, qcLoc); cs.addConstraint(ccLocs); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Submission sub = (Submission) row.get(0); Class feat = (Class) row.get(1); addToMap(submissionLocatedFeatureTypes, sub.getdCCid(), TypeUtil.unqualifiedName(feat.getName())); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed located features cache, took: " + timeTaken + "ms size = " + submissionLocatedFeatureTypes.size()); } /** * Fetch reposited (GEO/SRA/AE..) entries per submission. * @param os the production objectStore * @return map */ public static synchronized Map<Integer, List<String[]>> getRepositoryEntries(ObjectStore os) { if (submissionRepositedCache == null) { readSubmissionRepositoryEntries(os); } return submissionRepositedCache; } private static void readSubmissionRepositoryEntries(ObjectStore os) { // long startTime = System.currentTimeMillis(); try { Query q = new Query(); QueryClass qcSubmission = new QueryClass(Submission.class); QueryField qfDCCid = new QueryField(qcSubmission, "DCCid"); q.addFrom(qcSubmission); q.addToSelect(qfDCCid); QueryClass qcRepositoryEntry = new QueryClass(DatabaseRecord.class); QueryField qfDatabase = new QueryField(qcRepositoryEntry, "database"); QueryField qfAccession = new QueryField(qcRepositoryEntry, "accession"); QueryField qfUrl = new QueryField(qcRepositoryEntry, "url"); q.addFrom(qcRepositoryEntry); q.addToSelect(qfDatabase); q.addToSelect(qfAccession); q.addToSelect(qfUrl); // join the tables QueryCollectionReference ref1 = new QueryCollectionReference(qcSubmission, "databaseRecords"); ContainsConstraint cc = new ContainsConstraint(ref1, ConstraintOp.CONTAINS, qcRepositoryEntry); q.setConstraint(cc); 
q.addToOrderBy(qfDCCid); q.addToOrderBy(qfDatabase); Results results = os.execute(q); submissionRepositedCache = new HashMap<Integer, List<String[]>>(); Integer counter = 0; Integer prevSub = new Integer(-1); List<String[]> subRep = new ArrayList<String[]>(); Iterator i = results.iterator(); while (i.hasNext()) { ResultsRow row = (ResultsRow) i.next(); counter++; Integer dccId = (Integer) row.get(0); String db = (String) row.get(1); String acc = (String) row.get(2); String url = (String) row.get(3); String[] thisRecord = {db, acc, url}; if (!dccId.equals(prevSub) || counter.equals(results.size())) { if (prevSub > 0) { if (counter.equals(results.size())) { prevSub = dccId; subRep.add(thisRecord); } List<String[]> subRepIn = new ArrayList<String[]>(); subRepIn.addAll(subRep); submissionRepositedCache.put(prevSub, subRepIn); subRep.clear(); } prevSub = dccId; } subRep.add(thisRecord); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed Repository entries cache, took: " + timeTaken + "ms size = " + submissionRepositedCache.size()); } /** * adds an element to a list which is the value of a map * @param m the map (<String, List<String>>) * @param key the key for the map * @param value the list */ private static void addToMap(Map<Integer, List<String>> m, Integer key, String value) { List<String> ids = new ArrayList<String>(); if (m.containsKey(key)) { ids = m.get(key); } if (!ids.contains(value)) { ids.add(value); m.put(key, ids); } } /** * Method to fill the cached map of submissions (ddcId) to list of * GBrowse tracks * */ private static void readGBrowseTracks() { Runnable r = new Runnable() { public void run() { threadedReadGBrowseTracks(); } }; Thread t = new Thread(r); t.start(); } private static void threadedReadGBrowseTracks() { long startTime = System.currentTimeMillis(); Map<Integer, List<GBrowseTrack>> tracks = new HashMap<Integer, List<GBrowseTrack>>(); Map<Integer, List<GBrowseTrack>> 
flyTracks = null; Map<Integer, List<GBrowseTrack>> wormTracks = null; try { flyTracks = GBrowseParser.readTracks("fly"); wormTracks = GBrowseParser.readTracks("worm"); } catch (Exception e) { LOG.error(e); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed GBrowse tracks cache, took: " + timeTaken + "ms size = " + tracks.size()); if (flyTracks != null && wormTracks != null) { tracks.putAll(flyTracks); tracks.putAll(wormTracks); setGBrowseTracks(tracks); } } /** * This method get the feature descriptions from a property file. * * @return the map feature/description */ private static Map<String, String> readFeatTypeDescription(ServletContext servletContext) { long startTime = System.currentTimeMillis(); featDescriptionCache = new HashMap<String, String>(); Properties props = new Properties(); InputStream is = servletContext.getResourceAsStream("/WEB-INF/featureTypeDescr.properties"); if (is == null) { LOG.info(NO_FEAT_DESCR_LOG); } else { try { props.load(is); } catch (IOException e) { // TODO Auto-generated catch block //throw new IllegalAccessException("Error getting featureTypeDescr.properties file", // e.printStackTrace()); e.printStackTrace(); } Enumeration en = props.keys(); // while (props.keys().hasMoreElements()) { while (en.hasMoreElements()) { String expFeat = (String) en.nextElement(); String descr = props.getProperty(expFeat); featDescriptionCache.put(expFeat, descr); } } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed feature description cache, took: " + timeTaken + "ms size = " + featDescriptionCache.size()); return featDescriptionCache; } }
modmine/webapp/src/org/modmine/web/MetadataCache.java
package org.modmine.web; /* * Copyright (C) 2002-2010 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeMap; import javax.servlet.ServletContext; import org.apache.log4j.Logger; import org.intermine.model.bio.DatabaseRecord; import org.intermine.model.bio.Experiment; import org.intermine.model.bio.ExpressionLevel; import org.intermine.model.bio.LocatedSequenceFeature; import org.intermine.model.bio.Location; import org.intermine.model.bio.Project; import org.intermine.model.bio.ResultFile; import org.intermine.model.bio.Submission; import org.intermine.objectstore.ObjectStore; import org.intermine.objectstore.ObjectStoreException; import org.intermine.objectstore.query.ConstraintOp; import org.intermine.objectstore.query.ConstraintSet; import org.intermine.objectstore.query.ContainsConstraint; import org.intermine.objectstore.query.Query; import org.intermine.objectstore.query.QueryClass; import org.intermine.objectstore.query.QueryCollectionReference; import org.intermine.objectstore.query.QueryField; import org.intermine.objectstore.query.QueryFunction; import org.intermine.objectstore.query.QueryObjectReference; import org.intermine.objectstore.query.Results; import org.intermine.objectstore.query.ResultsRow; import org.intermine.util.TypeUtil; import org.modmine.web.GBrowseParser.GBrowseTrack; /** * Read modENCODE metadata into objects that simplify display code, cache results. 
* @author Richard Smith * */ public class MetadataCache { private static final Logger LOG = Logger.getLogger(MetadataCache.class); private static final String NO_FEAT_DESCR_LOG = "Unable to find /WEB-INF/featureTypeDescr.properties, no feature descriptions in webapp!"; private static Map<String, DisplayExperiment> experimentCache = null; private static Map<Integer, Map<String, Long>> submissionFeatureCounts = null; private static Map<Integer, Map<String, Long>> submissionFeatureExpressionLevelCounts = null; private static Map<String, Map<String, Long>> experimentFeatureExpressionLevelCounts = null; private static Map<Integer, Integer> submissionExpressionLevelCounts = null; private static Map<Integer, Integer> submissionIdCache = null; private static Map<Integer, List<GBrowseTrack>> submissionTracksCache = null; private static Map<Integer, Set<ResultFile>> submissionFilesCache = null; private static Map<Integer, Integer> filesPerSubmissionCache = null; private static Map<Integer, List<String>> submissionLocatedFeatureTypes = null; private static Map<Integer, List<String>> submissionUnlocatedFeatureTypes = null; private static Map<Integer, List<String[]>> submissionRepositedCache = null; private static Map<String, String> featDescriptionCache = null; private static long lastTrackCacheRefresh = 0; private static final long TWO_HOUR = 7200000; /** * Fetch experiment details for display. * @param os the production objectStore * @return a list of experiments */ public static synchronized List<DisplayExperiment> getExperiments(ObjectStore os) { if (experimentCache == null) { readExperiments(os); } return new ArrayList<DisplayExperiment>(experimentCache.values()); } /** * Fetch GBrowse tracks per submission fpr display. This updates automatically from the GBrowse * server and refreshes periodically (according to threshold). 
When refreshing another process * is spawned which will update tracks when finished, if GBrowse can't be accessed the current * list of tracks of tracks are preserved. * @return map from submission id to list of GBrowse tracks */ public static synchronized Map<Integer, List<GBrowseTrack>> getGBrowseTracks() { fetchGBrowseTracks(); while (submissionTracksCache == null) { try { MetadataCache.class.wait(); } catch (InterruptedException e) { } } return submissionTracksCache; } /** * Fetch unlocated feature types per submission. * @param os the production objectStore * @return map of unlocated feature types */ public static synchronized Map<Integer, List<String>> getLocatedFeatureTypes(ObjectStore os) { if (submissionLocatedFeatureTypes == null) { readSubmissionLocatedFeature(os); } return submissionLocatedFeatureTypes; } /** * Fetch unlocated feature types per submission. * @param os the production objectStore * @return map of unlocated feature types */ public static synchronized Map<Integer, List<String>> getUnlocatedFeatureTypes(ObjectStore os) { if (submissionUnlocatedFeatureTypes == null) { readUnlocatedFeatureTypes(os); } return submissionUnlocatedFeatureTypes; } /** * Fetch unlocated feature types per submission. * @param os the production objectStore * @param dccId the dccId * @return map of unlocated feature types */ public static synchronized Set<String> getUnlocatedFeatureTypesBySubId(ObjectStore os, Integer dccId) { if (submissionUnlocatedFeatureTypes == null) { readUnlocatedFeatureTypes(os); } Set<String> uf = new HashSet<String>(submissionUnlocatedFeatureTypes.get(dccId)); return uf; } /** * Fetch the collection of ResultFiles per submission. * @param os the production objectStore * @return map */ public static synchronized Map<Integer, Set<ResultFile>> getSubmissionFiles(ObjectStore os) { if (submissionFilesCache == null) { readSubmissionCollections(os); } return submissionFilesCache; } /** * Fetch the collection of Expression Level Counts per submission. 
* @param os the production objectStore * @return map */ public static synchronized Map<Integer, Integer> getSubmissionExpressionLevelCounts(ObjectStore os) { if (submissionExpressionLevelCounts == null) { readSubmissionCollections(os); } return submissionExpressionLevelCounts; } /** * Fetch the collection of Expression Level Counts per submission. * @param os the production objectStore * @return map */ public static synchronized Map<Integer, Map<String, Long>> getSubmissionFeatureExpressionLevelCounts(ObjectStore os) { if (submissionFeatureExpressionLevelCounts == null) { readSubmissionFeatureExpressionLevelCounts(os); } return submissionFeatureExpressionLevelCounts; } /** * Fetch the collection of Expression Level Counts per submission. * @param os the production objectStore * @return map */ public static synchronized Map<String, Map<String, Long>> getExperimentFeatureExpressionLevelCounts(ObjectStore os) { if (experimentFeatureExpressionLevelCounts == null) { readExperimentFeatureExpressionLevelCounts(os); } return experimentFeatureExpressionLevelCounts; } /** * Fetch number of input/output file per submission. * @param os the production objectStore * @return map */ public static synchronized Map<Integer, Integer> getFilesPerSubmission(ObjectStore os) { if (submissionFilesCache == null) { readSubmissionCollections(os); } filesPerSubmissionCache = new HashMap<Integer, Integer>(); Iterator<Integer> dccId = submissionFilesCache.keySet().iterator(); while (dccId.hasNext()) { Integer thisSub = dccId.next(); Integer nrFiles = submissionFilesCache.get(thisSub).size(); filesPerSubmissionCache.put(thisSub, nrFiles); } return filesPerSubmissionCache; } /** * Fetch a list of file names for a given submission. 
* @param os the objectStore * @param dccId the modENCODE submission id * @return a list of file names */ public static synchronized List<ResultFile> getFilesByDccId(ObjectStore os, Integer dccId) { if (submissionFilesCache == null) { readSubmissionCollections(os); } return new ArrayList<ResultFile>(submissionFilesCache.get(dccId)); } /** * Fetch a list of GBrowse tracks for a given submission. * @param dccId the modENCODE submission id * @return a list of file names */ public static synchronized List<GBrowseTrack> getTracksByDccId(Integer dccId) { Map<Integer, List<GBrowseTrack>> tracks = getGBrowseTracks(); if (tracks.get(dccId) != null) { return new ArrayList<GBrowseTrack>(tracks.get(dccId)); } else { return new ArrayList<GBrowseTrack>(); } } /** * Fetch a list of file names for a given submission. * @param servletContext the context * @return a list of file names */ public static synchronized Map<String, String> getFeatTypeDescription(ServletContext servletContext) { if (featDescriptionCache == null) { readFeatTypeDescription(servletContext); } return featDescriptionCache; } /** * Fetch a map from feature type to count for a given submission. * @param os the objectStore * @param dccId the modENCODE submission id * @return a map from feature type to count */ public static synchronized Map<String, Long> getSubmissionFeatureCounts(ObjectStore os, Integer dccId) { if (submissionFeatureCounts == null) { readSubmissionFeatureCounts(os); } return submissionFeatureCounts.get(dccId); } // /** // * Fetch the number of expression levels for a given submission. 
// * @param os the objectStore // * @param dccId the modENCODE submission id // * @return a map from submission to count // */ // public static synchronized Integer getSubmissionExpressionLevelCount(ObjectStore os, // Integer dccId) { // if (submissionExpressionLevelCounts == null) { // getSubmissionExpressionLevelCounts(os); // } // return submissionExpressionLevelCounts.get(dccId); // } // /** // * Fetch the number of expression levels for a given submission. // * @param os the objectStore // * @return a map from submission to count // */ // public static synchronized Map<String, Map<String, Long>> // getExperimentFeatureExpressionLevels(ObjectStore os) { // if (experimentFeatureExpressionLevelCounts == null) { // readExperimentFeatureExpressionLevelCounts(os); // } // return experimentFeatureExpressionLevelCounts; // } /** * Fetch a submission by the modENCODE submission ids * @param os the objectStore * @param dccId the modENCODE submission id * @return the requested submission * @throws ObjectStoreException if error reading database */ public static synchronized Submission getSubmissionByDccId(ObjectStore os, Integer dccId) throws ObjectStoreException { if (submissionIdCache == null) { readSubmissionFeatureCounts(os); } return (Submission) os.getObjectById(submissionIdCache.get(dccId)); } /** * Get experiment information by name * @param os the objectStore * @param name of the experiment to fetch * @return details of the experiment * @throws ObjectStoreException if error reading database */ public static synchronized DisplayExperiment getExperimentByName(ObjectStore os, String name) throws ObjectStoreException { if (experimentCache == null) { readExperiments(os); } return experimentCache.get(name); } //====================== private static void fetchGBrowseTracks() { long timeSinceLastRefresh = System.currentTimeMillis() - lastTrackCacheRefresh; if (timeSinceLastRefresh > TWO_HOUR) { readGBrowseTracks(); lastTrackCacheRefresh = System.currentTimeMillis(); } } 
/** * Set the map of GBrowse tracks. * * @param tracks map of dccId:GBrowse tracks */ public static synchronized void setGBrowseTracks(Map<Integer, List<GBrowseTrack>> tracks) { MetadataCache.class.notifyAll(); submissionTracksCache = tracks; } /** * Method to obtain the map of unlocated feature types by submission id * * @param os the objectStore * @return submissionUnlocatedFeatureTypes */ private static Map<Integer, List<String>> readUnlocatedFeatureTypes(ObjectStore os) { long startTime = System.currentTimeMillis(); try { if (submissionUnlocatedFeatureTypes != null) { return submissionUnlocatedFeatureTypes; } submissionUnlocatedFeatureTypes = new HashMap<Integer, List<String>>(); if (submissionLocatedFeatureTypes == null) { readSubmissionLocatedFeature(os); } if (submissionFeatureCounts == null) { readSubmissionFeatureCounts(os); } for (Integer subId : submissionFeatureCounts.keySet()) { Set<String> allFeatures = submissionFeatureCounts.get(subId).keySet(); Set<String> difference = new HashSet<String>(allFeatures); if (submissionLocatedFeatureTypes.get(subId) != null) { difference.removeAll(submissionLocatedFeatureTypes.get(subId)); } if (!difference.isEmpty()) { List <String> thisUnlocated = new ArrayList<String>(); for (String fType : difference) { thisUnlocated.add(fType); } submissionUnlocatedFeatureTypes.put(subId, thisUnlocated); } } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed unlocated feature cache, took: " + timeTaken + "ms size = " + submissionUnlocatedFeatureTypes.size()); return submissionUnlocatedFeatureTypes; } /** * * @param os objectStore * @return map exp-tracks */ public static Map<String, List<GBrowseTrack>> getExperimentGBrowseTracks(ObjectStore os) { Map<String, List<GBrowseTrack>> tracks = new HashMap<String, List<GBrowseTrack>>(); Map<Integer, List<GBrowseTrack>> subTracksMap = getGBrowseTracks(); for (DisplayExperiment exp : getExperiments(os)) { 
List<GBrowseTrack> expTracks = new ArrayList<GBrowseTrack>(); tracks.put(exp.getName(), expTracks); for (Submission sub : exp.getSubmissions()) { if (subTracksMap.get(sub.getdCCid()) != null){ List<GBrowseTrack> subTracks = subTracksMap.get(sub.getdCCid()); if (subTracks != null) { // check so it is unique // expTracks.addAll(subTracks); addToList(expTracks, subTracks); } else { continue; } } } } return tracks; } /** * adds the elements of a list i to a list l only if they are not yet * there * @param l the receiving list * @param i the donating list */ private static void addToList(List<GBrowseTrack> l, List<GBrowseTrack> i) { Iterator <GBrowseTrack> it = i.iterator(); while (it.hasNext()) { GBrowseTrack thisId = it.next(); if (!l.contains(thisId)) { l.add(thisId); } } } /** * * @param os objectStore * @return map exp-repository entries */ public static Map<String, Set<String[]>> getExperimentRepositoryEntries(ObjectStore os) { Map<String, Set<String[]>> reposited = new HashMap<String, Set<String[]>>(); Map<Integer, List<String[]>> subRepositedMap = getRepositoryEntries(os); for (DisplayExperiment exp : getExperiments(os)) { Set<String[]> expReps = new HashSet<String[]>(); for (Submission sub : exp.getSubmissions()) { List<String[]> subReps = subRepositedMap.get(sub.getdCCid()); if (subReps != null) { expReps.addAll(subReps); } } // for each experiment, we don't to count twice the same repository // entry produced by 2 different submissions. 
Set<String[]> expRepsCleaned = removeDuplications(expReps); reposited.put(exp.getName(), expRepsCleaned); } return reposited; } private static Set<String[]> removeDuplications(Set<String[]> expReps) { // removing the same repository entry coming from different submissions // in the given experiment Set<String> db = new HashSet<String>(); Set<String> acc = new HashSet<String>(); Set<String[]> dup = new HashSet<String[]>(); for (String[] s : expReps) { if (db.contains(s[0]) && acc.contains(s[1])) { // we don't remove place holders if (!s[1].startsWith("To be")) { dup.add(s); } } db.add(s[0]); acc.add(s[1]); } // do the difference between sets and return it Set<String[]> uniques = new HashSet<String[]>(expReps); uniques.removeAll(dup); return uniques; } /** * * @param os objectStore * @return map exp-repository entries */ public static Map<String, Integer> getExperimentExpressionLevels(ObjectStore os) { Map<String, Integer> experimentELevel = new HashMap<String, Integer>(); Map<Integer, Integer> subELevelMap = getSubmissionExpressionLevelCounts(os); for (DisplayExperiment exp : getExperiments(os)) { Integer expCount = 0; for (Submission sub : exp.getSubmissions()) { Integer subCount = subELevelMap.get(sub.getdCCid()); if (subCount != null) { expCount = expCount + subCount; } } // if (expCount > 0) { experimentELevel.put(exp.getName(), expCount); // } } return experimentELevel; } /** * * @param os objectStore * @return map exp-repository entries */ public static Map<String, Map<String, Long>> readExperimentFeatureExpressionLevels(ObjectStore os) { Map<String, Map<String, Long>> expELevels = new HashMap<String, Map<String, Long>>(); //TODO Map<Integer, Map<String, Long>> subELevels = getSubmissionFeatureExpressionLevelCounts(os); for (DisplayExperiment exp : getExperiments(os)) { for (Submission sub : exp.getSubmissions()) { Map <String, Long> subFeat = subELevels.get(sub.getdCCid()); if (subFeat != null) { // get the experiment feature map Map<String, Long> expFeat = 
expELevels.get(exp.getName()); if (expFeat == null) { expELevels.put(exp.getName(), subFeat); } else { for (String feat : subFeat.keySet()) { Long subCount = subFeat.get(feat); Long expCount = subCount; if (expFeat.get(feat) != null) { expCount = expCount + expFeat.get(feat); } expFeat.put(feat, expCount); expCount = Long.valueOf(0); } expELevels.put(exp.getName(), expFeat); } } } } return expELevels; } /** * Fetch a map from project name to experiment. * @param os the production ObjectStore * @return a map from project name to experiment */ public static Map<String, List<DisplayExperiment>> getProjectExperiments(ObjectStore os) { long startTime = System.currentTimeMillis(); Map<String, List<DisplayExperiment>> projectExperiments = new TreeMap<String, List<DisplayExperiment>>(); for (DisplayExperiment exp : getExperiments(os)) { List<DisplayExperiment> exps = projectExperiments.get(exp.getProjectName()); if (exps == null) { exps = new ArrayList<DisplayExperiment>(); projectExperiments.put(exp.getProjectName(), exps); } exps.add(exp); } long totalTime = System.currentTimeMillis() - startTime; LOG.info("Made project map: " + projectExperiments.size() + " took: " + totalTime + " ms."); return projectExperiments; } private static void readExperiments(ObjectStore os) { long startTime = System.currentTimeMillis(); Map <String, Map<String, Long>> featureCounts = getExperimentFeatureCounts(os); try { Query q = new Query(); QueryClass qcProject = new QueryClass(Project.class); QueryField qcName = new QueryField(qcProject, "name"); q.addFrom(qcProject); q.addToSelect(qcProject); QueryClass qcExperiment = new QueryClass(Experiment.class); q.addFrom(qcExperiment); q.addToSelect(qcExperiment); QueryCollectionReference projExperiments = new QueryCollectionReference(qcProject, "experiments"); ContainsConstraint cc = new ContainsConstraint(projExperiments, ConstraintOp.CONTAINS, qcExperiment); q.setConstraint(cc); q.addToOrderBy(qcName); Results results = os.execute(q); 
experimentCache = new HashMap<String, DisplayExperiment>(); Iterator i = results.iterator(); while (i.hasNext()) { ResultsRow row = (ResultsRow) i.next(); Project project = (Project) row.get(0); Experiment experiment = (Experiment) row.get(1); Map<String, Long> expFeatureCounts = featureCounts.get(experiment.getName()); DisplayExperiment displayExp = new DisplayExperiment(experiment, project, expFeatureCounts, os); experimentCache.put(displayExp.getName(), displayExp); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed experiment cache, took: " + timeTaken + "ms size = " + experimentCache.size()); } private static Map<String, Map<String, Long>> getExperimentFeatureCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); // NB: example of query (with group by) enwrapping a subquery that gets rids of // duplications Query q = new Query(); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryField qfName = new QueryField(qcExp, "name"); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcExp); q.addToSelect(qcExp); q.addToSelect(qcLsf); q.addToSelect(qfName); q.addToSelect(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); q.setConstraint(cs); q.setDistinct(true); Query superQ = new Query(); superQ.addFrom(q); QueryField superQfName = new QueryField(q, qfName); QueryField superQfClass = 
new QueryField(q, qfClass); superQ.addToSelect(superQfName); superQ.addToSelect(superQfClass); superQ.addToOrderBy(superQfName); superQ.addToOrderBy(superQfClass); superQ.addToGroupBy(superQfName); superQ.addToGroupBy(superQfClass); superQ.addToSelect(new QueryFunction()); superQ.setDistinct(false); Results results = os.execute(superQ); Map<String, Map<String, Long>> featureCounts = new LinkedHashMap<String, Map<String, Long>>(); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); String expName = (String) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); Map<String, Long> expFeatureCounts = featureCounts.get(expName); if (expFeatureCounts == null) { expFeatureCounts = new HashMap<String, Long>(); featureCounts.put(expName, expFeatureCounts); } expFeatureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Read experiment feature counts, took: " + timeTaken + "ms"); return featureCounts; } private static void readSubmissionFeatureCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); submissionFeatureCounts = new LinkedHashMap<Integer, Map<String, Long>>(); submissionIdCache = new HashMap<Integer, Integer>(); Query q = new Query(); q.setDistinct(false); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addToSelect(qcSub); q.addToSelect(qfClass); q.addToSelect(new QueryFunction()); q.addToGroupBy(qcSub); q.addToGroupBy(qfClass); q.addToOrderBy(qcSub); q.addToOrderBy(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, 
qcLsf); cs.addConstraint(ccFeats); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Submission sub = (Submission) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); submissionIdCache.put(sub.getdCCid(), sub.getId()); Map<String, Long> featureCounts = submissionFeatureCounts.get(sub.getdCCid()); if (featureCounts == null) { featureCounts = new HashMap<String, Long>(); submissionFeatureCounts.put(sub.getdCCid(), featureCounts); } featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed submissionFeatureCounts cache, took: " + timeTaken + "ms size = " + submissionFeatureCounts.size()); } private static void readSubmissionFeatureExpressionLevelCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); submissionFeatureExpressionLevelCounts = new LinkedHashMap<Integer, Map<String, Long>>(); //submissionIdCache = new HashMap<Integer, Integer>(); Query q = new Query(); q.setDistinct(false); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcEL = new QueryClass(ExpressionLevel.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcEL); q.addToSelect(qcSub); q.addToSelect(qfClass); q.addToSelect(new QueryFunction()); q.addToGroupBy(qcSub); q.addToGroupBy(qfClass); q.addToOrderBy(qcSub); q.addToOrderBy(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryCollectionReference el = new QueryCollectionReference(qcLsf, "expressionLevels"); ContainsConstraint 
ccEl = new ContainsConstraint(el, ConstraintOp.CONTAINS, qcEL); cs.addConstraint(ccEl); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Submission sub = (Submission) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); //submissionIdCache.put(sub.getdCCid(), sub.getId()); Map<String, Long> featureCounts = submissionFeatureExpressionLevelCounts.get(sub.getdCCid()); if (featureCounts == null) { featureCounts = new HashMap<String, Long>(); submissionFeatureExpressionLevelCounts.put(sub.getdCCid(), featureCounts); } featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed submissionFeatureExpressionLevelCounts cache, took: " + timeTaken + "ms size = " + submissionFeatureExpressionLevelCounts.size() + "<->" + submissionFeatureCounts.size()); LOG.info("submissionFeatureELCounts " + submissionFeatureExpressionLevelCounts); } private static void readExperimentFeatureExpressionLevelCounts(ObjectStore os) { long startTime = System.currentTimeMillis(); experimentFeatureExpressionLevelCounts = new LinkedHashMap<String, Map<String, Long>>(); //submissionIdCache = new HashMap<Integer, Integer>(); Query q = new Query(); q.setDistinct(false); QueryClass qcExp = new QueryClass(Experiment.class); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new QueryClass(LocatedSequenceFeature.class); QueryClass qcEL = new QueryClass(ExpressionLevel.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcExp); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcEL); q.addToSelect(qcExp); q.addToSelect(qfClass); q.addToSelect(new QueryFunction()); q.addToGroupBy(qcExp); q.addToGroupBy(qfClass); q.addToOrderBy(qcExp); q.addToOrderBy(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); 
QueryCollectionReference submissions = new QueryCollectionReference(qcExp, "submissions"); ContainsConstraint ccSubs = new ContainsConstraint(submissions, ConstraintOp.CONTAINS, qcSub); cs.addConstraint(ccSubs); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryCollectionReference el = new QueryCollectionReference(qcLsf, "expressionLevels"); ContainsConstraint ccEl = new ContainsConstraint(el, ConstraintOp.CONTAINS, qcEL); cs.addConstraint(ccEl); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Experiment exp = (Experiment) row.get(0); Class feat = (Class) row.get(1); Long count = (Long) row.get(2); //submissionIdCache.put(sub.getdCCid(), sub.getId()); Map<String, Long> featureCounts = experimentFeatureExpressionLevelCounts.get(exp.getName()); if (featureCounts == null) { featureCounts = new HashMap<String, Long>(); experimentFeatureExpressionLevelCounts.put(exp.getName(), featureCounts); } featureCounts.put(TypeUtil.unqualifiedName(feat.getName()), count); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed experimentFeatureExpressionLevelCounts cache, took: " + timeTaken + "ms size = " + experimentFeatureExpressionLevelCounts.size()); LOG.info("experimentFeatureELCounts " + experimentFeatureExpressionLevelCounts); } private static void readSubmissionCollections(ObjectStore os) { // long startTime = System.currentTimeMillis(); try { Query q = new Query(); QueryClass qcSubmission = new QueryClass(Submission.class); QueryField qfDCCid = new QueryField(qcSubmission, "DCCid"); q.addFrom(qcSubmission); q.addToSelect(qcSubmission); q.addToOrderBy(qfDCCid); submissionFilesCache = new HashMap<Integer, Set<ResultFile>>(); 
submissionExpressionLevelCounts = new HashMap<Integer, Integer>(); Results results = os.executeSingleton(q); // for submission, get result files and expression level count Iterator i = results.iterator(); while (i.hasNext()) { Submission sub = (Submission) i.next(); Set<ResultFile> files = sub.getResultFiles(); submissionFilesCache.put(sub.getdCCid(), files); Set<ExpressionLevel> el = sub.getExpressionLevels(); submissionExpressionLevelCounts.put(sub.getdCCid(), el.size()); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed submission collections caches, took: " + timeTaken + "ms size: files = " + submissionFilesCache.size() + ", expression levels = " + submissionExpressionLevelCounts.size()); } private static void readSubmissionFiles(ObjectStore os) { // long startTime = System.currentTimeMillis(); try { Query q = new Query(); QueryClass qcSubmission = new QueryClass(Submission.class); QueryField qfDCCid = new QueryField(qcSubmission, "DCCid"); q.addFrom(qcSubmission); q.addToSelect(qcSubmission); q.addToOrderBy(qfDCCid); submissionFilesCache = new HashMap<Integer, Set<ResultFile>>(); Results results = os.executeSingleton(q); // for each project, get its labs Iterator i = results.iterator(); while (i.hasNext()) { Submission sub = (Submission) i.next(); Set<ResultFile> files = sub.getResultFiles(); submissionFilesCache.put(sub.getdCCid(), files); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed file names cache, took: " + timeTaken + "ms size = " + submissionFilesCache.size()); } private static void readSubmissionLocatedFeature(ObjectStore os) { long startTime = System.currentTimeMillis(); submissionLocatedFeatureTypes = new LinkedHashMap<Integer, List<String>>(); Query q = new Query(); q.setDistinct(true); QueryClass qcSub = new QueryClass(Submission.class); QueryClass qcLsf = new 
QueryClass(LocatedSequenceFeature.class); QueryClass qcLoc = new QueryClass(Location.class); QueryField qfClass = new QueryField(qcLsf, "class"); q.addFrom(qcSub); q.addFrom(qcLsf); q.addFrom(qcLoc); q.addToSelect(qcSub); q.addToSelect(qfClass); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); QueryCollectionReference features = new QueryCollectionReference(qcSub, "features"); ContainsConstraint ccFeats = new ContainsConstraint(features, ConstraintOp.CONTAINS, qcLsf); cs.addConstraint(ccFeats); QueryObjectReference location = new QueryObjectReference(qcLsf, "chromosomeLocation"); ContainsConstraint ccLocs = new ContainsConstraint(location, ConstraintOp.CONTAINS, qcLoc); cs.addConstraint(ccLocs); q.setConstraint(cs); Results results = os.execute(q); // for each classes set the values for jsp for (Iterator<ResultsRow> iter = results.iterator(); iter.hasNext(); ) { ResultsRow row = iter.next(); Submission sub = (Submission) row.get(0); Class feat = (Class) row.get(1); addToMap(submissionLocatedFeatureTypes, sub.getdCCid(), TypeUtil.unqualifiedName(feat.getName())); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed located features cache, took: " + timeTaken + "ms size = " + submissionLocatedFeatureTypes.size()); } /** * Fetch reposited (GEO/SRA/AE..) entries per submission. 
* @param os the production objectStore * @return map */ public static synchronized Map<Integer, List<String[]>> getRepositoryEntries(ObjectStore os) { if (submissionRepositedCache == null) { readSubmissionRepositoryEntries(os); } return submissionRepositedCache; } private static void readSubmissionRepositoryEntries(ObjectStore os) { // long startTime = System.currentTimeMillis(); try { Query q = new Query(); QueryClass qcSubmission = new QueryClass(Submission.class); QueryField qfDCCid = new QueryField(qcSubmission, "DCCid"); q.addFrom(qcSubmission); q.addToSelect(qfDCCid); QueryClass qcRepositoryEntry = new QueryClass(DatabaseRecord.class); QueryField qfDatabase = new QueryField(qcRepositoryEntry, "database"); QueryField qfAccession = new QueryField(qcRepositoryEntry, "accession"); QueryField qfUrl = new QueryField(qcRepositoryEntry, "url"); q.addFrom(qcRepositoryEntry); q.addToSelect(qfDatabase); q.addToSelect(qfAccession); q.addToSelect(qfUrl); // join the tables QueryCollectionReference ref1 = new QueryCollectionReference(qcSubmission, "databaseRecords"); ContainsConstraint cc = new ContainsConstraint(ref1, ConstraintOp.CONTAINS, qcRepositoryEntry); q.setConstraint(cc); q.addToOrderBy(qfDCCid); q.addToOrderBy(qfDatabase); Results results = os.execute(q); submissionRepositedCache = new HashMap<Integer, List<String[]>>(); Integer counter = 0; Integer prevSub = new Integer(-1); List<String[]> subRep = new ArrayList<String[]>(); Iterator i = results.iterator(); while (i.hasNext()) { ResultsRow row = (ResultsRow) i.next(); counter++; Integer dccId = (Integer) row.get(0); String db = (String) row.get(1); String acc = (String) row.get(2); String url = (String) row.get(3); String[] thisRecord = {db, acc, url}; if (!dccId.equals(prevSub) || counter.equals(results.size())) { if (prevSub > 0) { if (counter.equals(results.size())) { prevSub = dccId; subRep.add(thisRecord); } List<String[]> subRepIn = new ArrayList<String[]>(); subRepIn.addAll(subRep); 
submissionRepositedCache.put(prevSub, subRepIn); subRep.clear(); } prevSub = dccId; } subRep.add(thisRecord); } } catch (Exception err) { err.printStackTrace(); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed Repository entries cache, took: " + timeTaken + "ms size = " + submissionRepositedCache.size()); } /** * adds an element to a list which is the value of a map * @param m the map (<String, List<String>>) * @param key the key for the map * @param value the list */ private static void addToMap(Map<Integer, List<String>> m, Integer key, String value) { List<String> ids = new ArrayList<String>(); if (m.containsKey(key)) { ids = m.get(key); } if (!ids.contains(value)) { ids.add(value); m.put(key, ids); } } /** * Method to fill the cached map of submissions (ddcId) to list of * GBrowse tracks * */ private static void readGBrowseTracks() { Runnable r = new Runnable() { public void run() { threadedReadGBrowseTracks(); } }; Thread t = new Thread(r); t.start(); } private static void threadedReadGBrowseTracks() { long startTime = System.currentTimeMillis(); Map<Integer, List<GBrowseTrack>> tracks = new HashMap<Integer, List<GBrowseTrack>>(); Map<Integer, List<GBrowseTrack>> flyTracks = null; Map<Integer, List<GBrowseTrack>> wormTracks = null; try { flyTracks = GBrowseParser.readTracks("fly"); wormTracks = GBrowseParser.readTracks("worm"); } catch (Exception e) { LOG.error(e); } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed GBrowse tracks cache, took: " + timeTaken + "ms size = " + tracks.size()); if (flyTracks != null && wormTracks != null) { tracks.putAll(flyTracks); tracks.putAll(wormTracks); setGBrowseTracks(tracks); } } /** * This method get the feature descriptions from a property file. 
* * @return the map feature/description */ private static Map<String, String> readFeatTypeDescription(ServletContext servletContext) { long startTime = System.currentTimeMillis(); featDescriptionCache = new HashMap<String, String>(); Properties props = new Properties(); InputStream is = servletContext.getResourceAsStream("/WEB-INF/featureTypeDescr.properties"); if (is == null) { LOG.info(NO_FEAT_DESCR_LOG); } else { try { props.load(is); } catch (IOException e) { // TODO Auto-generated catch block //throw new IllegalAccessException("Error getting featureTypeDescr.properties file", // e.printStackTrace()); e.printStackTrace(); } Enumeration en = props.keys(); // while (props.keys().hasMoreElements()) { while (en.hasMoreElements()) { String expFeat = (String) en.nextElement(); String descr = props.getProperty(expFeat); featDescriptionCache.put(expFeat, descr); } } long timeTaken = System.currentTimeMillis() - startTime; LOG.info("Primed feature description cache, took: " + timeTaken + "ms size = " + featDescriptionCache.size()); return featDescriptionCache; } }
Experiment feature counts duplication fixed
modmine/webapp/src/org/modmine/web/MetadataCache.java
Experiment feature counts duplication fixed
Java
lgpl-2.1
63c8ba46dc791884087aa7c773eae7b547482cdb
0
sewe/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,sewe/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs
/* * Machine Learning support for FindBugs * Copyright (C) 2005, University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.ml; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.List; import java.util.zip.GZIPInputStream; import org.dom4j.Attribute; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.DocumentFactory; import org.dom4j.Element; import org.dom4j.io.OutputFormat; import org.dom4j.io.XMLWriter; import edu.umd.cs.findbugs.BugCollection; import edu.umd.cs.findbugs.Project; import edu.umd.cs.findbugs.SortedBugCollection; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.xml.Dom4JXMLOutput; /** * Add uid attributes to BugInstances in a BugCollection. * A uid is an integer that uniquely identifies a BugInstance * in a BugCollection. * Right now this is only used in machine learning experiments. 
* * @author David Hovemeyer */ public class GenerateUIDs { private BugCollection bugCollection; @NonNull private Project project; private String inputFilename; private String outputFilename; public GenerateUIDs(String inputFilename, String outputFilename) { this.bugCollection = new SortedBugCollection(); this.project = new Project(); this.inputFilename = inputFilename; this.outputFilename = outputFilename; } public void execute() throws IOException, DocumentException { InputStream in; if (inputFilename.equals("-")) { in = System.in; } else { in = new BufferedInputStream(new FileInputStream(inputFilename)); if (inputFilename.endsWith(".gz")) in = new GZIPInputStream(in); } bugCollection.readXML(in, project); Document document = DocumentFactory.getInstance().createDocument(); Dom4JXMLOutput xmlOutput = new Dom4JXMLOutput(document); bugCollection.writeXML(xmlOutput, project); int count = 0; List<Element> bugInstanceList = document.selectNodes("/BugCollection/BugInstance"); for (Element element : bugInstanceList) { Attribute uidAttr = element.attribute("uid"); if (uidAttr == null) { element.addAttribute("uid", Integer.toString(count++)); } } OutputStream out; if (outputFilename.equals("-")) { out = System.out; } else { out = new BufferedOutputStream(new FileOutputStream(outputFilename)); } XMLWriter xmlWriter = new XMLWriter(out, OutputFormat.createPrettyPrint()); xmlWriter.write(document); } public static void main(String[] args) throws IOException, DocumentException { if (args.length != 2) { System.err.println("Usage: " + GenerateUIDs.class.getName() + " <input file> <output file>"); System.exit(1); } String inputFilename = args[0]; String outputFilename = args[1]; GenerateUIDs generateUIDs = new GenerateUIDs(inputFilename, outputFilename); generateUIDs.execute(); } }
findbugs/src/java/edu/umd/cs/findbugs/ml/GenerateUIDs.java
/* * Machine Learning support for FindBugs * Copyright (C) 2005, University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.ml; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.List; import java.util.zip.GZIPInputStream; import org.dom4j.Attribute; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.DocumentFactory; import org.dom4j.Element; import org.dom4j.io.OutputFormat; import org.dom4j.io.XMLWriter; import edu.umd.cs.findbugs.BugCollection; import edu.umd.cs.findbugs.Project; import edu.umd.cs.findbugs.SortedBugCollection; import edu.umd.cs.findbugs.xml.Dom4JXMLOutput; /** * Add uid attributes to BugInstances in a BugCollection. * A uid is an integer that uniquely identifies a BugInstance * in a BugCollection. * Right now this is only used in machine learning experiments. 
* * @author David Hovemeyer */ public class GenerateUIDs { private BugCollection bugCollection; private Project project; private String inputFilename; private String outputFilename; public GenerateUIDs(String inputFilename, String outputFilename) { this.bugCollection = new SortedBugCollection(); this.project = new Project(); this.inputFilename = inputFilename; this.outputFilename = outputFilename; } public void execute() throws IOException, DocumentException { InputStream in; if (inputFilename.equals("-")) { in = System.in; } else { in = new BufferedInputStream(new FileInputStream(inputFilename)); if (inputFilename.endsWith(".gz")) in = new GZIPInputStream(in); } bugCollection.readXML(in, project); Document document = DocumentFactory.getInstance().createDocument(); Dom4JXMLOutput xmlOutput = new Dom4JXMLOutput(document); bugCollection.writeXML(xmlOutput, project); int count = 0; List<Element> bugInstanceList = document.selectNodes("/BugCollection/BugInstance"); for (Element element : bugInstanceList) { Attribute uidAttr = element.attribute("uid"); if (uidAttr == null) { element.addAttribute("uid", Integer.toString(count++)); } } OutputStream out; if (outputFilename.equals("-")) { out = System.out; } else { out = new BufferedOutputStream(new FileOutputStream(outputFilename)); } XMLWriter xmlWriter = new XMLWriter(out, OutputFormat.createPrettyPrint()); xmlWriter.write(document); } public static void main(String[] args) throws IOException, DocumentException { if (args.length != 2) { System.err.println("Usage: " + GenerateUIDs.class.getName() + " <input file> <output file>"); System.exit(1); } String inputFilename = args[0]; String outputFilename = args[1]; GenerateUIDs generateUIDs = new GenerateUIDs(inputFilename, outputFilename); generateUIDs.execute(); } }
add additional null pointer checks git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@7740 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
findbugs/src/java/edu/umd/cs/findbugs/ml/GenerateUIDs.java
add additional null pointer checks
Java
apache-2.0
049ed6964f88946a4829f9d3ea29b573a314d89e
0
RoaringBitmap/RoaringBitmap,lemire/RoaringBitmap,lemire/RoaringBitmap,lemire/RoaringBitmap,RoaringBitmap/RoaringBitmap,RoaringBitmap/RoaringBitmap,RoaringBitmap/RoaringBitmap,lemire/RoaringBitmap
package org.roaringbitmap.longlong;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.roaringbitmap.RoaringBitmap;

/**
 * Unit tests for {@link Roaring64NavigableMap}: cardinality, rank, select,
 * iteration order (signed vs unsigned), removal, serialization, and or().
 */
public class TestRoaring64NavigableMap {

  @Test
  public void testEmpty() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();

    Assert.assertFalse(map.getLongIterator().hasNext());

    Assert.assertEquals(0, map.getLongCardinality());

    Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE));
    Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE + 1));
    Assert.assertEquals(0, map.rankLong(-1));
    Assert.assertEquals(0, map.rankLong(0));
    Assert.assertEquals(0, map.rankLong(1));
    Assert.assertEquals(0, map.rankLong(Long.MAX_VALUE - 1));
    Assert.assertEquals(0, map.rankLong(Long.MAX_VALUE));
  }

  @Test
  public void testZero() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(0);

    {
      LongIterator iterator = map.getLongIterator();
      Assert.assertTrue(iterator.hasNext());
      Assert.assertEquals(0, iterator.next());
      Assert.assertEquals(0, map.select(0));
      Assert.assertFalse(iterator.hasNext());
    }

    Assert.assertEquals(1, map.getLongCardinality());

    Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE));
    Assert.assertEquals(0, map.rankLong(Integer.MIN_VALUE - 1L));
    Assert.assertEquals(0, map.rankLong(-1));
    Assert.assertEquals(1, map.rankLong(0));
    Assert.assertEquals(1, map.rankLong(1));
    Assert.assertEquals(1, map.rankLong(Integer.MAX_VALUE + 1L));
    Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE));
  }

  @Test
  public void testSimpleIntegers() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(123);
    map.addLong(234);

    {
      LongIterator iterator = map.getLongIterator();
      Assert.assertTrue(iterator.hasNext());
      Assert.assertEquals(123, iterator.next());
      Assert.assertEquals(123, map.select(0));
      Assert.assertTrue(iterator.hasNext());
      Assert.assertEquals(234, iterator.next());
      Assert.assertEquals(234, map.select(1));
      Assert.assertFalse(iterator.hasNext());
    }

    Assert.assertEquals(2, map.getLongCardinality());

    Assert.assertEquals(0, map.rankLong(0));
    Assert.assertEquals(1, map.rankLong(123));
    Assert.assertEquals(1, map.rankLong(233));
    Assert.assertEquals(2, map.rankLong(234));
    Assert.assertEquals(2, map.rankLong(235));
    Assert.assertEquals(2, map.rankLong(Integer.MAX_VALUE + 1L));
    Assert.assertEquals(2, map.rankLong(Long.MAX_VALUE));

    Assert.assertArrayEquals(new long[] {123L, 234L}, map.toArray());
  }

  @Test(expected = IllegalArgumentException.class)
  public void testAddOneSelect2() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(123);
    map.select(1);
  }

  @Test
  public void testIterator_NextWithoutHasNext_Filled() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(0);
    Assert.assertTrue(map.getLongIterator().hasNext());
    Assert.assertEquals(0, map.getLongIterator().next());
  }

  @Test(expected = IllegalStateException.class)
  public void testIterator_NextWithoutHasNext_Empty() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.getLongIterator().next();
  }

  @Test
  public void testLongMaxValue() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(Long.MAX_VALUE);

    {
      LongIterator iterator = map.getLongIterator();
      Assert.assertTrue(iterator.hasNext());
      Assert.assertEquals(Long.MAX_VALUE, iterator.next());
      Assert.assertEquals(Long.MAX_VALUE, map.select(0));
      Assert.assertFalse(iterator.hasNext());
    }

    Assert.assertEquals(1, map.getLongCardinality());

    Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE));
    Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE + 1));
    Assert.assertEquals(0, map.rankLong(-1));
    Assert.assertEquals(0, map.rankLong(0));
    Assert.assertEquals(0, map.rankLong(1));
    Assert.assertEquals(0, map.rankLong(Long.MAX_VALUE - 1));
    Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE));
  }

  @Test
  public void testLongMinValue() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(Long.MIN_VALUE);

    {
      LongIterator iterator = map.getLongIterator();
      Assert.assertTrue(iterator.hasNext());
      Assert.assertEquals(Long.MIN_VALUE, iterator.next());
      Assert.assertEquals(Long.MIN_VALUE, map.select(0));
      Assert.assertFalse(iterator.hasNext());
    }

    Assert.assertEquals(1, map.getLongCardinality());

    // In the default (unsigned) order Long.MIN_VALUE sorts below everything,
    // so every rank query sees it.
    Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE));
    Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE + 1));
    Assert.assertEquals(1, map.rankLong(-1));
    Assert.assertEquals(1, map.rankLong(0));
    Assert.assertEquals(1, map.rankLong(1));
    Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE - 1));
    Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE));
  }

  @Test
  public void testLongMinValueZeroOneMaxValue() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();

    map.addLong(Long.MIN_VALUE);
    map.addLong(0);
    map.addLong(1);
    map.addLong(Long.MAX_VALUE);

    {
      LongIterator iterator = map.getLongIterator();
      Assert.assertTrue(iterator.hasNext());
      Assert.assertEquals(Long.MIN_VALUE, iterator.next());
      Assert.assertEquals(Long.MIN_VALUE, map.select(0));
      Assert.assertEquals(0, iterator.next());
      Assert.assertEquals(0, map.select(1));
      Assert.assertEquals(1, iterator.next());
      Assert.assertEquals(1, map.select(2));
      Assert.assertEquals(Long.MAX_VALUE, iterator.next());
      Assert.assertEquals(Long.MAX_VALUE, map.select(3));
      Assert.assertFalse(iterator.hasNext());
    }

    Assert.assertEquals(4, map.getLongCardinality());

    Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE));
    Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE + 1));
    Assert.assertEquals(1, map.rankLong(-1));
    Assert.assertEquals(2, map.rankLong(0));
    Assert.assertEquals(3, map.rankLong(1));
    Assert.assertEquals(3, map.rankLong(2));
    Assert.assertEquals(3, map.rankLong(Long.MAX_VALUE - 1));
    Assert.assertEquals(4, map.rankLong(Long.MAX_VALUE));

    final List<Long> foreach = new ArrayList<>();
    map.forEach(new LongConsumer() {

      @Override
      public void accept(long value) {
        foreach.add(value);
      }
    });
    Assert.assertEquals(Arrays.asList(Long.MIN_VALUE, 0L, 1L, Long.MAX_VALUE), foreach);
  }

  @Test
  public void testRemove() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();

    // Add a value
    map.addLong(123);
    Assert.assertEquals(1L, map.getLongCardinality());

    // Remove it
    map.remove(123L);
    Assert.assertEquals(0L, map.getLongCardinality());

    // Add it back
    map.addLong(123);
    Assert.assertEquals(1L, map.getLongCardinality());
  }

  @Test
  public void testRemoveDifferentBuckets() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();

    // Add two values
    map.addLong(123);
    map.addLong(Long.MAX_VALUE);
    Assert.assertEquals(2L, map.getLongCardinality());

    // Remove biggest
    map.remove(Long.MAX_VALUE);
    Assert.assertEquals(1L, map.getLongCardinality());
    Assert.assertEquals(123L, map.select(0));
  }

  @Test
  public void testPerfManyDifferentBuckets() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();

    long problemSize = 100 * 1000L;
    for (long i = 1; i <= problemSize; i++) {
      map.addLong(i * Integer.MAX_VALUE + 1L);
    }

    long cardinality = map.getLongCardinality();
    Assert.assertEquals(problemSize, cardinality);

    long last = map.select(cardinality - 1);
    Assert.assertEquals(problemSize * Integer.MAX_VALUE + 1L, last);
    Assert.assertEquals(cardinality, map.rankLong(last));
  }

  @Test
  public void testPerfManyDifferentBuckets_NoCache() {
    // Same as above but with the cardinality cache disabled.
    Roaring64NavigableMap map = new Roaring64NavigableMap(true, false);

    long problemSize = 100 * 1000L;
    for (long i = 1; i <= problemSize; i++) {
      map.addLong(i * Integer.MAX_VALUE + 1L);
    }

    long cardinality = map.getLongCardinality();
    Assert.assertEquals(problemSize, cardinality);

    long last = map.select(cardinality - 1);
    Assert.assertEquals(problemSize * Integer.MAX_VALUE + 1L, last);
    Assert.assertEquals(cardinality, map.rankLong(last));
  }

  @Test
  public void testComparator() {
    Comparator<Integer> natural = new Comparator<Integer>() {

      @Override
      public int compare(Integer o1, Integer o2) {
        return Integer.compare(o1, o2);
      }
    };
    Comparator<Integer> unsigned = RoaringIntPacking.unsignedComparator();

    // Comparator a negative and a positive differs from natural comparison
    Assert.assertTrue(natural.compare(-1, 1) < 0);
    Assert.assertFalse(unsigned.compare(-1, 1) < 0);

    // Comparator Long.MAX_VALUE and Long.MAX_VALUE + 1 differs
    Assert.assertTrue(natural.compare(Integer.MAX_VALUE, Integer.MAX_VALUE + 1) > 0);
    Assert.assertFalse(unsigned.compare(Integer.MAX_VALUE, Integer.MAX_VALUE + 1) > 0);

    // 'Integer.MAX_VALUE+1' is lower than 'Integer.MAX_VALUE+2'
    Assert.assertTrue(unsigned.compare(Integer.MAX_VALUE + 1, Integer.MAX_VALUE + 2) < 0);
  }

  @Test
  public void testLargeSelectLong_signed() {
    long positive = 1;
    long negative = -1;
    Roaring64NavigableMap map = new Roaring64NavigableMap(true);
    map.addLong(positive);
    map.addLong(negative);
    long first = map.select(0);
    long last = map.select(1);

    // signed: positive is after negative
    Assert.assertEquals(negative, first);
    Assert.assertEquals(positive, last);
  }

  @Test
  public void testLargeSelectLong_unsigned() {
    long positive = 1;
    long negative = -1;
    Roaring64NavigableMap map = new Roaring64NavigableMap(false);
    map.addLong(positive);
    map.addLong(negative);
    long first = map.select(0);
    long last = map.select(1);

    // unsigned: negative means bigger than Long.MAX_VALUE
    Assert.assertEquals(positive, first);
    Assert.assertEquals(negative, last);
  }

  @Test
  public void testLargeRankLong_signed() {
    long positive = 1;
    long negative = -1;
    Roaring64NavigableMap map = new Roaring64NavigableMap(true);
    map.addLong(positive);
    map.addLong(negative);
    Assert.assertEquals(1, map.rankLong(negative));
  }

  @Test
  public void testLargeRankLong_unsigned() {
    long positive = 1;
    long negative = -1;
    Roaring64NavigableMap map = new Roaring64NavigableMap(false);
    map.addLong(positive);
    map.addLong(negative);
    Assert.assertEquals(2, map.rankLong(negative));
  }

  @Test
  public void testIterationOrder_signed() {
    long positive = 1;
    long negative = -1;
    Roaring64NavigableMap map = new Roaring64NavigableMap(true);
    map.addLong(positive);
    map.addLong(negative);
    LongIterator it = map.getLongIterator();
    long first = it.next();
    long last = it.next();
    Assert.assertEquals(negative, first);
    Assert.assertEquals(positive, last);
  }

  @Test
  public void testIterationOrder_unsigned() {
    long positive = 1;
    long negative = -1;
    Roaring64NavigableMap map = new Roaring64NavigableMap(false);
    map.addLong(positive);
    map.addLong(negative);
    LongIterator it = map.getLongIterator();
    long first = it.next();
    long last = it.next();
    Assert.assertEquals(positive, first);
    Assert.assertEquals(negative, last);
  }

  @Test
  public void testAddingLowValueAfterHighValue() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(Long.MAX_VALUE);
    Assert.assertEquals(Long.MAX_VALUE, map.select(0));
    map.addLong(666);
    Assert.assertEquals(666, map.select(0));
    Assert.assertEquals(Long.MAX_VALUE, map.select(1));
  }

  @Test
  public void testSerialization_Empty() throws IOException, ClassNotFoundException {
    final Roaring64NavigableMap map = new Roaring64NavigableMap();

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
      oos.writeObject(map);
    }

    final Roaring64NavigableMap clone;
    try (ObjectInputStream ois =
        new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
      clone = (Roaring64NavigableMap) ois.readObject();
    }

    // Check the test has not simply copied the ref
    Assert.assertNotSame(map, clone);
    Assert.assertEquals(0, clone.getLongCardinality());
  }

  @Test
  public void testSerialization_OneValue() throws IOException, ClassNotFoundException {
    final Roaring64NavigableMap map = new Roaring64NavigableMap();
    map.addLong(123);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
      oos.writeObject(map);
    }

    final Roaring64NavigableMap clone;
    try (ObjectInputStream ois =
        new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
      clone = (Roaring64NavigableMap) ois.readObject();
    }

    // Check the test has not simply copied the ref
    Assert.assertNotSame(map, clone);
    Assert.assertEquals(1, clone.getLongCardinality());
    Assert.assertEquals(123, clone.select(0));
  }

  @Test
  public void testOr_SameBucket() {
    Roaring64NavigableMap left = new Roaring64NavigableMap();
    Roaring64NavigableMap right = new Roaring64NavigableMap();

    left.addLong(123);
    right.addLong(234);

    left.or(right);

    Assert.assertEquals(2, left.getLongCardinality());

    Assert.assertEquals(123, left.select(0));
    Assert.assertEquals(234, left.select(1));
  }

  @Test
  public void testOr_DifferentBucket_NotBuffer() {
    Roaring64NavigableMap left = new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier());
    Roaring64NavigableMap right =
        new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier());

    left.addLong(123);
    right.addLong(Long.MAX_VALUE / 2);

    left.or(right);

    Assert.assertEquals(2, left.getLongCardinality());

    Assert.assertEquals(123, left.select(0));
    Assert.assertEquals(Long.MAX_VALUE / 2, left.select(1));
  }

  @Test
  public void testOr_SameBucket_NotBuffer() {
    Roaring64NavigableMap left = new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier());
    Roaring64NavigableMap right =
        new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier());

    left.addLong(123);
    right.addLong(234);

    left.or(right);

    Assert.assertEquals(2, left.getLongCardinality());

    Assert.assertEquals(123, left.select(0));
    Assert.assertEquals(234, left.select(1));
  }

  @Test
  public void testOr_DifferentBucket_Buffer() {
    Roaring64NavigableMap left =
        new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier());
    Roaring64NavigableMap right =
        new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier());

    left.addLong(123);
    right.addLong(Long.MAX_VALUE / 2);

    left.or(right);

    Assert.assertEquals(2, left.getLongCardinality());

    Assert.assertEquals(123, left.select(0));
    Assert.assertEquals(Long.MAX_VALUE / 2, left.select(1));
  }

  @Test
  public void testOr_SameBucket_Buffer() {
    Roaring64NavigableMap left =
        new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier());
    Roaring64NavigableMap right =
        new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier());

    left.addLong(123);
    right.addLong(234);

    left.or(right);

    Assert.assertEquals(2, left.getLongCardinality());

    Assert.assertEquals(123, left.select(0));
    Assert.assertEquals(234, left.select(1));
  }

  @Test
  public void testOr_CloneInput() {
    Roaring64NavigableMap left = new Roaring64NavigableMap();
    Roaring64NavigableMap right = new Roaring64NavigableMap();

    right.addLong(123);

    // We push in left a bucket which does not exist
    left.or(right);

    // Then we mutate left: ensure it does not impact right as it should remain unchanged
    left.addLong(234);

    Assert.assertEquals(2, left.getLongCardinality());
    Assert.assertEquals(123, left.select(0));
    Assert.assertEquals(234, left.select(1));

    Assert.assertEquals(1, right.getLongCardinality());
    Assert.assertEquals(123, right.select(0));
  }

  @Test
  public void testToString_signed() {
    Roaring64NavigableMap map = new Roaring64NavigableMap(true);

    map.addLong(123);
    map.addLong(Long.MAX_VALUE);
    map.addLong(Long.MAX_VALUE + 1L);

    Assert.assertEquals("{-9223372036854775808,123,9223372036854775807}", map.toString());
  }

  @Test
  public void testToString_unsigned() {
    Roaring64NavigableMap map = new Roaring64NavigableMap(false);

    map.addLong(123);
    map.addLong(Long.MAX_VALUE);
    map.addLong(Long.MAX_VALUE + 1L);

    Assert.assertEquals("{123,9223372036854775807,9223372036854775808}", map.toString());
  }

  // Smallest cardinality (2^32 + 1) that cannot fit in a single 32-bit RoaringBitmap.
  public static final long outOfRoaringBitmapRange = 2L * Integer.MAX_VALUE + 3L;

  // TODO
  // FIXME
  @Ignore("TODO FIXME")
  @Test
  public void testCardinalityAboveIntegerMaxValue() {
    Roaring64NavigableMap map = new Roaring64NavigableMap();

    // This should fill entirely one bitmap,and add one in the next bitmap
    map.add(0, outOfRoaringBitmapRange);

    Assert.assertEquals(0, map.select(0));
    // add(0, n) fills the half-open range [0, n), so the last (index n-1)
    // value is n - 1, not n.
    Assert.assertEquals(outOfRoaringBitmapRange - 1, map.select(outOfRoaringBitmapRange - 1));
    Assert.assertEquals(outOfRoaringBitmapRange, map.getLongCardinality());
  }

  @Test(expected = IllegalArgumentException.class)
  public void testCardinalityAboveIntegerMaxValue_RoaringBitmap() {
    RoaringBitmap map = new RoaringBitmap();

    map.add(0L, outOfRoaringBitmapRange);
  }
}
src/test/java/org/roaringbitmap/longlong/TestRoaring64NavigableMap.java
package org.roaringbitmap.longlong; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import org.junit.Assert; import org.junit.Test; import org.roaringbitmap.RoaringBitmap; public class TestRoaring64NavigableMap { @Test public void testEmpty() { Roaring64NavigableMap map = new Roaring64NavigableMap(); Assert.assertFalse(map.getLongIterator().hasNext()); Assert.assertEquals(0, map.getLongCardinality()); Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE)); Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE + 1)); Assert.assertEquals(0, map.rankLong(-1)); Assert.assertEquals(0, map.rankLong(0)); Assert.assertEquals(0, map.rankLong(1)); Assert.assertEquals(0, map.rankLong(Long.MAX_VALUE - 1)); Assert.assertEquals(0, map.rankLong(Long.MAX_VALUE)); } @Test public void testZero() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(0); { LongIterator iterator = map.getLongIterator(); Assert.assertTrue(iterator.hasNext()); Assert.assertEquals(0, iterator.next()); Assert.assertEquals(0, map.select(0)); Assert.assertFalse(iterator.hasNext()); } Assert.assertEquals(1, map.getLongCardinality()); Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE)); Assert.assertEquals(0, map.rankLong(Integer.MIN_VALUE - 1L)); Assert.assertEquals(0, map.rankLong(-1)); Assert.assertEquals(1, map.rankLong(0)); Assert.assertEquals(1, map.rankLong(1)); Assert.assertEquals(1, map.rankLong(Integer.MAX_VALUE + 1L)); Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE)); } @Test public void testSimpleIntegers() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(123); map.addLong(234); { LongIterator iterator = map.getLongIterator(); Assert.assertTrue(iterator.hasNext()); Assert.assertEquals(123, iterator.next()); Assert.assertEquals(123, 
map.select(0)); Assert.assertTrue(iterator.hasNext()); Assert.assertEquals(234, iterator.next()); Assert.assertEquals(234, map.select(0)); Assert.assertFalse(iterator.hasNext()); } Assert.assertEquals(2, map.getLongCardinality()); Assert.assertEquals(0, map.rankLong(0)); Assert.assertEquals(1, map.rankLong(123)); Assert.assertEquals(1, map.rankLong(233)); Assert.assertEquals(2, map.rankLong(234)); Assert.assertEquals(2, map.rankLong(235)); Assert.assertEquals(1, map.rankLong(Integer.MAX_VALUE + 1L)); Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE)); Assert.assertArrayEquals(new long[] {123L, 234L}, map.toArray()); } @Test(expected = IllegalArgumentException.class) public void testAddOneSelect2() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(123); map.select(1); } @Test public void testIterator_NextWithoutHasNext_Filled() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(0); Assert.assertTrue(map.getLongIterator().hasNext()); Assert.assertEquals(0, map.getLongIterator().next()); } @Test(expected = IllegalStateException.class) public void testIterator_NextWithoutHasNext_Empty() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.getLongIterator().next(); } @Test public void testLongMaxValue() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(Long.MAX_VALUE); { LongIterator iterator = map.getLongIterator(); Assert.assertTrue(iterator.hasNext()); Assert.assertEquals(Long.MAX_VALUE, iterator.next()); Assert.assertEquals(Long.MAX_VALUE, map.select(0)); Assert.assertFalse(iterator.hasNext()); } Assert.assertEquals(1, map.getLongCardinality()); Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE)); Assert.assertEquals(0, map.rankLong(Long.MIN_VALUE + 1)); Assert.assertEquals(0, map.rankLong(-1)); Assert.assertEquals(0, map.rankLong(0)); Assert.assertEquals(0, map.rankLong(1)); Assert.assertEquals(0, map.rankLong(Long.MAX_VALUE - 1)); Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE)); } 
@Test public void testLongMinValue() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(Long.MIN_VALUE); { LongIterator iterator = map.getLongIterator(); Assert.assertTrue(iterator.hasNext()); Assert.assertEquals(Long.MIN_VALUE, iterator.next()); Assert.assertEquals(Long.MIN_VALUE, map.select(0)); Assert.assertFalse(iterator.hasNext()); } Assert.assertEquals(1, map.getLongCardinality()); Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE)); Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE + 1)); Assert.assertEquals(1, map.rankLong(-1)); Assert.assertEquals(1, map.rankLong(0)); Assert.assertEquals(1, map.rankLong(1)); Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE - 1)); Assert.assertEquals(1, map.rankLong(Long.MAX_VALUE)); } @Test public void testLongMinValueZeroOneMaxValue() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(Long.MIN_VALUE); map.addLong(0); map.addLong(1); map.addLong(Long.MAX_VALUE); { LongIterator iterator = map.getLongIterator(); Assert.assertTrue(iterator.hasNext()); Assert.assertEquals(Long.MIN_VALUE, iterator.next()); Assert.assertEquals(Long.MIN_VALUE, map.select(0)); Assert.assertEquals(0, iterator.next()); Assert.assertEquals(0, map.select(1)); Assert.assertEquals(1, iterator.next()); Assert.assertEquals(1, map.select(2)); Assert.assertEquals(Long.MAX_VALUE, iterator.next()); Assert.assertEquals(Long.MAX_VALUE, map.select(3)); Assert.assertFalse(iterator.hasNext()); } Assert.assertEquals(4, map.getLongCardinality()); Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE)); Assert.assertEquals(1, map.rankLong(Long.MIN_VALUE + 1)); Assert.assertEquals(1, map.rankLong(-1)); Assert.assertEquals(2, map.rankLong(0)); Assert.assertEquals(3, map.rankLong(1)); Assert.assertEquals(3, map.rankLong(2)); Assert.assertEquals(3, map.rankLong(Long.MAX_VALUE - 1)); Assert.assertEquals(4, map.rankLong(Long.MAX_VALUE)); final List<Long> foreach = new ArrayList<>(); map.forEach(new LongConsumer() { @Override public 
void accept(long value) { foreach.add(value); } }); Assert.assertEquals(Arrays.asList(Long.MIN_VALUE, 0L, 1L, Long.MAX_VALUE), foreach); } // TODO // FIXME // @Ignore("TODO FIXME") @Test public void testRemove() { Roaring64NavigableMap map = new Roaring64NavigableMap(); // Add a value map.addLong(123); Assert.assertEquals(1L, map.getLongCardinality()); // Remove it map.remove(123L); Assert.assertEquals(0L, map.getLongCardinality()); // Add it back map.addLong(123); Assert.assertEquals(1L, map.getLongCardinality()); } @Test public void testRemoveDifferentBuckets() { Roaring64NavigableMap map = new Roaring64NavigableMap(); // Add two values map.addLong(123); map.addLong(Long.MAX_VALUE); Assert.assertEquals(2L, map.getLongCardinality()); // Remove biggest map.remove(Long.MAX_VALUE); Assert.assertEquals(1L, map.getLongCardinality()); Assert.assertEquals(123L, map.select(0)); } @Test public void testPerfManyDifferentBuckets() { Roaring64NavigableMap map = new Roaring64NavigableMap(); long problemSize = 100 * 1000L; for (long i = 1; i <= problemSize; i++) { map.addLong(i * Integer.MAX_VALUE + 1L); } long cardinality = map.getLongCardinality(); Assert.assertEquals(problemSize, cardinality); long last = map.select(cardinality - 1); Assert.assertEquals(problemSize * Integer.MAX_VALUE + 1L, last); Assert.assertEquals(cardinality, map.rankLong(last)); } @Test public void testPerfManyDifferentBuckets_NoCache() { Roaring64NavigableMap map = new Roaring64NavigableMap(true, false); long problemSize = 100 * 1000L; for (long i = 1; i <= problemSize; i++) { map.addLong(i * Integer.MAX_VALUE + 1L); } long cardinality = map.getLongCardinality(); Assert.assertEquals(problemSize, cardinality); long last = map.select(cardinality - 1); Assert.assertEquals(problemSize * Integer.MAX_VALUE + 1L, last); Assert.assertEquals(cardinality, map.rankLong(last)); } @Test public void testComparator() { Comparator<Integer> natural = new Comparator<Integer>() { @Override public int compare(Integer o1, 
Integer o2) { return Integer.compare(o1, o2); } }; Comparator<Integer> unsigned = RoaringIntPacking.unsignedComparator(); // Comparator a negative and a positive differs from natural comparison Assert.assertTrue(natural.compare(-1, 1) < 0); Assert.assertFalse(unsigned.compare(-1, 1) < 0); // Comparator Long.MAX_VALUE and Long.MAX_VALUE + 1 differs Assert.assertTrue(natural.compare(Integer.MAX_VALUE, Integer.MAX_VALUE + 1) > 0); Assert.assertFalse(unsigned.compare(Integer.MAX_VALUE, Integer.MAX_VALUE + 1) > 0); // 'Integer.MAX_VALUE+1' is lower than 'Integer.MAX_VALUE+2' Assert.assertTrue(unsigned.compare(Integer.MAX_VALUE + 1, Integer.MAX_VALUE + 2) < 0); } @Test public void testLargeSelectLong_signed() { long positive = 1; long negative = -1; Roaring64NavigableMap map = new Roaring64NavigableMap(true); map.addLong(positive); map.addLong(negative); long first = map.select(0); long last = map.select(1); // signed: positive is after negative Assert.assertEquals(negative, first); Assert.assertEquals(positive, last); } @Test public void testLargeSelectLong_unsigned() { long positive = 1; long negative = -1; Roaring64NavigableMap map = new Roaring64NavigableMap(false); map.addLong(positive); map.addLong(negative); long first = map.select(0); long last = map.select(1); // unsigned: negative means bigger than Long.MAX_VALUE Assert.assertEquals(positive, first); Assert.assertEquals(negative, last); } @Test public void testLargeRankLong_signed() { long positive = 1; long negative = -1; Roaring64NavigableMap map = new Roaring64NavigableMap(true); map.addLong(positive); map.addLong(negative); Assert.assertEquals(1, map.rankLong(negative)); } @Test public void testLargeRankLong_unsigned() { long positive = 1; long negative = -1; Roaring64NavigableMap map = new Roaring64NavigableMap(false); map.addLong(positive); map.addLong(negative); Assert.assertEquals(2, map.rankLong(negative)); } @Test public void testIterationOrder_signed() { long positive = 1; long negative = -1; 
Roaring64NavigableMap map = new Roaring64NavigableMap(true); map.addLong(positive); map.addLong(negative); LongIterator it = map.getLongIterator(); long first = it.next(); long last = it.next(); Assert.assertEquals(negative, first); Assert.assertEquals(positive, last); } @Test public void testIterationOrder_unsigned() { long positive = 1; long negative = -1; Roaring64NavigableMap map = new Roaring64NavigableMap(false); map.addLong(positive); map.addLong(negative); LongIterator it = map.getLongIterator(); long first = it.next(); long last = it.next(); Assert.assertEquals(positive, first); Assert.assertEquals(negative, last); } @Test public void testAddingLowValueAfterHighValue() { Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(Long.MAX_VALUE); Assert.assertEquals(Long.MAX_VALUE, map.select(0)); map.addLong(666); Assert.assertEquals(666, map.select(0)); Assert.assertEquals(Long.MAX_VALUE, map.select(1)); } @Test public void testSerialization_Empty() throws IOException, ClassNotFoundException { final Roaring64NavigableMap map = new Roaring64NavigableMap(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (ObjectOutputStream oos = new ObjectOutputStream(baos)) { oos.writeObject(map); } final Roaring64NavigableMap clone; try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) { clone = (Roaring64NavigableMap) ois.readObject(); } // Check the test has not simply copied the ref Assert.assertNotSame(map, clone); Assert.assertEquals(0, clone.getLongCardinality()); } @Test public void testSerialization_OneValue() throws IOException, ClassNotFoundException { final Roaring64NavigableMap map = new Roaring64NavigableMap(); map.addLong(123); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (ObjectOutputStream oos = new ObjectOutputStream(baos)) { oos.writeObject(map); } final Roaring64NavigableMap clone; try (ObjectInputStream ois = new ObjectInputStream(new 
ByteArrayInputStream(baos.toByteArray()))) { clone = (Roaring64NavigableMap) ois.readObject(); } // Check the test has not simply copied the ref Assert.assertNotSame(map, clone); Assert.assertEquals(1, clone.getLongCardinality()); Assert.assertEquals(123, clone.select(0)); } @Test public void testOr_SameBucket() { Roaring64NavigableMap left = new Roaring64NavigableMap(); Roaring64NavigableMap right = new Roaring64NavigableMap(); left.addLong(123); right.addLong(234); left.or(right); Assert.assertEquals(2, left.getLongCardinality()); Assert.assertEquals(123, left.select(0)); Assert.assertEquals(234, left.select(1)); } @Test public void testOr_DifferentBucket_NotBuffer() { Roaring64NavigableMap left = new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier()); Roaring64NavigableMap right = new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier()); left.addLong(123); right.addLong(Long.MAX_VALUE / 2); left.or(right); Assert.assertEquals(2, left.getLongCardinality()); Assert.assertEquals(123, left.select(0)); Assert.assertEquals(Long.MAX_VALUE / 2, left.select(1)); } @Test public void testOr_SameBucket_NotBuffer() { Roaring64NavigableMap left = new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier()); Roaring64NavigableMap right = new Roaring64NavigableMap(true, true, new RoaringBitmapSupplier()); left.addLong(123); right.addLong(234); left.or(right); Assert.assertEquals(2, left.getLongCardinality()); Assert.assertEquals(123, left.select(0)); Assert.assertEquals(234, left.select(1)); } @Test public void testOr_DifferentBucket_Buffer() { Roaring64NavigableMap left = new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier()); Roaring64NavigableMap right = new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier()); left.addLong(123); right.addLong(Long.MAX_VALUE / 2); left.or(right); Assert.assertEquals(2, left.getLongCardinality()); Assert.assertEquals(123, left.select(0)); Assert.assertEquals(Long.MAX_VALUE / 2, 
left.select(1)); } @Test public void testOr_SameBucket_Buffer() { Roaring64NavigableMap left = new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier()); Roaring64NavigableMap right = new Roaring64NavigableMap(true, true, new MutableRoaringBitmapSupplier()); left.addLong(123); right.addLong(234); left.or(right); Assert.assertEquals(2, left.getLongCardinality()); Assert.assertEquals(123, left.select(0)); Assert.assertEquals(234, left.select(1)); } @Test public void testOr_CloneInput() { Roaring64NavigableMap left = new Roaring64NavigableMap(); Roaring64NavigableMap right = new Roaring64NavigableMap(); right.addLong(123); // We push in left a bucket which does not exist left.or(right); // Then we mutate left: ensure it does not impact right as it should remain unchanged left.addLong(234); Assert.assertEquals(2, left.getLongCardinality()); Assert.assertEquals(123, left.select(0)); Assert.assertEquals(234, left.select(1)); Assert.assertEquals(1, right.getLongCardinality()); Assert.assertEquals(123, right.select(0)); } @Test public void testToString_signed() { Roaring64NavigableMap map = new Roaring64NavigableMap(true); map.addLong(123); map.addLong(Long.MAX_VALUE); map.addLong(Long.MAX_VALUE + 1L); Assert.assertEquals("{-9223372036854775808,123,9223372036854775807}", map.toString()); } @Test public void testToString_unsigned() { Roaring64NavigableMap map = new Roaring64NavigableMap(false); map.addLong(123); map.addLong(Long.MAX_VALUE); map.addLong(Long.MAX_VALUE + 1L); Assert.assertEquals("{123,9223372036854775807,9223372036854775808}", map.toString()); } public static final long outOfRoaringBitmapRange = 2L * Integer.MAX_VALUE + 3L; // TODO // FIXME // @Ignore("TODO FIXME") @Test public void testCardinalityAboveIntegerMaxValue() { Roaring64NavigableMap map = new Roaring64NavigableMap(); // This should fill entirely one bitmap,and add one in the next bitmap map.add(0, outOfRoaringBitmapRange); Assert.assertEquals(0, map.select(0)); 
Assert.assertEquals(outOfRoaringBitmapRange, map.select(outOfRoaringBitmapRange - 1)); Assert.assertEquals(outOfRoaringBitmapRange, map.getLongCardinality()); } @Test(expected = IllegalArgumentException.class) public void testCardinalityAboveIntegerMaxValue_RoaringBitmap() { RoaringBitmap map = new RoaringBitmap(); map.add(0L, outOfRoaringBitmapRange); } }
Fix unit-tests
src/test/java/org/roaringbitmap/longlong/TestRoaring64NavigableMap.java
Fix unit-tests
Java
apache-2.0
27108be45f5b8753d2ec58a53083188853ff477b
0
danc86/jena-core,danc86/jena-core
/* * (c) Copyright 2002, Hewlett-Packard Company, all rights reserved. * [See end of file] */ package com.hp.hpl.jena.util; import java.io.* ; import java.net.* ; import org.apache.log4j.*; import com.hp.hpl.jena.rdf.model.* ; import com.hp.hpl.jena.mem.* ; //import com.hp.hpl.jena.bdb.* ; //import com.hp.hpl.jena.rdb.* ; /** A set of static convenience methods for getting models * The loader will guess the language/type of the model using * {@link #guessLang(String) guessLang} * * @author Andy Seaborne * @version $Id: ModelLoader.java,v 1.4 2003-03-06 09:49:50 andy_seaborne Exp $ */ public class ModelLoader { static Logger logger = Logger.getLogger(ModelLoader.class.getName()) ; public static final String langXML = "RDF/XML" ; public static final String langXMLAbbrev = "RDF/XML-ABBREV" ; public static final String langNTriple = "N-TRIPLE" ; public static final String langN3 = "N3" ; // Non-standard public static final String langBDB = "RDF/BDB" ; public static final String langSQL = "RDF/SQL" ; public static String defaultLanguage = langXML ; public static String basename = null ; public static boolean useARP = true ; /** Load a model * * @param urlStr The URL or file name of the model */ public static Model loadModel(String urlStr) { return loadModel(urlStr, null) ; } /** Load a model or attached a persistent store. * * @param urlStr The URL or file name of the model * @param lang The language of the data - if null, the system guesses */ public static Model loadModel(String urlStr, String lang) { return loadModel(urlStr, lang, "", "") ; } /** Load a model or attached a persistent store. * * @param urlStr The URL or file name of the model * @param lang The language of the data - if null, the system guesses * @param dbUser Database user name (for RDB/JDBC) * @param dbPassword Database password (for RDB/JDBC) */ public static Model loadModel(String urlStr, String lang, String dbUser, String dbPassword) { // Wild guess at the language! 
if ( lang == null ) lang = guessLang(urlStr) ; if ( lang == null ) lang = defaultLanguage ; if ( lang.equals(langBDB) ) { // @@ temporarily not supported Log.severe("Failed to open Berkeley database", "ModelLoader", "loadModel") ; System.exit(1) ; /* // URL had better be a file! if ( basename != null ) urlStr = basename+File.separator+urlStr ; String dirBDB = getDirname(urlStr) ; if ( dirBDB == null || dirBDB.length() == 0) dirBDB = "." ; urlStr = getBasename(urlStr) ; Log.debug("BDB: file="+urlStr+", dir="+dirBDB+", basename="+basename, "ModelLoader", "loadModel") ; try { Model model = new ModelBdb(new StoreBdbF(dirBDB, urlStr)) ; return model ; } catch (RDFException rdfEx) { Log.severe("Failed to open Berkeley database", "ModelLoader", "loadModel", rdfEx) ; System.exit(1) ; } */ } if ( lang.equals(langSQL) ) { // URL had better be a file! if ( basename != null ) urlStr = basename+File.separator+urlStr ; Log.debug("SQL: file="+urlStr, "ModelLoader", "loadModel") ; // @@ temporarily disabled Log.severe("Failed to open SQL database", "ModelLoader", "loadModel") ; System.exit(1) ; /* // No way to specify user and password. try { DBConnection dbcon = new DBConnection(urlStr, dbUser, dbPassword); ModelRDB model = null; try { model = ModelRDB.open(dbcon); } catch (Exception e) { model = ModelRDB.create(dbcon, "Generic", "Postgresql"); } return model ; } catch (RDFException rdfEx) { Log.severe("Failed to open SQL database", "ModelLoader", "loadModel", rdfEx) ; System.exit(1) ; } */ } // Language is N3, RDF/XML or N-TRIPLE Model m = new ModelMem() ; m.setReaderClassName(langXML, com.hp.hpl.jena.rdf.arp.JenaReader.class.getName()); m.setReaderClassName(langXMLAbbrev, com.hp.hpl.jena.rdf.arp.JenaReader.class.getName()); // Default. 
//m.setReaderClassName(langNTriple, com.hp.hpl.jena.rdf.arp.NTriple.class.getName()) ; try { loadModel(m, urlStr, lang) ; } catch (RDFException rdfEx) { Log.warning("Error loading data source", "ModelLoader", "loadModel", rdfEx); return null ; } catch (FileNotFoundException e) { Log.warning("No such data source: "+urlStr, "ModelLoader", "loadModel", e); return null ; } return m ; } public static Model loadModel(Model model, String urlStr, String lang) throws RDFException, java.io.FileNotFoundException { // Wild guess at the language! // Yes - repeated from above. // System.err.println( "[" + urlStr + "]" ); if ( lang == null ) lang = guessLang(urlStr) ; if ( lang.equals(langBDB) || lang.equals(langSQL) ) { Log.severe("Can't load data into existing model from a persistent database", "ModelLoader", "loadModel") ; return null ; } String base = "file://unknown.net/" ; Reader dataReader = null ; try { URL url = new URL(urlStr); dataReader = new BufferedReader(new InputStreamReader(url.openStream())) ; base = urlStr ; } catch (java.net.MalformedURLException e) { // Try as a file. String filename = urlStr ; File file = ( basename != null ) ? new File(basename, filename) : new File(filename) ; // Unfortunately Xerces objects to hybrid file, URLs with \ in them, for a base name. 
base = ("file:///"+file.getAbsolutePath()).replace('\\','/') ; // System.err.println( "| file = " + filename + " & basename = " + basename ); FileReader fr = tryFile( basename, filename ); // was new FileReader(filename) ; // was (file) dataReader = new BufferedReader(fr) ; } catch (java.io.IOException ioEx) { Log.severe("IOException: "+ioEx, "ModelLoader", "loadModel", ioEx) ; return null ; } //model.read(urlStr, base, lang) ; RDFReader rdfReader = model.getReader(lang) ; if ( rdfReader instanceof com.hp.hpl.jena.rdf.arp.JenaReader ) rdfReader.setProperty("error-mode", "lax") ; rdfReader.read(model, dataReader, base) ; try { dataReader.close() ; } catch (IOException ioEx) { logger.warn("IOException closing reader", ioEx) ; } return model ; } private static FileReader tryFile( String baseName, String fileName ) throws FileNotFoundException { try { return new FileReader( fileName ); } catch (FileNotFoundException e) { // System.err.println( "| could not read " + fileName + "; trying " + new File( baseName, fileName ) ); try { return new FileReader( new File( baseName, fileName ) ); } catch (FileNotFoundException e2) { // System.err.println( "| that didn't work either, alas" ); throw e2; } } } /** Guess the language/type of model data * <ul> * <li> If the URI of the model starts jdbc: it is assumed to be an RDB model</li> * <li> If the URI ends ".rdf", it is assumed to be RDF/XML</li> * <li> If the URI end .nt, it is assumed to be N-Triples</li> * <li> If the URI end .bdbd, it is assumed to be BerkleyDB model</li> * </ul> */ public static String guessLang(String urlStr) { String lang = null ; if ( urlStr.startsWith("jdbc:") || urlStr.startsWith("JDBC:") ) return langSQL ; String ext = getFilenameExt(urlStr) ; if ( ext != null && ext.length() > 0 ) { // Types that can be detected from file extensions if ( ext.equalsIgnoreCase("rdf") ) lang = langXML ; else if ( ext.equalsIgnoreCase("nt") ) lang = langNTriple ; else if ( ext.equalsIgnoreCase("n3") ) lang = langN3 ; 
else if ( ext.equalsIgnoreCase("bdb") ) lang = langBDB ; // But not .. //else if ( ext.equalsIgnoreCase("rdb") ) // lang = langSQL ; // else no idea. } return lang ; } /** Sets the directory used in * resolving URIs that are raw file names (no file:) * This is a global change when the ModelLoader is used. */ public static void setFileBase(String _basename) { basename = _basename ; } ; private static String getFilenameExt(String filename) { // Works on URLs int iSep = 0 ; // Last separator: either / or \ (covers all OSes?) int iExt = 0 ; // File extension iSep = filename.lastIndexOf('/') ; int iTmp = filename.lastIndexOf('\\') ; // NB \ is not an escape character in URLs if ( iTmp > iSep ) iSep = iTmp ; iExt = filename.lastIndexOf('.') ; if ( iExt > iSep ) { String ext = filename.substring(iExt+1).toLowerCase() ; return ext ; } return "" ; } private static String getDirname(String filename) { File f = new File(filename) ; return f.getParent() ; } private static String getBasename(String filename) { File f = new File(filename) ; return f.getName() ; } } /* * (c) Copyright Hewlett-Packard Company 2002 * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. 
* * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
src/com/hp/hpl/jena/util/ModelLoader.java
/* * (c) Copyright 2002, Hewlett-Packard Company, all rights reserved. * [See end of file] */ package com.hp.hpl.jena.util; import java.io.* ; import java.net.* ; import org.apache.log4j.*; import com.hp.hpl.jena.rdf.model.* ; import com.hp.hpl.jena.mem.* ; //import com.hp.hpl.jena.bdb.* ; //import com.hp.hpl.jena.rdb.* ; /** A set of static convenience methods for getting models * The loader will guess the language/type of the model using * {@link #guessLang(String) guessLang} * * @author Andy Seaborne * @version $Id: ModelLoader.java,v 1.3 2003-02-11 13:18:18 andy_seaborne Exp $ */ public class ModelLoader { static Logger logger = Logger.getLogger(ModelLoader.class.getName()) ; public static final String langXML = "RDF/XML" ; public static final String langXMLAbbrev = "RDF/XML-ABBREV" ; public static final String langNTriple = "N-TRIPLE" ; public static final String langN3 = "N3" ; // Non-standard public static final String langBDB = "RDF/BDB" ; public static final String langSQL = "RDF/SQL" ; public static String defaultLanguage = langXML ; public static String basename = null ; public static boolean useARP = true ; /** Load a model or attached a persistent store. * * @param urlStr The URL or file name of the model * @param lang The language of the data - if null, the system guesses */ public static Model loadModel(String urlStr, String lang) { return loadModel(urlStr, lang, "", "") ; } /** Load a model or attached a persistent store. * * @param urlStr The URL or file name of the model * @param lang The language of the data - if null, the system guesses * @param dbUser Database user name (for RDB/JDBC) * @param dbPassword Database password (for RDB/JDBC) */ public static Model loadModel(String urlStr, String lang, String dbUser, String dbPassword) { // Wild guess at the language! 
if ( lang == null ) lang = guessLang(urlStr) ; if ( lang == null ) lang = defaultLanguage ; if ( lang.equals(langBDB) ) { // @@ temporarily not supported Log.severe("Failed to open Berkeley database", "ModelLoader", "loadModel") ; System.exit(1) ; /* // URL had better be a file! if ( basename != null ) urlStr = basename+File.separator+urlStr ; String dirBDB = getDirname(urlStr) ; if ( dirBDB == null || dirBDB.length() == 0) dirBDB = "." ; urlStr = getBasename(urlStr) ; Log.debug("BDB: file="+urlStr+", dir="+dirBDB+", basename="+basename, "ModelLoader", "loadModel") ; try { Model model = new ModelBdb(new StoreBdbF(dirBDB, urlStr)) ; return model ; } catch (RDFException rdfEx) { Log.severe("Failed to open Berkeley database", "ModelLoader", "loadModel", rdfEx) ; System.exit(1) ; } */ } if ( lang.equals(langSQL) ) { // URL had better be a file! if ( basename != null ) urlStr = basename+File.separator+urlStr ; Log.debug("SQL: file="+urlStr, "ModelLoader", "loadModel") ; // @@ temporarily disabled Log.severe("Failed to open SQL database", "ModelLoader", "loadModel") ; System.exit(1) ; /* // No way to specify user and password. try { DBConnection dbcon = new DBConnection(urlStr, dbUser, dbPassword); ModelRDB model = null; try { model = ModelRDB.open(dbcon); } catch (Exception e) { model = ModelRDB.create(dbcon, "Generic", "Postgresql"); } return model ; } catch (RDFException rdfEx) { Log.severe("Failed to open SQL database", "ModelLoader", "loadModel", rdfEx) ; System.exit(1) ; } */ } // Language is N3, RDF/XML or N-TRIPLE Model m = new ModelMem() ; m.setReaderClassName(langXML, com.hp.hpl.jena.rdf.arp.JenaReader.class.getName()); m.setReaderClassName(langXMLAbbrev, com.hp.hpl.jena.rdf.arp.JenaReader.class.getName()); // Default. 
//m.setReaderClassName(langNTriple, com.hp.hpl.jena.rdf.arp.NTriple.class.getName()) ; try { loadModel(m, urlStr, lang) ; } catch (RDFException rdfEx) { Log.warning("Error loading data source", "ModelLoader", "loadModel", rdfEx); return null ; } catch (FileNotFoundException e) { Log.warning("No such data source: "+urlStr, "ModelLoader", "loadModel", e); return null ; } return m ; } public static Model loadModel(Model model, String urlStr, String lang) throws RDFException, java.io.FileNotFoundException { // Wild guess at the language! // Yes - repeated from above. // System.err.println( "[" + urlStr + "]" ); if ( lang == null ) lang = guessLang(urlStr) ; if ( lang.equals(langBDB) || lang.equals(langSQL) ) { Log.severe("Can't load data into existing model from a persistent database", "ModelLoader", "loadModel") ; return null ; } String base = "file://unknown.net/" ; Reader dataReader = null ; try { URL url = new URL(urlStr); dataReader = new BufferedReader(new InputStreamReader(url.openStream())) ; base = urlStr ; } catch (java.net.MalformedURLException e) { // Try as a file. String filename = urlStr ; File file = ( basename != null ) ? new File(basename, filename) : new File(filename) ; // Unfortunately Xerces objects to hybrid file, URLs with \ in them, for a base name. 
base = ("file:///"+file.getAbsolutePath()).replace('\\','/') ; // System.err.println( "| file = " + filename + " & basename = " + basename ); FileReader fr = tryFile( basename, filename ); // was new FileReader(filename) ; // was (file) dataReader = new BufferedReader(fr) ; } catch (java.io.IOException ioEx) { Log.severe("IOException: "+ioEx, "ModelLoader", "loadModel", ioEx) ; return null ; } //model.read(urlStr, base, lang) ; RDFReader rdfReader = model.getReader(lang) ; if ( rdfReader instanceof com.hp.hpl.jena.rdf.arp.JenaReader ) rdfReader.setProperty("error-mode", "lax") ; rdfReader.read(model, dataReader, base) ; try { dataReader.close() ; } catch (IOException ioEx) { logger.warn("IOException closing reader", ioEx) ; } return model ; } private static FileReader tryFile( String baseName, String fileName ) throws FileNotFoundException { try { return new FileReader( fileName ); } catch (FileNotFoundException e) { // System.err.println( "| could not read " + fileName + "; trying " + new File( baseName, fileName ) ); try { return new FileReader( new File( baseName, fileName ) ); } catch (FileNotFoundException e2) { // System.err.println( "| that didn't work either, alas" ); throw e2; } } } /** Guess the language/type of model data * <ul> * <li> If the URI of the model starts jdbc: it is assumed to be an RDB model</li> * <li> If the URI ends ".rdf", it is assumed to be RDF/XML</li> * <li> If the URI end .nt, it is assumed to be N-Triples</li> * <li> If the URI end .bdbd, it is assumed to be BerkleyDB model</li> * </ul> */ public static String guessLang(String urlStr) { String lang = null ; if ( urlStr.startsWith("jdbc:") || urlStr.startsWith("JDBC:") ) return langSQL ; String ext = getFilenameExt(urlStr) ; if ( ext != null && ext.length() > 0 ) { // Types that can be detected from file extensions if ( ext.equalsIgnoreCase("rdf") ) lang = langXML ; else if ( ext.equalsIgnoreCase("nt") ) lang = langNTriple ; else if ( ext.equalsIgnoreCase("n3") ) lang = langN3 ; 
else if ( ext.equalsIgnoreCase("bdb") ) lang = langBDB ; // But not .. //else if ( ext.equalsIgnoreCase("rdb") ) // lang = langSQL ; // else no idea. } return lang ; } /** Sets the directory used in * resolving URIs that are raw file names (no file:) * This is a global change when the ModelLoader is used. */ public static void setFileBase(String _basename) { basename = _basename ; } ; private static String getFilenameExt(String filename) { // Works on URLs int iSep = 0 ; // Last separator: either / or \ (covers all OSes?) int iExt = 0 ; // File extension iSep = filename.lastIndexOf('/') ; int iTmp = filename.lastIndexOf('\\') ; // NB \ is not an escape character in URLs if ( iTmp > iSep ) iSep = iTmp ; iExt = filename.lastIndexOf('.') ; if ( iExt > iSep ) { String ext = filename.substring(iExt+1).toLowerCase() ; return ext ; } return "" ; } private static String getDirname(String filename) { File f = new File(filename) ; return f.getParent() ; } private static String getBasename(String filename) { File f = new File(filename) ; return f.getName() ; } } /* * (c) Copyright Hewlett-Packard Company 2002 * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. 
* * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
New method loadModel(string) to choose and load a file or URL git-svn-id: 227c23bb629cf7bef445105b977924772e49ae4f@1109235 13f79535-47bb-0310-9956-ffa450edef68
src/com/hp/hpl/jena/util/ModelLoader.java
New method loadModel(string) to choose and load a file or URL
Java
apache-2.0
f21efbe2e834b2b860052f0a2c226d648f902685
0
mdogan/hazelcast,tufangorel/hazelcast,emre-aydin/hazelcast,mesutcelik/hazelcast,emre-aydin/hazelcast,tkountis/hazelcast,Donnerbart/hazelcast,mdogan/hazelcast,dbrimley/hazelcast,dbrimley/hazelcast,tkountis/hazelcast,tufangorel/hazelcast,dbrimley/hazelcast,dsukhoroslov/hazelcast,dsukhoroslov/hazelcast,mesutcelik/hazelcast,mesutcelik/hazelcast,emre-aydin/hazelcast,Donnerbart/hazelcast,Donnerbart/hazelcast,tkountis/hazelcast,tufangorel/hazelcast,mdogan/hazelcast
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.jsr; import com.hazelcast.core.Hazelcast; import com.hazelcast.core.HazelcastInstanceNotActiveException; import com.hazelcast.instance.HazelcastInstanceFactory; import com.hazelcast.util.EmptyStatement; import javax.cache.Caching; import javax.cache.spi.CachingProvider; import java.lang.reflect.Field; import java.util.LinkedList; import java.util.List; import java.util.Map; import static com.hazelcast.test.HazelcastTestSupport.assertThatIsNoParallelTest; import static java.lang.String.format; import static org.junit.Assert.fail; /** * Utility class responsible for setup/cleanup of JSR member tests. */ public final class JsrTestUtil { /** * Keeps track of system properties set by this utility. * <p> * We have to manage the System properties by ourselves, since they are set in {@link org.junit.BeforeClass} methods, * which are invoked before our Hazelcast {@link org.junit.runner.Runner} classes are copying the System properties * to restore them for us. 
*/ private static final List<String> SYSTEM_PROPERTY_REGISTRY = new LinkedList<String>(); private JsrTestUtil() { } public static void setup() { assertThatIsNoParallelTest(); setSystemProperties("server"); } public static void cleanup() { clearSystemProperties(); clearCachingProviderRegistry(); Hazelcast.shutdownAll(); HazelcastInstanceFactory.terminateAll(); } /** * Sets the System properties for JSR related tests including the JCache provider type. * * @param providerType "server" or "client" according to your test type */ public static void setSystemProperties(String providerType) { /* If we don't set this parameter the HazelcastCachingProvider will try to determine if it has to create a client or server CachingProvider by looking for the client class. If you run the testsuite from IDEA across all modules, that class is available (even though you might want to start a server side test). This leads to a ClassCastException for server side tests, since a client CachingProvider will be created. So we explicitly set this property to ease the test setups for IDEA environments. */ setSystemProperty("hazelcast.jcache.provider.type", providerType); setSystemProperties(); } /** * Sets the System properties for JSR related tests. 
*/ public static void setSystemProperties() { // uses plain strings to avoid triggering any classloading of JSR classes with static code initializations setSystemProperty("javax.management.builder.initial", "com.hazelcast.cache.impl.TCKMBeanServerBuilder"); setSystemProperty("CacheManagerImpl", "com.hazelcast.cache.HazelcastCacheManager"); setSystemProperty("javax.cache.Cache", "com.hazelcast.cache.ICache"); setSystemProperty("javax.cache.Cache.Entry", "com.hazelcast.cache.impl.CacheEntry"); setSystemProperty("org.jsr107.tck.management.agentId", "TCKMbeanServer"); setSystemProperty("javax.cache.annotation.CacheInvocationContext", "javax.cache.annotation.impl.cdi.CdiCacheKeyInvocationContextImpl"); } /** * Clears the System properties for JSR related tests. */ public static void clearSystemProperties() { for (String key : SYSTEM_PROPERTY_REGISTRY) { System.clearProperty(key); } SYSTEM_PROPERTY_REGISTRY.clear(); } /** * Closes and removes the {@link javax.cache.spi.CachingProvider} from the static registry in {@link Caching}. 
*/ public static void clearCachingProviderRegistry() { try { // retrieve the CachingProviderRegistry instance Field providerRegistryField = getProviderRegistryField(); // retrieve the map with the CachingProvider instances Map<ClassLoader, Map<String, CachingProvider>> providerMap = getProviderMap(providerRegistryField); // close all existing CachingProvider for (Map<String, CachingProvider> providers : providerMap.values()) { for (CachingProvider provider : providers.values()) { try { provider.close(); } catch (HazelcastInstanceNotActiveException ignored) { // this is fine, since the instances can already be stopped } } } // clear the CachingProvider map providerMap.clear(); Class<?> providerRegistryClass = providerRegistryField.getType(); Object providerRegistryInstance = providerRegistryField.get(Caching.class); // retrieve the ClassLoader of the CachingProviderRegistry Field classLoaderField = providerRegistryClass.getDeclaredField("classLoader"); classLoaderField.setAccessible(true); // set the ClassLoader to null classLoaderField.set(providerRegistryInstance, null); } catch (Exception e) { e.printStackTrace(); fail(format("Could not cleanup CachingProvider registry: [%s] %s", e.getClass().getSimpleName(), e.getMessage())); } } /** * Returns the number of registered {@link javax.cache.spi.CachingProvider} from the static registry in {@link Caching}. 
*/ public static int getCachingProviderRegistrySize() { try { // retrieve the CachingProviderRegistry instance Field providerRegistryField = getProviderRegistryField(); // retrieve the map with the CachingProvider instances Map<ClassLoader, Map<String, CachingProvider>> providerMap = getProviderMap(providerRegistryField); // count the number of existing CachingProviders int count = 0; for (Map<String, CachingProvider> providers : providerMap.values()) { count += providers.values().size(); } // return the map size return count; } catch (NoClassDefFoundError e) { return -1; } catch (Exception e) { return -1; } } private static void setSystemProperty(String key, String value) { // we just want to set a System property, which has not been set already // this way you can always override a JSR setting manually if (System.getProperty(key) == null) { System.setProperty(key, value); SYSTEM_PROPERTY_REGISTRY.add(key); } } private static Field getProviderRegistryField() throws NoSuchFieldException { Field providerRegistryField = Caching.class.getDeclaredField("CACHING_PROVIDERS"); providerRegistryField.setAccessible(true); return providerRegistryField; } private static Map<ClassLoader, Map<String, CachingProvider>> getProviderMap(Field providerRegistryField) throws Exception { Class<?> providerRegistryClass = providerRegistryField.getType(); Object providerRegistryInstance = providerRegistryField.get(Caching.class); // retrieve the map with the CachingProvider instances Field providerMapField = providerRegistryClass.getDeclaredField("cachingProviders"); providerMapField.setAccessible(true); //noinspection unchecked return (Map<ClassLoader, Map<String, CachingProvider>>) providerMapField.get(providerRegistryInstance); } }
hazelcast/src/test/java/com/hazelcast/cache/jsr/JsrTestUtil.java
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.jsr; import com.hazelcast.core.Hazelcast; import com.hazelcast.core.HazelcastInstanceNotActiveException; import com.hazelcast.instance.HazelcastInstanceFactory; import javax.cache.Caching; import javax.cache.spi.CachingProvider; import java.lang.reflect.Field; import java.util.LinkedList; import java.util.List; import java.util.Map; import static com.hazelcast.test.HazelcastTestSupport.assertThatIsNoParallelTest; import static java.lang.String.format; import static org.junit.Assert.fail; /** * Utility class responsible for setup/cleanup of JSR member tests. */ public final class JsrTestUtil { /** * Keeps track of system properties set by this utility. * <p> * We have to manage the System properties by ourselves, since they are set in {@link org.junit.BeforeClass} methods, * which are invoked before our Hazelcast {@link org.junit.runner.Runner} classes are copying the System properties * to restore them for us. 
*/ private static final List<String> SYSTEM_PROPERTY_REGISTRY = new LinkedList<String>(); private JsrTestUtil() { } public static void setup() { assertThatIsNoParallelTest(); setSystemProperties("server"); } public static void cleanup() { clearSystemProperties(); clearCachingProviderRegistry(); Hazelcast.shutdownAll(); HazelcastInstanceFactory.terminateAll(); } /** * Sets the System properties for JSR related tests including the JCache provider type. * * @param providerType "server" or "client" according to your test type */ public static void setSystemProperties(String providerType) { /* If we don't set this parameter the HazelcastCachingProvider will try to determine if it has to create a client or server CachingProvider by looking for the client class. If you run the testsuite from IDEA across all modules, that class is available (even though you might want to start a server side test). This leads to a ClassCastException for server side tests, since a client CachingProvider will be created. So we explicitly set this property to ease the test setups for IDEA environments. */ setSystemProperty("hazelcast.jcache.provider.type", providerType); setSystemProperties(); } /** * Sets the System properties for JSR related tests. 
*/ public static void setSystemProperties() { // uses plain strings to avoid triggering any classloading of JSR classes with static code initializations setSystemProperty("javax.management.builder.initial", "com.hazelcast.cache.impl.TCKMBeanServerBuilder"); setSystemProperty("CacheManagerImpl", "com.hazelcast.cache.HazelcastCacheManager"); setSystemProperty("javax.cache.Cache", "com.hazelcast.cache.ICache"); setSystemProperty("javax.cache.Cache.Entry", "com.hazelcast.cache.impl.CacheEntry"); setSystemProperty("org.jsr107.tck.management.agentId", "TCKMbeanServer"); setSystemProperty("javax.cache.annotation.CacheInvocationContext", "javax.cache.annotation.impl.cdi.CdiCacheKeyInvocationContextImpl"); } /** * Clears the System properties for JSR related tests. */ public static void clearSystemProperties() { for (String key : SYSTEM_PROPERTY_REGISTRY) { System.clearProperty(key); } SYSTEM_PROPERTY_REGISTRY.clear(); } /** * Closes and removes the {@link javax.cache.spi.CachingProvider} from the static registry in {@link Caching}. 
*/ public static void clearCachingProviderRegistry() { try { // retrieve the CachingProviderRegistry instance Field providerRegistryField = getProviderRegistryField(); // retrieve the map with the CachingProvider instances Map<ClassLoader, Map<String, CachingProvider>> providerMap = getProviderMap(providerRegistryField); // close all existing CachingProvider for (Map<String, CachingProvider> providers : providerMap.values()) { for (CachingProvider provider : providers.values()) { try { provider.close(); } catch (HazelcastInstanceNotActiveException ignored) { // this is fine, since the instances can already be stopped } } } // clear the CachingProvider map providerMap.clear(); Class<?> providerRegistryClass = providerRegistryField.getType(); Object providerRegistryInstance = providerRegistryField.get(Caching.class); // retrieve the ClassLoader of the CachingProviderRegistry Field classLoaderField = providerRegistryClass.getDeclaredField("classLoader"); classLoaderField.setAccessible(true); // set the ClassLoader to null classLoaderField.set(providerRegistryInstance, null); } catch (Exception e) { e.printStackTrace(); fail(format("Could not cleanup CachingProvider registry: [%s] %s", e.getClass().getSimpleName(), e.getMessage())); } } /** * Returns the number of registered {@link javax.cache.spi.CachingProvider} from the static registry in {@link Caching}. 
*/ public static int getCachingProviderRegistrySize() { try { // retrieve the CachingProviderRegistry instance Field providerRegistryField = getProviderRegistryField(); // retrieve the map with the CachingProvider instances Map<ClassLoader, Map<String, CachingProvider>> providerMap = getProviderMap(providerRegistryField); // count the number of existing CachingProviders int count = 0; for (Map<String, CachingProvider> providers : providerMap.values()) { count += providers.values().size(); } // return the map size return count; } catch (Exception e) { return -1; } } private static void setSystemProperty(String key, String value) { // we just want to set a System property, which has not been set already // this way you can always override a JSR setting manually if (System.getProperty(key) == null) { System.setProperty(key, value); SYSTEM_PROPERTY_REGISTRY.add(key); } } private static Field getProviderRegistryField() throws NoSuchFieldException { Field providerRegistryField = Caching.class.getDeclaredField("CACHING_PROVIDERS"); providerRegistryField.setAccessible(true); return providerRegistryField; } private static Map<ClassLoader, Map<String, CachingProvider>> getProviderMap(Field providerRegistryField) throws Exception { Class<?> providerRegistryClass = providerRegistryField.getType(); Object providerRegistryInstance = providerRegistryField.get(Caching.class); // retrieve the map with the CachingProvider instances Field providerMapField = providerRegistryClass.getDeclaredField("cachingProviders"); providerMapField.setAccessible(true); //noinspection unchecked return (Map<ClassLoader, Map<String, CachingProvider>>) providerMapField.get(providerRegistryInstance); } }
Fixed NoClassDefFoundError in JsrTestUtil.getCachingProviderRegistrySize()
hazelcast/src/test/java/com/hazelcast/cache/jsr/JsrTestUtil.java
Fixed NoClassDefFoundError in JsrTestUtil.getCachingProviderRegistrySize()
Java
apache-2.0
78f9041452250be31a697e519022c1209958d909
0
ymn/lorsource,kloun/lorsource,ymn/lorsource,kloun/lorsource,fat0troll/lorsource,bodqhrohro/lorsource,hizel/lorsource,maxcom/lorsource,maxcom/lorsource,maxcom/lorsource,fat0troll/lorsource,bodqhrohro/lorsource,hizel/lorsource,hizel/lorsource,fat0troll/lorsource,ymn/lorsource,bodqhrohro/lorsource,kloun/lorsource,ymn/lorsource,kloun/lorsource,hizel/lorsource,maxcom/lorsource,fat0troll/lorsource
/* * Copyright 1998-2012 Linux.org.ru * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ru.org.linux.auth; import org.springframework.beans.factory.InitializingBean; import org.springframework.security.core.Authentication; import org.springframework.web.context.WebApplicationContext; import org.springframework.web.context.support.WebApplicationContextUtils; import org.springframework.web.filter.GenericFilterBean; import ru.org.linux.csrf.CSRFProtectionService; import ru.org.linux.site.Template; import ru.org.linux.spring.Configuration; import ru.org.linux.util.LorHttpUtils; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import java.io.IOException; import java.util.Properties; /** */ public class SecurityFilter extends GenericFilterBean implements InitializingBean { public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException { WebApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(getServletContext()); HttpServletRequest request = (HttpServletRequest) req; request.setAttribute("configuration", ctx.getBean(Configuration.class)); request.setAttribute("template", new Template(ctx)); request.setCharacterEncoding("utf-8"); // блядский tomcat CSRFManipulation(request, 
(HttpServletResponse) res); chain.doFilter(req, res); } private void CSRFManipulation(HttpServletRequest request, HttpServletResponse response) { Properties cookies = LorHttpUtils.getCookies(request.getCookies()); if (cookies.get(CSRFProtectionService.CSRF_COOKIE) == null) { CSRFProtectionService.generateCSRFCookie(request, response); } else { request.setAttribute(CSRFProtectionService.CSRF_ATTRIBUTE, cookies.getProperty(CSRFProtectionService.CSRF_COOKIE).trim()); } response.addHeader("Cache-Control", "private"); } private void forWikiManipulation(HttpServletRequest request, HttpServletResponse response, Authentication authentication) { HttpSession session = request.getSession(); AuthUtil.getCurrentUser().acegiSecurityHack(response, session); } }
src/main/java/ru/org/linux/auth/SecurityFilter.java
/* * Copyright 1998-2012 Linux.org.ru * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ru.org.linux.auth; import org.springframework.beans.factory.InitializingBean; import org.springframework.security.core.Authentication; import org.springframework.web.context.WebApplicationContext; import org.springframework.web.context.support.WebApplicationContextUtils; import org.springframework.web.filter.GenericFilterBean; import ru.org.linux.csrf.CSRFProtectionService; import ru.org.linux.site.Template; import ru.org.linux.spring.Configuration; import ru.org.linux.user.Profile; import ru.org.linux.util.LorHttpUtils; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import java.io.IOException; import java.util.Properties; /** */ public class SecurityFilter extends GenericFilterBean implements InitializingBean { public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException { WebApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(getServletContext()); HttpServletRequest request = (HttpServletRequest) req; request.getSession().setAttribute("configuration", ctx.getBean(Configuration.class)); request.getSession().setAttribute("template", new Template(ctx)); if 
(AuthUtil.isSessionAuthorized()) { request.getSession().setAttribute("currentStyle", AuthUtil.getCurrentUser().getStyle()); request.getSession().setAttribute("currentProfile", AuthUtil.getCurrentProfile()); request.getSession().setAttribute("currentProperties", AuthUtil.getProf()); forWikiManipulation(request, (HttpServletResponse) res, AuthUtil.getAuthentication()); } else { request.getSession().setAttribute("currentStyle", "tango"); request.getSession().setAttribute("currentProfile", Profile.getDefaultProfile()); request.getSession().setAttribute("currentProperties", AuthUtil.getProf()); } request.setCharacterEncoding("utf-8"); // блядский tomcat CSRFManipulation(request, (HttpServletResponse) res); chain.doFilter(req, res); } private void CSRFManipulation(HttpServletRequest request, HttpServletResponse response) { Properties cookies = LorHttpUtils.getCookies(request.getCookies()); if (cookies.get(CSRFProtectionService.CSRF_COOKIE) == null) { CSRFProtectionService.generateCSRFCookie(request, response); } else { request.setAttribute(CSRFProtectionService.CSRF_ATTRIBUTE, cookies.getProperty(CSRFProtectionService.CSRF_COOKIE).trim()); } response.addHeader("Cache-Control", "private"); } private void forWikiManipulation(HttpServletRequest request, HttpServletResponse response, Authentication authentication) { HttpSession session = request.getSession(); AuthUtil.getCurrentUser().acegiSecurityHack(response, session); } }
хранить текущего пользователя в запросе
src/main/java/ru/org/linux/auth/SecurityFilter.java
хранить текущего пользователя в запросе
Java
apache-2.0
d96b838d3147da6a2cef4b510d30090dda8be61f
0
JBYoshi/BlockDodge
/* * Copyright (c) 2015 JBYoshi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jbyoshi.blockdodge; import java.awt.*; import java.util.*; import java.util.concurrent.atomic.*; import jbyoshi.blockdodge.util.*; import jbyoshi.blockdodge.util.TaskQueue; public abstract class BlockDodgeGame { final Random rand = new Random(); private static final int FRAME_TIME = 1000 / 75; private Dimension size = new Dimension(0, 0); private final Set<DodgeShape> shapes = new HashSet<DodgeShape>(); private final AtomicBoolean stop = new AtomicBoolean(false), pause = new AtomicBoolean(false), playerActive = new AtomicBoolean(false); private final TaskQueue tasks = new TaskQueue(); private volatile double score; private static final RandomChooser<Color> COLORS = new RandomChooser<>(Color.BLUE, Color.CYAN, Color.GREEN, Color.MAGENTA, new Color(255, 127, 0), new Color(0, 140, 0), Color.RED, Color.YELLOW); public void add(DodgeShape shape) { shapes.add(shape); } public void remove(DodgeShape shape) { shapes.remove(shape); shape.onDeath(); if (shape instanceof PlayerDodgeShape) { playerActive.set(false); setPaused(false); } if (shape.getDropCount() == 0) { shape.onFullyRemoved(); } } public boolean contains(DodgeShape shape) { return shapes.contains(shape); } public void go(PlayerDodgeShape player) { playerActive.set(player != null); if (player != null) { shapes.clear(); player.reset(); shapes.add(player); score = 0; } pause.set(false); int timer = 0; stop.set(false); while (!stop.get()) { 
while (pause.get()) { try { Thread.sleep(10); } catch (InterruptedException e) { } } long start = System.currentTimeMillis(); Dimension newSize = calculateSize(); int xChange = newSize.width - size.width; int yChange = newSize.height - size.height; if (xChange != 0 || yChange != 0) { for (DodgeShape shape : shapes) { if (xChange != 0 && shape.shape.getMaxX() > newSize.width) { shape.shape.x += xChange; } if (yChange != 0 && shape.shape.getMaxY() > newSize.height) { shape.shape.y += yChange; } } size = newSize; } for (DodgeShape shape : new HashSet<DodgeShape>(shapes)) { shape.move(); } for (DodgeShape one : new HashSet<DodgeShape>(shapes)) { if (!shapes.contains(one)) { continue; // Removed during a previous iteration } for (DodgeShape two : new HashSet<DodgeShape>(shapes)) { if (!shapes.contains(two) || one == two) { continue; } if (one.collides(two)) { one.onCollided(two); two.onCollided(one); } } } if (timer % 12 == 0) { createShape(player != null, timer); } if (contains(player)) { score += 250000.0 / getWidth() / getHeight(); } tasks.runAll(); update(); timer++; long sleep = FRAME_TIME - (System.currentTimeMillis() - start); if (sleep > 0) { try { Thread.sleep(sleep); } catch (InterruptedException e) { } } } } private void createShape(boolean includePlayer, int timer) { int w = rand.nextInt(25) + 8; int h = rand.nextInt(25) + 8; int pos = rand.nextInt(2 * (getWidth() + w + getHeight() + h)); int x, y; float dirChg; if (pos < getWidth() + w) { // From top x = pos - w + 1; y = -h; dirChg = 0.25f; } else { pos -= getWidth() + w; if (pos < getWidth() + w) { // From bottom x = pos - w; y = getHeight(); dirChg = 0.75f; } else { pos -= getWidth() + w; if (pos < getHeight() + h) { // From left x = -w; y = pos - h; dirChg = 0; } else { // From right pos -= getHeight() + h; x = getWidth(); y = pos - h + 1; dirChg = 0.5f; } } } float dir = (rand.nextFloat() / 2 + dirChg) % 1; Color c = COLORS.next(); add(new BounceDodgeShape(this, x, y, w, h, c, (float) (dir * 2 * 
Math.PI), includePlayer ? timer / 5000.0 + 1 : 1.5)); } public void stop() { stop.set(true); } public int getWidth() { return size.width; } public int getHeight() { return size.height; } public double getRawScore() { return score; } public int getScore() { return (int) score; } public Set<DodgeShape> getShapes() { return Collections.unmodifiableSet(shapes); } public void addTask(Runnable task) { tasks.add(task); } public void setPaused(boolean paused) { boolean active = playerActive.get(); while (true) { boolean newActive = playerActive.get(); pause.set(paused && active); updatePaused(paused && active); if (newActive == active) break; active = newActive; } } protected abstract Dimension calculateSize(); protected abstract void updatePaused(boolean paused); protected abstract void update(); }
src/main/java/jbyoshi/blockdodge/BlockDodgeGame.java
/* * Copyright (c) 2015 JBYoshi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jbyoshi.blockdodge; import java.awt.*; import java.util.*; import java.util.concurrent.atomic.*; import jbyoshi.blockdodge.util.*; import jbyoshi.blockdodge.util.TaskQueue; public abstract class BlockDodgeGame { final Random rand = new Random(); private static final int FRAME_TIME = 1000 / 75; private Dimension size = new Dimension(0, 0); private final Set<DodgeShape> shapes = new HashSet<DodgeShape>(); private final AtomicBoolean stop = new AtomicBoolean(false), pause = new AtomicBoolean(false); private final TaskQueue tasks = new TaskQueue(); private volatile double score; private static final RandomChooser<Color> COLORS = new RandomChooser<>(Color.BLUE, Color.CYAN, Color.GREEN, Color.MAGENTA, new Color(255, 127, 0), new Color(0, 140, 0), Color.RED, Color.YELLOW); public void add(DodgeShape shape) { shapes.add(shape); } public void remove(DodgeShape shape) { shapes.remove(shape); shape.onDeath(); if (shape.getDropCount() == 0) { shape.onFullyRemoved(); } } public boolean contains(DodgeShape shape) { return shapes.contains(shape); } public void go(PlayerDodgeShape player) { if (player != null) { shapes.clear(); player.reset(); shapes.add(player); score = 0; } else { shapes.remove(player); } int timer = 0; stop.set(false); while (!stop.get()) { while (pause.get()) { try { Thread.sleep(10); } catch (InterruptedException e) { } } long start = System.currentTimeMillis(); Dimension newSize = 
calculateSize(); int xChange = newSize.width - size.width; int yChange = newSize.height - size.height; if (xChange != 0 || yChange != 0) { for (DodgeShape shape : shapes) { if (xChange != 0 && shape.shape.getMaxX() > newSize.width) { shape.shape.x += xChange; } if (yChange != 0 && shape.shape.getMaxY() > newSize.height) { shape.shape.y += yChange; } } size = newSize; } for (DodgeShape shape : new HashSet<DodgeShape>(shapes)) { shape.move(); } for (DodgeShape one : new HashSet<DodgeShape>(shapes)) { if (!shapes.contains(one)) { continue; // Removed during a previous iteration } for (DodgeShape two : new HashSet<DodgeShape>(shapes)) { if (!shapes.contains(two) || one == two) { continue; } if (one.collides(two)) { one.onCollided(two); two.onCollided(one); } } } if (timer % 12 == 0) { createShape(player != null, timer); } if (contains(player)) { score += 250000.0 / getWidth() / getHeight(); } tasks.runAll(); update(); timer++; long sleep = FRAME_TIME - (System.currentTimeMillis() - start); if (sleep > 0) { try { Thread.sleep(sleep); } catch (InterruptedException e) { } } } } private void createShape(boolean includePlayer, int timer) { int w = rand.nextInt(25) + 8; int h = rand.nextInt(25) + 8; int pos = rand.nextInt(2 * (getWidth() + w + getHeight() + h)); int x, y; float dirChg; if (pos < getWidth() + w) { // From top x = pos - w + 1; y = -h; dirChg = 0.25f; } else { pos -= getWidth() + w; if (pos < getWidth() + w) { // From bottom x = pos - w; y = getHeight(); dirChg = 0.75f; } else { pos -= getWidth() + w; if (pos < getHeight() + h) { // From left x = -w; y = pos - h; dirChg = 0; } else { // From right pos -= getHeight() + h; x = getWidth(); y = pos - h + 1; dirChg = 0.5f; } } } float dir = (rand.nextFloat() / 2 + dirChg) % 1; Color c = COLORS.next(); add(new BounceDodgeShape(this, x, y, w, h, c, (float) (dir * 2 * Math.PI), includePlayer ? 
timer / 5000.0 + 1 : 1.5)); } public void stop() { stop.set(true); } public int getWidth() { return size.width; } public int getHeight() { return size.height; } public double getRawScore() { return score; } public int getScore() { return (int) score; } public Set<DodgeShape> getShapes() { return Collections.unmodifiableSet(shapes); } public void addTask(Runnable task) { tasks.add(task); } public boolean isPaused() { return pause.get(); } public void setPaused(boolean paused) { pause.set(paused); updatePaused(paused); } protected abstract Dimension calculateSize(); protected abstract void updatePaused(boolean paused); protected abstract void update(); }
Actually fix high score freezes.
src/main/java/jbyoshi/blockdodge/BlockDodgeGame.java
Actually fix high score freezes.
Java
apache-2.0
372033356e4dd866d5152fe5212283a6e77dd4c9
0
amaembo/streamex,manikitos/streamex
/* * Copyright 2015 Tagir Valeev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.util.streamex; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Deque; import java.util.EnumMap; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.OptionalInt; import java.util.Map.Entry; import java.util.Optional; import java.util.OptionalLong; import java.util.PriorityQueue; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BinaryOperator; import java.util.function.Function; import java.util.function.IntFunction; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToIntFunction; import java.util.function.ToLongFunction; import java.util.stream.Collector; import java.util.stream.Collector.Characteristics; import java.util.stream.Collectors; import static javax.util.streamex.StreamExInternals.*; /** * Implementations of several collectors in addition to ones available in JDK. 
* * @author Tagir Valeev * @see Collectors * @since 0.3.2 */ public final class MoreCollectors { private MoreCollectors() { throw new UnsupportedOperationException(); } /** * Returns a {@code Collector} which just ignores the input and calls the * provided supplier once to return the output. * * @param <T> * the type of input elements * @param <U> * the type of output * @param supplier * the supplier of the output * @return a {@code Collector} which just ignores the input and calls the * provided supplier once to return the output. */ private static <T, U> Collector<T, ?, U> empty(Supplier<U> supplier) { return new CancellableCollectorImpl<>(() -> NONE, (acc, t) -> { // empty }, selectFirst(), acc -> supplier.get(), acc -> true, EnumSet.allOf(Characteristics.class)); } private static <T> Collector<T, ?, List<T>> empty() { return empty(ArrayList<T>::new); } /** * Returns a {@code Collector} that accumulates the input elements into a * new array. * * The operation performed by the returned collector is equivalent to * {@code stream.toArray(generator)}. This collector is mostly useful as a * downstream collector. * * @param <T> * the type of the input elements * @param generator * a function which produces a new array of the desired type and * the provided length * @return a {@code Collector} which collects all the input elements into an * array, in encounter order */ public static <T> Collector<T, ?, T[]> toArray(IntFunction<T[]> generator) { return Collectors.collectingAndThen(Collectors.toList(), list -> list.toArray(generator.apply(list.size()))); } /** * Returns a {@code Collector} which produces a boolean array containing the * results of applying the given predicate to the input elements, in * encounter order. * * @param <T> * the type of the input elements * @param predicate * a non-interfering, stateless predicate to apply to each input * element. The result values of this predicate are collected to * the resulting boolean array. 
 * @return a {@code Collector} which collects the results of the predicate
 *         function to the boolean array, in encounter order.
 * @since 0.3.8
 */
public static <T> Collector<T, ?, boolean[]> toBooleanArray(Predicate<T> predicate) {
    // box.a is the bit container being filled, box.b is the current element
    // index; addExact guards against silent int overflow on huge streams.
    return PartialCollector.booleanArray().asRef((box, t) -> {
        if (predicate.test(t))
            box.a.set(box.b);
        box.b = StrictMath.addExact(box.b, 1);
    });
}

/**
 * Returns a {@code Collector} that accumulates the input enum values into a
 * new {@code EnumSet}.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a>: it may not process all the elements if the resulting set
 * contains all possible enum values.
 *
 * @param <T> the type of the input elements
 * @param enumClass the class of input enum values
 * @return a {@code Collector} which collects all the input elements into a
 *         {@code EnumSet}
 */
public static <T extends Enum<T>> Collector<T, ?, EnumSet<T>> toEnumSet(Class<T> enumClass) {
    // Once every constant of the enum is present, collection can stop early.
    int size = EnumSet.allOf(enumClass).size();
    return new CancellableCollectorImpl<>(() -> EnumSet.noneOf(enumClass), EnumSet::add, (s1, s2) -> {
        s1.addAll(s2);
        return s1;
    }, Function.identity(), set -> set.size() == size, UNORDERED_ID_CHARACTERISTICS);
}

/**
 * Returns a {@code Collector} which counts a number of distinct values the
 * mapper function returns for the stream elements.
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.map(mapper).distinct().count()}. This collector is mostly
 * useful as a downstream collector.
 *
 * @param <T> the type of the input elements
 * @param mapper a function which classifies input elements.
 * @return a collector which counts a number of distinct classes the mapper
 *         function returns for the stream elements.
 */
public static <T> Collector<T, ?, Integer> distinctCount(Function<? super T, ?> mapper) {
    return Collectors.collectingAndThen(Collectors.mapping(mapper, Collectors.toSet()), Set::size);
}

/**
 * Returns a {@code Collector} which collects into the {@link List} the
 * input elements for which given mapper function returns distinct results.
 *
 * <p>
 * For ordered source the order of collected elements is preserved. If the
 * same result is returned by mapper function for several elements, only the
 * first element is included into the resulting list.
 *
 * <p>
 * There are no guarantees on the type, mutability, serializability, or
 * thread-safety of the {@code List} returned.
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.distinct(mapper).toList()}, but may work faster.
 *
 * @param <T> the type of the input elements
 * @param mapper a function which classifies input elements.
 * @return a collector which collects distinct elements to the {@code List}.
 * @since 0.3.8
 */
public static <T> Collector<T, ?, List<T>> distinctBy(Function<? super T, ?> mapper) {
    // A LinkedHashMap keeps the FIRST element seen for each key while
    // preserving the encounter order of the keys themselves.
    return Collector.<T, Map<Object, T>, List<T>> of(LinkedHashMap::new,
        (map, t) -> map.putIfAbsent(mapper.apply(t), t), (m1, m2) -> {
            for (Entry<Object, T> e : m2.entrySet()) {
                m1.putIfAbsent(e.getKey(), e.getValue());
            }
            return m1;
        }, map -> new ArrayList<>(map.values()));
}

/**
 * Returns a {@code Collector} accepting elements of type {@code T} that
 * counts the number of input elements and returns result as {@code Integer}
 * . If no elements are present, the result is 0.
 *
 * @param <T> the type of the input elements
 * @return a {@code Collector} that counts the input elements
 * @since 0.3.3
 * @see Collectors#counting()
 */
public static <T> Collector<T, ?, Integer> countingInt() {
    return PartialCollector.intSum().asRef((acc, t) -> acc[0]++);
}

/**
 * Returns a {@code Collector} which aggregates the results of two supplied
 * collectors using the supplied finisher function.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> if both downstream collectors are short-circuiting. The
 * collection might stop when both downstream collectors report that the
 * collection is complete.
 *
 * @param <T> the type of the input elements
 * @param <A1> the intermediate accumulation type of the first collector
 * @param <A2> the intermediate accumulation type of the second collector
 * @param <R1> the result type of the first collector
 * @param <R2> the result type of the second collector
 * @param <R> the final result type
 * @param c1 the first collector
 * @param c2 the second collector
 * @param finisher the function which merges two results into the single one.
 * @return a {@code Collector} which aggregates the results of two supplied
 *         collectors.
 */
public static <T, A1, A2, R1, R2, R> Collector<T, ?, R> pairing(Collector<? super T, A1, R1> c1,
        Collector<? super T, A2, R2> c2, BiFunction<? super R1, ? super R2, ? extends R> finisher) {
    // Resulting characteristics: intersection of both downstream collectors'
    // characteristics, minus IDENTITY_FINISH (a real finisher always runs).
    EnumSet<Characteristics> c = EnumSet.noneOf(Characteristics.class);
    c.addAll(c1.characteristics());
    c.retainAll(c2.characteristics());
    c.remove(Characteristics.IDENTITY_FINISH);
    Supplier<A1> c1Supplier = c1.supplier();
    Supplier<A2> c2Supplier = c2.supplier();
    BiConsumer<A1, ? super T> c1Accumulator = c1.accumulator();
    BiConsumer<A2, ? super T> c2Accumulator = c2.accumulator();
    BinaryOperator<A1> c1Combiner = c1.combiner();
    BinaryOperator<A2> c2combiner = c2.combiner();
    // Both intermediate states ride in a single PairBox; every element is
    // fed to both downstream accumulators.
    Supplier<PairBox<A1, A2>> supplier = () -> new PairBox<>(c1Supplier.get(), c2Supplier.get());
    BiConsumer<PairBox<A1, A2>, T> accumulator = (acc, v) -> {
        c1Accumulator.accept(acc.a, v);
        c2Accumulator.accept(acc.b, v);
    };
    BinaryOperator<PairBox<A1, A2>> combiner = (acc1, acc2) -> {
        acc1.a = c1Combiner.apply(acc1.a, acc2.a);
        acc1.b = c2combiner.apply(acc1.b, acc2.b);
        return acc1;
    };
    Function<PairBox<A1, A2>, R> resFinisher = acc -> {
        R1 r1 = c1.finisher().apply(acc.a);
        R2 r2 = c2.finisher().apply(acc.b);
        return finisher.apply(r1, r2);
    };
    if (c1 instanceof CancellableCollector && c2 instanceof CancellableCollector) {
        // Short-circuit only when BOTH downstream collectors are finished.
        Predicate<A1> c1Finished = ((CancellableCollector<? super T, A1, R1>) c1).finished();
        Predicate<A2> c2Finished = ((CancellableCollector<? super T, A2, R2>) c2).finished();
        Predicate<PairBox<A1, A2>> finished = acc -> c1Finished.test(acc.a) && c2Finished.test(acc.b);
        return new CancellableCollectorImpl<>(supplier, accumulator, combiner, resFinisher, finished, c);
    }
    return Collector.of(supplier, accumulator, combiner, resFinisher, c.toArray(new Characteristics[c.size()]));
}

/**
 * Returns a {@code Collector} which finds all the elements which are equal
 * to each other and bigger than any other element according to the
 * specified {@link Comparator}. The found elements are reduced using the
 * specified downstream {@code Collector}.
 *
 * @param <T> the type of the input elements
 * @param <A> the intermediate accumulation type of the downstream collector
 * @param <D> the result type of the downstream reduction
 * @param comparator a {@code Comparator} to compare the elements
 * @param downstream a {@code Collector} implementing the downstream reduction
 * @return a {@code Collector} which finds all the maximal elements.
* @see #maxAll(Comparator) * @see #maxAll(Collector) * @see #maxAll() */ public static <T, A, D> Collector<T, ?, D> maxAll(Comparator<? super T> comparator, Collector<? super T, A, D> downstream) { Supplier<A> downstreamSupplier = downstream.supplier(); BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator(); BinaryOperator<A> downstreamCombiner = downstream.combiner(); Supplier<PairBox<A, T>> supplier = () -> new PairBox<>(downstreamSupplier.get(), none()); BiConsumer<PairBox<A, T>, T> accumulator = (acc, t) -> { if (acc.b == NONE) { downstreamAccumulator.accept(acc.a, t); acc.b = t; } else { int cmp = comparator.compare(t, acc.b); if (cmp > 0) { acc.a = downstreamSupplier.get(); acc.b = t; } if (cmp >= 0) downstreamAccumulator.accept(acc.a, t); } }; BinaryOperator<PairBox<A, T>> combiner = (acc1, acc2) -> { if (acc2.b == NONE) { return acc1; } if (acc1.b == NONE) { return acc2; } int cmp = comparator.compare(acc1.b, acc2.b); if (cmp > 0) { return acc1; } if (cmp < 0) { return acc2; } acc1.a = downstreamCombiner.apply(acc1.a, acc2.a); return acc1; }; Function<PairBox<A, T>, D> finisher = acc -> downstream.finisher().apply(acc.a); return Collector.of(supplier, accumulator, combiner, finisher); } /** * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the * specified {@link Comparator}. The found elements are collected to * {@link List}. * * @param <T> * the type of the input elements * @param comparator * a {@code Comparator} to compare the elements * @return a {@code Collector} which finds all the maximal elements and * collects them to the {@code List}. * @see #maxAll(Comparator, Collector) * @see #maxAll() */ public static <T> Collector<T, ?, List<T>> maxAll(Comparator<? 
super T> comparator) { return maxAll(comparator, Collectors.toList()); } /** * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the natural * order. The found elements are reduced using the specified downstream * {@code Collector}. * * @param <T> * the type of the input elements * @param <A> * the intermediate accumulation type of the downstream collector * @param <D> * the result type of the downstream reduction * @param downstream * a {@code Collector} implementing the downstream reduction * @return a {@code Collector} which finds all the maximal elements. * @see #maxAll(Comparator, Collector) * @see #maxAll(Comparator) * @see #maxAll() */ public static <T extends Comparable<? super T>, A, D> Collector<T, ?, D> maxAll(Collector<T, A, D> downstream) { return maxAll(Comparator.<T> naturalOrder(), downstream); } /** * Returns a {@code Collector} which finds all the elements which are equal * to each other and bigger than any other element according to the natural * order. The found elements are collected to {@link List}. * * @param <T> * the type of the input elements * @return a {@code Collector} which finds all the maximal elements and * collects them to the {@code List}. * @see #maxAll(Comparator) * @see #maxAll(Collector) */ public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> maxAll() { return maxAll(Comparator.<T> naturalOrder(), Collectors.toList()); } /** * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the * specified {@link Comparator}. The found elements are reduced using the * specified downstream {@code Collector}. 
* * @param <T> * the type of the input elements * @param <A> * the intermediate accumulation type of the downstream collector * @param <D> * the result type of the downstream reduction * @param comparator * a {@code Comparator} to compare the elements * @param downstream * a {@code Collector} implementing the downstream reduction * @return a {@code Collector} which finds all the minimal elements. * @see #minAll(Comparator) * @see #minAll(Collector) * @see #minAll() */ public static <T, A, D> Collector<T, ?, D> minAll(Comparator<? super T> comparator, Collector<T, A, D> downstream) { return maxAll(comparator.reversed(), downstream); } /** * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the * specified {@link Comparator}. The found elements are collected to * {@link List}. * * @param <T> * the type of the input elements * @param comparator * a {@code Comparator} to compare the elements * @return a {@code Collector} which finds all the minimal elements and * collects them to the {@code List}. * @see #minAll(Comparator, Collector) * @see #minAll() */ public static <T> Collector<T, ?, List<T>> minAll(Comparator<? super T> comparator) { return maxAll(comparator.reversed(), Collectors.toList()); } /** * Returns a {@code Collector} which finds all the elements which are equal * to each other and smaller than any other element according to the natural * order. The found elements are reduced using the specified downstream * {@code Collector}. * * @param <T> * the type of the input elements * @param <A> * the intermediate accumulation type of the downstream collector * @param <D> * the result type of the downstream reduction * @param downstream * a {@code Collector} implementing the downstream reduction * @return a {@code Collector} which finds all the minimal elements. 
 * @see #minAll(Comparator, Collector)
 * @see #minAll(Comparator)
 * @see #minAll()
 */
public static <T extends Comparable<? super T>, A, D> Collector<T, ?, D> minAll(Collector<T, A, D> downstream) {
    return maxAll(Comparator.<T> reverseOrder(), downstream);
}

/**
 * Returns a {@code Collector} which finds all the elements which are equal
 * to each other and smaller than any other element according to the natural
 * order. The found elements are collected to {@link List}.
 *
 * @param <T> the type of the input elements
 * @return a {@code Collector} which finds all the minimal elements and
 *         collects them to the {@code List}.
 * @see #minAll(Comparator)
 * @see #minAll(Collector)
 */
public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> minAll() {
    return maxAll(Comparator.<T> reverseOrder(), Collectors.toList());
}

/**
 * Returns a {@code Collector} which collects the stream element if stream
 * contains exactly one element.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a>.
 *
 * @param <T> the type of the input elements
 * @return a collector which returns an {@link Optional} which describes the
 *         only element of the stream. For empty stream or stream containing
 *         more than one element an empty {@code Optional} is returned.
 * @since 0.4.0
 */
public static <T> Collector<T, ?, Optional<T>> onlyOne() {
    // box.a encodes three states: null = nothing seen yet;
    // Optional.of(t) = exactly one element seen; Optional.empty() = two or
    // more seen (short-circuit point).
    return new CancellableCollectorImpl<>(() -> new Box<Optional<T>>(null),
        (box, t) -> box.a = box.a == null ? Optional.of(t) : Optional.empty(),
        (box1, box2) -> box1.a == null ? box2 : box2.a == null ? box1 : new Box<>(Optional.empty()),
        box -> box.a == null ? Optional.empty() : box.a,
        box -> box.a != null && !box.a.isPresent(), UNORDERED_CHARACTERISTICS);
}

/**
 * Returns a {@code Collector} which collects only the first stream element
 * if any.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a>.
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.findFirst()}. This collector is mostly useful as a
 * downstream collector.
 *
 * @param <T> the type of the input elements
 * @return a collector which returns an {@link Optional} which describes the
 *         first element of the stream. For empty stream an empty
 *         {@code Optional} is returned.
 */
public static <T> Collector<T, ?, Optional<T>> first() {
    // NONE marks the "no element yet" state; collection stops as soon as
    // the first element is captured.
    return new CancellableCollectorImpl<>(() -> new Box<T>(none()), (box, t) -> {
        if (box.a == NONE)
            box.a = t;
    }, (box1, box2) -> box1.a == NONE ? box2 : box1,
        box -> box.a == NONE ? Optional.empty() : Optional.of(box.a),
        box -> box.a != NONE, NO_CHARACTERISTICS);
}

/**
 * Returns a {@code Collector} which collects only the last stream element
 * if any.
 *
 * @param <T> the type of the input elements
 * @return a collector which returns an {@link Optional} which describes the
 *         last element of the stream. For empty stream an empty
 *         {@code Optional} is returned.
 */
public static <T> Collector<T, ?, Optional<T>> last() {
    // Reducing with "keep the right operand" leaves only the last element.
    return Collectors.reducing((u, v) -> v);
}

/**
 * Returns a {@code Collector} which collects at most specified number of
 * the first stream elements into the {@link List}.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a>.
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.limit(n).collect(Collectors.toList())}. This collector is
 * mostly useful as a downstream collector.
 *
 * @param <T> the type of the input elements
 * @param n maximum number of stream elements to preserve
 * @return a collector which returns a {@code List} containing the first n
 *         stream elements or less if the stream was shorter.
 */
public static <T> Collector<T, ?, List<T>> head(int n) {
    // Non-positive n: nothing can ever be collected — short-circuit with an
    // empty-list collector.
    if (n <= 0)
        return empty();
    return new CancellableCollectorImpl<>(ArrayList::new, (acc, t) -> {
        if (acc.size() < n)
            acc.add(t);
    }, (acc1, acc2) -> {
        // Take only as many elements from the right half as still fit.
        acc1.addAll(acc2.subList(0, Math.min(acc2.size(), n - acc1.size())));
        return acc1;
    }, Function.identity(), acc -> acc.size() >= n, ID_CHARACTERISTICS);
}

/**
 * Returns a {@code Collector} which collects at most specified number of
 * the last stream elements into the {@link List}.
 *
 * @param <T> the type of the input elements
 * @param n maximum number of stream elements to preserve
 * @return a collector which returns a {@code List} containing the last n
 *         stream elements or less if the stream was shorter.
 */
public static <T> Collector<T, ?, List<T>> tail(int n) {
    if (n <= 0)
        return empty();
    // A bounded deque keeps a sliding window of the last n elements.
    return Collector.<T, Deque<T>, List<T>> of(ArrayDeque::new, (acc, t) -> {
        if (acc.size() == n)
            acc.pollFirst();
        acc.addLast(t);
    }, (acc1, acc2) -> {
        // Prepend elements from the left half until the window is full.
        while (acc2.size() < n && !acc1.isEmpty()) {
            acc2.addFirst(acc1.pollLast());
        }
        return acc2;
    }, ArrayList<T>::new);
}

/**
 * Returns a {@code Collector} which collects at most specified number of
 * the greatest stream elements according to the specified
 * {@link Comparator} into the {@link List}. The resulting {@code List} is
 * sorted in comparator reverse order (greatest element is the first).
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.sorted(comparator.reversed()).limit(n).collect(Collectors.toList())}
 * , but can be performed much faster if the input is not sorted and
 * {@code n} is much less than the stream size.
 *
 * <p>
 * When supplied {@code n} is less or equal to zero, this method returns a
 * <a href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> which ignores the input and produces an empty list.
 *
 * @param <T> the type of the input elements
 * @param comparator the comparator to compare the elements by
 * @param n maximum number of stream elements to preserve
 * @return a collector which returns a {@code List} containing the greatest
 *         n stream elements or less if the stream was shorter.
 */
public static <T> Collector<T, ?, List<T>> greatest(Comparator<? super T> comparator, int n) {
    if (n <= 0)
        return empty();
    // Min-heap of size n: the heap head is the smallest of the current
    // top-n candidates and is evicted whenever a bigger element arrives.
    BiConsumer<PriorityQueue<T>, T> accumulator = (queue, t) -> {
        if (queue.size() < n)
            queue.add(t);
        else if (comparator.compare(queue.peek(), t) < 0) {
            queue.poll();
            queue.add(t);
        }
    };
    return Collector.of(() -> new PriorityQueue<>(comparator), accumulator, (q1, q2) -> {
        for (T t : q2) {
            accumulator.accept(q1, t);
        }
        return q1;
    }, queue -> {
        List<T> result = new ArrayList<>(queue);
        result.sort(comparator.reversed());
        return result;
    });
}

/**
 * Returns a {@code Collector} which collects at most specified number of
 * the greatest stream elements according to the natural order into the
 * {@link List}. The resulting {@code List} is sorted in reverse order
 * (greatest element is the first).
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.sorted(Comparator.reverseOrder()).limit(n).collect(Collectors.toList())}
 * , but can be performed much faster if the input is not sorted and
 * {@code n} is much less than the stream size.
 *
 * <p>
 * When supplied {@code n} is less or equal to zero, this method returns a
 * <a href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> which ignores the input and produces an empty list.
 *
 * @param <T> the type of the input elements
 * @param n maximum number of stream elements to preserve
 * @return a collector which returns a {@code List} containing the greatest
 *         n stream elements or less if the stream was shorter.
 */
public static <T extends Comparable<?
super T>> Collector<T, ?, List<T>> greatest(int n) { return greatest(Comparator.<T> naturalOrder(), n); } /** * Returns a {@code Collector} which collects at most specified number of * the least stream elements according to the specified {@link Comparator} * into the {@link List}. The resulting {@code List} is sorted in comparator * order (least element is the first). * * <p> * The operation performed by the returned collector is equivalent to * {@code stream.sorted(comparator).limit(n).collect(Collectors.toList())}, * but can be performed much faster if the input is not sorted and {@code n} * is much less than the stream size. * * <p> * When supplied {@code n} is less or equal to zero, this method returns a * <a href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a> which ignores the input and produces an empty list. * * @param <T> * the type of the input elements * @param comparator * the comparator to compare the elements by * @param n * maximum number of stream elements to preserve * @return a collector which returns a {@code List} containing the least n * stream elements or less if the stream was shorter. */ public static <T> Collector<T, ?, List<T>> least(Comparator<? super T> comparator, int n) { return greatest(comparator.reversed(), n); } /** * Returns a {@code Collector} which collects at most specified number of * the least stream elements according to the natural order into the * {@link List}. The resulting {@code List} is sorted in natural order * (least element is the first). * * <p> * The operation performed by the returned collector is equivalent to * {@code stream.sorted().limit(n).collect(Collectors.toList())}, but can be * performed much faster if the input is not sorted and {@code n} is much * less than the stream size. 
* * <p> * When supplied {@code n} is less or equal to zero, this method returns a * <a href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a> which ignores the input and produces an empty list. * * @param <T> * the type of the input elements * @param n * maximum number of stream elements to preserve * @return a collector which returns a {@code List} containing the least n * stream elements or less if the stream was shorter. */ public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> least(int n) { return greatest(Comparator.<T> reverseOrder(), n); } /** * Returns a {@code Collector} which finds the index of the minimal stream * element according to the specified {@link Comparator}. If there are * several minimal elements, the index of the first one is returned. * * @param <T> * the type of the input elements * @param comparator * a {@code Comparator} to compare the elements * @return a {@code Collector} which finds the index of the minimal element. * @see #minIndex() * @since 0.3.5 */ public static <T> Collector<T, ?, OptionalLong> minIndex(Comparator<? super T> comparator) { class Container { T value; long count = 0; long index = -1; } BiConsumer<Container, T> accumulator = (c, t) -> { if (c.index == -1 || comparator.compare(c.value, t) > 0) { c.value = t; c.index = c.count; } c.count++; }; BinaryOperator<Container> combiner = (c1, c2) -> { if (c1.index == -1 || (c2.index != -1 && comparator.compare(c1.value, c2.value) > 0)) { c2.index += c1.count; c2.count += c1.count; return c2; } c1.count += c2.count; return c1; }; Function<Container, OptionalLong> finisher = c -> c.index == -1 ? OptionalLong.empty() : OptionalLong .of(c.index); return Collector.of(Container::new, accumulator, combiner, finisher); } /** * Returns a {@code Collector} which finds the index of the minimal stream * element according to the elements natural order. If there are several * minimal elements, the index of the first one is returned. 
* * @param <T> * the type of the input elements * @return a {@code Collector} which finds the index of the minimal element. * @see #minIndex(Comparator) * @since 0.3.5 */ public static <T extends Comparable<? super T>> Collector<T, ?, OptionalLong> minIndex() { return minIndex(Comparator.naturalOrder()); } /** * Returns a {@code Collector} which finds the index of the maximal stream * element according to the specified {@link Comparator}. If there are * several maximal elements, the index of the first one is returned. * * @param <T> * the type of the input elements * @param comparator * a {@code Comparator} to compare the elements * @return a {@code Collector} which finds the index of the maximal element. * @see #maxIndex() * @since 0.3.5 */ public static <T> Collector<T, ?, OptionalLong> maxIndex(Comparator<? super T> comparator) { return minIndex(comparator.reversed()); } /** * Returns a {@code Collector} which finds the index of the maximal stream * element according to the elements natural order. If there are several * maximal elements, the index of the first one is returned. * * @param <T> * the type of the input elements * @return a {@code Collector} which finds the index of the maximal element. * @see #maxIndex(Comparator) * @since 0.3.5 */ public static <T extends Comparable<? super T>> Collector<T, ?, OptionalLong> maxIndex() { return minIndex(Comparator.reverseOrder()); } /** * Returns a {@code Collector} implementing a cascaded "group by" operation * on input elements of type {@code T}, for classification function which * maps input elements to the enum values. The downstream reduction for * repeating keys is performed using the specified downstream * {@code Collector}. * * <p> * Unlike the {@link Collectors#groupingBy(Function, Collector)} collector * this collector produces an {@link EnumMap} which contains all possible * keys including keys which were never returned by the classification * function. 
These keys are mapped to the default collector value which is
 * equivalent to collecting an empty stream with the same collector.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> if the downstream collector is short-circuiting. The
 * collection might stop when for every possible enum key the downstream
 * collection is known to be finished.
 *
 * @param <T> the type of the input elements
 * @param <K> the type of the enum values returned by the classifier
 * @param <A> the intermediate accumulation type of the downstream collector
 * @param <D> the result type of the downstream reduction
 * @param enumClass the class of enum values returned by the classifier
 * @param classifier a classifier function mapping input elements to enum values
 * @param downstream a {@code Collector} implementing the downstream reduction
 * @return a {@code Collector} implementing the cascaded group-by operation
 * @see Collectors#groupingBy(Function, Collector)
 * @since 0.3.7
 */
public static <T, K extends Enum<K>, A, D> Collector<T, ?, EnumMap<K, D>> groupingByEnum(Class<K> enumClass,
        Function<? super T, K> classifier, Collector<? super T, A, D> downstream) {
    return groupingBy(classifier, EnumSet.allOf(enumClass), () -> new EnumMap<>(enumClass), downstream);
}

/**
 * Returns a {@code Collector} implementing a cascaded "group by" operation
 * over the given fixed key domain: every key of the domain is present in
 * the resulting {@code Map}, mapped to the downstream default value when no
 * input element was classified to it.
 *
 * @param <T> the type of the input elements
 * @param <K> the type of the keys
 * @param <D> the result type of the downstream reduction
 * @param <A> the intermediate accumulation type of the downstream collector
 * @param classifier a classifier function mapping input elements to keys
 * @param domain the set of all keys allowed to appear in the result
 * @param downstream a {@code Collector} implementing the downstream reduction
 * @return a {@code Collector} implementing the cascaded group-by operation
 */
public static <T, K, D, A> Collector<T, ?, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier,
        Set<K> domain, Collector<? super T, A, D> downstream) {
    return groupingBy(classifier, domain, HashMap::new, downstream);
}

/**
 * Returns a {@code Collector} implementing a cascaded "group by" operation
 * over the given fixed key domain, storing results in a {@code Map} created
 * by the supplied factory. Classifier results outside the domain cause an
 * {@link IllegalStateException} when the downstream collector is
 * short-circuiting.
 *
 * @param <T> the type of the input elements
 * @param <K> the type of the keys
 * @param <D> the result type of the downstream reduction
 * @param <A> the intermediate accumulation type of the downstream collector
 * @param <M> the type of the resulting {@code Map}
 * @param classifier a classifier function mapping input elements to keys
 * @param domain the set of all keys allowed to appear in the result
 * @param mapFactory a supplier providing a new empty {@code Map}
 * @param downstream a {@code Collector} implementing the downstream reduction
 * @return a {@code Collector} implementing the cascaded group-by operation
 */
public static <T, K, D, A, M extends Map<K, D>> Collector<T, ?, M> groupingBy(
        Function<? super T, ? extends K> classifier, Set<K> domain, Supplier<M> mapFactory,
        Collector<? super T, A, D> downstream) {
    Supplier<A> downstreamSupplier = downstream.supplier();
    Collector<T, ?, M> groupingBy;
    if (downstream instanceof CancellableCollectorImpl) {
        // Short-circuiting path: track per-key "finished" state so the whole
        // grouping can stop once every domain key is done.
        Function<K, A> supplier = k -> {
            if (!domain.contains(k))
                throw new IllegalStateException("Classifier returned value '" + k + "' which is out of domain");
            return downstreamSupplier.get();
        };
        BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator();
        Predicate<A> downstreamFinished = ((CancellableCollectorImpl<? super T, A, D>) downstream).finished();
        BiConsumer<Map<K, A>, T> accumulator = (m, t) -> {
            K key = Objects.requireNonNull(classifier.apply(t));
            A container = m.computeIfAbsent(key, supplier);
            downstreamAccumulator.accept(container, t);
        };
        int size = domain.size();
        groupingBy = PartialCollector.grouping(mapFactory, downstream).asCancellable(accumulator, map -> {
            // Finished only when every domain key has a finished container.
            if (map.size() < size)
                return false;
            for (A container : map.values()) {
                if (!downstreamFinished.test(container))
                    return false;
            }
            return true;
        });
    } else {
        groupingBy = Collectors.groupingBy(classifier, mapFactory, downstream);
    }
    return collectingAndThen(groupingBy, map -> {
        // Backfill keys never produced by the classifier with the value of
        // an empty downstream reduction.
        Function<A, D> finisher = downstream.finisher();
        domain.forEach(key -> map.computeIfAbsent(key, k -> finisher.apply(downstreamSupplier.get())));
        return map;
    });
}

/**
 * Returns a {@code Collector} which collects the intersection of the input
 * collections into the newly-created {@link Set}.
 *
 * <p>
 * The returned collector produces an empty set if the input is empty or
 * intersection of the input collections is empty.
 *
 * <p>
 * There are no guarantees on the type, mutability, serializability, or
 * thread-safety of the {@code Set} returned.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a>: it may not process all the elements if the resulting
 * intersection is empty.
 *
 * @param <T> the type of the elements in the input collections
 * @param <S> the type of the input collections
 * @return a {@code Collector} which produces the intersection of the input
 *         collections, collected into a {@code Set}.
 * @since 0.4.0
 */
public static <T, S extends Collection<T>> Collector<S, ?, Set<T>> intersecting() {
    // b.a == null means "no collection seen yet" (distinct from an empty
    // intersection); an empty non-null set short-circuits the collection.
    return new CancellableCollectorImpl<>(() -> new Box<Set<T>>(null), (b, t) -> {
        if (b.a == null) {
            b.a = new HashSet<>(t);
        } else {
            b.a.retainAll(t);
        }
    }, (b1, b2) -> {
        if (b1.a == null)
            return b2;
        if (b2.a != null)
            b1.a.retainAll(b2.a);
        return b1;
    }, b -> b.a == null ? Collections.emptySet() : b.a, b -> b.a != null && b.a.isEmpty(),
        UNORDERED_CHARACTERISTICS);
}

/**
 * Adapts a {@code Collector} to perform an additional finishing
 * transformation.
 *
 * <p>
 * Unlike {@link Collectors#collectingAndThen(Collector, Function)} this
 * method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> if the downstream collector is short-circuiting.
 *
 * @param <T> the type of the input elements
 * @param <A> intermediate accumulation type of the downstream collector
 * @param <R> result type of the downstream collector
 * @param <RR> result type of the resulting collector
 * @param downstream a collector
 * @param finisher a function to be applied to the final result of the
 *            downstream collector
 * @return a collector which performs the action of the downstream
 *         collector, followed by an additional finishing step
 * @see Collectors#collectingAndThen(Collector, Function)
 * @since 0.4.0
 */
public static <T, A, R, RR> Collector<T, A, RR> collectingAndThen(Collector<T, A, R> downstream,
        Function<R, RR> finisher) {
    if (downstream instanceof CancellableCollector) {
        // Preserve cancellability; IDENTITY_FINISH must be dropped since a
        // real finisher is now applied.
        return new CancellableCollectorImpl<>(downstream.supplier(), downstream.accumulator(),
            downstream.combiner(), downstream.finisher().andThen(finisher),
            ((CancellableCollector<T, A, R>) downstream).finished(),
            downstream.characteristics().contains(Characteristics.UNORDERED) ? UNORDERED_CHARACTERISTICS
                    : NO_CHARACTERISTICS);
    }
    return Collectors.collectingAndThen(downstream, finisher);
}

/**
 * Returns a {@code Collector} which partitions the input elements according
 * to a {@code Predicate}, reduces the values in each partition according to
 * another {@code Collector}, and organizes them into a
 * {@code Map<Boolean, D>} whose values are the result of the downstream
 * reduction.
 *
 * <p>
 * Unlike {@link Collectors#partitioningBy(Predicate, Collector)} this
 * method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> if the downstream collector is short-circuiting.
 *
 * @param <T> the type of the input elements
 * @param <A> the intermediate accumulation type of the downstream collector
 * @param <D> the result type of the downstream reduction
 * @param predicate a predicate used for classifying input elements
 * @param downstream a {@code Collector} implementing the downstream reduction
 * @return a {@code Collector} implementing the cascaded partitioning
 *         operation
 * @since 0.4.0
 * @see Collectors#partitioningBy(Predicate, Collector)
 */
public static <T, D, A> Collector<T, ?, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate,
        Collector<? super T, A, D> downstream) {
    if (downstream instanceof CancellableCollector) {
        // Cancellable path: finish when BOTH partitions report finished.
        BiConsumer<A, ? super T> accumulator = downstream.accumulator();
        Predicate<A> finished = ((CancellableCollector<? super T, A, D>) downstream).finished();
        return BooleanMap.partialCollector(downstream).asCancellable(
            (map, t) -> accumulator.accept(predicate.test(t) ? map.trueValue : map.falseValue, t),
            map -> finished.test(map.trueValue) && finished.test(map.falseValue));
    }
    return Collectors.partitioningBy(predicate, downstream);
}

/**
 * Adapts a {@code Collector} accepting elements of type {@code U} to one
 * accepting elements of type {@code T} by applying a mapping function to
 * each input element before accumulation.
 *
 * <p>
 * Unlike {@link Collectors#mapping(Function, Collector)} this method
 * returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> if the downstream collector is short-circuiting.
 *
 * @param <T> the type of the input elements
 * @param <U> type of elements accepted by downstream collector
 * @param <A> intermediate accumulation type of the downstream collector
 * @param <R> result type of collector
 * @param mapper a function to be applied to the input elements
 * @param downstream a collector which will accept mapped values
 * @return a collector which applies the mapping function to the input
 *         elements and provides the mapped results to the downstream
 *         collector
 * @see Collectors#mapping(Function, Collector)
 * @since 0.4.0
 */
public static <T, U, A, R> Collector<T, ?, R> mapping(Function<? super T, ? extends U> mapper,
        Collector<? super U, A, R> downstream) {
    if (downstream instanceof CancellableCollector) {
        BiConsumer<A, ? super U> downstreamAccumulator = downstream.accumulator();
        return new CancellableCollectorImpl<>(downstream.supplier(),
            (r, t) -> downstreamAccumulator.accept(r, mapper.apply(t)), downstream.combiner(),
            downstream.finisher(), ((CancellableCollector<? super U, A, R>) downstream).finished(),
            downstream.characteristics());
    }
    return Collectors.mapping(mapper, downstream);
}

/**
 * Returns a {@code Collector} which passes only those elements to the
 * specified downstream collector which match given predicate.
 *
 * <p>
 * This method returns a <a
 * href="package-summary.html#ShortCircuitReduction">short-circuiting
 * collector</a> if downstream collector is short-circuiting.
 *
 * <p>
 * The operation performed by the returned collector is equivalent to
 * {@code stream.filter(predicate).collect(downstream)}. This collector is
 * mostly useful as a downstream collector in cascaded operation involving
 * {@link #pairing(Collector, Collector, BiFunction)} collector.
* * @param <T> * the type of the input elements * @param <A> * intermediate accumulation type of the downstream collector * @param <R> * result type of collector * @param predicate * a filter function to be applied to the input elements * @param downstream * a collector which will accept filtered values * @return a collector which applies the predicate to the input elements and * provides the elements for which predicate returned true to the * downstream collector * @see #pairing(Collector, Collector, BiFunction) * @since 0.4.0 */ public static <T, A, R> Collector<T, ?, R> filtering(Predicate<? super T> predicate, Collector<T, A, R> downstream) { BiConsumer<A, T> downstreamAccumulator = downstream.accumulator(); BiConsumer<A, T> accumulator = (acc, t) -> { if (predicate.test(t)) downstreamAccumulator.accept(acc, t); }; if (downstream instanceof CancellableCollector) { return new CancellableCollectorImpl<>(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), ((CancellableCollector<T, A, R>) downstream).finished(), downstream.characteristics()); } return Collector.of(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), downstream.characteristics().toArray(new Characteristics[downstream.characteristics().size()])); } public static Collector<CharSequence, ?, String> joining(CharSequence delimiter, CharSequence ellipsis, int maxChars, boolean partial) { if (maxChars <= 0) return empty(() -> ""); int delimLength = delimiter.length(); BiConsumer<ObjIntBox<ArrayList<String>>, CharSequence> accumulator = (acc, str) -> { if (acc.b <= maxChars) { acc.b += str.length() + (acc.a.isEmpty() ? 0 : delimLength); acc.a.add(str.toString()); } }; BinaryOperator<ObjIntBox<ArrayList<String>>> combiner = (acc1, acc2) -> { int len = acc1.b + acc2.b + ((acc1.a.isEmpty() || acc2.a.isEmpty()) ? 
0 : delimLength); if (len <= maxChars) { acc1.b = len; acc1.a.addAll(acc2.a); } else { for (CharSequence s : acc2.a) { if (acc1.b > maxChars) break; accumulator.accept(acc1, s); } } return acc1; }; Function<ObjIntBox<ArrayList<String>>, String> finisher = acc -> { char[] result = new char[Math.min(maxChars, acc.b)]; char[] delimArray = delimiter.toString().toCharArray(); int ellipsisLength = Math.min(ellipsis.length(), maxChars); int pos = 0; boolean overflow = false; int prevPos = 0; for (int i = 0; i < acc.a.size(); i++) { String s = acc.a.get(i); int nextPos; if (i > 0) { nextPos = pos + delimArray.length; System.arraycopy(delimArray, 0, result, pos, Math.min(nextPos, maxChars) - pos); if (nextPos > maxChars) { overflow = true; break; } pos = nextPos; if (!partial && pos <= maxChars - ellipsisLength) { prevPos = pos; } } nextPos = pos + s.length(); s.getChars(0, Math.min(nextPos, maxChars) - pos, result, pos); if (nextPos > maxChars) { overflow = true; break; } pos = nextPos; } if (overflow) { if (!partial) { ellipsis.toString().getChars(0, ellipsisLength, result, prevPos); return new String(result, 0, prevPos + ellipsisLength); } ellipsis.toString().getChars(0, ellipsisLength, result, maxChars - ellipsisLength); } return new String(result); }; return new CancellableCollectorImpl<>(() -> new ObjIntBox<>(new ArrayList<String>(), 0), accumulator, combiner, finisher, acc -> acc.b > maxChars, NO_CHARACTERISTICS); } /** * Returns a {@code Collector} that produces the bitwise-and operation of a * integer-valued function applied to the input elements. If no elements are * present, the result is empty {@link OptionalInt}. * * <p> * This method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a>: it may not process all the elements if the result is zero. 
* * @param <T> * the type of the input elements * @param mapper * a function extracting the property to be processed * @return a {@code Collector} that produces the bitwise-and operation of a * derived property * @since 0.4.0 */ public static <T> Collector<T, ?, OptionalInt> andingInt(ToIntFunction<T> mapper) { return new CancellableCollectorImpl<>(PrimitiveBox::new, (acc, t) -> { if (!acc.b) { acc.i = mapper.applyAsInt(t); acc.b = true; } else { acc.i &= mapper.applyAsInt(t); } }, (acc1, acc2) -> { if (!acc1.b) return acc2; if (!acc2.b) return acc1; acc1.i &= acc2.i; return acc1; }, PrimitiveBox::asInt, acc -> acc.b && acc.i == 0, UNORDERED_CHARACTERISTICS); } /** * Returns a {@code Collector} that produces the bitwise-and operation of a * long-valued function applied to the input elements. If no elements are * present, the result is empty {@link OptionalLong}. * * <p> * This method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a>: it may not process all the elements if the result is zero. * * @param <T> * the type of the input elements * @param mapper * a function extracting the property to be processed * @return a {@code Collector} that produces the bitwise-and operation of a * derived property * @since 0.4.0 */ public static <T> Collector<T, ?, OptionalLong> andingLong(ToLongFunction<T> mapper) { return new CancellableCollectorImpl<>(PrimitiveBox::new, (acc, t) -> { if (!acc.b) { acc.l = mapper.applyAsLong(t); acc.b = true; } else { acc.l &= mapper.applyAsLong(t); } }, (acc1, acc2) -> { if (!acc1.b) return acc2; if (!acc2.b) return acc1; acc1.l &= acc2.l; return acc1; }, PrimitiveBox::asLong, acc -> acc.b && acc.l == 0, UNORDERED_CHARACTERISTICS); } }
src/main/java/javax/util/streamex/MoreCollectors.java
/*
 * Copyright 2015 Tagir Valeev
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package javax.util.streamex;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Deque;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.OptionalInt;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.ToIntFunction;
import java.util.function.ToLongFunction;
import java.util.stream.Collector;
import java.util.stream.Collector.Characteristics;
import java.util.stream.Collectors;

import static javax.util.streamex.StreamExInternals.*;

/**
 * Implementations of several collectors in addition to ones available in JDK.
 *
 * @author Tagir Valeev
 * @see Collectors
 * @since 0.3.2
 */
public final class MoreCollectors {
    // Static-only utility class; must never be instantiated.
    private MoreCollectors() {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns a {@code Collector} which just ignores the input and calls the
     * provided supplier once to return the output.
     *
     * @param <T>
     *            the type of input elements
     * @param <U>
     *            the type of output
     * @param supplier
     *            the supplier of the output
     * @return a {@code Collector} which just ignores the input and calls the
     *         provided supplier once to return the output.
     */
    private static <T, U> Collector<T, ?, U> empty(Supplier<U> supplier) {
        // "finished" is always true, so the collection short-circuits
        // immediately without consuming any element.
        return new CancellableCollectorImpl<>(() -> NONE, (acc, t) -> {
            // empty
        }, selectFirst(), acc -> supplier.get(), acc -> true, EnumSet.allOf(Characteristics.class));
    }

    // Convenience variant producing an empty mutable List.
    private static <T> Collector<T, ?, List<T>> empty() {
        return empty(ArrayList<T>::new);
    }

    /**
     * Returns a {@code Collector} that accumulates the input elements into a
     * new array.
     *
     * The operation performed by the returned collector is equivalent to
     * {@code stream.toArray(generator)}. This collector is mostly useful as a
     * downstream collector.
     *
     * @param <T>
     *            the type of the input elements
     * @param generator
     *            a function which produces a new array of the desired type and
     *            the provided length
     * @return a {@code Collector} which collects all the input elements into an
     *         array, in encounter order
     */
    public static <T> Collector<T, ?, T[]> toArray(IntFunction<T[]> generator) {
        return Collectors.collectingAndThen(Collectors.toList(), list -> list.toArray(generator.apply(list.size())));
    }

    /**
     * Returns a {@code Collector} which produces a boolean array containing the
     * results of applying the given predicate to the input elements, in
     * encounter order.
     *
     * @param <T>
     *            the type of the input elements
     * @param predicate
     *            a non-interfering, stateless predicate to apply to each input
     *            element. The result values of this predicate are collected to
     *            the resulting boolean array.
     * @return a {@code Collector} which collects the results of the predicate
     *         function to the boolean array, in encounter order.
     * @since 0.3.8
     */
    public static <T> Collector<T, ?, boolean[]> toBooleanArray(Predicate<T> predicate) {
        // box.a is the bit set under construction; box.b counts elements.
        // addExact guards against int overflow for absurdly long streams.
        return PartialCollector.booleanArray().asRef((box, t) -> {
            if (predicate.test(t))
                box.a.set(box.b);
            box.b = StrictMath.addExact(box.b, 1);
        });
    }

    /**
     * Returns a {@code Collector} that accumulates the input enum values into a
     * new {@code EnumSet}.
     *
     * <p>
     * This method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a>: it may not process all the elements if the resulting set
     * contains all possible enum values.
     *
     * @param <T>
     *            the type of the input elements
     * @param enumClass
     *            the class of input enum values
     * @return a {@code Collector} which collects all the input elements into a
     *         {@code EnumSet}
     */
    public static <T extends Enum<T>> Collector<T, ?, EnumSet<T>> toEnumSet(Class<T> enumClass) {
        int size = EnumSet.allOf(enumClass).size();
        return new CancellableCollectorImpl<>(() -> EnumSet.noneOf(enumClass), EnumSet::add, (s1, s2) -> {
            s1.addAll(s2);
            return s1;
        }, Function.identity(), set -> set.size() == size, UNORDERED_ID_CHARACTERISTICS);
    }

    /**
     * Returns a {@code Collector} which counts a number of distinct values the
     * mapper function returns for the stream elements.
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.map(mapper).distinct().count()}. This collector is mostly
     * useful as a downstream collector.
     *
     * @param <T>
     *            the type of the input elements
     * @param mapper
     *            a function which classifies input elements.
     * @return a collector which counts a number of distinct classes the mapper
     *         function returns for the stream elements.
     */
    public static <T> Collector<T, ?, Integer> distinctCount(Function<? super T, ?> mapper) {
        return Collectors.collectingAndThen(Collectors.mapping(mapper, Collectors.toSet()), Set::size);
    }

    /**
     * Returns a {@code Collector} which collects into the {@link List} the
     * input elements for which given mapper function returns distinct results.
     *
     * <p>
     * For ordered source the order of collected elements is preserved. If the
     * same result is returned by mapper function for several elements, only the
     * first element is included into the resulting list.
     *
     * <p>
     * There are no guarantees on the type, mutability, serializability, or
     * thread-safety of the {@code List} returned.
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.distinct(mapper).toList()}, but may work faster.
     *
     * @param <T>
     *            the type of the input elements
     * @param mapper
     *            a function which classifies input elements.
     * @return a collector which collects distinct elements to the {@code List}.
     * @since 0.3.8
     */
    public static <T> Collector<T, ?, List<T>> distinctBy(Function<? super T, ?> mapper) {
        // LinkedHashMap preserves first-seen order; putIfAbsent keeps the
        // first element mapped to each key.
        return Collector.<T, Map<Object, T>, List<T>> of(LinkedHashMap::new,
            (map, t) -> map.putIfAbsent(mapper.apply(t), t), (m1, m2) -> {
                for (Entry<Object, T> e : m2.entrySet()) {
                    m1.putIfAbsent(e.getKey(), e.getValue());
                }
                return m1;
            }, map -> new ArrayList<>(map.values()));
    }

    /**
     * Returns a {@code Collector} accepting elements of type {@code T} that
     * counts the number of input elements and returns result as {@code Integer}
     * . If no elements are present, the result is 0.
     *
     * @param <T>
     *            the type of the input elements
     * @return a {@code Collector} that counts the input elements
     * @since 0.3.3
     * @see Collectors#counting()
     */
    public static <T> Collector<T, ?, Integer> countingInt() {
        return PartialCollector.intSum().asRef((acc, t) -> acc[0]++);
    }

    /**
     * Returns a {@code Collector} which aggregates the results of two supplied
     * collectors using the supplied finisher function.
     *
     * <p>
     * This method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> if both downstream collectors are short-circuiting. The
     * collection might stop when both downstream collectors report that the
     * collection is complete.
     *
     * @param <T>
     *            the type of the input elements
     * @param <A1>
     *            the intermediate accumulation type of the first collector
     * @param <A2>
     *            the intermediate accumulation type of the second collector
     * @param <R1>
     *            the result type of the first collector
     * @param <R2>
     *            the result type of the second collector
     * @param <R>
     *            the final result type
     * @param c1
     *            the first collector
     * @param c2
     *            the second collector
     * @param finisher
     *            the function which merges two results into the single one.
     * @return a {@code Collector} which aggregates the results of two supplied
     *         collectors.
     */
    public static <T, A1, A2, R1, R2, R> Collector<T, ?, R> pairing(Collector<? super T, A1, R1> c1,
            Collector<? super T, A2, R2> c2, BiFunction<? super R1, ? super R2, ? extends R> finisher) {
        // The combined characteristics are the intersection of both inputs,
        // minus IDENTITY_FINISH (the pair box is never the final result).
        EnumSet<Characteristics> c = EnumSet.noneOf(Characteristics.class);
        c.addAll(c1.characteristics());
        c.retainAll(c2.characteristics());
        c.remove(Characteristics.IDENTITY_FINISH);
        Supplier<A1> c1Supplier = c1.supplier();
        Supplier<A2> c2Supplier = c2.supplier();
        BiConsumer<A1, ? super T> c1Accumulator = c1.accumulator();
        BiConsumer<A2, ? super T> c2Accumulator = c2.accumulator();
        BinaryOperator<A1> c1Combiner = c1.combiner();
        BinaryOperator<A2> c2combiner = c2.combiner();
        Supplier<PairBox<A1, A2>> supplier = () -> new PairBox<>(c1Supplier.get(), c2Supplier.get());
        // Every element is fed to both accumulators.
        BiConsumer<PairBox<A1, A2>, T> accumulator = (acc, v) -> {
            c1Accumulator.accept(acc.a, v);
            c2Accumulator.accept(acc.b, v);
        };
        BinaryOperator<PairBox<A1, A2>> combiner = (acc1, acc2) -> {
            acc1.a = c1Combiner.apply(acc1.a, acc2.a);
            acc1.b = c2combiner.apply(acc1.b, acc2.b);
            return acc1;
        };
        Function<PairBox<A1, A2>, R> resFinisher = acc -> {
            R1 r1 = c1.finisher().apply(acc.a);
            R2 r2 = c2.finisher().apply(acc.b);
            return finisher.apply(r1, r2);
        };
        if (c1 instanceof CancellableCollector && c2 instanceof CancellableCollector) {
            Predicate<A1> c1Finished = ((CancellableCollector<? super T, A1, R1>) c1).finished();
            Predicate<A2> c2Finished = ((CancellableCollector<? super T, A2, R2>) c2).finished();
            // Stop only when BOTH downstream collectors are finished.
            Predicate<PairBox<A1, A2>> finished = acc -> c1Finished.test(acc.a) && c2Finished.test(acc.b);
            return new CancellableCollectorImpl<>(supplier, accumulator, combiner, resFinisher, finished, c);
        }
        return Collector.of(supplier, accumulator, combiner, resFinisher, c.toArray(new Characteristics[c.size()]));
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and bigger than any other element according to the
     * specified {@link Comparator}. The found elements are reduced using the
     * specified downstream {@code Collector}.
     *
     * @param <T>
     *            the type of the input elements
     * @param <A>
     *            the intermediate accumulation type of the downstream collector
     * @param <D>
     *            the result type of the downstream reduction
     * @param comparator
     *            a {@code Comparator} to compare the elements
     * @param downstream
     *            a {@code Collector} implementing the downstream reduction
     * @return a {@code Collector} which finds all the maximal elements.
     * @see #maxAll(Comparator)
     * @see #maxAll(Collector)
     * @see #maxAll()
     */
    public static <T, A, D> Collector<T, ?, D> maxAll(Comparator<? super T> comparator,
            Collector<? super T, A, D> downstream) {
        Supplier<A> downstreamSupplier = downstream.supplier();
        BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator();
        BinaryOperator<A> downstreamCombiner = downstream.combiner();
        // acc.a is the downstream accumulation of current maximal elements;
        // acc.b is the current maximum (NONE sentinel = nothing seen yet).
        Supplier<PairBox<A, T>> supplier = () -> new PairBox<>(downstreamSupplier.get(), none());
        BiConsumer<PairBox<A, T>, T> accumulator = (acc, t) -> {
            if (acc.b == NONE) {
                downstreamAccumulator.accept(acc.a, t);
                acc.b = t;
            } else {
                int cmp = comparator.compare(t, acc.b);
                if (cmp > 0) {
                    // Strictly greater element found: discard the previous
                    // downstream accumulation and restart it.
                    acc.a = downstreamSupplier.get();
                    acc.b = t;
                }
                if (cmp >= 0)
                    downstreamAccumulator.accept(acc.a, t);
            }
        };
        BinaryOperator<PairBox<A, T>> combiner = (acc1, acc2) -> {
            if (acc2.b == NONE) {
                return acc1;
            }
            if (acc1.b == NONE) {
                return acc2;
            }
            int cmp = comparator.compare(acc1.b, acc2.b);
            if (cmp > 0) {
                return acc1;
            }
            if (cmp < 0) {
                return acc2;
            }
            // Equal maxima: merge the downstream accumulations.
            acc1.a = downstreamCombiner.apply(acc1.a, acc2.a);
            return acc1;
        };
        Function<PairBox<A, T>, D> finisher = acc -> downstream.finisher().apply(acc.a);
        return Collector.of(supplier, accumulator, combiner, finisher);
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and bigger than any other element according to the
     * specified {@link Comparator}. The found elements are collected to
     * {@link List}.
     *
     * @param <T>
     *            the type of the input elements
     * @param comparator
     *            a {@code Comparator} to compare the elements
     * @return a {@code Collector} which finds all the maximal elements and
     *         collects them to the {@code List}.
     * @see #maxAll(Comparator, Collector)
     * @see #maxAll()
     */
    public static <T> Collector<T, ?, List<T>> maxAll(Comparator<? super T> comparator) {
        return maxAll(comparator, Collectors.toList());
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and bigger than any other element according to the natural
     * order. The found elements are reduced using the specified downstream
     * {@code Collector}.
     *
     * @param <T>
     *            the type of the input elements
     * @param <A>
     *            the intermediate accumulation type of the downstream collector
     * @param <D>
     *            the result type of the downstream reduction
     * @param downstream
     *            a {@code Collector} implementing the downstream reduction
     * @return a {@code Collector} which finds all the maximal elements.
     * @see #maxAll(Comparator, Collector)
     * @see #maxAll(Comparator)
     * @see #maxAll()
     */
    public static <T extends Comparable<? super T>, A, D> Collector<T, ?, D> maxAll(Collector<T, A, D> downstream) {
        return maxAll(Comparator.<T> naturalOrder(), downstream);
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and bigger than any other element according to the natural
     * order. The found elements are collected to {@link List}.
     *
     * @param <T>
     *            the type of the input elements
     * @return a {@code Collector} which finds all the maximal elements and
     *         collects them to the {@code List}.
     * @see #maxAll(Comparator)
     * @see #maxAll(Collector)
     */
    public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> maxAll() {
        return maxAll(Comparator.<T> naturalOrder(), Collectors.toList());
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and smaller than any other element according to the
     * specified {@link Comparator}. The found elements are reduced using the
     * specified downstream {@code Collector}.
     *
     * @param <T>
     *            the type of the input elements
     * @param <A>
     *            the intermediate accumulation type of the downstream collector
     * @param <D>
     *            the result type of the downstream reduction
     * @param comparator
     *            a {@code Comparator} to compare the elements
     * @param downstream
     *            a {@code Collector} implementing the downstream reduction
     * @return a {@code Collector} which finds all the minimal elements.
     * @see #minAll(Comparator)
     * @see #minAll(Collector)
     * @see #minAll()
     */
    public static <T, A, D> Collector<T, ?, D> minAll(Comparator<? super T> comparator, Collector<T, A, D> downstream) {
        // min is max under the reversed comparator.
        return maxAll(comparator.reversed(), downstream);
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and smaller than any other element according to the
     * specified {@link Comparator}. The found elements are collected to
     * {@link List}.
     *
     * @param <T>
     *            the type of the input elements
     * @param comparator
     *            a {@code Comparator} to compare the elements
     * @return a {@code Collector} which finds all the minimal elements and
     *         collects them to the {@code List}.
     * @see #minAll(Comparator, Collector)
     * @see #minAll()
     */
    public static <T> Collector<T, ?, List<T>> minAll(Comparator<? super T> comparator) {
        return maxAll(comparator.reversed(), Collectors.toList());
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and smaller than any other element according to the natural
     * order. The found elements are reduced using the specified downstream
     * {@code Collector}.
     *
     * @param <T>
     *            the type of the input elements
     * @param <A>
     *            the intermediate accumulation type of the downstream collector
     * @param <D>
     *            the result type of the downstream reduction
     * @param downstream
     *            a {@code Collector} implementing the downstream reduction
     * @return a {@code Collector} which finds all the minimal elements.
     * @see #minAll(Comparator, Collector)
     * @see #minAll(Comparator)
     * @see #minAll()
     */
    public static <T extends Comparable<? super T>, A, D> Collector<T, ?, D> minAll(Collector<T, A, D> downstream) {
        return maxAll(Comparator.<T> reverseOrder(), downstream);
    }

    /**
     * Returns a {@code Collector} which finds all the elements which are equal
     * to each other and smaller than any other element according to the natural
     * order. The found elements are collected to {@link List}.
     *
     * @param <T>
     *            the type of the input elements
     * @return a {@code Collector} which finds all the minimal elements and
     *         collects them to the {@code List}.
     * @see #minAll(Comparator)
     * @see #minAll(Collector)
     */
    public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> minAll() {
        return maxAll(Comparator.<T> reverseOrder(), Collectors.toList());
    }

    /**
     * Returns a {@code Collector} which collects the stream element if stream
     * contains exactly one element.
     *
     * <p>
     * This method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a>.
     *
     * @param <T>
     *            the type of the input elements
     * @return a collector which returns an {@link Optional} which describes the
     *         only element of the stream. For empty stream or stream containing
     *         more than one element an empty {@code Optional} is returned.
     * @since 0.4.0
     */
    public static <T> Collector<T, ?, Optional<T>> onlyOne() {
        // box.a == null: no element seen yet; Optional.of(t): exactly one seen;
        // Optional.empty(): more than one seen (terminal state, enables
        // short-circuiting).
        return new CancellableCollectorImpl<>(() -> new Box<Optional<T>>(null),
            (box, t) -> box.a = box.a == null ? Optional.of(t) : Optional.empty(),
            (box1, box2) -> box1.a == null ? box2 : box2.a == null ? box1 : new Box<>(Optional.empty()),
            box -> box.a == null ? Optional.empty() : box.a,
            box -> box.a != null && !box.a.isPresent(), UNORDERED_CHARACTERISTICS);
    }

    /**
     * Returns a {@code Collector} which collects only the first stream element
     * if any.
     *
     * <p>
     * This method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a>.
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.findFirst()}. This collector is mostly useful as a
     * downstream collector.
     *
     * @param <T>
     *            the type of the input elements
     * @return a collector which returns an {@link Optional} which describes the
     *         first element of the stream. For empty stream an empty
     *         {@code Optional} is returned.
     */
    public static <T> Collector<T, ?, Optional<T>> first() {
        // NONE sentinel distinguishes "no element yet" from a null element;
        // collection stops as soon as one element is captured.
        return new CancellableCollectorImpl<>(() -> new Box<T>(none()), (box, t) -> {
            if (box.a == NONE)
                box.a = t;
        }, (box1, box2) -> box1.a == NONE ? box2 : box1,
            box -> box.a == NONE ? Optional.empty() : Optional.of(box.a),
            box -> box.a != NONE, NO_CHARACTERISTICS);
    }

    /**
     * Returns a {@code Collector} which collects only the last stream element
     * if any.
     *
     * @param <T>
     *            the type of the input elements
     * @return a collector which returns an {@link Optional} which describes the
     *         last element of the stream. For empty stream an empty
     *         {@code Optional} is returned.
     */
    public static <T> Collector<T, ?, Optional<T>> last() {
        return Collectors.reducing((u, v) -> v);
    }

    /**
     * Returns a {@code Collector} which collects at most specified number of
     * the first stream elements into the {@link List}.
     *
     * <p>
     * This method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a>.
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.limit(n).collect(Collectors.toList())}. This collector is
     * mostly useful as a downstream collector.
     *
     * @param <T>
     *            the type of the input elements
     * @param n
     *            maximum number of stream elements to preserve
     * @return a collector which returns a {@code List} containing the first n
     *         stream elements or less if the stream was shorter.
     */
    public static <T> Collector<T, ?, List<T>> head(int n) {
        if (n <= 0)
            return empty();
        return new CancellableCollectorImpl<>(ArrayList::new, (acc, t) -> {
            if (acc.size() < n)
                acc.add(t);
        }, (acc1, acc2) -> {
            acc1.addAll(acc2.subList(0, Math.min(acc2.size(), n - acc1.size())));
            return acc1;
        }, Function.identity(), acc -> acc.size() >= n, ID_CHARACTERISTICS);
    }

    /**
     * Returns a {@code Collector} which collects at most specified number of
     * the last stream elements into the {@link List}.
     *
     * @param <T>
     *            the type of the input elements
     * @param n
     *            maximum number of stream elements to preserve
     * @return a collector which returns a {@code List} containing the last n
     *         stream elements or less if the stream was shorter.
     */
    public static <T> Collector<T, ?, List<T>> tail(int n) {
        if (n <= 0)
            return empty();
        // A bounded deque keeps only the most recent n elements.
        return Collector.<T, Deque<T>, List<T>> of(ArrayDeque::new, (acc, t) -> {
            if (acc.size() == n)
                acc.pollFirst();
            acc.addLast(t);
        }, (acc1, acc2) -> {
            // Prepend the tail of the left chunk until the right chunk holds n
            // elements; the right chunk always wins for the most recent items.
            while (acc2.size() < n && !acc1.isEmpty()) {
                acc2.addFirst(acc1.pollLast());
            }
            return acc2;
        }, ArrayList<T>::new);
    }

    /**
     * Returns a {@code Collector} which collects at most specified number of
     * the greatest stream elements according to the specified
     * {@link Comparator} into the {@link List}. The resulting {@code List} is
     * sorted in comparator reverse order (greatest element is the first).
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.sorted(comparator.reversed()).limit(n).collect(Collectors.toList())}
     * , but can be performed much faster if the input is not sorted and
     * {@code n} is much less than the stream size.
     *
     * <p>
     * When supplied {@code n} is less or equal to zero, this method returns a
     * <a href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> which ignores the input and produces an empty list.
     *
     * @param <T>
     *            the type of the input elements
     * @param comparator
     *            the comparator to compare the elements by
     * @param n
     *            maximum number of stream elements to preserve
     * @return a collector which returns a {@code List} containing the greatest
     *         n stream elements or less if the stream was shorter.
     */
    public static <T> Collector<T, ?, List<T>> greatest(Comparator<? super T> comparator, int n) {
        if (n <= 0)
            return empty();
        // A min-heap of size n keeps the n greatest elements seen so far:
        // a new element replaces the heap root only if it is greater.
        BiConsumer<PriorityQueue<T>, T> accumulator = (queue, t) -> {
            if (queue.size() < n)
                queue.add(t);
            else if (comparator.compare(queue.peek(), t) < 0) {
                queue.poll();
                queue.add(t);
            }
        };
        return Collector.of(() -> new PriorityQueue<>(comparator), accumulator, (q1, q2) -> {
            for (T t : q2) {
                accumulator.accept(q1, t);
            }
            return q1;
        }, queue -> {
            List<T> result = new ArrayList<>(queue);
            result.sort(comparator.reversed());
            return result;
        });
    }

    /**
     * Returns a {@code Collector} which collects at most specified number of
     * the greatest stream elements according to the natural order into the
     * {@link List}. The resulting {@code List} is sorted in reverse order
     * (greatest element is the first).
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.sorted(Comparator.reverseOrder()).limit(n).collect(Collectors.toList())}
     * , but can be performed much faster if the input is not sorted and
     * {@code n} is much less than the stream size.
     *
     * <p>
     * When supplied {@code n} is less or equal to zero, this method returns a
     * <a href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> which ignores the input and produces an empty list.
     *
     * @param <T>
     *            the type of the input elements
     * @param n
     *            maximum number of stream elements to preserve
     * @return a collector which returns a {@code List} containing the greatest
     *         n stream elements or less if the stream was shorter.
     */
    public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> greatest(int n) {
        return greatest(Comparator.<T> naturalOrder(), n);
    }

    /**
     * Returns a {@code Collector} which collects at most specified number of
     * the least stream elements according to the specified {@link Comparator}
     * into the {@link List}. The resulting {@code List} is sorted in comparator
     * order (least element is the first).
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.sorted(comparator).limit(n).collect(Collectors.toList())},
     * but can be performed much faster if the input is not sorted and {@code n}
     * is much less than the stream size.
     *
     * <p>
     * When supplied {@code n} is less or equal to zero, this method returns a
     * <a href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> which ignores the input and produces an empty list.
     *
     * @param <T>
     *            the type of the input elements
     * @param comparator
     *            the comparator to compare the elements by
     * @param n
     *            maximum number of stream elements to preserve
     * @return a collector which returns a {@code List} containing the least n
     *         stream elements or less if the stream was shorter.
     */
    public static <T> Collector<T, ?, List<T>> least(Comparator<? super T> comparator, int n) {
        return greatest(comparator.reversed(), n);
    }

    /**
     * Returns a {@code Collector} which collects at most specified number of
     * the least stream elements according to the natural order into the
     * {@link List}. The resulting {@code List} is sorted in natural order
     * (least element is the first).
     *
     * <p>
     * The operation performed by the returned collector is equivalent to
     * {@code stream.sorted().limit(n).collect(Collectors.toList())}, but can be
     * performed much faster if the input is not sorted and {@code n} is much
     * less than the stream size.
     *
     * <p>
     * When supplied {@code n} is less or equal to zero, this method returns a
     * <a href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> which ignores the input and produces an empty list.
     *
     * @param <T>
     *            the type of the input elements
     * @param n
     *            maximum number of stream elements to preserve
     * @return a collector which returns a {@code List} containing the least n
     *         stream elements or less if the stream was shorter.
     */
    public static <T extends Comparable<? super T>> Collector<T, ?, List<T>> least(int n) {
        return greatest(Comparator.<T> reverseOrder(), n);
    }

    /**
     * Returns a {@code Collector} which finds the index of the minimal stream
     * element according to the specified {@link Comparator}. If there are
     * several minimal elements, the index of the first one is returned.
     *
     * @param <T>
     *            the type of the input elements
     * @param comparator
     *            a {@code Comparator} to compare the elements
     * @return a {@code Collector} which finds the index of the minimal element.
     * @see #minIndex()
     * @since 0.3.5
     */
    public static <T> Collector<T, ?, OptionalLong> minIndex(Comparator<? super T> comparator) {
        // Local accumulator: current best value, total elements seen in this
        // chunk (count) and the chunk-local index of the best value
        // (index == -1 means the chunk is empty).
        class Container {
            T value;
            long count = 0;
            long index = -1;
        }
        BiConsumer<Container, T> accumulator = (c, t) -> {
            if (c.index == -1 || comparator.compare(c.value, t) > 0) {
                c.value = t;
                c.index = c.count;
            }
            c.count++;
        };
        BinaryOperator<Container> combiner = (c1, c2) -> {
            if (c1.index == -1 || (c2.index != -1 && comparator.compare(c1.value, c2.value) > 0)) {
                // Right chunk wins: shift its index by the left chunk's size.
                c2.index += c1.count;
                c2.count += c1.count;
                return c2;
            }
            c1.count += c2.count;
            return c1;
        };
        Function<Container, OptionalLong> finisher = c -> c.index == -1 ? OptionalLong.empty() : OptionalLong
                .of(c.index);
        return Collector.of(Container::new, accumulator, combiner, finisher);
    }

    /**
     * Returns a {@code Collector} which finds the index of the minimal stream
     * element according to the elements natural order. If there are several
     * minimal elements, the index of the first one is returned.
     *
     * @param <T>
     *            the type of the input elements
     * @return a {@code Collector} which finds the index of the minimal element.
     * @see #minIndex(Comparator)
     * @since 0.3.5
     */
    public static <T extends Comparable<? super T>> Collector<T, ?, OptionalLong> minIndex() {
        return minIndex(Comparator.naturalOrder());
    }

    /**
     * Returns a {@code Collector} which finds the index of the maximal stream
     * element according to the specified {@link Comparator}. If there are
     * several maximal elements, the index of the first one is returned.
     *
     * @param <T>
     *            the type of the input elements
     * @param comparator
     *            a {@code Comparator} to compare the elements
     * @return a {@code Collector} which finds the index of the maximal element.
     * @see #maxIndex()
     * @since 0.3.5
     */
    public static <T> Collector<T, ?, OptionalLong> maxIndex(Comparator<? super T> comparator) {
        return minIndex(comparator.reversed());
    }

    /**
     * Returns a {@code Collector} which finds the index of the maximal stream
     * element according to the elements natural order. If there are several
     * maximal elements, the index of the first one is returned.
     *
     * @param <T>
     *            the type of the input elements
     * @return a {@code Collector} which finds the index of the maximal element.
     * @see #maxIndex(Comparator)
     * @since 0.3.5
     */
    public static <T extends Comparable<? super T>> Collector<T, ?, OptionalLong> maxIndex() {
        return minIndex(Comparator.reverseOrder());
    }

    /**
     * Returns a {@code Collector} implementing a cascaded "group by" operation
     * on input elements of type {@code T}, for classification function which
     * maps input elements to the enum values. The downstream reduction for
     * repeating keys is performed using the specified downstream
     * {@code Collector}.
     *
     * <p>
     * Unlike the {@link Collectors#groupingBy(Function, Collector)} collector
     * this collector produces an {@link EnumMap} which contains all possible
     * keys including keys which were never returned by the classification
     * function. These keys are mapped to the default collector value which is
     * equivalent to collecting an empty stream with the same collector.
     *
     * <p>
     * This method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> if the downstream collector is short-circuiting. The
     * collection might stop when for every possible enum key the downstream
     * collection is known to be finished.
     *
     * @param <T>
     *            the type of the input elements
     * @param <K>
     *            the type of the enum values returned by the classifier
     * @param <A>
     *            the intermediate accumulation type of the downstream collector
     * @param <D>
     *            the result type of the downstream reduction
     * @param enumClass
     *            the class of enum values returned by the classifier
     * @param classifier
     *            a classifier function mapping input elements to enum values
     * @param downstream
     *            a {@code Collector} implementing the downstream reduction
     * @return a {@code Collector} implementing the cascaded group-by operation
     * @see Collectors#groupingBy(Function, Collector)
     * @since 0.3.7
     */
    public static <T, K extends Enum<K>, A, D> Collector<T, ?, EnumMap<K, D>> groupingByEnum(Class<K> enumClass,
            Function<? super T, K> classifier, Collector<? super T, A, D> downstream) {
        return groupingBy(classifier, EnumSet.allOf(enumClass), () -> new EnumMap<>(enumClass), downstream);
    }

    // Domain-aware groupingBy: like groupingByEnum, the resulting map contains
    // an entry for every key of the given domain, using a HashMap as the map
    // implementation. See groupingByEnum(Class, Function, Collector) for the
    // full contract description.
    public static <T, K, D, A> Collector<T, ?, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier,
            Set<K> domain, Collector<? super T, A, D> downstream) {
        return groupingBy(classifier, domain, HashMap::new, downstream);
    }

    public static <T, K, D, A, M extends Map<K, D>> Collector<T, ?, M> groupingBy(
            Function<? super T, ? extends K> classifier, Set<K> domain, Supplier<M> mapFactory, Collector<?
super T, A, D> downstream) { Supplier<A> downstreamSupplier = downstream.supplier(); Collector<T, ?, M> groupingBy; if (downstream instanceof CancellableCollectorImpl) { Function<K, A> supplier = k -> { if (!domain.contains(k)) throw new IllegalStateException("Classifier returned value '" + k + "' which is out of domain"); return downstreamSupplier.get(); }; BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator(); Predicate<A> downstreamFinished = ((CancellableCollectorImpl<? super T, A, D>) downstream).finished(); BiConsumer<Map<K, A>, T> accumulator = (m, t) -> { K key = Objects.requireNonNull(classifier.apply(t)); A container = m.computeIfAbsent(key, supplier); downstreamAccumulator.accept(container, t); }; int size = domain.size(); groupingBy = PartialCollector.grouping(mapFactory, downstream).asCancellable(accumulator, map -> { if (map.size() < size) return false; for (A container : map.values()) { if (!downstreamFinished.test(container)) return false; } return true; }); } else { groupingBy = Collectors.groupingBy(classifier, mapFactory, downstream); } return collectingAndThen(groupingBy, map -> { Function<A, D> finisher = downstream.finisher(); domain.forEach(key -> map.computeIfAbsent(key, k -> finisher.apply(downstreamSupplier.get()))); return map; }); } /** * Returns a {@code Collector} which collects the intersection of the input * collections into the newly-created {@link Set}. * * <p> * The returned collector produces an empty set if the input is empty or * intersection of the input collections is empty. * * <p> * There are no guarantees on the type, mutability, serializability, or * thread-safety of the {@code Set} returned. * * <p> * This method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a>: it may not process all the elements if the resulting * intersection is empty. 
     * @param <T>
     *            the type of the elements in the input collections
     * @param <S>
     *            the type of the input collections
     * @return a {@code Collector} which computes the intersection of the input
     *         collections and collects it to the {@code Set}.
     * @since 0.4.0
     */
    public static <T, S extends Collection<T>> Collector<S, ?, Set<T>> intersecting() {
        // Box.a == null means "no collection seen yet"; the first collection
        // seeds the accumulator, later ones are intersected via retainAll.
        return new CancellableCollectorImpl<>(() -> new Box<Set<T>>(null), (b, t) -> {
            if (b.a == null) {
                b.a = new HashSet<>(t);
            } else {
                b.a.retainAll(t);
            }
        }, (b1, b2) -> {
            if (b1.a == null)
                return b2;
            if (b2.a != null)
                b1.a.retainAll(b2.a);
            return b1;
        }, b -> b.a == null ? Collections.emptySet() : b.a,
            // short-circuit: once the running intersection is empty it can
            // never become non-empty again
            b -> b.a != null && b.a.isEmpty(), UNORDERED_CHARACTERISTICS);
    }

    /**
     * Adapts a {@code Collector} to perform an additional finishing
     * transformation.
     *
     * <p>
     * Unlike {@link Collectors#collectingAndThen(Collector, Function)} this
     * method returns a <a
     * href="package-summary.html#ShortCircuitReduction">short-circuiting
     * collector</a> if the downstream collector is short-circuiting.
* * @param <T> * the type of the input elements * @param <A> * intermediate accumulation type of the downstream collector * @param <R> * result type of the downstream collector * @param <RR> * result type of the resulting collector * @param downstream * a collector * @param finisher * a function to be applied to the final result of the downstream * collector * @return a collector which performs the action of the downstream * collector, followed by an additional finishing step * @see Collectors#collectingAndThen(Collector, Function) * @since 0.4.0 */ public static <T, A, R, RR> Collector<T, A, RR> collectingAndThen(Collector<T, A, R> downstream, Function<R, RR> finisher) { if (downstream instanceof CancellableCollector) { return new CancellableCollectorImpl<>(downstream.supplier(), downstream.accumulator(), downstream.combiner(), downstream.finisher().andThen(finisher), ((CancellableCollector<T, A, R>) downstream).finished(), downstream.characteristics().contains( Characteristics.UNORDERED) ? UNORDERED_CHARACTERISTICS : NO_CHARACTERISTICS); } return Collectors.collectingAndThen(downstream, finisher); } /** * Returns a {@code Collector} which partitions the input elements according * to a {@code Predicate}, reduces the values in each partition according to * another {@code Collector}, and organizes them into a * {@code Map<Boolean, D>} whose values are the result of the downstream * reduction. * * <p> * Unlike {@link Collectors#partitioningBy(Predicate, Collector)} this * method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a> if the downstream collector is short-circuiting. 
* * @param <T> * the type of the input elements * @param <A> * the intermediate accumulation type of the downstream collector * @param <D> * the result type of the downstream reduction * @param predicate * a predicate used for classifying input elements * @param downstream * a {@code Collector} implementing the downstream reduction * @return a {@code Collector} implementing the cascaded partitioning * operation * @since 0.4.0 * @see Collectors#partitioningBy(Predicate, Collector) */ public static <T, D, A> Collector<T, ?, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate, Collector<? super T, A, D> downstream) { if (downstream instanceof CancellableCollector) { BiConsumer<A, ? super T> accumulator = downstream.accumulator(); Predicate<A> finished = ((CancellableCollector<? super T, A, D>) downstream).finished(); return BooleanMap.partialCollector(downstream).asCancellable( (map, t) -> accumulator.accept(predicate.test(t) ? map.trueValue : map.falseValue, t), map -> finished.test(map.trueValue) && finished.test(map.falseValue)); } return Collectors.partitioningBy(predicate, downstream); } /** * Adapts a {@code Collector} accepting elements of type {@code U} to one * accepting elements of type {@code T} by applying a mapping function to * each input element before accumulation. * * <p> * Unlike {@link Collectors#mapping(Function, Collector)} this method * returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a> if the downstream collector is short-circuiting. 
* * @param <T> * the type of the input elements * @param <U> * type of elements accepted by downstream collector * @param <A> * intermediate accumulation type of the downstream collector * @param <R> * result type of collector * @param mapper * a function to be applied to the input elements * @param downstream * a collector which will accept mapped values * @return a collector which applies the mapping function to the input * elements and provides the mapped results to the downstream * collector * @see Collectors#mapping(Function, Collector) * @since 0.4.0 */ public static <T, U, A, R> Collector<T, ?, R> mapping(Function<? super T, ? extends U> mapper, Collector<? super U, A, R> downstream) { if (downstream instanceof CancellableCollector) { BiConsumer<A, ? super U> downstreamAccumulator = downstream.accumulator(); return new CancellableCollectorImpl<>(downstream.supplier(), (r, t) -> downstreamAccumulator.accept(r, mapper.apply(t)), downstream.combiner(), downstream.finisher(), ((CancellableCollector<? super U, A, R>) downstream).finished(), downstream.characteristics()); } return Collectors.mapping(mapper, downstream); } /** * Returns a {@code Collector} which passes only those elements to the * specified downstream collector which match given predicate. * * <p> * This method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a> if downstream collector is short-circuiting. * * <p> * The operation performed by the returned collector is equivalent to * {@code stream.filter(predicate).collect(downstream)}. This collector is * mostly useful as a downstream collector in cascaded operation involving * {@link #pairing(Collector, Collector, BiFunction)} collector. 
* * @param <T> * the type of the input elements * @param <A> * intermediate accumulation type of the downstream collector * @param <R> * result type of collector * @param predicate * a filter function to be applied to the input elements * @param downstream * a collector which will accept filtered values * @return a collector which applies the predicate to the input elements and * provides the elements for which predicate returned true to the * downstream collector * @see #pairing(Collector, Collector, BiFunction) * @since 0.4.0 */ public static <T, A, R> Collector<T, ?, R> filtering(Predicate<? super T> predicate, Collector<T, A, R> downstream) { BiConsumer<A, T> downstreamAccumulator = downstream.accumulator(); BiConsumer<A, T> accumulator = (acc, t) -> { if (predicate.test(t)) downstreamAccumulator.accept(acc, t); }; if (downstream instanceof CancellableCollector) { return new CancellableCollectorImpl<>(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), ((CancellableCollector<T, A, R>) downstream).finished(), downstream.characteristics()); } return Collector.of(downstream.supplier(), accumulator, downstream.combiner(), downstream.finisher(), downstream.characteristics().toArray(new Characteristics[downstream.characteristics().size()])); } public static Collector<CharSequence, ?, String> joining(CharSequence delimiter, CharSequence ellipsis, int limit, boolean partial) { if (limit <= 0) return empty(() -> ""); int delimLength = delimiter.length(); BiConsumer<ObjIntBox<ArrayList<String>>, CharSequence> accumulator = (acc, str) -> { if (acc.b <= limit) { acc.b += str.length() + (acc.a.isEmpty() ? 0 : delimLength); acc.a.add(str.toString()); } }; BinaryOperator<ObjIntBox<ArrayList<String>>> combiner = (acc1, acc2) -> { int len = acc1.b + acc2.b + ((acc1.a.isEmpty() || acc2.a.isEmpty()) ? 
0 : delimLength); if (len <= limit) { acc1.b = len; acc1.a.addAll(acc2.a); } else { for (CharSequence s : acc2.a) { if (acc1.b > limit) break; accumulator.accept(acc1, s); } } return acc1; }; Function<ObjIntBox<ArrayList<String>>, String> finisher = acc -> { char[] result = new char[Math.min(limit, acc.b)]; char[] delimArray = delimiter.toString().toCharArray(); int ellipsisLength = Math.min(ellipsis.length(), limit); int pos = 0; boolean overflow = false; int prevPos = 0; for (int i = 0; i < acc.a.size(); i++) { String s = acc.a.get(i); int nextPos; if (i > 0) { nextPos = pos + delimArray.length; System.arraycopy(delimArray, 0, result, pos, Math.min(nextPos, limit) - pos); if (nextPos > limit) { overflow = true; break; } pos = nextPos; if (!partial && pos <= limit - ellipsisLength) { prevPos = pos; } } nextPos = pos + s.length(); s.getChars(0, Math.min(nextPos, limit) - pos, result, pos); if (nextPos > limit) { overflow = true; break; } pos = nextPos; } if (overflow) { if (!partial) { ellipsis.toString().getChars(0, ellipsisLength, result, prevPos); return new String(result, 0, prevPos + ellipsisLength); } ellipsis.toString().getChars(0, ellipsisLength, result, limit - ellipsisLength); } return new String(result); }; return new CancellableCollectorImpl<>(() -> new ObjIntBox<>(new ArrayList<String>(), 0), accumulator, combiner, finisher, acc -> acc.b > limit, NO_CHARACTERISTICS); } /** * Returns a {@code Collector} that produces the bitwise-and operation of a * integer-valued function applied to the input elements. If no elements are * present, the result is empty {@link OptionalInt}. * * <p> * This method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a>: it may not process all the elements if the result is zero. 
* * @param <T> * the type of the input elements * @param mapper * a function extracting the property to be processed * @return a {@code Collector} that produces the bitwise-and operation of a * derived property * @since 0.4.0 */ public static <T> Collector<T, ?, OptionalInt> andingInt(ToIntFunction<T> mapper) { return new CancellableCollectorImpl<>(PrimitiveBox::new, (acc, t) -> { if (!acc.b) { acc.i = mapper.applyAsInt(t); acc.b = true; } else { acc.i &= mapper.applyAsInt(t); } }, (acc1, acc2) -> { if (!acc1.b) return acc2; if (!acc2.b) return acc1; acc1.i &= acc2.i; return acc1; }, PrimitiveBox::asInt, acc -> acc.b && acc.i == 0, UNORDERED_CHARACTERISTICS); } /** * Returns a {@code Collector} that produces the bitwise-and operation of a * long-valued function applied to the input elements. If no elements are * present, the result is empty {@link OptionalLong}. * * <p> * This method returns a <a * href="package-summary.html#ShortCircuitReduction">short-circuiting * collector</a>: it may not process all the elements if the result is zero. * * @param <T> * the type of the input elements * @param mapper * a function extracting the property to be processed * @return a {@code Collector} that produces the bitwise-and operation of a * derived property * @since 0.4.0 */ public static <T> Collector<T, ?, OptionalLong> andingLong(ToLongFunction<T> mapper) { return new CancellableCollectorImpl<>(PrimitiveBox::new, (acc, t) -> { if (!acc.b) { acc.l = mapper.applyAsLong(t); acc.b = true; } else { acc.l &= mapper.applyAsLong(t); } }, (acc1, acc2) -> { if (!acc1.b) return acc2; if (!acc2.b) return acc1; acc1.l &= acc2.l; return acc1; }, PrimitiveBox::asLong, acc -> acc.b && acc.l == 0, UNORDERED_CHARACTERISTICS); } }
MoreCollectors: cosmetic
src/main/java/javax/util/streamex/MoreCollectors.java
MoreCollectors: cosmetic
Java
apache-2.0
d7a9d3d234c43088f70ffe7cf0a89e55a5de5cbd
0
Flamenco/vaadin,synes/vaadin,cbmeeks/vaadin,kironapublic/vaadin,bmitc/vaadin,Scarlethue/vaadin,asashour/framework,carrchang/vaadin,peterl1084/framework,asashour/framework,fireflyc/vaadin,peterl1084/framework,kironapublic/vaadin,shahrzadmn/vaadin,Peppe/vaadin,bmitc/vaadin,bmitc/vaadin,peterl1084/framework,udayinfy/vaadin,jdahlstrom/vaadin.react,magi42/vaadin,kironapublic/vaadin,Legioth/vaadin,oalles/vaadin,Flamenco/vaadin,synes/vaadin,mittop/vaadin,mstahv/framework,shahrzadmn/vaadin,Peppe/vaadin,travisfw/vaadin,bmitc/vaadin,udayinfy/vaadin,magi42/vaadin,mstahv/framework,oalles/vaadin,asashour/framework,Peppe/vaadin,Peppe/vaadin,jdahlstrom/vaadin.react,mstahv/framework,synes/vaadin,Darsstar/framework,sitexa/vaadin,udayinfy/vaadin,shahrzadmn/vaadin,travisfw/vaadin,kironapublic/vaadin,magi42/vaadin,Scarlethue/vaadin,travisfw/vaadin,jdahlstrom/vaadin.react,carrchang/vaadin,synes/vaadin,cbmeeks/vaadin,Flamenco/vaadin,asashour/framework,oalles/vaadin,peterl1084/framework,magi42/vaadin,mittop/vaadin,travisfw/vaadin,Scarlethue/vaadin,Darsstar/framework,Darsstar/framework,fireflyc/vaadin,carrchang/vaadin,udayinfy/vaadin,jdahlstrom/vaadin.react,sitexa/vaadin,magi42/vaadin,Darsstar/framework,Peppe/vaadin,shahrzadmn/vaadin,Flamenco/vaadin,Legioth/vaadin,oalles/vaadin,fireflyc/vaadin,mstahv/framework,fireflyc/vaadin,Darsstar/framework,carrchang/vaadin,asashour/framework,travisfw/vaadin,oalles/vaadin,Legioth/vaadin,Legioth/vaadin,mittop/vaadin,mstahv/framework,sitexa/vaadin,Scarlethue/vaadin,cbmeeks/vaadin,fireflyc/vaadin,jdahlstrom/vaadin.react,sitexa/vaadin,sitexa/vaadin,Scarlethue/vaadin,udayinfy/vaadin,cbmeeks/vaadin,shahrzadmn/vaadin,kironapublic/vaadin,peterl1084/framework,Legioth/vaadin,mittop/vaadin,synes/vaadin
/* @ITMillApache2LicenseForJavaFiles@ */ package com.itmill.toolkit.demo; import com.itmill.toolkit.Application; import com.itmill.toolkit.data.util.MethodProperty; import com.itmill.toolkit.data.util.ObjectProperty; import com.itmill.toolkit.ui.Button; import com.itmill.toolkit.ui.Label; import com.itmill.toolkit.ui.TextField; import com.itmill.toolkit.ui.Window; import com.itmill.toolkit.ui.Button.ClickEvent; import com.itmill.toolkit.ui.Window.Notification; public class BufferedComponents extends Application { private ObjectProperty property; private TextField text; public void init() { final Window w = new Window("Buffered UI components demo"); addWindow(w); // Create property final Float floatValue = new Float(1.0f); property = new ObjectProperty(floatValue); // Textfield text = new TextField("TextField (Buffered, using ObjectProperty)", property); text.setImmediate(true); text.setWriteThrough(false); w.addComponent(text); // Property state final Label propertyState = new Label(property); propertyState.setCaption("Property (data source) state"); w.addComponent(propertyState); // Button state final Label textState = new Label(text); textState.setCaption("TextField state"); w.addComponent(textState); // Button to change the property w.addComponent(new Button("increase property value", new Button.ClickListener() { public void buttonClick(ClickEvent event) { final Float currentValue = (Float) property.getValue(); property.setValue(new Float( currentValue.floatValue() + 1.0)); } })); // Buffering w.addComponent(new Button("Write through enabled", new MethodProperty( text, "writeThrough"))); w.addComponent(new Button("discard", new Button.ClickListener() { public void buttonClick(ClickEvent event) { text.discard(); } })); Button commit = new Button("commit", new Button.ClickListener() { public void buttonClick(ClickEvent event) { try { text.commit(); w.showNotification("Committed " + property + " to datasource."); } catch (Throwable e) { w.showNotification("Error 
committing an invalid value: " + text, Notification.TYPE_WARNING_MESSAGE); } } }); w.addComponent(commit); // Restart button for application // (easier debugging when you dont have to restart the server to // make // code changes) final Button restart = new Button("restart", this, "close"); restart.addStyleName(Button.STYLE_LINK); w.addComponent(restart); } }
src/com/itmill/toolkit/demo/BufferedComponents.java
/* @ITMillApache2LicenseForJavaFiles@ */ package com.itmill.toolkit.demo; import com.itmill.toolkit.Application; import com.itmill.toolkit.data.util.MethodProperty; import com.itmill.toolkit.data.util.ObjectProperty; import com.itmill.toolkit.ui.Button; import com.itmill.toolkit.ui.Label; import com.itmill.toolkit.ui.TextField; import com.itmill.toolkit.ui.Window; import com.itmill.toolkit.ui.Button.ClickEvent; public class BufferedComponents extends Application { private ObjectProperty property; private TextField text; public void init() { final Window w = new Window("Buffered UI components demo"); addWindow(w); // Create property final Float floatValue = new Float(1.0f); property = new ObjectProperty(floatValue); // Textfield text = new TextField("TextField (Buffered, using ObjectProperty)", property); text.setImmediate(true); text.setWriteThrough(false); w.addComponent(text); // Property state final Label propertyState = new Label(property); propertyState.setCaption("Property (data source) state"); w.addComponent(propertyState); // Button state final Label textState = new Label(text); textState.setCaption("TextField state"); w.addComponent(textState); // Button to change the property w.addComponent(new Button("increase property value", new Button.ClickListener() { public void buttonClick(ClickEvent event) { final Float currentValue = (Float) property.getValue(); property.setValue(new Float( currentValue.floatValue() + 1.0)); } })); // Buffering w.addComponent(new Button("Write through enabled", new MethodProperty( text, "writeThrough"))); w.addComponent(new Button("discard", new Button.ClickListener() { public void buttonClick(ClickEvent event) { text.discard(); } })); w.addComponent(new Button("commit", new Button.ClickListener() { public void buttonClick(ClickEvent event) { text.commit(); } })); // Restart button for application // (easier debugging when you dont have to restart the server to // make // code changes) final Button restart = new 
Button("restart", this, "close"); restart.addStyleName(Button.STYLE_LINK); w.addComponent(restart); } }
Fixed #2067 : Added exception-handler to BufferedComponents demo. svn changeset:5495/svn branch:trunk
src/com/itmill/toolkit/demo/BufferedComponents.java
Fixed #2067 : Added exception-handler to BufferedComponents demo.
Java
apache-2.0
619e6b18125115a3d600607bebef28e7dc7cac73
0
mojo2012/spot-framework,mojo2012/spOt,mojo2012/spot-framework,mojo2012/spOt
package io.spotnext.core.persistence.hibernate.impl; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.persistence.CacheRetrieveMode; import javax.persistence.EntityGraph; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityNotFoundException; import javax.persistence.PersistenceException; import javax.persistence.PersistenceUnit; import javax.persistence.Subgraph; import javax.persistence.TransactionRequiredException; import javax.persistence.Tuple; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Order; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.validation.ConstraintViolationException; import javax.validation.ValidationException; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.math.NumberUtils; import org.hibernate.CacheMode; import org.hibernate.HibernateException; import org.hibernate.LockMode; import org.hibernate.ScrollableResults; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.internal.FetchingScrollableResultsImpl; import org.hibernate.internal.SessionImpl; import org.hibernate.persister.entity.AbstractEntityPersister; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.query.Query; import org.hibernate.stat.Statistics; import org.hibernate.tool.hbm2ddl.SchemaExport; import org.hibernate.tool.hbm2ddl.SchemaUpdate; import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; 
import org.hibernate.tool.schema.spi.SchemaManagementException; import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator; import org.hibernate.tool.schema.spi.SchemaValidator; import org.hibernate.transform.AliasToEntityMapResultTransformer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.DependsOn; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.orm.jpa.EntityManagerFactoryUtils; import org.springframework.orm.jpa.EntityManagerHolder; import org.springframework.transaction.TransactionException; import org.springframework.transaction.support.TransactionSynchronizationManager; //import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.spotnext.core.infrastructure.annotation.logging.Log; import io.spotnext.core.infrastructure.exception.ModelNotFoundException; import io.spotnext.core.infrastructure.exception.ModelSaveException; import io.spotnext.core.infrastructure.exception.UnknownTypeException; import io.spotnext.core.infrastructure.service.ConfigurationService; import io.spotnext.core.infrastructure.service.ValidationService; import io.spotnext.core.infrastructure.support.LogLevel; import io.spotnext.core.infrastructure.support.Logger; import io.spotnext.core.persistence.exception.ModelNotUniqueException; import io.spotnext.core.persistence.exception.QueryException; import io.spotnext.core.persistence.query.JpqlQuery; import io.spotnext.core.persistence.query.ModelQuery; import io.spotnext.core.persistence.query.QueryResult; import io.spotnext.core.persistence.query.SortOrder; import io.spotnext.core.persistence.query.SortOrder.OrderDirection; import io.spotnext.core.persistence.service.TransactionService; import io.spotnext.core.persistence.service.impl.AbstractPersistenceService; import 
io.spotnext.infrastructure.annotation.Property; import io.spotnext.infrastructure.type.Item; import io.spotnext.infrastructure.type.ItemTypePropertyDefinition; import io.spotnext.support.util.ClassUtil; import io.spotnext.support.util.MiscUtil; /** * <p> * HibernatePersistenceService class. * </p> * * @author mojo2012 * @version 1.0 * @since 1.0 */ @DependsOn("typeService") //@SuppressFBWarnings("BC_UNCONFIRMED_CAST_OF_RETURN_VALUE") public class HibernatePersistenceService extends AbstractPersistenceService { @Value("${hibernate.jdbc.batch_size:}") private int jdbcBatchSize = 100; protected MetadataExtractorIntegrator metadataIntegrator = MetadataExtractorIntegrator.INSTANCE; @PersistenceUnit protected EntityManagerFactory entityManagerFactory; protected TransactionService transactionService; @Autowired protected ValidationService validationService; /** * <p> * Constructor for HibernatePersistenceService. * </p> * * @param entityManagerFactory a {@link javax.persistence.EntityManagerFactory} object. * @param transactionService a {@link io.spotnext.core.persistence.service.TransactionService} object. * @param configurationService a {@link io.spotnext.infrastructure.service.ConfigurationService} object. * @param loggingService a {@link io.spotnext.infrastructure.service.LoggingService} object. */ @Autowired public HibernatePersistenceService(EntityManagerFactory entityManagerFactory, TransactionService transactionService, ConfigurationService configurationService) { this.entityManagerFactory = entityManagerFactory; this.transactionService = transactionService; this.configurationService = configurationService; if (configurationService.getBoolean("core.setup.typesystem.initialize", false)) { initializeTypeSystem(); } if (configurationService.getBoolean("core.setup.typesystem.update", false)) { updateTypeSystem(); } validateTypeSystem(); if (configurationService.getBoolean("cleantypesystem", false)) { Logger.info("Cleaning type system ... 
(not yet implemented)"); clearTypeSystem(); } Logger.info(String.format("Persistence service initialized")); } @Override public void initializeTypeSystem() { Logger.info("Initializing type system schema ..."); final SchemaExport schemaExport = new SchemaExport(); schemaExport.setHaltOnError(true); schemaExport.setFormat(true); schemaExport.setDelimiter(";"); schemaExport.setOutputFile("db-schema.sql"); try { // TODO will most likely fail, implement a pure JDBC "drop // database" approach? schemaExport.drop(EnumSet.of(TargetType.DATABASE), metadataIntegrator.getMetadata()); } catch (final Exception e) { Logger.warn("Could not drop type system schema."); } schemaExport.createOnly(EnumSet.of(TargetType.DATABASE), metadataIntegrator.getMetadata()); } @Override public void updateTypeSystem() { Logger.info("Updating type system schema ..."); final SchemaUpdate schemaExport = new SchemaUpdate(); schemaExport.setHaltOnError(true); schemaExport.setFormat(true); schemaExport.setDelimiter(";"); schemaExport.setOutputFile("db-schema.sql"); schemaExport.execute(EnumSet.of(TargetType.DATABASE), metadataIntegrator.getMetadata()); } @Override public void validateTypeSystem() { final SchemaManagementTool tool = metadataIntegrator.getServiceRegistry() .getService(SchemaManagementTool.class); try { final SchemaValidator validator = tool.getSchemaValidator(entityManagerFactory.getProperties()); validator.doValidation(metadataIntegrator.getMetadata(), SchemaManagementToolCoordinator .buildExecutionOptions(entityManagerFactory.getProperties(), ExceptionHandlerLoggedImpl.INSTANCE)); Logger.debug("Type system schema seems to be OK"); } catch (final SchemaManagementException e) { // currently hibernate throws a validation exception for float values that are being created as doubles ... 
// see https://hibernate.atlassian.net/browse/HHH-8690 // so we hide that message in case we just did an initialization, otherwise it would look confusing in the logs if (!configurationService.getBoolean("core.setup.typesystem.initialize", false)) { Logger.warn("Type system schema needs to be initialized/updated"); } } } protected void clearTypeSystem() { } private QueryResult executeQuery(JpqlQuery sourceQuery, Query query) { List values = new ArrayList<>(); Integer totalCount = null; if (sourceQuery.getPageSize() > 0) { int start = (sourceQuery.getPage() > 0 ? sourceQuery.getPage() - 1 : 0) * sourceQuery.getPageSize(); ScrollableResults scrollResult = null; try { scrollResult = query.scroll(); if (start > 0) { scrollResult.scroll(start); } do { Object value = scrollResult.get(); // this should actually not happen, but it does ... // TODO: check and fix null result objects if (value != null) { if (value.getClass().isArray()) { Object[] valueArray = (Object[]) value; if (valueArray.length > 0) { values.add(valueArray[0]); } } else { values.add(value); } } } while (values.size() < sourceQuery.getPageSize() && scrollResult.next()); // go to last row to get max rows scrollResult.last(); totalCount = scrollResult.getRowNumber(); // different implementations handle this either with a start index of 0 or 1 ... if (!(scrollResult instanceof FetchingScrollableResultsImpl)) { totalCount += 1; } } finally { MiscUtil.closeQuietly(scrollResult); } } else { values = query.list(); totalCount = values.size(); } QueryResult result = new QueryResult(values, sourceQuery.getPage(), sourceQuery.getPageSize(), totalCount != null ? 
Long.valueOf(totalCount) : null); return result; } /** {@inheritDoc} */ // @SuppressFBWarnings("REC_CATCH_EXCEPTION") @Override public <T> QueryResult<T> query(final io.spotnext.core.persistence.query.JpqlQuery<T> sourceQuery) throws QueryException { bindSession(); try { return transactionService.execute(() -> { QueryResult<T> results = null; final Session session = getSession(); session.setDefaultReadOnly(sourceQuery.isReadOnly()); // if this is an item type, we just load the entities // if it is a "primitive" natively supported type we can also // just let hibernate do the work if (Item.class.isAssignableFrom(sourceQuery.getResultClass()) || NATIVE_DATATYPES.contains(sourceQuery.getResultClass())) { Query<T> query = null; try { query = session.createQuery(sourceQuery.getQuery(), sourceQuery.getResultClass()); } catch (final Exception e) { throw new QueryException("Could not parse query", e); } setAccessLevel(sourceQuery, query); setCacheSettings(session, sourceQuery, query); setFetchSubGraphsHint(session, sourceQuery, query); setParameters(sourceQuery.getParams(), query); // setPagination(query, sourceQuery.getPage(), sourceQuery.getPageSize()); results = executeQuery(sourceQuery, query); } else { // otherwise we load each value into a list of tuples // in that case the selected columns need to be aliased in // case the given result type has no constructor that exactly matches the returned // columns' types, as otherwise we cannot map the row values to properties. 
// only try to load results if the result type is not Void if (sourceQuery.isExecuteUpdate()) { final Query<Integer> query = session.createQuery(sourceQuery.getQuery()); setAccessLevel(sourceQuery, (Query<T>) query); setParameters(sourceQuery.getParams(), query); int resultCode = query.executeUpdate(); session.flush(); if (sourceQuery.isClearCaches()) { session.clear(); } boolean returnTypeSpecified = !Void.class.isAssignableFrom(sourceQuery.getResultClass()); results = (QueryResult<T>) new QueryResult<Integer>(returnTypeSpecified ? Arrays.asList(resultCode) : null, 0, 0, null); } else { // fetch the temporary Tuple (!) result and convert it into the target type manually final Query<Tuple> query = session.createQuery(sourceQuery.getQuery(), Tuple.class); setAccessLevel(sourceQuery, (Query<T>) query); setParameters(sourceQuery.getParams(), query); if (Map.class.isAssignableFrom(sourceQuery.getResultClass())) { // all selected columns must specify an alias, otherwise the column value would not appear in the map! 
query.setResultTransformer(AliasToEntityMapResultTransformer.INSTANCE); results = executeQuery(sourceQuery, query); } else { final QueryResult<Tuple> tempResults = (QueryResult<Tuple>) executeQuery(sourceQuery, query); List<T> finalResults = new ArrayList<>(); // if the return type is Tuple, a tuple array is returned // therefore we have to extract the first tuple first and use that as a base object // if no return type is set, it is only a Tuple for (final Object entry : tempResults.getResults()) { // first try to create the pojo using a constructor // that matches the result's column types Tuple t = null; if (entry != null && entry.getClass().isArray()) { Object[] entryArray = ((Object[]) entry); if (entryArray.length > 0) { t = (Tuple) entryArray[0]; } } else if (entry instanceof Tuple) { t = (Tuple) entry; } if (t == null) { continue; } final Tuple tupleEntry = t; final List<Object> values = t.getElements().stream().map(e -> tupleEntry.get(e)) .collect(Collectors.toList()); if (Tuple.class.isAssignableFrom(sourceQuery.getResultClass())) { // if the only object in the tuple is an item, we can directly return it, otherwise we just return a list of values if (values != null && values.size() == 1 && values.get(0) instanceof Item) { finalResults.add((T) values.get(0)); } else { finalResults.add((T) values); } } else { Optional<T> pojo = ClassUtil.instantiate(sourceQuery.getResultClass(), values.toArray()); // if the POJO can't be instantiated, we try to // create it manually and inject the data using // reflection for this to work, each selected column // has to have the same alias as the pojo's // property! 
if (!pojo.isPresent()) { final Optional<T> obj = ClassUtil.instantiate(sourceQuery.getResultClass()); if (obj.isPresent()) { final Object o = obj.get(); t.getElements().stream() .forEach(el -> ClassUtil.setField(o, el.getAlias(), tupleEntry.get(el.getAlias()))); } pojo = obj; } if (pojo.isPresent()) { finalResults.add(pojo.get()); } else { throw new InstantiationException(String.format("Could not instantiate result type '%s'", sourceQuery.getResultClass())); } } } results = new QueryResult<>(finalResults, sourceQuery.getPage(), sourceQuery.getPageSize(), tempResults.getTotalCount()); } } } return results; }); } catch (final QueryException e) { throw e; } catch (final Exception e) { throw new QueryException(String.format("Could not execute query '%s'", sourceQuery.getQuery()), e); } } private <T, Q extends io.spotnext.core.persistence.query.Query<T>> void setAccessLevel(Q sourceQuery, Query<T> query) { query.setReadOnly(sourceQuery.isReadOnly()); } protected <T, Q extends io.spotnext.core.persistence.query.Query<T>> void setCacheSettings(final Session session, final Q sourceQuery, final TypedQuery<T> query) { CacheMode cacheMode = CacheMode.NORMAL; if (!sourceQuery.isCachable() && !sourceQuery.isIgnoreCache()) { cacheMode = CacheMode.GET; } else if (!sourceQuery.isCachable() && sourceQuery.isIgnoreCache()) { cacheMode = CacheMode.IGNORE; } else if (sourceQuery.isCachable() && sourceQuery.isIgnoreCache()) { cacheMode = CacheMode.PUT; } session.setCacheMode(cacheMode); // query.setHint("org.hibernate.cacheable", sourceQuery.isCachable()); query.setHint("javax.persistence.cache.retrieveMode", sourceQuery.isIgnoreCache() ? CacheRetrieveMode.BYPASS : CacheRetrieveMode.USE); } protected <T, Q extends io.spotnext.core.persistence.query.Query<T>> void setFetchSubGraphsHint( final Session session, final Q sourceQuery, final TypedQuery<T> query) throws UnknownTypeException { // TODO what about fetchgraph? 
final List<String> fetchSubGraphs = new ArrayList<>(); if (sourceQuery.isEagerFetchRelations()) { final Map<String, ItemTypePropertyDefinition> props = typeService .getItemTypeProperties(typeService.getTypeCodeForClass((Class<Item>) sourceQuery.getResultClass())); // add all properties final List<String> validProperties = props.values().stream() // .filter(p -> Item.class.isAssignableFrom(p.getReturnType()) || p.getRelationDefinition() != null) // .map(p -> p.getName()) // .collect(Collectors.toList()); fetchSubGraphs.addAll(validProperties); } else if (sourceQuery.getEagerFetchRelationProperties().size() > 0) { fetchSubGraphs.addAll(sourceQuery.getEagerFetchRelationProperties()); } if (fetchSubGraphs.size() > 0) { if (!Item.class.isAssignableFrom(sourceQuery.getResultClass())) { Logger.debug("Fetch sub graphs can only be used for item queries - ignoring"); return; } final EntityGraph<T> graph = session.createEntityGraph(sourceQuery.getResultClass()); for (final String subgraph : fetchSubGraphs) { final Subgraph<?> itemGraph = graph.addSubgraph(subgraph); } query.setHint("javax.persistence.loadgraph", graph); } } protected <T> void setParameters(final Map<String, Object> params, final Query<T> query) { for (final Map.Entry<String, Object> entry : params.entrySet()) { if (NumberUtils.isCreatable(entry.getKey())) { query.setParameter(Integer.parseInt(entry.getKey()), entry.getValue()); } else { query.setParameter(entry.getKey(), entry.getValue()); } } } protected void setPagination(final javax.persistence.Query query, final int page, final int pageSize) { if (pageSize > 0) { query.setFirstResult((page > 0 ? 
page - 1 : 0) * pageSize); query.setMaxResults(pageSize); } } /** {@inheritDoc} */ @Log(logLevel = LogLevel.DEBUG, measureExecutionTime = true, executionTimeThreshold = 100) @Override public <T extends Item> void save(final List<T> items) throws ModelSaveException, ModelNotUniqueException { bindSession(); try { transactionService.execute(() -> { final Session session = getSession(); int i = 0; try { for (final T item : items) { if (item.getVersion() == -1) { session.save(item); } else { session.saveOrUpdate(item); } // use same as the JDBC batch size if (i >= jdbcBatchSize && i % jdbcBatchSize == 0) { // flush a batch of inserts and release memory: session.flush(); } i++; } // this is needed, otherwise saved entities are not session.flush(); items.stream().forEach(o -> session.evict(o)); } catch (final ValidationException e) { final String message; if (e instanceof ConstraintViolationException) { message = validationService .convertToReadableMessage(((ConstraintViolationException) e).getConstraintViolations()); } else { message = e.getMessage(); } throw new ModelSaveException(message, e); } catch (final DataIntegrityViolationException | TransactionRequiredException | IllegalArgumentException e) { throw new ModelSaveException("Could not save given items: " + e.getMessage(), e); } catch (final Exception e) { final Throwable rootCause = ExceptionUtils.getRootCause(e); final String rootCauseMessage = rootCause != null ? 
rootCause.getMessage() : e.getMessage(); throw new ModelSaveException(rootCauseMessage, e); } return null; }); } catch (final TransactionException e) { if (e.getCause() instanceof ModelSaveException) { throw (ModelSaveException) e.getCause(); } else if (e.getCause() instanceof ModelNotUniqueException) { throw (ModelNotUniqueException) e.getCause(); } else { throw e; } } } /** {@inheritDoc} */ @Override public <T extends Item> T load(final Class<T> type, final long id, boolean returnProxy) throws ModelNotFoundException { bindSession(); try { return transactionService.execute(() -> { T item = returnProxy ? getSession().load(type, id) : getSession().get(type, id); return item; }); } catch (final TransactionException e) { if (e.getCause() instanceof ModelNotFoundException) { throw (ModelNotFoundException) e.getCause(); } else { throw e; } } } /** {@inheritDoc} */ @Override public <T extends Item> void refresh(final List<T> items) throws ModelNotFoundException { bindSession(); try { transactionService.execute(() -> { for (final T item : items) { try { if (attach(item)) { getSession().refresh(item, LockMode.NONE); } } catch (DataIntegrityViolationException | HibernateException | TransactionRequiredException | IllegalArgumentException | EntityNotFoundException e) { throw new ModelNotFoundException( String.format("Could not refresh item with id=%s.", item.getId()), e); } } return null; }); } catch (final TransactionException e) { if (e.getCause() instanceof ModelNotFoundException) { throw (ModelNotFoundException) e.getCause(); } else { throw e; } } } /** {@inheritDoc} */ @Override public <T extends Item> boolean attach(final T item) throws ModelNotFoundException { bindSession(); try { // ignore unpersisted or already attached items if (isAttached(item)) { return true; } getSession().load(item, item.getId()); } catch (HibernateException | TransactionRequiredException | IllegalArgumentException | EntityNotFoundException e) { throw new ModelNotFoundException( 
String.format("Could not attach item with id=%s to the current session.", item.getId()), e); } return false; } /** {@inheritDoc} */ @Override public <T extends Item> List<T> load(final ModelQuery<T> sourceQuery) { bindSession(); return transactionService.execute(() -> { final Session session = getSession(); final CriteriaBuilder builder = session.getCriteriaBuilder(); final CriteriaQuery<T> cq = builder.createQuery(sourceQuery.getResultClass()); final Root<T> queryResultType = cq.from(sourceQuery.getResultClass()); CriteriaQuery<T> itemSelect = cq.select(queryResultType); // check if we have to perform a separate query for pagination // hibernate can't handle pagination together with FETCH JOINs! boolean isIdQueryForPaginationNeeded = sourceQuery.getPageSize() > 0 && (sourceQuery.getEagerFetchRelationProperties().size() > 0 || sourceQuery.isEagerFetchRelations()); boolean isSearchParametersDefined = MapUtils.isNotEmpty(sourceQuery.getSearchParameters()); Predicate whereClause = null; if (isSearchParametersDefined) { whereClause = builder.conjunction(); for (final Map.Entry<String, Object> entry : sourceQuery.getSearchParameters().entrySet()) { if (entry.getValue() instanceof Item && !((Item) entry.getValue()).isPersisted()) { throw new PersistenceException(String.format( "Passing non-persisted item as search param '%s' is not supported.", entry.getKey())); } whereClause = builder.and(whereClause, builder.equal(queryResultType.get(entry.getKey()), entry.getValue())); } } // always order by last created date and THEN ID, so we have a consistent ordering, even if new items are created // IDs are random, so they don't increment! boolean orderByNeeded = false; // make additional query to fetch the ids, applied the "maxResults" correctly if (isIdQueryForPaginationNeeded) { // we always have to order in case of a ID subquery for both queries! 
orderByNeeded = true; CriteriaQuery<Long> idCriteriaQuery = builder.createQuery(Long.class); final Root<T> idRoot = idCriteriaQuery.from(sourceQuery.getResultClass()); idCriteriaQuery = idCriteriaQuery.select(idRoot.get(Item.PROPERTY_ID)); // apply original where clause here, it will be indirectly applied to the original query using the fetched IDs if (whereClause != null) { idCriteriaQuery = idCriteriaQuery.where(whereClause); } // always apply the same order for all queries final TypedQuery<Long> idQuery = session.createQuery(idCriteriaQuery.orderBy(applyOrderBy(sourceQuery, builder, idRoot))); setPagination(idQuery, sourceQuery.getPage(), sourceQuery.getPageSize()); final List<Long> idsToSelect = idQuery.getResultList(); // only add where clause when there are actual IDs to select if (idsToSelect.size() > 0) { itemSelect = itemSelect.where(queryResultType.get(Item.PROPERTY_ID).in(idsToSelect)); } } else { if (whereClause != null) { itemSelect = itemSelect.where(whereClause); } // if we have a single query, we only need to order if pagination is used if (sourceQuery.getOrderBy().size() > 0) { orderByNeeded = true; } } if (orderByNeeded) { // always apply the order here again, even if using id sub-query! itemSelect = itemSelect.orderBy(applyOrderBy(sourceQuery, builder, queryResultType)); } final TypedQuery<T> query = session.createQuery(itemSelect); // only set these values if no fetch joins are used! 
// if we have fetch joins we just select by the ids that are fetched before using firstResult and maxResults if (!isIdQueryForPaginationNeeded) { setPagination(query, sourceQuery.getPage(), sourceQuery.getPageSize()); } setFetchSubGraphsHint(session, sourceQuery, query); setCacheSettings(session, sourceQuery, query); final Query<T> queryObj = ((Query<T>) query); // set proper access level setAccessLevel(sourceQuery, queryObj); final List<T> results = queryObj.getResultList(); return results; }); } /** * Generates the ORDER BY clause either for the {@link ModelQuery#getOrderBy()} or if empty for the default properties ({@link Item#PROPERTY_CREATED_AT} and * {@link Item#PROPERTY_ID}). * * @param sourceQuery * @param builder * @param root * @return the generated order by clause */ protected Order[] applyOrderBy(final ModelQuery<?> sourceQuery, CriteriaBuilder builder, Root<?> root) { final List<Order> orderBys = new ArrayList<>(); if (sourceQuery.getOrderBy().size() > 0) { for (SortOrder order : sourceQuery.getOrderBy()) { if (OrderDirection.ASC.equals(order.getDirection())) { orderBys.add(builder.asc(root.get(order.getColumnName()))); } else { orderBys.add(builder.desc(root.get(order.getColumnName()))); } } } else { orderBys.add(builder.asc(root.get(Item.PROPERTY_CREATED_AT))); orderBys.add(builder.asc(root.get(Item.PROPERTY_ID))); } return orderBys.toArray(new Order[orderBys.size()]); } /** {@inheritDoc} */ @Override public <T extends Item> void remove(final List<T> items) { bindSession(); transactionService.execute(() -> { for (final T item : items) { if (isAttached(item)) { getSession().remove(item); } else { remove(item.getClass(), item.getId()); } } return null; }); } /** {@inheritDoc} */ @Override public <T extends Item> void remove(final Class<T> type, final long id) { bindSession(); transactionService.execute(() -> { // TODO: improve // final String query = String.format("DELETE FROM %s WHERE id IN // (?id)", type.getSimpleName()); // em.createQuery(query, 
type).setParameter("id", id); final T item = getSession().find(type, id); getSession().remove(item); return null; }); } /** {@inheritDoc} */ @Override public void saveDataStorage() { bindSession(); getSession().flush(); } /** {@inheritDoc} */ @Override public void clearDataStorage() { Logger.warn("Clearing database not supported yet"); } @Override public void evictCaches() { bindSession(); getSession().clear(); } /** {@inheritDoc} */ @Override public <T extends Item> void initItem(final T item) { for (final Field field : ClassUtil.getFieldsWithAnnotation(item.getClass(), Property.class)) { Object instanceValue = ClassUtil.getField(item, field.getName(), true); if (instanceValue == null) { if (field.getType().isAssignableFrom(Set.class)) { instanceValue = new HashSet<>(); } else if (field.getType().isAssignableFrom(List.class) || field.getType().isAssignableFrom(Collection.class)) { instanceValue = new ArrayList<>(); } else if (field.getType().isAssignableFrom(Map.class)) { instanceValue = new HashMap<>(); } if (instanceValue != null) { ClassUtil.setField(item, field.getName(), instanceValue); } } } } /** {@inheritDoc} */ @Override public <T extends Item> void detach(final List<T> items) { bindSession(); for (final T item : items) { getSession().detach(item); } } /** {@inheritDoc} */ @Override public <T extends Item> boolean isAttached(final T item) { bindSession(); return getSession().contains(item); } @Override public <T extends Item> Optional<String> getTableName(Class<T> itemType) { bindSession(); return transactionService.execute(() -> { SessionImpl session = (SessionImpl) getSession(); final Optional<T> example = ClassUtil.instantiate(itemType); final EntityPersister persister = session.getEntityPersister(null, example.get()); if (persister instanceof AbstractEntityPersister) { AbstractEntityPersister persisterImpl = (AbstractEntityPersister) persister; String tableName = persisterImpl.getTableName(); String rootTableName = persisterImpl.getRootTableName(); 
return Optional.of(tableName); } else { throw new RuntimeException("Unexpected persister type; a subtype of AbstractEntityPersister expected."); } }); } public Session getSession() { final EntityManagerHolder holder = ((EntityManagerHolder) TransactionSynchronizationManager .getResource(entityManagerFactory)); if (holder != null) { if (Logger.isLogLevelEnabled(LogLevel.DEBUG)) { getSessionFactory().getStatistics().setStatisticsEnabled(true); } return holder.getEntityManager().unwrap(Session.class); } throw new IllegalStateException("Could not fetch persistence entity manager"); } protected void bindSession() { if (!TransactionSynchronizationManager.hasResource(entityManagerFactory)) { TransactionSynchronizationManager.bindResource(entityManagerFactory, new EntityManagerHolder(entityManagerFactory.createEntityManager())); } } /** {@inheritDoc} */ @Override public void unbindSession() { if (TransactionSynchronizationManager.hasResource(entityManagerFactory)) { final EntityManagerHolder emHolder = (EntityManagerHolder) TransactionSynchronizationManager .unbindResource(entityManagerFactory); EntityManagerFactoryUtils.closeEntityManager(emHolder.getEntityManager()); } else { throw new IllegalStateException("No entitiy manager factory found"); } } /** * <p> * Getter for the field <code>entityManagerFactory</code>. * </p> * * @return a {@link javax.persistence.EntityManagerFactory} object. */ public EntityManagerFactory getEntityManagerFactory() { return entityManagerFactory; } /** * <p> * getSessionFactory. * </p> * * @return a {@link SessionFactory} object. */ public SessionFactory getSessionFactory() { return entityManagerFactory.unwrap(SessionFactory.class); } public Statistics getStatistics() { return getSessionFactory().getStatistics(); } }
spot-core/src/main/java/io/spotnext/core/persistence/hibernate/impl/HibernatePersistenceService.java
package io.spotnext.core.persistence.hibernate.impl; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.persistence.CacheRetrieveMode; import javax.persistence.EntityGraph; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityNotFoundException; import javax.persistence.PersistenceException; import javax.persistence.PersistenceUnit; import javax.persistence.Subgraph; import javax.persistence.TransactionRequiredException; import javax.persistence.Tuple; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Order; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.validation.ConstraintViolationException; import javax.validation.ValidationException; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.math.NumberUtils; import org.hibernate.CacheMode; import org.hibernate.HibernateException; import org.hibernate.LockMode; import org.hibernate.ScrollableResults; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.internal.FetchingScrollableResultsImpl; import org.hibernate.internal.SessionImpl; import org.hibernate.persister.entity.AbstractEntityPersister; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.query.Query; import org.hibernate.stat.Statistics; import org.hibernate.tool.hbm2ddl.SchemaExport; import org.hibernate.tool.hbm2ddl.SchemaUpdate; import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; 
import org.hibernate.tool.schema.spi.SchemaManagementException; import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator; import org.hibernate.tool.schema.spi.SchemaValidator; import org.hibernate.transform.AliasToEntityMapResultTransformer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.DependsOn; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.orm.jpa.EntityManagerFactoryUtils; import org.springframework.orm.jpa.EntityManagerHolder; import org.springframework.transaction.TransactionException; import org.springframework.transaction.support.TransactionSynchronizationManager; //import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.spotnext.core.infrastructure.annotation.logging.Log; import io.spotnext.core.infrastructure.exception.ModelNotFoundException; import io.spotnext.core.infrastructure.exception.ModelSaveException; import io.spotnext.core.infrastructure.exception.UnknownTypeException; import io.spotnext.core.infrastructure.service.ConfigurationService; import io.spotnext.core.infrastructure.service.ValidationService; import io.spotnext.core.infrastructure.support.LogLevel; import io.spotnext.core.infrastructure.support.Logger; import io.spotnext.core.persistence.exception.ModelNotUniqueException; import io.spotnext.core.persistence.exception.QueryException; import io.spotnext.core.persistence.query.JpqlQuery; import io.spotnext.core.persistence.query.ModelQuery; import io.spotnext.core.persistence.query.QueryResult; import io.spotnext.core.persistence.query.SortOrder; import io.spotnext.core.persistence.query.SortOrder.OrderDirection; import io.spotnext.core.persistence.service.TransactionService; import io.spotnext.core.persistence.service.impl.AbstractPersistenceService; import 
io.spotnext.infrastructure.annotation.Property; import io.spotnext.infrastructure.type.Item; import io.spotnext.infrastructure.type.ItemTypePropertyDefinition; import io.spotnext.support.util.ClassUtil; import io.spotnext.support.util.MiscUtil; /** * <p> * HibernatePersistenceService class. * </p> * * @author mojo2012 * @version 1.0 * @since 1.0 */ @DependsOn("typeService") //@SuppressFBWarnings("BC_UNCONFIRMED_CAST_OF_RETURN_VALUE") public class HibernatePersistenceService extends AbstractPersistenceService { @Value("${hibernate.jdbc.batch_size:}") private int jdbcBatchSize = 100; protected MetadataExtractorIntegrator metadataIntegrator = MetadataExtractorIntegrator.INSTANCE; @PersistenceUnit protected EntityManagerFactory entityManagerFactory; protected TransactionService transactionService; @Autowired protected ValidationService validationService; /** * <p> * Constructor for HibernatePersistenceService. * </p> * * @param entityManagerFactory a {@link javax.persistence.EntityManagerFactory} object. * @param transactionService a {@link io.spotnext.core.persistence.service.TransactionService} object. * @param configurationService a {@link io.spotnext.infrastructure.service.ConfigurationService} object. * @param loggingService a {@link io.spotnext.infrastructure.service.LoggingService} object. */ @Autowired public HibernatePersistenceService(EntityManagerFactory entityManagerFactory, TransactionService transactionService, ConfigurationService configurationService) { this.entityManagerFactory = entityManagerFactory; this.transactionService = transactionService; this.configurationService = configurationService; if (configurationService.getBoolean("core.setup.typesystem.initialize", false)) { initializeTypeSystem(); } if (configurationService.getBoolean("core.setup.typesystem.update", false)) { updateTypeSystem(); } validateTypeSystem(); if (configurationService.getBoolean("cleantypesystem", false)) { Logger.info("Cleaning type system ... 
(not yet implemented)"); clearTypeSystem(); } Logger.info(String.format("Persistence service initialized")); } @Override public void initializeTypeSystem() { Logger.info("Initializing type system schema ..."); final SchemaExport schemaExport = new SchemaExport(); schemaExport.setHaltOnError(true); schemaExport.setFormat(true); schemaExport.setDelimiter(";"); schemaExport.setOutputFile("db-schema.sql"); try { // TODO will most likely fail, implement a pure JDBC "drop // database" approach? schemaExport.drop(EnumSet.of(TargetType.DATABASE), metadataIntegrator.getMetadata()); } catch (final Exception e) { Logger.warn("Could not drop type system schema."); } schemaExport.createOnly(EnumSet.of(TargetType.DATABASE), metadataIntegrator.getMetadata()); } @Override public void updateTypeSystem() { Logger.info("Updating type system schema ..."); final SchemaUpdate schemaExport = new SchemaUpdate(); schemaExport.setHaltOnError(true); schemaExport.setFormat(true); schemaExport.setDelimiter(";"); schemaExport.setOutputFile("db-schema.sql"); schemaExport.execute(EnumSet.of(TargetType.DATABASE), metadataIntegrator.getMetadata()); } @Override public void validateTypeSystem() { final SchemaManagementTool tool = metadataIntegrator.getServiceRegistry() .getService(SchemaManagementTool.class); try { final SchemaValidator validator = tool.getSchemaValidator(entityManagerFactory.getProperties()); validator.doValidation(metadataIntegrator.getMetadata(), SchemaManagementToolCoordinator .buildExecutionOptions(entityManagerFactory.getProperties(), ExceptionHandlerLoggedImpl.INSTANCE)); Logger.debug("Type system schema seems to be OK"); } catch (final SchemaManagementException e) { // currently hibernate throws a validation exception for float values that are being created as doubles ... 
// --- tail of a schema-initialization method whose start lies above this chunk ---
// see https://hibernate.atlassian.net/browse/HHH-8690
// so we hide that message in case we just did an initialization, otherwise it would look confusing in the logs
if (!configurationService.getBoolean("core.setup.typesystem.initialize", false)) {
    Logger.warn("Type system schema needs to be initialized/updated");
}
}
}

/**
 * Hook for subclasses to tear down type-system state; intentionally a no-op here.
 */
protected void clearTypeSystem() {
}

/**
 * Executes the given Hibernate {@link Query} and wraps the rows into a {@link QueryResult}.
 * <p>
 * When the source query requests a page size &gt; 0, a {@link ScrollableResults} cursor is used so
 * that the total row count can be determined in the same round trip (via {@code last()} /
 * {@code getRowNumber()}); otherwise {@code query.list()} is used and the total count is simply the
 * list size.
 *
 * @param sourceQuery carries page / pageSize used for scrolling and for the result metadata
 * @param query       the already-parameterized Hibernate query to run
 * @return the page of values plus paging metadata and total count
 */
private QueryResult executeQuery(JpqlQuery sourceQuery, Query query) {
    List values = new ArrayList<>();
    Integer totalCount = null;

    if (sourceQuery.getPageSize() > 0) {
        // convert the 1-based page number into a 0-based row offset
        int start = (sourceQuery.getPage() > 0 ? sourceQuery.getPage() - 1 : 0) * sourceQuery.getPageSize();
        ScrollableResults scrollResult = null;
        try {
            scrollResult = query.scroll();
            if (start > 0) {
                scrollResult.scroll(start);
            }
            do {
                Object value = scrollResult.get();
                // this should actually not happen, but it does ...
                // TODO: check and fix null result objects
                if (value != null) {
                    // scroll rows may come back as Object[]; only the first column is kept here
                    if (value.getClass().isArray()) {
                        Object[] valueArray = (Object[]) value;
                        if (valueArray.length > 0) {
                            values.add(valueArray[0]);
                        }
                    } else {
                        values.add(value);
                    }
                }
            } while (values.size() < sourceQuery.getPageSize() && scrollResult.next());

            // go to last row to get max rows
            scrollResult.last();
            totalCount = scrollResult.getRowNumber();
            // different implementations handle this either with a start index of 0 or 1 ...
            if (!(scrollResult instanceof FetchingScrollableResultsImpl)) {
                totalCount += 1;
            }
        } finally {
            // cursor must always be released, even when iteration throws
            MiscUtil.closeQuietly(scrollResult);
        }
    } else {
        // no paging requested: load everything in one go
        values = query.list();
        totalCount = values.size();
    }

    QueryResult result = new QueryResult(values, sourceQuery.getPage(), sourceQuery.getPageSize(),
            totalCount != null ? Long.valueOf(totalCount) : null);
    return result;
}

/**
 * {@inheritDoc}
 * <p>
 * Dispatches on the requested result class: {@link Item} subtypes and natively supported types are
 * executed as typed Hibernate queries; everything else goes through a {@link Tuple} query whose
 * rows are mapped onto the result class (constructor match first, then field injection by alias).
 * Update statements ({@code isExecuteUpdate()}) are executed via {@code executeUpdate()}.
 * Any non-{@link QueryException} failure is wrapped into a {@link QueryException}.
 */
// @SuppressFBWarnings("REC_CATCH_EXCEPTION")
@Override
public <T> QueryResult<T> query(final io.spotnext.core.persistence.query.JpqlQuery<T> sourceQuery) throws QueryException {
    bindSession();

    try {
        return transactionService.execute(() -> {
            QueryResult<T> results = null;
            final Session session = getSession();
            session.setDefaultReadOnly(sourceQuery.isReadOnly());

            // if this is an item type, we just load the entities
            // if it is a "primitive" natively supported type we can also
            // just let hibernate do the work
            if (Item.class.isAssignableFrom(sourceQuery.getResultClass())
                    || NATIVE_DATATYPES.contains(sourceQuery.getResultClass())) {

                Query<T> query = null;

                try {
                    query = session.createQuery(sourceQuery.getQuery(), sourceQuery.getResultClass());
                } catch (final Exception e) {
                    throw new QueryException("Could not parse query", e);
                }

                setAccessLevel(sourceQuery, query);
                setCacheSettings(session, sourceQuery, query);
                setFetchSubGraphsHint(session, sourceQuery, query);
                setParameters(sourceQuery.getParams(), query);
                // setPagination(query, sourceQuery.getPage(), sourceQuery.getPageSize());

                results = executeQuery(sourceQuery, query);
            } else {
                // otherwise we load each value into a list of tuples
                // in that case the selected columns need to be aliased in
                // case the given result type has no constructor that exactly matches the returned
                // columns' types, as otherwise we cannot map the row values to properties.

                // only try to load results if the result type is not Void
                if (sourceQuery.isExecuteUpdate()) {
                    final Query<Integer> query = session.createQuery(sourceQuery.getQuery());
                    setAccessLevel(sourceQuery, (Query<T>) query);
                    setParameters(sourceQuery.getParams(), query);

                    int resultCode = query.executeUpdate();
                    session.flush();
                    if (sourceQuery.isClearCaches()) {
                        session.clear();
                    }

                    // the affected-row count is only surfaced when the caller declared a result type
                    boolean returnTypeSpecified = !Void.class.isAssignableFrom(sourceQuery.getResultClass());
                    results = (QueryResult<T>) new QueryResult<Integer>(
                            returnTypeSpecified ? Arrays.asList(resultCode) : null, 0, 0, null);
                } else {
                    // fetch the temporary Tuple (!) result and convert it into the target type manually
                    final Query<Tuple> query = session.createQuery(sourceQuery.getQuery(), Tuple.class);
                    setAccessLevel(sourceQuery, (Query<T>) query);
                    setParameters(sourceQuery.getParams(), query);

                    if (Map.class.isAssignableFrom(sourceQuery.getResultClass())) {
                        // all selected columns must specify an alias, otherwise the column value would not appear in the map!
                        query.setResultTransformer(AliasToEntityMapResultTransformer.INSTANCE);
                        results = executeQuery(sourceQuery, query);
                    } else {
                        final QueryResult<Tuple> tempResults = (QueryResult<Tuple>) executeQuery(sourceQuery, query);
                        List<T> finalResults = new ArrayList<>();

                        // if the return type is Tuple, a tuple array is returned
                        // therefore we have to extract the first tuple first and use that as a base object
                        // if no return type is set, it is only a Tuple
                        for (final Object entry : tempResults.getResults()) {
                            // first try to create the pojo using a constructor
                            // that matches the result's column types
                            Tuple t = null;

                            if (entry != null && entry.getClass().isArray()) {
                                Object[] entryArray = ((Object[]) entry);
                                if (entryArray.length > 0) {
                                    t = (Tuple) entryArray[0];
                                }
                            } else if (entry instanceof Tuple) {
                                t = (Tuple) entry;
                            }

                            // rows that yield no tuple are silently skipped
                            if (t == null) {
                                continue;
                            }

                            final Tuple tupleEntry = t;
                            final List<Object> values = t.getElements().stream().map(e -> tupleEntry.get(e))
                                    .collect(Collectors.toList());

                            if (Tuple.class.isAssignableFrom(sourceQuery.getResultClass())) {
                                // if the only object in the tuple is an item, we can directly return it, otherwise we just return a list of values
                                if (values != null && values.size() == 1 && values.get(0) instanceof Item) {
                                    finalResults.add((T) values.get(0));
                                } else {
                                    finalResults.add((T) values);
                                }
                            } else {
                                Optional<T> pojo = ClassUtil.instantiate(sourceQuery.getResultClass(), values.toArray());

                                // if the POJO can't be instantiated, we try to
                                // create it manually and inject the data using
                                // reflection for this to work, each selected column
                                // has to have the same alias as the pojo's
                                // property!
                                if (!pojo.isPresent()) {
                                    final Optional<T> obj = ClassUtil.instantiate(sourceQuery.getResultClass());

                                    if (obj.isPresent()) {
                                        final Object o = obj.get();
                                        t.getElements().stream()
                                                .forEach(el -> ClassUtil.setField(o, el.getAlias(), tupleEntry.get(el.getAlias())));
                                    }

                                    pojo = obj;
                                }

                                if (pojo.isPresent()) {
                                    finalResults.add(pojo.get());
                                } else {
                                    throw new InstantiationException(String.format("Could not instantiate result type '%s'",
                                            sourceQuery.getResultClass()));
                                }
                            }
                        }

                        results = new QueryResult<>(finalResults, sourceQuery.getPage(), sourceQuery.getPageSize(),
                                tempResults.getTotalCount());
                    }
                }
            }

            return results;
        });
    } catch (final QueryException e) {
        // already the advertised exception type — rethrow untouched
        throw e;
    } catch (final Exception e) {
        throw new QueryException(String.format("Could not execute query '%s'", sourceQuery.getQuery()), e);
    }
}

/**
 * Propagates the source query's read-only flag onto the Hibernate query.
 */
private <T, Q extends io.spotnext.core.persistence.query.Query<T>> void setAccessLevel(Q sourceQuery, Query<T> query) {
    query.setReadOnly(sourceQuery.isReadOnly());
}

/**
 * Translates the source query's cachable/ignoreCache flags into a Hibernate {@link CacheMode} on
 * the session and a JPA {@code javax.persistence.cache.retrieveMode} hint on the query.
 * Default (cachable + not ignoring) stays {@code CacheMode.NORMAL}.
 */
protected <T, Q extends io.spotnext.core.persistence.query.Query<T>> void setCacheSettings(final Session session,
        final Q sourceQuery, final TypedQuery<T> query) {
    CacheMode cacheMode = CacheMode.NORMAL;

    if (!sourceQuery.isCachable() && !sourceQuery.isIgnoreCache()) {
        cacheMode = CacheMode.GET;
    } else if (!sourceQuery.isCachable() && sourceQuery.isIgnoreCache()) {
        cacheMode = CacheMode.IGNORE;
    } else if (sourceQuery.isCachable() && sourceQuery.isIgnoreCache()) {
        cacheMode = CacheMode.PUT;
    }

    session.setCacheMode(cacheMode);
    // query.setHint("org.hibernate.cacheable", sourceQuery.isCachable());
    query.setHint("javax.persistence.cache.retrieveMode",
            sourceQuery.isIgnoreCache() ? CacheRetrieveMode.BYPASS : CacheRetrieveMode.USE);
}

/**
 * Attaches a {@code javax.persistence.loadgraph} hint so relation properties are eagerly loaded.
 * With {@code isEagerFetchRelations()} every Item-typed / relation property of the result type is
 * added; otherwise only the explicitly listed {@code eagerFetchRelationProperties}. Non-item
 * result classes are ignored (logged at debug).
 *
 * @throws UnknownTypeException if the result class cannot be resolved by the type service
 */
protected <T, Q extends io.spotnext.core.persistence.query.Query<T>> void setFetchSubGraphsHint(
        final Session session, final Q sourceQuery, final TypedQuery<T> query) throws UnknownTypeException {

    // TODO what about fetchgraph?
    final List<String> fetchSubGraphs = new ArrayList<>();

    if (sourceQuery.isEagerFetchRelations()) {
        final Map<String, ItemTypePropertyDefinition> props = typeService
                .getItemTypeProperties(typeService.getTypeCodeForClass((Class<Item>) sourceQuery.getResultClass()));

        // add all properties
        final List<String> validProperties = props.values().stream() //
                .filter(p -> Item.class.isAssignableFrom(p.getReturnType()) || p.getRelationDefinition() != null) //
                .map(p -> p.getName()) //
                .collect(Collectors.toList());
        fetchSubGraphs.addAll(validProperties);
    } else if (sourceQuery.getEagerFetchRelationProperties().size() > 0) {
        fetchSubGraphs.addAll(sourceQuery.getEagerFetchRelationProperties());
    }

    if (fetchSubGraphs.size() > 0) {
        if (!Item.class.isAssignableFrom(sourceQuery.getResultClass())) {
            Logger.debug("Fetch sub graphs can only be used for item queries - ignoring");
            return;
        }

        final EntityGraph<T> graph = session.createEntityGraph(sourceQuery.getResultClass());

        for (final String subgraph : fetchSubGraphs) {
            final Subgraph<?> itemGraph = graph.addSubgraph(subgraph);
        }

        query.setHint("javax.persistence.loadgraph", graph);
    }
}

/**
 * Binds the given parameters onto the query. Keys that parse as numbers are treated as positional
 * parameters, all others as named parameters.
 */
protected <T> void setParameters(final Map<String, Object> params, final Query<T> query) {
    for (final Map.Entry<String, Object> entry : params.entrySet()) {
        if (NumberUtils.isCreatable(entry.getKey())) {
            query.setParameter(Integer.parseInt(entry.getKey()), entry.getValue());
        } else {
            query.setParameter(entry.getKey(), entry.getValue());
        }
    }
}

/**
 * Applies firstResult/maxResults paging; {@code page} is 1-based, a pageSize of 0 disables paging.
 */
protected void setPagination(final javax.persistence.Query query, final int page, final int pageSize) {
    if (pageSize > 0) {
        query.setFirstResult((page > 0 ? page - 1 : 0) * pageSize);
        query.setMaxResults(pageSize);
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Persists the given items in one transaction, flushing in JDBC-batch-sized chunks, then evicts
 * them from the session. Validation failures are converted to readable messages; any other
 * failure surfaces as {@link ModelSaveException} carrying the root-cause message.
 */
@Log(logLevel = LogLevel.DEBUG, measureExecutionTime = true, executionTimeThreshold = 100)
@Override
public <T extends Item> void save(final List<T> items) throws ModelSaveException, ModelNotUniqueException {
    bindSession();

    try {
        transactionService.execute(() -> {
            final Session session = getSession();
            int i = 0;

            try {
                for (final T item : items) {
                    // version -1 marks a never-persisted item — see Item (assumption; TODO confirm)
                    if (item.getVersion() == -1) {
                        session.save(item);
                    } else {
                        session.saveOrUpdate(item);
                    }

                    // use same as the JDBC batch size
                    if (i >= jdbcBatchSize && i % jdbcBatchSize == 0) {
                        // flush a batch of inserts and release memory:
                        session.flush();
                    }
                    i++;
                }

                // this is needed, otherwise saved entities are not
                session.flush();
                items.stream().forEach(o -> session.evict(o));
            } catch (final ValidationException e) {
                final String message;
                if (e instanceof ConstraintViolationException) {
                    message = validationService
                            .convertToReadableMessage(((ConstraintViolationException) e).getConstraintViolations());
                } else {
                    message = e.getMessage();
                }
                throw new ModelSaveException(message, e);
            } catch (final DataIntegrityViolationException | TransactionRequiredException | IllegalArgumentException e) {
                throw new ModelSaveException("Could not save given items: " + e.getMessage(), e);
            } catch (final Exception e) {
                // surface the deepest cause's message — the outer wrapper is usually generic
                final Throwable rootCause = ExceptionUtils.getRootCause(e);
                final String rootCauseMessage = rootCause != null ? rootCause.getMessage() : e.getMessage();
                throw new ModelSaveException(rootCauseMessage, e);
            }

            return null;
        });
    } catch (final TransactionException e) {
        // unwrap the domain exceptions thrown inside the transaction callback
        if (e.getCause() instanceof ModelSaveException) {
            throw (ModelSaveException) e.getCause();
        } else if (e.getCause() instanceof ModelNotUniqueException) {
            throw (ModelNotUniqueException) e.getCause();
        } else {
            throw e;
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Loads a single item by id; with {@code returnProxy} a lazy proxy is returned
 * ({@code Session.load}), otherwise the initialized entity ({@code Session.get}).
 */
@Override
public <T extends Item> T load(final Class<T> type, final long id, boolean returnProxy) throws ModelNotFoundException {
    bindSession();

    try {
        return transactionService.execute(() -> {
            T item = returnProxy ? getSession().load(type, id) : getSession().get(type, id);
            return item;
        });
    } catch (final TransactionException e) {
        if (e.getCause() instanceof ModelNotFoundException) {
            throw (ModelNotFoundException) e.getCause();
        } else {
            throw e;
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Re-reads the state of each item from the database; items are (re-)attached to the session first.
 */
@Override
public <T extends Item> void refresh(final List<T> items) throws ModelNotFoundException {
    bindSession();

    try {
        transactionService.execute(() -> {
            for (final T item : items) {
                try {
                    if (attach(item)) {
                        getSession().refresh(item, LockMode.NONE);
                    }
                } catch (DataIntegrityViolationException | HibernateException | TransactionRequiredException
                        | IllegalArgumentException | EntityNotFoundException e) {
                    throw new ModelNotFoundException(
                            String.format("Could not refresh item with id=%s.", item.getId()), e);
                }
            }

            return null;
        });
    } catch (final TransactionException e) {
        if (e.getCause() instanceof ModelNotFoundException) {
            throw (ModelNotFoundException) e.getCause();
        } else {
            throw e;
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Attaches the given item to the current session.
 *
 * @return {@code true} if the item was already attached, {@code false} if it had to be loaded.
 */
@Override
public <T extends Item> boolean attach(final T item) throws ModelNotFoundException {
    bindSession();

    try {
        // ignore unpersisted or already attached items
        if (isAttached(item)) {
            return true;
        }

        getSession().load(item, item.getId());
    } catch (HibernateException | TransactionRequiredException | IllegalArgumentException | EntityNotFoundException e) {
        throw new ModelNotFoundException(
                String.format("Could not attach item with id=%s to the current session.", item.getId()), e);
    }

    return false;
}

/**
 * {@inheritDoc}
 * <p>
 * Criteria-API based loading with optional equality filters, ordering and paging. When eager
 * fetch (sub)graphs are combined with paging, a separate id-only query is executed first because
 * Hibernate cannot paginate together with FETCH JOINs; the main query then selects by those ids.
 */
@Override
public <T extends Item> List<T> load(final ModelQuery<T> sourceQuery) {
    bindSession();

    return transactionService.execute(() -> {
        final Session session = getSession();
        final CriteriaBuilder builder = session.getCriteriaBuilder();
        final CriteriaQuery<T> cq = builder.createQuery(sourceQuery.getResultClass());
        final Root<T> queryResultType = cq.from(sourceQuery.getResultClass());
        CriteriaQuery<T> itemSelect = cq.select(queryResultType);

        // check if we have to perform a separate query for pagination
        // hibernate can't handle pagination together with FETCH JOINs!
        boolean isIdQueryForPaginationNeeded = sourceQuery.getPageSize() > 0
                && (sourceQuery.getEagerFetchRelationProperties().size() > 0 || sourceQuery.isEagerFetchRelations());

        boolean isSearchParametersDefined = MapUtils.isNotEmpty(sourceQuery.getSearchParameters());
        Predicate whereClause = null;

        if (isSearchParametersDefined) {
            // AND-combine one equality predicate per search parameter
            whereClause = builder.conjunction();

            for (final Map.Entry<String, Object> entry : sourceQuery.getSearchParameters().entrySet()) {
                if (entry.getValue() instanceof Item && !((Item) entry.getValue()).isPersisted()) {
                    throw new PersistenceException(String.format(
                            "Passing non-persisted item as search param '%s' is not supported.", entry.getKey()));
                }

                whereClause = builder.and(whereClause,
                        builder.equal(queryResultType.get(entry.getKey()), entry.getValue()));
            }
        }

        // always order by last created date and THEN ID, so we have a consistent ordering, even if new items are created
        // IDs are random, so they don't increment!
        boolean orderByNeeded = false;

        // make additional query to fetch the ids, applied the "maxResults" correctly
        if (isIdQueryForPaginationNeeded) {
            // we always have to order in case of a ID subquery for both queries!
            orderByNeeded = true;

            CriteriaQuery<Long> idCriteriaQuery = builder.createQuery(Long.class);
            final Root<T> idRoot = idCriteriaQuery.from(sourceQuery.getResultClass());
            idCriteriaQuery = idCriteriaQuery.select(idRoot.get(Item.PROPERTY_ID));

            // apply original where clause here, it will be indirectly applied to the original query using the fetched IDs
            if (whereClause != null) {
                idCriteriaQuery = idCriteriaQuery.where(whereClause);
            }

            // always apply the same order for all queries
            final TypedQuery<Long> idQuery = session
                    .createQuery(idCriteriaQuery.orderBy(applyOrderBy(sourceQuery, builder, idRoot)));
            setPagination(idQuery, sourceQuery.getPage(), sourceQuery.getPageSize());
            final List<Long> idsToSelect = idQuery.getResultList();

            // only add where clause when there are actual IDs to select
            if (idsToSelect.size() > 0) {
                itemSelect = itemSelect.where(queryResultType.get(Item.PROPERTY_ID).in(idsToSelect));
            }
        } else {
            if (whereClause != null) {
                itemSelect = itemSelect.where(whereClause);
            }

            // if we have a single query, we only need to order if pagination is used
            if (sourceQuery.getOrderBy().size() > 0) {
                orderByNeeded = true;
            }
        }

        if (orderByNeeded) {
            // always apply the order here again, even if using id sub-query!
            itemSelect = itemSelect.orderBy(applyOrderBy(sourceQuery, builder, queryResultType));
        }

        final TypedQuery<T> query = session.createQuery(itemSelect);

        // only set these values if no fetch joins are used!
        // if we have fetch joins we just select by the ids that are fetched before using firstResult and maxResults
        if (!isIdQueryForPaginationNeeded) {
            setPagination(query, sourceQuery.getPage(), sourceQuery.getPageSize());
        }

        setFetchSubGraphsHint(session, sourceQuery, query);
        setCacheSettings(session, sourceQuery, query);

        final Query<T> queryObj = ((Query<T>) query);
        // set proper access level
        setAccessLevel(sourceQuery, queryObj);

        final List<T> results = queryObj.getResultList();

        return results;
    });
}

/**
 * Generates the ORDER BY clause either for the {@link ModelQuery#getOrderBy()} or if empty for the default properties ({@link Item#PROPERTY_CREATED_AT} and
 * {@link Item#PROPERTY_ID}).
 *
 * @param sourceQuery the query whose sort orders (if any) are translated
 * @param builder     criteria builder used to create the {@link Order} instances
 * @param root        query root the ordered paths are resolved against
 * @return the generated order by clause
 */
protected Order[] applyOrderBy(final ModelQuery<?> sourceQuery, CriteriaBuilder builder, Root<?> root) {
    final List<Order> orderBys = new ArrayList<>();

    if (sourceQuery.getOrderBy().size() > 0) {
        for (SortOrder order : sourceQuery.getOrderBy()) {
            if (OrderDirection.ASC.equals(order.getDirection())) {
                orderBys.add(builder.asc(root.get(order.getColumnName())));
            } else {
                orderBys.add(builder.desc(root.get(order.getColumnName())));
            }
        }
    } else {
        // default: stable ordering by creation time, then id
        orderBys.add(builder.asc(root.get(Item.PROPERTY_CREATED_AT)));
        orderBys.add(builder.asc(root.get(Item.PROPERTY_ID)));
    }

    return orderBys.toArray(new Order[orderBys.size()]);
}

/**
 * {@inheritDoc}
 * <p>
 * Removes the given (attached) items inside a transaction.
 */
@Override
public <T extends Item> void remove(final List<T> items) {
    bindSession();

    transactionService.execute(() -> {
        for (final T item : items) {
            getSession().remove(item);
        }
        return null;
    });
}

/**
 * {@inheritDoc}
 * <p>
 * Loads the item by id first and then removes it, so detached references can be deleted too.
 */
@Override
public <T extends Item> void remove(final Class<T> type, final long id) {
    bindSession();

    transactionService.execute(() -> {
        // TODO: improve
        // final String query = String.format("DELETE FROM %s WHERE id IN
        // (?id)", type.getSimpleName());
        // em.createQuery(query, type).setParameter("id", id);
        final T item = getSession().find(type, id);
        getSession().remove(item);
        return null;
    });
}

/**
 * {@inheritDoc}
 * <p>
 * Flushes all pending changes of the current session to the database.
 */
@Override
public void saveDataStorage() {
    bindSession();
    getSession().flush();
}

/** {@inheritDoc} */
@Override
public void clearDataStorage() {
    Logger.warn("Clearing database not supported yet");
}

/**
 * Clears the first-level cache by detaching every entity from the current session.
 */
@Override
public void evictCaches() {
    bindSession();
    getSession().clear();
}

/**
 * {@inheritDoc}
 * <p>
 * Initializes all {@code null} {@link Property}-annotated collection/map fields of the item with
 * empty mutable instances ({@link HashSet}, {@link ArrayList}, {@link HashMap}).
 */
@Override
public <T extends Item> void initItem(final T item) {
    for (final Field field : ClassUtil.getFieldsWithAnnotation(item.getClass(), Property.class)) {
        Object instanceValue = ClassUtil.getField(item, field.getName(), true);

        if (instanceValue == null) {
            if (field.getType().isAssignableFrom(Set.class)) {
                instanceValue = new HashSet<>();
            } else if (field.getType().isAssignableFrom(List.class)
                    || field.getType().isAssignableFrom(Collection.class)) {
                instanceValue = new ArrayList<>();
            } else if (field.getType().isAssignableFrom(Map.class)) {
                instanceValue = new HashMap<>();
            }

            if (instanceValue != null) {
                ClassUtil.setField(item, field.getName(), instanceValue);
            }
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Detaches the given items from the current session (changes are no longer tracked).
 */
@Override
public <T extends Item> void detach(final List<T> items) {
    bindSession();

    for (final T item : items) {
        getSession().detach(item);
    }
}

/** {@inheritDoc} */
@Override
public <T extends Item> boolean isAttached(final T item) {
    bindSession();
    return getSession().contains(item);
}

/**
 * Resolves the database table name the given item type is mapped to, via Hibernate's
 * {@link AbstractEntityPersister}. Requires the type to be instantiable with a no-arg constructor.
 */
@Override
public <T extends Item> Optional<String> getTableName(Class<T> itemType) {
    bindSession();

    return transactionService.execute(() -> {
        SessionImpl session = (SessionImpl) getSession();
        final Optional<T> example = ClassUtil.instantiate(itemType);
        final EntityPersister persister = session.getEntityPersister(null, example.get());

        if (persister instanceof AbstractEntityPersister) {
            AbstractEntityPersister persisterImpl = (AbstractEntityPersister) persister;
            String tableName = persisterImpl.getTableName();
            String rootTableName = persisterImpl.getRootTableName();
            return Optional.of(tableName);
        } else {
            throw new RuntimeException("Unexpected persister type; a subtype of AbstractEntityPersister expected.");
        }
    });
}

/**
 * Returns the Hibernate {@link Session} bound to the current thread's transaction resources.
 * Also enables Hibernate statistics when debug logging is active.
 *
 * @throws IllegalStateException if no entity manager is bound (call {@link #bindSession()} first)
 */
public Session getSession() {
    final EntityManagerHolder holder = ((EntityManagerHolder) TransactionSynchronizationManager
            .getResource(entityManagerFactory));

    if (holder != null) {
        if (Logger.isLogLevelEnabled(LogLevel.DEBUG)) {
            getSessionFactory().getStatistics().setStatisticsEnabled(true);
        }
        return holder.getEntityManager().unwrap(Session.class);
    }

    throw new IllegalStateException("Could not fetch persistence entity manager");
}

/**
 * Binds a fresh {@link EntityManagerHolder} to the current thread if none is bound yet
 * (idempotent — repeated calls are no-ops).
 */
protected void bindSession() {
    if (!TransactionSynchronizationManager.hasResource(entityManagerFactory)) {
        TransactionSynchronizationManager.bindResource(entityManagerFactory,
                new EntityManagerHolder(entityManagerFactory.createEntityManager()));
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Unbinds and closes the thread-bound entity manager.
 *
 * @throws IllegalStateException if no entity manager is currently bound
 */
@Override
public void unbindSession() {
    if (TransactionSynchronizationManager.hasResource(entityManagerFactory)) {
        final EntityManagerHolder emHolder = (EntityManagerHolder) TransactionSynchronizationManager
                .unbindResource(entityManagerFactory);
        EntityManagerFactoryUtils.closeEntityManager(emHolder.getEntityManager());
    } else {
        throw new IllegalStateException("No entitiy manager factory found");
    }
}

/**
 * <p>
 * Getter for the field <code>entityManagerFactory</code>.
 * </p>
 *
 * @return a {@link javax.persistence.EntityManagerFactory} object.
 */
public EntityManagerFactory getEntityManagerFactory() {
    return entityManagerFactory;
}

/**
 * <p>
 * getSessionFactory.
 * </p>
 *
 * @return a {@link SessionFactory} object.
 */
public SessionFactory getSessionFactory() {
    return entityManagerFactory.unwrap(SessionFactory.class);
}

/**
 * @return the Hibernate {@link Statistics} of the underlying session factory.
 */
public Statistics getStatistics() {
    return getSessionFactory().getStatistics();
}
}
fixed remove detached items
spot-core/src/main/java/io/spotnext/core/persistence/hibernate/impl/HibernatePersistenceService.java
fixed remove detached items
Java
apache-2.0
92dd7073922e5bf772a71a77a104a49bf0928893
0
anishek/hive,vineetgarg02/hive,alanfgates/hive,nishantmonu51/hive,vineetgarg02/hive,alanfgates/hive,nishantmonu51/hive,sankarh/hive,nishantmonu51/hive,vineetgarg02/hive,lirui-apache/hive,sankarh/hive,jcamachor/hive,anishek/hive,lirui-apache/hive,b-slim/hive,nishantmonu51/hive,sankarh/hive,alanfgates/hive,b-slim/hive,alanfgates/hive,jcamachor/hive,lirui-apache/hive,vineetgarg02/hive,b-slim/hive,anishek/hive,jcamachor/hive,anishek/hive,b-slim/hive,sankarh/hive,alanfgates/hive,vineetgarg02/hive,lirui-apache/hive,vineetgarg02/hive,b-slim/hive,nishantmonu51/hive,sankarh/hive,jcamachor/hive,anishek/hive,jcamachor/hive,b-slim/hive,sankarh/hive,anishek/hive,jcamachor/hive,jcamachor/hive,nishantmonu51/hive,nishantmonu51/hive,lirui-apache/hive,vineetgarg02/hive,nishantmonu51/hive,anishek/hive,alanfgates/hive,lirui-apache/hive,nishantmonu51/hive,jcamachor/hive,alanfgates/hive,anishek/hive,alanfgates/hive,b-slim/hive,anishek/hive,sankarh/hive,lirui-apache/hive,lirui-apache/hive,jcamachor/hive,vineetgarg02/hive,b-slim/hive,lirui-apache/hive,alanfgates/hive,sankarh/hive,sankarh/hive,vineetgarg02/hive,b-slim/hive
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.plan.mapping;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.DriverFactory;
import org.apache.hadoop.hive.ql.IDriver;
import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.plan.Statistics;
import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper;
import org.apache.hadoop.hive.ql.plan.mapper.StatsSources;
import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper.EquivGroup;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.stats.OperatorStats;
import org.apache.hadoop.hive.ql.stats.OperatorStatsReaderHook;
import org.apache.hive.testutils.HiveTestEnvSetup;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;

/**
 * Tests Hive's query re-execution ("reoptimize") feature: runtime operator statistics are
 * collected on a first (failing) execution and reused when the query is re-planned.
 * Uses small tables tu/tv/tw created once per class.
 */
@Ignore("Flaky. Will be re-enabled by HIVE-19697")
public class TestReOptimization {

  @ClassRule
  public static HiveTestEnvSetup env_setup = new HiveTestEnvSetup();

  @Rule
  public TestRule methodRule = env_setup.getMethodRule();

  /** Creates and populates the fixture tables used by all tests. */
  @BeforeClass
  public static void beforeClass() throws Exception {
    IDriver driver = createDriver("");
    dropTables(driver);
    String[] cmds = {
        // @formatter:off
        "create table tu(id_uv int,id_uw int,u int)",
        "create table tv(id_uv int,v int)",
        "create table tw(id_uw int,w int)",
        "insert into tu values (10,10,10),(1,1,1),(2,2,2),(3,3,3),(4,4,4),(5,5,5),(6,6,6)",
        "insert into tv values (10,10),(1,1),(2,2),(3,3)",
        "insert into tw values (10,10),(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(9,9)",
        // @formatter:on
    };
    for (String cmd : cmds) {
      int ret = driver.run(cmd).getResponseCode();
      assertEquals("Checking command success", 0, ret);
    }
  }

  @AfterClass
  public static void afterClass() throws Exception {
    IDriver driver = createDriver("");
    dropTables(driver);
  }

  /** Runtime stats are process-global; reset them so tests don't leak into each other. */
  @After
  public void after() {
    StatsSources.clearGlobalStats();
  }

  public static void dropTables(IDriver driver) throws Exception {
    String[] tables = new String[] {"tu", "tv", "tw" };
    for (String t : tables) {
      int ret = driver.run("drop table if exists " + t).getResponseCode();
      assertEquals("Checking command success", 0, ret);
    }
  }

  /**
   * Runs the query and returns the plan mapper of the (possibly re-executed) plan.
   * Throws the driver response itself on a non-zero response code.
   */
  private PlanMapper getMapperForQuery(IDriver driver, String query) throws CommandProcessorResponse {
    CommandProcessorResponse res = driver.run(query);
    if (res.getResponseCode() != 0) {
      throw res;
    }
    PlanMapper pm0 = driver.getContext().getPlanMapper();
    return pm0;
  }

  /**
   * After a re-execution triggered by assert_true_oom, the filter operators' planned statistics
   * must equal the runtime record counts captured during the first run.
   */
  @Test
  public void testStatsAreSetInReopt() throws Exception {
    IDriver driver = createDriver("overlay,reoptimize");
    String query = "select assert_true_oom(${hiveconf:zzz} > sum(u*v))" +
        " from tu join tv on (tu.id_uv=tv.id_uv)" +
        " where u<10 and v>1";

    PlanMapper pm = getMapperForQuery(driver, query);
    Iterator<EquivGroup> itG = pm.iterateGroups();
    int checkedOperators = 0;
    while (itG.hasNext()) {
      EquivGroup g = itG.next();
      List<FilterOperator> fos = g.getAll(FilterOperator.class);
      List<OperatorStats> oss = g.getAll(OperatorStats.class);
      // FIXME: oss seems to contain duplicates

      if (fos.size() > 0 && oss.size() > 0) {
        fos.sort(TestCounterMapping.OPERATOR_ID_COMPARATOR.reversed());

        FilterOperator fo = fos.get(0);
        OperatorStats os = oss.get(0);

        Statistics stats = fo.getStatistics();
        assertEquals(os.getOutputRecords(), stats.getNumRows());

        if (!(os.getOutputRecords() == 3 || os.getOutputRecords() == 6)) {
          fail("nonexpected number of records produced");
        }
        checkedOperators++;
      }
    }
    assertEquals(2, checkedOperators);
  }

  /** An OOM raised by the mapjoin must trigger a (successful) re-execution. */
  @Test
  public void testReExecutedIfMapJoinError() throws Exception {
    IDriver driver = createDriver("overlay,reoptimize");
    String query = "select assert_true_oom(${hiveconf:zzz}>sum(1)) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1";
    getMapperForQuery(driver, query);
  }

  /** A plain assertion error is not retried; the driver response is rethrown. */
  @Test(expected = CommandProcessorResponse.class)
  public void testNotReExecutedIfAssertionError() throws Exception {
    IDriver driver = createDriver("reoptimize");
    String query = "select assert_true(${hiveconf:zzz}>sum(1)) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1";
    getMapperForQuery(driver, query);
    assertEquals(1, driver.getContext().getExecutionIndex());
  }

  /** persistence=query: stats must not survive beyond the single query. */
  @Test
  public void testStatCachingQuery() throws Exception {
    HiveConf conf = env_setup.getTestCtx().hiveConf;
    conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "query");
    conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true);

    checkRuntimeStatsReuse(false, false, false);
  }

  /** persistence=hiveserver: stats are reused within the HS2 process, but not across processes. */
  @Test
  public void testStatCachingHS2() throws Exception {
    HiveConf conf = env_setup.getTestCtx().hiveConf;
    conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "hiveserver");
    conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true);

    checkRuntimeStatsReuse(true, true, false);
  }

  /** persistence=metastore: stats survive even a simulated HS2 restart. */
  @Test
  public void testStatCachingMetaStore() throws Exception {
    HiveConf conf = env_setup.getTestCtx().hiveConf;
    conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "metastore");
    conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true);

    checkRuntimeStatsReuse(true, true, true);
  }

  /**
   * Runs the same query at three scopes (same session, new session, after clearing global stats —
   * i.e. a "new HS2 instance") and asserts at which scope runtime stats are still reused.
   */
  private void checkRuntimeStatsReuse(
      boolean expectInSameSession,
      boolean expectNewHs2Session,
      boolean expectHs2Instance) throws CommandProcessorResponse {
    {
      // same session
      IDriver driver = createDriver("reoptimize");
      checkUsageOfRuntimeStats(driver, false);
      driver = DriverFactory.newDriver(env_setup.getTestCtx().hiveConf);
      checkUsageOfRuntimeStats(driver, expectInSameSession);
    }
    {
      // new session
      IDriver driver = createDriver("reoptimize");
      checkUsageOfRuntimeStats(driver, expectNewHs2Session);
    }
    StatsSources.clearGlobalStats();
    {
      // new hs2 instance session
      IDriver driver = createDriver("reoptimize");
      checkUsageOfRuntimeStats(driver, expectHs2Instance);
    }
  }

  /** Asserts whether the join operator's statistics were sourced from runtime stats. */
  @SuppressWarnings("rawtypes")
  private void checkUsageOfRuntimeStats(IDriver driver, boolean expected) throws CommandProcessorResponse {
    String query = "select sum(u) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1";
    PlanMapper pm = getMapperForQuery(driver, query);
    assertEquals(1, driver.getContext().getExecutionIndex());
    List<CommonJoinOperator> allJoin = pm.getAll(CommonJoinOperator.class);
    CommonJoinOperator join = allJoin.iterator().next();
    Statistics joinStat = join.getStatistics();

    assertEquals("expectation of the usage of runtime stats doesn't match", expected, joinStat.isRuntimeStats());
  }

  /** "explain reoptimization" must show both plans: two TS operators, two with runtime stats. */
  @Test
  public void testExplainSupport() throws Exception {
    IDriver driver = createDriver("overlay,reoptimize");
    String query = "explain reoptimization select 1 from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1";
    getMapperForQuery(driver, query);
    List<String> res = new ArrayList<>();
    List<String> res1 = new ArrayList<>();
    while (driver.getResults(res1)) {
      res.addAll(res1);
    }

    assertEquals("2TS", 2, res.stream().filter(line -> line.contains("TS_")).count());
    assertEquals("2TS(runtime)", 2,
        res.stream().filter(line -> line.contains("TS") && line.contains("runtime")).count());
  }

  /**
   * Builds a driver with re-execution enabled and the given comma-separated strategy list;
   * "zzz" / "reexec.overlay.zzz" drive the assert_true_oom trigger used by the tests.
   */
  private static IDriver createDriver(String strategies) {
    HiveConf conf = env_setup.getTestCtx().hiveConf;

    conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ENABLED, true);
    conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false);
    conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STRATEGIES, strategies);
    conf.setBoolVar(ConfVars.HIVE_EXPLAIN_USER, true);
    conf.set("zzz", "1");
    conf.set("reexec.overlay.zzz", "2000");
    // conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    HiveConf.setVar(conf, HiveConf.ConfVars.POSTEXECHOOKS, OperatorStatsReaderHook.class.getName());
    SessionState.start(conf);

    IDriver driver = DriverFactory.newDriver(conf);
    return driver;
  }

}
ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestReOptimization.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.plan.mapping; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.DriverFactory; import org.apache.hadoop.hive.ql.IDriver; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.plan.Statistics; import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper; import org.apache.hadoop.hive.ql.plan.mapper.StatsSources; import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper.EquivGroup; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.stats.OperatorStats; import org.apache.hadoop.hive.ql.stats.OperatorStatsReaderHook; import org.apache.hive.testutils.HiveTestEnvSetup; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import 
org.junit.rules.TestRule; public class TestReOptimization { @ClassRule public static HiveTestEnvSetup env_setup = new HiveTestEnvSetup(); @Rule public TestRule methodRule = env_setup.getMethodRule(); @BeforeClass public static void beforeClass() throws Exception { IDriver driver = createDriver(""); dropTables(driver); String[] cmds = { // @formatter:off "create table tu(id_uv int,id_uw int,u int)", "create table tv(id_uv int,v int)", "create table tw(id_uw int,w int)", "insert into tu values (10,10,10),(1,1,1),(2,2,2),(3,3,3),(4,4,4),(5,5,5),(6,6,6)", "insert into tv values (10,10),(1,1),(2,2),(3,3)", "insert into tw values (10,10),(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(9,9)", // @formatter:on }; for (String cmd : cmds) { int ret = driver.run(cmd).getResponseCode(); assertEquals("Checking command success", 0, ret); } } @AfterClass public static void afterClass() throws Exception { IDriver driver = createDriver(""); dropTables(driver); } @After public void after() { StatsSources.clearGlobalStats(); } public static void dropTables(IDriver driver) throws Exception { String[] tables = new String[] {"tu", "tv", "tw" }; for (String t : tables) { int ret = driver.run("drop table if exists " + t).getResponseCode(); assertEquals("Checking command success", 0, ret); } } private PlanMapper getMapperForQuery(IDriver driver, String query) throws CommandProcessorResponse { CommandProcessorResponse res = driver.run(query); if (res.getResponseCode() != 0) { throw res; } PlanMapper pm0 = driver.getContext().getPlanMapper(); return pm0; } @Test public void testStatsAreSetInReopt() throws Exception { IDriver driver = createDriver("overlay,reoptimize"); String query = "select assert_true_oom(${hiveconf:zzz} > sum(u*v))" + " from tu join tv on (tu.id_uv=tv.id_uv)" + " where u<10 and v>1"; PlanMapper pm = getMapperForQuery(driver, query); Iterator<EquivGroup> itG = pm.iterateGroups(); int checkedOperators = 0; while (itG.hasNext()) { EquivGroup g = itG.next(); 
List<FilterOperator> fos = g.getAll(FilterOperator.class); List<OperatorStats> oss = g.getAll(OperatorStats.class); // FIXME: oss seems to contain duplicates if (fos.size() > 0 && oss.size() > 0) { fos.sort(TestCounterMapping.OPERATOR_ID_COMPARATOR.reversed()); FilterOperator fo = fos.get(0); OperatorStats os = oss.get(0); Statistics stats = fo.getStatistics(); assertEquals(os.getOutputRecords(), stats.getNumRows()); if (!(os.getOutputRecords() == 3 || os.getOutputRecords() == 6)) { fail("nonexpected number of records produced"); } checkedOperators++; } } assertEquals(2, checkedOperators); } @Test public void testReExecutedIfMapJoinError() throws Exception { IDriver driver = createDriver("overlay,reoptimize"); String query = "select assert_true_oom(${hiveconf:zzz}>sum(1)) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1"; getMapperForQuery(driver, query); } @Test(expected = CommandProcessorResponse.class) public void testNotReExecutedIfAssertionError() throws Exception { IDriver driver = createDriver("reoptimize"); String query = "select assert_true(${hiveconf:zzz}>sum(1)) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1"; getMapperForQuery(driver, query); assertEquals(1, driver.getContext().getExecutionIndex()); } @Test public void testStatCachingQuery() throws Exception { HiveConf conf = env_setup.getTestCtx().hiveConf; conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "query"); conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true); checkRuntimeStatsReuse(false, false, false); } @Test public void testStatCachingHS2() throws Exception { HiveConf conf = env_setup.getTestCtx().hiveConf; conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "hiveserver"); conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true); checkRuntimeStatsReuse(true, true, false); } @Test public void testStatCachingMetaStore() throws Exception { HiveConf conf = env_setup.getTestCtx().hiveConf; 
conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "metastore"); conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true); checkRuntimeStatsReuse(true, true, true); } private void checkRuntimeStatsReuse( boolean expectInSameSession, boolean expectNewHs2Session, boolean expectHs2Instance) throws CommandProcessorResponse { { // same session IDriver driver = createDriver("reoptimize"); checkUsageOfRuntimeStats(driver, false); driver = DriverFactory.newDriver(env_setup.getTestCtx().hiveConf); checkUsageOfRuntimeStats(driver, expectInSameSession); } { // new session IDriver driver = createDriver("reoptimize"); checkUsageOfRuntimeStats(driver, expectNewHs2Session); } StatsSources.clearGlobalStats(); { // new hs2 instance session IDriver driver = createDriver("reoptimize"); checkUsageOfRuntimeStats(driver, expectHs2Instance); } } @SuppressWarnings("rawtypes") private void checkUsageOfRuntimeStats(IDriver driver, boolean expected) throws CommandProcessorResponse { String query = "select sum(u) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1"; PlanMapper pm = getMapperForQuery(driver, query); assertEquals(1, driver.getContext().getExecutionIndex()); List<CommonJoinOperator> allJoin = pm.getAll(CommonJoinOperator.class); CommonJoinOperator join = allJoin.iterator().next(); Statistics joinStat = join.getStatistics(); assertEquals("expectation of the usage of runtime stats doesn't match", expected, joinStat.isRuntimeStats()); } @Test public void testExplainSupport() throws Exception { IDriver driver = createDriver("overlay,reoptimize"); String query = "explain reoptimization select 1 from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1"; getMapperForQuery(driver, query); List<String> res = new ArrayList<>(); List<String> res1 = new ArrayList<>(); while (driver.getResults(res1)) { res.addAll(res1); } assertEquals("2TS", 2, res.stream().filter(line -> line.contains("TS_")).count()); assertEquals("2TS(runtime)", 2, 
res.stream().filter(line -> line.contains("TS") && line.contains("runtime")).count()); } private static IDriver createDriver(String strategies) { HiveConf conf = env_setup.getTestCtx().hiveConf; conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ENABLED, true); conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false); conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STRATEGIES, strategies); conf.setBoolVar(ConfVars.HIVE_EXPLAIN_USER, true); conf.set("zzz", "1"); conf.set("reexec.overlay.zzz", "2000"); // conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); HiveConf.setVar(conf, HiveConf.ConfVars.POSTEXECHOOKS, OperatorStatsReaderHook.class.getName()); SessionState.start(conf); IDriver driver = DriverFactory.newDriver(conf); return driver; } }
HIVE-19697: TestReOptimization#testStatCachingMetaStore is flaky (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)
ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestReOptimization.java
HIVE-19697: TestReOptimization#testStatCachingMetaStore is flaky (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)
Java
apache-2.0
2fc99ade2d6499c656641049fae6c871f1705617
0
s13372/SORCER,mwsobol/SORCER,mwsobol/SORCER,s13372/SORCER,dudzislaw/SORCER,dudzislaw/SORCER,s8537/SORCER,dudzislaw/SORCER,s13372/SORCER,s8537/SORCER,s8537/SORCER,mwsobol/SORCER
/* * Copyright 2010 the original author or authors. * Copyright 2010 SorcerSoft.org. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sorcer.core; import java.io.File; import java.io.Serializable; import java.util.StringTokenizer; import net.jini.core.lookup.ServiceID; /** * Sometimes it's critical to identify an exertion associated with a provider. * For example, a broker or monitor might like to have an identifier associated * with the exertion which identifies not only the exertion but also the * provider who's the owner. * * In other words, this is a kind of cookie which uniquely associates a provider * with a particular exertion. * * This object is immutable. */ public class UEID implements Serializable { private static final long serialVersionUID = 2366387989071307201L; public final ServiceID sid; public final String exertionID; public UEID(ServiceID sid, String exertionID) { if (exertionID == null) throw new NullPointerException("exertionID cannot be NULL"); this.sid = sid; this.exertionID = exertionID; } public boolean equals(Object o) { if (!(o instanceof UEID)) return false; UEID other = (UEID) o; return other.sid.equals(sid) && other.exertionID.equals(exertionID); } public String asString() { String sidString = (sid == null) ? 
"|" : sid.getLeastSignificantBits() + "|" + sid.getMostSignificantBits(); return sidString + "|" + exertionID; } public static UEID fromString(String ueid) { String[] str = toArray(ueid); ServiceID sid = null; try { if (!"".equals(str[0]) && !"".equals(str[0])) sid = new ServiceID(Long.parseLong(str[0]), Long.parseLong(str[1])); } catch (Exception e) { e.printStackTrace(); return null; } return new UEID(sid, str[2]); } public static String[] toArray(String arg) { StringTokenizer tok = new StringTokenizer(arg, " |" + File.pathSeparator); String[] array = new String[tok.countTokens()]; int i = 0; while (tok.hasMoreTokens()) { array[i] = tok.nextToken(); i++; } return (array); } }
core/sorcer-dl/src/main/java/sorcer/core/UEID.java
/* * Copyright 2010 the original author or authors. * Copyright 2010 SorcerSoft.org. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sorcer.core; import java.io.File; import java.util.StringTokenizer; import net.jini.core.lookup.ServiceID; /** * Sometimes it's critical to identify an exertion associated with a provider. * For example, a broker or monitor might like to have an identifier associated * with the exertion which identifies not only the exertion but also the * provider who's the owner. * * In other words, this is a kind of cookie which uniquely associates a provider * with a particular exertion. * * This object is immutable. */ public class UEID { public final ServiceID sid; public final String exertionID; public UEID(ServiceID sid, String exertionID) { if (exertionID == null) throw new NullPointerException("exertionID cannot be NULL"); this.sid = sid; this.exertionID = exertionID; } public boolean equals(Object o) { if (!(o instanceof UEID)) return false; UEID other = (UEID) o; return other.sid.equals(sid) && other.exertionID.equals(exertionID); } public String asString() { String sidString = (sid == null) ? 
"|" : sid.getLeastSignificantBits() + "|" + sid.getMostSignificantBits(); return sidString + "|" + exertionID; } public static UEID fromString(String ueid) { String[] str = toArray(ueid); ServiceID sid = null; try { if (!"".equals(str[0]) && !"".equals(str[0])) sid = new ServiceID(Long.parseLong(str[0]), Long.parseLong(str[1])); } catch (Exception e) { e.printStackTrace(); return null; } return new UEID(sid, str[2]); } public static String[] toArray(String arg) { StringTokenizer tok = new StringTokenizer(arg, " |" + File.pathSeparator); String[] array = new String[tok.countTokens()]; int i = 0; while (tok.hasMoreTokens()) { array[i] = tok.nextToken(); i++; } return (array); } }
UEID made serializable - used in ExertMonitor
core/sorcer-dl/src/main/java/sorcer/core/UEID.java
UEID made serializable - used in ExertMonitor
Java
apache-2.0
a9c6230c3c273c55c9c96fe44e6b18240f45a4b6
0
aalmiray/ikonli,aalmiray/ikonli
/* * SPDX-License-Identifier: Apache-2.0 * * Copyright 2015-2020 Andres Almiray * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kordamp.ikonli.bpmn; import org.kordamp.ikonli.Ikon; /** * @author Andres Almiray */ public enum Bpmn implements Ikon { AD_HOC_MARKER("bpmn-ad-hoc-marker", '\ue855'), BPMN_IO("bpmn-bpmn-io", '\ue831'), BUSINESS_RULE("bpmn-business-rule", '\ue818'), BUSINESS_RULE_TASK("bpmn-business-rule-task", '\ue827'), CALL_ACTIVITY("bpmn-call-activity", '\ue82a'), COMPENSATION_MARKER("bpmn-compensation-marker", '\ue858'), CONDITIONAL_FLOW("bpmn-conditional-flow", '\ue802'), CONNECTION("bpmn-connection", '\ue810'), CONNECTION_MULTI("bpmn-connection-multi", '\ue860'), DATA_INPUT("bpmn-data-input", '\ue866'), DATA_OBJECT("bpmn-data-object", '\ue84b'), DATA_OUTPUT("bpmn-data-output", '\ue867'), DATA_STORE("bpmn-data-store", '\ue84e'), DEFAULT_FLOW("bpmn-default-flow", '\ue803'), END_EVENT_CANCEL("bpmn-end-event-cancel", '\ue811'), END_EVENT_COMPENSATION("bpmn-end-event-compensation", '\ue81e'), END_EVENT_ERROR("bpmn-end-event-error", '\ue822'), END_EVENT_ESCALATION("bpmn-end-event-escalation", '\ue82f'), END_EVENT_LINK("bpmn-end-event-link", '\ue83b'), END_EVENT_MESSAGE("bpmn-end-event-message", '\ue83a'), END_EVENT_MULTIPLE("bpmn-end-event-multiple", '\ue839'), END_EVENT_NONE("bpmn-end-event-none", '\ue838'), END_EVENT_SIGNAL("bpmn-end-event-signal", '\ue837'), END_EVENT_TERMINATE("bpmn-end-event-terminate", '\ue836'), 
EVENT_SUBPROCESS_EXPANDED("bpmn-event-subprocess-expanded", '\ue85d'), GATEWAY_COMPLEX("bpmn-gateway-complex", '\ue832'), GATEWAY_EVENTBASED("bpmn-gateway-eventbased", '\ue833'), GATEWAY_NONE("bpmn-gateway-none", '\ue834'), GATEWAY_OR("bpmn-gateway-or", '\ue835'), GATEWAY_PARALLEL("bpmn-gateway-parallel", '\ue804'), GATEWAY_XOR("bpmn-gateway-xor", '\ue80f'), GROUP("bpmn-group", '\ue869'), HAND_TOOL("bpmn-hand-tool", '\ue868'), INTERMEDIATE_EVENT_CATCH_CANCEL("bpmn-intermediate-event-catch-cancel", '\ue805'), INTERMEDIATE_EVENT_CATCH_COMPENSATION("bpmn-intermediate-event-catch-compensation", '\ue80e'), INTERMEDIATE_EVENT_CATCH_CONDITION("bpmn-intermediate-event-catch-condition", '\ue812'), INTERMEDIATE_EVENT_CATCH_ERROR("bpmn-intermediate-event-catch-error", '\ue81d'), INTERMEDIATE_EVENT_CATCH_ESCALATION("bpmn-intermediate-event-catch-escalation", '\ue823'), INTERMEDIATE_EVENT_CATCH_LINK("bpmn-intermediate-event-catch-link", '\ue82e'), INTERMEDIATE_EVENT_CATCH_MESSAGE("bpmn-intermediate-event-catch-message", '\ue83c'), INTERMEDIATE_EVENT_CATCH_MULTIPLE("bpmn-intermediate-event-catch-multiple", '\ue847'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_CONDITION("bpmn-intermediate-event-catch-non-interrupting-condition", '\ue85b'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_ESCALATION("bpmn-intermediate-event-catch-non-interrupting-escalation", '\ue848'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_MESSAGE("bpmn-intermediate-event-catch-non-interrupting-message", '\ue806'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_MULTIPLE("bpmn-intermediate-event-catch-non-interrupting-multiple", '\ue851'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_PARALLEL_MULTIPLE("bpmn-intermediate-event-catch-non-interrupting-parallel-multiple", '\ue813'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_SIGNAL("bpmn-intermediate-event-catch-non-interrupting-signal", '\ue852'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_TIMER("bpmn-intermediate-event-catch-non-interrupting-timer", '\ue80c'), 
INTERMEDIATE_EVENT_CATCH_PARALLEL_MULTIPLE("bpmn-intermediate-event-catch-parallel-multiple", '\ue80d'), INTERMEDIATE_EVENT_CATCH_SIGNAL("bpmn-intermediate-event-catch-signal", '\ue81c'), INTERMEDIATE_EVENT_CATCH_TIMER("bpmn-intermediate-event-catch-timer", '\ue824'), INTERMEDIATE_EVENT_NONE("bpmn-intermediate-event-none", '\ue82d'), INTERMEDIATE_EVENT_THROW_COMPENSATION("bpmn-intermediate-event-throw-compensation", '\ue83d'), INTERMEDIATE_EVENT_THROW_ESCALATION("bpmn-intermediate-event-throw-escalation", '\ue846'), INTERMEDIATE_EVENT_THROW_LINK("bpmn-intermediate-event-throw-link", '\ue849'), INTERMEDIATE_EVENT_THROW_MESSAGE("bpmn-intermediate-event-throw-message", '\ue850'), INTERMEDIATE_EVENT_THROW_MULTIPLE("bpmn-intermediate-event-throw-multiple", '\ue853'), INTERMEDIATE_EVENT_THROW_SIGNAL("bpmn-intermediate-event-throw-signal", '\ue85a'), LANE("bpmn-lane", '\ue861'), LANE_DIVIDE_THREE("bpmn-lane-divide-three", '\ue864'), LANE_DIVIDE_TWO("bpmn-lane-divide-two", '\ue865'), LANE_INSERT_ABOVE("bpmn-lane-insert-above", '\ue863'), LANE_INSERT_BELOW("bpmn-lane-insert-below", '\ue85e'), LASSO_TOOL("bpmn-lasso-tool", '\ue862'), LOOP_MARKER("bpmn-loop-marker", '\ue809'), MANUAL("bpmn-manual", '\ue828'), MANUAL_TASK("bpmn-manual-task", '\ue840'), PARALLEL_MI_MARKER("bpmn-parallel-mi-marker", '\ue80a'), PARTICIPANT("bpmn-participant", '\ue85c'), RECEIVE("bpmn-receive", '\ue829'), RECEIVE_TASK("bpmn-receive-task", '\ue843'), SCREW_WRENCH("bpmn-screw-wrench", '\ue800'), SCRIPT("bpmn-script", '\ue83f'), SCRIPT_TASK("bpmn-script-task", '\ue84c'), SEND("bpmn-send", '\ue841'), SEND_TASK("bpmn-send-task", '\ue84d'), SEQUENTIAL_MI_MARKER("bpmn-sequential-mi-marker", '\ue816'), SERVICE("bpmn-service", '\ue842'), SERVICE_TASK("bpmn-service-task", '\ue856'), SPACE_TOOL("bpmn-space-tool", '\ue85f'), START_EVENT_COMPENSATION("bpmn-start-event-compensation", '\ue807'), START_EVENT_CONDITION("bpmn-start-event-condition", '\ue814'), START_EVENT_ERROR("bpmn-start-event-error", '\ue81b'), 
START_EVENT_ESCALATION("bpmn-start-event-escalation", '\ue825'), START_EVENT_MESSAGE("bpmn-start-event-message", '\ue82c'), START_EVENT_MULTIPLE("bpmn-start-event-multiple", '\ue83e'), START_EVENT_NONE("bpmn-start-event-none", '\ue845'), START_EVENT_NON_INTERRUPTING_CONDITION("bpmn-start-event-non-interrupting-condition", '\ue84a'), START_EVENT_NON_INTERRUPTING_ESCALATION("bpmn-start-event-non-interrupting-escalation", '\ue84f'), START_EVENT_NON_INTERRUPTING_MESSAGE("bpmn-start-event-non-interrupting-message", '\ue854'), START_EVENT_NON_INTERRUPTING_MULTIPLE("bpmn-start-event-non-interrupting-multiple", '\ue859'), START_EVENT_NON_INTERRUPTING_PARALLEL_MULTIPLE("bpmn-start-event-non-interrupting-parallel-multiple", '\ue808'), START_EVENT_NON_INTERRUPTING_SIGNAL("bpmn-start-event-non-interrupting-signal", '\ue80b'), START_EVENT_NON_INTERRUPTING_TIMER("bpmn-start-event-non-interrupting-timer", '\ue815'), START_EVENT_PARALLEL_MULTIPLE("bpmn-start-event-parallel-multiple", '\ue81a'), START_EVENT_SIGNAL("bpmn-start-event-signal", '\ue826'), START_EVENT_TIMER("bpmn-start-event-timer", '\ue82b'), SUBPROCESS_COLLAPSED("bpmn-subprocess-collapsed", '\ue81f'), SUBPROCESS_EXPANDED("bpmn-subprocess-expanded", '\ue820'), SUB_PROCESS_MARKER("bpmn-sub-process-marker", '\ue819'), TASK("bpmn-task", '\ue821'), TASK_NONE("bpmn-task-none", '\ue857'), TEXT_ANNOTATION("bpmn-text-annotation", '\ue830'), TRANSACTION("bpmn-transaction", '\ue8c4'), TRASH("bpmn-trash", '\ue801'), USER("bpmn-user", '\ue844'), USER_TASK("bpmn-user-task", '\ue817'); public static Bpmn findByDescription(String description) { for (Bpmn font : values()) { if (font.getDescription().equals(description)) { return font; } } throw new IllegalArgumentException("Icon description '" + description + "' is invalid!"); } private String description; private int code; Bpmn(String description, int code) { this.description = description; this.code = code; } @Override public String getDescription() { return description; } @Override 
public int getCode() { return code; } }
icon-packs/ikonli-bpmn-pack/src/main/java/org/kordamp/ikonli/bpmn/Bpmn.java
package org.kordamp.ikonli.bpmn; import org.kordamp.ikonli.Ikon; /** * @author Andres Almiray */ public enum Bpmn implements Ikon { AD_HOC_MARKER("bpmn-ad-hoc-marker", '\ue855'), BPMN_IO("bpmn-bpmn-io", '\ue831'), BUSINESS_RULE("bpmn-business-rule", '\ue818'), BUSINESS_RULE_TASK("bpmn-business-rule-task", '\ue827'), CALL_ACTIVITY("bpmn-call-activity", '\ue82a'), COMPENSATION_MARKER("bpmn-compensation-marker", '\ue858'), CONDITIONAL_FLOW("bpmn-conditional-flow", '\ue802'), CONNECTION("bpmn-connection", '\ue810'), CONNECTION_MULTI("bpmn-connection-multi", '\ue860'), DATA_INPUT("bpmn-data-input", '\ue866'), DATA_OBJECT("bpmn-data-object", '\ue84b'), DATA_OUTPUT("bpmn-data-output", '\ue867'), DATA_STORE("bpmn-data-store", '\ue84e'), DEFAULT_FLOW("bpmn-default-flow", '\ue803'), END_EVENT_CANCEL("bpmn-end-event-cancel", '\ue811'), END_EVENT_COMPENSATION("bpmn-end-event-compensation", '\ue81e'), END_EVENT_ERROR("bpmn-end-event-error", '\ue822'), END_EVENT_ESCALATION("bpmn-end-event-escalation", '\ue82f'), END_EVENT_LINK("bpmn-end-event-link", '\ue83b'), END_EVENT_MESSAGE("bpmn-end-event-message", '\ue83a'), END_EVENT_MULTIPLE("bpmn-end-event-multiple", '\ue839'), END_EVENT_NONE("bpmn-end-event-none", '\ue838'), END_EVENT_SIGNAL("bpmn-end-event-signal", '\ue837'), END_EVENT_TERMINATE("bpmn-end-event-terminate", '\ue836'), EVENT_SUBPROCESS_EXPANDED("bpmn-event-subprocess-expanded", '\ue85d'), GATEWAY_COMPLEX("bpmn-gateway-complex", '\ue832'), GATEWAY_EVENTBASED("bpmn-gateway-eventbased", '\ue833'), GATEWAY_NONE("bpmn-gateway-none", '\ue834'), GATEWAY_OR("bpmn-gateway-or", '\ue835'), GATEWAY_PARALLEL("bpmn-gateway-parallel", '\ue804'), GATEWAY_XOR("bpmn-gateway-xor", '\ue80f'), GROUP("bpmn-group", '\ue869'), HAND_TOOL("bpmn-hand-tool", '\ue868'), INTERMEDIATE_EVENT_CATCH_CANCEL("bpmn-intermediate-event-catch-cancel", '\ue805'), INTERMEDIATE_EVENT_CATCH_COMPENSATION("bpmn-intermediate-event-catch-compensation", '\ue80e'), 
INTERMEDIATE_EVENT_CATCH_CONDITION("bpmn-intermediate-event-catch-condition", '\ue812'), INTERMEDIATE_EVENT_CATCH_ERROR("bpmn-intermediate-event-catch-error", '\ue81d'), INTERMEDIATE_EVENT_CATCH_ESCALATION("bpmn-intermediate-event-catch-escalation", '\ue823'), INTERMEDIATE_EVENT_CATCH_LINK("bpmn-intermediate-event-catch-link", '\ue82e'), INTERMEDIATE_EVENT_CATCH_MESSAGE("bpmn-intermediate-event-catch-message", '\ue83c'), INTERMEDIATE_EVENT_CATCH_MULTIPLE("bpmn-intermediate-event-catch-multiple", '\ue847'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_CONDITION("bpmn-intermediate-event-catch-non-interrupting-condition", '\ue85b'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_ESCALATION("bpmn-intermediate-event-catch-non-interrupting-escalation", '\ue848'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_MESSAGE("bpmn-intermediate-event-catch-non-interrupting-message", '\ue806'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_MULTIPLE("bpmn-intermediate-event-catch-non-interrupting-multiple", '\ue851'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_PARALLEL_MULTIPLE("bpmn-intermediate-event-catch-non-interrupting-parallel-multiple", '\ue813'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_SIGNAL("bpmn-intermediate-event-catch-non-interrupting-signal", '\ue852'), INTERMEDIATE_EVENT_CATCH_NON_INTERRUPTING_TIMER("bpmn-intermediate-event-catch-non-interrupting-timer", '\ue80c'), INTERMEDIATE_EVENT_CATCH_PARALLEL_MULTIPLE("bpmn-intermediate-event-catch-parallel-multiple", '\ue80d'), INTERMEDIATE_EVENT_CATCH_SIGNAL("bpmn-intermediate-event-catch-signal", '\ue81c'), INTERMEDIATE_EVENT_CATCH_TIMER("bpmn-intermediate-event-catch-timer", '\ue824'), INTERMEDIATE_EVENT_NONE("bpmn-intermediate-event-none", '\ue82d'), INTERMEDIATE_EVENT_THROW_COMPENSATION("bpmn-intermediate-event-throw-compensation", '\ue83d'), INTERMEDIATE_EVENT_THROW_ESCALATION("bpmn-intermediate-event-throw-escalation", '\ue846'), INTERMEDIATE_EVENT_THROW_LINK("bpmn-intermediate-event-throw-link", '\ue849'), 
INTERMEDIATE_EVENT_THROW_MESSAGE("bpmn-intermediate-event-throw-message", '\ue850'), INTERMEDIATE_EVENT_THROW_MULTIPLE("bpmn-intermediate-event-throw-multiple", '\ue853'), INTERMEDIATE_EVENT_THROW_SIGNAL("bpmn-intermediate-event-throw-signal", '\ue85a'), LANE("bpmn-lane", '\ue861'), LANE_DIVIDE_THREE("bpmn-lane-divide-three", '\ue864'), LANE_DIVIDE_TWO("bpmn-lane-divide-two", '\ue865'), LANE_INSERT_ABOVE("bpmn-lane-insert-above", '\ue863'), LANE_INSERT_BELOW("bpmn-lane-insert-below", '\ue85e'), LASSO_TOOL("bpmn-lasso-tool", '\ue862'), LOOP_MARKER("bpmn-loop-marker", '\ue809'), MANUAL("bpmn-manual", '\ue828'), MANUAL_TASK("bpmn-manual-task", '\ue840'), PARALLEL_MI_MARKER("bpmn-parallel-mi-marker", '\ue80a'), PARTICIPANT("bpmn-participant", '\ue85c'), RECEIVE("bpmn-receive", '\ue829'), RECEIVE_TASK("bpmn-receive-task", '\ue843'), SCREW_WRENCH("bpmn-screw-wrench", '\ue800'), SCRIPT("bpmn-script", '\ue83f'), SCRIPT_TASK("bpmn-script-task", '\ue84c'), SEND("bpmn-send", '\ue841'), SEND_TASK("bpmn-send-task", '\ue84d'), SEQUENTIAL_MI_MARKER("bpmn-sequential-mi-marker", '\ue816'), SERVICE("bpmn-service", '\ue842'), SERVICE_TASK("bpmn-service-task", '\ue856'), SPACE_TOOL("bpmn-space-tool", '\ue85f'), START_EVENT_COMPENSATION("bpmn-start-event-compensation", '\ue807'), START_EVENT_CONDITION("bpmn-start-event-condition", '\ue814'), START_EVENT_ERROR("bpmn-start-event-error", '\ue81b'), START_EVENT_ESCALATION("bpmn-start-event-escalation", '\ue825'), START_EVENT_MESSAGE("bpmn-start-event-message", '\ue82c'), START_EVENT_MULTIPLE("bpmn-start-event-multiple", '\ue83e'), START_EVENT_NONE("bpmn-start-event-none", '\ue845'), START_EVENT_NON_INTERRUPTING_CONDITION("bpmn-start-event-non-interrupting-condition", '\ue84a'), START_EVENT_NON_INTERRUPTING_ESCALATION("bpmn-start-event-non-interrupting-escalation", '\ue84f'), START_EVENT_NON_INTERRUPTING_MESSAGE("bpmn-start-event-non-interrupting-message", '\ue854'), 
START_EVENT_NON_INTERRUPTING_MULTIPLE("bpmn-start-event-non-interrupting-multiple", '\ue859'), START_EVENT_NON_INTERRUPTING_PARALLEL_MULTIPLE("bpmn-start-event-non-interrupting-parallel-multiple", '\ue808'), START_EVENT_NON_INTERRUPTING_SIGNAL("bpmn-start-event-non-interrupting-signal", '\ue80b'), START_EVENT_NON_INTERRUPTING_TIMER("bpmn-start-event-non-interrupting-timer", '\ue815'), START_EVENT_PARALLEL_MULTIPLE("bpmn-start-event-parallel-multiple", '\ue81a'), START_EVENT_SIGNAL("bpmn-start-event-signal", '\ue826'), START_EVENT_TIMER("bpmn-start-event-timer", '\ue82b'), SUBPROCESS_COLLAPSED("bpmn-subprocess-collapsed", '\ue81f'), SUBPROCESS_EXPANDED("bpmn-subprocess-expanded", '\ue820'), SUB_PROCESS_MARKER("bpmn-sub-process-marker", '\ue819'), TASK("bpmn-task", '\ue821'), TASK_NONE("bpmn-task-none", '\ue857'), TEXT_ANNOTATION("bpmn-text-annotation", '\ue830'), TRANSACTION("bpmn-transaction", '\ue8c4'), TRASH("bpmn-trash", '\ue801'), USER("bpmn-user", '\ue844'), USER_TASK("bpmn-user-task", '\ue817'); public static Bpmn findByDescription(String description) { for (Bpmn font : values()) { if (font.getDescription().equals(description)) { return font; } } throw new IllegalArgumentException("Icon description '" + description + "' is invalid!"); } private String description; private int code; Bpmn(String description, int code) { this.description = description; this.code = code; } @Override public String getDescription() { return description; } @Override public int getCode() { return code; } }
Fix build breakage
icon-packs/ikonli-bpmn-pack/src/main/java/org/kordamp/ikonli/bpmn/Bpmn.java
Fix build breakage
Java
apache-2.0
1b40139f19dde23d66127b3ac5c85caf763b4a18
0
AliaksandrShuhayeu/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,EcoleKeine/pentaho-kettle,pavel-sakun/pentaho-kettle,HiromuHota/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,stepanovdg/pentaho-kettle,brosander/pentaho-kettle,codek/pentaho-kettle,aminmkhan/pentaho-kettle,ma459006574/pentaho-kettle,kurtwalker/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,matthewtckr/pentaho-kettle,flbrino/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,GauravAshara/pentaho-kettle,mdamour1976/pentaho-kettle,nicoben/pentaho-kettle,sajeetharan/pentaho-kettle,alina-ipatina/pentaho-kettle,eayoungs/pentaho-kettle,emartin-pentaho/pentaho-kettle,cjsonger/pentaho-kettle,mbatchelor/pentaho-kettle,HiromuHota/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,roboguy/pentaho-kettle,pentaho/pentaho-kettle,brosander/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,graimundo/pentaho-kettle,EcoleKeine/pentaho-kettle,eayoungs/pentaho-kettle,stevewillcock/pentaho-kettle,emartin-pentaho/pentaho-kettle,yshakhau/pentaho-kettle,nanata1115/pentaho-kettle,nanata1115/pentaho-kettle,marcoslarsen/pentaho-kettle,codek/pentaho-kettle,mkambol/pentaho-kettle,e-cuellar/pentaho-kettle,SergeyTravin/pentaho-kettle,wseyler/pentaho-kettle,yshakhau/pentaho-kettle,brosander/pentaho-kettle,EcoleKeine/pentaho-kettle,ddiroma/pentaho-kettle,bmorrise/pentaho-kettle,birdtsai/pentaho-kettle,pedrofvteixeira/pentaho-kettle,alina-ipatina/pentaho-kettle,e-cuellar/pentaho-kettle,stepanovdg/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,zlcnju/kettle,eayoungs/pentaho-kettle,matthewtckr/pentaho-kettle,nantunes/pentaho-kettle,aminmkhan/pentaho-kettle,pminutillo/pentaho-kettle,Advent51/pentaho-kettle,sajeetharan/pentaho-kettle,mbatchelor/pentaho-kettle,pymjer/pentaho-kettle,birdtsai/pentaho-kettle,Advent51/pentaho-kettle,roboguy/pentaho-kettle,tkafalas/pentaho-kettle,ccaspanello/pentaho-kettle,DFieldFL/pentaho-kettle,EcoleKeine/pentaho-kettle,lgrill-pentaho/pentaho-kettle,e-cuellar/pentaho-kettle,Hiromu
Hota/pentaho-kettle,graimundo/pentaho-kettle,rmansoor/pentaho-kettle,rmansoor/pentaho-kettle,ivanpogodin/pentaho-kettle,pminutillo/pentaho-kettle,kurtwalker/pentaho-kettle,alina-ipatina/pentaho-kettle,pedrofvteixeira/pentaho-kettle,skofra0/pentaho-kettle,CapeSepias/pentaho-kettle,ViswesvarSekar/pentaho-kettle,airy-ict/pentaho-kettle,dkincade/pentaho-kettle,pminutillo/pentaho-kettle,kurtwalker/pentaho-kettle,codek/pentaho-kettle,drndos/pentaho-kettle,gretchiemoran/pentaho-kettle,pavel-sakun/pentaho-kettle,denisprotopopov/pentaho-kettle,tmcsantos/pentaho-kettle,stevewillcock/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,YuryBY/pentaho-kettle,mbatchelor/pentaho-kettle,matthewtckr/pentaho-kettle,MikhailHubanau/pentaho-kettle,YuryBY/pentaho-kettle,jbrant/pentaho-kettle,birdtsai/pentaho-kettle,gretchiemoran/pentaho-kettle,emartin-pentaho/pentaho-kettle,birdtsai/pentaho-kettle,mbatchelor/pentaho-kettle,dkincade/pentaho-kettle,akhayrutdinov/pentaho-kettle,emartin-pentaho/pentaho-kettle,jbrant/pentaho-kettle,ivanpogodin/pentaho-kettle,aminmkhan/pentaho-kettle,drndos/pentaho-kettle,ViswesvarSekar/pentaho-kettle,pedrofvteixeira/pentaho-kettle,pymjer/pentaho-kettle,sajeetharan/pentaho-kettle,SergeyTravin/pentaho-kettle,kurtwalker/pentaho-kettle,pavel-sakun/pentaho-kettle,HiromuHota/pentaho-kettle,nanata1115/pentaho-kettle,stevewillcock/pentaho-kettle,lgrill-pentaho/pentaho-kettle,ddiroma/pentaho-kettle,zlcnju/kettle,SergeyTravin/pentaho-kettle,yshakhau/pentaho-kettle,nicoben/pentaho-kettle,bmorrise/pentaho-kettle,denisprotopopov/pentaho-kettle,denisprotopopov/pentaho-kettle,pentaho/pentaho-kettle,ccaspanello/pentaho-kettle,matthewtckr/pentaho-kettle,skofra0/pentaho-kettle,pentaho/pentaho-kettle,tmcsantos/pentaho-kettle,flbrino/pentaho-kettle,pymjer/pentaho-kettle,nantunes/pentaho-kettle,zlcnju/kettle,lgrill-pentaho/pentaho-kettle,akhayrutdinov/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,GauravAshara/pentaho-kettle,GauravAshara/pentaho-kettle,codek/pentaho-kettle,matrix-
stone/pentaho-kettle,Advent51/pentaho-kettle,ivanpogodin/pentaho-kettle,cjsonger/pentaho-kettle,DFieldFL/pentaho-kettle,pedrofvteixeira/pentaho-kettle,marcoslarsen/pentaho-kettle,alina-ipatina/pentaho-kettle,airy-ict/pentaho-kettle,tmcsantos/pentaho-kettle,airy-ict/pentaho-kettle,ddiroma/pentaho-kettle,stepanovdg/pentaho-kettle,e-cuellar/pentaho-kettle,graimundo/pentaho-kettle,airy-ict/pentaho-kettle,jbrant/pentaho-kettle,pymjer/pentaho-kettle,MikhailHubanau/pentaho-kettle,MikhailHubanau/pentaho-kettle,nanata1115/pentaho-kettle,flbrino/pentaho-kettle,rmansoor/pentaho-kettle,GauravAshara/pentaho-kettle,bmorrise/pentaho-kettle,hudak/pentaho-kettle,sajeetharan/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,dkincade/pentaho-kettle,mkambol/pentaho-kettle,lgrill-pentaho/pentaho-kettle,DFieldFL/pentaho-kettle,brosander/pentaho-kettle,mkambol/pentaho-kettle,jbrant/pentaho-kettle,hudak/pentaho-kettle,pentaho/pentaho-kettle,gretchiemoran/pentaho-kettle,mattyb149/pentaho-kettle,ViswesvarSekar/pentaho-kettle,CapeSepias/pentaho-kettle,drndos/pentaho-kettle,skofra0/pentaho-kettle,nantunes/pentaho-kettle,roboguy/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,graimundo/pentaho-kettle,stepanovdg/pentaho-kettle,wseyler/pentaho-kettle,mattyb149/pentaho-kettle,wseyler/pentaho-kettle,mattyb149/pentaho-kettle,rmansoor/pentaho-kettle,nicoben/pentaho-kettle,CapeSepias/pentaho-kettle,bmorrise/pentaho-kettle,tkafalas/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,akhayrutdinov/pentaho-kettle,mdamour1976/pentaho-kettle,denisprotopopov/pentaho-kettle,tkafalas/pentaho-kettle,matrix-stone/pentaho-kettle,mattyb149/pentaho-kettle,marcoslarsen/pentaho-kettle,mdamour1976/pentaho-kettle,ma459006574/pentaho-kettle,ViswesvarSekar/pentaho-kettle,flbrino/pentaho-kettle,roboguy/pentaho-kettle,zlcnju/kettle,cjsonger/pentaho-kettle,yshakhau/pentaho-kettle,tmcsantos/pentaho-kettle,gretchiemoran/pentaho-kettle,marcoslarsen/pentaho-kettle,ma459006574/pentaho-kettle,ccaspanello/pentaho-kettle,SergeyTravin
/pentaho-kettle,tkafalas/pentaho-kettle,mkambol/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,mdamour1976/pentaho-kettle,CapeSepias/pentaho-kettle,nantunes/pentaho-kettle,eayoungs/pentaho-kettle,stevewillcock/pentaho-kettle,matrix-stone/pentaho-kettle,dkincade/pentaho-kettle,pminutillo/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,YuryBY/pentaho-kettle,DFieldFL/pentaho-kettle,ddiroma/pentaho-kettle,nicoben/pentaho-kettle,aminmkhan/pentaho-kettle,Advent51/pentaho-kettle,drndos/pentaho-kettle,hudak/pentaho-kettle,pavel-sakun/pentaho-kettle,wseyler/pentaho-kettle,akhayrutdinov/pentaho-kettle,skofra0/pentaho-kettle,YuryBY/pentaho-kettle,ccaspanello/pentaho-kettle,cjsonger/pentaho-kettle,ivanpogodin/pentaho-kettle,ma459006574/pentaho-kettle,matrix-stone/pentaho-kettle,hudak/pentaho-kettle
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.core.vfs; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.util.Comparator; import org.apache.commons.vfs.FileContent; import org.apache.commons.vfs.FileName; import org.apache.commons.vfs.FileObject; import org.apache.commons.vfs.FileSystemException; import org.apache.commons.vfs.FileSystemManager; import org.apache.commons.vfs.FileSystemOptions; import org.apache.commons.vfs.cache.WeakRefFilesCache; import org.apache.commons.vfs.impl.DefaultFileSystemManager; import org.apache.commons.vfs.impl.StandardFileSystemManager; import org.apache.commons.vfs.provider.local.LocalFile; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.util.UUIDUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.configuration.IKettleFileSystemConfigBuilder; import 
org.pentaho.di.core.vfs.configuration.KettleFileSystemConfigBuilderFactory; import org.pentaho.di.i18n.BaseMessages; public class KettleVFS { private static Class<?> PKG = KettleVFS.class; // for i18n purposes, needed by Translator2!! private static final KettleVFS kettleVFS = new KettleVFS(); private final DefaultFileSystemManager fsm; private static VariableSpace defaultVariableSpace; static { // Create a new empty variable space... // defaultVariableSpace = new Variables(); defaultVariableSpace.initializeVariablesFrom( null ); } private KettleVFS() { fsm = new StandardFileSystemManager(); try { fsm.setFilesCache( new WeakRefFilesCache() ); fsm.init(); } catch ( FileSystemException e ) { e.printStackTrace(); } // Install a shutdown hook to make sure that the file system manager is closed // This will clean up temporary files in vfs_cache Runtime.getRuntime().addShutdownHook( new Thread( new Runnable() { @Override public void run() { if ( fsm != null ) { fsm.close(); } } } ) ); } public FileSystemManager getFileSystemManager() { return fsm; } public static KettleVFS getInstance() { return kettleVFS; } public static FileObject getFileObject( String vfsFilename ) throws KettleFileException { return getFileObject( vfsFilename, defaultVariableSpace ); } public static FileObject getFileObject( String vfsFilename, VariableSpace space ) throws KettleFileException { return getFileObject( vfsFilename, space, null ); } public static FileObject getFileObject( String vfsFilename, FileSystemOptions fsOptions ) throws KettleFileException { return getFileObject( vfsFilename, defaultVariableSpace, fsOptions ); } public static FileObject getFileObject( String vfsFilename, VariableSpace space, FileSystemOptions fsOptions ) throws KettleFileException { try { FileSystemManager fsManager = getInstance().getFileSystemManager(); // We have one problem with VFS: if the file is in a subdirectory of the current one: somedir/somefile // In that case, VFS doesn't parse the file correctly. 
// We need to put file: in front of it to make it work. // However, how are we going to verify this? // // We are going to see if the filename starts with one of the known protocols like file: zip: ram: smb: jar: etc. // If not, we are going to assume it's a file. // boolean relativeFilename = true; String[] schemes = fsManager.getSchemes(); for ( int i = 0; i < schemes.length && relativeFilename; i++ ) { if ( vfsFilename.startsWith( schemes[i] + ":" ) ) { relativeFilename = false; // We have a VFS URL, load any options for the file system driver fsOptions = buildFsOptions( space, fsOptions, vfsFilename, schemes[i] ); } } String filename; if ( vfsFilename.startsWith( "\\\\" ) ) { File file = new File( vfsFilename ); filename = file.toURI().toString(); } else { if ( relativeFilename ) { File file = new File( vfsFilename ); filename = file.getAbsolutePath(); } else { filename = vfsFilename; } } FileObject fileObject = null; if ( fsOptions != null ) { fileObject = fsManager.resolveFile( filename, fsOptions ); } else { fileObject = fsManager.resolveFile( filename ); } return fileObject; } catch ( IOException e ) { throw new KettleFileException( "Unable to get VFS File object for filename '" + vfsFilename + "' : " + e.getMessage() ); } } private static FileSystemOptions buildFsOptions( VariableSpace varSpace, FileSystemOptions sourceOptions, String vfsFilename, String scheme ) throws IOException { if ( varSpace == null || vfsFilename == null ) { // We cannot extract settings from a non-existant variable space return null; } IKettleFileSystemConfigBuilder configBuilder = KettleFileSystemConfigBuilderFactory.getConfigBuilder( varSpace, scheme ); FileSystemOptions fsOptions = ( sourceOptions == null ) ? new FileSystemOptions() : sourceOptions; String[] varList = varSpace.listVariables(); for ( String var : varList ) { if ( var.startsWith( "vfs." 
) ) { String param = configBuilder.parseParameterName( var, scheme ); if ( param != null ) { configBuilder.setParameter( fsOptions, param, varSpace.getVariable( var ), var, vfsFilename ); } else { throw new IOException( "FileSystemConfigBuilder could not parse parameter: " + var ); } } } return fsOptions; } /** * Read a text file (like an XML document). WARNING DO NOT USE FOR DATA FILES. * * @param vfsFilename * the filename or URL to read from * @param charSetName * the character set of the string (UTF-8, ISO8859-1, etc) * @return The content of the file as a String * @throws IOException */ public static String getTextFileContent( String vfsFilename, String charSetName ) throws KettleFileException { return getTextFileContent( vfsFilename, null, charSetName ); } public static String getTextFileContent( String vfsFilename, VariableSpace space, String charSetName ) throws KettleFileException { try { InputStream inputStream = null; if ( space == null ) { inputStream = getInputStream( vfsFilename ); } else { inputStream = getInputStream( vfsFilename, space ); } InputStreamReader reader = new InputStreamReader( inputStream, charSetName ); int c; StringBuffer stringBuffer = new StringBuffer(); while ( ( c = reader.read() ) != -1 ) { stringBuffer.append( (char) c ); } reader.close(); inputStream.close(); return stringBuffer.toString(); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static boolean fileExists( String vfsFilename ) throws KettleFileException { return fileExists( vfsFilename, null ); } public static boolean fileExists( String vfsFilename, VariableSpace space ) throws KettleFileException { FileObject fileObject = null; try { fileObject = getFileObject( vfsFilename, space ); return fileObject.exists(); } catch ( IOException e ) { throw new KettleFileException( e ); } finally { if ( fileObject != null ) { try { fileObject.close(); } catch ( Exception e ) { /* Ignore */ } } } } public static InputStream getInputStream( FileObject 
fileObject ) throws FileSystemException { FileContent content = fileObject.getContent(); return content.getInputStream(); } public static InputStream getInputStream( String vfsFilename ) throws KettleFileException { return getInputStream( vfsFilename, defaultVariableSpace ); } public static InputStream getInputStream( String vfsFilename, VariableSpace space ) throws KettleFileException { try { FileObject fileObject = getFileObject( vfsFilename, space ); return getInputStream( fileObject ); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static OutputStream getOutputStream( FileObject fileObject, boolean append ) throws IOException { FileObject parent = fileObject.getParent(); if ( parent != null ) { if ( !parent.exists() ) { throw new IOException( BaseMessages.getString( PKG, "KettleVFS.Exception.ParentDirectoryDoesNotExist", getFilename( parent ) ) ); } } try { fileObject.createFile(); FileContent content = fileObject.getContent(); return content.getOutputStream( append ); } catch ( FileSystemException e ) { // Perhaps if it's a local file, we can retry using the standard // File object. This is because on Windows there is a bug in VFS. // if ( fileObject instanceof LocalFile ) { try { String filename = getFilename( fileObject ); return new FileOutputStream( new File( filename ), append ); } catch ( Exception e2 ) { throw e; // throw the original exception: hide the retry. 
} } else { throw e; } } } public static OutputStream getOutputStream( String vfsFilename, boolean append ) throws KettleFileException { return getOutputStream( vfsFilename, defaultVariableSpace, append ); } public static OutputStream getOutputStream( String vfsFilename, VariableSpace space, boolean append ) throws KettleFileException { try { FileObject fileObject = getFileObject( vfsFilename, space ); return getOutputStream( fileObject, append ); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static OutputStream getOutputStream( String vfsFilename, VariableSpace space, FileSystemOptions fsOptions, boolean append ) throws KettleFileException { try { FileObject fileObject = getFileObject( vfsFilename, space, fsOptions ); return getOutputStream( fileObject, append ); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static String getFilename( FileObject fileObject ) { FileName fileName = fileObject.getName(); String root = fileName.getRootURI(); if ( !root.startsWith( "file:" ) ) { return fileName.getURI(); // nothing we can do about non-normal files. } if ( root.startsWith( "file:////" ) ) { return fileName.getURI(); // we'll see 4 forward slashes for a windows/smb network share } if ( root.endsWith( ":/" ) ) { // Windows root = root.substring( 8, 10 ); } else { // *nix & OSX root = ""; } String fileString = root + fileName.getPath(); if ( !"/".equals( Const.FILE_SEPARATOR ) ) { fileString = Const.replace( fileString, "/", Const.FILE_SEPARATOR ); } return fileString; } public static FileObject createTempFile( String prefix, String suffix, String directory ) throws KettleFileException { return createTempFile( prefix, suffix, directory, null ); } public static FileObject createTempFile( String prefix, String suffix, String directory, VariableSpace space ) throws KettleFileException { try { FileObject fileObject; do { // Build temporary file name using UUID to ensure uniqueness. 
Old mechanism would fail using Sort Rows (for // example) // when there multiple nodes with multiple JVMs on each node. In this case, the temp file names would end up // being // duplicated which would cause the sort to fail. String filename = new StringBuffer( 50 ).append( directory ).append( '/' ).append( prefix ).append( '_' ).append( UUIDUtil.getUUIDAsString() ).append( suffix ).toString(); fileObject = getFileObject( filename, space ); } while ( fileObject.exists() ); return fileObject; } catch ( IOException e ) { throw new KettleFileException( e ); } } public static Comparator<FileObject> getComparator() { return new Comparator<FileObject>() { @Override public int compare( FileObject o1, FileObject o2 ) { String filename1 = getFilename( o1 ); String filename2 = getFilename( o2 ); return filename1.compareTo( filename2 ); } }; } /** * Get a FileInputStream for a local file. Local files can be read with NIO. * * @param fileObject * @return a FileInputStream * @throws IOException * @deprecated because of API change in Apache VFS. As a workaround use FileObject.getName().getPathDecoded(); Then * use a regular File() object to create a File Input stream. */ @Deprecated public static FileInputStream getFileInputStream( FileObject fileObject ) throws IOException { if ( !( fileObject instanceof LocalFile ) ) { // We can only use NIO on local files at the moment, so that's what we limit ourselves to. // throw new IOException( BaseMessages.getString( PKG, "FixedInput.Log.OnlyLocalFilesAreSupported" ) ); } return new FileInputStream( fileObject.getName().getPathDecoded() ); } }
core/src/org/pentaho/di/core/vfs/KettleVFS.java
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.core.vfs; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.util.Comparator; import org.apache.commons.vfs.FileContent; import org.apache.commons.vfs.FileName; import org.apache.commons.vfs.FileObject; import org.apache.commons.vfs.FileSystemException; import org.apache.commons.vfs.FileSystemManager; import org.apache.commons.vfs.FileSystemOptions; import org.apache.commons.vfs.cache.WeakRefFilesCache; import org.apache.commons.vfs.impl.DefaultFileSystemManager; import org.apache.commons.vfs.impl.StandardFileSystemManager; import org.apache.commons.vfs.provider.local.LocalFile; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.util.UUIDUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.configuration.IKettleFileSystemConfigBuilder; import 
org.pentaho.di.core.vfs.configuration.KettleFileSystemConfigBuilderFactory; import org.pentaho.di.i18n.BaseMessages; public class KettleVFS { private static Class<?> PKG = KettleVFS.class; // for i18n purposes, needed by Translator2!! private static final KettleVFS kettleVFS = new KettleVFS(); private final DefaultFileSystemManager fsm; private static VariableSpace defaultVariableSpace; static { // Create a new empty variable space... // defaultVariableSpace = new Variables(); defaultVariableSpace.initializeVariablesFrom( null ); } private KettleVFS() { fsm = new StandardFileSystemManager(); try { fsm.setFilesCache( new WeakRefFilesCache() ); fsm.init(); } catch ( FileSystemException e ) { e.printStackTrace(); } // Install a shutdown hook to make sure that the file system manager is closed // This will clean up temporary files in vfs_cache Runtime.getRuntime().addShutdownHook( new Thread( new Runnable() { @Override public void run() { if ( fsm != null ) { fsm.close(); } } } ) ); } public FileSystemManager getFileSystemManager() { return fsm; } public static KettleVFS getInstance() { return kettleVFS; } public static FileObject getFileObject( String vfsFilename ) throws KettleFileException { return getFileObject( vfsFilename, defaultVariableSpace ); } public static FileObject getFileObject( String vfsFilename, VariableSpace space ) throws KettleFileException { return getFileObject( vfsFilename, space, null ); } public static FileObject getFileObject( String vfsFilename, FileSystemOptions fsOptions ) throws KettleFileException { return getFileObject( vfsFilename, defaultVariableSpace, fsOptions ); } public static FileObject getFileObject( String vfsFilename, VariableSpace space, FileSystemOptions fsOptions ) throws KettleFileException { try { FileSystemManager fsManager = getInstance().getFileSystemManager(); // We have one problem with VFS: if the file is in a subdirectory of the current one: somedir/somefile // In that case, VFS doesn't parse the file correctly. 
// We need to put file: in front of it to make it work. // However, how are we going to verify this? // // We are going to see if the filename starts with one of the known protocols like file: zip: ram: smb: jar: etc. // If not, we are going to assume it's a file. // boolean relativeFilename = true; String[] schemes = fsManager.getSchemes(); for ( int i = 0; i < schemes.length && relativeFilename; i++ ) { if ( vfsFilename.startsWith( schemes[i] + ":" ) ) { relativeFilename = false; // We have a VFS URL, load any options for the file system driver fsOptions = buildFsOptions( space, fsOptions, vfsFilename, schemes[i] ); } } String filename; if ( vfsFilename.startsWith( "\\\\" ) ) { File file = new File( vfsFilename ); filename = file.toURI().toString(); } else { if ( relativeFilename ) { File file = new File( vfsFilename ); filename = file.getAbsolutePath(); } else { filename = vfsFilename; } } FileObject fileObject = null; if ( fsOptions != null ) { fileObject = fsManager.resolveFile( filename, fsOptions ); } else { fileObject = fsManager.resolveFile( filename ); } return fileObject; } catch ( IOException e ) { throw new KettleFileException( "Unable to get VFS File object for filename '" + vfsFilename + "' : " + e.getMessage() ); } } private static FileSystemOptions buildFsOptions( VariableSpace varSpace, FileSystemOptions sourceOptions, String vfsFilename, String scheme ) throws IOException { if ( varSpace == null || vfsFilename == null ) { // We cannot extract settings from a non-existant variable space return null; } IKettleFileSystemConfigBuilder configBuilder = KettleFileSystemConfigBuilderFactory.getConfigBuilder( varSpace, scheme ); FileSystemOptions fsOptions = ( sourceOptions == null ) ? new FileSystemOptions() : sourceOptions; String[] varList = varSpace.listVariables(); for ( String var : varList ) { if ( var.startsWith( "vfs." 
) ) { String param = configBuilder.parseParameterName( var, scheme ); if ( param != null ) { configBuilder.setParameter( fsOptions, param, varSpace.getVariable( var ), var, vfsFilename ); } else { throw new IOException( "FileSystemConfigBuilder could not parse parameter: " + var ); } } } return fsOptions; } /** * Read a text file (like an XML document). WARNING DO NOT USE FOR DATA FILES. * * @param vfsFilename * the filename or URL to read from * @param charSetName * the character set of the string (UTF-8, ISO8859-1, etc) * @return The content of the file as a String * @throws IOException */ public static String getTextFileContent( String vfsFilename, String charSetName ) throws KettleFileException { return getTextFileContent( vfsFilename, null, charSetName ); } public static String getTextFileContent( String vfsFilename, VariableSpace space, String charSetName ) throws KettleFileException { try { InputStream inputStream = null; if ( space == null ) { inputStream = getInputStream( vfsFilename ); } else { inputStream = getInputStream( vfsFilename, space ); } InputStreamReader reader = new InputStreamReader( inputStream, charSetName ); int c; StringBuffer stringBuffer = new StringBuffer(); while ( ( c = reader.read() ) != -1 ) { stringBuffer.append( (char) c ); } reader.close(); inputStream.close(); return stringBuffer.toString(); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static boolean fileExists( String vfsFilename ) throws KettleFileException { return fileExists( vfsFilename, null ); } public static boolean fileExists( String vfsFilename, VariableSpace space ) throws KettleFileException { FileObject fileObject = null; try { fileObject = getFileObject( vfsFilename, space ); return fileObject.exists(); } catch ( IOException e ) { throw new KettleFileException( e ); } finally { if ( fileObject != null ) { try { fileObject.close(); } catch ( Exception e ) { /* Ignore */ } } } } public static InputStream getInputStream( FileObject 
fileObject ) throws FileSystemException { FileContent content = fileObject.getContent(); return content.getInputStream(); } public static InputStream getInputStream( String vfsFilename ) throws KettleFileException { return getInputStream( vfsFilename, null ); } public static InputStream getInputStream( String vfsFilename, VariableSpace space ) throws KettleFileException { try { FileObject fileObject = getFileObject( vfsFilename, space ); return getInputStream( fileObject ); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static OutputStream getOutputStream( FileObject fileObject, boolean append ) throws IOException { FileObject parent = fileObject.getParent(); if ( parent != null ) { if ( !parent.exists() ) { throw new IOException( BaseMessages.getString( PKG, "KettleVFS.Exception.ParentDirectoryDoesNotExist", getFilename( parent ) ) ); } } try { fileObject.createFile(); FileContent content = fileObject.getContent(); return content.getOutputStream( append ); } catch ( FileSystemException e ) { // Perhaps if it's a local file, we can retry using the standard // File object. This is because on Windows there is a bug in VFS. // if ( fileObject instanceof LocalFile ) { try { String filename = getFilename( fileObject ); return new FileOutputStream( new File( filename ), append ); } catch ( Exception e2 ) { throw e; // throw the original exception: hide the retry. 
} } else { throw e; } } } public static OutputStream getOutputStream( String vfsFilename, boolean append ) throws KettleFileException { return getOutputStream( vfsFilename, null, append ); } public static OutputStream getOutputStream( String vfsFilename, VariableSpace space, boolean append ) throws KettleFileException { try { FileObject fileObject = getFileObject( vfsFilename, space ); return getOutputStream( fileObject, append ); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static OutputStream getOutputStream( String vfsFilename, VariableSpace space, FileSystemOptions fsOptions, boolean append ) throws KettleFileException { try { FileObject fileObject = getFileObject( vfsFilename, space, fsOptions ); return getOutputStream( fileObject, append ); } catch ( IOException e ) { throw new KettleFileException( e ); } } public static String getFilename( FileObject fileObject ) { FileName fileName = fileObject.getName(); String root = fileName.getRootURI(); if ( !root.startsWith( "file:" ) ) { return fileName.getURI(); // nothing we can do about non-normal files. } if ( root.startsWith( "file:////" ) ) { return fileName.getURI(); // we'll see 4 forward slashes for a windows/smb network share } if ( root.endsWith( ":/" ) ) { // Windows root = root.substring( 8, 10 ); } else { // *nix & OSX root = ""; } String fileString = root + fileName.getPath(); if ( !"/".equals( Const.FILE_SEPARATOR ) ) { fileString = Const.replace( fileString, "/", Const.FILE_SEPARATOR ); } return fileString; } public static FileObject createTempFile( String prefix, String suffix, String directory ) throws KettleFileException { return createTempFile( prefix, suffix, directory, null ); } public static FileObject createTempFile( String prefix, String suffix, String directory, VariableSpace space ) throws KettleFileException { try { FileObject fileObject; do { // Build temporary file name using UUID to ensure uniqueness. 
Old mechanism would fail using Sort Rows (for // example) // when there multiple nodes with multiple JVMs on each node. In this case, the temp file names would end up // being // duplicated which would cause the sort to fail. String filename = new StringBuffer( 50 ).append( directory ).append( '/' ).append( prefix ).append( '_' ).append( UUIDUtil.getUUIDAsString() ).append( suffix ).toString(); fileObject = getFileObject( filename, space ); } while ( fileObject.exists() ); return fileObject; } catch ( IOException e ) { throw new KettleFileException( e ); } } public static Comparator<FileObject> getComparator() { return new Comparator<FileObject>() { @Override public int compare( FileObject o1, FileObject o2 ) { String filename1 = getFilename( o1 ); String filename2 = getFilename( o2 ); return filename1.compareTo( filename2 ); } }; } /** * Get a FileInputStream for a local file. Local files can be read with NIO. * * @param fileObject * @return a FileInputStream * @throws IOException * @deprecated because of API change in Apache VFS. As a workaround use FileObject.getName().getPathDecoded(); Then * use a regular File() object to create a File Input stream. */ @Deprecated public static FileInputStream getFileInputStream( FileObject fileObject ) throws IOException { if ( !( fileObject instanceof LocalFile ) ) { // We can only use NIO on local files at the moment, so that's what we limit ourselves to. // throw new IOException( BaseMessages.getString( PKG, "FixedInput.Log.OnlyLocalFilesAreSupported" ) ); } return new FileInputStream( fileObject.getName().getPathDecoded() ); } }
[MARKET-192] - Allow environment be used by KettleVFS
core/src/org/pentaho/di/core/vfs/KettleVFS.java
[MARKET-192] - Allow environment be used by KettleVFS
Java
apache-2.0
770dcebc4d55858245f5254de1bbe867589c11ad
0
googleinterns/receipt-roundup,googleinterns/receipt-roundup,googleinterns/receipt-roundup
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.servlets; import com.google.appengine.api.blobstore.BlobInfo; import com.google.appengine.api.blobstore.BlobInfoFactory; import com.google.appengine.api.blobstore.BlobKey; import com.google.appengine.api.blobstore.BlobstoreService; import com.google.appengine.api.blobstore.BlobstoreServiceFactory; import com.google.appengine.api.blobstore.UploadOptions; import com.google.appengine.api.blobstore.UploadOptions.Builder; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.Text; import com.google.sps.data.AnalysisResults; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * Servlet with a GET handler that creates a URL that uploads a receipt image to Blobstore and * a POST handler that extracts data from the image and inserts it into Datastore. 
*/ @WebServlet("/upload-receipt") public class UploadReceiptServlet extends HttpServlet { // Max upload size of 5 MB. private static final long MAX_UPLOAD_SIZE_BYTES = 5 * 1024 * 1024; // Matches JPEG image filenames. private static final Pattern validFilename = Pattern.compile("([^\\s]+(\\.(?i)(jpe?g))$)"); // Logs to System.err by default. private static final Logger logger = Logger.getLogger(UploadReceiptServlet.class.getName()); /** * Creates a URL that uploads the receipt image to Blobstore when the user submits the upload * form. After Blobstore handles the parsing, storing, and hosting of the image, the form * data and a URL where the image can be accessed is forwarded to this servlet in a POST * request. */ @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService(); UploadOptions uploadOptions = UploadOptions.Builder.withMaxUploadSizeBytesPerBlob(MAX_UPLOAD_SIZE_BYTES); String uploadUrl = blobstoreService.createUploadUrl("/upload-receipt", uploadOptions); response.setContentType("text/html"); response.getWriter().println(uploadUrl); } /** * When the user submits the upload form, Blobstore processes the image and then forwards the * request to this servlet, which analyzes the receipt image and inserts information * about the receipt into Datastore. 
*/ @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException { Entity receipt = null; try { receipt = createReceiptEntity(request); } catch (FileNotSelectedException | InvalidFileException e) { logger.warning(e.toString()); response.setStatus(HttpServletResponse.SC_BAD_REQUEST); response.getWriter().println(e.toString()); return; } catch (ReceiptAnalysisException e) { logger.warning(e.toString()); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); response.getWriter().println(e.toString()); return; } // Store the receipt entity in Datastore. DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); datastore.put(receipt); } /** * Creates and returns a receipt entity, which includes the receipt image and * information about the receipt. */ private Entity createReceiptEntity(HttpServletRequest request) throws FileNotSelectedException, InvalidFileException, ReceiptAnalysisException { BlobKey blobKey = getUploadedBlobKey(request, "receipt-image"); long timestamp = System.currentTimeMillis(); String label = request.getParameter("label"); // Populate a receipt entity with the information extracted from the image with Cloud Vision. Entity receipt = analyzeReceiptImage(blobKey, request); receipt.setProperty("blobKey", blobKey); receipt.setProperty("timestamp", timestamp); receipt.setProperty("label", label); return receipt; } /** * Returns a blob key that points to the uploaded file. */ private BlobKey getUploadedBlobKey(HttpServletRequest request, String formInputElementName) throws FileNotSelectedException, InvalidFileException { BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService(); Map<String, List<BlobKey>> blobs = blobstoreService.getUploads(request); List<BlobKey> blobKeys = blobs.get(formInputElementName); // User submitted the form without selecting a file. 
(dev server) if (blobKeys == null || blobKeys.isEmpty()) { throw new FileNotSelectedException("No file was uploaded by the user (dev server)."); } // The form only contains a single file input, so get the first index. BlobKey blobKey = blobKeys.get(0); // User submitted the form without selecting a file. (live server) BlobInfo blobInfo = new BlobInfoFactory().loadBlobInfo(blobKey); if (blobInfo.getSize() == 0) { blobstoreService.delete(blobKey); throw new FileNotSelectedException("No file was uploaded by the user (live server)."); } String filename = blobInfo.getFilename(); if (!isValidFilename(filename)) { blobstoreService.delete(blobKey); throw new InvalidFileException("Uploaded file must be a JPEG image."); } return blobKey; } /** * Checks if the filename is a valid JPEG file. */ private static boolean isValidFilename(String filename) { return validFilename.matcher(filename).matches(); } /** * Extracts the raw text from the image with the Cloud Vision API. Returns a receipt * entity populated with the extracted fields. */ private Entity analyzeReceiptImage(BlobKey blobKey, HttpServletRequest request) throws ReceiptAnalysisException { String imageUrl = getBlobServingUrl(blobKey); String baseUrl = getBaseUrl(request); String DEV_SERVER_BASE_URL = "http://0.0.0.0:80"; AnalysisResults results = null; try { // For the dev server, authentication is required to access the image served at the URL, so // fetch the bytes directly from Blobstore instead. if (baseUrl.equals(DEV_SERVER_BASE_URL)) { results = ReceiptAnalysis.serveImageText(blobKey); } else { String absoluteUrl = baseUrl + imageUrl; results = ReceiptAnalysis.serveImageText(absoluteUrl); } } catch (IOException e) { throw new ReceiptAnalysisException("Receipt analysis failed.", e); } // TODO: Replace hard-coded values using receipt analysis with Cloud Vision. double price = 5.89; String store = "McDonald's"; // Create an entity with a kind of Receipt. 
Entity receipt = new Entity("Receipt"); receipt.setProperty("imageUrl", imageUrl); receipt.setProperty("price", price); receipt.setProperty("store", store); // Text objects wrap around a string of unlimited size while strings are limited to 1500 bytes. receipt.setUnindexedProperty("rawText", new Text(results.getRawText())); return receipt; } /** * Gets a URL that serves the blob file using the blob key. */ private String getBlobServingUrl(BlobKey blobKey) { return "/serve-image?blob-key=" + blobKey.getKeyString(); } /** * Get the base URL of the web application. */ private String getBaseUrl(HttpServletRequest request) { String baseUrl = request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort() + request.getContextPath(); return baseUrl; } public class InvalidFileException extends Exception { public InvalidFileException(String errorMessage) { super(errorMessage); } } public class FileNotSelectedException extends Exception { public FileNotSelectedException(String errorMessage) { super(errorMessage); } } public class ReceiptAnalysisException extends Exception { public ReceiptAnalysisException(String errorMessage, Throwable err) { super(errorMessage, err); } } }
src/main/java/servlets/UploadReceiptServlet.java
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.servlets; import com.google.appengine.api.blobstore.BlobInfo; import com.google.appengine.api.blobstore.BlobInfoFactory; import com.google.appengine.api.blobstore.BlobKey; import com.google.appengine.api.blobstore.BlobstoreService; import com.google.appengine.api.blobstore.BlobstoreServiceFactory; import com.google.appengine.api.blobstore.UploadOptions; import com.google.appengine.api.blobstore.UploadOptions.Builder; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.sps.data.AnalysisResults; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * Servlet with a GET handler that creates a URL that uploads a receipt image to Blobstore and * a POST handler that extracts data from the image and inserts it into Datastore. */ @WebServlet("/upload-receipt") public class UploadReceiptServlet extends HttpServlet { // Max upload size of 5 MB. 
private static final long MAX_UPLOAD_SIZE_BYTES = 5 * 1024 * 1024; // Matches JPEG image filenames. private static final Pattern validFilename = Pattern.compile("([^\\s]+(\\.(?i)(jpe?g))$)"); // Logs to System.err by default. private static final Logger logger = Logger.getLogger(UploadReceiptServlet.class.getName()); /** * Creates a URL that uploads the receipt image to Blobstore when the user submits the upload * form. After Blobstore handles the parsing, storing, and hosting of the image, the form * data and a URL where the image can be accessed is forwarded to this servlet in a POST * request. */ @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService(); UploadOptions uploadOptions = UploadOptions.Builder.withMaxUploadSizeBytesPerBlob(MAX_UPLOAD_SIZE_BYTES); String uploadUrl = blobstoreService.createUploadUrl("/upload-receipt", uploadOptions); response.setContentType("text/html"); response.getWriter().println(uploadUrl); } /** * When the user submits the upload form, Blobstore processes the image and then forwards the * request to this servlet, which analyzes the receipt image and inserts information * about the receipt into Datastore. */ @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException { Entity receipt = null; try { receipt = createReceiptEntity(request); } catch (FileNotSelectedException | InvalidFileException e) { logger.warning(e.toString()); response.setStatus(HttpServletResponse.SC_BAD_REQUEST); response.getWriter().println(e.toString()); return; } catch (ReceiptAnalysisException e) { logger.warning(e.toString()); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); response.getWriter().println(e.toString()); return; } // Store the receipt entity in Datastore. 
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); datastore.put(receipt); } /** * Creates and returns a receipt entity, which includes the receipt image and * information about the receipt. */ private Entity createReceiptEntity(HttpServletRequest request) throws FileNotSelectedException, InvalidFileException, ReceiptAnalysisException { BlobKey blobKey = getUploadedBlobKey(request, "receipt-image"); long timestamp = System.currentTimeMillis(); String label = request.getParameter("label"); // Populate a receipt entity with the information extracted from the image with Cloud Vision. Entity receipt = analyzeReceiptImage(blobKey, request); receipt.setProperty("blobKey", blobKey); receipt.setProperty("timestamp", timestamp); receipt.setProperty("label", label); return receipt; } /** * Returns a blob key that points to the uploaded file. */ private BlobKey getUploadedBlobKey(HttpServletRequest request, String formInputElementName) throws FileNotSelectedException, InvalidFileException { BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService(); Map<String, List<BlobKey>> blobs = blobstoreService.getUploads(request); List<BlobKey> blobKeys = blobs.get(formInputElementName); // User submitted the form without selecting a file. (dev server) if (blobKeys == null || blobKeys.isEmpty()) { throw new FileNotSelectedException("No file was uploaded by the user (dev server)."); } // The form only contains a single file input, so get the first index. BlobKey blobKey = blobKeys.get(0); // User submitted the form without selecting a file. 
(live server) BlobInfo blobInfo = new BlobInfoFactory().loadBlobInfo(blobKey); if (blobInfo.getSize() == 0) { blobstoreService.delete(blobKey); throw new FileNotSelectedException("No file was uploaded by the user (live server)."); } String filename = blobInfo.getFilename(); if (!isValidFilename(filename)) { blobstoreService.delete(blobKey); throw new InvalidFileException("Uploaded file must be a JPEG image."); } return blobKey; } /** * Checks if the filename is a valid JPEG file. */ private static boolean isValidFilename(String filename) { return validFilename.matcher(filename).matches(); } /** * Extracts the raw text from the image with the Cloud Vision API. Returns a receipt * entity populated with the extracted fields. */ private Entity analyzeReceiptImage(BlobKey blobKey, HttpServletRequest request) throws ReceiptAnalysisException { String imageUrl = getBlobServingUrl(blobKey); String baseUrl = getBaseUrl(request); String DEV_SERVER_BASE_URL = "http://0.0.0.0:80"; AnalysisResults results = null; try { // For the dev server, authentication is required to access the image served at the URL, so // fetch the bytes directly from Blobstore instead. if (baseUrl.equals(DEV_SERVER_BASE_URL)) { results = ReceiptAnalysis.serveImageText(blobKey); } else { String absoluteUrl = baseUrl + imageUrl; results = ReceiptAnalysis.serveImageText(absoluteUrl); } } catch (IOException e) { throw new ReceiptAnalysisException("Receipt analysis failed.", e); } // TODO: Replace hard-coded values using receipt analysis with Cloud Vision. double price = 5.89; String store = "McDonald's"; // Create an entity with a kind of Receipt. Entity receipt = new Entity("Receipt"); receipt.setProperty("imageUrl", imageUrl); receipt.setProperty("price", price); receipt.setProperty("store", store); receipt.setUnindexedProperty("rawText", results.getRawText()); return receipt; } /** * Gets a URL that serves the blob file using the blob key. 
*/ private String getBlobServingUrl(BlobKey blobKey) { return "/serve-image?blob-key=" + blobKey.getKeyString(); } /** * Get the base URL of the web application. */ private String getBaseUrl(HttpServletRequest request) { String baseUrl = request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort() + request.getContextPath(); return baseUrl; } public class InvalidFileException extends Exception { public InvalidFileException(String errorMessage) { super(errorMessage); } } public class FileNotSelectedException extends Exception { public FileNotSelectedException(String errorMessage) { super(errorMessage); } } public class ReceiptAnalysisException extends Exception { public ReceiptAnalysisException(String errorMessage, Throwable err) { super(errorMessage, err); } } }
Store the rawText of the receipt as Text instead of String to support unlimited size.
src/main/java/servlets/UploadReceiptServlet.java
Store the rawText of the receipt as Text instead of String to support unlimited size.
Java
apache-2.0
f50bde7312e9acfe2b4a267a5fa98816bad0a757
0
literacyapp-org/literacyapp-model
package org.literacyapp.model.gson.content; import java.util.Calendar; import org.literacyapp.model.enums.Locale; import org.literacyapp.model.enums.content.ContentStatus; public abstract class ContentGson { private Long id; private Locale locale; private Calendar timeLastUpdate; private Integer revisionNumber; // [1, 2, 3, ...] private ContentStatus contentStatus; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Locale getLocale() { return locale; } public void setLocale(Locale locale) { this.locale = locale; } public Calendar getTimeLastUpdate() { return timeLastUpdate; } public void setTimeLastUpdate(Calendar timeLastUpdate) { this.timeLastUpdate = timeLastUpdate; } public Integer getRevisionNumber() { return revisionNumber; } public void setRevisionNumber(Integer revisionNumber) { this.revisionNumber = revisionNumber; } public ContentStatus getContentStatus() { return contentStatus; } public void setContentStatus(ContentStatus contentStatus) { this.contentStatus = contentStatus; } }
src/main/java/org/literacyapp/model/gson/content/ContentGson.java
package org.literacyapp.model.gson.content; import java.util.Calendar; import org.literacyapp.model.enums.Locale; public abstract class ContentGson { private Long id; private Locale locale; private Calendar timeLastUpdate; private Integer revisionNumber; // [1, 2, 3, ...] public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Locale getLocale() { return locale; } public void setLocale(Locale locale) { this.locale = locale; } public Calendar getTimeLastUpdate() { return timeLastUpdate; } public void setTimeLastUpdate(Calendar timeLastUpdate) { this.timeLastUpdate = timeLastUpdate; } public Integer getRevisionNumber() { return revisionNumber; } public void setRevisionNumber(Integer revisionNumber) { this.revisionNumber = revisionNumber; } }
Added content status
src/main/java/org/literacyapp/model/gson/content/ContentGson.java
Added content status
Java
apache-2.0
0f7d94e9113195ffd04dbb3aa3075a26835603cc
0
aaudiber/alluxio,Alluxio/alluxio,bf8086/alluxio,riversand963/alluxio,Alluxio/alluxio,Alluxio/alluxio,calvinjia/tachyon,wwjiang007/alluxio,aaudiber/alluxio,madanadit/alluxio,yuluo-ding/alluxio,riversand963/alluxio,Reidddddd/mo-alluxio,madanadit/alluxio,calvinjia/tachyon,maobaolong/alluxio,uronce-cc/alluxio,PasaLab/tachyon,yuluo-ding/alluxio,yuluo-ding/alluxio,aaudiber/alluxio,apc999/alluxio,Reidddddd/alluxio,jsimsa/alluxio,bf8086/alluxio,EvilMcJerkface/alluxio,ChangerYoung/alluxio,WilliamZapata/alluxio,maboelhassan/alluxio,wwjiang007/alluxio,maboelhassan/alluxio,madanadit/alluxio,maobaolong/alluxio,PasaLab/tachyon,uronce-cc/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,wwjiang007/alluxio,PasaLab/tachyon,apc999/alluxio,yuluo-ding/alluxio,maboelhassan/alluxio,ShailShah/alluxio,Reidddddd/alluxio,maobaolong/alluxio,ShailShah/alluxio,WilliamZapata/alluxio,Alluxio/alluxio,apc999/alluxio,apc999/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,maboelhassan/alluxio,PasaLab/tachyon,riversand963/alluxio,maobaolong/alluxio,bf8086/alluxio,maobaolong/alluxio,jswudi/alluxio,jswudi/alluxio,ShailShah/alluxio,PasaLab/tachyon,EvilMcJerkface/alluxio,WilliamZapata/alluxio,uronce-cc/alluxio,bf8086/alluxio,uronce-cc/alluxio,yuluo-ding/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,jsimsa/alluxio,jsimsa/alluxio,Reidddddd/alluxio,ChangerYoung/alluxio,aaudiber/alluxio,jswudi/alluxio,ChangerYoung/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,Alluxio/alluxio,madanadit/alluxio,ShailShah/alluxio,madanadit/alluxio,aaudiber/alluxio,maobaolong/alluxio,madanadit/alluxio,Reidddddd/mo-alluxio,bf8086/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,bf8086/alluxio,wwjiang007/alluxio,WilliamZapata/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,PasaLab/tachyon,calvinjia/tachyon,maobaolong/alluxio,jsimsa/alluxio,riversand963/alluxio,calvinjia/tachyon,jswudi/alluxio,wwjiang007/alluxio,uronce-cc/alluxio,apc999/alluxio,Reidddddd/mo-alluxio,riversand963/alluxio,Reiddddd
d/alluxio,apc999/alluxio,Reidddddd/alluxio,aaudiber/alluxio,calvinjia/tachyon,Alluxio/alluxio,Reidddddd/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,aaudiber/alluxio,PasaLab/tachyon,jsimsa/alluxio,wwjiang007/alluxio,maboelhassan/alluxio,ShailShah/alluxio,WilliamZapata/alluxio,ChangerYoung/alluxio,jswudi/alluxio,wwjiang007/alluxio,Alluxio/alluxio,uronce-cc/alluxio,jsimsa/alluxio,Reidddddd/mo-alluxio,Reidddddd/alluxio,ChangerYoung/alluxio,jswudi/alluxio,riversand963/alluxio,calvinjia/tachyon,apc999/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,maboelhassan/alluxio,WilliamZapata/alluxio,calvinjia/tachyon,yuluo-ding/alluxio,maobaolong/alluxio,Alluxio/alluxio,ShailShah/alluxio,Reidddddd/mo-alluxio,ChangerYoung/alluxio,madanadit/alluxio,bf8086/alluxio
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.underfs.swift; import org.javaswift.joss.headers.object.range.AbstractRange; /** * A range of a Swift object. This class is a substitute for JOSS MidPartRange which takes 'int' * parameters that might overflow for large objects. */ public class SwiftRange extends AbstractRange { /** * Constructor for a range in a Swift object. * @param startPos starting position in bytes * @param endPos ending position in bytes */ public SwiftRange(long startPos, long endPos) { super(startPos, endPos); } @Override public long getFrom(int byteArrayLength) { return this.offset; } @Override public long getTo(int byteArrayLength) { return this.length; } }
underfs/swift/src/main/java/alluxio/underfs/swift/SwiftRange.java
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.underfs.swift; import org.javaswift.joss.headers.object.range.AbstractRange; /** * A range of a Swift object. */ public class SwiftRange extends AbstractRange { /** * Constructor for a range in a Swift object. * @param startPos starting position in bytes * @param endPos ending position in bytes */ public SwiftRange(long startPos, long endPos) { super(startPos, endPos); } @Override public long getFrom(int byteArrayLength) { return this.offset; } @Override public long getTo(int byteArrayLength) { return this.length; } }
Updating javadoc for SwiftRange
underfs/swift/src/main/java/alluxio/underfs/swift/SwiftRange.java
Updating javadoc for SwiftRange
Java
bsd-2-clause
a870bfa33b09caa08029f22ad78920a9d1034e7f
0
laffer1/midnightbsd-app-store,laffer1/midnightbsd-app-store,laffer1/midnightbsd-app-store
package org.midnightbsd.appstore.services; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.midnightbsd.appstore.model.Architecture; import org.midnightbsd.appstore.repository.ArchitectureRepository; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import java.util.Calendar; import java.util.Collections; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.*; /** * @author Lucas Holt */ @RunWith(MockitoJUnitRunner.class) public class ArchitectureServiceTest { @Mock private ArchitectureRepository architectureRepository; @InjectMocks private ArchitectureService architectureService; @Before public void setup() { Architecture architecture = new Architecture(); architecture.setId(1); architecture.setName("test"); architecture.setDescription("Foo"); architecture.setCreated(Calendar.getInstance().getTime()); when(architectureRepository.findOneByName("test")).thenReturn(architecture); when(architectureRepository.findOne(1)).thenReturn(architecture); when(architectureRepository.findAll()).thenReturn(Collections.singletonList(architecture)); } @Test public void testGetName() { Architecture arch = architectureService.getByName("test"); assertNotNull(arch); assertEquals(1, arch.getId()); assertEquals("test", arch.getName()); assertEquals("Foo", arch.getDescription()); verify(architectureRepository, times(1)).findOneByName(anyString()); } @Test public void testGet() { Architecture arch = architectureService.get(1); assertNotNull(arch); assertEquals(1, arch.getId()); assertEquals("test", arch.getName()); assertEquals("Foo", arch.getDescription()); 
verify(architectureRepository, times(1)).findOne(1); } @Test public void testList() { List<Architecture> items = architectureService.list(); assertNotNull(items); assertTrue(items.size() > 0); verify(architectureRepository, times(1)).findAll(); } }
src/test/java/org/midnightbsd/appstore/services/ArchitectureServiceTest.java
package org.midnightbsd.appstore.services; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.midnightbsd.appstore.model.Architecture; import org.midnightbsd.appstore.repository.ArchitectureRepository; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import java.util.Calendar; import java.util.Collections; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.*; /** * @author Lucas Holt */ @RunWith(MockitoJUnitRunner.class) public class ArchitectureServiceTest { @Mock private ArchitectureRepository architectureRepository; @InjectMocks private ArchitectureService architectureService; @Before public void setup() { Architecture architecture = new Architecture(); architecture.setId(1); architecture.setName("test"); architecture.setDescription("Foo"); architecture.setCreated(Calendar.getInstance().getTime()); when(architectureRepository.findOneByName("test")).thenReturn(architecture); when(architectureRepository.findOne(1)).thenReturn(architecture); } @Test public void testGetName() { Architecture arch = architectureService.getByName("test"); assertNotNull(arch); assertEquals(1, arch.getId()); assertEquals("test", arch.getName()); assertEquals("Foo", arch.getDescription()); verify(architectureRepository, times(1)).findOneByName(anyString()); } @Test public void testGet() { Architecture arch = architectureService.get(1); assertNotNull(arch); assertEquals(1, arch.getId()); assertEquals("test", arch.getName()); assertEquals("Foo", arch.getDescription()); verify(architectureRepository, times(1)).findOne(1); } }
add list test
src/test/java/org/midnightbsd/appstore/services/ArchitectureServiceTest.java
add list test
Java
bsd-3-clause
65e4ce6f8216560ab07d8913e5fd5b909787984c
0
uzen/byteseek
/* * Copyright Matt Palmer 2009-2011, All rights reserved. * * This code is licensed under a standard 3-clause BSD license: * * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the "byteseek" name nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * */ package net.domesdaybook.matcher; import java.io.IOException; import net.domesdaybook.reader.Reader; /** * An interface for classes that can match bytes from a given position, either * directly within a byte array, or through a Reader object. * * @author Matt Palmer */ public interface Matcher { /** * Returns whether there is a match or not at the given position in a Reader. 
* * @param reader The {@link Reader} to read from. * @param matchPosition The position to try to match at. * @return Whether there is a match at the given position. * @throws IOException if the Reader cannot read. */ public boolean matches(final Reader reader, final long matchPosition) throws IOException; /** * Returns whether there is a match or not at the given position in a byte array. * * @param bytes An array of bytes to read from. * @param matchPosition The position to try to match at. * @return Whether there is a match at the given position. */ public boolean matches(final byte[] bytes, final int matchPosition); }
src/net/domesdaybook/matcher/Matcher.java
/* * Copyright Matt Palmer 2009-2011, All rights reserved. * * This code is licensed under a standard 3-clause BSD license: * * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the "byteseek" name nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * */ package net.domesdaybook.matcher; import java.io.IOException; import net.domesdaybook.reader.Reader; /** * An interface for classes that can match bytes from a given position. * * @author Matt Palmer */ public interface Matcher { /** * Returns whether there is a match or not at the given position in a Reader. * * @param reader The {@link Reader} to read from. 
* @param matchPosition The position to try to match at. * @return Whether there is a match at the given position. * @throws IOException if the Reader cannot read. */ public boolean matches(final Reader reader, final long matchPosition) throws IOException; /** * Returns whether there is a match or not at the given position in a byte array. * * @param bytes An array of bytes to read from. * @param matchPosition The position to try to match at. * @return Whether there is a match at the given position. */ public boolean matches(final byte[] bytes, final int matchPosition); }
Tweaked javadoc.
src/net/domesdaybook/matcher/Matcher.java
Tweaked javadoc.
Java
bsd-3-clause
9cee8c9301ede2f47e548e092ef934020f2710b2
0
mkoistinen/JBookTrader,GabrielDancause/jbooktrader,mkoistinen/JBookTrader,GabrielDancause/jbooktrader,GabrielDancause/jbooktrader,mkoistinen/JBookTrader
package com.jbooktrader.platform.util; import com.jbooktrader.platform.marketdepth.*; import com.jbooktrader.platform.model.*; import com.jbooktrader.platform.startup.*; import java.io.*; import java.text.*; import java.util.*; /** * Converts historical market depth data from CME format to JBT format, and writes the data to a file. * The created data file can be used for backtesting and optimization of trading strategies. */ public class CMEDataConverter { private static final long RECORDING_START = 9 * 60 * 60 + 10 * 60; // 9:10:00 EDT private static final long RECORDING_END = 16 * 60 * 60 + 15 * 60; // 16:15:00 EDT private static final String LINE_SEP = System.getProperty("line.separator"); private final LinkedList<MarketDepthItem> bids, asks; private final PrintWriter writer; private final BufferedReader reader; private final SimpleDateFormat jbtDateFormat, cmeDateFormat; private final DecimalFormat decimalFormat; private final String contract; private long time; private long lineNumber; private final Calendar instant; private int lowBalance, highBalance; public static void main(String[] args) throws JBookTraderException { if (args.length != 4) { throw new JBookTraderException("Usage: <cmeFileName> <jbtFileName> <contract> <samplingFrequency>"); } CMEDataConverter dataConverter = new CMEDataConverter(args[0], args[1], args[2]); long samplingFrequency = Long.valueOf(args[3]); dataConverter.convert(samplingFrequency); } private CMEDataConverter(String cmeFileName, String jbtFileName, String contract) throws JBookTraderException { this.contract = contract; decimalFormat = NumberFormatterFactory.getNumberFormatter(5); jbtDateFormat = new SimpleDateFormat("MMddyy,HHmmss"); jbtDateFormat.setTimeZone(TimeZone.getTimeZone("America/New_York")); cmeDateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS"); cmeDateFormat.setLenient(false); cmeDateFormat.setTimeZone(TimeZone.getTimeZone("America/Chicago")); instant = 
Calendar.getInstance(TimeZone.getTimeZone("America/New_York")); bids = new LinkedList<MarketDepthItem>(); asks = new LinkedList<MarketDepthItem>(); for (int level = 0; level < 5; level++) { bids.add(null); asks.add(null); } lowBalance = 100; highBalance = -100; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(cmeFileName))); } catch (FileNotFoundException fnfe) { throw new JBookTraderException("Could not find file " + cmeFileName); } try { writer = new PrintWriter(new BufferedWriter(new FileWriter(jbtFileName, false))); } catch (IOException ioe) { throw new JBookTraderException("Could not create file " + jbtFileName); } System.out.println("Converting " + cmeFileName + " to " + jbtFileName); } private void write() { StringBuilder sb = new StringBuilder(); sb.append(jbtDateFormat.format(new Date(time))).append(","); sb.append(lowBalance).append(","); sb.append(highBalance).append(","); sb.append(decimalFormat.format(bids.getFirst().getPrice())); writer.println(sb); } private int getCumulativeSize(LinkedList<MarketDepthItem> items) { int cumulativeSize = 0; for (MarketDepthItem item : items) { if (item != null) { cumulativeSize += item.getSize(); } } return cumulativeSize; } private void updateMinMaxBalance() { int cumulativeBid = getCumulativeSize(bids); int cumulativeAsk = getCumulativeSize(asks); double totalDepth = cumulativeBid + cumulativeAsk; int balance = (int) (100. 
* (cumulativeBid - cumulativeAsk) / totalDepth); lowBalance = Math.min(balance, lowBalance); highBalance = Math.max(balance, highBalance); } private boolean isRecordable(Calendar instant) { int secondsOfDay = instant.get(Calendar.HOUR_OF_DAY) * 60 * 60 + instant.get(Calendar.MINUTE) * 60 + instant.get(Calendar.SECOND); return secondsOfDay >= RECORDING_START && secondsOfDay <= RECORDING_END; } private void convert(long samplingFrequency) { String line = null; try { long previousTime = 0; StringBuilder header = getHeader(); writer.println(header); System.out.println("Conversion started..."); while ((line = reader.readLine()) != null) { lineNumber++; if (lineNumber % 250000 == 0) { System.out.println(lineNumber + " lines converted"); } try { parse(line); updateMinMaxBalance(); instant.setTimeInMillis(time); if ((time - previousTime) >= samplingFrequency) { if (isRecordable(instant)) { write(); } lowBalance = 100; highBalance = -100; previousTime = time; } } catch (Exception e) { String errorMsg = "Problem parsing line #" + lineNumber + LINE_SEP; System.out.println(errorMsg); e.printStackTrace(); } } System.out.println("Done: " + lineNumber + " lines converted successfully."); } catch (Exception e) { String errorMsg = "Problem parsing line #" + lineNumber + LINE_SEP; errorMsg += line + LINE_SEP; String description = e.getMessage(); if (description == null) { description = e.toString(); } errorMsg += description; System.out.println(errorMsg); } finally { try { reader.close(); writer.close(); } catch (IOException ioe) { ioe.printStackTrace(); } } } private void parse(String line) throws ParseException { boolean isSpecifiedContract = (line.substring(49, 54).trim().equals(contract)); boolean isLimitOrderMessage = line.substring(33, 35).equals("MA"); if (isLimitOrderMessage && isSpecifiedContract) { String centiseconds = line.substring(14, 16); int millis = Integer.valueOf(centiseconds) * 10; String date = line.substring(17, 29) + line.substring(12, 14) + millis; time = 
cmeDateFormat.parse(date).getTime(); int position = 82; for (int level = 0; level < 5; level++) { if (line.charAt(76 + level) == '1') { int bidSize = Integer.parseInt(line.substring(position, position + 12)); position += 16; double bidPrice = Integer.valueOf(line.substring(position + 1, position + 19)) / 100d; position += 19; double askPrice = Integer.valueOf(line.substring(position + 1, position + 19)) / 100d; position += 23; int askSize = Integer.parseInt(line.substring(position, position + 12)); bids.set(level, new MarketDepthItem(bidSize, bidPrice)); asks.set(level, new MarketDepthItem(askSize, askPrice)); //System.out.println("Level: " + level + " Bid: " + bidPrice + " BidSize: " + bidSize + " Ask price:" + askPrice + " Ask Size:" + askSize); position += 14; } } } } private StringBuilder getHeader() { StringBuilder header = new StringBuilder(); header.append("# This historical data file is created by " + JBookTrader.APP_NAME).append(LINE_SEP); header.append("# Each line represents the order book at a particular time and contains 5 columns:").append(LINE_SEP); header.append("# date, time, lowBalance, highBalance, bid").append(LINE_SEP); header.append("# 1. date is in the MMddyy format").append(LINE_SEP); header.append("# 2. time is in the HHmmss format").append(LINE_SEP); header.append("# 3. lowBalance is the period's lowest balance between cumulativeBidSize and cumulativeAskSize as percentage").append(LINE_SEP); header.append("# 4. highBalance is the period's highest balance between cumulativeBidSize and cumulativeAskSize as percentage").append(LINE_SEP); header.append("# 5. bid is the best (highest) bid price").append(LINE_SEP); header.append(LINE_SEP); header.append("timeZone=").append(jbtDateFormat.getTimeZone().getID()).append(LINE_SEP); header.append("bidAskSpread=0.25").append(LINE_SEP); return header; } }
source/com/jbooktrader/platform/util/CMEDataConverter.java
package com.jbooktrader.platform.util; import com.jbooktrader.platform.marketdepth.*; import com.jbooktrader.platform.model.*; import com.jbooktrader.platform.startup.*; import java.io.*; import java.text.*; import java.util.*; /** * Converts historical market depth data from CME format to JBT format, and writes the data to a file. * The created data file can be used for backtesting and optimization of trading strategies. */ public class CMEDataConverter { private static final String LINE_SEP = System.getProperty("line.separator"); private final LinkedList<MarketDepthItem> bids, asks; private final PrintWriter writer; private final BufferedReader reader; private final SimpleDateFormat jbtDateFormat, cmeDateFormat; private final DecimalFormat decimalFormat; private final String contract; private long time; private long lineNumber; private final Calendar instant; public static void main(String[] args) throws JBookTraderException { if (args.length != 4) { throw new JBookTraderException("Usage: <cmeFileName> <jbtFileName> <contract> <samplingFrequency>"); } CMEDataConverter dataConverter = new CMEDataConverter(args[0], args[1], args[2]); long samplingFrequency = Long.valueOf(args[3]); dataConverter.convert(samplingFrequency); } private CMEDataConverter(String cmeFileName, String jbtFileName, String contract) throws JBookTraderException { this.contract = contract; decimalFormat = NumberFormatterFactory.getNumberFormatter(5); jbtDateFormat = new SimpleDateFormat("MMddyy,HH:mm:ss.SSS"); jbtDateFormat.setTimeZone(TimeZone.getTimeZone("America/New_York")); cmeDateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS"); cmeDateFormat.setLenient(false); cmeDateFormat.setTimeZone(TimeZone.getTimeZone("America/Chicago")); instant = Calendar.getInstance(TimeZone.getTimeZone("America/New_York")); bids = new LinkedList<MarketDepthItem>(); asks = new LinkedList<MarketDepthItem>(); for (int level = 0; level < 5; level++) { bids.add(null); asks.add(null); } try { reader = new 
BufferedReader(new InputStreamReader(new FileInputStream(cmeFileName))); } catch (FileNotFoundException fnfe) { throw new JBookTraderException("Could not find file " + cmeFileName); } try { writer = new PrintWriter(new BufferedWriter(new FileWriter(jbtFileName, false))); } catch (IOException ioe) { throw new JBookTraderException("Could not create file " + jbtFileName); } System.out.println("Converting " + cmeFileName + " to " + jbtFileName); } private void write() { StringBuilder sb = new StringBuilder(); sb.append(jbtDateFormat.format(new Date(time))); sb.append(";");// separator after date and time for (MarketDepthItem item : bids) { if (item != null) { sb.append(item.getSize()).append(","); sb.append(decimalFormat.format(item.getPrice())).append(","); } } sb.deleteCharAt(sb.length() - 1); sb.append(";");// separator between bids and asks for (MarketDepthItem item : asks) { if (item != null) { sb.append(item.getSize()).append(","); sb.append(decimalFormat.format(item.getPrice())).append(","); } } sb.deleteCharAt(sb.length() - 1); writer.println(sb); } private void convert(long samplingFrequency) { String line = null; try { long previousTime = 0; StringBuilder header = getHeader(); writer.println(header); System.out.println("Conversion started..."); while ((line = reader.readLine()) != null) { lineNumber++; if (lineNumber % 250000 == 0) { System.out.println(lineNumber + " lines read"); } try { parse(line); instant.setTimeInMillis(time); int minutesOfDay = instant.get(Calendar.HOUR_OF_DAY) * 60 + instant.get(Calendar.MINUTE); boolean inDaySession = (minutesOfDay >= 9 * 60 + 15 && minutesOfDay < 16 * 60 + 15); if (inDaySession && (time - previousTime) >= samplingFrequency) { previousTime = time; write(); } } catch (Exception e) { String errorMsg = "Problem parsing line #" + lineNumber + LINE_SEP; System.out.println(errorMsg); e.printStackTrace(); } } System.out.println("Done: " + lineNumber + " lines read and converted successfully."); } catch (Exception e) { String 
errorMsg = "Problem parsing line #" + lineNumber + LINE_SEP; errorMsg += line + LINE_SEP; String description = e.getMessage(); if (description == null) { description = e.toString(); } errorMsg += description; System.out.println(errorMsg); } finally { try { reader.close(); writer.close(); } catch (IOException ioe) { ioe.printStackTrace(); } } } private void parse(String line) throws ParseException { // This needs to be investigated deeper. Frost suggested that the lines // containing line.charAt(35) == '0' can be skipped boolean isValidLine = (line.charAt(35) != '0'); boolean isSpecifiedContract = (line.substring(49, 54).trim().equals(contract)); boolean isLimitOrderMessage = line.substring(33, 35).equals("MA"); if (isLimitOrderMessage && isValidLine && isSpecifiedContract) { String centiseconds = line.substring(14, 16); int millis = Integer.valueOf(centiseconds) * 10; String date = line.substring(17, 29) + line.substring(12, 14) + millis; time = cmeDateFormat.parse(date).getTime(); int position = 82; for (int level = 0; level < 5; level++) { if (line.charAt(76 + level) == '1') { int bidSize = Integer.parseInt(line.substring(position, position + 12)); position += 16; //int bidPriceDecimalLocator = Integer.valueOf(line.substring(position, position + 1)); double bidPrice = Integer.valueOf(line.substring(position + 1, position + 19)) / 100d; position += 19; //int askPriceDecimalLocator = Integer.valueOf(line.substring(position, position + 1)); double askPrice = Integer.valueOf(line.substring(position + 1, position + 19)) / 100d; position += 23; int askSize = Integer.parseInt(line.substring(position, position + 12)); bids.set(level, new MarketDepthItem(bidSize, bidPrice)); asks.set(level, new MarketDepthItem(askSize, askPrice)); //System.out.println("Level: " + level + " Bid: " + bidPrice + " BidSize: " + bidSize + " Ask price:" + askPrice + " Ask Size:" + askSize); position += 14; } } } } private StringBuilder getHeader() { StringBuilder header = new StringBuilder(); 
header.append("# This historical data file is created by " + JBookTrader.APP_NAME).append(LINE_SEP); header.append("# Each line represents the order book at a particular time and contains 3 sections,:").append(LINE_SEP); header.append("# separated by semicolons as follows:").append(LINE_SEP); header.append("# {date, time}; {bids}; {asks}").append(LINE_SEP); header.append("# The date is in the MMddyy format, and the time in the HH:mm:ss.SSS format").append(LINE_SEP); header.append("# The {bids} section has a variable number of comma-separated columns").append(LINE_SEP); header.append("# and contains bids (each defined by bid size and bid price), starting from the highest bid price").append(LINE_SEP); header.append("# The {asks} section has a variable number of comma-separated columns").append(LINE_SEP); header.append("# and contains asks (each defined by ask size and ask price), starting from the lowest ask price").append(LINE_SEP); header.append(LINE_SEP); header.append("timeZone=").append(jbtDateFormat.getTimeZone().getID()).append(LINE_SEP); return header; } }
conversion from CME to new JBT format
source/com/jbooktrader/platform/util/CMEDataConverter.java
conversion from CME to new JBT format
Java
mit
082a1e8b72b449b1ffba8a3f96cabfb43771d46a
0
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.plugins.magic; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.bukkit.Material; import org.bukkit.Server; import org.bukkit.block.Block; import org.bukkit.entity.Player; import org.bukkit.event.block.Action; import org.bukkit.event.entity.EntityDamageEvent; import org.bukkit.event.entity.EntityDeathEvent; import org.bukkit.event.player.PlayerAnimationEvent; import org.bukkit.event.player.PlayerAnimationType; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.event.player.PlayerMoveEvent; import org.bukkit.event.player.PlayerQuitEvent; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.scheduler.BukkitScheduler; import com.elmakers.mine.bukkit.dao.BlockList; import com.elmakers.mine.bukkit.plugins.magic.spells.AbsorbSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.AlterSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.ArrowSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BlastSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BlinkSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BoomSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BridgeSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.ConstructSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.CushionSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.DisintegrateSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FamiliarSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FillSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FireSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FireballSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FlingSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.ForceSpell; import 
com.elmakers.mine.bukkit.plugins.magic.spells.FrostSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.GillsSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.GotoSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.GrenadeSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.HealSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.InvincibleSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.InvisibilitySpell; import com.elmakers.mine.bukkit.plugins.magic.spells.LavaSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.LevitateSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.LightningSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.MineSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.PeekSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.PillarSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.PortalSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.RecallSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.SignSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.TorchSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.TreeSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.UndoSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.WeatherSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.WolfSpell; import com.elmakers.mine.bukkit.utilities.PluginProperties; import com.elmakers.mine.bukkit.utilities.UndoQueue; import com.nijiko.permissions.PermissionHandler; public class Spells { /* * Public API - Use for hooking up a plugin, or calling a spell */ public Spell getSpell(Material material, Player player) { Spell spell = spellsByMaterial.get(material); if (spell == null || !spell.hasSpellPermission(player)) return null; return getSpell(spell.getName(), player); } public Spell getSpell(String name, Player player) { Spell spell = spells.get(name); if (spell == null || !spell.hasSpellPermission(player)) return null; 
PlayerSpells playerSpells = getPlayerSpells(player); Spell playerSpell = playerSpells.getSpell(spell.getName()); if (playerSpell == null) { playerSpell = (Spell) spell.clone(); playerSpell.setPlayer(player); playerSpells.addSpell(playerSpell); } return playerSpell; } public PlayerSpells getPlayerSpells(Player player) { PlayerSpells spells = playerSpells.get(player.getName()); if (spells == null) { spells = new PlayerSpells(player); playerSpells.put(player.getName(), spells); } return spells; } protected void loadSpells() { loadSpell(new AbsorbSpell(), "absorb", Material.BUCKET, "Absorb some of the target", "construction", ""); loadSpell(new AlterSpell(), "alter", Material.REDSTONE_TORCH_ON, "Alter certain objects", "construction", ""); loadSpell(new ArrowSpell(), "arrow", Material.ARROW, "Fire a magic arrow", "combat", ""); loadSpell(new ArrowSpell(), "arrowrain", Material.BOW, "Fire a volley of arrows", "combat", "4"); loadSpell(new BlastSpell(), "blast", Material.SULPHUR, "Mine out a large area", "mining", ""); loadSpell(new BlastSpell(), "superblast", Material.SLIME_BALL, "Mine out a very large area", "mining", "16"); loadSpell(new BlinkSpell(), "blink", Material.FEATHER, "Teleport to your target", "psychic", ""); loadSpell(new BlinkSpell(), "ascend", Material.RED_MUSHROOM, "Go up to the nearest safe spot", "psychic", "ascend"); loadSpell(new BlinkSpell(), "descend", Material.BROWN_MUSHROOM, "Travel underground", "psychic", "descend"); loadSpell(new BlinkSpell(), "tesseract", Material.WEB, "Blink a short distance", "psychic", "8"); loadSpell(new BoomSpell(), "boom", Material.RED_ROSE, "Create an explosion", "combat", ""); loadSpell(new BoomSpell(), "kaboom", Material.REDSTONE_WIRE, "Create a big explosion", "combat", "6"); loadSpell(new BoomSpell(), "kamikazee", Material.DEAD_BUSH, "Kill yourself with an explosion", "combat", "8 here"); loadSpell(new BoomSpell(), "nuke", Material.BED, "Create a huge explosino", "combat", "20"); loadSpell(new BridgeSpell(), 
"bridge", Material.GOLD_HOE, "Extend the ground underneath you", "construction", ""); loadSpell(new ConstructSpell(), "blob", Material.CLAY_BALL, "Create a solid blob", "construction", "sphere 3"); loadSpell(new ConstructSpell(), "shell", Material.BOWL, "Create a large spherical shell", "construction", "sphere hollow 10"); loadSpell(new ConstructSpell(), "box", Material.GOLD_HELMET, "Create a large hollow box", "construction", "cuboid hollow 6"); loadSpell(new ConstructSpell(), "superblob", Material.CLAY_BRICK, "Create a large solid sphere", "construction", "sphere 8"); loadSpell(new ConstructSpell(), "sandblast", Material.SANDSTONE, "Drop a big block of sand", "combat", "cuboid 4 with sand"); loadSpell(new CushionSpell(), "cushion", Material.SOUL_SAND, "Create a safety bubble", "alchemy", ""); loadSpell(new DisintegrateSpell(), "disintegrate", Material.BONE, "Damage your target", "combat", ""); loadSpell(new FamiliarSpell(), "familiar", Material.EGG, "Create an animal familiar", "summoner", ""); loadSpell(new FamiliarSpell(), "monster", Material.PUMPKIN, "Call a monster to your side", "summoner", "monster"); loadSpell(new FamiliarSpell(), "mob", Material.JACK_O_LANTERN, "Summon a mob of monsters", "summoner", "mob 20"); loadSpell(new FamiliarSpell(), "farm", Material.WHEAT, "Create a herd", "farming", "30"); loadSpell(new FillSpell(), "fill", Material.GOLD_SPADE, "Fill a selected area (cast twice)", "construction", ""); loadSpell(new FillSpell(), "paint", Material.PAINTING, "Fill a single block", "alchemy", "single"); loadSpell(new FillSpell(), "recurse", Material.WOOD_SPADE, "Recursively fill blocks", "alchemy", "recurse"); loadSpell(new FireballSpell(), "fireball", Material.NETHERRACK, "Cast an exploding fireball", "combat", "", 1500); loadSpell(new FireSpell(), "fire", Material.FLINT_AND_STEEL, "Light fires from a distance", "elemental", ""); loadSpell(new FireSpell(), "inferno", Material.FIRE, "Burn a wide area", "master", "6"); loadSpell(new FlingSpell(), 
"fling", Material.IRON_BOOTS, "Sends you flying in the target direction", "psychic", "5"); loadSpell(new ForceSpell(), "force", Material.STRING, "Use telekinesis", "psychic", ""); loadSpell(new ForceSpell(), "pull", Material.FISHING_ROD, "Pull things toward you", "psychic", "pull"); loadSpell(new ForceSpell(), "push", Material.RAILS, "Push things away from you", "psychic", "push"); loadSpell(new FrostSpell(), "frost", Material.SNOW_BALL, "Freeze water and create snow", "alchemy", ""); loadSpell(new GillsSpell(), "gills", Material.RAW_FISH, "Restores health while moving underwater", "medic", ""); loadSpell(new GotoSpell(), "gather", Material.GLOWSTONE_DUST, "Gather groups of players together", "master", ""); loadSpell(new GrenadeSpell(), "grenade", Material.TNT, "Place a primed grenade", "combat", "", 2000); loadSpell(new HealSpell(), "heal", Material.BREAD, "Heal yourself or others", "medic", "", 5000); loadSpell(new InvincibleSpell(), "invincible", Material.GOLDEN_APPLE, "Make yourself impervious to damage", "master", ""); loadSpell(new InvincibleSpell(), "ironskin", Material.IRON_CHESTPLATE, "Protect you from damage", "master", "99"); loadSpell(new InvincibleSpell(), "leatherskin", Material.LEATHER_CHESTPLATE, "Protect you from some damage", "combat", "50"); loadSpell(new InvisibilitySpell(), "cloak", Material.CHAINMAIL_CHESTPLATE, "Make yourself invisible while still", "psychic", ""); loadSpell(new LavaSpell(), "lava", Material.LAVA, "Fire a stream of lava", "combat", ""); loadSpell(new LevitateSpell(), "levitate", Material.GOLD_BOOTS, "Levitate yourself up into the air", "psychic", ""); loadSpell(new LightningSpell(), "lightning", Material.COOKED_FISH, "Strike lighting at your target", "combat", ""); loadSpell(new LightningSpell(), "storm", Material.GRILLED_PORK, "Start a lightning storm", "elemental", "10", 2000); loadSpell(new MineSpell(), "mine", Material.GOLD_PICKAXE, "Mines and drops the targeted resources", "mining", ""); loadSpell(new PeekSpell(), 
"peek", Material.SUGAR_CANE, "Temporarily glass your target surface", "psychic", ""); loadSpell(new PillarSpell(), "pillar", Material.GOLD_AXE, "Raises a pillar up", "construction", ""); loadSpell(new PillarSpell(), "stalactite", Material.WOOD_AXE, "Create a downward pillar", "construction", "down"); loadSpell(new PortalSpell(), "portal", Material.PORTAL, "Create two connected portals", "psychic", ""); loadSpell(new RecallSpell(), "recall", Material.COMPASS, "Marks locations for return", "exploration", ""); loadSpell(new RecallSpell(), "spawn", Material.YELLOW_FLOWER, "Take yourself back home", "exploration", "spawn"); loadSpell(new SignSpell(), "sign", Material.SIGN_POST, "Give yourself some signs", "master", ""); loadSpell(new SignSpell(), "tag", Material.SIGN, "Leave a sign with your name", "exploration", "tag", 30000); loadSpell(new TorchSpell(), "torch", Material.TORCH, "Shed some light", "exploration", ""); loadSpell(new TorchSpell(), "day", Material.FLINT, "Change time time to day", "elemental", "day"); loadSpell(new TorchSpell(), "night", Material.COAL, "Change time time to night", "elemental", "night"); loadSpell(new TreeSpell(), "tree", Material.SAPLING, "Instantly grow a tree", "farming", ""); loadSpell(new UndoSpell(), "rewind", Material.WATCH, "Undo your last action", "alchemy", ""); loadSpell(new UndoSpell(), "erase", Material.LEVER, "Undo your target construction", "alchemy", ""); loadSpell(new WeatherSpell(), "weather", Material.WATER, "Change the weather", "elemental", ""); loadSpell(new WolfSpell(), "wolf", Material.PORK, "Create a wolf familiar to follow you around", "summoner", "", 5000); } public void loadSpell(Spell template, String name, Material icon, String description, String category, String parameterString) { loadSpell(template, name, icon, description, category, parameterString, 0); } public void loadSpell(Spell template, String name, Material icon, String description, String category, String parameterString, int cooldown) { String[] 
parameters = parameterString.split(" "); template.load(name, description, category, icon, parameters, cooldown); addSpell(template); } public void addSpell(Spell variant) { Spell conflict = spells.get(variant.getName()); if (conflict != null) { log.log(Level.WARNING, "Duplicate spell name: '" + conflict.getName() + "'"); } else { spells.put(variant.getName(), variant); } Material m = variant.getMaterial(); if (m != null && m != Material.AIR) { if (buildingMaterials.contains(m)) { log.warning("Spell " + variant.getName() + " uses building material as icon: " + m.name().toLowerCase()); } conflict = spellsByMaterial.get(m); if (conflict != null) { log.log(Level.WARNING, "Duplicate spell material: " + m.name() + " for " + conflict.getName() + " and " + variant.getName()); } else { spellsByMaterial.put(variant.getMaterial(), variant); } } variant.initialize(this); } /* * Material use system */ public List<Material> getBuildingMaterials() { return buildingMaterials; } /* * Undo system */ public UndoQueue getUndoQueue(String playerName) { UndoQueue queue = playerUndoQueues.get(playerName); if (queue == null) { queue = new UndoQueue(); queue.setMaxSize(undoQueueDepth); playerUndoQueues.put(playerName, queue); } return queue; } public void addToUndoQueue(Player player, BlockList blocks) { UndoQueue queue = getUndoQueue(player.getName()); queue.add(blocks); } public boolean undoAny(Player player, Block target) { for (String playerName : playerUndoQueues.keySet()) { UndoQueue queue = playerUndoQueues.get(playerName); if (queue.undo(target)) { if (!player.getName().equals(playerName)) { player.sendMessage("Undid one of " + playerName + "'s spells"); } return true; } } return false; } public boolean undo(String playerName) { UndoQueue queue = getUndoQueue(playerName); return queue.undo(); } public boolean undo(String playerName, Block target) { UndoQueue queue = getUndoQueue(playerName); return queue.undo(target); } public BlockList getLastBlockList(String playerName, Block 
target) { UndoQueue queue = getUndoQueue(playerName); return queue.getLast(target); } public BlockList getLastBlockList(String playerName) { UndoQueue queue = getUndoQueue(playerName); return queue.getLast(); } public void scheduleCleanup(BlockList blocks) { Server server = plugin.getServer(); BukkitScheduler scheduler = server.getScheduler(); // scheduler works in ticks- 20 ticks per second. long ticksToLive = blocks.getTimeToLive() * 20 / 1000; scheduler.scheduleSyncDelayedTask(plugin, new CleanupBlocksTask(blocks), ticksToLive); } /* * Event registration- call to listen for events */ public void registerEvent(SpellEventType type, Spell spell) { PlayerSpells spells = getPlayerSpells(spell.getPlayer()); spells.registerEvent(type, spell); } public void unregisterEvent(SpellEventType type, Spell spell) { PlayerSpells spells = getPlayerSpells(spell.getPlayer()); spells.registerEvent(type, spell); } /* * Random utility functions */ public int getWandTypeId() { return wandTypeId; } public void cancel(Player player) { PlayerSpells playerSpells = getPlayerSpells(player); playerSpells.cancel(); } public boolean isQuiet() { return quiet; } public boolean isSilent() { return silent; } public boolean isSolid(Material mat) { return (mat != Material.AIR && mat != Material.WATER && mat != Material.STATIONARY_WATER && mat != Material.LAVA && mat != Material.STATIONARY_LAVA); } public boolean isSticky(Material mat) { return stickyMaterials.contains(mat); } public boolean isStickyAndTall(Material mat) { return stickyMaterialsDoubleHeight.contains(mat); } public boolean isAffectedByGravity(Material mat) { // DOORS are on this list, it's a bit of a hack, but if you consider // them // as two separate blocks, the top one of which "breaks" when the bottom // one does, // it applies- but only really in the context of the auto-undo system, // so this should probably be its own mat list, ultimately. 
return (mat == Material.GRAVEL || mat == Material.SAND || mat == Material.WOOD_DOOR || mat == Material.IRON_DOOR); } /* * Get the log, if you need to debug or log errors. */ public Logger getLog() { return log; } public MagicPlugin getPlugin() { return plugin; } /* * Internal functions - don't call these, or really anything below here. */ /* * Saving and loading */ public void initialize(MagicPlugin plugin) { this.plugin = plugin; load(); log.info("Magic: Loaded " + spells.size() + " spells."); } public void load() { loadSpells(); loadProperties(); } protected void loadProperties() { File dataFolder = plugin.getDataFolder(); dataFolder.mkdirs(); File pFile = new File(dataFolder, propertiesFile); PluginProperties properties = new PluginProperties(pFile.getAbsolutePath()); properties.load(); undoQueueDepth = properties.getInteger("spells-general-undo-depth", undoQueueDepth); silent = properties.getBoolean("spells-general-silent", silent); quiet = properties.getBoolean("spells-general-quiet", quiet); stickyMaterials = PluginProperties.parseMaterials(STICKY_MATERIALS); stickyMaterialsDoubleHeight = PluginProperties.parseMaterials(STICKY_MATERIALS_DOUBLE_HEIGHT); buildingMaterials = properties.getMaterials("spells-general-building", DEFAULT_BUILDING_MATERIALS); wandTypeId = properties.getInteger("wand-type-id", wandTypeId); for (Spell spell : spells.values()) { spell.onLoad(properties); } properties.save(); } public void clear() { playerSpells.clear(); spells.clear(); spellsByMaterial.clear(); } /* * Listeners / callbacks */ public void onPlayerQuit(PlayerQuitEvent event) { PlayerSpells spells = getPlayerSpells(event.getPlayer()); spells.onPlayerQuit(event); } public void onPlayerMove(PlayerMoveEvent event) { PlayerSpells spells = getPlayerSpells(event.getPlayer()); spells.onPlayerMove(event); } public void onPlayerDeath(Player player, EntityDeathEvent event) { PlayerSpells spells = getPlayerSpells(player); spells.onPlayerDeath(event); } public void 
onPlayerDamage(Player player, EntityDamageEvent event) { PlayerSpells spells = getPlayerSpells(player); spells.onPlayerDamage(event); } public List<Spell> getAllSpells() { List<Spell> allSpells = new ArrayList<Spell>(); allSpells.addAll(spells.values()); return allSpells; } /** * Called when a player plays an animation, such as an arm swing * * @param event * Relevant event details */ public void onPlayerAnimation(PlayerAnimationEvent event) { Player player = event.getPlayer(); if (event.getAnimationType() == PlayerAnimationType.ARM_SWING) { if (event.getPlayer().getInventory().getItemInHand().getTypeId() == getWandTypeId()) { if (!hasWandPermission(player)) { return; } Inventory inventory = player.getInventory(); ItemStack[] contents = inventory.getContents(); Spell spell = null; for (int i = 0; i < 9; i++) { if (contents[i].getType() == Material.AIR || contents[i].getTypeId() == getWandTypeId()) { continue; } spell = getSpell(contents[i].getType(), player); if (spell != null) { break; } } if (spell != null) { spell.cast(); } } } } @SuppressWarnings("deprecation") public boolean cycleMaterials(Player player) { List<Material> buildingMaterials = getBuildingMaterials(); PlayerInventory inventory = player.getInventory(); ItemStack[] contents = inventory.getContents(); int firstMaterialSlot = 8; boolean foundAir = false; for (int i = 8; i >= 0; i--) { Material mat = contents[i] == null ? 
Material.AIR : contents[i].getType(); if (mat == Material.AIR) { if (foundAir) { break; } else { foundAir = true; firstMaterialSlot = i; continue; } } else { if (buildingMaterials.contains(mat)) { firstMaterialSlot = i; continue; } else { break; } } } if (firstMaterialSlot == 8) return false; ItemStack lastSlot = contents[8]; for (int i = 7; i >= firstMaterialSlot; i--) { contents[i + 1] = contents[i]; } contents[firstMaterialSlot] = lastSlot; inventory.setContents(contents); player.updateInventory(); return true; } @SuppressWarnings("deprecation") public void cycleSpells(Player player) { Inventory inventory = player.getInventory(); ItemStack[] contents = inventory.getContents(); ItemStack[] active = new ItemStack[9]; for (int i = 0; i < 9; i++) { active[i] = contents[i]; } int maxSpellSlot = 0; int firstSpellSlot = -1; for (int i = 0; i < 9; i++) { boolean isEmpty = active[i] == null; Material activeType = isEmpty ? Material.AIR : active[i].getType(); boolean isWand = activeType.getId() == getWandTypeId(); boolean isSpell = false; if (activeType != Material.AIR) { Spell spell = getSpell(activeType, player); isSpell = spell != null; } if (isSpell) { if (firstSpellSlot < 0) firstSpellSlot = i; maxSpellSlot = i; } else { if (!isWand && firstSpellSlot >= 0) { break; } } } int numSpellSlots = firstSpellSlot < 0 ? 0 : maxSpellSlot - firstSpellSlot + 1; if (numSpellSlots < 2) { return; } for (int ddi = 0; ddi < numSpellSlots; ddi++) { int i = ddi + firstSpellSlot; Material contentsType = contents[i] == null ? Material.AIR : active[i].getType(); if (contentsType.getId() != getWandTypeId()) { for (int di = 1; di < numSpellSlots; di++) { int dni = (ddi + di) % numSpellSlots; int ni = dni + firstSpellSlot; Material activeType = active[ni] == null ? 
Material.AIR : active[ni].getType(); if (activeType.getId() != getWandTypeId()) { contents[i] = active[ni]; break; } } } } inventory.setContents(contents); player.updateInventory(); } /** * Called when a player uses an item * * @param event * Relevant event details */ public void onPlayerInteract(PlayerInteractEvent event) { if (event.getAction() == Action.RIGHT_CLICK_AIR || event.getAction() == Action.RIGHT_CLICK_BLOCK) { cancel(event.getPlayer()); int materialId = event.getPlayer().getInventory().getItemInHand().getTypeId(); Player player = event.getPlayer(); if (!hasWandPermission(player)) { return; } boolean cycleSpells = false; cycleSpells = player.isSneaking(); if (materialId == getWandTypeId()) { if (cycleSpells) { if (!cycleMaterials(event.getPlayer())) { cycleSpells(event.getPlayer()); } } else { cycleSpells(event.getPlayer()); } } } } public boolean allowPhysics(Block block) { if (physicsDisableTimeout == 0) return true; if (System.currentTimeMillis() > physicsDisableTimeout) physicsDisableTimeout = 0; return false; } public void disablePhysics(int interval) { physicsDisableTimeout = System.currentTimeMillis() + interval; } public boolean hasWandPermission(Player player) { return hasPermission(player, "Magic.wand.use"); } public boolean hasPermission(Player player, String pNode, boolean defaultValue) { PermissionHandler permissions = MagicPlugin.getPermissionHandler(); if (permissions == null) { return defaultValue; } return permissions.has(player, pNode); } public boolean hasPermission(Player player, String pNode) { return hasPermission(player, pNode, true); } /* * Private data */ private final String propertiesFile = "magic.properties"; private int wandTypeId = 280; static final String DEFAULT_BUILDING_MATERIALS = "0,1,2,3,4,5,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,33,34,35,41,42,43,45,46,47,48,49,52,53,55,56,57,58,60,61,62,65,66,67,73,74,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96"; static final String 
STICKY_MATERIALS = "37,38,39,50,51,55,59,63,64,65,66,68,70,71,72,75,76,77,78,83"; static final String STICKY_MATERIALS_DOUBLE_HEIGHT = "64,71,"; private List<Material> buildingMaterials = new ArrayList<Material>(); private List<Material> stickyMaterials = new ArrayList<Material>(); private List<Material> stickyMaterialsDoubleHeight = new ArrayList<Material>(); private long physicsDisableTimeout = 0; private int undoQueueDepth = 256; private boolean silent = false; private boolean quiet = true; private HashMap<String, UndoQueue> playerUndoQueues = new HashMap<String, UndoQueue>(); private final Logger log = Logger.getLogger("Minecraft"); private final HashMap<String, Spell> spells = new HashMap<String, Spell>(); private final HashMap<Material, Spell> spellsByMaterial = new HashMap<Material, Spell>(); private final HashMap<String, PlayerSpells> playerSpells = new HashMap<String, PlayerSpells>(); private MagicPlugin plugin = null; }
src/main/java/com/elmakers/mine/bukkit/plugins/magic/Spells.java
package com.elmakers.mine.bukkit.plugins.magic; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.bukkit.Material; import org.bukkit.Server; import org.bukkit.block.Block; import org.bukkit.entity.Player; import org.bukkit.event.block.Action; import org.bukkit.event.entity.EntityDamageEvent; import org.bukkit.event.entity.EntityDeathEvent; import org.bukkit.event.player.PlayerAnimationEvent; import org.bukkit.event.player.PlayerAnimationType; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.event.player.PlayerMoveEvent; import org.bukkit.event.player.PlayerQuitEvent; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.scheduler.BukkitScheduler; import com.elmakers.mine.bukkit.dao.BlockList; import com.elmakers.mine.bukkit.plugins.magic.spells.AbsorbSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.AlterSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.ArrowSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BlastSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BlinkSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BoomSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.BridgeSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.ConstructSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.CushionSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.DisintegrateSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FamiliarSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FillSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FireSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FireballSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.FlingSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.ForceSpell; import 
com.elmakers.mine.bukkit.plugins.magic.spells.FrostSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.GillsSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.GotoSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.GrenadeSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.HealSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.InvincibleSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.InvisibilitySpell; import com.elmakers.mine.bukkit.plugins.magic.spells.LavaSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.LevitateSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.LightningSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.MineSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.PeekSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.PillarSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.PortalSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.RecallSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.SignSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.TorchSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.TreeSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.UndoSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.WeatherSpell; import com.elmakers.mine.bukkit.plugins.magic.spells.WolfSpell; import com.elmakers.mine.bukkit.utilities.PluginProperties; import com.elmakers.mine.bukkit.utilities.UndoQueue; import com.nijiko.permissions.PermissionHandler; public class Spells { /* * Public API - Use for hooking up a plugin, or calling a spell */ public Spell getSpell(Material material, Player player) { Spell spell = spellsByMaterial.get(material); if (spell == null || !spell.hasSpellPermission(player)) return null; return getSpell(spell.getName(), player); } public Spell getSpell(String name, Player player) { Spell spell = spells.get(name); if (spell == null || !spell.hasSpellPermission(player)) return null; 
PlayerSpells playerSpells = getPlayerSpells(player); Spell playerSpell = playerSpells.getSpell(spell.getName()); if (playerSpell == null) { playerSpell = (Spell) spell.clone(); playerSpell.setPlayer(player); playerSpells.addSpell(playerSpell); } return playerSpell; } public PlayerSpells getPlayerSpells(Player player) { PlayerSpells spells = playerSpells.get(player.getName()); if (spells == null) { spells = new PlayerSpells(player); playerSpells.put(player.getName(), spells); } return spells; } protected void loadSpells() { loadSpell(new AbsorbSpell(), "absorb", Material.BUCKET, "Absorb some of the target", "construction", ""); loadSpell(new AlterSpell(), "alter", Material.REDSTONE_TORCH_ON, "Alter certain objects", "construction", ""); loadSpell(new ArrowSpell(), "arrow", Material.ARROW, "Fire a magic arrow", "combat", ""); loadSpell(new ArrowSpell(), "arrowrain", Material.BOW, "Fire a volley of arrows", "combat", "4"); loadSpell(new BlastSpell(), "blast", Material.SULPHUR, "Mine out a large area", "mining", ""); loadSpell(new BlastSpell(), "superblast", Material.SLIME_BALL, "Mine out a very large area", "mining", "16"); loadSpell(new BlinkSpell(), "blink", Material.FEATHER, "Teleport to your target", "psychic", ""); loadSpell(new BlinkSpell(), "ascend", Material.RED_MUSHROOM, "Go up to the nearest safe spot", "psychic", "ascend"); loadSpell(new BlinkSpell(), "descend", Material.BROWN_MUSHROOM, "Travel underground", "psychic", "descend"); loadSpell(new BlinkSpell(), "tesseract", Material.WEB, "Blink a short distance", "psychic", "8"); loadSpell(new BoomSpell(), "boom", Material.RED_ROSE, "Create an explosion", "combat", ""); loadSpell(new BoomSpell(), "kaboom", Material.REDSTONE_WIRE, "Create a big explosion", "combat", "6"); loadSpell(new BoomSpell(), "kamikazee", Material.DEAD_BUSH, "Kill yourself with an explosion", "combat", "8 here"); loadSpell(new BoomSpell(), "nuke", Material.BED, "Create a huge explosino", "combat", "20"); loadSpell(new BridgeSpell(), 
"bridge", Material.GOLD_HOE, "Extend the ground underneath you", "construction", ""); loadSpell(new ConstructSpell(), "blob", Material.CLAY_BALL, "Create a solid blob", "construction", "sphere 3"); loadSpell(new ConstructSpell(), "shell", Material.BOWL, "Create a large spherical shell", "construction", "sphere hollow 10"); loadSpell(new ConstructSpell(), "box", Material.GOLD_HELMET, "Create a large hollow box", "construction", "cuboid hollow 6"); loadSpell(new ConstructSpell(), "superblob", Material.CLAY_BRICK, "Create a large solid sphere", "construction", "sphere 8"); loadSpell(new ConstructSpell(), "sandblast", Material.SANDSTONE, "Drop a big block of sand", "combat", "cuboid 4 with sand"); loadSpell(new CushionSpell(), "cushion", Material.SOUL_SAND, "Create a safety bubble", "alchemy", ""); loadSpell(new DisintegrateSpell(), "disintegrate", Material.BONE, "Damage your target", "combat", ""); loadSpell(new FamiliarSpell(), "familiar", Material.EGG, "Create an animal familiar", "summoner", ""); loadSpell(new FamiliarSpell(), "monster", Material.PUMPKIN, "Call a monster to your side", "summoner", "monster"); loadSpell(new FamiliarSpell(), "mob", Material.JACK_O_LANTERN, "Summon a mob of monsters", "summoner", "mob 20"); loadSpell(new FamiliarSpell(), "farm", Material.WHEAT, "Create a herd", "farming", "30"); loadSpell(new FillSpell(), "fill", Material.GOLD_SPADE, "Fill a selected area (cast twice)", "construction", ""); loadSpell(new FillSpell(), "paint", Material.PAINTING, "Fill a single block", "alchemy", "single"); loadSpell(new FillSpell(), "recurse", Material.WOOD_SPADE, "Recursively fill blocks", "alchemy", "recurse"); loadSpell(new FireballSpell(), "fireball", Material.NETHERRACK, "Cast an exploding fireball", "combat", "", 1500); loadSpell(new FireSpell(), "fire", Material.FLINT_AND_STEEL, "Light fires from a distance", "elemental", ""); loadSpell(new FireSpell(), "inferno", Material.FIRE, "Burn a wide area", "master", "6"); loadSpell(new FlingSpell(), 
"fling", Material.IRON_BOOTS, "Sends you flying in the target direction", "psychic", "5"); loadSpell(new FlingSpell(), "leap", Material.LEATHER_BOOTS, "Take a big leap", "psychic", "2"); loadSpell(new ForceSpell(), "force", Material.STRING, "Use telekinesis", "psychic", ""); loadSpell(new ForceSpell(), "pull", Material.FISHING_ROD, "Pull things toward you", "psychic", "pull"); loadSpell(new ForceSpell(), "push", Material.RAILS, "Push things away from you", "psychic", "push"); loadSpell(new FrostSpell(), "frost", Material.SNOW_BALL, "Freeze water and create snow", "alchemy", ""); loadSpell(new GillsSpell(), "gills", Material.RAW_FISH, "Restores health while moving underwater", "medic", ""); loadSpell(new GotoSpell(), "gather", Material.GLOWSTONE_DUST, "Gather groups of players together", "master", ""); loadSpell(new GrenadeSpell(), "grenade", Material.TNT, "Place a primed grenade", "combat", "", 2000); loadSpell(new HealSpell(), "heal", Material.BREAD, "Heal yourself or others", "medic", "", 5000); loadSpell(new InvincibleSpell(), "invincible", Material.GOLDEN_APPLE, "Make yourself impervious to damage", "master", ""); loadSpell(new InvincibleSpell(), "ironskin", Material.IRON_CHESTPLATE, "Protect you from damage", "master", "99"); loadSpell(new InvincibleSpell(), "leatherskin", Material.LEATHER_CHESTPLATE, "Protect you from some damage", "combat", "50"); loadSpell(new InvisibilitySpell(), "cloak", Material.CHAINMAIL_CHESTPLATE, "Make yourself invisible while still", "psychic", ""); loadSpell(new LavaSpell(), "lava", Material.LAVA, "Fire a stream of lava", "combat", ""); loadSpell(new LevitateSpell(), "levitate", Material.GOLD_BOOTS, "Levitate yourself up into the air", "psychic", ""); loadSpell(new LightningSpell(), "lightning", Material.COOKED_FISH, "Strike lighting at your target", "combat", ""); loadSpell(new LightningSpell(), "storm", Material.GRILLED_PORK, "Start a lightning storm", "elemental", "10", 2000); loadSpell(new MineSpell(), "mine", 
Material.GOLD_PICKAXE, "Mines and drops the targeted resources", "mining", ""); loadSpell(new PeekSpell(), "peek", Material.SUGAR_CANE, "Temporarily glass your target surface", "psychic", ""); loadSpell(new PillarSpell(), "pillar", Material.GOLD_AXE, "Raises a pillar up", "construction", ""); loadSpell(new PillarSpell(), "stalactite", Material.WOOD_AXE, "Create a downward pillar", "construction", "down"); loadSpell(new PortalSpell(), "portal", Material.PORTAL, "Create two connected portals", "psychic", ""); loadSpell(new RecallSpell(), "recall", Material.COMPASS, "Marks locations for return", "exploration", ""); loadSpell(new RecallSpell(), "spawn", Material.YELLOW_FLOWER, "Take yourself back home", "exploration", "spawn"); loadSpell(new SignSpell(), "sign", Material.SIGN_POST, "Give yourself some signs", "master", ""); loadSpell(new SignSpell(), "tag", Material.SIGN, "Leave a sign with your name", "exploration", "tag", 30000); loadSpell(new TorchSpell(), "torch", Material.TORCH, "Shed some light", "exploration", ""); loadSpell(new TorchSpell(), "day", Material.FLINT, "Change time time to day", "elemental", "day"); loadSpell(new TorchSpell(), "night", Material.COAL, "Change time time to night", "elemental", "night"); loadSpell(new TreeSpell(), "tree", Material.SAPLING, "Instantly grow a tree", "farming", ""); loadSpell(new UndoSpell(), "rewind", Material.WATCH, "Undo your last action", "alchemy", ""); loadSpell(new UndoSpell(), "erase", Material.LEVER, "Undo your target construction", "alchemy", ""); loadSpell(new WeatherSpell(), "weather", Material.WATER, "Change the weather", "elemental", ""); loadSpell(new WolfSpell(), "wolf", Material.PORK, "Create a wolf familiar to follow you around", "summoner", "", 5000); } public void loadSpell(Spell template, String name, Material icon, String description, String category, String parameterString) { loadSpell(template, name, icon, description, category, parameterString, 0); } public void loadSpell(Spell template, String 
name, Material icon, String description, String category, String parameterString, int cooldown) { String[] parameters = parameterString.split(" "); template.load(name, description, category, icon, parameters, cooldown); addSpell(template); } public void addSpell(Spell variant) { Spell conflict = spells.get(variant.getName()); if (conflict != null) { log.log(Level.WARNING, "Duplicate spell name: '" + conflict.getName() + "'"); } else { spells.put(variant.getName(), variant); } Material m = variant.getMaterial(); if (m != null && m != Material.AIR) { if (buildingMaterials.contains(m)) { log.warning("Spell " + variant.getName() + " uses building material as icon: " + m.name().toLowerCase()); } conflict = spellsByMaterial.get(m); if (conflict != null) { log.log(Level.WARNING, "Duplicate spell material: " + m.name() + " for " + conflict.getName() + " and " + variant.getName()); } else { spellsByMaterial.put(variant.getMaterial(), variant); } } variant.initialize(this); } /* * Material use system */ public List<Material> getBuildingMaterials() { return buildingMaterials; } /* * Undo system */ public UndoQueue getUndoQueue(String playerName) { UndoQueue queue = playerUndoQueues.get(playerName); if (queue == null) { queue = new UndoQueue(); queue.setMaxSize(undoQueueDepth); playerUndoQueues.put(playerName, queue); } return queue; } public void addToUndoQueue(Player player, BlockList blocks) { UndoQueue queue = getUndoQueue(player.getName()); queue.add(blocks); } public boolean undoAny(Player player, Block target) { for (String playerName : playerUndoQueues.keySet()) { UndoQueue queue = playerUndoQueues.get(playerName); if (queue.undo(target)) { if (!player.getName().equals(playerName)) { player.sendMessage("Undid one of " + playerName + "'s spells"); } return true; } } return false; } public boolean undo(String playerName) { UndoQueue queue = getUndoQueue(playerName); return queue.undo(); } public boolean undo(String playerName, Block target) { UndoQueue queue = 
getUndoQueue(playerName); return queue.undo(target); } public BlockList getLastBlockList(String playerName, Block target) { UndoQueue queue = getUndoQueue(playerName); return queue.getLast(target); } public BlockList getLastBlockList(String playerName) { UndoQueue queue = getUndoQueue(playerName); return queue.getLast(); } public void scheduleCleanup(BlockList blocks) { Server server = plugin.getServer(); BukkitScheduler scheduler = server.getScheduler(); // scheduler works in ticks- 20 ticks per second. long ticksToLive = blocks.getTimeToLive() * 20 / 1000; scheduler.scheduleSyncDelayedTask(plugin, new CleanupBlocksTask(blocks), ticksToLive); } /* * Event registration- call to listen for events */ public void registerEvent(SpellEventType type, Spell spell) { PlayerSpells spells = getPlayerSpells(spell.getPlayer()); spells.registerEvent(type, spell); } public void unregisterEvent(SpellEventType type, Spell spell) { PlayerSpells spells = getPlayerSpells(spell.getPlayer()); spells.registerEvent(type, spell); } /* * Random utility functions */ public int getWandTypeId() { return wandTypeId; } public void cancel(Player player) { PlayerSpells playerSpells = getPlayerSpells(player); playerSpells.cancel(); } public boolean isQuiet() { return quiet; } public boolean isSilent() { return silent; } public boolean isSolid(Material mat) { return (mat != Material.AIR && mat != Material.WATER && mat != Material.STATIONARY_WATER && mat != Material.LAVA && mat != Material.STATIONARY_LAVA); } public boolean isSticky(Material mat) { return stickyMaterials.contains(mat); } public boolean isStickyAndTall(Material mat) { return stickyMaterialsDoubleHeight.contains(mat); } public boolean isAffectedByGravity(Material mat) { // DOORS are on this list, it's a bit of a hack, but if you consider // them // as two separate blocks, the top one of which "breaks" when the bottom // one does, // it applies- but only really in the context of the auto-undo system, // so this should probably be its 
own mat list, ultimately. return (mat == Material.GRAVEL || mat == Material.SAND || mat == Material.WOOD_DOOR || mat == Material.IRON_DOOR); } /* * Get the log, if you need to debug or log errors. */ public Logger getLog() { return log; } public MagicPlugin getPlugin() { return plugin; } /* * Internal functions - don't call these, or really anything below here. */ /* * Saving and loading */ public void initialize(MagicPlugin plugin) { this.plugin = plugin; load(); log.info("Magic: Loaded " + spells.size() + " spells."); } public void load() { loadSpells(); loadProperties(); } protected void loadProperties() { File dataFolder = plugin.getDataFolder(); dataFolder.mkdirs(); File pFile = new File(dataFolder, propertiesFile); PluginProperties properties = new PluginProperties(pFile.getAbsolutePath()); properties.load(); undoQueueDepth = properties.getInteger("spells-general-undo-depth", undoQueueDepth); silent = properties.getBoolean("spells-general-silent", silent); quiet = properties.getBoolean("spells-general-quiet", quiet); stickyMaterials = PluginProperties.parseMaterials(STICKY_MATERIALS); stickyMaterialsDoubleHeight = PluginProperties.parseMaterials(STICKY_MATERIALS_DOUBLE_HEIGHT); buildingMaterials = properties.getMaterials("spells-general-building", DEFAULT_BUILDING_MATERIALS); wandTypeId = properties.getInteger("wand-type-id", wandTypeId); for (Spell spell : spells.values()) { spell.onLoad(properties); } properties.save(); } public void clear() { playerSpells.clear(); spells.clear(); spellsByMaterial.clear(); } /* * Listeners / callbacks */ public void onPlayerQuit(PlayerQuitEvent event) { PlayerSpells spells = getPlayerSpells(event.getPlayer()); spells.onPlayerQuit(event); } public void onPlayerMove(PlayerMoveEvent event) { PlayerSpells spells = getPlayerSpells(event.getPlayer()); spells.onPlayerMove(event); } public void onPlayerDeath(Player player, EntityDeathEvent event) { PlayerSpells spells = getPlayerSpells(player); spells.onPlayerDeath(event); } public 
void onPlayerDamage(Player player, EntityDamageEvent event) { PlayerSpells spells = getPlayerSpells(player); spells.onPlayerDamage(event); } public List<Spell> getAllSpells() { List<Spell> allSpells = new ArrayList<Spell>(); allSpells.addAll(spells.values()); return allSpells; } /** * Called when a player plays an animation, such as an arm swing * * @param event * Relevant event details */ public void onPlayerAnimation(PlayerAnimationEvent event) { Player player = event.getPlayer(); if (event.getAnimationType() == PlayerAnimationType.ARM_SWING) { if (event.getPlayer().getInventory().getItemInHand().getTypeId() == getWandTypeId()) { if (!hasWandPermission(player)) { return; } Inventory inventory = player.getInventory(); ItemStack[] contents = inventory.getContents(); Spell spell = null; for (int i = 0; i < 9; i++) { if (contents[i].getType() == Material.AIR || contents[i].getTypeId() == getWandTypeId()) { continue; } spell = getSpell(contents[i].getType(), player); if (spell != null) { break; } } if (spell != null) { spell.cast(); } } } } @SuppressWarnings("deprecation") public boolean cycleMaterials(Player player) { List<Material> buildingMaterials = getBuildingMaterials(); PlayerInventory inventory = player.getInventory(); ItemStack[] contents = inventory.getContents(); int firstMaterialSlot = 8; boolean foundAir = false; for (int i = 8; i >= 0; i--) { Material mat = contents[i] == null ? 
Material.AIR : contents[i].getType(); if (mat == Material.AIR) { if (foundAir) { break; } else { foundAir = true; firstMaterialSlot = i; continue; } } else { if (buildingMaterials.contains(mat)) { firstMaterialSlot = i; continue; } else { break; } } } if (firstMaterialSlot == 8) return false; ItemStack lastSlot = contents[8]; for (int i = 7; i >= firstMaterialSlot; i--) { contents[i + 1] = contents[i]; } contents[firstMaterialSlot] = lastSlot; inventory.setContents(contents); player.updateInventory(); return true; } @SuppressWarnings("deprecation") public void cycleSpells(Player player) { Inventory inventory = player.getInventory(); ItemStack[] contents = inventory.getContents(); ItemStack[] active = new ItemStack[9]; for (int i = 0; i < 9; i++) { active[i] = contents[i]; } int maxSpellSlot = 0; int firstSpellSlot = -1; for (int i = 0; i < 9; i++) { boolean isEmpty = active[i] == null; Material activeType = isEmpty ? Material.AIR : active[i].getType(); boolean isWand = activeType.getId() == getWandTypeId(); boolean isSpell = false; if (activeType != Material.AIR) { Spell spell = getSpell(activeType, player); isSpell = spell != null; } if (isSpell) { if (firstSpellSlot < 0) firstSpellSlot = i; maxSpellSlot = i; } else { if (!isWand && firstSpellSlot >= 0) { break; } } } int numSpellSlots = firstSpellSlot < 0 ? 0 : maxSpellSlot - firstSpellSlot + 1; if (numSpellSlots < 2) { return; } for (int ddi = 0; ddi < numSpellSlots; ddi++) { int i = ddi + firstSpellSlot; Material contentsType = contents[i] == null ? Material.AIR : active[i].getType(); if (contentsType.getId() != getWandTypeId()) { for (int di = 1; di < numSpellSlots; di++) { int dni = (ddi + di) % numSpellSlots; int ni = dni + firstSpellSlot; Material activeType = active[ni] == null ? 
Material.AIR : active[ni].getType(); if (activeType.getId() != getWandTypeId()) { contents[i] = active[ni]; break; } } } } inventory.setContents(contents); player.updateInventory(); } /** * Called when a player uses an item * * @param event * Relevant event details */ public void onPlayerInteract(PlayerInteractEvent event) { if (event.getAction() == Action.RIGHT_CLICK_AIR || event.getAction() == Action.RIGHT_CLICK_BLOCK) { cancel(event.getPlayer()); int materialId = event.getPlayer().getInventory().getItemInHand().getTypeId(); Player player = event.getPlayer(); if (!hasWandPermission(player)) { return; } boolean cycleSpells = false; cycleSpells = player.isSneaking(); if (materialId == getWandTypeId()) { if (cycleSpells) { if (!cycleMaterials(event.getPlayer())) { cycleSpells(event.getPlayer()); } } else { cycleSpells(event.getPlayer()); } } } } public boolean allowPhysics(Block block) { if (physicsDisableTimeout == 0) return true; if (System.currentTimeMillis() > physicsDisableTimeout) physicsDisableTimeout = 0; return false; } public void disablePhysics(int interval) { physicsDisableTimeout = System.currentTimeMillis() + interval; } public boolean hasWandPermission(Player player) { return hasPermission(player, "Magic.wand.use"); } public boolean hasPermission(Player player, String pNode, boolean defaultValue) { PermissionHandler permissions = MagicPlugin.getPermissionHandler(); if (permissions == null) { return defaultValue; } return permissions.has(player, pNode); } public boolean hasPermission(Player player, String pNode) { return hasPermission(player, pNode, true); } /* * Private data */ private final String propertiesFile = "magic.properties"; private int wandTypeId = 280; static final String DEFAULT_BUILDING_MATERIALS = "0,1,2,3,4,5,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,33,34,35,41,42,43,45,46,47,48,49,52,53,55,56,57,58,60,61,62,65,66,67,73,74,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96"; static final String 
STICKY_MATERIALS = "37,38,39,50,51,55,59,63,64,65,66,68,70,71,72,75,76,77,78,83"; static final String STICKY_MATERIALS_DOUBLE_HEIGHT = "64,71,"; private List<Material> buildingMaterials = new ArrayList<Material>(); private List<Material> stickyMaterials = new ArrayList<Material>(); private List<Material> stickyMaterialsDoubleHeight = new ArrayList<Material>(); private long physicsDisableTimeout = 0; private int undoQueueDepth = 256; private boolean silent = false; private boolean quiet = true; private HashMap<String, UndoQueue> playerUndoQueues = new HashMap<String, UndoQueue>(); private final Logger log = Logger.getLogger("Minecraft"); private final HashMap<String, Spell> spells = new HashMap<String, Spell>(); private final HashMap<Material, Spell> spellsByMaterial = new HashMap<Material, Spell>(); private final HashMap<String, PlayerSpells> playerSpells = new HashMap<String, PlayerSpells>(); private MagicPlugin plugin = null; }
Drop the leap variant.
src/main/java/com/elmakers/mine/bukkit/plugins/magic/Spells.java
Drop the leap variant.
Java
mit
ab60d908dcbb2517730c33d6a5b68fe8d1ec4036
0
zmaster587/AdvancedRocketry,zmaster587/AdvancedRocketry
package zmaster587.advancedRocketry.entity; import io.netty.buffer.ByteBuf; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import javax.annotation.Nullable; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.GLAllocation; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityList; import net.minecraft.entity.EntityLiving; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.network.datasync.DataParameter; import net.minecraft.network.datasync.DataSerializers; import net.minecraft.network.datasync.EntityDataManager; import net.minecraft.network.play.server.SPacketRespawn; import net.minecraft.server.MinecraftServer; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumHand; import net.minecraft.util.SoundCategory; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import net.minecraft.util.text.TextComponentString; import net.minecraft.world.Teleporter; import net.minecraft.world.World; import net.minecraft.world.WorldServer; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fluids.FluidContainerRegistry; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.fluids.IFluidContainerItem; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import zmaster587.advancedRocketry.AdvancedRocketry; import zmaster587.advancedRocketry.achievements.ARAchivements; import zmaster587.advancedRocketry.api.AdvancedRocketryItems; import 
zmaster587.advancedRocketry.api.Configuration; import zmaster587.advancedRocketry.api.EntityRocketBase; import zmaster587.advancedRocketry.api.IInfrastructure; import zmaster587.advancedRocketry.api.RocketEvent; import zmaster587.advancedRocketry.api.RocketEvent.RocketLaunchEvent; import zmaster587.advancedRocketry.api.RocketEvent.RocketPreLaunchEvent; import zmaster587.advancedRocketry.api.SatelliteRegistry; import zmaster587.advancedRocketry.api.StatsRocket; import zmaster587.advancedRocketry.api.fuel.FuelRegistry; import zmaster587.advancedRocketry.api.fuel.FuelRegistry.FuelType; import zmaster587.advancedRocketry.api.satellite.SatelliteBase; import zmaster587.advancedRocketry.api.stations.ISpaceObject; import zmaster587.advancedRocketry.atmosphere.AtmosphereHandler; import zmaster587.advancedRocketry.client.SoundRocketEngine; import zmaster587.advancedRocketry.dimension.DimensionManager; import zmaster587.advancedRocketry.dimension.DimensionProperties; import zmaster587.advancedRocketry.event.PlanetEventHandler; import zmaster587.advancedRocketry.inventory.TextureResources; import zmaster587.advancedRocketry.inventory.modules.ModulePlanetSelector; import zmaster587.advancedRocketry.item.ItemAsteroidChip; import zmaster587.advancedRocketry.item.ItemPackedStructure; import zmaster587.advancedRocketry.item.ItemPlanetIdentificationChip; import zmaster587.advancedRocketry.item.ItemStationChip; import zmaster587.advancedRocketry.mission.MissionOreMining; import zmaster587.advancedRocketry.network.PacketSatellite; import zmaster587.advancedRocketry.stations.SpaceObject; import zmaster587.advancedRocketry.stations.SpaceObjectManager; import zmaster587.advancedRocketry.tile.TileGuidanceComputer; import zmaster587.advancedRocketry.tile.hatch.TileSatelliteHatch; import zmaster587.advancedRocketry.util.AudioRegistry; import zmaster587.advancedRocketry.util.RocketInventoryHelper; import zmaster587.advancedRocketry.util.StorageChunk; import 
zmaster587.advancedRocketry.util.TransitionEntity; import zmaster587.advancedRocketry.world.util.TeleporterNoPortal; import zmaster587.libVulpes.LibVulpes; import zmaster587.libVulpes.client.util.ProgressBarImage; import zmaster587.libVulpes.gui.CommonResources; import zmaster587.libVulpes.interfaces.INetworkEntity; import zmaster587.libVulpes.inventory.GuiHandler; import zmaster587.libVulpes.inventory.modules.IButtonInventory; import zmaster587.libVulpes.inventory.modules.IModularInventory; import zmaster587.libVulpes.inventory.modules.IProgressBar; import zmaster587.libVulpes.inventory.modules.ISelectionNotify; import zmaster587.libVulpes.inventory.modules.ModuleBase; import zmaster587.libVulpes.inventory.modules.ModuleButton; import zmaster587.libVulpes.inventory.modules.ModuleImage; import zmaster587.libVulpes.inventory.modules.ModuleProgress; import zmaster587.libVulpes.inventory.modules.ModuleSlotButton; import zmaster587.libVulpes.items.ItemLinker; import zmaster587.libVulpes.network.PacketEntity; import zmaster587.libVulpes.network.PacketHandler; import zmaster587.libVulpes.util.HashedBlockPosition; import zmaster587.libVulpes.util.IconResource; import zmaster587.libVulpes.util.Vector3F; public class EntityRocket extends EntityRocketBase implements INetworkEntity, IModularInventory, IProgressBar, IButtonInventory, ISelectionNotify { //true if the rocket is on decent private boolean isInOrbit; //True if the rocket isn't on the ground private boolean isInFlight; //used in the rare case a player goes to a non-existant space station private int lastDimensionFrom = 0; public StorageChunk storage; private String errorStr; private long lastErrorTime = Long.MIN_VALUE; private static long ERROR_DISPLAY_TIME = 100; private static int DESCENT_TIMER = 500; protected long lastWorldTickTicked; private SatelliteBase satallite; protected int destinationDimId; //Offset for buttons linking to the tileEntityGrid private int tilebuttonOffset = 3; private int autoDescendTimer; 
// Per-seat passenger references, sized from stats.getNumPassengerSeats() in the constructors.
// NOTE(review): raw WeakReference[] assigned to a generic array field (unchecked) — unavoidable in Java.
private WeakReference<Entity>[] mountedEntities;
protected ModulePlanetSelector container;
boolean acceptedPacket = false;

// Discriminators for client<->server rocket packets.
// NOTE(review): packets are sent using ordinal() — do not rename, reorder, or insert constants
// (RECIEVENBT is misspelled but is effectively part of the wire format).
public static enum PacketType {
    RECIEVENBT,
    SENDINTERACT,
    REQUESTNBT,
    FORCEMOUNT,
    LAUNCH,
    DECONSTRUCT,
    OPENGUI,
    CHANGEWORLD,
    REVERTWORLD,
    OPENPLANETSELECTION,
    SENDPLANETDATA,
    DISCONNECTINFRASTRUCTURE,
    CONNECTINFRASTRUCTURE,
    ROCKETLANDEVENT,
    MENU_CHANGE,
    UPDATE_ATM,
    UPDATE_ORBIT,
    UPDATE_FLIGHT,
    DISMOUNTCLIENT
}

// Entity data replicated to clients: fuel level plus the in-flight / in-orbit flags.
private static final DataParameter<Integer> fuelLevel = EntityDataManager.<Integer>createKey(EntityRocket.class, DataSerializers.VARINT);
private static final DataParameter<Boolean> INFLIGHT = EntityDataManager.<Boolean>createKey(EntityRocket.class, DataSerializers.BOOLEAN);
private static final DataParameter<Boolean> INORBIT = EntityDataManager.<Boolean>createKey(EntityRocket.class, DataSerializers.BOOLEAN);

/**
 * World-only constructor (used by the vanilla entity loader); state is filled in
 * later via NBT / network sync.
 * @param p_i1582_1_ world the rocket is spawned into
 */
public EntityRocket(World p_i1582_1_) {
    super(p_i1582_1_);
    isInOrbit = false;
    stats = new StatsRocket();
    isInFlight = false;
    connectedInfrastructure = new LinkedList<IInfrastructure>();
    infrastructureCoords = new LinkedList<HashedBlockPosition>();
    mountedEntities = new WeakReference[stats.getNumPassengerSeats()];
    lastWorldTickTicked = p_i1582_1_.getTotalWorldTime();
    autoDescendTimer = 5000;
}

/**
 * Constructs a rocket from an assembled structure.
 * @param world   world to spawn in
 * @param storage captured block structure making up the rocket
 * @param stats   computed rocket statistics (fuel, seats, engines, ...)
 * @param x       spawn x
 * @param y       spawn y
 * @param z       spawn z
 */
public EntityRocket(World world, StorageChunk storage, StatsRocket stats, double x, double y, double z) {
    this(world);
    this.stats = stats;
    this.setPosition(x, y, z);
    this.storage = storage;
    this.storage.setEntity(this);
    initFromBounds();
    isInFlight = false;
    mountedEntities = new WeakReference[stats.getNumPassengerSeats()];
    lastWorldTickTicked = world.getTotalWorldTime();
    autoDescendTimer = 5000;
}

@Override
public AxisAlignedBB getEntityBoundingBox() {
    if(storage != null) {
        return super.getEntityBoundingBox();//.offset(0, -storage.getSizeY(), 0);
    }
    // Fallback 1x1x1 box; presumably used before the storage chunk arrives (e.g. fresh client spawn) — TODO confirm
    return new AxisAlignedBB(0,0,0,1,1,1);
}

@Override
public void setEntityBoundingBox(AxisAlignedBB bb) {
    // Offset-by-storage-height variant left disabled; the plain super call below is the live path.
    //if(storage != null)
    //	super.setEntityBoundingBox(bb.offset(0, storage.getSizeY(),0));
    //else
    super.setEntityBoundingBox(bb);
}

@Override
public AxisAlignedBB getCollisionBoundingBox() {
    // TODO Auto-generated method stub
    return getEntityBoundingBox();
}

/**
 * @return the amount of fuel stored in the rocket
 */
public int getFuelAmount() {
    // Read the synced value and mirror it into the local stats object
    int amount = dataManager.get(fuelLevel);
    stats.setFuelAmount(FuelType.LIQUID,amount);
    return amount;
}

/**
 * Adds fuel and updates the datawatcher
 * @param amount amount of fuel to add
 * @return the amount of fuel added
 */
public int addFuelAmount(int amount) {
    int ret = stats.addFuelAmount(FuelType.LIQUID, amount);
    setFuelAmount(stats.getFuelAmount(FuelType.LIQUID));
    return ret;
}

/**
 * Unlinks the given infrastructure tile from this rocket and removes it from the
 * persisted coordinate list.
 * @param infrastructure tile to disconnect; expected to also be a TileEntity (cast below)
 */
public void disconnectInfrastructure(IInfrastructure infrastructure){
    infrastructure.unlinkRocket();
    infrastructureCoords.remove(new HashedBlockPosition(((TileEntity)infrastructure).getPos()));

    if(!worldObj.isRemote) {
        int pos[] = {((TileEntity)infrastructure).getPos().getX(), ((TileEntity)infrastructure).getPos().getY(), ((TileEntity)infrastructure).getPos().getZ()};

        // NOTE(review): this NBT is built but the packet send below is commented out, so it is currently unused.
        NBTTagCompound nbt = new NBTTagCompound();
        nbt.setIntArray("pos", pos);

        //PacketHandler.sendToPlayersTrackingEntity(new PacketEntity(this, (byte)PacketType.DISCONNECTINFRASTRUCTURE.ordinal(), nbt), this);
    }
}

@Override
public String getTextOverlay() {

    // Flash a pending error message instead of the destination for ERROR_DISPLAY_TIME ticks
    if(this.worldObj.getTotalWorldTime() < this.lastErrorTime + ERROR_DISPLAY_TIME)
        return errorStr;

    //Get destination string
    String displayStr = "N/A";
    if(storage != null) {
        int dimid = storage.getDestinationDimId(this.worldObj.provider.getDimension(), (int)posX, (int)posZ);

        if(dimid == Configuration.spaceDimId) {
            // Destination is a space station: resolve its id from the stored coordinates
            Vector3F<Float> vec = storage.getDestinationCoordinates(dimid, false);
            if(vec != null) {
                ISpaceObject obj = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(new BlockPos(vec.x,vec.y,vec.z));
                if(obj != null) {
                    displayStr = "Station " + obj.getId();
                }
            }
        }
        else if(dimid != -1 && dimid != SpaceObjectManager.WARPDIMID) {
            displayStr = DimensionManager.getInstance().getDimensionProperties(dimid).getName();
        }
    }
    // In orbit but not yet descending: show the auto-descend countdown (ticks -> seconds)
    if(isInOrbit() && !isInFlight())
        return "Press Space to descend!\n Auto descend in " + ((DESCENT_TIMER - this.ticksExisted)/20);
    else if(!isInFlight())
        return "Press Space to take off!\nDest: " + displayStr;

    return super.getTextOverlay();
}

/**
 * Records an error string to show on the HUD; displayed by getTextOverlay() for
 * ERROR_DISPLAY_TIME ticks from now.
 * @param error message to display
 */
private void setError(String error) {
    this.errorStr = error;
    this.lastErrorTime = this.worldObj.getTotalWorldTime();
}

@Override
public void setPosition(double x, double y, double z) {
    super.setPosition(x, y, z);

    if(storage != null) {
        // NOTE(review): these locals feed only the commented-out bounding-box call below and are currently unused.
        float sizeX = storage.getSizeX()/2.0f;
        float sizeY = storage.getSizeY();
        float sizeZ = storage.getSizeZ()/2.0f;
        //setEntityBoundingBox(new AxisAlignedBB(x - sizeX, y - (double)this.getYOffset() + this.height, z - sizeZ, x + sizeX, y + sizeY - (double)this.getYOffset() + this.height, z + sizeZ));
    }
}

/**
 * Updates the data option
 * @param amt sets the amount of fuel in the rocket
 */
public void setFuelAmount(int amt) {
    dataManager.set(fuelLevel, amt);
    dataManager.setDirty(fuelLevel);
}

/**
 * @return gets the fuel capacity of the rocket
 */
public int getFuelCapacity() {
    return stats.getFuelCapacity(FuelType.LIQUID);
}

@Override
public void setEntityId(int id){
    super.setEntityId(id);

    //Ask server for nbt data
    if(worldObj.isRemote) {
        PacketHandler.sendToServer(new PacketEntity(this, (byte)PacketType.REQUESTNBT.ordinal()));
    }
}

@Override
public boolean canBeCollidedWith() {
    return true;
}

/**
 * If the rocket is in flight, ie the rocket has taken off and has not touched the ground
 * @return true if in flight
 */
public boolean isInFlight() {
    // Server reads the authoritative field; clients read the synced data parameter
    if(!worldObj.isRemote) {
        return isInFlight;
    }
    return this.dataManager.get(INFLIGHT);
}

/**
 * Sets the orbit status of the rocket and updates the datawatcher
 * @param inOrbit status of orbit
 */
public void setInOrbit(boolean inOrbit) {
    this.isInOrbit = inOrbit;
    this.dataManager.set(INORBIT, inOrbit);
    this.dataManager.setDirty(INORBIT);
}

/**
 * Whether the rocket is in orbit (descent side of the trip) rather than ascending.
 * @return true if in orbit
 */
public boolean isInOrbit() {
    // Server reads the authoritative field; clients read the synced data parameter
    if(!worldObj.isRemote) {
        return isInOrbit;
    }
    return this.dataManager.get(INORBIT);
}

/**
 * Sets the the status of flight of the rocket and updates the datawatcher
 * @param inflight status of flight
 */
public void setInFlight(boolean inflight) {
    this.isInFlight = inflight;
    this.dataManager.set(INFLIGHT, inflight);
    this.dataManager.setDirty(INFLIGHT);
}

@Override
protected void entityInit() {
    // Register the synced parameters with their defaults
    this.dataManager.register(INFLIGHT, false);
    this.dataManager.register(fuelLevel, 0);
    this.dataManager.register(INORBIT, false);
}

//Set the size and position of the rocket from storage
public void initFromBounds() {
    if(storage != null) {
        this.setSize(Math.max(storage.getSizeX(), storage.getSizeZ()), storage.getSizeY());
        this.setPosition(this.posX, this.posY, this.posZ);
    }
}

/**
 * Handles a right-click on the rocket: linking via an ItemLinker, fueling from a
 * filled fluid container, opening the GUI when sneaking, and mounting the pilot seat.
 * @param player interacting player
 * @return true if the interaction was consumed
 */
protected boolean interact(EntityPlayer player) {
    //Actual interact code needs to be moved to a packet receive on the server
    ItemStack heldItem = player.getHeldItem(EnumHand.MAIN_HAND);

    //Handle linkers and right-click with fuel
    if(heldItem != null) {
        float fuelMult;
        FluidStack fluidStack;

        if(heldItem.getItem() instanceof ItemLinker) {
            if(ItemLinker.isSet(heldItem)) {

                TileEntity tile = this.worldObj.getTileEntity(ItemLinker.getMasterCoords(heldItem));

                if(tile instanceof IInfrastructure) {
                    IInfrastructure infrastructure = (IInfrastructure)tile;
                    // Allow linking when within the tile's max link distance, padded by the rocket's footprint
                    if(this.getDistance(ItemLinker.getMasterX(heldItem), this.posY, ItemLinker.getMasterZ(heldItem)) < infrastructure.getMaxLinkDistance() + Math.max(storage.getSizeX(), storage.getSizeZ())) {
                        if(!connectedInfrastructure.contains(tile)) {

                            linkInfrastructure(infrastructure);
                            if(!worldObj.isRemote) {
                                // NOTE(review): "Sucessfully" is a typo in a user-facing string (left as-is; changing it alters behavior)
                                player.addChatMessage(new TextComponentString("Linked Sucessfully"));
                            }
                            ItemLinker.resetPosition(heldItem);

                            return true;
                        }
                        else if(!worldObj.isRemote)
                            player.addChatMessage(new TextComponentString("Already linked!"));
                    }
                    else if(!worldObj.isRemote)
                        player.addChatMessage(new TextComponentString("The object you are trying to link is too far away"));
                }
                else if(!worldObj.isRemote)
                    player.addChatMessage(new TextComponentString("This cannot be linked to a rocket!"));
            }
            else if(!worldObj.isRemote)
                player.addChatMessage(new TextComponentString("Nothing to be linked"));
            return false;
        }
        // Fueling branch: accept either a registry-filled container or an IFluidContainerItem
        // holding at least one bucket of a fluid with a positive fuel multiplier.
        // NOTE(review): fuelMult/fluidStack are assigned inside the condition itself.
        else if((FluidContainerRegistry.isFilledContainer(heldItem) && (fuelMult = FuelRegistry.instance.getMultiplier(FuelType.LIQUID, (fluidStack = FluidContainerRegistry.getFluidForFilledItem(heldItem)).getFluid()) ) > 0 ) ||
                ( heldItem.getItem() instanceof IFluidContainerItem &&
                        ((IFluidContainerItem) heldItem.getItem()).getFluid(heldItem) != null &&
                        ((IFluidContainerItem) heldItem.getItem()).getFluid(heldItem).amount >= FluidContainerRegistry.BUCKET_VOLUME &&
                        (fuelMult = FuelRegistry.instance.getMultiplier(FuelType.LIQUID, (fluidStack = ((IFluidContainerItem) heldItem.getItem()).getFluid(heldItem)).getFluid())) > 0 )) {

            int amountToAdd = (int) (fuelMult*fluidStack.amount);
            this.addFuelAmount(amountToAdd);

            //if the player is not in creative then try to use the fluid container
            if(!player.capabilities.isCreativeMode) {
                if(heldItem.getItem() instanceof IFluidContainerItem) {
                    ((IFluidContainerItem) heldItem.getItem()).drain(heldItem, FluidContainerRegistry.BUCKET_VOLUME, true);
                }
                else {
                    // Swap the filled container for its empty version, shrinking the held stack
                    ItemStack emptyStack = FluidContainerRegistry.drainFluidContainer(player.getHeldItem(EnumHand.MAIN_HAND));

                    if(player.inventory.addItemStackToInventory(emptyStack)) {
                        player.getHeldItem(EnumHand.MAIN_HAND).splitStack(1);

                        if(player.getHeldItem(EnumHand.MAIN_HAND).stackSize == 0)
                            player.inventory.setInventorySlotContents(player.inventory.currentItem, null);
                    }
                }
            }

            return true;
        }
    }

    //If player is holding shift open GUI
    if(player.isSneaking()) {
        openGui(player);
    }
    else if(stats.hasSeat()) {

        //If pilot seat is open mount entity there
        if(stats.hasSeat() && this.getPassengers().isEmpty()) {
            if(!worldObj.isRemote)
                player.startRiding(this);
        }
        /*else if(stats.getNumPassengerSeats() > 0) {
            //If a passenger seat exists and one is empty, mount the player to it
            for(int i = 0; i < stats.getNumPassengerSeats(); i++) {
                if(this.mountedEntities[i] == null || this.mountedEntities[i].get() == null) {
                    player.ridingEntity = this;
                    this.mountedEntities[i] = new WeakReference<Entity>(player);
                    break;
                }
            }
        }*/
    }
    return true;
}

/**
 * Opens the rocket's modular GUI for the given player and, server side, registers
 * the inventory bypass so the container can be used while riding.
 * @param player player to show the GUI to
 */
public void openGui(EntityPlayer player) {
    player.openGui(LibVulpes.instance, GuiHandler.guiId.MODULAR.ordinal(), player.worldObj, this.getEntityId(), -1,0);

    //Only handle the bypass on the server
    if(!worldObj.isRemote)
        RocketInventoryHelper.addPlayerToInventoryBypass(player);
}

@Override
public boolean processInitialInteract(EntityPlayer player, @Nullable ItemStack stack, EnumHand hand){
    if(worldObj.isRemote) {
        //Due to forge's rigid handling of entities (NetHanlderPlayServer:866) needs to be handled differently for large rockets
        PacketHandler.sendToServer(new PacketEntity(this, (byte)PacketType.SENDINTERACT.ordinal()));
        return interact(player);
    }
    return true;
}

/**
 * @return true when the engines should consume fuel: fuel available (or not required
 *         by config) and either a pilot is throttling up or the rocket is ascending
 */
public boolean isBurningFuel() {
    return (getFuelAmount() > 0 || !Configuration.rocketRequireFuel) && ((!this.getPassengers().isEmpty() && getPassengerMovingForward() > 0) || !isInOrbit());
}

/**
 * @return the forward input of the first player passenger, or 0 if none
 */
public float getPassengerMovingForward() {

    for(Entity entity : this.getPassengers()) {
        if(entity instanceof EntityPlayer) {
            return ((EntityPlayer) entity).moveForward;
        }
    }
    return 0f;
}

/**
 * @return true if at least one passenger is a player
 */
private boolean hasHumanPassenger() {

    for(Entity entity : this.getPassengers()) {
        if(entity instanceof EntityPlayer) {
            return true;
        }
    }
    return false;
}

/**
 * @return true when automatic retro-rockets should fire: configured on, in orbit,
 *         below y=300 and falling fast (clients always treat descent as active here)
 */
public boolean isDescentPhase() {
    return Configuration.automaticRetroRockets && isInOrbit() && this.posY < 300 && (this.motionY < -0.4f || worldObj.isRemote);
}

/**
 * @return true when engine effects should play (ascending, descending, or throttled)
 */
public boolean areEnginesRunning() {
    return (this.motionY > 0 || isDescentPhase() || (getPassengerMovingForward() > 0));
}

@Override
public void onUpdate() {
    super.onUpdate();

    // Ticks elapsed since this entity last ticked; used to scale motion after dimension transfers
    long deltaTime = worldObj.getTotalWorldTime() - lastWorldTickTicked;
    lastWorldTickTicked = worldObj.getTotalWorldTime();

    if(this.ticksExisted == 20) {
        //problems with loading on other world then where the infrastructure was set?
        // Late re-link: resolve saved infrastructure coordinates once nearby chunks have loaded
        ListIterator<HashedBlockPosition> itr = infrastructureCoords.listIterator();
        while(itr.hasNext()) {
            HashedBlockPosition temp = itr.next();

            TileEntity tile = this.worldObj.getTileEntity(new BlockPos(temp.x, temp.y, temp.z));
            if(tile instanceof IInfrastructure) {
                this.linkInfrastructure((IInfrastructure)tile);
                itr.remove();
            }
        }

        if(worldObj.isRemote)
            LibVulpes.proxy.playSound(new SoundRocketEngine( AudioRegistry.combustionRocket, SoundCategory.NEUTRAL,this));
    }

    // Auto-descend once the countdown shown in getTextOverlay() expires
    if(this.ticksExisted > DESCENT_TIMER && isInOrbit() && !isInFlight())
        setInFlight(true);

    //Hackish crap to make clients mount entities immediately after server transfer and fire events
    //Known race condition... screw me...
    if(!worldObj.isRemote && (this.isInFlight() || this.isInOrbit()) && this.ticksExisted == 20) {
        //Deorbiting
        MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketDeOrbitingEvent(this));
        PacketHandler.sendToNearby(new PacketEntity(this, (byte)PacketType.ROCKETLANDEVENT.ordinal()), worldObj.provider.getDimension(), (int)posX, (int)posY, (int)posZ, 64);

        for(Entity riddenByEntity : getPassengers()) {
            if(riddenByEntity instanceof EntityPlayer) {
                EntityPlayer player = (EntityPlayer)riddenByEntity;

                // NOTE(review): redundant instanceof — 'player' is already an EntityPlayer here
                if(player instanceof EntityPlayer)
                    PacketHandler.sendToPlayer(new PacketEntity((INetworkEntity)this,(byte)PacketType.FORCEMOUNT.ordinal()), player);
            }
        }
    }

    if(isInFlight()) {
        boolean burningFuel = isBurningFuel();
        boolean descentPhase = isDescentPhase();

        if(burningFuel || descentPhase) {
            //Burn the rocket fuel
            if(!worldObj.isRemote && !descentPhase)
                setFuelAmount(getFuelAmount() - stats.getFuelRate(FuelType.LIQUID));

            //Spawn in the particle effects for the engines
            // NOTE(review): engineNum is never incremented, so the staggering term below always sees 0
            int engineNum = 0;
            if(worldObj.isRemote && Minecraft.getMinecraft().gameSettings.particleSetting < 2 && areEnginesRunning()) {
                for(Vector3F<Float> vec : stats.getEngineLocations()) {

                    AtmosphereHandler handler;
                    // Smoke only every 10 ticks, staggered across engines, and only in atmospheres that allow combustion
                    if(Minecraft.getMinecraft().gameSettings.particleSetting < 1 && worldObj.getTotalWorldTime() % 10 == 0 && (engineNum < 8 ||
((worldObj.getTotalWorldTime()/10) % Math.max((stats.getEngineLocations().size()/8),1)) == (engineNum/8)) && ( (handler = AtmosphereHandler.getOxygenHandler(worldObj.provider.getDimension())) == null || (handler.getAtmosphereType(this) != null && handler.getAtmosphereType(this).allowsCombustion())) )
                        AdvancedRocketry.proxy.spawnParticle("rocketSmoke", worldObj, this.posX + vec.x, this.posY + vec.y - 0.75, this.posZ +vec.z,0,0,0);

                    // Flame particles every tick, with slight horizontal jitter
                    for(int i = 0; i < 4; i++) {
                        AdvancedRocketry.proxy.spawnParticle("rocketFlame", worldObj, this.posX + vec.x, this.posY + vec.y - 0.75, this.posZ +vec.z,(this.rand.nextFloat() - 0.5f)/8f,-.75 ,(this.rand.nextFloat() - 0.5f)/8f);
                    }
                }
            }
        }

        if(!this.getPassengers().isEmpty()) {
            // Passengers (and the rocket) take no fall damage while riding
            for(Entity entity : this.getPassengers()) {
                entity.fallDistance = 0;
                this.fallDistance = 0;
            }

            //if the player holds the forward key then decelerate
            if(isInOrbit() && (burningFuel || descentPhase)) {
                float vel = descentPhase ? 1f : getPassengerMovingForward();
                this.motionY -= this.motionY*vel/50f;
            }
            this.velocityChanged = true;

        }
        else if(isInOrbit() && descentPhase) { //For unmanned rockets
            this.motionY -= this.motionY/50f;
            this.velocityChanged = true;
        }

        if(!worldObj.isRemote) {
            //If out of fuel or descending then accelerate downwards
            if(isInOrbit() || !burningFuel) {
                this.motionY = Math.min(this.motionY - 0.001, 1);
            }
            else
                //this.motionY = Math.min(this.motionY + 0.001, 1);
                this.motionY += stats.getAcceleration() * deltaTime;

            double lastPosY = this.posY;
            double prevMotion = this.motionY;
            this.moveEntity(0, prevMotion*deltaTime, 0);

            //Check to see if it's landed
            // Landed when descending, movement was blocked (posY didn't change by the full motion) and below build height
            if((isInOrbit() || !burningFuel) && isInFlight() && lastPosY + prevMotion != this.posY && this.posY < 256) {
                MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketLandedEvent(this));
                //PacketHandler.sendToPlayersTrackingEntity(new PacketEntity(this, (byte)PacketType.ROCKETLANDEVENT.ordinal()), this);
                this.setInFlight(false);
                this.setInOrbit(false);
            }
            if(!isInOrbit() && (this.posY > Configuration.orbit)) {
                onOrbitReached();
            }

            //If the rocket falls out of the world while in orbit either fall back to earth or die
            if(this.posY < 0) {
                int dimId = worldObj.provider.getDimension();

                if(dimId == Configuration.spaceDimId) {
                    // Fell off a space station: drop down to the planet the station orbits
                    ISpaceObject obj = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(getPosition());
                    if(obj != null) {
                        int targetDimID = obj.getOrbitingPlanetId();
                        Vector3F<Float> pos = storage.getDestinationCoordinates(targetDimID, true);
                        if(pos != null) {
                            setInOrbit(true);
                            setInFlight(false);
                            this.changeDimension(targetDimID, pos.x, Configuration.orbit, pos.z);
                        }
                        else
                            this.setDead();
                    }
                    else {
                        // No station here: fall back to the dimension we came from
                        Vector3F<Float> pos = storage.getDestinationCoordinates(0, true);
                        if(pos != null) {
                            setInOrbit(true);
                            setInFlight(false);
                            this.changeDimension(lastDimensionFrom, pos.x, Configuration.orbit, pos.z);
                        }
                        else
                            this.setDead();
                    }
                }
                else
                    this.setDead();
            }
        }
        else {
            // Client side: just apply the current motion
            this.moveEntity(0, this.motionY, 0);
        }
    }
}

/**
 * @return a list of satellites stores in this rocket
 */
public List<SatelliteBase> getSatellites() {

    List<SatelliteBase> satellites = new ArrayList<SatelliteBase>();
    for(TileSatelliteHatch tile : storage.getSatelliteHatches()) {
        SatelliteBase satellite = tile.getSatellite();
        if(satellite != null)
            satellites.add(satellite);
    }
    return satellites;
}

/**
 * Called when the rocket reaches orbit height (Configuration.orbit). Unmanned rockets
 * either start an asteroid-mining mission or deploy satellites; manned rockets transfer
 * toward their destination dimension.
 */
public void onOrbitReached() {
    super.onOrbitReached();

    //TODO: support multiple riders and rider/satellite combo
    if(!stats.hasSeat()) {

        TileGuidanceComputer computer = storage.getGuidanceComputer();
        if(computer != null && computer.getStackInSlot(0) != null && computer.getStackInSlot(0).getItem() instanceof ItemAsteroidChip) {
            //make it 30 minutes with one drill
            float drillingPower = stats.getDrillingPower();
            MissionOreMining miningMission = new MissionOreMining((long)(Configuration.asteroidMiningTimeMult*(drillingPower == 0f ?
36000 : 360/stats.getDrillingPower())), this, connectedInfrastructure);
            DimensionProperties properties = DimensionManager.getInstance().getDimensionProperties(worldObj.provider.getDimension());

            miningMission.setDimensionId(worldObj);
            properties.addSatallite(miningMission, worldObj);

            if(!worldObj.isRemote)
                PacketHandler.sendToAll(new PacketSatellite(miningMission));

            for(IInfrastructure i : connectedInfrastructure) {
                i.linkMission(miningMission);
            }

            // The rocket entity is consumed by the mission
            this.setDead();
            //TODO: Move tracking stations over to the mission handler
        }
        else {
            unpackSatellites();
        }

        destinationDimId = storage.getDestinationDimId(this.worldObj.provider.getDimension(), (int)this.posX, (int)this.posZ);
        if(DimensionManager.getInstance().canTravelTo(destinationDimId)) {
            Vector3F<Float> pos = storage.getDestinationCoordinates(destinationDimId, true);
            // Remember where we left from so a return trip lands back here
            storage.setDestinationCoordinates(new Vector3F<Float>((float)this.posX, (float)this.posY, (float)this.posZ), this.worldObj.provider.getDimension());
            if(pos != null) {
                this.setInOrbit(true);
                this.motionY = -this.motionY;
                this.changeDimension(destinationDimId, pos.x, Configuration.orbit, pos.z);
                return;
            }
        }
        else
            this.setDead();
        //TODO: satellite event?
    }
    else {
        // Manned rocket
        unpackSatellites();

        //TODO: maybe add orbit dimension
        this.motionY = -this.motionY;
        setInOrbit(true);

        //If going to a station or something make sure to set coords accordingly
        //If in space land on the planet, if on the planet go to space
        if(destinationDimId == Configuration.spaceDimId || this.worldObj.provider.getDimension() == Configuration.spaceDimId) {
            Vector3F<Float> pos = storage.getDestinationCoordinates(destinationDimId, true);
            storage.setDestinationCoordinates(new Vector3F<Float>((float)this.posX, (float)this.posY, (float)this.posZ), this.worldObj.provider.getDimension());
            if(pos != null) {

                //Make player confirm deorbit if a player is riding the rocket
                if(hasHumanPassenger()) {
                    setInFlight(false);
                    pos.y = (float) Configuration.orbit;
                }

                this.changeDimension(destinationDimId, pos.x, pos.y, pos.z);
                return;
            }
        }

        //Make player confirm deorbit if a player is riding the rocket
        if(hasHumanPassenger()) {
            setInFlight(false);

            // First-trip-to-the-Moon achievements
            if(DimensionManager.getInstance().getDimensionProperties(destinationDimId).getName().equals("Luna")) {
                for(Entity player : this.getPassengers()) {
                    if(player instanceof EntityPlayer) {
                        ((EntityPlayer)player).addStat(ARAchivements.moonLanding);
                        if(!DimensionManager.hasReachedMoon)
                            ((EntityPlayer)player).addStat(ARAchivements.oneSmallStep);
                    }
                }
                DimensionManager.hasReachedMoon = true;
            }
        }
        else
            setPosition(posX, Configuration.orbit, posZ);

        // NOTE(review): the condition below is only reached when the dims differ, so the ternary always picks destinationDimId
        if(destinationDimId != this.worldObj.provider.getDimension())
            this.changeDimension(this.worldObj.provider.getDimension() == destinationDimId ?
0 : destinationDimId);
    }
}

/**
 * Deploys the contents of every satellite hatch: packed space stations are unpacked
 * and moved into orbit around the current body; regular satellites are registered
 * with the effective dimension. Hatch slots are emptied as they are consumed.
 */
private void unpackSatellites() {
    List<TileSatelliteHatch> satelliteHatches = storage.getSatelliteHatches();

    for(TileSatelliteHatch tile : satelliteHatches) {
        SatelliteBase satellite = tile.getSatellite();
        if(satellite == null) {
            ItemStack stack = tile.getStackInSlot(0);
            if(stack != null && stack.getItem() == AdvancedRocketryItems.itemSpaceStation) {
                StorageChunk storage = ((ItemPackedStructure)stack.getItem()).getStructure(stack);
                ISpaceObject object = SpaceObjectManager.getSpaceManager().getSpaceStation((int)ItemStationChip.getUUID(stack));

                //in case of no NBT data or the like
                if(object == null) {
                    tile.setInventorySlotContents(0, null);
                    continue;
                }

                SpaceObjectManager.getSpaceManager().moveStationToBody(object, this.worldObj.provider.getDimension());

                //Vector3F<Integer> spawn = object.getSpawnLocation();

                object.onModuleUnpack(storage);

                tile.setInventorySlotContents(0, null);
            }
        }
        else {
            DimensionProperties properties = DimensionManager.getEffectiveDimId(worldObj, this.getPosition());
            World world = net.minecraftforge.common.DimensionManager.getWorld(properties.getId());

            properties.addSatallite(satellite, world);
            tile.setInventorySlotContents(0, null);
        }
    }
}

@Override
/**
 * Called immediately before launch
 */
public void prepareLaunch() {
    RocketPreLaunchEvent event = new RocketEvent.RocketPreLaunchEvent(this);
    MinecraftForge.EVENT_BUS.post(event);

    if(!event.isCanceled()) {
        // Clients forward the launch to the server; both sides run launch() for responsiveness
        if(worldObj.isRemote)
            PacketHandler.sendToServer(new PacketEntity(this, (byte)EntityRocket.PacketType.LAUNCH.ordinal()));
        launch();
    }
}

@Override
public void launch() {

    if(isInFlight())
        return;

    // Already in orbit: "launch" just resumes the descent
    if(isInOrbit()) {
        setInFlight(true);
        return;
    }

    //Get destination dimid and lock the computer
    //TODO: lock the computer
    destinationDimId = storage.getDestinationDimId(worldObj.provider.getDimension(), (int)this.posX, (int)this.posZ);

    //TODO: make sure this doesn't break asteriod mining
    // -1 is allowed for satellite-only launches (no travel destination needed)
    if(!(DimensionManager.getInstance().canTravelTo(destinationDimId) || (destinationDimId == -1 && storage.getSatelliteHatches().size() != 0))) {
        setError(LibVulpes.proxy.getLocalizedString("error.rocket.cannotGetThere"));
        return;
    }

    int finalDest = destinationDimId;
    if(destinationDimId == Configuration.spaceDimId) {

        // Destination is a station: resolve the planet it orbits for the same-system check below
        ISpaceObject obj = null;
        Vector3F<Float> vec = storage.getDestinationCoordinates(destinationDimId,false);
        if(vec != null)
            obj = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(new BlockPos(vec.x, vec.y, vec.z));

        if( obj != null)
            finalDest = obj.getOrbitingPlanetId();
        else {
            setError(LibVulpes.proxy.getLocalizedString("error.rocket.destinationNotExist"));
            return;
        }
    }

    // When launching from a station, compare against the planet the station orbits
    int thisDimId = this.worldObj.provider.getDimension();
    if(this.worldObj.provider.getDimension() == Configuration.spaceDimId) {
        ISpaceObject object = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(this.getPosition());
        if(object != null)
            thisDimId = object.getProperties().getParentProperties().getId();
    }

    if(finalDest != -1 && !DimensionManager.getInstance().areDimensionsInSamePlanetMoonSystem(finalDest, thisDimId)) {
        setError(LibVulpes.proxy.getLocalizedString("error.rocket.notSameSystem"));
        return;
    }

    //TODO: Clean this logic a bit?
    if(!stats.hasSeat() || ((DimensionManager.getInstance().isDimensionCreated(destinationDimId)) || destinationDimId == Configuration.spaceDimId || destinationDimId == 0) ) { //Abort if destination is invalid
        setInFlight(true);
        Iterator<IInfrastructure> connectedTiles = connectedInfrastructure.iterator();

        MinecraftForge.EVENT_BUS.post(new RocketLaunchEvent(this));

        //Disconnect things linked to the rocket on liftoff
        while(connectedTiles.hasNext()) {

            IInfrastructure i = connectedTiles.next();
            if(i.disconnectOnLiftOff()) {
                disconnectInfrastructure(i);
                connectedTiles.remove();
            }
        }
    }
}

/**
 * Called when the rocket is to be deconstructed
 */
@Override
public void deconstructRocket() {
    super.deconstructRocket();

    for(IInfrastructure infrastructure : connectedInfrastructure) {
        infrastructure.unlinkRocket();
    }

    //paste the rocket into the world as blocks
    storage.pasteInWorld(this.worldObj, (int)(this.posX - storage.getSizeX()/2f), (int)this.posY, (int)(this.posZ - storage.getSizeZ()/2f));
    this.setDead();
}

@Override
public void setDead() {
    super.setDead();

    // Free the client render display list if one was allocated
    if(storage != null && storage.world.displayListIndex != -1)
        GLAllocation.deleteDisplayLists(storage.world.displayListIndex);

    //unlink any connected tiles
    Iterator<IInfrastructure> connectedTiles = connectedInfrastructure.iterator();
    while(connectedTiles.hasNext()) {
        connectedTiles.next().unlinkRocket();
        connectedTiles.remove();
    }
}

/**
 * Writes a fallback destination into the guidance computer, if one is installed.
 * @param dimId target dimension id
 * @param x     target x
 * @param y     target y
 * @param z     target z
 */
public void setOverriddenCoords(int dimId, float x, float y, float z) {
    TileGuidanceComputer tile = storage.getGuidanceComputer();
    if(tile != null) {
        tile.setFallbackDestination(dimId, new Vector3F<Float>(x, y, z));
    }
}

@Override
public Entity changeDimension(int newDimId) {
    return changeDimension(newDimId, this.posX, (double)Configuration.orbit, this.posZ);
}

/**
 * Transfers this rocket (and, via delayed transitions, its passengers) to another
 * dimension at the given coordinates. Largely adapted from vanilla Entity travel code,
 * but uses TeleporterNoPortal and skips portal placement.
 * @param dimensionIn destination dimension id
 * @param posX        destination x
 * @param y           destination y
 * @param posZ        destination z
 * @return the replacement entity spawned in the destination world, or null if the
 *         transfer was refused (client side, dead, banned destination, or Forge veto)
 */
@Nullable
public Entity changeDimension(int dimensionIn, double posX, double y, double posZ) {
    if (!this.worldObj.isRemote && !this.isDead) {
        if(!DimensionManager.getInstance().canTravelTo(dimensionIn)) {
            AdvancedRocketry.logger.warn("Rocket trying to travel from Dim" + this.worldObj.provider.getDimension() + " to Dim " + dimensionIn + ".  target not accessible by rocket from launch dim");
            return null;
        }

        lastDimensionFrom = this.worldObj.provider.getDimension();

        // Snapshot passengers before vanilla teardown dismounts them
        List<Entity> passengers = getPassengers();

        if (!net.minecraftforge.common.ForgeHooks.onTravelToDimension(this, dimensionIn)) return null;
        this.worldObj.theProfiler.startSection("changeDimension");
        MinecraftServer minecraftserver = this.getServer();
        int i = this.dimension;
        WorldServer worldserver = minecraftserver.worldServerForDimension(i);
        WorldServer worldserver1 = minecraftserver.worldServerForDimension(dimensionIn);
        this.dimension = dimensionIn;

        if (i == 1 && dimensionIn == 1) {
            worldserver1 = minecraftserver.worldServerForDimension(0);
            this.dimension = 0;
        }

        this.worldObj.removeEntity(this);
        this.isDead = false;
        this.worldObj.theProfiler.startSection("reposition");
        BlockPos blockpos;

        // Vanilla nether-style 8x coordinate scaling clamped to the world border
        // NOTE(review): inherited from vanilla portal code — presumably intentional for rockets too; confirm
        double d0 = this.posX;
        double d1 = this.posZ;
        double d2 = 8.0D;
        d0 = MathHelper.clamp_double(d0 * 8.0D, worldserver1.getWorldBorder().minX() + 16.0D, worldserver1.getWorldBorder().maxX() - 16.0D);
        d1 = MathHelper.clamp_double(d1 * 8.0D, worldserver1.getWorldBorder().minZ() + 16.0D, worldserver1.getWorldBorder().maxZ() - 16.0D);

        d0 = (double)MathHelper.clamp_int((int)d0, -29999872, 29999872);
        d1 = (double)MathHelper.clamp_int((int)d1, -29999872, 29999872);
        float f = this.rotationYaw;
        this.setLocationAndAngles(d0, this.posY, d1, 90.0F, 0.0F);
        Teleporter teleporter = new TeleporterNoPortal(worldserver1);
        teleporter.placeInExistingPortal(this, f);
        worldserver.updateEntityWithOptionalForce(this, false);
        this.worldObj.theProfiler.endStartSection("reloading");
        // Create the replacement entity in the destination world and copy our state into it
        Entity entity = EntityList.createEntityByName(EntityList.getEntityString(this), worldserver1);

        if (entity != null) {
            this.moveToBlockPosAndAngles(new BlockPos(posX, y, posZ), entity.rotationYaw, entity.rotationPitch);
            ((EntityRocket)entity).copyDataFromOld(this);
            entity.forceSpawn = true;
            worldserver1.spawnEntityInWorld(entity);
            worldserver1.updateEntityWithOptionalForce(entity, true);

            // NOTE(review): timeOffset is unused
            int timeOffset = 1;
            for(Entity e : passengers) {

                //Fix that darn random crash?
                worldserver.resetUpdateEntityTick();
                worldserver1.resetUpdateEntityTick();
                //Transfer the player if applicable

                //Need to handle our own removal to avoid race condition where player is mounted on client on the old entity but is already mounted to the new one on server
                //PacketHandler.sendToPlayer(new PacketEntity(this, (byte)PacketType.DISMOUNTCLIENT.ordinal()), (EntityPlayer) e);

                // Passengers follow via a delayed transition next tick, remounting the new entity
                PlanetEventHandler.addDelayedTransition(worldserver.getTotalWorldTime(), new TransitionEntity(worldserver.getTotalWorldTime(), e, dimensionIn, new BlockPos(posX + 16, y, posZ), entity));

                //minecraftserver.getPlayerList().transferPlayerToDimension((EntityPlayerMP)e, dimensionIn, teleporter);
                //e.setLocationAndAngles(posX, Configuration.orbit, posZ, this.rotationYaw, this.rotationPitch);
                //e.startRiding(entity);
                //e.playerNetServerHandler.sendPacket(new SPacketRespawn(e.dimension, e.worldObj.getDifficulty(), worldserver1.getWorldInfo().getTerrainType(), ((EntityPlayerMP)e).interactionManager.getGameType()));
                //((WorldServer)startWorld).getPlayerManager().removePlayer(player);
            }
        }

        // The old-dimension copy is retired now that the replacement exists
        this.isDead = true;
        this.worldObj.theProfiler.endSection();
        worldserver.resetUpdateEntityTick();
        worldserver1.resetUpdateEntityTick();
        this.worldObj.theProfiler.endSection();
        return entity;
    }
    else {
        return null;
    }
}

/**
 * Prepares this entity in new dimension by copying NBT data from entity in old dimension
 */
public void copyDataFromOld(Entity entityIn) {
    NBTTagCompound nbttagcompound = entityIn.writeToNBT(new NBTTagCompound());
    // Dimension and passenger tags must not carry over to the new-dimension copy
    nbttagcompound.removeTag("Dimension");
    nbttagcompound.removeTag("Passengers");
    this.readFromNBT(nbttagcompound);
    this.timeUntilPortal = entityIn.timeUntilPortal;
}

/**
 * Reads the network-synced subset of this rocket's state.
 * @param nbt tag received from the server
 */
protected void readNetworkableNBT(NBTTagCompound nbt) {
    //Normal function checks for the existance of the data anyway
    readEntityFromNBT(nbt);
}

@Override
// Restores rocket state from NBT: orbit/flight flags, rocket stats, the block
// storage, linked infrastructure, destination dimension and satellite payload.
protected void readEntityFromNBT(NBTTagCompound nbt) {
	setInOrbit(isInOrbit = nbt.getBoolean("orbit"));
	stats.readFromNBT(nbt);
	// Seat array is sized from the freshly loaded stats
	mountedEntities = new WeakReference[stats.getNumPassengerSeats()];
	setFuelAmount(stats.getFuelAmount(FuelType.LIQUID));
	setInFlight(isInFlight = nbt.getBoolean("flight"));
	readMissionPersistantNBT(nbt);

	if(nbt.hasKey("data")) {
		// Lazily create the block storage before loading the rocket's blocks
		if(storage == null)
			storage = new StorageChunk();

		storage.readFromNBT(nbt.getCompoundTag("data"));
		storage.setEntity(this);
		this.setSize(Math.max(storage.getSizeX(), storage.getSizeZ()), storage.getSizeY());
	}

	// 10 == NBT compound tag type id
	NBTTagList tagList = nbt.getTagList("infrastructure", 10);
	for (int i = 0; i < tagList.tagCount(); i++) {
		int coords[] = tagList.getCompoundTagAt(i).getIntArray("loc");

		//If called on server causes recursive loop, use hackish workaround with tempcoords and onChunkLoad if on server
		if(worldObj.isRemote) {
			TileEntity tile = this.worldObj.getTileEntity(new BlockPos(coords[0], coords[1], coords[2]));
			if(tile instanceof IInfrastructure)
				this.linkInfrastructure((IInfrastructure)tile);
		}
		else
			infrastructureCoords.add(new HashedBlockPosition(coords[0], coords[1], coords[2]));
	}

	destinationDimId = nbt.getInteger("destinationDimId");
	lastDimensionFrom = nbt.getInteger("lastDimensionFrom");

	//Satellite payload, if installed ("satallite" key is a legacy misspelling kept for save compat)
	if(nbt.hasKey("satallite")) {
		NBTTagCompound satalliteNbt = nbt.getCompoundTag("satallite");
		satallite = SatelliteRegistry.createFromNBT(satalliteNbt);
	}
}

// Writes the subset of state that is also synced over the network: mission data,
// orbit/flight flags, stats, linked infrastructure positions, destination and satellite.
protected void writeNetworkableNBT(NBTTagCompound nbt) {
	writeMissionPersistantNBT(nbt);
	nbt.setBoolean("orbit", isInOrbit());
	nbt.setBoolean("flight", isInFlight());
	stats.writeToNBT(nbt);

	NBTTagList itemList = new NBTTagList();
	for(int i = 0; i < connectedInfrastructure.size(); i++) {
		IInfrastructure inf = connectedInfrastructure.get(i);

		// Only tile-entity-backed infrastructure has a position to persist
		if(inf instanceof TileEntity) {
			TileEntity ent = (TileEntity)inf;
			NBTTagCompound tag = new NBTTagCompound();
			tag.setIntArray("loc", new int[] {ent.getPos().getX(), ent.getPos().getY(), ent.getPos().getZ()});
			itemList.appendTag(tag);
		}
	}
	nbt.setTag("infrastructure", itemList);
	nbt.setInteger("destinationDimId", destinationDimId);

	//Satellite payload ("satallite" key is a legacy misspelling kept for save compat)
	if(satallite != null) {
		NBTTagCompound satalliteNbt = new NBTTagCompound();
		satallite.writeToNBT(satalliteNbt);
		satalliteNbt.setString("DataType",SatelliteRegistry.getKey(satallite.getClass()));
		nbt.setTag("satallite", satalliteNbt);
	}
}

// Hook for subclasses to persist mission-specific data; no-op here.
public void writeMissionPersistantNBT(NBTTagCompound nbt) {
}

// Hook for subclasses to restore mission-specific data; no-op here.
public void readMissionPersistantNBT(NBTTagCompound nbt) {
}

@Override
protected void writeEntityToNBT(NBTTagCompound nbt) {
	// Full save = networkable subset + the block storage itself
	writeNetworkableNBT(nbt);
	if(storage != null) {
		NBTTagCompound blocks = new NBTTagCompound();
		storage.writeToNBT(blocks);
		nbt.setTag("data", blocks);
	}

	//TODO handle non tile Infrastructure
	nbt.setInteger("lastDimensionFrom", lastDimensionFrom);
}

// Client-side decode of packets identified by packetId; RECIEVENBT carries the
// full block storage, SENDPLANETDATA carries the selected destination dimension.
@Override
public void readDataFromNetwork(ByteBuf in, byte packetId, NBTTagCompound nbt) {
	if(packetId == PacketType.RECIEVENBT.ordinal()) {
		storage = new StorageChunk();
		storage.setEntity(this);
		storage.readFromNetwork(in);
	}
	else if(packetId == PacketType.SENDPLANETDATA.ordinal()) {
		nbt.setInteger("selection", in.readInt());
	}
}

// Encodes packet payloads; the SENDPLANETDATA direction differs by side:
// client sends the planet selector choice, server sends the id chip's dimension.
@Override
public void writeDataToNetwork(ByteBuf out, byte id) {
	if(id == PacketType.RECIEVENBT.ordinal()) {
		storage.writeToNetwork(out);
	}
	else if(id == PacketType.SENDPLANETDATA.ordinal()) {
		if(worldObj.isRemote)
			out.writeInt(container.getSelectedSystem());
		else {
			if(storage.getGuidanceComputer() != null) {
				ItemStack stack = storage.getGuidanceComputer().getStackInSlot(0);
				if(stack != null && stack.getItem() == AdvancedRocketryItems.itemPlanetIdChip) {
					out.writeInt(((ItemPlanetIdentificationChip)AdvancedRocketryItems.itemPlanetIdChip).getDimensionId(stack));
				}
			}
		}
	}
}

// Dispatches a received packet by id. Ids > 100 index into the rocket's GUI tile
// list (offset by 100 + tilebuttonOffset); everything else maps to PacketType.
@Override
public void useNetworkData(EntityPlayer player, Side side, byte id, NBTTagCompound nbt) {
	if(id == PacketType.RECIEVENBT.ordinal()) {
		this.readEntityFromNBT(nbt);
		initFromBounds();
	}
	else if(id == PacketType.DECONSTRUCT.ordinal()) {
		deconstructRocket();
	}
	else if(id == PacketType.SENDINTERACT.ordinal()) {
		interact(player);
	}
	else if(id == PacketType.OPENGUI.ordinal()) { //Used in key handler
		if(player.getRidingEntity() == this) //Prevent cheating
			openGui(player);
	}
	else if(id == PacketType.REQUESTNBT.ordinal()) {
		// Client asked for the rocket's state; reply with a RECIEVENBT packet
		if(storage != null) {
			NBTTagCompound nbtdata = new NBTTagCompound();

			this.writeNetworkableNBT(nbtdata);
			PacketHandler.sendToPlayer(new PacketEntity((INetworkEntity)this, (byte)PacketType.RECIEVENBT.ordinal(), nbtdata), player);
		}
	}
	else if(id == PacketType.FORCEMOUNT.ordinal()) { //Used for pesky dimension transfers
		//When dimensions are transferred make sure to remount the player on the client
		if(!acceptedPacket) {
			acceptedPacket = true;
			player.setPositionAndRotation(this.posX, this.posY, this.posZ, player.rotationYaw, player.rotationPitch);
			player.startRiding(this);
			MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketDeOrbitingEvent(this));
		}
	}
	else if(id == PacketType.LAUNCH.ordinal()) {
		// Only passengers may trigger launch
		if(this.getPassengers().contains(player))
			this.prepareLaunch();
	}
	else if(id == PacketType.CHANGEWORLD.ordinal()) {
		AdvancedRocketry.proxy.changeClientPlayerWorld(storage.world);
	}
	else if(id == PacketType.REVERTWORLD.ordinal()) {
		AdvancedRocketry.proxy.changeClientPlayerWorld(this.worldObj);
	}
	else if(id == PacketType.OPENPLANETSELECTION.ordinal()) {
		player.openGui(LibVulpes.instance, GuiHandler.guiId.MODULARFULLSCREEN.ordinal(), player.worldObj, this.getEntityId(), -1,0);
	}
	else if(id == PacketType.SENDPLANETDATA.ordinal()) {
		// Store the selected destination on the guidance computer's id chip
		ItemStack stack = storage.getGuidanceComputer().getStackInSlot(0);
		if(stack != null && stack.getItem() == AdvancedRocketryItems.itemPlanetIdChip) {
			((ItemPlanetIdentificationChip)AdvancedRocketryItems.itemPlanetIdChip).setDimensionId(stack, nbt.getInteger("selection"));

			//Send data back to sync destination dims
			if(!worldObj.isRemote) {
				PacketHandler.sendToPlayersTrackingEntity(new PacketEntity(this, (byte)PacketType.SENDPLANETDATA.ordinal()), this);
			}
		}
	}
	else if(id == PacketType.DISCONNECTINFRASTRUCTURE.ordinal()) {
		int pos[] = nbt.getIntArray("pos");

		connectedInfrastructure.remove(new HashedBlockPosition(pos[0], pos[1], pos[2]));

		TileEntity tile = worldObj.getTileEntity(new BlockPos(pos[0], pos[1], pos[2]));
		if(tile instanceof IInfrastructure) {
			((IInfrastructure)tile).unlinkRocket();
			connectedInfrastructure.remove(tile);
		}
	}
	else if(id == PacketType.ROCKETLANDEVENT.ordinal() && worldObj.isRemote) {
		MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketLandedEvent(this));
	}
	else if(id == PacketType.DISMOUNTCLIENT.ordinal() && worldObj.isRemote) {
		player.dismountRidingEntity();
		//this.removePassenger(player);
	}
	else if(id > 100) {
		TileEntity tile = storage.getGUItiles().get(id - 100 - tilebuttonOffset);
		//Welcome to super hack time with packets
		//Due to the fact the client uses the player's current world to open the gui, we have to move the client between worlds for a bit
		PacketHandler.sendToPlayer(new PacketEntity(this, (byte)PacketType.CHANGEWORLD.ordinal()), player);
		storage.getBlockState(tile.getPos()).getBlock().onBlockActivated(storage.world, tile.getPos(), storage.getBlockState(tile.getPos()), player, EnumHand.MAIN_HAND, null, EnumFacing.DOWN, 0, 0, 0);
		PacketHandler.sendToPlayer(new PacketEntity(this, (byte)PacketType.REVERTWORLD.ordinal()), player);
	}
}

// Keeps the pilot (and extra seat occupants) positioned on their seats each tick.
@Override
public void updatePassenger(Entity entity) {
	if (entity != null ) {
		//Bind player to the seat
		if(this.storage != null) {
			//Conditional b/c for some reason client/server positions do not match
			float xOffset = this.storage.getSizeX() % 2 == 0 ? 0.5f : 0f;
			float zOffset = this.storage.getSizeZ() % 2 == 0 ? 0.5f : 0f;
			entity.setPosition(this.posX + stats.getSeatX() + xOffset, this.posY + stats.getSeatY() - 0.5f, this.posZ + stats.getSeatZ() + zOffset );
		}
		else
			entity.setPosition(this.posX , this.posY , this.posZ );
	}

	// Extra passenger seats are tracked via weak references in mountedEntities
	for(int i = 0; i < this.stats.getNumPassengerSeats(); i++) {
		HashedBlockPosition pos = this.stats.getPassengerSeat(i);
		if(mountedEntities[i] != null && mountedEntities[i].get() != null) {
			mountedEntities[i].get().setPosition(this.posX + pos.x, this.posY + pos.y, this.posZ + pos.z);
			System.out.println("Additional: " + mountedEntities[i].get());
		}
	}
}

// Builds the GUI module list: the standard rocket inventory (MODULAR) or the
// full-screen planet selector (any other id).
@Override
public List<ModuleBase> getModules(int ID, EntityPlayer player) {
	List<ModuleBase> modules;
	//If the rocket is flight don't load the interface
	modules = new LinkedList<ModuleBase>();

	if(ID == GuiHandler.guiId.MODULAR.ordinal()) {
		//Backgrounds
		if(worldObj.isRemote) {
			modules.add(new ModuleImage(173, 0, new IconResource(128, 0, 48, 86, CommonResources.genericBackground)));
			modules.add(new ModuleImage(173, 86, new IconResource(98, 0, 78, 83, CommonResources.genericBackground)));
			modules.add(new ModuleImage(173, 168, new IconResource(98, 168, 78, 3, CommonResources.genericBackground)));
		}

		//Fuel
		modules.add(new ModuleProgress(192, 7, 0, new ProgressBarImage(2, 173, 12, 71, 17, 6, 3, 69, 1, 1, EnumFacing.UP, TextureResources.rocketHud), this));

		//TODO DEBUG tiles!
		// One button per GUI-capable tile inside the rocket, laid out in a 9-wide grid
		List<TileEntity> tiles = storage.getGUItiles();
		for(int i = 0; i < tiles.size(); i++) {
			TileEntity tile = tiles.get(i);
			IBlockState state = storage.getBlockState(tile.getPos());
			try {
				modules.add(new ModuleSlotButton(8 + 18* (i % 9), 17 + 18*(i/9), i + tilebuttonOffset, this, new ItemStack(state.getBlock(), 1, state.getBlock().getMetaFromState(state)), worldObj));
			} catch (NullPointerException e) {
				// Block has no valid item form; skip its button
			}
		}

		//Add buttons
		modules.add(new ModuleButton(180, 140, 0, "Dissassemble", this, zmaster587.libVulpes.inventory.TextureResources.buttonBuild, 64, 20));
		//modules.add(new ModuleButton(180, 95, 1, "", this, TextureResources.buttonLeft, 10, 16));
		//modules.add(new ModuleButton(202, 95, 2, "", this, TextureResources.buttonRight, 10, 16));
		modules.add(new ModuleButton(180, 114, 1, "Select Dst", this, zmaster587.libVulpes.inventory.TextureResources.buttonBuild, 64,20));
		//modules.add(new ModuleText(180, 114, "Inventories", 0x404040));
	}
	else {
		// Planet selection screen: walk up to the root body of the current system
		DimensionProperties properties = DimensionManager.getEffectiveDimId(worldObj, this.getPosition());
		while(properties.getParentProperties() != null)
			properties = properties.getParentProperties();
		container = new ModulePlanetSelector(properties.getId(), zmaster587.libVulpes.inventory.TextureResources.starryBG, this, false);
		container.setOffset(1000, 1000);
		modules.add(container);
	}
	return modules;
}

@Override
public String getModularInventoryName() {
	return "Rocket";
}

// Progress bar 0 is the fuel gauge; name spelling is fixed by the interface.
@Override
public float getNormallizedProgress(int id) {
	if(id == 0)
		return getFuelAmount()/(float)getFuelCapacity();
	return 0;
}

@Override
public void setProgress(int id, int progress) {
}

@Override
public int getProgress(int id) {
	return 0;
}

@Override
public int getTotalProgress(int id) {
	return 0;
}

@Override
public void setTotalProgress(int id, int progress) {}

@Override
public boolean startRiding(Entity entityIn, boolean force) {
	// Defers to the vanilla implementation
	return super.startRiding(entityIn, force);
}

@Override
public boolean startRiding(Entity entityIn) {
	// Defers to the vanilla implementation
	return super.startRiding(entityIn);
}

// Handles GUI button clicks client-side: 0 = deconstruct, 1 = open planet
// selection; anything else is a tile button forwarded to the server as id+100.
@Override
@SideOnly(Side.CLIENT)
public void onInventoryButtonPressed(int buttonId) {
	switch(buttonId) {
	case 0:
		PacketHandler.sendToServer(new PacketEntity(this, (byte)EntityRocket.PacketType.DECONSTRUCT.ordinal()));
		break;
	case 1:
		PacketHandler.sendToServer(new PacketEntity(this, (byte)EntityRocket.PacketType.OPENPLANETSELECTION.ordinal()));
		break;
	default:
		PacketHandler.sendToServer(new PacketEntity(this, (byte)(buttonId + 100)));
		//Minecraft.getMinecraft().thePlayer.closeScreen();
		// Open the clicked tile's GUI locally as well
		TileEntity tile = storage.getGUItiles().get(buttonId - tilebuttonOffset);
		storage.getBlockState(tile.getPos()).getBlock().onBlockActivated(storage.world, tile.getPos(), storage.getBlockState(tile.getPos()), Minecraft.getMinecraft().thePlayer, EnumHand.MAIN_HAND, null, EnumFacing.DOWN, 0, 0, 0);
	}
}

// Container stays usable while the rocket is alive and within 64 blocks.
@Override
public boolean canInteractWithContainer(EntityPlayer entity) {
	boolean ret = !this.isDead && this.getDistanceToEntity(entity) < 64;
	if(!ret)
		RocketInventoryHelper.removePlayerFromInventoryBypass(entity);

	RocketInventoryHelper.updateTime(entity, worldObj.getWorldTime());
	return ret;
}

@Override
public StatsRocket getRocketStats() {
	return stats;
}

@Override
public void onSelected(Object sender) {
}

// Planet selector confirmed: send the chosen destination to the server.
@Override
public void onSelectionConfirmed(Object sender) {
	PacketHandler.sendToServer(new PacketEntity(this, (byte)PacketType.SENDPLANETDATA.ordinal()));
}

@Override
public void onSystemFocusChanged(Object sender) {
	// Intentionally empty; selection focus changes need no handling here
}

public LinkedList<IInfrastructure> getConnectedInfrastructure() {
	return connectedInfrastructure;
}
}
src/main/java/zmaster587/advancedRocketry/entity/EntityRocket.java
package zmaster587.advancedRocketry.entity; import io.netty.buffer.ByteBuf; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import javax.annotation.Nullable; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.GLAllocation; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityList; import net.minecraft.entity.EntityLiving; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.network.datasync.DataParameter; import net.minecraft.network.datasync.DataSerializers; import net.minecraft.network.datasync.EntityDataManager; import net.minecraft.network.play.server.SPacketRespawn; import net.minecraft.server.MinecraftServer; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumHand; import net.minecraft.util.SoundCategory; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import net.minecraft.util.text.TextComponentString; import net.minecraft.world.Teleporter; import net.minecraft.world.World; import net.minecraft.world.WorldServer; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fluids.FluidContainerRegistry; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.fluids.IFluidContainerItem; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import zmaster587.advancedRocketry.AdvancedRocketry; import zmaster587.advancedRocketry.achievements.ARAchivements; import zmaster587.advancedRocketry.api.AdvancedRocketryItems; import 
zmaster587.advancedRocketry.api.Configuration; import zmaster587.advancedRocketry.api.EntityRocketBase; import zmaster587.advancedRocketry.api.IInfrastructure; import zmaster587.advancedRocketry.api.RocketEvent; import zmaster587.advancedRocketry.api.RocketEvent.RocketLaunchEvent; import zmaster587.advancedRocketry.api.RocketEvent.RocketPreLaunchEvent; import zmaster587.advancedRocketry.api.SatelliteRegistry; import zmaster587.advancedRocketry.api.StatsRocket; import zmaster587.advancedRocketry.api.fuel.FuelRegistry; import zmaster587.advancedRocketry.api.fuel.FuelRegistry.FuelType; import zmaster587.advancedRocketry.api.satellite.SatelliteBase; import zmaster587.advancedRocketry.api.stations.ISpaceObject; import zmaster587.advancedRocketry.atmosphere.AtmosphereHandler; import zmaster587.advancedRocketry.client.SoundRocketEngine; import zmaster587.advancedRocketry.dimension.DimensionManager; import zmaster587.advancedRocketry.dimension.DimensionProperties; import zmaster587.advancedRocketry.event.PlanetEventHandler; import zmaster587.advancedRocketry.inventory.TextureResources; import zmaster587.advancedRocketry.inventory.modules.ModulePlanetSelector; import zmaster587.advancedRocketry.item.ItemAsteroidChip; import zmaster587.advancedRocketry.item.ItemPackedStructure; import zmaster587.advancedRocketry.item.ItemPlanetIdentificationChip; import zmaster587.advancedRocketry.item.ItemStationChip; import zmaster587.advancedRocketry.mission.MissionOreMining; import zmaster587.advancedRocketry.network.PacketSatellite; import zmaster587.advancedRocketry.stations.SpaceObject; import zmaster587.advancedRocketry.stations.SpaceObjectManager; import zmaster587.advancedRocketry.tile.TileGuidanceComputer; import zmaster587.advancedRocketry.tile.hatch.TileSatelliteHatch; import zmaster587.advancedRocketry.util.AudioRegistry; import zmaster587.advancedRocketry.util.RocketInventoryHelper; import zmaster587.advancedRocketry.util.StorageChunk; import 
zmaster587.advancedRocketry.util.TransitionEntity; import zmaster587.advancedRocketry.world.util.TeleporterNoPortal; import zmaster587.libVulpes.LibVulpes; import zmaster587.libVulpes.client.util.ProgressBarImage; import zmaster587.libVulpes.gui.CommonResources; import zmaster587.libVulpes.interfaces.INetworkEntity; import zmaster587.libVulpes.inventory.GuiHandler; import zmaster587.libVulpes.inventory.modules.IButtonInventory; import zmaster587.libVulpes.inventory.modules.IModularInventory; import zmaster587.libVulpes.inventory.modules.IProgressBar; import zmaster587.libVulpes.inventory.modules.ISelectionNotify; import zmaster587.libVulpes.inventory.modules.ModuleBase; import zmaster587.libVulpes.inventory.modules.ModuleButton; import zmaster587.libVulpes.inventory.modules.ModuleImage; import zmaster587.libVulpes.inventory.modules.ModuleProgress; import zmaster587.libVulpes.inventory.modules.ModuleSlotButton; import zmaster587.libVulpes.items.ItemLinker; import zmaster587.libVulpes.network.PacketEntity; import zmaster587.libVulpes.network.PacketHandler; import zmaster587.libVulpes.util.HashedBlockPosition; import zmaster587.libVulpes.util.IconResource; import zmaster587.libVulpes.util.Vector3F; public class EntityRocket extends EntityRocketBase implements INetworkEntity, IModularInventory, IProgressBar, IButtonInventory, ISelectionNotify { //true if the rocket is on decent private boolean isInOrbit; //True if the rocket isn't on the ground private boolean isInFlight; //used in the rare case a player goes to a non-existant space station private int lastDimensionFrom = 0; public StorageChunk storage; private String errorStr; private long lastErrorTime = Long.MIN_VALUE; private static long ERROR_DISPLAY_TIME = 100; private static int DESCENT_TIMER = 500; protected long lastWorldTickTicked; private SatelliteBase satallite; protected int destinationDimId; //Offset for buttons linking to the tileEntityGrid private int tilebuttonOffset = 3; private int autoDescendTimer; 
// Weak references to entities occupying the extra passenger seats
private WeakReference<Entity>[] mountedEntities;
// Planet selection GUI module, created on demand in getModules
protected ModulePlanetSelector container;
// Guards against applying FORCEMOUNT more than once after a dimension transfer
boolean acceptedPacket = false;

// Packet ids exchanged between client and server for this entity.
// NOTE: ordinals are wire format — do not reorder. RECIEVENBT spelling is legacy.
public static enum PacketType {
	RECIEVENBT,
	SENDINTERACT,
	REQUESTNBT,
	FORCEMOUNT,
	LAUNCH,
	DECONSTRUCT,
	OPENGUI,
	CHANGEWORLD,
	REVERTWORLD,
	OPENPLANETSELECTION,
	SENDPLANETDATA,
	DISCONNECTINFRASTRUCTURE,
	CONNECTINFRASTRUCTURE,
	ROCKETLANDEVENT,
	MENU_CHANGE,
	UPDATE_ATM,
	UPDATE_ORBIT,
	UPDATE_FLIGHT,
	DISMOUNTCLIENT
}

// Data-watcher parameters synced to clients automatically
private static final DataParameter<Integer> fuelLevel = EntityDataManager.<Integer>createKey(EntityRocket.class, DataSerializers.VARINT);
private static final DataParameter<Boolean> INFLIGHT = EntityDataManager.<Boolean>createKey(EntityRocket.class, DataSerializers.BOOLEAN);
private static final DataParameter<Boolean> INORBIT = EntityDataManager.<Boolean>createKey(EntityRocket.class, DataSerializers.BOOLEAN);

/**
 * Base constructor used by the entity registry and for client-side spawning.
 * @param p_i1582_1_ the world this entity lives in
 */
public EntityRocket(World p_i1582_1_) {
	super(p_i1582_1_);
	isInOrbit = false;
	stats = new StatsRocket();
	isInFlight = false;
	connectedInfrastructure = new LinkedList<IInfrastructure>();
	infrastructureCoords = new LinkedList<HashedBlockPosition>();
	mountedEntities = new WeakReference[stats.getNumPassengerSeats()];
	lastWorldTickTicked = p_i1582_1_.getTotalWorldTime();
	autoDescendTimer = 5000;
}

/**
 * Constructs a fully assembled rocket at the given position.
 * @param world the world to spawn in
 * @param storage block contents of the rocket
 * @param stats precomputed rocket statistics
 * @param x spawn x
 * @param y spawn y
 * @param z spawn z
 */
public EntityRocket(World world, StorageChunk storage, StatsRocket stats, double x, double y, double z) {
	this(world);
	this.stats = stats;
	this.setPosition(x, y, z);
	this.storage = storage;
	this.storage.setEntity(this);
	initFromBounds();
	isInFlight = false;
	mountedEntities = new WeakReference[stats.getNumPassengerSeats()];
	lastWorldTickTicked = world.getTotalWorldTime();
	autoDescendTimer = 5000;
}

@Override
public AxisAlignedBB getEntityBoundingBox() {
	if(storage != null) {
		return super.getEntityBoundingBox();//.offset(0, -storage.getSizeY(), 0);
	}
	// Fallback unit box before the block storage has been received/loaded
	return new AxisAlignedBB(0,0,0,1,1,1);
}

@Override
public void setEntityBoundingBox(AxisAlignedBB bb) {
	//if(storage != null)
	// super.setEntityBoundingBox(bb.offset(0, storage.getSizeY(),0));
	//else
	super.setEntityBoundingBox(bb);
}

@Override
public AxisAlignedBB getCollisionBoundingBox() {
	// Collision box is identical to the entity bounding box
	return getEntityBoundingBox();
}

/**
 * @return the amount of fuel stored in the rocket
 */
public int getFuelAmount() {
	// Data watcher is authoritative; mirror the value back into the stats object
	int amount = dataManager.get(fuelLevel);
	stats.setFuelAmount(FuelType.LIQUID,amount);
	return amount;
}

/**
 * Adds fuel and updates the datawatcher
 * @param amount amount of fuel to add
 * @return the amount of fuel added
 */
public int addFuelAmount(int amount) {
	int ret = stats.addFuelAmount(FuelType.LIQUID, amount);

	setFuelAmount(stats.getFuelAmount(FuelType.LIQUID));

	return ret;
}

/**
 * Unlinks the given infrastructure from this rocket.
 * NOTE(review): the server-side sync packet is currently commented out — clients
 * are not notified of the disconnect here; confirm this is intentional.
 */
public void disconnectInfrastructure(IInfrastructure infrastructure){
	infrastructure.unlinkRocket();
	infrastructureCoords.remove(new HashedBlockPosition(((TileEntity)infrastructure).getPos()));

	if(!worldObj.isRemote) {
		int pos[] = {((TileEntity)infrastructure).getPos().getX(), ((TileEntity)infrastructure).getPos().getY(), ((TileEntity)infrastructure).getPos().getZ()};

		NBTTagCompound nbt = new NBTTagCompound();
		nbt.setIntArray("pos", pos);

		//PacketHandler.sendToPlayersTrackingEntity(new PacketEntity(this, (byte)PacketType.DISCONNECTINFRASTRUCTURE.ordinal(), nbt), this);
	}
}

// HUD text: recent errors take priority, then launch/descend prompts with the
// resolved destination name (space station id or dimension name).
@Override
public String getTextOverlay() {
	if(this.worldObj.getTotalWorldTime() < this.lastErrorTime + ERROR_DISPLAY_TIME)
		return errorStr;

	//Get destination string
	String displayStr = "N/A";
	if(storage != null) {
		int dimid = storage.getDestinationDimId(this.worldObj.provider.getDimension(), (int)posX, (int)posZ);
		if(dimid == Configuration.spaceDimId) {
			Vector3F<Float> vec = storage.getDestinationCoordinates(dimid, false);
			if(vec != null) {
				ISpaceObject obj = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(new BlockPos(vec.x,vec.y,vec.z));
				if(obj != null) {
					displayStr = "Station " + obj.getId();
				}
			}
		}
		else if(dimid != -1 && dimid != SpaceObjectManager.WARPDIMID) {
			displayStr = DimensionManager.getInstance().getDimensionProperties(dimid).getName();
		}
	}

	if(isInOrbit() && !isInFlight())
		return "Press Space to descend!\n Auto descend in " + ((DESCENT_TIMER - this.ticksExisted)/20);
	else if(!isInFlight())
		return "Press Space to take off!\nDest: " + displayStr;

	return super.getTextOverlay();
}

// Records an error message for display by getTextOverlay for ERROR_DISPLAY_TIME ticks.
private void setError(String error) {
	this.errorStr = error;
	this.lastErrorTime = this.worldObj.getTotalWorldTime();
}

@Override
public void setPosition(double x, double y, double z) {
	super.setPosition(x, y, z);

	if(storage != null) {
		// NOTE(review): these locals only feed the commented-out bounding box code below
		float sizeX = storage.getSizeX()/2.0f;
		float sizeY = storage.getSizeY();
		float sizeZ = storage.getSizeZ()/2.0f;
		//setEntityBoundingBox(new AxisAlignedBB(x - sizeX, y - (double)this.getYOffset() + this.height, z - sizeZ, x + sizeX, y + sizeY - (double)this.getYOffset() + this.height, z + sizeZ));
	}
}

/**
 * Updates the fuel level in the data watcher (syncs to clients).
 * @param amt sets the amount of fuel in the rocket
 */
public void setFuelAmount(int amt) {
	dataManager.set(fuelLevel, amt);
	dataManager.setDirty(fuelLevel);
}

/**
 * @return the fuel capacity of the rocket
 */
public int getFuelCapacity() {
	return stats.getFuelCapacity(FuelType.LIQUID);
}

@Override
public void setEntityId(int id){
	super.setEntityId(id);

	//Ask server for nbt data
	if(worldObj.isRemote) {
		PacketHandler.sendToServer(new PacketEntity(this, (byte)PacketType.REQUESTNBT.ordinal()));
	}
}

@Override
public boolean canBeCollidedWith() {
	return true;
}

/**
 * If the rocket is in flight, ie the rocket has taken off and has not touched the ground
 * @return true if in flight
 */
public boolean isInFlight() {
	// Server reads the field directly; clients read the synced data watcher value
	if(!worldObj.isRemote) {
		return isInFlight;
	}
	return this.dataManager.get(INFLIGHT);
}

/**
 * Sets the orbit status of the rocket and updates the data watcher.
 * @param inOrbit true if the rocket is now in orbit
 */
public void setInOrbit(boolean inOrbit) {
	this.isInOrbit = inOrbit;
	this.dataManager.set(INORBIT, inOrbit);
	this.dataManager.setDirty(INORBIT);
}

/**
 * Whether the rocket is in orbit (descending phase).
 * @return true if in orbit; clients read the synced data watcher value
 */
public boolean isInOrbit() {
    // Server reads the authoritative field; clients read the synced data-manager value.
    if(!worldObj.isRemote) {
        return isInOrbit;
    }
    return this.dataManager.get(INORBIT);
}

/**
 * Sets the status of flight of the rocket and updates the data manager so clients stay in sync.
 * @param inflight status of flight
 */
public void setInFlight(boolean inflight) {
    this.isInFlight = inflight;
    this.dataManager.set(INFLIGHT, inflight);
    this.dataManager.setDirty(INFLIGHT);
}

@Override
protected void entityInit() {
    // Register the synced entries with their defaults: grounded, no fuel, not in orbit.
    this.dataManager.register(INFLIGHT, false);
    this.dataManager.register(fuelLevel, 0);
    this.dataManager.register(INORBIT, false);
}

//Set the size and position of the rocket from storage
public void initFromBounds() {
    if(storage != null) {
        this.setSize(Math.max(storage.getSizeX(), storage.getSizeZ()), storage.getSizeY());
        this.setPosition(this.posX, this.posY, this.posZ);
    }
}

/**
 * Handles a right-click on the rocket: a set linker item links infrastructure, a filled
 * fluid container adds fuel, sneaking opens the GUI, otherwise the player mounts the
 * pilot seat if one exists and is free.
 * @param player the interacting player
 * @return true if the interaction was consumed
 */
protected boolean interact(EntityPlayer player) {
    //Actual interact code needs to be moved to a packet receive on the server
    ItemStack heldItem = player.getHeldItem(EnumHand.MAIN_HAND);

    //Handle linkers and right-click with fuel
    if(heldItem != null) {
        float fuelMult;
        FluidStack fluidStack;

        if(heldItem.getItem() instanceof ItemLinker) {
            if(ItemLinker.isSet(heldItem)) {

                TileEntity tile = this.worldObj.getTileEntity(ItemLinker.getMasterCoords(heldItem));

                if(tile instanceof IInfrastructure) {
                    IInfrastructure infrastructure = (IInfrastructure)tile;
                    // Link range is padded by the rocket's horizontal footprint.
                    if(this.getDistance(ItemLinker.getMasterX(heldItem), this.posY, ItemLinker.getMasterZ(heldItem)) < infrastructure.getMaxLinkDistance() + Math.max(storage.getSizeX(), storage.getSizeZ())) {
                        if(!connectedInfrastructure.contains(tile)) {

                            linkInfrastructure(infrastructure);
                            if(!worldObj.isRemote) {
                                player.addChatMessage(new TextComponentString("Linked Sucessfully"));
                            }
                            ItemLinker.resetPosition(heldItem);
                            return true;
                        }
                        else if(!worldObj.isRemote)
                            player.addChatMessage(new TextComponentString("Already linked!"));
                    }
                    else if(!worldObj.isRemote)
                        player.addChatMessage(new TextComponentString("The object you are trying to link is too far away"));
                }
                else if(!worldObj.isRemote)
                    player.addChatMessage(new TextComponentString("This cannot be linked to a rocket!"));
            }
            else if(!worldObj.isRemote)
                player.addChatMessage(new TextComponentString("Nothing to be linked"));
            return false;
        }
        // Filled container (FluidContainerRegistry or IFluidContainerItem with at least a
        // bucket's worth) holding a registered liquid rocket fuel; note fuelMult and
        // fluidStack are assigned inside the condition itself.
        else if((FluidContainerRegistry.isFilledContainer(heldItem) && (fuelMult = FuelRegistry.instance.getMultiplier(FuelType.LIQUID, (fluidStack = FluidContainerRegistry.getFluidForFilledItem(heldItem)).getFluid()) ) > 0 ) ||
                ( heldItem.getItem() instanceof IFluidContainerItem && ((IFluidContainerItem) heldItem.getItem()).getFluid(heldItem) != null && ((IFluidContainerItem) heldItem.getItem()).getFluid(heldItem).amount >= FluidContainerRegistry.BUCKET_VOLUME && (fuelMult = FuelRegistry.instance.getMultiplier(FuelType.LIQUID, (fluidStack = ((IFluidContainerItem) heldItem.getItem()).getFluid(heldItem)).getFluid())) > 0 )) {

            int amountToAdd = (int) (fuelMult*fluidStack.amount);
            this.addFuelAmount(amountToAdd);

            //if the player is not in creative then try to use the fluid container
            if(!player.capabilities.isCreativeMode) {
                if(heldItem.getItem() instanceof IFluidContainerItem) {
                    ((IFluidContainerItem) heldItem.getItem()).drain(heldItem, FluidContainerRegistry.BUCKET_VOLUME, true);
                }
                else {
                    ItemStack emptyStack = FluidContainerRegistry.drainFluidContainer(player.getHeldItem(EnumHand.MAIN_HAND));

                    if(player.inventory.addItemStackToInventory(emptyStack)) {
                        player.getHeldItem(EnumHand.MAIN_HAND).splitStack(1);

                        if(player.getHeldItem(EnumHand.MAIN_HAND).stackSize == 0)
                            player.inventory.setInventorySlotContents(player.inventory.currentItem, null);
                    }
                }
            }
            return true;
        }
    }

    //If player is holding shift open GUI
    if(player.isSneaking()) {
        openGui(player);
    }
    else if(stats.hasSeat()) {
        //If pilot seat is open mount entity there
        if(stats.hasSeat() && this.getPassengers().isEmpty()) {
            if(!worldObj.isRemote)
                player.startRiding(this);
        }
        /*else if(stats.getNumPassengerSeats() > 0) { //If a passenger seat exists and one is empty, mount the player to it
            for(int i = 0; i < stats.getNumPassengerSeats(); i++) {
                if(this.mountedEntities[i] == null || this.mountedEntities[i].get() == null) {
                    player.ridingEntity = this;
                    this.mountedEntities[i] = new WeakReference<Entity>(player);
                    break;
                }
            }
        }*/
    }
    return true;
}

/**
 * Opens the rocket's modular GUI for the given player; on the server also registers an
 * inventory bypass so the rocket's inventory stays accessible.
 */
public void openGui(EntityPlayer player) {
    player.openGui(LibVulpes.instance, GuiHandler.guiId.MODULAR.ordinal(), player.worldObj, this.getEntityId(), -1,0);

    //Only handle the bypass on the server
    if(!worldObj.isRemote)
        RocketInventoryHelper.addPlayerToInventoryBypass(player);
}

@Override
public boolean processInitialInteract(EntityPlayer player, @Nullable ItemStack stack, EnumHand hand){
    if(worldObj.isRemote) {
        //Due to forge's rigid handling of entities (NetHanlderPlayServer:866) needs to be handled differently for large rockets
        PacketHandler.sendToServer(new PacketEntity(this, (byte)PacketType.SENDINTERACT.ordinal()));
        return interact(player);
    }
    return true;
}

/**
 * @return true if the rocket should currently consume fuel: it has fuel (or fuel is not
 * required by config) and is either ascending or actively being braked by a passenger.
 */
public boolean isBurningFuel() {
    return (getFuelAmount() > 0 || !Configuration.rocketRequireFuel) && ((!this.getPassengers().isEmpty() && getPassengerMovingForward() > 0) || !isInOrbit());
}

/**
 * @return the forward movement input of the first player passenger found, or 0 if none
 */
public float getPassengerMovingForward() {
    for(Entity entity : this.getPassengers()) {
        if(entity instanceof EntityPlayer) {
            return ((EntityPlayer) entity).moveForward;
        }
    }
    return 0f;
}

// True if any passenger is a player (as opposed to e.g. a satellite payload entity).
private boolean hasHumanPassenger() {
    for(Entity entity : this.getPassengers()) {
        if(entity instanceof EntityPlayer) {
            return true;
        }
    }
    return false;
}

/**
 * @return true if automatic retro-rockets should fire: in orbit, below y=300 and falling
 * fast; clients treat the altitude window as descent regardless of velocity (visuals only).
 */
public boolean isDescentPhase() {
    return Configuration.automaticRetroRockets && isInOrbit() && this.posY < 300 && (this.motionY < -0.4f || worldObj.isRemote);
}

// Engines are considered firing when ascending, auto-descending, or a passenger is braking.
public boolean areEnginesRunning() {
    return (this.motionY > 0 || isDescentPhase() || (getPassengerMovingForward() > 0));
}

@Override
public void onUpdate() {
    super.onUpdate();

    // Ticks elapsed since the last update; scales fuel burn/acceleration below.
    long deltaTime = worldObj.getTotalWorldTime() - lastWorldTickTicked;
    lastWorldTickTicked = worldObj.getTotalWorldTime();

    if(this.ticksExisted == 20) {
        //problems with loading on other world then where the infrastructure was set?
        // Re-link infrastructure by saved coordinates once the world has had time to load.
        ListIterator<HashedBlockPosition> itr = infrastructureCoords.listIterator();
        while(itr.hasNext()) {
            HashedBlockPosition temp = itr.next();

            TileEntity tile = this.worldObj.getTileEntity(new BlockPos(temp.x, temp.y, temp.z));
            if(tile instanceof IInfrastructure) {
                this.linkInfrastructure((IInfrastructure)tile);
                itr.remove();
            }
        }

        if(worldObj.isRemote)
            LibVulpes.proxy.playSound(new SoundRocketEngine( AudioRegistry.combustionRocket, SoundCategory.NEUTRAL,this));
    }

    // Once the countdown expires, an orbiting rocket begins its descent automatically.
    if(this.ticksExisted > DESCENT_TIMER && isInOrbit() && !isInFlight())
        setInFlight(true);

    //Hackish crap to make clients mount entities immediately after server transfer and fire events
    //Known race condition... screw me...
    if(!worldObj.isRemote && (this.isInFlight() || this.isInOrbit()) && this.ticksExisted == 20) {
        //Deorbiting
        MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketDeOrbitingEvent(this));
        PacketHandler.sendToNearby(new PacketEntity(this, (byte)PacketType.ROCKETLANDEVENT.ordinal()), worldObj.provider.getDimension(), (int)posX, (int)posY, (int)posZ, 64);

        for(Entity riddenByEntity : getPassengers()) {
            if(riddenByEntity instanceof EntityPlayer) {
                EntityPlayer player = (EntityPlayer)riddenByEntity;

                // NOTE(review): this instanceof re-check is redundant — already verified above.
                if(player instanceof EntityPlayer)
                    PacketHandler.sendToPlayer(new PacketEntity((INetworkEntity)this,(byte)PacketType.FORCEMOUNT.ordinal()), player);
            }
        }
    }

    if(isInFlight()) {
        boolean burningFuel = isBurningFuel();
        boolean descentPhase = isDescentPhase();

        if(burningFuel || descentPhase) {
            //Burn the rocket fuel
            if(!worldObj.isRemote && !descentPhase)
                setFuelAmount(getFuelAmount() - stats.getFuelRate(FuelType.LIQUID));

            //Spawn in the particle effects for the engines
            // NOTE(review): engineNum is never incremented in this loop, so the group
            // rotation below always evaluates with engineNum == 0 — confirm intent.
            int engineNum = 0;
            if(worldObj.isRemote && Minecraft.getMinecraft().gameSettings.particleSetting < 2 && areEnginesRunning()) {
                for(Vector3F<Float> vec : stats.getEngineLocations()) {

                    AtmosphereHandler handler;
                    // Smoke is throttled (every 10 ticks, rotating through engine groups of 8)
                    // and suppressed in atmospheres that do not allow combustion.
                    if(Minecraft.getMinecraft().gameSettings.particleSetting < 1 && worldObj.getTotalWorldTime() % 10 == 0 && (engineNum < 8 || ((worldObj.getTotalWorldTime()/10) % Math.max((stats.getEngineLocations().size()/8),1)) == (engineNum/8)) && ( (handler = AtmosphereHandler.getOxygenHandler(worldObj.provider.getDimension())) == null || (handler.getAtmosphereType(this) != null && handler.getAtmosphereType(this).allowsCombustion())) )
                        AdvancedRocketry.proxy.spawnParticle("rocketSmoke", worldObj, this.posX + vec.x, this.posY + vec.y - 0.75, this.posZ +vec.z,0,0,0);

                    for(int i = 0; i < 4; i++) {
                        AdvancedRocketry.proxy.spawnParticle("rocketFlame", worldObj, this.posX + vec.x, this.posY + vec.y - 0.75, this.posZ +vec.z,(this.rand.nextFloat() - 0.5f)/8f,-.75 ,(this.rand.nextFloat() - 0.5f)/8f);
                    }
                }
            }
        }

        if(!this.getPassengers().isEmpty()) {
            // Passengers never take fall damage while riding the rocket.
            for(Entity entity : this.getPassengers()) {
                entity.fallDistance = 0;
                this.fallDistance = 0;
            }

            //if the player holds the forward key then decelerate
            if(isInOrbit() && (burningFuel || descentPhase)) {
                float vel = descentPhase ? 1f : getPassengerMovingForward();
                this.motionY -= this.motionY*vel/50f;
            }
            this.velocityChanged = true;
        }
        else if(isInOrbit() && descentPhase) { //For unmanned rockets
            this.motionY -= this.motionY/50f;
            this.velocityChanged = true;
        }

        if(!worldObj.isRemote) {
            //If out of fuel or descending then accelerate downwards
            if(isInOrbit() || !burningFuel) {
                this.motionY = Math.min(this.motionY - 0.001, 1);
            }
            else
                //this.motionY = Math.min(this.motionY + 0.001, 1);
                this.motionY += stats.getAcceleration() * deltaTime;

            double lastPosY = this.posY;
            double prevMotion = this.motionY;
            this.moveEntity(0, prevMotion*deltaTime, 0);

            //Check to see if it's landed
            // (landed = moved less than expected, i.e. collision stopped it, below y=256)
            if((isInOrbit() || !burningFuel) && isInFlight() && lastPosY + prevMotion != this.posY && this.posY < 256) {
                MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketLandedEvent(this));
                //PacketHandler.sendToPlayersTrackingEntity(new PacketEntity(this, (byte)PacketType.ROCKETLANDEVENT.ordinal()), this);
                this.setInFlight(false);
                this.setInOrbit(false);
            }

            if(!isInOrbit() && (this.posY > Configuration.orbit)) {
                onOrbitReached();
            }

            //If the rocket falls out of the world while in orbit either fall back to earth or die
            if(this.posY < 0) {
                int dimId = worldObj.provider.getDimension();

                if(dimId == Configuration.spaceDimId) {
                    // Falling off a space station: drop toward the planet the station orbits.
                    ISpaceObject obj = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(getPosition());
                    if(obj != null) {
                        int targetDimID = obj.getOrbitingPlanetId();
                        Vector3F<Float> pos = storage.getDestinationCoordinates(targetDimID, true);
                        if(pos != null) {
                            setInOrbit(true);
                            setInFlight(false);
                            this.changeDimension(targetDimID, pos.x, Configuration.orbit, pos.z);
                        }
                        else
                            this.setDead();
                    }
                    else {
                        Vector3F<Float> pos = storage.getDestinationCoordinates(0, true);
                        if(pos != null) {
                            setInOrbit(true);
                            setInFlight(false);
                            this.changeDimension(lastDimensionFrom, pos.x, Configuration.orbit, pos.z);
                        }
                        else
                            this.setDead();
                    }
                }
                else
                    this.setDead();
            }
        }
        else {
            // Client side: integrate motion locally; the server remains authoritative.
            this.moveEntity(0, this.motionY, 0);
        }
    }
}

/**
 * @return a list of satellites stored in this rocket
 */
public List<SatelliteBase> getSatellites() {
    List<SatelliteBase> satellites = new ArrayList<SatelliteBase>();

    for(TileSatelliteHatch tile : storage.getSatelliteHatches()) {
        SatelliteBase satellite = tile.getSatellite();
        if(satellite != null)
            satellites.add(satellite);
    }
    return satellites;
}

/**
 * Called when the rocket reaches orbit
 */
public void onOrbitReached() {
    super.onOrbitReached();

    //TODO: support multiple riders and rider/satellite combo
    if(!stats.hasSeat()) {
        // Unmanned rocket: an asteroid chip in the guidance computer starts a mining
        // mission; otherwise any packed satellites are simply deployed.
        TileGuidanceComputer computer = storage.getGuidanceComputer();
        if(computer != null && computer.getStackInSlot(0) != null &&
                computer.getStackInSlot(0).getItem() instanceof ItemAsteroidChip) {
            //make it 30 minutes with one drill
            float drillingPower = stats.getDrillingPower();
            MissionOreMining miningMission = new MissionOreMining((long)(Configuration.asteroidMiningTimeMult*(drillingPower == 0f ?
36000 : 360/stats.getDrillingPower())), this, connectedInfrastructure);
            DimensionProperties properties = DimensionManager.getInstance().getDimensionProperties(worldObj.provider.getDimension());
            miningMission.setDimensionId(worldObj);
            properties.addSatallite(miningMission, worldObj);

            if(!worldObj.isRemote)
                PacketHandler.sendToAll(new PacketSatellite(miningMission));

            for(IInfrastructure i : connectedInfrastructure) {
                i.linkMission(miningMission);
            }

            this.setDead();
            //TODO: Move tracking stations over to the mission handler
        }
        else {
            unpackSatellites();
        }

        destinationDimId = storage.getDestinationDimId(this.worldObj.provider.getDimension(), (int)this.posX, (int)this.posZ);
        if(DimensionManager.getInstance().canTravelTo(destinationDimId)) {
            Vector3F<Float> pos = storage.getDestinationCoordinates(destinationDimId, true);
            // Remember the launch point so a return trip can target it.
            storage.setDestinationCoordinates(new Vector3F<Float>((float)this.posX, (float)this.posY, (float)this.posZ), this.worldObj.provider.getDimension());
            if(pos != null) {
                this.setInOrbit(true);
                this.motionY = -this.motionY;
                this.changeDimension(destinationDimId, pos.x, Configuration.orbit, pos.z);
                return;
            }
        }
        else
            this.setDead();
        //TODO: satellite event?
    }
    else {
        // Manned rocket: deploy satellites, flip vertical motion, and hand off to the
        // destination dimension; a riding player confirms the final deorbit manually.
        unpackSatellites();

        //TODO: maybe add orbit dimension
        this.motionY = -this.motionY;
        setInOrbit(true);

        //If going to a station or something make sure to set coords accordingly
        //If in space land on the planet, if on the planet go to space
        if(destinationDimId == Configuration.spaceDimId || this.worldObj.provider.getDimension() == Configuration.spaceDimId) {
            Vector3F<Float> pos = storage.getDestinationCoordinates(destinationDimId, true);
            storage.setDestinationCoordinates(new Vector3F<Float>((float)this.posX, (float)this.posY, (float)this.posZ), this.worldObj.provider.getDimension());
            if(pos != null) {
                //Make player confirm deorbit if a player is riding the rocket
                if(hasHumanPassenger()) {
                    setInFlight(false);
                    pos.y = (float) Configuration.orbit;
                }
                this.changeDimension(destinationDimId, pos.x, pos.y, pos.z);
                return;
            }
        }

        //Make player confirm deorbit if a player is riding the rocket
        if(hasHumanPassenger()) {
            setInFlight(false);

            // Award moon-landing achievements on arrival at Luna.
            if(DimensionManager.getInstance().getDimensionProperties(destinationDimId).getName().equals("Luna")) {
                for(Entity player : this.getPassengers()) {
                    if(player instanceof EntityPlayer) {
                        ((EntityPlayer)player).addStat(ARAchivements.moonLanding);
                        if(!DimensionManager.hasReachedMoon)
                            ((EntityPlayer)player).addStat(ARAchivements.oneSmallStep);
                    }
                }
                DimensionManager.hasReachedMoon = true;
            }
        }
        else
            setPosition(posX, Configuration.orbit, posZ);

        if(destinationDimId != this.worldObj.provider.getDimension())
            this.changeDimension(this.worldObj.provider.getDimension() == destinationDimId ? 0 : destinationDimId);
    }
}

// Deploys the contents of every satellite hatch: space-station chips unpack their stored
// structure onto the station object; ordinary satellites are registered with the dimension.
private void unpackSatellites() {
    List<TileSatelliteHatch> satelliteHatches = storage.getSatelliteHatches();

    for(TileSatelliteHatch tile : satelliteHatches) {
        SatelliteBase satellite = tile.getSatellite();
        if(satellite == null) {
            ItemStack stack = tile.getStackInSlot(0);
            if(stack != null && stack.getItem() == AdvancedRocketryItems.itemSpaceStation) {
                StorageChunk storage = ((ItemPackedStructure)stack.getItem()).getStructure(stack);
                ISpaceObject object = SpaceObjectManager.getSpaceManager().getSpaceStation((int)ItemStationChip.getUUID(stack));

                SpaceObjectManager.getSpaceManager().moveStationToBody(object, this.worldObj.provider.getDimension());

                //Vector3F<Integer> spawn = object.getSpawnLocation();

                object.onModuleUnpack(storage);

                tile.setInventorySlotContents(0, null);
            }
        }
        else {
            DimensionProperties properties = DimensionManager.getEffectiveDimId(worldObj, this.getPosition());
            World world = net.minecraftforge.common.DimensionManager.getWorld(properties.getId());

            properties.addSatallite(satellite, world);
            tile.setInventorySlotContents(0, null);
        }
    }
}

/**
 * Called immediately before launch; fires the pre-launch event and, unless canceled,
 * forwards the launch request to the server and begins launching.
 */
@Override
public void prepareLaunch() {
    RocketPreLaunchEvent event = new RocketEvent.RocketPreLaunchEvent(this);
    MinecraftForge.EVENT_BUS.post(event);

    if(!event.isCanceled()) {
        if(worldObj.isRemote)
            PacketHandler.sendToServer(new PacketEntity(this, (byte)EntityRocket.PacketType.LAUNCH.ordinal()));
        launch();
    }
}

@Override
public void launch() {
    // Already airborne: nothing to do. In orbit: just begin the descent.
    if(isInFlight())
        return;

    if(isInOrbit()) {
        setInFlight(true);
        return;
    }

    //Get destination dimid and lock the computer
    //TODO: lock the computer
    destinationDimId = storage.getDestinationDimId(worldObj.provider.getDimension(), (int)this.posX, (int)this.posZ);

    //TODO: make sure this doesn't break asteriod mining
    // destinationDimId == -1 with satellite hatches aboard means a satellite-only mission.
    if(!(DimensionManager.getInstance().canTravelTo(destinationDimId) || (destinationDimId == -1 && storage.getSatelliteHatches().size() != 0))) {
setError(LibVulpes.proxy.getLocalizedString("error.rocket.cannotGetThere"));
        return;
    }

    // Resolve the actual planet being targeted: a space-station destination counts as
    // the planet that station orbits.
    int finalDest = destinationDimId;
    if(destinationDimId == Configuration.spaceDimId) {
        ISpaceObject obj = null;
        Vector3F<Float> vec = storage.getDestinationCoordinates(destinationDimId,false);
        if(vec != null)
            obj = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(new BlockPos(vec.x, vec.y, vec.z));

        if( obj != null)
            finalDest = obj.getOrbitingPlanetId();
        else {
            setError(LibVulpes.proxy.getLocalizedString("error.rocket.destinationNotExist"));
            return;
        }
    }

    // Likewise resolve the launch dimension when taking off from a space station.
    int thisDimId = this.worldObj.provider.getDimension();
    if(this.worldObj.provider.getDimension() == Configuration.spaceDimId) {
        ISpaceObject object = SpaceObjectManager.getSpaceManager().getSpaceStationFromBlockCoords(this.getPosition());
        if(object != null)
            thisDimId = object.getProperties().getParentProperties().getId();
    }

    // Rockets may only travel within one planet/moon system.
    if(finalDest != -1 && !DimensionManager.getInstance().areDimensionsInSamePlanetMoonSystem(finalDest, thisDimId)) {
        setError(LibVulpes.proxy.getLocalizedString("error.rocket.notSameSystem"));
        return;
    }

    //TODO: Clean this logic a bit?
    if(!stats.hasSeat() || ((DimensionManager.getInstance().isDimensionCreated(destinationDimId)) || destinationDimId == Configuration.spaceDimId || destinationDimId == 0) ) { //Abort if destination is invalid
        setInFlight(true);
        Iterator<IInfrastructure> connectedTiles = connectedInfrastructure.iterator();

        MinecraftForge.EVENT_BUS.post(new RocketLaunchEvent(this));

        //Disconnect things linked to the rocket on liftoff
        while(connectedTiles.hasNext()) {
            IInfrastructure i = connectedTiles.next();
            if(i.disconnectOnLiftOff()) {
                disconnectInfrastructure(i);
                connectedTiles.remove();
            }
        }
    }
}

/**
 * Called when the rocket is to be deconstructed
 */
@Override
public void deconstructRocket() {
    super.deconstructRocket();

    for(IInfrastructure infrastructure : connectedInfrastructure) {
        infrastructure.unlinkRocket();
    }

    //paste the rocket into the world as blocks
    storage.pasteInWorld(this.worldObj, (int)(this.posX - storage.getSizeX()/2f), (int)this.posY, (int)(this.posZ - storage.getSizeZ()/2f));
    this.setDead();
}

@Override
public void setDead() {
    super.setDead();

    // Free the client render display list if one was allocated.
    if(storage != null && storage.world.displayListIndex != -1)
        GLAllocation.deleteDisplayLists(storage.world.displayListIndex);

    //unlink any connected tiles
    Iterator<IInfrastructure> connectedTiles = connectedInfrastructure.iterator();
    while(connectedTiles.hasNext()) {
        connectedTiles.next().unlinkRocket();
        connectedTiles.remove();
    }
}

/**
 * Stores a fallback destination in the guidance computer, if one is installed.
 */
public void setOverriddenCoords(int dimId, float x, float y, float z) {
    TileGuidanceComputer tile = storage.getGuidanceComputer();
    if(tile != null) {
        tile.setFallbackDestination(dimId, new Vector3F<Float>(x, y, z));
    }
}

@Override
public Entity changeDimension(int newDimId) {
    return changeDimension(newDimId, this.posX, (double)Configuration.orbit, this.posZ);
}

/**
 * Transfers the rocket (and, via delayed transitions, its passengers) to another
 * dimension at the given coordinates. Mirrors the vanilla Entity#changeDimension
 * flow but uses a no-portal teleporter.
 * @return the replacement entity in the target dimension, or null if the transfer
 *         was refused or this is the client side
 */
@Nullable
public Entity changeDimension(int dimensionIn, double posX, double y, double posZ) {
    if (!this.worldObj.isRemote && !this.isDead) {
        if(!DimensionManager.getInstance().canTravelTo(dimensionIn)) {
            AdvancedRocketry.logger.warn("Rocket trying to travel from Dim" + this.worldObj.provider.getDimension() + " to Dim " + dimensionIn + ". target not accessible by rocket from launch dim");
            return null;
        }

        lastDimensionFrom = this.worldObj.provider.getDimension();

        List<Entity> passengers = getPassengers();

        if (!net.minecraftforge.common.ForgeHooks.onTravelToDimension(this, dimensionIn)) return null;
        this.worldObj.theProfiler.startSection("changeDimension");
        MinecraftServer minecraftserver = this.getServer();
        int i = this.dimension;
        WorldServer worldserver = minecraftserver.worldServerForDimension(i);
        WorldServer worldserver1 = minecraftserver.worldServerForDimension(dimensionIn);
        this.dimension = dimensionIn;

        // Vanilla quirk: End->End transfers are redirected to the overworld.
        if (i == 1 && dimensionIn == 1) {
            worldserver1 = minecraftserver.worldServerForDimension(0);
            this.dimension = 0;
        }

        this.worldObj.removeEntity(this);
        this.isDead = false;
        this.worldObj.theProfiler.startSection("reposition");
        BlockPos blockpos;

        // Nether-style 8:1 scaling clamped to the target world border (copied from
        // vanilla; d2 is unused here).
        double d0 = this.posX;
        double d1 = this.posZ;
        double d2 = 8.0D;
        d0 = MathHelper.clamp_double(d0 * 8.0D, worldserver1.getWorldBorder().minX() + 16.0D, worldserver1.getWorldBorder().maxX() - 16.0D);
        d1 = MathHelper.clamp_double(d1 * 8.0D, worldserver1.getWorldBorder().minZ() + 16.0D, worldserver1.getWorldBorder().maxZ() - 16.0D);
        d0 = (double)MathHelper.clamp_int((int)d0, -29999872, 29999872);
        d1 = (double)MathHelper.clamp_int((int)d1, -29999872, 29999872);
        float f = this.rotationYaw;
        this.setLocationAndAngles(d0, this.posY, d1, 90.0F, 0.0F);
        Teleporter teleporter = new TeleporterNoPortal(worldserver1);
        teleporter.placeInExistingPortal(this, f);
        worldserver.updateEntityWithOptionalForce(this, false);
        this.worldObj.theProfiler.endStartSection("reloading");
        Entity entity = EntityList.createEntityByName(EntityList.getEntityString(this), worldserver1);

        if (entity != null) {
            this.moveToBlockPosAndAngles(new BlockPos(posX, y, posZ), entity.rotationYaw, entity.rotationPitch);
            ((EntityRocket)entity).copyDataFromOld(this);
            entity.forceSpawn = true;
            worldserver1.spawnEntityInWorld(entity);
            worldserver1.updateEntityWithOptionalForce(entity, true);

            int timeOffset = 1;
            for(Entity e : passengers) {
                //Fix that darn random crash?
                worldserver.resetUpdateEntityTick();
                worldserver1.resetUpdateEntityTick();
                //Transfer the player if applicable

                //Need to handle our own removal to avoid race condition where player is mounted on client on the old entity but is already mounted to the new one on server
                //PacketHandler.sendToPlayer(new PacketEntity(this, (byte)PacketType.DISMOUNTCLIENT.ordinal()), (EntityPlayer) e);
                PlanetEventHandler.addDelayedTransition(worldserver.getTotalWorldTime(), new TransitionEntity(worldserver.getTotalWorldTime(), e, dimensionIn, new BlockPos(posX + 16, y, posZ), entity));

                //minecraftserver.getPlayerList().transferPlayerToDimension((EntityPlayerMP)e, dimensionIn, teleporter);
                //e.setLocationAndAngles(posX, Configuration.orbit, posZ, this.rotationYaw, this.rotationPitch);
                //e.startRiding(entity);
                //e.playerNetServerHandler.sendPacket(new SPacketRespawn(e.dimension, e.worldObj.getDifficulty(), worldserver1.getWorldInfo().getTerrainType(), ((EntityPlayerMP)e).interactionManager.getGameType()));
                //((WorldServer)startWorld).getPlayerManager().removePlayer(player);
            }
        }

        this.isDead = true;
        this.worldObj.theProfiler.endSection();
        worldserver.resetUpdateEntityTick();
        worldserver1.resetUpdateEntityTick();
        this.worldObj.theProfiler.endSection();
        return entity;
    }
    else {
        return null;
    }
}

/**
 * Prepares this entity in new dimension by copying NBT data from entity in old dimension
 */
public void copyDataFromOld(Entity entityIn) {
    NBTTagCompound nbttagcompound = entityIn.writeToNBT(new NBTTagCompound());
    nbttagcompound.removeTag("Dimension");
    nbttagcompound.removeTag("Passengers");
    this.readFromNBT(nbttagcompound);
    this.timeUntilPortal = entityIn.timeUntilPortal;
}

// Reads the network-synced subset of NBT; the full reader tolerates missing keys.
protected void readNetworkableNBT(NBTTagCompound nbt) {
    //Normal function checks for the existance of the data anyway
    readEntityFromNBT(nbt);
}

@Override
protected void readEntityFromNBT(NBTTagCompound nbt) { setInOrbit(isInOrbit = nbt.getBoolean("orbit")); stats.readFromNBT(nbt); mountedEntities = new WeakReference[stats.getNumPassengerSeats()]; setFuelAmount(stats.getFuelAmount(FuelType.LIQUID)); setInFlight(isInFlight = nbt.getBoolean("flight")); readMissionPersistantNBT(nbt); if(nbt.hasKey("data")) { if(storage == null) storage = new StorageChunk(); storage.readFromNBT(nbt.getCompoundTag("data")); storage.setEntity(this); this.setSize(Math.max(storage.getSizeX(), storage.getSizeZ()), storage.getSizeY()); } NBTTagList tagList = nbt.getTagList("infrastructure", 10); for (int i = 0; i < tagList.tagCount(); i++) { int coords[] = tagList.getCompoundTagAt(i).getIntArray("loc"); //If called on server causes recursive loop, use hackish workaround with tempcoords and onChunkLoad if on server if(worldObj.isRemote) { TileEntity tile = this.worldObj.getTileEntity(new BlockPos(coords[0], coords[1], coords[2])); if(tile instanceof IInfrastructure) this.linkInfrastructure((IInfrastructure)tile); } else infrastructureCoords.add(new HashedBlockPosition(coords[0], coords[1], coords[2])); } destinationDimId = nbt.getInteger("destinationDimId"); lastDimensionFrom = nbt.getInteger("lastDimensionFrom"); //Satallite if(nbt.hasKey("satallite")) { NBTTagCompound satalliteNbt = nbt.getCompoundTag("satallite"); satallite = SatelliteRegistry.createFromNBT(satalliteNbt); } } protected void writeNetworkableNBT(NBTTagCompound nbt) { writeMissionPersistantNBT(nbt); nbt.setBoolean("orbit", isInOrbit()); nbt.setBoolean("flight", isInFlight()); stats.writeToNBT(nbt); NBTTagList itemList = new NBTTagList(); for(int i = 0; i < connectedInfrastructure.size(); i++) { IInfrastructure inf = connectedInfrastructure.get(i); if(inf instanceof TileEntity) { TileEntity ent = (TileEntity)inf; NBTTagCompound tag = new NBTTagCompound(); tag.setIntArray("loc", new int[] {ent.getPos().getX(), ent.getPos().getY(), ent.getPos().getZ()}); itemList.appendTag(tag); } 
} nbt.setTag("infrastructure", itemList); nbt.setInteger("destinationDimId", destinationDimId); //Satallite if(satallite != null) { NBTTagCompound satalliteNbt = new NBTTagCompound(); satallite.writeToNBT(satalliteNbt); satalliteNbt.setString("DataType",SatelliteRegistry.getKey(satallite.getClass())); nbt.setTag("satallite", satalliteNbt); } } public void writeMissionPersistantNBT(NBTTagCompound nbt) { } public void readMissionPersistantNBT(NBTTagCompound nbt) { } @Override protected void writeEntityToNBT(NBTTagCompound nbt) { writeNetworkableNBT(nbt); if(storage != null) { NBTTagCompound blocks = new NBTTagCompound(); storage.writeToNBT(blocks); nbt.setTag("data", blocks); } //TODO handle non tile Infrastructure nbt.setInteger("lastDimensionFrom", lastDimensionFrom); } @Override public void readDataFromNetwork(ByteBuf in, byte packetId, NBTTagCompound nbt) { if(packetId == PacketType.RECIEVENBT.ordinal()) { storage = new StorageChunk(); storage.setEntity(this); storage.readFromNetwork(in); } else if(packetId == PacketType.SENDPLANETDATA.ordinal()) { nbt.setInteger("selection", in.readInt()); } } @Override public void writeDataToNetwork(ByteBuf out, byte id) { if(id == PacketType.RECIEVENBT.ordinal()) { storage.writeToNetwork(out); } else if(id == PacketType.SENDPLANETDATA.ordinal()) { if(worldObj.isRemote) out.writeInt(container.getSelectedSystem()); else { if(storage.getGuidanceComputer() != null) { ItemStack stack = storage.getGuidanceComputer().getStackInSlot(0); if(stack != null && stack.getItem() == AdvancedRocketryItems.itemPlanetIdChip) { out.writeInt(((ItemPlanetIdentificationChip)AdvancedRocketryItems.itemPlanetIdChip).getDimensionId(stack)); } } } } } @Override public void useNetworkData(EntityPlayer player, Side side, byte id, NBTTagCompound nbt) { if(id == PacketType.RECIEVENBT.ordinal()) { this.readEntityFromNBT(nbt); initFromBounds(); } else if(id == PacketType.DECONSTRUCT.ordinal()) { deconstructRocket(); } else if(id == 
PacketType.SENDINTERACT.ordinal()) { interact(player); } else if(id == PacketType.OPENGUI.ordinal()) { //Used in key handler if(player.getRidingEntity() == this) //Prevent cheating openGui(player); } else if(id == PacketType.REQUESTNBT.ordinal()) { if(storage != null) { NBTTagCompound nbtdata = new NBTTagCompound(); this.writeNetworkableNBT(nbtdata); PacketHandler.sendToPlayer(new PacketEntity((INetworkEntity)this, (byte)PacketType.RECIEVENBT.ordinal(), nbtdata), player); } } else if(id == PacketType.FORCEMOUNT.ordinal()) { //Used for pesky dimension transfers //When dimensions are transferred make sure to remount the player on the client if(!acceptedPacket) { acceptedPacket = true; player.setPositionAndRotation(this.posX, this.posY, this.posZ, player.rotationYaw, player.rotationPitch); player.startRiding(this); MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketDeOrbitingEvent(this)); } } else if(id == PacketType.LAUNCH.ordinal()) { if(this.getPassengers().contains(player)) this.prepareLaunch(); } else if(id == PacketType.CHANGEWORLD.ordinal()) { AdvancedRocketry.proxy.changeClientPlayerWorld(storage.world); } else if(id == PacketType.REVERTWORLD.ordinal()) { AdvancedRocketry.proxy.changeClientPlayerWorld(this.worldObj); } else if(id == PacketType.OPENPLANETSELECTION.ordinal()) { player.openGui(LibVulpes.instance, GuiHandler.guiId.MODULARFULLSCREEN.ordinal(), player.worldObj, this.getEntityId(), -1,0); } else if(id == PacketType.SENDPLANETDATA.ordinal()) { ItemStack stack = storage.getGuidanceComputer().getStackInSlot(0); if(stack != null && stack.getItem() == AdvancedRocketryItems.itemPlanetIdChip) { ((ItemPlanetIdentificationChip)AdvancedRocketryItems.itemPlanetIdChip).setDimensionId(stack, nbt.getInteger("selection")); //Send data back to sync destination dims if(!worldObj.isRemote) { PacketHandler.sendToPlayersTrackingEntity(new PacketEntity(this, (byte)PacketType.SENDPLANETDATA.ordinal()), this); } } } else if(id == 
PacketType.DISCONNECTINFRASTRUCTURE.ordinal()) { int pos[] = nbt.getIntArray("pos"); connectedInfrastructure.remove(new HashedBlockPosition(pos[0], pos[1], pos[2])); TileEntity tile = worldObj.getTileEntity(new BlockPos(pos[0], pos[1], pos[2])); if(tile instanceof IInfrastructure) { ((IInfrastructure)tile).unlinkRocket(); connectedInfrastructure.remove(tile); } } else if(id == PacketType.ROCKETLANDEVENT.ordinal() && worldObj.isRemote) { MinecraftForge.EVENT_BUS.post(new RocketEvent.RocketLandedEvent(this)); } else if(id == PacketType.DISMOUNTCLIENT.ordinal() && worldObj.isRemote) { player.dismountRidingEntity(); //this.removePassenger(player); } else if(id > 100) { TileEntity tile = storage.getGUItiles().get(id - 100 - tilebuttonOffset); //Welcome to super hack time with packets //Due to the fact the client uses the player's current world to open the gui, we have to move the client between worlds for a bit PacketHandler.sendToPlayer(new PacketEntity(this, (byte)PacketType.CHANGEWORLD.ordinal()), player); storage.getBlockState(tile.getPos()).getBlock().onBlockActivated(storage.world, tile.getPos(), storage.getBlockState(tile.getPos()), player, EnumHand.MAIN_HAND, null, EnumFacing.DOWN, 0, 0, 0); PacketHandler.sendToPlayer(new PacketEntity(this, (byte)PacketType.REVERTWORLD.ordinal()), player); } } @Override public void updatePassenger(Entity entity) { if (entity != null ) { //Bind player to the seat if(this.storage != null) { //Conditional b/c for some reason client/server positions do not match float xOffset = this.storage.getSizeX() % 2 == 0 ? 0.5f : 0f; float zOffset = this.storage.getSizeZ() % 2 == 0 ? 
0.5f : 0f; entity.setPosition(this.posX + stats.getSeatX() + xOffset, this.posY + stats.getSeatY() - 0.5f, this.posZ + stats.getSeatZ() + zOffset ); } else entity.setPosition(this.posX , this.posY , this.posZ ); } for(int i = 0; i < this.stats.getNumPassengerSeats(); i++) { HashedBlockPosition pos = this.stats.getPassengerSeat(i); if(mountedEntities[i] != null && mountedEntities[i].get() != null) { mountedEntities[i].get().setPosition(this.posX + pos.x, this.posY + pos.y, this.posZ + pos.z); System.out.println("Additional: " + mountedEntities[i].get()); } } } @Override public List<ModuleBase> getModules(int ID, EntityPlayer player) { List<ModuleBase> modules; //If the rocket is flight don't load the interface modules = new LinkedList<ModuleBase>(); if(ID == GuiHandler.guiId.MODULAR.ordinal()) { //Backgrounds if(worldObj.isRemote) { modules.add(new ModuleImage(173, 0, new IconResource(128, 0, 48, 86, CommonResources.genericBackground))); modules.add(new ModuleImage(173, 86, new IconResource(98, 0, 78, 83, CommonResources.genericBackground))); modules.add(new ModuleImage(173, 168, new IconResource(98, 168, 78, 3, CommonResources.genericBackground))); } //Fuel modules.add(new ModuleProgress(192, 7, 0, new ProgressBarImage(2, 173, 12, 71, 17, 6, 3, 69, 1, 1, EnumFacing.UP, TextureResources.rocketHud), this)); //TODO DEBUG tiles! 
List<TileEntity> tiles = storage.getGUItiles(); for(int i = 0; i < tiles.size(); i++) { TileEntity tile = tiles.get(i); IBlockState state = storage.getBlockState(tile.getPos()); try { modules.add(new ModuleSlotButton(8 + 18* (i % 9), 17 + 18*(i/9), i + tilebuttonOffset, this, new ItemStack(state.getBlock(), 1, state.getBlock().getMetaFromState(state)), worldObj)); } catch (NullPointerException e) { } } //Add buttons modules.add(new ModuleButton(180, 140, 0, "Dissassemble", this, zmaster587.libVulpes.inventory.TextureResources.buttonBuild, 64, 20)); //modules.add(new ModuleButton(180, 95, 1, "", this, TextureResources.buttonLeft, 10, 16)); //modules.add(new ModuleButton(202, 95, 2, "", this, TextureResources.buttonRight, 10, 16)); modules.add(new ModuleButton(180, 114, 1, "Select Dst", this, zmaster587.libVulpes.inventory.TextureResources.buttonBuild, 64,20)); //modules.add(new ModuleText(180, 114, "Inventories", 0x404040)); } else { DimensionProperties properties = DimensionManager.getEffectiveDimId(worldObj, this.getPosition()); while(properties.getParentProperties() != null) properties = properties.getParentProperties(); container = new ModulePlanetSelector(properties.getId(), zmaster587.libVulpes.inventory.TextureResources.starryBG, this, false); container.setOffset(1000, 1000); modules.add(container); } return modules; } @Override public String getModularInventoryName() { return "Rocket"; } @Override public float getNormallizedProgress(int id) { if(id == 0) return getFuelAmount()/(float)getFuelCapacity(); return 0; } @Override public void setProgress(int id, int progress) { } @Override public int getProgress(int id) { return 0; } @Override public int getTotalProgress(int id) { return 0; } @Override public void setTotalProgress(int id, int progress) {} @Override public boolean startRiding(Entity entityIn, boolean force) { // TODO Auto-generated method stub return super.startRiding(entityIn, force); } @Override public boolean startRiding(Entity entityIn) { // 
TODO Auto-generated method stub return super.startRiding(entityIn); } @Override @SideOnly(Side.CLIENT) public void onInventoryButtonPressed(int buttonId) { switch(buttonId) { case 0: PacketHandler.sendToServer(new PacketEntity(this, (byte)EntityRocket.PacketType.DECONSTRUCT.ordinal())); break; case 1: PacketHandler.sendToServer(new PacketEntity(this, (byte)EntityRocket.PacketType.OPENPLANETSELECTION.ordinal())); break; default: PacketHandler.sendToServer(new PacketEntity(this, (byte)(buttonId + 100))); //Minecraft.getMinecraft().thePlayer.closeScreen(); TileEntity tile = storage.getGUItiles().get(buttonId - tilebuttonOffset); storage.getBlockState(tile.getPos()).getBlock().onBlockActivated(storage.world, tile.getPos(), storage.getBlockState(tile.getPos()), Minecraft.getMinecraft().thePlayer, EnumHand.MAIN_HAND, null, EnumFacing.DOWN, 0, 0, 0); } } @Override public boolean canInteractWithContainer(EntityPlayer entity) { boolean ret = !this.isDead && this.getDistanceToEntity(entity) < 64; if(!ret) RocketInventoryHelper.removePlayerFromInventoryBypass(entity); RocketInventoryHelper.updateTime(entity, worldObj.getWorldTime()); return ret; } @Override public StatsRocket getRocketStats() { return stats; } @Override public void onSelected(Object sender) { } @Override public void onSelectionConfirmed(Object sender) { PacketHandler.sendToServer(new PacketEntity(this, (byte)PacketType.SENDPLANETDATA.ordinal())); } @Override public void onSystemFocusChanged(Object sender) { // TODO Auto-generated method stub } public LinkedList<IInfrastructure> getConnectedInfrastructure() { return connectedInfrastructure; } }
handle error in case of missing NBT on station containers in rockets
src/main/java/zmaster587/advancedRocketry/entity/EntityRocket.java
handle error in case of missing NBT on station containers in rockets
Java
mit
30a34e60850d5fb6832bd7ae1c9defd4075a7907
0
brianPlummer/TinyDancer,friendlyrobotnyc/TinyDancer
package com.codemonkeylabs.fpslibrary; import android.content.Context; /** * Created by brianplummer on 8/29/15. */ public class TinyDancer { public static TinyDancerBuilder create(){ return new TinyDancerBuilder(); } public static void hide(Context context) { TinyDancerBuilder.hide(context.getApplicationContext()); } }
library/src/main/java/com/codemonkeylabs/fpslibrary/TinyDancer.java
package com.codemonkeylabs.fpslibrary; import android.app.Application; /** * Created by brianplummer on 8/29/15. */ public class TinyDancer { public static TinyDancerBuilder create(){ return new TinyDancerBuilder(); } public static void hide(Content context) { TinyDancerBuilder.hide(context.getApplicationContext()); } }
fix copy paste error
library/src/main/java/com/codemonkeylabs/fpslibrary/TinyDancer.java
fix copy paste error
Java
mit
2d85b82ec95c179306f3c2cad60c627266e271f5
0
Warlander/DeedPlanner-2
package pl.wurmonline.deedplanner; import java.util.Random; public class Constants { public final static Random random = new Random(); public final static int FLOORS_LIMIT = 17; public final static float HEIGHT_MOD = 10f; public final static String ENTER = System.getProperty("line.separator"); public final static String VERSION_STRING = "DeedPlanner 2.4.2"; }
src/pl/wurmonline/deedplanner/Constants.java
package pl.wurmonline.deedplanner; import java.util.Random; public class Constants { public final static Random random = new Random(); public final static int FLOORS_LIMIT = 17; public final static float HEIGHT_MOD = 10f; public final static String ENTER = System.getProperty("line.separator"); public final static String VERSION_STRING = "DeedPlanner 2.4.1"; }
DeedPlanner 2.4.2 release.
src/pl/wurmonline/deedplanner/Constants.java
DeedPlanner 2.4.2 release.
Java
mit
631e0a1e746e84802c950c7eab761b50444faac6
0
simple-elf/selenide,codeborne/selenide,simple-elf/selenide,simple-elf/selenide,codeborne/selenide,codeborne/selenide,simple-elf/selenide
package com.codeborne.selenide.impl; import com.codeborne.selenide.Configuration; import com.codeborne.selenide.Driver; import com.codeborne.selenide.SelenideTargetLocator; import org.openqa.selenium.Alert; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.OutputType; import org.openqa.selenium.Point; import org.openqa.selenium.TakesScreenshot; import org.openqa.selenium.UnhandledAlertException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebDriverException; import org.openqa.selenium.WebElement; import javax.imageio.ImageIO; import java.awt.image.BufferedImage; import java.awt.image.RasterFormatException; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; import java.util.logging.Logger; import static com.codeborne.selenide.Configuration.reportsFolder; import static java.io.File.separatorChar; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.logging.Level.SEVERE; import static java.util.logging.Level.WARNING; import static org.openqa.selenium.OutputType.FILE; public class ScreenShotLaboratory { private static final Logger log = Logger.getLogger(ScreenShotLaboratory.class.getName()); private static final ScreenShotLaboratory instance = new ScreenShotLaboratory(); public static ScreenShotLaboratory getInstance() { return instance; } protected final List<File> allScreenshots = new ArrayList<>(); protected AtomicLong screenshotCounter = new AtomicLong(); protected ThreadLocal<String> currentContext = ThreadLocal.withInitial(() -> ""); protected ThreadLocal<List<File>> 
currentContextScreenshots = new ThreadLocal<>(); protected Set<String> printedErrors = new ConcurrentSkipListSet<>(); public String takeScreenShot(Driver driver, String className, String methodName) { return takeScreenShot(driver, getScreenshotFileName(className, methodName)); } protected String getScreenshotFileName(String className, String methodName) { return className.replace('.', separatorChar) + separatorChar + methodName + '.' + timestamp(); } public String takeScreenShot(Driver driver) { return takeScreenShot(driver, generateScreenshotFileName()); } /** * Takes screenshot of current browser window. * Stores 2 files: html of page (if "savePageSource" option is enabled), and (if possible) image in PNG format. * * @param fileName name of file (without extension) to store screenshot to. * @return the name of last saved screenshot or null if failed to create screenshot */ public String takeScreenShot(Driver driver, String fileName) { return ifWebDriverStarted(driver, () -> ifReportsFolderNotNull(() -> { File screenshot = null; if (Configuration.savePageSource) { screenshot = savePageSourceToFile(fileName, driver.getWebDriver()); } File imageFile = savePageImageToFile(fileName, driver.getWebDriver()); if (imageFile != null) { screenshot = imageFile; } if (screenshot == null) { return null; } return addToHistory(screenshot).getAbsolutePath(); })); } public File takeScreenshot(Driver driver, WebElement element) { try { BufferedImage destination = takeScreenshotAsImage(driver, element); if (destination == null) { return null; } File screenshotOfElement = new File(reportsFolder, generateScreenshotFileName() + ".png"); ensureFolderExists(screenshotOfElement); ImageIO.write(destination, "png", screenshotOfElement); return screenshotOfElement; } catch (IOException e) { log.log(SEVERE, "Failed to take screenshot of " + element, e); return null; } } public BufferedImage takeScreenshotAsImage(Driver driver, WebElement element) { return ifWebDriverStarted(driver, () -> 
ifReportsFolderNotNull(() -> { WebDriver webdriver = driver.getWebDriver(); if (!(webdriver instanceof TakesScreenshot)) { log.warning("Cannot take screenshot because browser does not support screenshots"); return null; } byte[] screen = ((TakesScreenshot) webdriver).getScreenshotAs(OutputType.BYTES); Point elementLocation = element.getLocation(); try { BufferedImage img = ImageIO.read(new ByteArrayInputStream(screen)); int elementWidth = element.getSize().getWidth(); int elementHeight = element.getSize().getHeight(); if (elementWidth > img.getWidth()) { elementWidth = img.getWidth() - elementLocation.getX(); } if (elementHeight > img.getHeight()) { elementHeight = img.getHeight() - elementLocation.getY(); } return img.getSubimage(elementLocation.getX(), elementLocation.getY(), elementWidth, elementHeight); } catch (IOException e) { log.log(SEVERE, "Failed to take screenshot of " + element, e); return null; } catch (RasterFormatException e) { log.warning("Cannot take screenshot because element is not displayed on current screen position"); return null; } })); } protected String generateScreenshotFileName() { return currentContext.get() + timestamp() + "." 
+ screenshotCounter.getAndIncrement(); } protected File ensureFolderExists(File targetFile) { File folder = targetFile.getParentFile(); if (!folder.exists()) { log.info("Creating folder: " + folder); if (!folder.mkdirs()) { log.severe("Failed to create " + folder); } } return targetFile; } protected synchronized void printOnce(String action, Throwable error) { if (!printedErrors.contains(action)) { log.log(SEVERE, error.getMessage(), error); printedErrors.add(action); } else { log.severe("Failed to " + action + ": " + error); } } protected long timestamp() { return System.currentTimeMillis(); } public File takeScreenshot(Driver driver, WebElement iframe, WebElement element) { try { BufferedImage dest = takeScreenshotAsImage(driver, iframe, element); if (dest == null) { return null; } File screenshotOfElement = new File(reportsFolder, generateScreenshotFileName() + ".png"); ensureFolderExists(screenshotOfElement); ImageIO.write(dest, "png", screenshotOfElement); return screenshotOfElement; } catch (IOException e) { log.log(SEVERE, "Failed to take screenshot of " + element + " inside frame " + iframe, e); return null; } } public BufferedImage takeScreenshotAsImage(Driver driver, WebElement iframe, WebElement element) { WebDriver webdriver = checkIfFullyValidDriver(driver); if (webdriver == null) { return null; } byte[] screen = ((TakesScreenshot) webdriver).getScreenshotAs(OutputType.BYTES); Point iframeLocation = iframe.getLocation(); BufferedImage img; try { img = ImageIO.read(new ByteArrayInputStream(screen)); } catch (IOException e) { log.log(SEVERE, "Failed to take screenshot of " + element + " inside frame " + iframe, e); return null; } catch (RasterFormatException ex) { log.warning("Cannot take screenshot because iframe is not displayed"); return null; } int iframeHeight = iframe.getSize().getHeight(); SelenideTargetLocator switchTo = new SelenideTargetLocator(driver.getWebDriver()); switchTo.frame(iframe); int iframeWidth = ((Long) ((JavascriptExecutor) 
webdriver).executeScript("return document.body.clientWidth")).intValue(); if (iframeHeight > img.getHeight()) { iframeHeight = img.getHeight() - iframeLocation.getY(); } if (iframeWidth > img.getWidth()) { iframeWidth = img.getWidth() - iframeLocation.getX(); } Point elementLocation = element.getLocation(); int elementWidth = element.getSize().getWidth(); int elementHeight = element.getSize().getHeight(); if (elementWidth > iframeWidth) { elementWidth = iframeWidth - elementLocation.getX(); } if (elementHeight > iframeHeight) { elementHeight = iframeHeight - elementLocation.getY(); } switchTo.defaultContent(); try { img = img.getSubimage(iframeLocation.getX() + elementLocation.getX(), iframeLocation.getY() + elementLocation.getY(), elementWidth, elementHeight); } catch (RasterFormatException ex) { log.warning("Cannot take screenshot because element is not displayed in iframe"); return null; } return img; } private WebDriver checkIfFullyValidDriver(Driver driver) { return ifWebDriverStarted(driver, () -> { WebDriver webdriver = driver.getWebDriver(); if (!(webdriver instanceof TakesScreenshot)) { log.warning("Cannot take screenshot because browser does not support screenshots"); return null; } else if (!(webdriver instanceof JavascriptExecutor)) { log.warning("Cannot take screenshot as driver is not supporting javascript execution"); return null; } return webdriver; }); } public File takeScreenShotAsFile(Driver driver) { return ifWebDriverStarted(driver, () -> { WebDriver webdriver = driver.getWebDriver(); //File pageSource = savePageSourceToFile(fileName, webdriver); - temporary not available File scrFile = getPageImage(webdriver); addToHistory(scrFile); return scrFile; }); } protected File getPageImage(WebDriver webdriver) { File scrFile = null; if (webdriver instanceof TakesScreenshot) { scrFile = takeScreenshotInMemory((TakesScreenshot) webdriver); } return scrFile; } protected File addToHistory(File screenshot) { if (currentContextScreenshots.get() != null) { 
currentContextScreenshots.get().add(screenshot); } synchronized (allScreenshots) { allScreenshots.add(screenshot); } return screenshot; } protected File takeScreenshotInMemory(TakesScreenshot driver) { try { return driver.getScreenshotAs(FILE); } catch (Exception e) { log.log(SEVERE, "Failed to take screenshot in memory", e); return null; } } protected File savePageImageToFile(String fileName, WebDriver webdriver) { File imageFile = null; if (webdriver instanceof TakesScreenshot) { imageFile = takeScreenshotImage((TakesScreenshot) webdriver, fileName); } return imageFile; } protected File savePageSourceToFile(String fileName, WebDriver webdriver) { return savePageSourceToFile(fileName, webdriver, true); } protected File savePageSourceToFile(String fileName, WebDriver webdriver, boolean retryIfAlert) { File pageSource = new File(reportsFolder, fileName + ".html"); try { writeToFile(webdriver.getPageSource(), pageSource); } catch (UnhandledAlertException e) { if (retryIfAlert) { try { Alert alert = webdriver.switchTo().alert(); log.severe(e + ": " + alert.getText()); alert.accept(); savePageSourceToFile(fileName, webdriver, false); } catch (Exception unableToCloseAlert) { log.severe("Failed to close alert: " + unableToCloseAlert); } } else { printOnce("savePageSourceToFile", e); } } catch (WebDriverException e) { log.log(WARNING, "Failed to save page source to " + fileName + " because of " + e); writeToFile(e.toString(), pageSource); return pageSource; } catch (RuntimeException e) { log.log(SEVERE, "Failed to save page source to " + fileName, e); writeToFile(e.toString(), pageSource); } return pageSource; } protected File takeScreenshotImage(TakesScreenshot driver, String fileName) { try { File scrFile = driver.getScreenshotAs(FILE); File imageFile = new File(reportsFolder, fileName + ".png"); try { copyFile(scrFile, imageFile); } catch (IOException e) { log.log(SEVERE, "Failed to save screenshot to " + imageFile, e); } return imageFile; } catch (WebDriverException 
e) { log.log(SEVERE, "Failed to take screenshot to " + fileName + " because of " + e); return null; } } protected void copyFile(File sourceFile, File targetFile) throws IOException { try (FileInputStream in = new FileInputStream(sourceFile)) { copyFile(in, targetFile); } } protected void copyFile(InputStream in, File targetFile) throws IOException { ensureFolderExists(targetFile); try (FileOutputStream out = new FileOutputStream(targetFile)) { byte[] buffer = new byte[1024]; int len; while ((len = in.read(buffer)) != -1) { out.write(buffer, 0, len); } } } protected void writeToFile(String content, File targetFile) { try (ByteArrayInputStream in = new ByteArrayInputStream(content.getBytes(UTF_8))) { copyFile(in, targetFile); } catch (IOException e) { log.log(SEVERE, "Failed to write file " + targetFile.getAbsolutePath(), e); } } public void startContext(String className, String methodName) { String context = className.replace('.', separatorChar) + separatorChar + methodName + separatorChar; startContext(context); } public void startContext(String context) { currentContext.set(context); currentContextScreenshots.set(new ArrayList<>()); } public List<File> finishContext() { List<File> result = currentContextScreenshots.get(); currentContext.set(""); currentContextScreenshots.remove(); return result; } public List<File> getScreenshots() { synchronized (allScreenshots) { return Collections.unmodifiableList(allScreenshots); } } public File getLastScreenshot() { synchronized (allScreenshots) { return allScreenshots.isEmpty() ? 
null : allScreenshots.get(allScreenshots.size() - 1); } } public String formatScreenShotPath(Driver driver) { if (!Configuration.screenshots) { log.config("Automatic screenshots are disabled."); return ""; } String screenshot = takeScreenShot(driver); if (screenshot == null) { return ""; } if (Configuration.reportsUrl != null) { String screenshotRelativePath = screenshot.substring(System.getProperty("user.dir").length() + 1); String screenshotUrl = Configuration.reportsUrl + screenshotRelativePath.replace('\\', '/'); try { screenshotUrl = new URL(screenshotUrl).toExternalForm(); } catch (MalformedURLException ignore) { // ignored exception } log.config("Replaced screenshot file path '" + screenshot + "' by public CI URL '" + screenshotUrl + "'"); return screenshotUrl; } log.config("reportsUrl is not configured. Returning screenshot file name '" + screenshot + "'"); try { return new File(screenshot).toURI().toURL().toExternalForm(); } catch (MalformedURLException e) { return "file://" + screenshot; } } private <T> T ifWebDriverStarted(Driver driver, Supplier<T> lambda) { if (!driver.hasWebDriverStarted()) { log.warning("Cannot take screenshot because browser is not started"); return null; } return lambda.get(); } private <T> T ifReportsFolderNotNull(Supplier<T> lambda) { if (reportsFolder == null) { log.severe("Cannot take screenshot because Configuration.reportsFolder is null"); return null; } return lambda.get(); } }
src/main/java/com/codeborne/selenide/impl/ScreenShotLaboratory.java
package com.codeborne.selenide.impl;

import com.codeborne.selenide.Configuration;
import com.codeborne.selenide.Driver;
import com.codeborne.selenide.SelenideTargetLocator;
import org.openqa.selenium.Alert;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.Point;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.UnhandledAlertException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.awt.image.RasterFormatException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Logger;

import static com.codeborne.selenide.Configuration.reportsFolder;
import static java.io.File.separatorChar;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.logging.Level.SEVERE;
import static java.util.logging.Level.WARNING;
import static org.openqa.selenium.OutputType.FILE;

/**
 * Takes and stores screenshots (PNG image and, optionally, page source HTML)
 * of the current browser window or of individual elements.
 *
 * <p>Screenshots are written under {@link Configuration#reportsFolder} and
 * remembered in two histories: a global list ({@code allScreenshots}) and a
 * per-thread "context" list used to group screenshots per test method
 * (see {@link #startContext(String)} / {@link #finishContext()}).</p>
 *
 * <p>NOTE(review): the singleton instance is shared across threads; the global
 * history is guarded by {@code synchronized (allScreenshots)}, but the context
 * fields are thread-local, so contexts never mix between threads.</p>
 */
public class ScreenShotLaboratory {
  private static final Logger log = Logger.getLogger(ScreenShotLaboratory.class.getName());
  private static final ScreenShotLaboratory instance = new ScreenShotLaboratory();

  public static ScreenShotLaboratory getInstance() {
    return instance;
  }

  // Global, cross-thread history of every screenshot file ever taken.
  protected final List<File> allScreenshots = new ArrayList<>();
  // Monotonic counter used to make generated file names unique within a context.
  protected AtomicLong screenshotCounter = new AtomicLong();
  // Per-thread file-name prefix (e.g. "com/example/MyTest/testFoo/"), "" when no context is active.
  protected ThreadLocal<String> currentContext = ThreadLocal.withInitial(() -> "");
  // Per-thread list of screenshots taken since startContext(); null when no context is active.
  protected ThreadLocal<List<File>> currentContextScreenshots = new ThreadLocal<>();
  // Error keys already logged with a full stack trace; repeats are logged short (see printOnce).
  protected Set<String> printedErrors = new ConcurrentSkipListSet<>();

  /** Takes a screenshot named after the given test class and method. */
  public String takeScreenShot(Driver driver, String className, String methodName) {
    return takeScreenShot(driver, getScreenshotFileName(className, methodName));
  }

  /** Builds "pkg/Class/method.&lt;millis&gt;" — dots in the class name become path separators. */
  protected String getScreenshotFileName(String className, String methodName) {
    return className.replace('.', separatorChar) + separatorChar + methodName + '.' + timestamp();
  }

  /** Takes a screenshot with an auto-generated (context + timestamp + counter) file name. */
  public String takeScreenShot(Driver driver) {
    return takeScreenShot(driver, generateScreenshotFileName());
  }

  /**
   * Takes screenshot of current browser window.
   * Stores 2 files: html of page (if "savePageSource" option is enabled), and (if possible) image in PNG format.
   *
   * @param fileName name of file (without extension) to store screenshot to.
   *
   * @return the name of last saved screenshot or null if failed to create screenshot
   */
  public String takeScreenShot(Driver driver, String fileName) {
    if (!driver.hasWebDriverStarted()) {
      log.warning("Cannot take screenshot because browser is not started");
      return null;
    }
    else if (reportsFolder == null) {
      log.severe("Cannot take screenshot because Configuration.reportsFolder is null");
      return null;
    }

    File screenshot = null;
    if (Configuration.savePageSource) {
      screenshot = savePageSourceToFile(fileName, driver.getWebDriver());
    }

    // The PNG (if obtainable) wins over the HTML file as the "result" screenshot.
    File imageFile = savePageImageToFile(fileName, driver.getWebDriver());
    if (imageFile != null) {
      screenshot = imageFile;
    }
    if (screenshot == null) {
      return null;
    }
    return addToHistory(screenshot).getAbsolutePath();
  }

  /**
   * Crops a screenshot down to the given element and saves it as a PNG
   * in the reports folder.
   *
   * @return the PNG file, or null if the screenshot could not be taken/saved
   */
  public File takeScreenshot(Driver driver, WebElement element) {
    try {
      BufferedImage destination = takeScreenshotAsImage(driver, element);
      if (destination == null) {
        return null;
      }
      File screenshotOfElement = new File(reportsFolder, generateScreenshotFileName() + ".png");
      ensureFolderExists(screenshotOfElement);
      ImageIO.write(destination, "png", screenshotOfElement);
      return screenshotOfElement;
    }
    catch (IOException e) {
      log.log(SEVERE, "Failed to take screenshot of " + element, e);
      return null;
    }
  }

  /**
   * Takes a full-window screenshot and crops it to the element's on-page
   * location and size. Returns null (after logging) on any failure.
   */
  public BufferedImage takeScreenshotAsImage(Driver driver, WebElement element) {
    if (!driver.hasWebDriverStarted()) {
      log.warning("Cannot take screenshot because browser is not started");
      return null;
    }
    WebDriver webdriver = driver.getWebDriver();
    if (!(webdriver instanceof TakesScreenshot)) {
      log.warning("Cannot take screenshot because browser does not support screenshots");
      return null;
    }
    byte[] screen = ((TakesScreenshot) webdriver).getScreenshotAs(OutputType.BYTES);
    Point elementLocation = element.getLocation();
    try {
      BufferedImage img = ImageIO.read(new ByteArrayInputStream(screen));
      int elementWidth = element.getSize().getWidth();
      int elementHeight = element.getSize().getHeight();
      // Clamp the crop box so it never reaches past the captured image edges.
      if (elementWidth > img.getWidth()) {
        elementWidth = img.getWidth() - elementLocation.getX();
      }
      if (elementHeight > img.getHeight()) {
        elementHeight = img.getHeight() - elementLocation.getY();
      }
      return img.getSubimage(elementLocation.getX(), elementLocation.getY(), elementWidth, elementHeight);
    }
    catch (IOException e) {
      log.log(SEVERE, "Failed to take screenshot of " + element, e);
      return null;
    }
    catch (RasterFormatException e) {
      // getSubimage throws this when the crop box falls outside the raster.
      log.warning("Cannot take screenshot because element is not displayed on current screen position");
      return null;
    }
  }

  /** Unique file name: &lt;context prefix&gt;&lt;millis&gt;.&lt;counter&gt; (no extension). */
  protected String generateScreenshotFileName() {
    return currentContext.get() + timestamp() + "." + screenshotCounter.getAndIncrement();
  }

  /** Creates the parent directory of {@code targetFile} if missing; returns {@code targetFile}. */
  protected File ensureFolderExists(File targetFile) {
    File folder = targetFile.getParentFile();
    if (!folder.exists()) {
      log.info("Creating folder: " + folder);
      if (!folder.mkdirs()) {
        log.severe("Failed to create " + folder);
      }
    }
    return targetFile;
  }

  /**
   * Logs the full stack trace only the first time a given {@code action} fails;
   * later failures of the same action are logged as a single line.
   */
  protected synchronized void printOnce(String action, Throwable error) {
    if (!printedErrors.contains(action)) {
      log.log(SEVERE, error.getMessage(), error);
      printedErrors.add(action);
    }
    else {
      log.severe("Failed to " + action + ": " + error);
    }
  }

  /** Current time in millis; extracted so tests can override it. */
  protected long timestamp() {
    return System.currentTimeMillis();
  }

  /**
   * Crops a screenshot to an element that lives inside an iframe and saves it
   * as a PNG. Returns null on failure (already logged).
   */
  public File takeScreenshot(Driver driver, WebElement iframe, WebElement element) {
    try {
      BufferedImage dest = takeScreenshotAsImage(driver, iframe, element);
      if (dest == null) {
        return null;
      }
      File screenshotOfElement = new File(reportsFolder, generateScreenshotFileName() + ".png");
      ensureFolderExists(screenshotOfElement);
      ImageIO.write(dest, "png", screenshotOfElement);
      return screenshotOfElement;
    }
    catch (IOException e) {
      log.log(SEVERE, "Failed to take screenshot of " + element + " inside frame " + iframe, e);
      return null;
    }
  }

  /**
   * Takes a full-window screenshot and crops it to an element inside an iframe.
   * Element coordinates are relative to the iframe, so the iframe's own offset
   * is added when cropping. Temporarily switches WebDriver focus into the frame
   * (to measure its client width via JavaScript) and switches back before cropping.
   */
  public BufferedImage takeScreenshotAsImage(Driver driver, WebElement iframe, WebElement element) {
    WebDriver webdriver = checkIfFullyValidDriver(driver);
    if (webdriver == null) {
      return null;
    }
    byte[] screen = ((TakesScreenshot) webdriver).getScreenshotAs(OutputType.BYTES);
    Point iframeLocation = iframe.getLocation();
    BufferedImage img;
    try {
      img = ImageIO.read(new ByteArrayInputStream(screen));
    }
    catch (IOException e) {
      log.log(SEVERE, "Failed to take screenshot of " + element + " inside frame " + iframe, e);
      return null;
    }
    catch (RasterFormatException ex) {
      log.warning("Cannot take screenshot because iframe is not displayed");
      return null;
    }
    int iframeHeight = iframe.getSize().getHeight();
    SelenideTargetLocator switchTo = new SelenideTargetLocator(driver.getWebDriver());
    switchTo.frame(iframe);
    // Width must be read from inside the frame: document.body.clientWidth of the frame document.
    int iframeWidth = ((Long) ((JavascriptExecutor) webdriver).executeScript("return document.body.clientWidth")).intValue();

    // Clamp the iframe box to the captured image...
    if (iframeHeight > img.getHeight()) {
      iframeHeight = img.getHeight() - iframeLocation.getY();
    }
    if (iframeWidth > img.getWidth()) {
      iframeWidth = img.getWidth() - iframeLocation.getX();
    }
    Point elementLocation = element.getLocation();
    int elementWidth = element.getSize().getWidth();
    int elementHeight = element.getSize().getHeight();
    // ...then clamp the element box to the iframe box.
    if (elementWidth > iframeWidth) {
      elementWidth = iframeWidth - elementLocation.getX();
    }
    if (elementHeight > iframeHeight) {
      elementHeight = iframeHeight - elementLocation.getY();
    }
    switchTo.defaultContent();
    try {
      // Element coordinates are frame-relative; offset by the iframe's position on the page.
      img = img.getSubimage(iframeLocation.getX() + elementLocation.getX(), iframeLocation.getY() + elementLocation.getY(), elementWidth, elementHeight);
    }
    catch (RasterFormatException ex) {
      log.warning("Cannot take screenshot because element is not displayed in iframe");
      return null;
    }
    return img;
  }

  /**
   * Returns the WebDriver if it is started and supports both screenshots and
   * JavaScript execution; otherwise logs a warning and returns null.
   */
  private WebDriver checkIfFullyValidDriver(Driver driver) {
    if (!driver.hasWebDriverStarted()) {
      log.warning("Cannot take screenshot because browser is not started");
      return null;
    }
    WebDriver webdriver = driver.getWebDriver();
    if (!(webdriver instanceof TakesScreenshot)) {
      log.warning("Cannot take screenshot because browser does not support screenshots");
      return null;
    }
    else if (!(webdriver instanceof JavascriptExecutor)) {
      log.warning("Cannot take screenshot as driver is not supporting javascript execution");
      return null;
    }
    return webdriver;
  }

  /**
   * Takes a screenshot and returns it as the temp File produced by WebDriver
   * (not copied into the reports folder). May return null.
   */
  public File takeScreenShotAsFile(Driver driver) {
    if (!driver.hasWebDriverStarted()) {
      log.warning("Cannot take screenshot because browser is not started");
      return null;
    }
    WebDriver webdriver = driver.getWebDriver();
    //File pageSource = savePageSourceToFile(fileName, webdriver); - temporary not available
    File scrFile = getPageImage(webdriver);
    addToHistory(scrFile);
    return scrFile;
  }

  /** Returns an in-memory screenshot file, or null if the driver cannot take screenshots. */
  protected File getPageImage(WebDriver webdriver) {
    File scrFile = null;
    if (webdriver instanceof TakesScreenshot) {
      scrFile = takeScreenshotInMemory((TakesScreenshot) webdriver);
    }
    return scrFile;
  }

  /** Records the screenshot in the current thread context (if any) and the global history. */
  protected File addToHistory(File screenshot) {
    if (currentContextScreenshots.get() != null) {
      currentContextScreenshots.get().add(screenshot);
    }
    synchronized (allScreenshots) {
      allScreenshots.add(screenshot);
    }
    return screenshot;
  }

  /** Delegates to WebDriver's screenshot-to-temp-file; returns null on any failure. */
  protected File takeScreenshotInMemory(TakesScreenshot driver) {
    try {
      return driver.getScreenshotAs(FILE);
    }
    catch (Exception e) {
      log.log(SEVERE, "Failed to take screenshot in memory", e);
      return null;
    }
  }

  /** Saves "&lt;fileName&gt;.png" in the reports folder; null if the driver can't screenshot. */
  protected File savePageImageToFile(String fileName, WebDriver webdriver) {
    File imageFile = null;
    if (webdriver instanceof TakesScreenshot) {
      imageFile = takeScreenshotImage((TakesScreenshot) webdriver, fileName);
    }
    return imageFile;
  }

  protected File savePageSourceToFile(String fileName, WebDriver webdriver) {
    return savePageSourceToFile(fileName, webdriver, true);
  }

  /**
   * Saves the current page source to "&lt;fileName&gt;.html".
   * If an alert blocks getPageSource(), accepts it once and retries
   * ({@code retryIfAlert} guards against an infinite retry loop).
   * On other WebDriver/runtime failures the exception text is written
   * into the html file instead, so the report still contains a trace.
   */
  protected File savePageSourceToFile(String fileName, WebDriver webdriver, boolean retryIfAlert) {
    File pageSource = new File(reportsFolder, fileName + ".html");
    try {
      writeToFile(webdriver.getPageSource(), pageSource);
    }
    catch (UnhandledAlertException e) {
      if (retryIfAlert) {
        try {
          Alert alert = webdriver.switchTo().alert();
          log.severe(e + ": " + alert.getText());
          alert.accept();
          savePageSourceToFile(fileName, webdriver, false);
        }
        catch (Exception unableToCloseAlert) {
          log.severe("Failed to close alert: " + unableToCloseAlert);
        }
      }
      else {
        printOnce("savePageSourceToFile", e);
      }
    }
    catch (WebDriverException e) {
      log.log(WARNING, "Failed to save page source to " + fileName + " because of " + e);
      writeToFile(e.toString(), pageSource);
      return pageSource;
    }
    catch (RuntimeException e) {
      log.log(SEVERE, "Failed to save page source to " + fileName, e);
      writeToFile(e.toString(), pageSource);
    }
    return pageSource;
  }

  /**
   * Takes a screenshot and copies it to "&lt;fileName&gt;.png" in the reports folder.
   * NOTE(review): returns the target file even if the copy itself failed
   * (copy failure is only logged) — presumably intentional, so the report
   * link still points somewhere; confirm before changing.
   */
  protected File takeScreenshotImage(TakesScreenshot driver, String fileName) {
    try {
      File scrFile = driver.getScreenshotAs(FILE);
      File imageFile = new File(reportsFolder, fileName + ".png");
      try {
        copyFile(scrFile, imageFile);
      }
      catch (IOException e) {
        log.log(SEVERE, "Failed to save screenshot to " + imageFile, e);
      }
      return imageFile;
    }
    catch (WebDriverException e) {
      log.log(SEVERE, "Failed to take screenshot to " + fileName + " because of " + e);
      return null;
    }
  }

  protected void copyFile(File sourceFile, File targetFile) throws IOException {
    try (FileInputStream in = new FileInputStream(sourceFile)) {
      copyFile(in, targetFile);
    }
  }

  /** Streams {@code in} into {@code targetFile}, creating parent folders as needed. */
  protected void copyFile(InputStream in, File targetFile) throws IOException {
    ensureFolderExists(targetFile);
    try (FileOutputStream out = new FileOutputStream(targetFile)) {
      byte[] buffer = new byte[1024];
      int len;
      while ((len = in.read(buffer)) != -1) {
        out.write(buffer, 0, len);
      }
    }
  }

  /** Writes {@code content} to {@code targetFile} as UTF-8; failures are logged, not thrown. */
  protected void writeToFile(String content, File targetFile) {
    try (ByteArrayInputStream in = new ByteArrayInputStream(content.getBytes(UTF_8))) {
      copyFile(in, targetFile);
    }
    catch (IOException e) {
      log.log(SEVERE, "Failed to write file " + targetFile.getAbsolutePath(), e);
    }
  }

  /** Starts a per-thread context named after a test class and method. */
  public void startContext(String className, String methodName) {
    String context = className.replace('.', separatorChar) + separatorChar + methodName + separatorChar;
    startContext(context);
  }

  /** Starts a per-thread context: subsequent screenshots get this prefix and are collected. */
  public void startContext(String context) {
    currentContext.set(context);
    currentContextScreenshots.set(new ArrayList<>());
  }

  /** Ends the per-thread context and returns the screenshots collected since it started. */
  public List<File> finishContext() {
    List<File> result = currentContextScreenshots.get();
    currentContext.set("");
    currentContextScreenshots.remove();
    return result;
  }

  /** Read-only snapshot view of the global screenshot history. */
  public List<File> getScreenshots() {
    synchronized (allScreenshots) {
      return Collections.unmodifiableList(allScreenshots);
    }
  }

  /** Most recent screenshot across all threads, or null if none were taken. */
  public File getLastScreenshot() {
    synchronized (allScreenshots) {
      return allScreenshots.isEmpty() ? null : allScreenshots.get(allScreenshots.size() - 1);
    }
  }

  /**
   * Takes a screenshot and formats its path for reports:
   * as a public CI URL when Configuration.reportsUrl is set
   * (assuming the file lives under the current working directory),
   * otherwise as a local file:// URL. Returns "" when screenshots are
   * disabled or the screenshot failed.
   */
  public String formatScreenShotPath(Driver driver) {
    if (!Configuration.screenshots) {
      log.config("Automatic screenshots are disabled.");
      return "";
    }
    String screenshot = takeScreenShot(driver);
    if (screenshot == null) {
      return "";
    }
    if (Configuration.reportsUrl != null) {
      // Strip the working directory prefix to get the path relative to the CI workspace.
      String screenshotRelativePath = screenshot.substring(System.getProperty("user.dir").length() + 1);
      String screenshotUrl = Configuration.reportsUrl + screenshotRelativePath.replace('\\', '/');
      try {
        screenshotUrl = new URL(screenshotUrl).toExternalForm();
      }
      catch (MalformedURLException ignore) {
        // ignored exception
      }
      log.config("Replaced screenshot file path '" + screenshot + "' by public CI URL '" + screenshotUrl + "'");
      return screenshotUrl;
    }
    log.config("reportsUrl is not configured. Returning screenshot file name '" + screenshot + "'");
    try {
      return new File(screenshot).toURI().toURL().toExternalForm();
    }
    catch (MalformedURLException e) {
      return "file://" + screenshot;
    }
  }
}
oppa functional style
src/main/java/com/codeborne/selenide/impl/ScreenShotLaboratory.java
oppa functional style
Java
mit
57abf3a5b9638f264179e737d5a33bd02229a2ce
0
westerwave/livestreamer_twitch_gui,westerwave/livestreamer_twitch_gui
/** * MIT License * * Copyright (c) 2016 Jan-Niklas Keck * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package app.lsgui.utils; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Locale; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.stream.JsonWriter; import app.lsgui.model.IChannel; import app.lsgui.model.IService; import app.lsgui.model.generic.GenericService; import app.lsgui.model.twitch.TwitchService; import javafx.beans.property.BooleanProperty; import javafx.beans.property.ListProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleListProperty; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; import javafx.collections.FXCollections; import javafx.collections.ObservableList; /** * * @author Niklas 11.06.2016 * */ public final class Settings { private static final Logger LOGGER = LoggerFactory.getLogger(Settings.class); private static final String FILEPATH = System.getProperty("user.home") + "/.lsgui/settings.json"; private static final long TIMEOUT = 5000L; private static final int DEFAULT_GAMES_TO_LOAD = 20; private static final int DEFAULT_CHANNELS_TO_LOAD = 20; private static final String TWITCH_USER_STRING = "twitchusername"; private static final String TWITCH_OAUTH_STRING = "twitchoauth"; private static final String TWITCH_SORT = "twitchsorting"; private static final String PATH = "recordingpath"; private static final String CHANNELS_LOAD = "load_max_channels"; private static final String GAMES_LOAD = "load_max_games"; private static final String SERVICE_NAME = "serviceName"; private static final String SERVICE_URL = "serviceURL"; private static final String MINIMIZE_TO_TRAY_STRING = "minimizetotray"; private static final String WINDOWSTYLE_STRING = 
"windowstyle"; private static final String EXEPATH_STRING = "livestreamerexe"; private static final String QUALITY_STRING = "quality"; private static final String FAVOURITE_GAMES = "favouriteGames"; private static final String DEFAULT_TOKEN = "vkwhrtlhzcz3o91nu386ub62p5j6sk"; private static Settings instance; private ListProperty<IService> services = new SimpleListProperty<>(); private ListProperty<String> favouriteGames = new SimpleListProperty<>(); private BooleanProperty sortTwitch = new SimpleBooleanProperty(); private boolean minimizeToTray = true; private String windowStyle = "LightStyle"; private String currentService = "twitch.tv"; private String twitchUser = ""; private String twitchOAuth = ""; private int maxGamesLoad; private int maxChannelsLoad; private StringProperty liveStreamerExePath = new SimpleStringProperty(); private StringProperty quality = new SimpleStringProperty("Best"); private StringProperty recordingPath = new SimpleStringProperty(); private StringProperty updateLink = new SimpleStringProperty(); private boolean isLoading; private Settings() { } public static synchronized Settings getInstance() { if (instance == null) { instance = new Settings(); final File settings = new File(FILEPATH); if (!instance.isLoading && settings.exists() && settings.isFile() && !LsGuiUtils.isFileEmpty(settings)) { LOGGER.info("Loading Settings from File"); instance.loadSettingsFromFile(settings); } else { LOGGER.info("Settings file does not exists. Creating default File."); instance.saveSettings(); } } return instance; } public void saveSettings() { File settings = null; try { settings = new File(FILEPATH); final boolean createdDirs = settings.getParentFile().mkdirs(); final boolean result = settings.createNewFile(); LOGGER.debug("Settings Dir created? {}. Settings file was created? 
{}", createdDirs, result); } catch (IOException e) { LOGGER.error("ERROR while creaing Settings file", e); } this.createSettingsJson(settings); } private void loadSettingsFromFile(final File file) { this.isLoading = true; final JsonArray jsonArray = JsonUtils.getJsonArrayFromFile(file); this.loadSettings(jsonArray); this.loadServices(jsonArray); } private void loadSettings(final JsonArray jArray) { final JsonObject settings = jArray.get(0).getAsJsonObject(); this.sortTwitch.setValue(JsonUtils.getBooleanSafe(settings.get(TWITCH_SORT), false)); this.minimizeToTray = JsonUtils.getBooleanSafe(settings.get(MINIMIZE_TO_TRAY_STRING), false); this.twitchUser = JsonUtils.getStringSafe(settings.get(TWITCH_USER_STRING), ""); this.twitchOAuth = JsonUtils.getStringSafe(settings.get(TWITCH_OAUTH_STRING), DEFAULT_TOKEN); this.windowStyle = JsonUtils.getStringSafe(settings.get(WINDOWSTYLE_STRING), "LightStyle"); this.liveStreamerExePath.set(JsonUtils.getStringSafe(settings.get(EXEPATH_STRING), "")); this.maxChannelsLoad = JsonUtils.getIntSafe(settings.get(CHANNELS_LOAD), DEFAULT_CHANNELS_TO_LOAD); this.maxGamesLoad = JsonUtils.getIntSafe(settings.get(GAMES_LOAD), DEFAULT_GAMES_TO_LOAD); this.quality.set(JsonUtils.getStringSafe(settings.get(QUALITY_STRING), "Best")); this.recordingPath.set(JsonUtils.getStringSafe(settings.get(PATH), System.getProperty("user.home"))); final JsonArray favouritesArray = JsonUtils.getJsonArraySafe(FAVOURITE_GAMES, settings); for (int i = 0; i < favouritesArray.size(); i++) { final String favourite = favouritesArray.get(i).getAsString(); this.addFavouriteGame(favourite); } } private void loadServices(final JsonArray jArray) { this.services.set(FXCollections.observableArrayList()); final JsonArray servicesArray = jArray.get(1).getAsJsonArray(); for (int i = 0; i < servicesArray.size(); i++) { final JsonObject serviceJson = servicesArray.get(i).getAsJsonObject(); final String serviceName = serviceJson.get(SERVICE_NAME).getAsString(); final String 
serviceUrl = serviceJson.get(SERVICE_URL).getAsString(); final IService service; if (serviceUrl.toLowerCase(Locale.ENGLISH).contains("twitch")) { service = new TwitchService(serviceName, serviceUrl); } else { service = new GenericService(serviceName, serviceUrl); } final JsonArray channels = serviceJson.get("channels").getAsJsonArray(); for (int e = 0; e < channels.size(); e++) { final String channel = channels.get(e).getAsString(); service.addChannel(channel); } this.services.get().add(service); } } private void createSettingsJson(final File file) { try (final FileOutputStream outputStream = new FileOutputStream(file);) { final BufferedWriter bufferedWriter = new BufferedWriter( new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)); final JsonWriter jsonWriter = new JsonWriter(bufferedWriter); jsonWriter.setIndent(" "); jsonWriter.beginArray(); jsonWriter.beginObject(); jsonWriter.name(TWITCH_USER_STRING).value(this.twitchUser); jsonWriter.name(TWITCH_OAUTH_STRING).value(this.twitchOAuth); jsonWriter.name(TWITCH_SORT).value(this.sortTwitch.get()); jsonWriter.name(QUALITY_STRING).value(this.getQuality().get()); jsonWriter.name(PATH).value(this.getRecordingPath().get()); jsonWriter.name(CHANNELS_LOAD).value(this.maxChannelsLoad); jsonWriter.name(GAMES_LOAD).value(this.maxGamesLoad); jsonWriter.name(MINIMIZE_TO_TRAY_STRING).value(this.minimizeToTray); jsonWriter.name(WINDOWSTYLE_STRING).value(this.windowStyle); jsonWriter.name(EXEPATH_STRING).value(this.getLivestreamerExePath().get()); this.writeFavouriteGames(jsonWriter); jsonWriter.endObject(); this.writeServices(jsonWriter); jsonWriter.endArray(); jsonWriter.close(); bufferedWriter.close(); } catch (IOException e) { LOGGER.error("ERROR while writing to Settings file", e); } } private void writeServices(final JsonWriter writer) throws IOException { writer.beginArray(); for (final IService service : this.services) { LOGGER.debug("Creating JSON for Service {}", service.getName().get()); writer.beginObject(); 
writer.name(SERVICE_NAME).value(service.getName().get()); writer.name(SERVICE_URL).value(service.getUrl().get()); writer.name("channels"); writer.beginArray(); for (final IChannel channel : service.getChannelProperty().get()) { if (channel.getName().get() != null) { writer.value(channel.getName().get()); } } writer.endArray(); writer.endObject(); } writer.endArray(); } private void writeFavouriteGames(final JsonWriter writer) throws IOException { LOGGER.debug("Writing Favourites to Settings file"); writer.name(FAVOURITE_GAMES); writer.beginArray(); for (final String favourite : this.favouriteGames) { LOGGER.trace("Writing Favourite '{}' to file", favourite); writer.value(favourite); } writer.endArray(); } public ListProperty<IService> getStreamServices() { return this.services; } public BooleanProperty getSortTwitch() { return this.sortTwitch; } public String getCurrentStreamService() { return this.currentService; } public String getTwitchUser() { return this.twitchUser; } public void setTwitchUser(final String twitchUser) { this.twitchUser = twitchUser; } public String getTwitchOAuth() { return this.twitchOAuth; } public void setTwitchOAuth(final String twitchOAuth) { this.twitchOAuth = twitchOAuth; } public int getMaxGamesLoad() { return this.maxGamesLoad; } public void setMaxGamesLoad(final int maxGamesLoad) { this.maxGamesLoad = maxGamesLoad; } public int getMaxChannelsLoad() { return this.maxChannelsLoad; } public void setMaxChannelsLoad(final int maxChannelsLoad) { this.maxChannelsLoad = maxChannelsLoad; } public long getTimeout() { return TIMEOUT; } public boolean isMinimizeToTray() { return this.minimizeToTray; } public void setMinimizeToTray(final boolean minimizeToTray) { this.minimizeToTray = minimizeToTray; } public String getWindowStyle() { return this.windowStyle; } public void setWindowStyle(final String windowStyle) { this.windowStyle = windowStyle; } public StringProperty getLivestreamerExePath() { return this.liveStreamerExePath; } public 
StringProperty getQuality() { return this.quality; } public StringProperty getRecordingPath() { return this.recordingPath; } public IService getTwitchService() { final List<IService> servicesAsList = this.getStreamServices().get(); final Optional<IService> serviceOptional = servicesAsList.stream().filter(TwitchUtils::isTwitchService) .findFirst(); if (serviceOptional.isPresent()) { return serviceOptional.get(); } return null; } public StringProperty getUpdateLink() { return this.updateLink; } public ListProperty<String> getFavouriteGames() { return this.favouriteGames; } public void addFavouriteGame(final String game) { final ObservableList<String> favourites = FXCollections.observableArrayList(this.favouriteGames); favourites.add(game); this.getFavouriteGames().set(favourites); } public void removeFavouriteGame(final String game) { final ObservableList<String> favourites = FXCollections.observableArrayList(this.favouriteGames); favourites.remove(game); this.getFavouriteGames().set(favourites); } }
src/main/java/app/lsgui/utils/Settings.java
/** * MIT License * * Copyright (c) 2016 Jan-Niklas Keck * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package app.lsgui.utils; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Locale; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.stream.JsonWriter; import app.lsgui.model.IChannel; import app.lsgui.model.IService; import app.lsgui.model.generic.GenericService; import app.lsgui.model.twitch.TwitchService; import javafx.beans.property.BooleanProperty; import javafx.beans.property.ListProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleListProperty; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; import javafx.collections.FXCollections; import javafx.collections.ObservableList; /** * * @author Niklas 11.06.2016 * */ public final class Settings { private static final Logger LOGGER = LoggerFactory.getLogger(Settings.class); private static final String FILEPATH = System.getProperty("user.home") + "/.lsgui/settings.json"; private static final long TIMEOUT = 5000L; private static final int DEFAULT_GAMES_TO_LOAD = 20; private static final int DEFAULT_CHANNELS_TO_LOAD = 20; private static final String TWITCH_USER_STRING = "twitchusername"; private static final String TWITCH_OAUTH_STRING = "twitchoauth"; private static final String TWITCH_SORT = "twitchsorting"; private static final String PATH = "recordingpath"; private static final String CHANNELS_LOAD = "load_max_channels"; private static final String GAMES_LOAD = "load_max_games"; private static final String SERVICE_NAME = "serviceName"; private static final String SERVICE_URL = "serviceURL"; private static final String MINIMIZE_TO_TRAY_STRING = "minimizetotray"; private static final String WINDOWSTYLE_STRING = 
"windowstyle"; private static final String EXEPATH_STRING = "livestreamerexe"; private static final String QUALITY_STRING = "quality"; private static final String FAVOURITE_GAMES = "favouriteGames"; private static Settings instance; private ListProperty<IService> services = new SimpleListProperty<>(); private ListProperty<String> favouriteGames = new SimpleListProperty<>(); private BooleanProperty sortTwitch = new SimpleBooleanProperty(); private boolean minimizeToTray = true; private String windowStyle = "LightStyle"; private String currentService = "twitch.tv"; private String twitchUser = ""; private String twitchOAuth = ""; private int maxGamesLoad; private int maxChannelsLoad; private StringProperty liveStreamerExePath = new SimpleStringProperty(); private StringProperty quality = new SimpleStringProperty("Best"); private StringProperty recordingPath = new SimpleStringProperty(); private StringProperty updateLink = new SimpleStringProperty(); private boolean isLoading; private Settings() { } public static synchronized Settings getInstance() { if (instance == null) { instance = new Settings(); final File settings = new File(FILEPATH); if (!instance.isLoading && settings.exists() && settings.isFile() && !LsGuiUtils.isFileEmpty(settings)) { LOGGER.info("Loading Settings from File"); instance.loadSettingsFromFile(settings); } else { LOGGER.info("Settings file does not exists. Creating default File."); instance.saveSettings(); } } return instance; } public void saveSettings() { File settings = null; try { settings = new File(FILEPATH); final boolean createdDirs = settings.getParentFile().mkdirs(); final boolean result = settings.createNewFile(); LOGGER.debug("Settings Dir created? {}. Settings file was created? 
{}", createdDirs, result); } catch (IOException e) { LOGGER.error("ERROR while creaing Settings file", e); } this.createSettingsJson(settings); } private void loadSettingsFromFile(final File file) { this.isLoading = true; final JsonArray jsonArray = JsonUtils.getJsonArrayFromFile(file); this.loadSettings(jsonArray); this.loadServices(jsonArray); } private void loadSettings(final JsonArray jArray) { final JsonObject settings = jArray.get(0).getAsJsonObject(); this.sortTwitch.setValue(JsonUtils.getBooleanSafe(settings.get(TWITCH_SORT), false)); this.minimizeToTray = JsonUtils.getBooleanSafe(settings.get(MINIMIZE_TO_TRAY_STRING), false); this.twitchUser = JsonUtils.getStringSafe(settings.get(TWITCH_USER_STRING), ""); this.twitchOAuth = JsonUtils.getStringSafe(settings.get(TWITCH_OAUTH_STRING), ""); this.windowStyle = JsonUtils.getStringSafe(settings.get(WINDOWSTYLE_STRING), "LightStyle"); this.liveStreamerExePath.set(JsonUtils.getStringSafe(settings.get(EXEPATH_STRING), "")); this.maxChannelsLoad = JsonUtils.getIntSafe(settings.get(CHANNELS_LOAD), DEFAULT_CHANNELS_TO_LOAD); this.maxGamesLoad = JsonUtils.getIntSafe(settings.get(GAMES_LOAD), DEFAULT_GAMES_TO_LOAD); this.quality.set(JsonUtils.getStringSafe(settings.get(QUALITY_STRING), "Best")); this.recordingPath.set(JsonUtils.getStringSafe(settings.get(PATH), System.getProperty("user.home"))); final JsonArray favouritesArray = JsonUtils.getJsonArraySafe(FAVOURITE_GAMES, settings); for (int i = 0; i < favouritesArray.size(); i++) { final String favourite = favouritesArray.get(i).getAsString(); this.addFavouriteGame(favourite); } } private void loadServices(final JsonArray jArray) { this.services.set(FXCollections.observableArrayList()); final JsonArray servicesArray = jArray.get(1).getAsJsonArray(); for (int i = 0; i < servicesArray.size(); i++) { final JsonObject serviceJson = servicesArray.get(i).getAsJsonObject(); final String serviceName = serviceJson.get(SERVICE_NAME).getAsString(); final String serviceUrl = 
serviceJson.get(SERVICE_URL).getAsString(); final IService service; if (serviceUrl.toLowerCase(Locale.ENGLISH).contains("twitch")) { service = new TwitchService(serviceName, serviceUrl); } else { service = new GenericService(serviceName, serviceUrl); } final JsonArray channels = serviceJson.get("channels").getAsJsonArray(); for (int e = 0; e < channels.size(); e++) { final String channel = channels.get(e).getAsString(); service.addChannel(channel); } this.services.get().add(service); } } private void createSettingsJson(final File file) { try (final FileOutputStream outputStream = new FileOutputStream(file);) { final BufferedWriter bufferedWriter = new BufferedWriter( new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)); final JsonWriter jsonWriter = new JsonWriter(bufferedWriter); jsonWriter.setIndent(" "); jsonWriter.beginArray(); jsonWriter.beginObject(); jsonWriter.name(TWITCH_USER_STRING).value(this.twitchUser); jsonWriter.name(TWITCH_OAUTH_STRING).value(this.twitchOAuth); jsonWriter.name(TWITCH_SORT).value(this.sortTwitch.get()); jsonWriter.name(QUALITY_STRING).value(this.getQuality().get()); jsonWriter.name(PATH).value(this.getRecordingPath().get()); jsonWriter.name(CHANNELS_LOAD).value(this.maxChannelsLoad); jsonWriter.name(GAMES_LOAD).value(this.maxGamesLoad); jsonWriter.name(MINIMIZE_TO_TRAY_STRING).value(this.minimizeToTray); jsonWriter.name(WINDOWSTYLE_STRING).value(this.windowStyle); jsonWriter.name(EXEPATH_STRING).value(this.getLivestreamerExePath().get()); this.writeFavouriteGames(jsonWriter); jsonWriter.endObject(); this.writeServices(jsonWriter); jsonWriter.endArray(); jsonWriter.close(); bufferedWriter.close(); } catch (IOException e) { LOGGER.error("ERROR while writing to Settings file", e); } } private void writeServices(final JsonWriter writer) throws IOException { writer.beginArray(); for (final IService service : this.services) { LOGGER.debug("Creating JSON for Service {}", service.getName().get()); writer.beginObject(); 
writer.name(SERVICE_NAME).value(service.getName().get()); writer.name(SERVICE_URL).value(service.getUrl().get()); writer.name("channels"); writer.beginArray(); for (final IChannel channel : service.getChannelProperty().get()) { if (channel.getName().get() != null) { writer.value(channel.getName().get()); } } writer.endArray(); writer.endObject(); } writer.endArray(); } private void writeFavouriteGames(final JsonWriter writer) throws IOException { LOGGER.debug("Writing Favourites to Settings file"); writer.name(FAVOURITE_GAMES); writer.beginArray(); for (final String favourite : this.favouriteGames) { LOGGER.trace("Writing Favourite '{}' to file", favourite); writer.value(favourite); } writer.endArray(); } public ListProperty<IService> getStreamServices() { return this.services; } public BooleanProperty getSortTwitch() { return this.sortTwitch; } public String getCurrentStreamService() { return this.currentService; } public String getTwitchUser() { return this.twitchUser; } public void setTwitchUser(final String twitchUser) { this.twitchUser = twitchUser; } public String getTwitchOAuth() { return this.twitchOAuth; } public void setTwitchOAuth(final String twitchOAuth) { this.twitchOAuth = twitchOAuth; } public int getMaxGamesLoad() { return this.maxGamesLoad; } public void setMaxGamesLoad(final int maxGamesLoad) { this.maxGamesLoad = maxGamesLoad; } public int getMaxChannelsLoad() { return this.maxChannelsLoad; } public void setMaxChannelsLoad(final int maxChannelsLoad) { this.maxChannelsLoad = maxChannelsLoad; } public long getTimeout() { return TIMEOUT; } public boolean isMinimizeToTray() { return this.minimizeToTray; } public void setMinimizeToTray(final boolean minimizeToTray) { this.minimizeToTray = minimizeToTray; } public String getWindowStyle() { return this.windowStyle; } public void setWindowStyle(final String windowStyle) { this.windowStyle = windowStyle; } public StringProperty getLivestreamerExePath() { return this.liveStreamerExePath; } public 
StringProperty getQuality() { return this.quality; } public StringProperty getRecordingPath() { return this.recordingPath; } public IService getTwitchService() { final List<IService> servicesAsList = this.getStreamServices().get(); final Optional<IService> serviceOptional = servicesAsList.stream().filter(TwitchUtils::isTwitchService) .findFirst(); if (serviceOptional.isPresent()) { return serviceOptional.get(); } return null; } public StringProperty getUpdateLink() { return this.updateLink; } public ListProperty<String> getFavouriteGames() { return this.favouriteGames; } public void addFavouriteGame(final String game) { final ObservableList<String> favourites = FXCollections.observableArrayList(this.favouriteGames); favourites.add(game); this.getFavouriteGames().set(favourites); } public void removeFavouriteGame(final String game) { final ObservableList<String> favourites = FXCollections.observableArrayList(this.favouriteGames); favourites.remove(game); this.getFavouriteGames().set(favourites); } }
add default token
src/main/java/app/lsgui/utils/Settings.java
add default token
Java
mit
bfadc2f044327407db7c5eff96cf9600522ff36d
0
victorvanni/bestbuy-search
package com.vvanni.listviewstudy; import android.app.Activity; import android.content.res.Configuration; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.*; import java.util.ArrayList; import java.util.List; public class ListActivity extends Activity { List products; ListView lvProducts; ////List of array strings which will serve as list items //ArrayList<String> listItems = new ArrayList<String>(); ////Defining a string adapter which will handle the data of the ListView //ArrayAdapter<String> adapter; ////Recording how many times the button has been clicked //int clickCounter=0; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_list); //if (adapter == null) { // adapter = new ArrayAdapter<String>(this, // android.R.layout.simple_list_item_1, // listItems); // setListAdapter(adapter); //} //public String name; //public double price; //public String img_url_small; //public String img_url_big; // populate data products = new ArrayList(); products.add(new Product( "Apple - iPhone 6 Plus 128GB - Space Gray (AT&T)", 899.98, "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640104_sa.jpg", "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640104_sb.jpg")); products.add(new Product( "Apple - iPhone 6 Plus 64GB - Gold (AT&T)", 799.98, "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640168_sa.jpg", "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640168_sb.jpg")); products.add(new Product( "Apple - iPhone 6 Plus 64GB - Silver (AT&T)", 799.98, "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640159_sa.jpg", "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640159_sb.jpg")); products.add(new Product( "Nest - Protect 2nd Generation (Battery) Smart Smoke/Carbon Monoxide Alarm - White", 99.99, 
"http://img.bbystatic.com/BestBuy_US/images/products/8077/8077101_rc.jpg", "http://img.bbystatic.com/BestBuy_US/images/products/8077/8077101_rb.jpg")); lvProducts = (ListView) findViewById(R.id.lists_product); lvProducts.setAdapter(new ProductListAdapterSimple(this, products)); } //Method which will handle dynamic insertion //public void addItems(View v) //{ // listItems.add("Clicked : " + clickCounter++); // adapter.notifyDataSetChanged(); //} }
app/src/main/java/com/vvanni/listviewstudy/ListActivity.java
package com.vvanni.listviewstudy; import android.content.res.Configuration; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.*; import java.util.ArrayList; import java.util.List; public class ListActivity extends android.app.ListActivity { List products; ListView lvProducts; ////List of array strings which will serve as list items //ArrayList<String> listItems = new ArrayList<String>(); ////Defining a string adapter which will handle the data of the ListView //ArrayAdapter<String> adapter; ////Recording how many times the button has been clicked //int clickCounter=0; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_list); //if (adapter == null) { // adapter = new ArrayAdapter<String>(this, // android.R.layout.simple_list_item_1, // listItems); // setListAdapter(adapter); //} //public String name; //public double price; //public String img_url_small; //public String img_url_big; // populate data products = new ArrayList(); products.add(new Product( "Apple - iPhone 6 Plus 128GB - Space Gray (AT&T)", 899.98, "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640104_sa.jpg", "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640104_sb.jpg")); products.add(new Product( "Apple - iPhone 6 Plus 64GB - Gold (AT&T)", 799.98, "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640168_sa.jpg", "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640168_sb.jpg")); products.add(new Product( "Apple - iPhone 6 Plus 64GB - Silver (AT&T)", 799.98, "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640159_sa.jpg", "http://images.bestbuy.com/BestBuy_US/images/products/7640/7640159_sb.jpg")); products.add(new Product( "Nest - Protect 2nd Generation (Battery) Smart Smoke/Carbon Monoxide Alarm - White", 99.99, "http://img.bbystatic.com/BestBuy_US/images/products/8077/8077101_rc.jpg", 
"http://img.bbystatic.com/BestBuy_US/images/products/8077/8077101_rb.jpg")); lvProducts = (ListView) findViewById(R.id.lists_product); lvProducts.setAdapter(new ProductListAdapterSimple(this, products)); } //Method which will handle dynamic insertion //public void addItems(View v) //{ // listItems.add("Clicked : " + clickCounter++); // adapter.notifyDataSetChanged(); //} }
Correct ListActivity extends (Activity instead of android...ListActivity)
app/src/main/java/com/vvanni/listviewstudy/ListActivity.java
Correct ListActivity extends (Activity instead of android...ListActivity)
Java
mit
4cae61d422d16b90149884f7e73bd0fda0f9cb95
0
onessimofalconi/bc-java,partheinstein/bc-java,open-keychain/spongycastle,lesstif/spongycastle,sonork/spongycastle,iseki-masaya/spongycastle,iseki-masaya/spongycastle,FAU-Inf2/spongycastle,open-keychain/spongycastle,sergeypayu/bc-java,Skywalker-11/spongycastle,lesstif/spongycastle,Skywalker-11/spongycastle,savichris/spongycastle,lesstif/spongycastle,sergeypayu/bc-java,bcgit/bc-java,sergeypayu/bc-java,onessimofalconi/bc-java,savichris/spongycastle,iseki-masaya/spongycastle,isghe/bc-java,isghe/bc-java,open-keychain/spongycastle,partheinstein/bc-java,FAU-Inf2/spongycastle,onessimofalconi/bc-java,sonork/spongycastle,savichris/spongycastle,partheinstein/bc-java,sonork/spongycastle,FAU-Inf2/spongycastle,isghe/bc-java,Skywalker-11/spongycastle,bcgit/bc-java,bcgit/bc-java
package org.bouncycastle.crypto.tls; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Vector; import org.bouncycastle.asn1.x509.KeyUsage; import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; import org.bouncycastle.crypto.params.AsymmetricKeyParameter; import org.bouncycastle.crypto.params.DHParameters; import org.bouncycastle.crypto.params.DHPrivateKeyParameters; import org.bouncycastle.crypto.params.DHPublicKeyParameters; import org.bouncycastle.crypto.params.RSAKeyParameters; import org.bouncycastle.crypto.util.PublicKeyFactory; /** * TLS 1.0 PSK key exchange (RFC 4279). */ public class TlsPSKKeyExchange extends AbstractTlsKeyExchange { protected TlsPSKIdentity pskIdentity; protected DHParameters dhParameters; protected int[] namedCurves; protected short[] clientECPointFormats, serverECPointFormats; protected byte[] psk_identity_hint = null; protected DHPrivateKeyParameters dhAgreePrivateKey = null; protected DHPublicKeyParameters dhAgreePublicKey = null; protected AsymmetricKeyParameter serverPublicKey = null; protected RSAKeyParameters rsaServerPublicKey = null; protected TlsEncryptionCredentials serverCredentials = null; protected byte[] premasterSecret; public TlsPSKKeyExchange(int keyExchange, Vector supportedSignatureAlgorithms, TlsPSKIdentity pskIdentity, DHParameters dhParameters, int[] namedCurves, short[] clientECPointFormats, short[] serverECPointFormats) { super(keyExchange, supportedSignatureAlgorithms); switch (keyExchange) { case KeyExchangeAlgorithm.DHE_PSK: case KeyExchangeAlgorithm.ECDHE_PSK: case KeyExchangeAlgorithm.PSK: case KeyExchangeAlgorithm.RSA_PSK: break; default: throw new IllegalArgumentException("unsupported key exchange algorithm"); } this.pskIdentity = pskIdentity; this.dhParameters = dhParameters; this.namedCurves = namedCurves; this.clientECPointFormats = clientECPointFormats; this.serverECPointFormats = serverECPointFormats; } public 
void skipServerCredentials() throws IOException { if (keyExchange == KeyExchangeAlgorithm.RSA_PSK) { throw new TlsFatalAlert(AlertDescription.unexpected_message); } } public void processServerCredentials(TlsCredentials serverCredentials) throws IOException { if (!(serverCredentials instanceof TlsEncryptionCredentials)) { throw new TlsFatalAlert(AlertDescription.internal_error); } processServerCertificate(serverCredentials.getCertificate()); this.serverCredentials = (TlsEncryptionCredentials)serverCredentials; } public byte[] generateServerKeyExchange() throws IOException { // TODO[RFC 4279] Need a server-side PSK API to determine hint and resolve identities to keys this.psk_identity_hint = null; if (this.psk_identity_hint == null && !requiresServerKeyExchange()) { return null; } ByteArrayOutputStream buf = new ByteArrayOutputStream(); if (this.psk_identity_hint == null) { TlsUtils.writeOpaque16(TlsUtils.EMPTY_BYTES, buf); } else { TlsUtils.writeOpaque16(this.psk_identity_hint, buf); } if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { if (this.dhParameters == null) { throw new TlsFatalAlert(AlertDescription.internal_error); } this.dhAgreePrivateKey = TlsDHUtils.generateEphemeralServerKeyExchange(context.getSecureRandom(), this.dhParameters, buf); } else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] } return buf.toByteArray(); } public void processServerCertificate(Certificate serverCertificate) throws IOException { if (keyExchange != KeyExchangeAlgorithm.RSA_PSK) { throw new TlsFatalAlert(AlertDescription.unexpected_message); } if (serverCertificate.isEmpty()) { throw new TlsFatalAlert(AlertDescription.bad_certificate); } org.bouncycastle.asn1.x509.Certificate x509Cert = serverCertificate.getCertificateAt(0); SubjectPublicKeyInfo keyInfo = x509Cert.getSubjectPublicKeyInfo(); try { this.serverPublicKey = PublicKeyFactory.createKey(keyInfo); } catch (RuntimeException e) { throw new 
TlsFatalAlert(AlertDescription.unsupported_certificate); } // Sanity check the PublicKeyFactory if (this.serverPublicKey.isPrivate()) { throw new TlsFatalAlert(AlertDescription.internal_error); } this.rsaServerPublicKey = validateRSAPublicKey((RSAKeyParameters)this.serverPublicKey); TlsUtils.validateKeyUsage(x509Cert, KeyUsage.keyEncipherment); super.processServerCertificate(serverCertificate); } public boolean requiresServerKeyExchange() { switch (keyExchange) { case KeyExchangeAlgorithm.DHE_PSK: case KeyExchangeAlgorithm.ECDHE_PSK: return true; default: return false; } } public void processServerKeyExchange(InputStream input) throws IOException { this.psk_identity_hint = TlsUtils.readOpaque16(input); if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { ServerDHParams serverDHParams = ServerDHParams.parse(input); this.dhAgreePublicKey = TlsDHUtils.validateDHPublicKey(serverDHParams.getPublicKey()); } else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] } } public void validateCertificateRequest(CertificateRequest certificateRequest) throws IOException { throw new TlsFatalAlert(AlertDescription.unexpected_message); } public void processClientCredentials(TlsCredentials clientCredentials) throws IOException { throw new TlsFatalAlert(AlertDescription.internal_error); } public void generateClientKeyExchange(OutputStream output) throws IOException { if (psk_identity_hint == null) { pskIdentity.skipIdentityHint(); } else { pskIdentity.notifyIdentityHint(psk_identity_hint); } byte[] psk_identity = pskIdentity.getPSKIdentity(); TlsUtils.writeOpaque16(psk_identity, output); if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { this.dhAgreePrivateKey = TlsDHUtils.generateEphemeralClientKeyExchange(context.getSecureRandom(), dhAgreePublicKey.getParameters(), output); } else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] throw new TlsFatalAlert(AlertDescription.internal_error); } else if (this.keyExchange == 
KeyExchangeAlgorithm.RSA_PSK) { this.premasterSecret = TlsRSAUtils.generateEncryptedPreMasterSecret(context, this.rsaServerPublicKey, output); } } public byte[] generatePremasterSecret() throws IOException { byte[] psk = pskIdentity.getPSK(); byte[] other_secret = generateOtherSecret(psk.length); ByteArrayOutputStream buf = new ByteArrayOutputStream(4 + other_secret.length + psk.length); TlsUtils.writeOpaque16(other_secret, buf); TlsUtils.writeOpaque16(psk, buf); return buf.toByteArray(); } protected byte[] generateOtherSecret(int pskLength) throws IOException { if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { if (dhAgreePrivateKey != null) { return TlsDHUtils.calculateDHBasicAgreement(dhAgreePublicKey, dhAgreePrivateKey); } throw new TlsFatalAlert(AlertDescription.internal_error); } if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] throw new TlsFatalAlert(AlertDescription.internal_error); } if (this.keyExchange == KeyExchangeAlgorithm.RSA_PSK) { return this.premasterSecret; } return new byte[pskLength]; } protected RSAKeyParameters validateRSAPublicKey(RSAKeyParameters key) throws IOException { // TODO What is the minimum bit length required? // key.getModulus().bitLength(); if (!key.getExponent().isProbablePrime(2)) { throw new TlsFatalAlert(AlertDescription.illegal_parameter); } return key; } }
core/src/main/java/org/bouncycastle/crypto/tls/TlsPSKKeyExchange.java
package org.bouncycastle.crypto.tls; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Vector; import org.bouncycastle.asn1.x509.KeyUsage; import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; import org.bouncycastle.crypto.params.AsymmetricKeyParameter; import org.bouncycastle.crypto.params.DHParameters; import org.bouncycastle.crypto.params.DHPrivateKeyParameters; import org.bouncycastle.crypto.params.DHPublicKeyParameters; import org.bouncycastle.crypto.params.RSAKeyParameters; import org.bouncycastle.crypto.util.PublicKeyFactory; /** * TLS 1.0 PSK key exchange (RFC 4279). */ public class TlsPSKKeyExchange extends AbstractTlsKeyExchange { protected TlsPSKIdentity pskIdentity; protected DHParameters dhParameters; protected int[] namedCurves; protected short[] clientECPointFormats, serverECPointFormats; protected byte[] psk_identity_hint = null; protected DHPrivateKeyParameters dhAgreePrivateKey = null; protected DHPublicKeyParameters dhAgreePublicKey = null; protected AsymmetricKeyParameter serverPublicKey = null; protected RSAKeyParameters rsaServerPublicKey = null; protected TlsEncryptionCredentials serverCredentials = null; protected byte[] premasterSecret; public TlsPSKKeyExchange(int keyExchange, Vector supportedSignatureAlgorithms, TlsPSKIdentity pskIdentity, DHParameters dhParameters, int[] namedCurves, short[] clientECPointFormats, short[] serverECPointFormats) { super(keyExchange, supportedSignatureAlgorithms); switch (keyExchange) { case KeyExchangeAlgorithm.DHE_PSK: case KeyExchangeAlgorithm.ECDHE_PSK: case KeyExchangeAlgorithm.PSK: case KeyExchangeAlgorithm.RSA_PSK: break; default: throw new IllegalArgumentException("unsupported key exchange algorithm"); } this.pskIdentity = pskIdentity; this.dhParameters = dhParameters; this.namedCurves = namedCurves; this.clientECPointFormats = clientECPointFormats; this.serverECPointFormats = serverECPointFormats; } public 
void skipServerCredentials() throws IOException { if (keyExchange == KeyExchangeAlgorithm.RSA_PSK) { throw new TlsFatalAlert(AlertDescription.unexpected_message); } } public void processServerCredentials(TlsCredentials serverCredentials) throws IOException { if (!(serverCredentials instanceof TlsEncryptionCredentials)) { throw new TlsFatalAlert(AlertDescription.internal_error); } processServerCertificate(serverCredentials.getCertificate()); this.serverCredentials = (TlsEncryptionCredentials)serverCredentials; } public byte[] generateServerKeyExchange() throws IOException { // TODO[RFC 4279] Need a server-side PSK API to determine hint and resolve identities to keys this.psk_identity_hint = null; if (this.psk_identity_hint == null && !requiresServerKeyExchange()) { return null; } ByteArrayOutputStream buf = new ByteArrayOutputStream(); if (this.psk_identity_hint == null) { TlsUtils.writeOpaque16(TlsUtils.EMPTY_BYTES, buf); } else { TlsUtils.writeOpaque16(this.psk_identity_hint, buf); } if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { this.dhAgreePrivateKey = TlsDHUtils.generateEphemeralServerKeyExchange(context.getSecureRandom(), this.dhParameters, buf); } else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] } return buf.toByteArray(); } public void processServerCertificate(Certificate serverCertificate) throws IOException { if (keyExchange != KeyExchangeAlgorithm.RSA_PSK) { throw new TlsFatalAlert(AlertDescription.unexpected_message); } if (serverCertificate.isEmpty()) { throw new TlsFatalAlert(AlertDescription.bad_certificate); } org.bouncycastle.asn1.x509.Certificate x509Cert = serverCertificate.getCertificateAt(0); SubjectPublicKeyInfo keyInfo = x509Cert.getSubjectPublicKeyInfo(); try { this.serverPublicKey = PublicKeyFactory.createKey(keyInfo); } catch (RuntimeException e) { throw new TlsFatalAlert(AlertDescription.unsupported_certificate); } // Sanity check the PublicKeyFactory if (this.serverPublicKey.isPrivate()) { throw 
new TlsFatalAlert(AlertDescription.internal_error); } this.rsaServerPublicKey = validateRSAPublicKey((RSAKeyParameters)this.serverPublicKey); TlsUtils.validateKeyUsage(x509Cert, KeyUsage.keyEncipherment); super.processServerCertificate(serverCertificate); } public boolean requiresServerKeyExchange() { switch (keyExchange) { case KeyExchangeAlgorithm.DHE_PSK: case KeyExchangeAlgorithm.ECDHE_PSK: return true; default: return false; } } public void processServerKeyExchange(InputStream input) throws IOException { this.psk_identity_hint = TlsUtils.readOpaque16(input); if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { ServerDHParams serverDHParams = ServerDHParams.parse(input); this.dhAgreePublicKey = TlsDHUtils.validateDHPublicKey(serverDHParams.getPublicKey()); } else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] } } public void validateCertificateRequest(CertificateRequest certificateRequest) throws IOException { throw new TlsFatalAlert(AlertDescription.unexpected_message); } public void processClientCredentials(TlsCredentials clientCredentials) throws IOException { throw new TlsFatalAlert(AlertDescription.internal_error); } public void generateClientKeyExchange(OutputStream output) throws IOException { if (psk_identity_hint == null) { pskIdentity.skipIdentityHint(); } else { pskIdentity.notifyIdentityHint(psk_identity_hint); } byte[] psk_identity = pskIdentity.getPSKIdentity(); TlsUtils.writeOpaque16(psk_identity, output); if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { this.dhAgreePrivateKey = TlsDHUtils.generateEphemeralClientKeyExchange(context.getSecureRandom(), dhAgreePublicKey.getParameters(), output); } else if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] throw new TlsFatalAlert(AlertDescription.internal_error); } else if (this.keyExchange == KeyExchangeAlgorithm.RSA_PSK) { this.premasterSecret = TlsRSAUtils.generateEncryptedPreMasterSecret(context, this.rsaServerPublicKey, output); } } 
public byte[] generatePremasterSecret() throws IOException { byte[] psk = pskIdentity.getPSK(); byte[] other_secret = generateOtherSecret(psk.length); ByteArrayOutputStream buf = new ByteArrayOutputStream(4 + other_secret.length + psk.length); TlsUtils.writeOpaque16(other_secret, buf); TlsUtils.writeOpaque16(psk, buf); return buf.toByteArray(); } protected byte[] generateOtherSecret(int pskLength) throws IOException { if (this.keyExchange == KeyExchangeAlgorithm.DHE_PSK) { if (dhAgreePrivateKey != null) { return TlsDHUtils.calculateDHBasicAgreement(dhAgreePublicKey, dhAgreePrivateKey); } throw new TlsFatalAlert(AlertDescription.internal_error); } if (this.keyExchange == KeyExchangeAlgorithm.ECDHE_PSK) { // TODO[RFC 5489] throw new TlsFatalAlert(AlertDescription.internal_error); } if (this.keyExchange == KeyExchangeAlgorithm.RSA_PSK) { return this.premasterSecret; } return new byte[pskLength]; } protected RSAKeyParameters validateRSAPublicKey(RSAKeyParameters key) throws IOException { // TODO What is the minimum bit length required? // key.getModulus().bitLength(); if (!key.getExponent().isProbablePrime(2)) { throw new TlsFatalAlert(AlertDescription.illegal_parameter); } return key; } }
Check dhParameters is set before using
core/src/main/java/org/bouncycastle/crypto/tls/TlsPSKKeyExchange.java
Check dhParameters is set before using
Java
mit
c98e98f1d473209b151f7a495a119cc6645cc863
0
gilleain/signatures
package signature; import java.util.Iterator; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; /** * A collection of vertex indices with the same canonical signature string. * * @author maclean * */ public class SymmetryClass implements Comparable<SymmetryClass> { /** * The signature string that the vertices all share */ private final String signatureString; /** * The set of vertex indices that have this signature string */ private final SortedSet<Integer> vertexIndices; /** * Make a symmetry class for the signature string * <code>signatureString</code>. * @param signatureString the signature string for this symmetry class */ public SymmetryClass(String signatureString) { this.signatureString = signatureString; this.vertexIndices = new TreeSet<Integer>(); } public Iterator<Integer> getVertexIndices() { return this.vertexIndices.iterator(); } public String getSignatureString() { return this.signatureString; } /** * Check that the symmetry class' string is the same as the supplied string. * * @param otherSignatureString the string to check * @return true if the strings are equal */ public boolean hasSignature(String otherSignatureString) { return this.signatureString.equals(otherSignatureString); } /** * Add a vertex index to the list. * * @param vertexIndex the vertex index to add */ public void addIndex(int vertexIndex) { this.vertexIndices.add(vertexIndex); } /** * If the vertex indexed by <code>vertexIndex</code> is in the symmetry * class then return the smaller of it and the lowest element. If it is not * in the symmetry class, return -1. 
* * @param vertexIndex * @return */ public int getMinimal(int vertexIndex, List<Integer> used) { int min = -1; for (int classIndex : this.vertexIndices) { if (classIndex == vertexIndex) { if (min == -1) { return vertexIndex; } else { return min; } } else { if (used.contains(classIndex)) { continue; } else { min = classIndex; } } } // the vertexIndex is not in the symmetry class return -1; } /* (non-Javadoc) * @see java.lang.Comparable#compareTo(java.lang.Object) */ public int compareTo(SymmetryClass o) { return this.signatureString.compareTo(o.signatureString); } public String toString() { return this.signatureString + " " + this.vertexIndices; } }
src/main/java/signature/SymmetryClass.java
package signature; import java.util.Iterator; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; /** * A collection of vertex indices with the same canonical signature string. * * @author maclean * */ public class SymmetryClass implements Comparable<SymmetryClass> { /** * The signature string that the vertices all share */ private final String signatureString; /** * The set of vertex indices that have this signature string */ private final SortedSet<Integer> vertexIndices; /** * Make a symmetry class for the signature string * <code>signatureString</code>. * @param signatureString the signature string for this symmetry class */ public SymmetryClass(String signatureString) { this.signatureString = signatureString; this.vertexIndices = new TreeSet<Integer>(); } public Iterator<Integer> getVertexIndices() { return this.vertexIndices.iterator(); } public String getSignatureString() { return this.signatureString; } /** * Check that the symmetry class' string is the same as the supplied string. * * @param otherSignatureString the string to check * @return true if the strings are equal */ public boolean hasSignature(String otherSignatureString) { return this.signatureString.equals(otherSignatureString); } /** * Add a vertex index to the list. * * @param vertexIndex the vertex index to add */ public void addIndex(int vertexIndex) { this.vertexIndices.add(vertexIndex); } /** * If the vertex indexed by <code>vertexIndex</code> is in the symmetry * class then return the smaller of it and the lowest element. If it is not * in the symmetry class, return -1. 
* * @param vertexIndex * @return */ public int getMinimal(int vertexIndex, List<Integer> used) { int min = -1; for (int classIndex : this.vertexIndices) { if (classIndex == vertexIndex) { if (min == -1) { return vertexIndex; } else { return min; } } else { if (used.contains(classIndex)) { continue; } else { min = classIndex; } } } // the vertexIndex is not in the symmetry class return -1; } /* (non-Javadoc) * @see java.lang.Comparable#compareTo(java.lang.Object) */ public int compareTo(SymmetryClass o) { return this.signatureString.compareTo(o.signatureString); } }
toString for SymmetryClass
src/main/java/signature/SymmetryClass.java
toString for SymmetryClass
Java
mit
4f22a01a2b60e040e9b479a6f1e842e7f51930f6
0
tndatacommons/android-grow-app,tndatacommons/android-app,izzyalonso/android-app,Revenaunt/android-app
package org.tndata.android.compass.model; import android.os.Parcel; import com.google.gson.annotations.SerializedName; /** * Model class for user goals. * * @author Ismael Alonso * @version 1.0.0 */ public class UserGoal extends Goal{ public static final String TYPE = "usergoal"; //Values retrieved from the API @SerializedName("goal") private TDCGoal mGoal; @SerializedName("primary_category") private long mPrimaryCategoryId; @SerializedName("engagement_rank") private double mEngagementRank; @SerializedName("weekly_completions") private int mWeeklyCompletions; /*---------* * GETTERS * *---------*/ public TDCGoal getGoal(){ return mGoal; } @Override public long getContentId(){ return mGoal.getId(); } @Override public String getTitle(){ return mGoal.getTitle(); } public String getDescription(){ return mGoal.getDescription(); } public String getHTMLDescription(){ return mGoal.getHTMLDescription(); } public long getPrimaryCategoryId(){ return mPrimaryCategoryId; } public int getEngagementRank(){ return (int)mEngagementRank; } public int getWeeklyCompletions(){ return mWeeklyCompletions; } @Override protected String getType(){ return TYPE; } /*---------* * UTILITY * *---------*/ @Override public void init(){ } @Override public String toString(){ return "UserGoal #" + getId() + " (" + mGoal.toString() + ")"; } @Override public int describeContents(){ return 0; } @Override public void writeToParcel(Parcel dest, int flags){ super.writeToParcel(dest, flags); dest.writeParcelable(mGoal, flags); dest.writeLong(mPrimaryCategoryId); dest.writeDouble(mEngagementRank); dest.writeInt(mWeeklyCompletions); } public static final Creator<UserGoal> CREATOR = new Creator<UserGoal>(){ @Override public UserGoal createFromParcel(Parcel source){ return new UserGoal(source); } @Override public UserGoal[] newArray(int size){ return new UserGoal[size]; } }; private UserGoal(Parcel src){ super(src); mGoal = src.readParcelable(TDCGoal.class.getClassLoader()); mPrimaryCategoryId = src.readLong(); 
mEngagementRank = src.readDouble(); mWeeklyCompletions =src.readInt(); } }
src/main/java/org/tndata/android/compass/model/UserGoal.java
package org.tndata.android.compass.model; import android.os.Parcel; import com.google.gson.annotations.SerializedName; /** * Model class for user goals. * * @author Ismael Alonso * @version 1.0.0 */ public class UserGoal extends Goal{ public static final String TYPE = "usergoal"; //Values retrieved from the API @SerializedName("goal") private TDCGoal mGoal; @SerializedName("primary_category") private long mPrimaryCategoryId; /*---------* * GETTERS * *---------*/ public TDCGoal getGoal(){ return mGoal; } @Override public long getContentId(){ return mGoal.getId(); } @Override public String getTitle(){ return mGoal.getTitle(); } public String getDescription(){ return mGoal.getDescription(); } public String getHTMLDescription(){ return mGoal.getHTMLDescription(); } public long getPrimaryCategoryId(){ return mPrimaryCategoryId; } @Override protected String getType(){ return TYPE; } /*---------* * UTILITY * *---------*/ @Override public void init(){ } @Override public String toString(){ return "UserGoal #" + getId() + " (" + mGoal.toString() + ")"; } @Override public int describeContents(){ return 0; } @Override public void writeToParcel(Parcel dest, int flags){ super.writeToParcel(dest, flags); dest.writeParcelable(mGoal, flags); dest.writeLong(mPrimaryCategoryId); } public static final Creator<UserGoal> CREATOR = new Creator<UserGoal>(){ @Override public UserGoal createFromParcel(Parcel source){ return new UserGoal(source); } @Override public UserGoal[] newArray(int size){ return new UserGoal[size]; } }; private UserGoal(Parcel src){ super(src); mGoal = src.readParcelable(TDCGoal.class.getClassLoader()); mPrimaryCategoryId = src.readLong(); } }
Add engagement data to UserGoal model
src/main/java/org/tndata/android/compass/model/UserGoal.java
Add engagement data to UserGoal model
Java
mit
16d2a5576f63cdc0801cd2d64e46552e4f70825e
0
tladesignz/DNATools
/** * DNA Android Tools. * * The MIT License (MIT) * * Copyright (c) 2015 Die Netzarchitekten e.U., Benjamin Erhart * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.netzarchitekten.tools; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.AssetManager; import android.content.res.Configuration; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.LocaleList; import android.util.DisplayMetrics; import java.util.Locale; /** * Encapsulates deprecation warning for * <ul> * <li>{@link android.content.res.Resources#getColor(int)} since Marshmallow (API 23)</li> * <li>{@link android.content.res.Resources#getDrawable(int)} since Lollipop MR1 (API 22)</li> * <li>{@link android.content.res.Configuration#locale} since N (API 24)</li> * </ul> * <p> * Contains a static and an OO interface. * </p> * <p> * Also, provides facilities to work with a different locale, then set by the user. 
* </p> * * @author Benjamin Erhart {@literal <[email protected]>} */ @SuppressWarnings({"WeakerAccess", "unused"}) public class Resources { private final Context mContext; private final android.content.res.Resources mResources; /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. */ public Resources(Context context) { mContext = context; mResources = mContext.getResources(); } /** * Honor deprecation of {@link android.content.res.Resources#getColor(int)} * since API 23. * * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return a single color value in the form 0xAARRGGBB. * @see android.content.res.Resources#getColor(int, android.content.res.Resources.Theme) */ @SuppressWarnings("deprecation") @SuppressLint("NewApi") public int getColor(int id) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) return mResources.getColor(id, null); return mResources.getColor(id); } /** * Honor deprecation of * {@link android.content.res.Resources#getDrawable(int)} since API 22. * * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return an object that can be used to draw this resource. */ @SuppressWarnings("deprecation") @SuppressLint("NewApi") public Drawable getDrawable(int id) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) return mResources.getDrawable(id, null); return mResources.getDrawable(id); } /** * Honor deprecation of {@link Configuration#locale} since API 24. * * @return the currently used primary locale. 
*/ @SuppressWarnings("deprecation") @SuppressLint("NewApi") public Locale getPrimaryLocale() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) return mResources.getConfiguration().getLocales().get(0); return mResources.getConfiguration().locale; } /** * <p> * Sets a new (and only) locale for this app until it is reset using {@link #resetLocale()}, * <b>as long</b>, as the primary locale isn't already the same. * </p> * <p> * Makes use of a side-effect of * {@link android.content.res.Resources#Resources(AssetManager, DisplayMetrics, Configuration)}, * which propagates the localization change. * </p> * * @param newLocale * The new {@link Locale}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use {@link #getContextWithNewLocale(Locale)} * instead, which dynamically uses a better method, if available. */ @SuppressWarnings("deprecation") @SuppressLint("NewApi") @Deprecated public Resources setLocale(Locale newLocale) { if (!getPrimaryLocale().equals(newLocale)) { Configuration newConfig = new Configuration(mResources.getConfiguration()); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { newConfig.setLocales(new LocaleList(newLocale)); } else { newConfig.locale = newLocale; } new android.content.res.Resources(mResources.getAssets(), mResources.getDisplayMetrics(), newConfig); } return this; } /** * <p> * Sets a new (and only) locale for this app until it is reset using {@link #resetLocale()}. * </p> * <p> * Makes use of a side-effect of * {@link android.content.res.Resources#Resources(AssetManager, DisplayMetrics, Configuration)}, * which propagates the localization change. * </p> * * @param newLocale * The new locale as {@link String}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use {@link #getContextWithNewLocale(String)} * instead, which dynamically uses a better method, if available. 
*/ @SuppressWarnings("deprecation") @Deprecated public Resources setLocale(String newLocale) { return setLocale(new Locale(newLocale)); } /** * <p> * Creates a new {@link Context} which uses a given {@link Locale} instead of the default one * used. * </p> * <p> * As a compatibility fallback for API &lt; 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext()} at the end of your usage and beware, that if you * don't or hold on too long to this, it can happen on API &lt; 17, that your complete app will * show in a different language! * </p> * * @param newLocale * The new {@link Locale}. * @return a {@link Context} using the given {@link Locale}. */ @SuppressWarnings("deprecation") public Context getContextWithNewLocale(Locale newLocale) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { Configuration override = new Configuration(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { override.setLocales(new LocaleList(newLocale)); } else { override.locale = newLocale; } return mContext.createConfigurationContext(override); } // Fallback for older versions, using the side-effect. setLocale(newLocale); return mContext; } /** * <p> * Creates a new {@link Context} which uses a given locale instead of the default one * used. * </p> * <p> * As a compatibility fallback for API &lt; 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext()} at the end of your usage and beware, that if you * don't or hold on too long to this, it can happen on API &lt; 17, that your complete app will * show in a different language! * </p> * * @param newLocale * The new locale as a {@link String}. 
* @return a {@link Context} using the given locale. */ public Context getContextWithNewLocale(String newLocale) { return getContextWithNewLocale(new Locale(newLocale)); } /** * Reset the current primary locale to the originally set device's locale using a * side-effect of * {@link android.content.res.Resources#Resources(AssetManager, DisplayMetrics, Configuration)}. * * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use {@link #giveUpNewLocaleContext()} * instead, which dynamically uses a better method, if available. */ @SuppressWarnings("deprecation") @Deprecated public Resources resetLocale() { new android.content.res.Resources(mResources.getAssets(), mResources.getDisplayMetrics(), mResources.getConfiguration()); return this; } /** * <p> * Give up the context with the new locale. * </p> * <p> * Actually, this does nothing on API &gt;= 17, since we just have to stop using the given * {@link Context}. Below, though, it resets the language on the main context to the original * one, since that is the workaround for older API versions. * </p> * * @return NULL, which you must assign to your local context variable. */ @SuppressWarnings("deprecation") public Context giveUpNewLocaleContext() { // Only, if we had to use the locale-injection into the normal context, this is useful. // Otherwise, we just do nothing. if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) { resetLocale(); } return null; } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return a single color value in the form 0xAARRGGBB. 
*/ public static int getColor(Context context, int id) { return new Resources(context).getColor(id); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return an object that can be used to draw this resource. */ public static Drawable getDrawable(Context context, int id) { return new Resources(context).getDrawable(id); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @return the currently used primary locale. */ public static Locale getPrimaryLocale(Context context) { return new Resources(context).getPrimaryLocale(); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new {@link Locale}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use * {@link #getContextWithNewLocale(Context, Locale)} instead, which dynamically uses a * better method, if available. */ @SuppressWarnings("deprecation") @Deprecated public static Resources setLocale(Context context, Locale newLocale) { return new Resources(context).setLocale(newLocale); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new locale as {@link String}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use * {@link #getContextWithNewLocale(Context, String)} instead, which dynamically uses a * better method, if available. 
*/ @SuppressWarnings("deprecation") @Deprecated public static Resources setLocale(Context context, String newLocale) { return new Resources(context).setLocale(newLocale); } /** * <p> * Creates a new {@link Context} which uses a given {@link Locale} instead of the default one * used. * </p> * <p> * As a compatibility fallback for API &lt; 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext(Context)} at the end of your usage and beware, that * if you don't or hold on too long to this, it can happen on API &lt; 17, that your complete * app will show in a different language! * </p> * * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new {@link Locale}. * @return a {@link Context} using the given {@link Locale}. */ public static Context getContextWithNewLocale(Context context, Locale newLocale) { return new Resources(context).getContextWithNewLocale(newLocale); } /** * <p> * Creates a new {@link Context} which uses a given locale instead of the default one * used. * </p> * <p> * As a compatibility fallback for API &lt; 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext(Context)} at the end of your usage and beware, that * if you don't or hold on too long to this, it can happen on API &lt; 17, that your complete * app will show in a different language! * </p> * * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new locale as a {@link String}. * @return a {@link Context} using the given locale. 
*/ public static Context getContextWithNewLocale(Context context, String newLocale) { return new Resources(context).getContextWithNewLocale(newLocale); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use * {@link #giveUpNewLocaleContext(Context)} instead, which dynamically uses a * better method, if available. */ @SuppressWarnings("deprecation") public static Resources resetLocale(Context context) { return new Resources(context).resetLocale(); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * * @return NULL, which you must assign to your local context variable. */ public static Context giveUpNewLocaleContext(Context context) { return new Resources(context).giveUpNewLocaleContext(); } }
tools/src/main/java/com/netzarchitekten/tools/Resources.java
/** * DNA Android Tools. * * The MIT License (MIT) * * Copyright (c) 2015 Die Netzarchitekten e.U., Benjamin Erhart * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.netzarchitekten.tools; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.AssetManager; import android.content.res.Configuration; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.LocaleList; import android.util.DisplayMetrics; import java.util.Locale; /** * Encapsulates deprecation warning for * <ul> * <li>{@link android.content.res.Resources#getColor(int)} since Marshmallow (API 23)</li> * <li>{@link android.content.res.Resources#getDrawable(int)} since Lollipop MR1 (API 22)</li> * <li>{@link android.content.res.Configuration#locale} since N (API 24)</li> * </ul> * <p> * Contains a static and an OO interface. * </p> * <p> * Also, provides facilities to work with a different locale, then set by the user. 
* </p> * * @author Benjamin Erhart {@literal <[email protected]>} */ @SuppressWarnings({"WeakerAccess", "unused"}) public class Resources { private final Context mContext; private final android.content.res.Resources mResources; /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. */ public Resources(Context context) { mContext = context; mResources = mContext.getResources(); } /** * Honor deprecation of {@link android.content.res.Resources#getColor(int)} * since API 23. * * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return a single color value in the form 0xAARRGGBB. * @see android.content.res.Resources#getColor(int, android.content.res.Resources.Theme) */ @SuppressWarnings("deprecation") @SuppressLint("NewApi") public int getColor(int id) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) return mResources.getColor(id, null); return mResources.getColor(id); } /** * Honor deprecation of * {@link android.content.res.Resources#getDrawable(int)} since API 22. * * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return an object that can be used to draw this resource. */ @SuppressWarnings("deprecation") @SuppressLint("NewApi") public Drawable getDrawable(int id) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) return mResources.getDrawable(id, null); return mResources.getDrawable(id); } /** * Honor deprecation of {@link Configuration#locale} since API 24. * * @return the currently used primary locale. 
*/ @SuppressWarnings("deprecation") @SuppressLint("NewApi") public Locale getPrimaryLocale() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) return mResources.getConfiguration().getLocales().get(0); return mResources.getConfiguration().locale; } /** * <p> * Sets a new (and only) locale for this app until it is reset using {@link #resetLocale()}, * <b>as long</b>, as the primary locale isn't already the same. * </p> * <p> * Makes use of a side-effect of * {@link android.content.res.Resources#Resources(AssetManager, DisplayMetrics, Configuration)}, * which propagates the localization change. * </p> * * @param newLocale * The new {@link Locale}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use {@link #getContextWithNewLocale(Locale)} * instead, which dynamically uses a better method, if available. */ @SuppressWarnings("deprecation") @SuppressLint("NewApi") @Deprecated public Resources setLocale(Locale newLocale) { if (!getPrimaryLocale().equals(newLocale)) { Configuration newConfig = new Configuration(mResources.getConfiguration()); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { newConfig.setLocales(new LocaleList(newLocale)); } else { newConfig.locale = newLocale; } new android.content.res.Resources(mResources.getAssets(), mResources.getDisplayMetrics(), newConfig); } return this; } /** * <p> * Sets a new (and only) locale for this app until it is reset using {@link #resetLocale()}. * </p> * <p> * Makes use of a side-effect of * {@link android.content.res.Resources#Resources(AssetManager, DisplayMetrics, Configuration)}, * which propagates the localization change. * </p> * * @param newLocale * The new locale as {@link String}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use {@link #getContextWithNewLocale(String)} * instead, which dynamically uses a better method, if available. 
*/ @SuppressWarnings("deprecation") @Deprecated public Resources setLocale(String newLocale) { return setLocale(new Locale(newLocale)); } /** * <p> * Creates a new {@link Context} which uses a given {@link Locale} instead of the default one * used. * </p> * <p> * As a compatibility fallback for API < 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext()} at the end of your usage and beware, that if you * don't or hold on too long to this, it can happen on API < 17, that your complete app will * show in a different language! * </p> * * @param newLocale * The new {@link Locale}. * @return a {@link Context} using the given {@link Locale}. */ @SuppressWarnings("deprecation") public Context getContextWithNewLocale(Locale newLocale) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { Configuration override = new Configuration(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { override.setLocales(new LocaleList(newLocale)); } else { override.locale = newLocale; } return mContext.createConfigurationContext(override); } // Fallback for older versions, using the side-effect. setLocale(newLocale); return mContext; } /** * <p> * Creates a new {@link Context} which uses a given locale instead of the default one * used. * </p> * <p> * As a compatibility fallback for API < 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext()} at the end of your usage and beware, that if you * don't or hold on too long to this, it can happen on API < 17, that your complete app will * show in a different language! * </p> * * @param newLocale * The new locale as a {@link String}. * @return a {@link Context} using the given locale. 
*/ public Context getContextWithNewLocale(String newLocale) { return getContextWithNewLocale(new Locale(newLocale)); } /** * Reset the current primary locale to the originally set device's locale using a * side-effect of * {@link android.content.res.Resources#Resources(AssetManager, DisplayMetrics, Configuration)}. * * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use {@link #giveUpNewLocaleContext()} * instead, which dynamically uses a better method, if available. */ @SuppressWarnings("deprecation") @Deprecated public Resources resetLocale() { new android.content.res.Resources(mResources.getAssets(), mResources.getDisplayMetrics(), mResources.getConfiguration()); return this; } /** * <p> * Give up the context with the new locale. * </p> * <p> * Actually, this does nothing on API >= 17, since we just have to stop using the given * {@link Context}. Below, though, it resets the language on the main context to the original * one, since that is the workaround for older API versions. * </p> * * @return NULL, which you must assign to your local context variable. */ @SuppressWarnings("deprecation") public Context giveUpNewLocaleContext() { // Only, if we had to use the locale-injection into the normal context, this is useful. // Otherwise, we just do nothing. if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) { resetLocale(); } return null; } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return a single color value in the form 0xAARRGGBB. */ public static int getColor(Context context, int id) { return new Resources(context).getColor(id); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. 
* @param id * The desired resource identifier, as generated by the aapt * tool. This integer encodes the package, type, and resource * entry. The value 0 is an invalid identifier. * @return an object that can be used to draw this resource. */ public static Drawable getDrawable(Context context, int id) { return new Resources(context).getDrawable(id); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @return the currently used primary locale. */ public static Locale getPrimaryLocale(Context context) { return new Resources(context).getPrimaryLocale(); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new {@link Locale}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use * {@link #getContextWithNewLocale(Context, Locale)} instead, which dynamically uses a * better method, if available. */ @SuppressWarnings("deprecation") @Deprecated public static Resources setLocale(Context context, Locale newLocale) { return new Resources(context).setLocale(newLocale); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new locale as {@link String}. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use * {@link #getContextWithNewLocale(Context, String)} instead, which dynamically uses a * better method, if available. */ @SuppressWarnings("deprecation") @Deprecated public static Resources setLocale(Context context, String newLocale) { return new Resources(context).setLocale(newLocale); } /** * <p> * Creates a new {@link Context} which uses a given {@link Locale} instead of the default one * used. * </p> * <p> * As a compatibility fallback for API < 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. 
* </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext(Context)} at the end of your usage and beware, that * if you don't or hold on too long to this, it can happen on API < 17, that your complete app * will show in a different language! * </p> * * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new {@link Locale}. * @return a {@link Context} using the given {@link Locale}. */ public static Context getContextWithNewLocale(Context context, Locale newLocale) { return new Resources(context).getContextWithNewLocale(newLocale); } /** * <p> * Creates a new {@link Context} which uses a given locale instead of the default one * used. * </p> * <p> * As a compatibility fallback for API < 17, where this is not possible, instead injects the * given Locale into the <b>current</b> context and returns that. * </p> * <p> * <b>ATTENTION</b>: Because of the fallback, make sure to call * {@link #giveUpNewLocaleContext(Context)} at the end of your usage and beware, that * if you don't or hold on too long to this, it can happen on API < 17, that your complete app * will show in a different language! * </p> * * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @param newLocale * The new locale as a {@link String}. * @return a {@link Context} using the given locale. */ public static Context getContextWithNewLocale(Context context, String newLocale) { return new Resources(context).getContextWithNewLocale(newLocale); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * @return this object for fluency. * @deprecated Using this side-effect is deprecated. Use * {@link #giveUpNewLocaleContext(Context)} instead, which dynamically uses a * better method, if available. 
*/ @SuppressWarnings("deprecation") public static Resources resetLocale(Context context) { return new Resources(context).resetLocale(); } /** * @param context * A context object to access the * {@link android.content.res.Resources} of the app. * * @return NULL, which you must assign to your local context variable. */ public static Context giveUpNewLocaleContext(Context context) { return new Resources(context).giveUpNewLocaleContext(); } }
Fixed JavaDoc.
tools/src/main/java/com/netzarchitekten/tools/Resources.java
Fixed JavaDoc.
Java
agpl-3.0
bf8f38445b5cd22cf01797d17faf75dfcdfe46e5
0
opencadc/vos,opencadc/vos,opencadc/vos
/* ************************************************************************ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * * (c) 2009. (c) 2009. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 * All rights reserved Tous droits réservés * * NRC disclaims any warranties, Le CNRC dénie toute garantie * expressed, implied, or énoncée, implicite ou légale, * statutory, of any kind with de quelque nature que ce * respect to the software, soit, concernant le logiciel, * including without limitation y compris sans restriction * any warranty of merchantability toute garantie de valeur * or fitness for a particular marchande ou de pertinence * purpose. NRC shall not be pour un usage particulier. * liable in any event for any Le CNRC ne pourra en aucun cas * damages, whether direct or être tenu responsable de tout * indirect, special or general, dommage, direct ou indirect, * consequential or incidental, particulier ou général, * arising from the use of the accessoire ou fortuit, résultant * software. Neither the name de l'utilisation du logiciel. Ni * of the National Research le nom du Conseil National de * Council of Canada nor the Recherches du Canada ni les noms * names of its contributors may de ses participants ne peuvent * be used to endorse or promote être utilisés pour approuver ou * products derived from this promouvoir les produits dérivés * software without specific prior de ce logiciel sans autorisation * written permission. préalable et particulière * par écrit. * * This file is part of the Ce fichier fait partie du projet * OpenCADC project. OpenCADC. 
* * OpenCADC is free software: OpenCADC est un logiciel libre ; * you can redistribute it and/or vous pouvez le redistribuer ou le * modify it under the terms of modifier suivant les termes de * the GNU Affero General Public la “GNU Affero General Public * License as published by the License” telle que publiée * Free Software Foundation, par la Free Software Foundation * either version 3 of the : soit la version 3 de cette * License, or (at your option) licence, soit (à votre gré) * any later version. toute version ultérieure. * * OpenCADC is distributed in the OpenCADC est distribué * hope that it will be useful, dans l’espoir qu’il vous * but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE * without even the implied GARANTIE : sans même la garantie * warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ * or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF * PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence * General Public License for Générale Publique GNU Affero * more details. pour plus de détails. * * You should have received Vous devriez avoir reçu une * a copy of the GNU Affero copie de la Licence Générale * General Public License along Publique GNU Affero avec * with OpenCADC. If not, see OpenCADC ; si ce n’est * <http://www.gnu.org/licenses/>. pas le cas, consultez : * <http://www.gnu.org/licenses/>. 
* * $Revision: 4 $ * ************************************************************************ */ package ca.nrc.cadc.conformance.uws; import com.meterware.httpunit.HttpException; import com.meterware.httpunit.WebConversation; import com.meterware.httpunit.WebResponse; import org.apache.log4j.Logger; import org.jdom.Document; import org.jdom.Element; import org.junit.Test; import static org.junit.Assert.*; public class JobsTest extends AbstractUWSTest { private static Logger log = Logger.getLogger(JobsTest.class); public JobsTest() { super(); setLoggingLevel(log); } /* * This test should only be run after the Servlet container for the UWS service * has been restarted. It expects that the UWS service has no Jobs. */ @Test public void testEmptyJobs() throws Exception { WebResponse response = null; try { // Request the UWS service. WebConversation conversation = new WebConversation(); response = get(conversation, serviceUrl); if (response.getResponseCode() == 403) // forbidden: a plausible interpretation { return; } // Validate the XML against the schema. log.debug("XML:\r\n" + response.getText()); Document document = buildDocument(response.getText(), true); Element root = document.getRootElement(); assertNotNull("XML returned from GET of " + serviceUrl + " missing uws:jobs element", root); //NodeList list = root.getElementsByTagName("uws:jobref"); //assertEquals(propertiesFilename + " XML returned from GET of " + baseUrl + " contained uws:jobref elements", 0, list.getLength()); log.info("JobsTest.testEmptyJobs completed."); } catch(HttpException ex) { if (ex.getResponseCode() == 403) { log.warn("GET access to job-list was Forbidden (403) -- cannot test output"); } else { log.error("unexpected exception", ex); fail(ex.getMessage()); } } catch (Throwable t) { log.error("unexpected exception", t); fail(t.getMessage()); } } }
projects/cadcTestUWS/src/ca/nrc/cadc/conformance/uws/JobsTest.java
/* ************************************************************************ ******************* CANADIAN ASTRONOMY DATA CENTRE ******************* ************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES ************** * * (c) 2009. (c) 2009. * Government of Canada Gouvernement du Canada * National Research Council Conseil national de recherches * Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6 * All rights reserved Tous droits réservés * * NRC disclaims any warranties, Le CNRC dénie toute garantie * expressed, implied, or énoncée, implicite ou légale, * statutory, of any kind with de quelque nature que ce * respect to the software, soit, concernant le logiciel, * including without limitation y compris sans restriction * any warranty of merchantability toute garantie de valeur * or fitness for a particular marchande ou de pertinence * purpose. NRC shall not be pour un usage particulier. * liable in any event for any Le CNRC ne pourra en aucun cas * damages, whether direct or être tenu responsable de tout * indirect, special or general, dommage, direct ou indirect, * consequential or incidental, particulier ou général, * arising from the use of the accessoire ou fortuit, résultant * software. Neither the name de l'utilisation du logiciel. Ni * of the National Research le nom du Conseil National de * Council of Canada nor the Recherches du Canada ni les noms * names of its contributors may de ses participants ne peuvent * be used to endorse or promote être utilisés pour approuver ou * products derived from this promouvoir les produits dérivés * software without specific prior de ce logiciel sans autorisation * written permission. préalable et particulière * par écrit. * * This file is part of the Ce fichier fait partie du projet * OpenCADC project. OpenCADC. 
* * OpenCADC is free software: OpenCADC est un logiciel libre ; * you can redistribute it and/or vous pouvez le redistribuer ou le * modify it under the terms of modifier suivant les termes de * the GNU Affero General Public la “GNU Affero General Public * License as published by the License” telle que publiée * Free Software Foundation, par la Free Software Foundation * either version 3 of the : soit la version 3 de cette * License, or (at your option) licence, soit (à votre gré) * any later version. toute version ultérieure. * * OpenCADC is distributed in the OpenCADC est distribué * hope that it will be useful, dans l’espoir qu’il vous * but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE * without even the implied GARANTIE : sans même la garantie * warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ * or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF * PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence * General Public License for Générale Publique GNU Affero * more details. pour plus de détails. * * You should have received Vous devriez avoir reçu une * a copy of the GNU Affero copie de la Licence Générale * General Public License along Publique GNU Affero avec * with OpenCADC. If not, see OpenCADC ; si ce n’est * <http://www.gnu.org/licenses/>. pas le cas, consultez : * <http://www.gnu.org/licenses/>. 
* * $Revision: 4 $ * ************************************************************************ */ package ca.nrc.cadc.conformance.uws; import com.meterware.httpunit.WebConversation; import com.meterware.httpunit.WebResponse; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.jdom.Document; import org.jdom.Element; import org.junit.Test; import static org.junit.Assert.*; public class JobsTest extends AbstractUWSTest { private static Logger log = Logger.getLogger(JobsTest.class); public JobsTest() { super(); setLoggingLevel(log); } /* * This test should only be run after the Servlet container for the UWS service * has been restarted. It expects that the UWS service has no Jobs. */ @Test public void testEmptyJobs() throws Exception { try { // Request the UWS service. WebConversation conversation = new WebConversation(); WebResponse response = get(conversation, serviceUrl); // Validate the XML against the schema. log.debug("XML:\r\n" + response.getText()); Document document = buildDocument(response.getText(), true); Element root = document.getRootElement(); assertNotNull("XML returned from GET of " + serviceUrl + " missing uws:jobs element", root); //NodeList list = root.getElementsByTagName("uws:jobref"); //assertEquals(propertiesFilename + " XML returned from GET of " + baseUrl + " contained uws:jobref elements", 0, list.getLength()); log.info("JobsTest.testEmptyJobs completed."); } catch (Throwable t) { log.error(t); fail(t.getMessage()); } } }
changed to UWS-v1.0.xsd, fixed GET job-list test to tolerate a 403 git-svn-id: 311fcc5b8b03427d323cee07bbb9e5a14d8d22e9@712 728ff76a-78ac-11de-a72b-d90af8dea425
projects/cadcTestUWS/src/ca/nrc/cadc/conformance/uws/JobsTest.java
changed to UWS-v1.0.xsd, fixed GET job-list test to tolerate a 403
Java
agpl-3.0
4c0944b9f89c30f7d315eff90ccbde0c199c629f
0
PoweRGbg/AndroidAPS,Heiner1/AndroidAPS,Heiner1/AndroidAPS,jotomo/AndroidAPS,MilosKozak/AndroidAPS,Heiner1/AndroidAPS,MilosKozak/AndroidAPS,winni67/AndroidAPS,jotomo/AndroidAPS,PoweRGbg/AndroidAPS,jotomo/AndroidAPS,Heiner1/AndroidAPS,PoweRGbg/AndroidAPS,winni67/AndroidAPS,MilosKozak/AndroidAPS
package de.jotomo.ruffyscripter.commands; import android.os.SystemClock; import org.monkey.d.ruffy.ruffy.driver.display.MenuAttribute; import org.monkey.d.ruffy.ruffy.driver.display.MenuType; import org.monkey.d.ruffy.ruffy.driver.display.menu.MenuTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Locale; import java.util.Objects; import de.jotomo.ruffyscripter.RuffyScripter; public class SetTbrCommand implements Command { private static final Logger log = LoggerFactory.getLogger(SetTbrCommand.class); private final long percentage; private final long duration; public SetTbrCommand(long percentage, long duration) { this.percentage = percentage; this.duration = duration; if (percentage % 10 != 0) { throw new IllegalArgumentException("TBR percentage must be set in 10% steps"); } if (percentage < 0 || percentage > 500) { throw new IllegalArgumentException("TBR percentage must be within 0-500%"); } if (percentage != 100) { if (duration % 15 != 0) { throw new IllegalArgumentException("TBR duration can only be set in 15 minute steps"); } if (duration > 60 * 24) { throw new IllegalArgumentException("Maximum TBR duration is 24 hours"); } } if (percentage == 0 && duration > 120) { throw new IllegalArgumentException("Max allowed zero-temp duration is 2h"); } } @Override public CommandResult execute(RuffyScripter scripter) { try { scripter.verifyMenuIsDisplayed(MenuType.MAIN_MENU); enterTbrMenu(scripter); inputTbrPercentage(scripter); SystemClock.sleep(500); verifyDisplayedTbrPercentage(scripter); if (percentage == 100) { cancelTbrAndConfirmCancellationWarning(scripter); } else { // switch to TBR_DURATION menu by pressing menu key scripter.pressMenuKey(); scripter.waitForMenuUpdate(); scripter.verifyMenuIsDisplayed(MenuType.TBR_DURATION); inputTbrDuration(scripter); SystemClock.sleep(500); verifyDisplayedTbrDuration(scripter); // confirm TBR scripter.pressCheckKey(); SystemClock.sleep(500); } scripter.verifyMenuIsDisplayed(MenuType.MAIN_MENU, "Pump 
did not return to MAIN_MEU after setting TBR. " + "Check pump manually, the TBR might not have been set/cancelled."); // check main menu shows the same values we just set if (percentage == 100) { verifyMainMenuShowsNoActiveTbr(scripter); return new CommandResult().success(true).enacted(true).message("TBR was cancelled"); } else { verifyMainMenuShowsExpectedTbrActive(scripter); return new CommandResult().success(true).enacted(true).message( String.format(Locale.US, "TBR set to %d%% for %d min", percentage, duration)); } } catch (CommandException e) { return e.toCommandResult(); } } private void enterTbrMenu(RuffyScripter scripter) { scripter.navigateToMenu(MenuType.TBR_MENU); scripter.pressCheckKey(); scripter.waitForMenuUpdate(); scripter.verifyMenuIsDisplayed(MenuType.TBR_SET); } private void inputTbrPercentage(RuffyScripter scripter) { long currentPercent = readDisplayedTbrPercentage(scripter); log.debug("Current TBR %: " + currentPercent); long percentageChange = percentage - currentPercent; long percentageSteps = percentageChange / 10; boolean increasePercentage = true; if (percentageSteps < 0) { increasePercentage = false; percentageSteps = Math.abs(percentageSteps); } log.debug("Pressing " + (increasePercentage ? 
"up" : "down") + " " + percentageSteps + " times"); for (int i = 0; i < percentageSteps; i++) { if (increasePercentage) scripter.pressUpKey(); else scripter.pressDownKey(); SystemClock.sleep(100); log.debug("Push #" + (i + 1)); } } private void verifyDisplayedTbrPercentage(RuffyScripter scripter) { long displayedPercentage = readDisplayedTbrPercentage(scripter); if (displayedPercentage != this.percentage) { log.debug("Final displayed TBR percentage: " + displayedPercentage); throw new CommandException().message("Failed to set TBR percentage"); } } private long readDisplayedTbrPercentage(RuffyScripter scripter) { Object percentageObj = scripter.currentMenu.getAttribute(MenuAttribute.BASAL_RATE); // this as a bit hacky, the display value is blinking, so we might catch that, so // keep trying till we get the Double we want while (!(percentageObj instanceof Double)) { scripter.waitForMenuUpdate(); percentageObj = scripter.currentMenu.getAttribute(MenuAttribute.BASAL_RATE); } return ((Double) percentageObj).longValue(); } private void inputTbrDuration(RuffyScripter scripter) { long currentDuration = readDisplayedTbrDuration(scripter); if (currentDuration % 15 != 0) { // The duration displayed is how long an active TBR will still run, // which might be something like 0:13, hence not in 15 minute steps. // Pressing up will go to the next higher 15 minute step. // Don't press down, from 0:13 it can't go down, so press up. // Pressing up from 23:59 works to go to 24:00. scripter.pressUpKey(); scripter.waitForMenuUpdate(); currentDuration = readDisplayedTbrDuration(scripter); } log.debug("Current TBR duration: " + currentDuration); long durationChange = duration - currentDuration; long durationSteps = durationChange / 15; boolean increaseDuration = true; if (durationSteps < 0) { increaseDuration = false; durationSteps = Math.abs(durationSteps); } log.debug("Pressing " + (increaseDuration ? 
"up" : "down") + " " + durationSteps + " times"); for (int i = 0; i < durationSteps; i++) { if (increaseDuration) scripter.pressUpKey(); else scripter.pressDownKey(); SystemClock.sleep(100); log.debug("Push #" + (i + 1)); } } private void verifyDisplayedTbrDuration(RuffyScripter scripter) { long displayedDuration = readDisplayedTbrDuration(scripter); if (displayedDuration != duration) { log.debug("Final displayed TBR duration: " + displayedDuration); throw new CommandException().message("Failed to set TBR duration"); } } private long readDisplayedTbrDuration(RuffyScripter scripter) { Object durationObj = scripter.currentMenu.getAttribute(MenuAttribute.RUNTIME); // this as a bit hacky, the display value is blinking, so we might catch that, so // keep trying till we get the Double we want while (!(durationObj instanceof MenuTime)) { scripter.waitForMenuUpdate(); durationObj = scripter.currentMenu.getAttribute(MenuAttribute.RUNTIME); } MenuTime duration = (MenuTime) durationObj; return duration.getHour() * 60 + duration.getMinute(); } private void cancelTbrAndConfirmCancellationWarning(RuffyScripter scripter) { // confirm entered TBR scripter.pressCheckKey(); // we could read remaining duration from MAIN_MENU, but but the time we're here, // we could have moved from 0:02 to 0:01, so instead, check if a "TBR CANCELLED alert" // is raised and if so dismiss it long inTwoSeconds = System.currentTimeMillis() + 5 * 1000; boolean alertProcessed = false; while (System.currentTimeMillis() < inTwoSeconds && !alertProcessed) { if (scripter.currentMenu.getType() == MenuType.WARNING_OR_ERROR) { // check the raised alarm is TBR CANCELLED. // note that the message is permanently displayed, while the error code is blinking. 
// wait till the error code can be read results in the code hanging, despite // menu updates coming in, so just check the message String errorMsg = (String) scripter.currentMenu.getAttribute(MenuAttribute.MESSAGE); if (!errorMsg.equals("TBR CANCELLED")) { throw new CommandException().success(false).enacted(false) .message("An alert other than the expected TBR CANCELLED was raised by the pump: " + errorMsg + ". Please check the pump."); } // confirm "TBR CANCELLED alert" scripter.pressCheckKey(); // dismiss "TBR CANCELLED alert" scripter.pressCheckKey(); scripter.waitForMenuToBeLeft(MenuType.WARNING_OR_ERROR); alertProcessed = true; } SystemClock.sleep(10); } } private void verifyMainMenuShowsNoActiveTbr(RuffyScripter scripter) { Double tbrPercentage = (Double) scripter.currentMenu.getAttribute(MenuAttribute.TBR); boolean runtimeDisplayed = scripter.currentMenu.attributes().contains(MenuAttribute.RUNTIME); if (tbrPercentage != 100 || runtimeDisplayed) { throw new CommandException().message("Cancelling TBR failed, TBR is still set according to MAIN_MENU"); } } private void verifyMainMenuShowsExpectedTbrActive(RuffyScripter scripter) { // new TBR set; percentage and duration must be displayed ... if (!scripter.currentMenu.attributes().contains(MenuAttribute.TBR) || !scripter.currentMenu.attributes().contains(MenuAttribute.TBR)) { throw new CommandException().message("Setting TBR failed, according to MAIN_MENU no TBR is active"); } Double mmTbrPercentage = (Double) scripter.currentMenu.getAttribute(MenuAttribute.TBR); MenuTime mmTbrDuration = (MenuTime) scripter.currentMenu.getAttribute(MenuAttribute.RUNTIME); // ... and be the same as what we set // note that displayed duration might have already counted down, e.g. 
from 30 minutes to // 29 minutes and 59 seconds, so that 29 minutes are displayed int mmTbrDurationInMinutes = mmTbrDuration.getHour() * 60 + mmTbrDuration.getMinute(); if (mmTbrPercentage != percentage || (mmTbrDurationInMinutes != duration && mmTbrDurationInMinutes + 1 != duration)) { throw new CommandException().message("Setting TBR failed, TBR in MAIN_MENU differs from expected"); } } @Override public String toString() { return "SetTbrCommand{" + "percentage=" + percentage + ", duration=" + duration + '}'; } }
app/src/main/java/de/jotomo/ruffyscripter/commands/SetTbrCommand.java
package de.jotomo.ruffyscripter.commands; import android.os.SystemClock; import org.monkey.d.ruffy.ruffy.driver.display.MenuAttribute; import org.monkey.d.ruffy.ruffy.driver.display.MenuType; import org.monkey.d.ruffy.ruffy.driver.display.menu.MenuTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Locale; import java.util.Objects; import de.jotomo.ruffyscripter.RuffyScripter; public class SetTbrCommand implements Command { private static final Logger log = LoggerFactory.getLogger(SetTbrCommand.class); private final long percentage; private final long duration; public SetTbrCommand(long percentage, long duration) { this.percentage = percentage; this.duration = duration; if (percentage % 10 != 0) { throw new IllegalArgumentException("TBR percentage must be set in 10% steps"); } if (percentage < 0 || percentage > 500) { throw new IllegalArgumentException("TBR percentage must be within 0-500%"); } if (percentage != 100) { if (duration % 15 != 0) { throw new IllegalArgumentException("TBR duration can only be set in 15 minute steps"); } if (duration > 60 * 24) { throw new IllegalArgumentException("Maximum TBR duration is 24 hours"); } } if (percentage == 0 && duration > 120) { throw new IllegalArgumentException("Max allowed zero-temp duration is 2h"); } } @Override public CommandResult execute(RuffyScripter scripter) { try { scripter.verifyMenuIsDisplayed(MenuType.MAIN_MENU); enterTbrMenu(scripter); inputTbrPercentage(scripter); SystemClock.sleep(500); verifyDisplayedTbrPercentage(scripter); if (percentage == 100) { cancelTbrAndConfirmCancellationWarning(scripter); } else { // switch to TBR_DURATION menu by pressing menu key scripter.pressMenuKey(); scripter.waitForMenuUpdate(); scripter.verifyMenuIsDisplayed(MenuType.TBR_DURATION); inputTbrDuration(scripter); SystemClock.sleep(500); verifyDisplayedTbrDuration(scripter); // confirm TBR scripter.pressCheckKey(); SystemClock.sleep(500); } scripter.verifyMenuIsDisplayed(MenuType.MAIN_MENU, "Pump 
did not return to MAIN_MEU after setting TBR. " + "Check pump manually, the TBR might not have been set/cancelled."); // check main menu shows the same values we just set if (percentage == 100) { verifyMainMenuShowsNoActiveTbr(scripter); return new CommandResult().success(true).enacted(true).message("TBR was cancelled"); } else { verifyMainMenuShowsExpectedTbrActive(scripter); return new CommandResult().success(true).enacted(true).message( String.format(Locale.US, "TBR set to %d%% for %d min", percentage, duration)); } } catch (CommandException e) { return e.toCommandResult(); } } private void enterTbrMenu(RuffyScripter scripter) { scripter.navigateToMenu(MenuType.TBR_MENU); scripter.pressCheckKey(); scripter.waitForMenuUpdate(); scripter.verifyMenuIsDisplayed(MenuType.TBR_SET); } private void inputTbrPercentage(RuffyScripter scripter) { long currentPercent = readDisplayedTbrPercentage(scripter); log.debug("Current TBR %: " + currentPercent); long percentageChange = percentage - currentPercent; long percentageSteps = percentageChange / 10; boolean increasePercentage = true; if (percentageSteps < 0) { increasePercentage = false; percentageSteps = Math.abs(percentageSteps); } log.debug("Pressing " + (increasePercentage ? 
"up" : "down") + " " + percentageSteps + " times"); for (int i = 0; i < percentageSteps; i++) { if (increasePercentage) scripter.pressUpKey(); else scripter.pressDownKey(); SystemClock.sleep(100); log.debug("Push #" + (i + 1)); } } private void verifyDisplayedTbrPercentage(RuffyScripter scripter) { long displayedPercentage = readDisplayedTbrPercentage(scripter); if (displayedPercentage != this.percentage) { log.debug("Final displayed TBR percentage: " + displayedPercentage); throw new CommandException().message("Failed to set TBR percentage"); } } private long readDisplayedTbrPercentage(RuffyScripter scripter) { Object percentageObj = scripter.currentMenu.getAttribute(MenuAttribute.BASAL_RATE); // this as a bit hacky, the display value is blinking, so we might catch that, so // keep trying till we get the Double we want while (!(percentageObj instanceof Double)) { scripter.waitForMenuUpdate(); percentageObj = scripter.currentMenu.getAttribute(MenuAttribute.BASAL_RATE); } return ((Double) percentageObj).longValue(); } private void inputTbrDuration(RuffyScripter scripter) { long currentDuration = readDisplayedTbrDuration(scripter); if (currentDuration % 15 != 0) { // The duration displayed is how long an active TBR will still run, // which might be something like 0:13, hence not in 15 minute steps. // Pressing up will go to the next higher 15 minute step. // Don't press down, from 0:13 it can't go down, so press up. // Pressing up from 23:59 works to go to 24:00. scripter.pressUpKey(); scripter.waitForMenuUpdate(); currentDuration = readDisplayedTbrDuration(scripter); } log.debug("Current TBR duration: " + currentDuration); long durationChange = duration - currentDuration; long durationSteps = durationChange / 15; boolean increaseDuration = true; if (durationSteps < 0) { increaseDuration = false; durationSteps = Math.abs(durationSteps); } log.debug("Pressing " + (increaseDuration ? 
"up" : "down") + " " + durationSteps + " times"); for (int i = 0; i < durationSteps; i++) { if (increaseDuration) scripter.pressUpKey(); else scripter.pressDownKey(); SystemClock.sleep(100); log.debug("Push #" + (i + 1)); } } private void verifyDisplayedTbrDuration(RuffyScripter scripter) { long displayedDuration = readDisplayedTbrDuration(scripter); if (displayedDuration != duration) { log.debug("Final displayed TBR duration: " + displayedDuration); throw new CommandException().message("Failed to set TBR duration"); } } private long readDisplayedTbrDuration(RuffyScripter scripter) { Object durationObj = scripter.currentMenu.getAttribute(MenuAttribute.RUNTIME); // this as a bit hacky, the display value is blinking, so we might catch that, so // keep trying till we get the Double we want while (!(durationObj instanceof MenuTime)) { scripter.waitForMenuUpdate(); durationObj = scripter.currentMenu.getAttribute(MenuAttribute.RUNTIME); } MenuTime duration = (MenuTime) durationObj; return duration.getHour() * 60 + duration.getMinute(); } private void cancelTbrAndConfirmCancellationWarning(RuffyScripter scripter) { // confirm entered TBR scripter.pressCheckKey(); // we could read remaining duration from MAIN_MENU, but but the time we're here, // we could have moved from 0:02 to 0:01, so instead, check if a "TBR CANCELLED alert" // is raised and if so dismiss it scripter.waitForMenuToBeLeft(MenuType.TBR_SET); long inTwoSeconds = System.currentTimeMillis() + 2 * 1000; boolean alertProcessed = false; while (System.currentTimeMillis() < inTwoSeconds && !alertProcessed) { if (scripter.currentMenu.getType() == MenuType.WARNING_OR_ERROR) { // check the raised alarm is TBR CANCELLED. // note that the message is permanently displayed, while the error code is blinking. 
// wait till the error code can be read results in the code hanging, despite // menu updates coming in, so just check the message String errorMsg = (String) scripter.currentMenu.getAttribute(MenuAttribute.MESSAGE); if (!errorMsg.equals("TBR CANCELLED")) { throw new CommandException().success(false).enacted(false) .message("An alert other than the expected TBR CANCELLED was raised by the pump: " + errorMsg + ". Please check the pump."); } // confirm "TBR CANCELLED alert" scripter.pressCheckKey(); SystemClock.sleep(200); // dismiss "TBR CANCELLED alert" scripter.pressCheckKey(); scripter.waitForMenuToBeLeft(MenuType.WARNING_OR_ERROR); alertProcessed = true; } SystemClock.sleep(50); } } private void verifyMainMenuShowsNoActiveTbr(RuffyScripter scripter) { Double tbrPercentage = (Double) scripter.currentMenu.getAttribute(MenuAttribute.TBR); boolean runtimeDisplayed = scripter.currentMenu.attributes().contains(MenuAttribute.RUNTIME); if (tbrPercentage != 100 || runtimeDisplayed) { throw new CommandException().message("Cancelling TBR failed, TBR is still set according to MAIN_MENU"); } } private void verifyMainMenuShowsExpectedTbrActive(RuffyScripter scripter) { // new TBR set; percentage and duration must be displayed ... if (!scripter.currentMenu.attributes().contains(MenuAttribute.TBR) || !scripter.currentMenu.attributes().contains(MenuAttribute.TBR)) { throw new CommandException().message("Setting TBR failed, according to MAIN_MENU no TBR is active"); } Double mmTbrPercentage = (Double) scripter.currentMenu.getAttribute(MenuAttribute.TBR); MenuTime mmTbrDuration = (MenuTime) scripter.currentMenu.getAttribute(MenuAttribute.RUNTIME); // ... and be the same as what we set // note that displayed duration might have already counted down, e.g. 
from 30 minutes to // 29 minutes and 59 seconds, so that 29 minutes are displayed int mmTbrDurationInMinutes = mmTbrDuration.getHour() * 60 + mmTbrDuration.getMinute(); if (mmTbrPercentage != percentage || (mmTbrDurationInMinutes != duration && mmTbrDurationInMinutes + 1 != duration)) { throw new CommandException().message("Setting TBR failed, TBR in MAIN_MENU differs from expected"); } } @Override public String toString() { return "SetTbrCommand{" + "percentage=" + percentage + ", duration=" + duration + '}'; } }
Optimize cancelling TBR.
app/src/main/java/de/jotomo/ruffyscripter/commands/SetTbrCommand.java
Optimize cancelling TBR.
Java
agpl-3.0
ae524a264c36274a49c867d737f1e5fd8eeb3420
0
RapidInfoSys/Rapid,RapidInfoSys/Rapid,RapidInfoSys/Rapid,RapidInfoSys/Rapid
/* Copyright (C) 2020 - Gareth Edwards / Rapid Information Systems [email protected] This file is part of the Rapid Application Platform Rapid is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The terms require you to include the original copyright, and the license notice in all redistributions. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License in a file named "COPYING". If not, see <http://www.gnu.org/licenses/>. */ package com.rapid.core; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.List; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletResponse; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactoryConfigurationError; import org.apache.logging.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.xml.sax.SAXException; import 
com.rapid.actions.Logic.Condition;
import com.rapid.actions.Logic.Value;
import com.rapid.core.Application.RapidLoadingException;
import com.rapid.core.Application.Resource;
import com.rapid.core.Application.ResourceDependency;
import com.rapid.forms.FormAdapter;
import com.rapid.forms.FormAdapter.FormControlValue;
import com.rapid.forms.FormAdapter.FormPageControlValues;
import com.rapid.forms.FormAdapter.UserFormDetails;
import com.rapid.security.SecurityAdapter;
import com.rapid.security.SecurityAdapter.SecurityAdapaterException;
import com.rapid.security.SecurityAdapter.User;
import com.rapid.server.Rapid;
import com.rapid.server.RapidHttpServlet;
import com.rapid.server.RapidRequest;
import com.rapid.server.filter.RapidFilter;
import com.rapid.utils.Files;
import com.rapid.utils.Html;
import com.rapid.utils.Minify;
import com.rapid.utils.XML;

// a single user-designed page of a Rapid application - marshalled to / unmarshalled from .page.xml files by JAXB
@XmlRootElement
@XmlType(namespace="http://rapid-is.co.uk/core")
public class Page {

	// the version of this class's xml structure when marshalled (if we have any significant changes down the line we can upgrade the xml files before unmarshalling)
	public static final int XML_VERSION = 1;

	// form page types
	public static final int FORM_PAGE_TYPE_NORMAL = 0;
	public static final int FORM_PAGE_TYPE_SUBMITTED = 1;
	public static final int FORM_PAGE_TYPE_ERROR = 2;
	public static final int FORM_PAGE_TYPE_SAVE = 3;
	public static final int FORM_PAGE_TYPE_RESUME = 4;

	// a class for retaining page html for a set of user roles - this structure is now deprecated as of Rapid 2.3.5.2 in favour of a more efficient tree structure
	public static class RoleHtml {

		// instance variables
		private List<String> _roles;
		private String _html;

		// properties

		// the roles for which this html applies
		public List<String> getRoles() { return _roles; }
		public void setRoles(List<String> roles) { _roles = roles; }

		// the html to show users holding the above roles
		public String getHtml() { return _html; }
		public void setHtml(String html) { _html = html; }

		// constructors
		public RoleHtml() {}
		public RoleHtml(List<String> roles, String html) {
			_roles = roles;
			_html = html;
		}

	}

	// a class for retaining control html that has user roles
	public static class RoleControlHtml {

		// instance variables
		private String _startHtml, _endHtml;
		private List<String> _roles;
		private List<RoleControlHtml> _children;

		// properties

		public String getStartHtml() { return _startHtml; }
		public void setStartHtml(String startHtml) { _startHtml = startHtml; }

		public String getEndHtml() { return _endHtml; }
		public void setEndHtml(String endHtml) { _endHtml = endHtml; }

		public List<String> getRoles() { return _roles; }
		public void setRoles(List<String> roles) { _roles = roles; }

		public List<RoleControlHtml> getChildren() { return _children; }
		public void setChildren(List<RoleControlHtml> children) { _children = children; }

		// constructors
		public RoleControlHtml() {}

		// builds the tree recursively from its JSON representation; startHtml/endHtml/roles/children are all optional
		public RoleControlHtml(JSONObject jsonRoleControlHtml) throws JSONException {
			_startHtml = jsonRoleControlHtml.optString("startHtml", null);
			_endHtml = jsonRoleControlHtml.optString("endHtml", null);
			JSONArray jsonRoles = jsonRoleControlHtml.optJSONArray("roles");
			if (jsonRoles != null) {
				_roles = new ArrayList<>();
				for (int i = 0; i < jsonRoles.length(); i++) _roles.add(jsonRoles.getString(i));
			}
			JSONArray jsonChildren = jsonRoleControlHtml.optJSONArray("children");
			if (jsonChildren != null) {
				_children = new ArrayList<>();
				for (int i = 0; i < jsonChildren.length(); i++) _children.add( new RoleControlHtml(jsonChildren.getJSONObject(i)));
			}
		}

	}

	// details of a lock that might be on this page
	public static class Lock {

		// instance variables
		private String _userName, _userDescription;
		private Date _dateTime;

		// properties

		public String getUserName() { return _userName; }
		public void setUserName(String userName) { _userName = userName; }

		public String getUserDescription() { return _userDescription; }
		public void setUserDescription(String userDescription) { _userDescription = userDescription; }

		public Date getDateTime() { return _dateTime; }
		public void setDateTime(Date dateTime) { _dateTime = dateTime; }

		// constructors
		public Lock() {}
		public Lock(String userName, String userDescription, Date dateTime) {
			_userName = userName;
			_userDescription = userDescription;
			_dateTime = dateTime;
		}

	}

	// instance variables
	private int _xmlVersion, _formPageType;
	private String _id, _name, _title, _label, _description, _createdBy, _modifiedBy, _htmlBody, _bodyStyleClasses, _cachedHeadLinks, _cachedHeadCSS, _cachedHeadReadyJS, _cachedHeadJS, _eTag;
	private boolean _simple, _hideHeaderFooter;
	private Date _createdDate, _modifiedDate;
	private List<Control> _controls, _reCaptchaControls;
	private List<Event> _events;
	private List<Style> _styles;
	private List<String> _controlTypes, _actionTypes, _sessionVariables, _roles;
	private List<RoleHtml> _rolesHtml;
	private RoleControlHtml _roleControlHtml;
	private List<Condition> _visibilityConditions;
	private String _conditionsType;
	private Lock _lock;
	private List<String> _formControlValues;
	private List<String> _dialoguePageIds;
	// this array is used to collect all of the lines needed in the pageload before sorting them
	private List<String> _pageloadLines;

	// properties

	// the xml version is used to upgrade xml files before unmarshalling (we use a property so it's written into xml)
	public int getXMLVersion() { return _xmlVersion; }
	public void setXMLVersion(int xmlVersion) { _xmlVersion = xmlVersion; }

	// the id uniquely identifies the page (it is quite short and is concatenated to control id's so more than one page's control's can be working in a document at one time)
	public String getId() { return _id; }
	public void setId(String id) { _id = id; }

	// this is expected to be short name, probably even a code that is used by users to simply identify pages (also becomes the file name)
	public String getName() { return _name; }
	public void setName(String name) { _name = name; }

	// this is a user-friendly, long title
	public String getTitle() { return _title; }
	public void setTitle(String title) { _title = title; }

	// the form page type, most will be normal but we show special pages for after submission, error, and saved
	public int getFormPageType() { return _formPageType; }
	public void setFormPageType(int formPageType) { _formPageType = formPageType; }

	// this is the label to use in the form summary
	public String getLabel() { return _label; }
	public void setLabel(String label) { _label = label; }

	// an even longer description of what this page does
	public String getDescription() { return _description; }
	public void setDescription(String description) { _description = description; }

	// simple pages do not have any events and can be used in page panels without dynamically loading them via ajax
	public boolean getSimple() { return _simple; }
	public void setSimple(boolean simple) { _simple = simple; }

	// whether to hide any theme header / footer
	public boolean getHideHeaderFooter() { return _hideHeaderFooter; }
	public void setHideHeaderFooter(boolean hideHeaderFooter) { _hideHeaderFooter = hideHeaderFooter; }

	// the user that created this page (or archived page)
	public String getCreatedBy() { return _createdBy; }
	public void setCreatedBy(String createdBy) { _createdBy = createdBy; }

	// the date this page (or archive) was created
	public Date getCreatedDate() { return _createdDate; }
	public void setCreatedDate(Date createdDate) { _createdDate = createdDate; }

	// the last user to save this application
	public String getModifiedBy() { return _modifiedBy; }
	public void setModifiedBy(String modifiedBy) { _modifiedBy = modifiedBy; }

	// the date this application was last saved
	public Date getModifiedDate() { return _modifiedDate; }
	public void setModifiedDate(Date modifiedDate) { _modifiedDate = modifiedDate; }

	// the html for this page
	public String getHtmlBody() { return _htmlBody; }
	public void setHtmlBody(String htmlBody) { _htmlBody = htmlBody; }

	// any style classes for this pages body element
	public String getBodyStyleClasses() { return _bodyStyleClasses; }
	public void setBodyStyleClasses(String bodyStyleClasses) {
_bodyStyleClasses = bodyStyleClasses; } // the child controls of the page public List<Control> getControls() { return _controls; } public void setControls(List<Control> controls) { _controls = controls; } // the page events and actions public List<Event> getEvents() { return _events; } public void setEvents(List<Event> events) { _events = events; } // the page styles public List<Style> getStyles() { return _styles; } public void setStyles(List<Style> styles) { _styles = styles; } // session variables used by this page (navigation actions are expected to pass them in) public List<String> getSessionVariables() { return _sessionVariables; } public void setSessionVariables(List<String> sessionVariables) { _sessionVariables = sessionVariables; } // the roles required to view this page public List<String> getRoles() { return _roles; } public void setRoles(List<String> roles) { _roles = roles; } // list of different page html for different possible role combinations - this is depreciated from Rapid 2.3.5.3 public List<RoleHtml> getRolesHtml() { return _rolesHtml; } public void setRolesHtml(List<RoleHtml> rolesHtml) { _rolesHtml = rolesHtml; } // page html for different possible role combinations - this is depreciated from Rapid 2.3.5.3 public RoleControlHtml getRoleControlHtml() { return _roleControlHtml; } public void setRoleControlHtml(RoleControlHtml roleControlHtml) { _roleControlHtml = roleControlHtml; } // any lock that might be on this page public Lock getLock() { return _lock; } public void setLock(Lock lock) { _lock = lock; } // the page visibility rule conditions public List<Condition> getVisibilityConditions() { return _visibilityConditions; } public void setVisibilityConditions(List<Condition> visibilityConditions) { _visibilityConditions = visibilityConditions; } // the type (and/or) of the page visibility conditions - named so can be shared with logic action public String getConditionsType() { return _conditionsType; } public void setConditionsType(String 
conditionsType) { _conditionsType = conditionsType; } // the etag used to send 304 not modified if caching is turned off public String getETag() { return _eTag; } // constructor public Page() { // set the xml version _xmlVersion = XML_VERSION; // set the eTag _eTag = Long.toString(new Date().getTime()); } // instance methods public String getFile(ServletContext servletContext, Application application) { return application.getConfigFolder(servletContext) + "/" + "/pages/" + Files.safeName(_name + ".page.xml"); } public void addControl(Control control) { if (_controls == null) _controls = new ArrayList<>(); _controls.add(control); } public Control getControl(int index) { if (_controls == null) return null; return _controls.get(index); } // an iterative function for tree-walking child controls when searching for one public Control getChildControl(List<Control> controls, String controlId) { Control foundControl = null; if (controls != null) { for (Control control : controls) { if (controlId.equals(control.getId())) { foundControl = control; break; } else { foundControl = getChildControl(control.getChildControls(), controlId); if (foundControl != null) break; } } } return foundControl; } // uses the tree walking function above to find a particular control public Control getControl(String id) { return getChildControl(_controls, id); } public void getChildControls(List<Control> controls, List<Control> childControls) { if (controls != null) { for (Control control : controls) { childControls.add(control); getChildControls(control.getChildControls(), childControls); } } } public List<Control> getAllControls() { ArrayList<Control> controls = new ArrayList<>(); getChildControls(_controls, controls); return controls; } // find an action from a list of actions, including checking child actions public Action getChildAction(List<Action> actions, String actionId) { Action foundAction = null; if (actions != null) { for (Action action : actions) { if (action != null) { if 
(actionId.equals(action.getId())) return action;
					foundAction = getChildAction(action.getChildActions(), actionId);
					if (foundAction != null) break;
				}
			}
		}
		return foundAction;
	}

	// find an action amongst a controls events - faster to use if we have the control already
	public Action getChildEventsAction(List<Event> events, String actionId) {
		Action foundAction = null;
		if (events != null) {
			for (Event event : events) {
				if (event.getActions() != null) {
					foundAction = getChildAction(event.getActions(), actionId);
					if (foundAction != null) break;
				}
			}
		}
		return foundAction;
	}

	// an iterative function for tree-walking child controls when searching for a specific action
	public Action getChildControlsAction(List<Control> controls, String actionId) {
		Action foundAction = null;
		if (controls != null) {
			for (Control control : controls) {
				// look in the control events for the action
				foundAction = getChildEventsAction(control.getEvents(), actionId);
				// if we didn't get the action
				if (foundAction == null) {
					// look in the child controls
					foundAction = getChildControlsAction(control.getChildControls(), actionId);
				}
				// we're done!
				if (foundAction != null) break;
			}
		}
		return foundAction;
	}

	// find an action in the page by its id
	public Action getAction(String id) {
		// check the page actions first
		if (_events != null) {
			for (Event event : _events) {
				if (event.getActions() != null) {
					Action action = getChildAction(event.getActions(), id);
					if (action != null) return action;
				}
			}
		}
		// uses the tree walking function above to find a particular action
		return getChildControlsAction(_controls, id);
	}

	// recursively append to a list of actions from an action and it's children
	public void getChildActions(List<Action> actions, Action action, String type, boolean isWebserviceOnly) {
		// check if web service actions only
		if (!isWebserviceOnly || action.isWebService()) {
			// check there is a type
			if (type == null) {
				// no type so add this action
				actions.add(action);
			} else {
				// if types match
				if (type.equals(action.getType())) {
					// add action
					actions.add(action);
				}
			}
			// check there are child actions
			if (action.getChildActions() != null) {
				// loop them
				for (Action childAction : action.getChildActions()) {
					// add their actions too
					if (childAction != null) getChildActions(actions, childAction, type, isWebserviceOnly);
				}
			}
		}
	}

	// override for the above
	public void getChildActions(List<Action> actions, Action action, boolean isWebserviceOnly) {
		getChildActions(actions, action, null, isWebserviceOnly);
	}

	// recursively append to a list of actions from a control and it's children
	public void getChildActions(List<Action> actions, Control control, String type, boolean isWebserviceOnly) {
		// check this control has events
		if (control.getEvents() != null) {
			for (Event event : control.getEvents()) {
				// add any actions to the list
				if (event.getActions() != null) {
					// loop the actions
					for (Action action : event.getActions()) {
						// add any child actions too
						if (action != null) getChildActions(actions, action, type, isWebserviceOnly);
					}
				}
			}
		}
		// check if we have any child controls
		if (control.getChildControls() != null) {
			// loop the child controls
			for (Control childControl : control.getChildControls()) {
				// add their actions too
				getChildActions(actions, childControl, type, isWebserviceOnly);
			}
		}
	}

	// override for the above
	public void getChildActions(List<Action> actions, Control control, boolean isWebserviceOnly) {
		getChildActions(actions, control, null, isWebserviceOnly);
	}

	// add actions and child actions
	public void addAction(List<Action> actions, Action action, boolean webServiceOnly) {
		// if we have an action
		if (action != null) {
			// add it
			if (!webServiceOnly || action.isWebService()) actions.add(action);
			// get any child actions
			List<Action> childActions = action.getChildActions();
			// if there were some
			if (childActions != null) {
				// loop the children
				for (Action childAction : childActions) {
					// add this action recursively
					addAction(actions, childAction, webServiceOnly);
				}
			}
		}
	}

	// get all actions in the page of a specified type, sorted by action id
	public List<Action> getAllActions(String type, boolean webServiceOnly) {
		// instantiate the list we're going to return
		List<Action> actions = new ArrayList<>();
		// check the page events first
		if (_events != null) {
			for (Event event : _events) {
				// get any event actions
				List<Action> eventActions = event.getActions();
				// if we got some
				if (eventActions != null) {
					// if type is null
					if (type == null) {
						// loop actions
						for (Action eventAction : eventActions) {
							// add this action, including it's children
							addAction(actions, eventAction, webServiceOnly);
						}
					} else {
						// loop them
						for (Action eventAction : eventActions) {
							// if right type
							if (type.equals(eventAction.getType())) {
								// add this action, including it's children
								addAction(actions, eventAction, webServiceOnly);
							}
							// Child actions - NOTE(review): only one level of children is scanned for the type here; deeper descendants of a non-matching action are not checked - confirm this is intended
							List<Action> eventActionChildren = eventAction.getChildActions();
							if (eventActionChildren != null) {
								for (Action childAction : eventActionChildren) {
									if (type.equals(childAction.getType())) {
										addAction(actions, childAction, webServiceOnly);
									}
								}
							}
						}
					}
				}
			}
		}
		// uses the tree walking function above to add all actions
		if (_controls != null) {
			for (Control control : _controls) {
				getChildActions(actions, control, type, webServiceOnly);
			}
		}
		// sort them by action id - the number after the last "_A" in the id is compared numerically
		Collections.sort(actions, new Comparator<Action>() {
			@Override
			public int compare(Action obj1, Action obj2) {
				if (obj1 == null) return -1;
				if (obj2 == null) return 1;
				if (obj1.equals(obj2)) return 0;
				String id1 = obj1.getId();
				String id2 = obj2.getId();
				if (id1 == null) return -1;
				if (id2 == null) return -1;
				int startPos = id1.lastIndexOf("_A");
				if (startPos < 0) return -1;
				int endPos = id1.indexOf("_", startPos + 2);
				if (endPos < 0) endPos = id1.length();
				id1 = id1.substring(startPos + 2, endPos);
				startPos = id2.lastIndexOf("_A");
				if (startPos < 0) return 1;
				endPos = id2.indexOf("_", startPos + 2);
				if (endPos < 0) endPos = id2.length();
				id2 = id2.substring(startPos + 2, endPos);
				// NOTE(review): throws NumberFormatException if the id segment is not numeric, and int subtraction can overflow for very large ids - confirm ids are always small numbers
				return (Integer.parseInt(id1) - Integer.parseInt(id2));
			}
		});
		return actions;
	}

	// get all actions in the page
	public List<Action> getAllActions() {
		// override for the above
		return getAllActions(null, false);
	}

	// get all actions in the page of a certain type
	public List<Action> getAllActions(String type) {
		// override for the above
		return getAllActions(type, false);
	}

	// get all web-service actions in the page
	public List<Action> getAllWebServiceActions() {
		// override for the above
		return getAllActions(null, true);
	}

	// an iterative function for tree-walking child controls when searching for a specific action's control
	public Control getChildControlActionControl(List<Control> controls, String actionId) {
		Control foundControl = null;
		if (controls != null) {
			for (Control control : controls) {
				if (control.getEvents() != null) {
					for (Event event : control.getEvents()) {
						if (event.getActions() != null) {
							for (Action action : event.getActions()) {
								if (actionId.equals(action.getId())) return control;
							}
						}
					}
				}
				foundControl = getChildControlActionControl(control.getChildControls(), actionId);
				if (foundControl
!= null) break; } } return foundControl; } // find an action's control in the page by its id public Control getActionControl(String actionId) { // uses the tree walking function above to the find a particular action return getChildControlActionControl(_controls, actionId); } // an iterative function for tree-walking child controls when searching for a specific action's control public Event getChildControlActionEvent(List<Control> controls, String actionId) { Event foundEvent = null; if (controls != null) { for (Control control : controls) { if (control.getEvents() != null) { for (Event event : control.getEvents()) { if (event.getActions() != null) { for (Action action : event.getActions()) { if (actionId.equals(action.getId())) return event; } } } } foundEvent = getChildControlActionEvent(control.getChildControls(), actionId); if (foundEvent != null) break; } } return foundEvent; } // find an action in the page by its id public Event getActionEvent(String actionId) { // check the page actions first if (_events != null) { for (Event event : _events) { if (event.getActions() != null) { for (Action action : event.getActions()) { if (actionId.equals(action.getId())) return event; } } } } // uses the tree walking function above to the find a particular action return getChildControlActionEvent(_controls, actionId); } // gets the pages that this page can navigate to as a dialogue - we check all pages to see which can come back public List<String> getDialoguePageIds() { // if the internal variable has not been initialised yet if (_dialoguePageIds == null) _dialoguePageIds = new ArrayList<>(); // get all navigation actions on this page List<Action> actions = getAllActions("navigate"); // loop them for (Action action : actions) { // if this is a dialogue if (Boolean.parseBoolean(action.getProperty("dialogue"))) { // get the page id String pageId = action.getProperty("page"); // if we got one if (pageId != null) { // add if it is something if (pageId.length() > 0) 
_dialoguePageIds.add(pageId); } } } return _dialoguePageIds; } // iterative function for building a flat JSONArray of controls that can be used on other pages, will also add events if including from a dialogue private void getOtherPageControls(RapidHttpServlet rapidServlet, JSONArray jsonControls, List<Control> controls, boolean includePageVisibiltyControls, Boolean includeFromDialogue) throws JSONException { // check we were given some controls if (controls != null) { // loop the controls for (Control control : controls) { // get if this control can be used from other pages boolean canBeUsedFromOtherPages = control.getCanBeUsedFromOtherPages(); // get if this control can be used for page visibility boolean canBeUsedForFormPageVisibilty = control.getCanBeUsedForFormPageVisibilty() && includePageVisibiltyControls; // if this control can be used from other pages if (canBeUsedFromOtherPages || canBeUsedForFormPageVisibilty || includeFromDialogue) { // get the control details JSONObject jsonControlClass = rapidServlet.getJsonControl(control.getType()); // check we got one if (jsonControlClass != null) { // get the name String controlName = control.getName(); // no need to include if we don't have one if (controlName != null && controlName.trim().length() > 0) { // make a JSON object with what we need about this control JSONObject jsonControl = new JSONObject(); jsonControl.put("id", control.getId()); jsonControl.put("type", control.getType()); jsonControl.put("name", controlName); if (jsonControlClass.optString("getDataFunction", null) != null) jsonControl.put("input", true); if (jsonControlClass.optString("setDataJavaScript", null) != null) jsonControl.put("output", true); if (canBeUsedFromOtherPages) jsonControl.put("otherPages", true); if (canBeUsedForFormPageVisibilty) jsonControl.put("pageVisibility", true); if (control.getProperty("formObjectAddressNumber") != null) jsonControl.put("formObjectAddressNumber", control.getProperty("formObjectAddressNumber")); if 
(control.getProperty("formObjectPartyNumber") != null) jsonControl.put("formObjectPartyNumber", control.getProperty("formObjectPartyNumber")); // look for any runtimeProperties JSONObject jsonProperty = jsonControlClass.optJSONObject("runtimeProperties"); // if we got some if (jsonProperty != null) { // create an array to hold the properties JSONArray jsonRunTimeProperties = new JSONArray(); // look for an array too JSONArray jsonProperties = jsonProperty.optJSONArray("runtimeProperty"); // assume int index = 0; int count = 0; // if an array if (jsonProperties != null) { // get the first item jsonProperty = jsonProperties.getJSONObject(index); // set the count count = jsonProperties.length(); } // look for a single object JSONObject jsonPropertySingle = jsonProperty.optJSONObject("runtimeProperty"); // assume this one if not null if (jsonPropertySingle != null) jsonProperty = jsonPropertySingle; // do once and loop until no more left do { // create a json object for this runtime property JSONObject jsonRuntimeProperty = new JSONObject(); jsonRuntimeProperty.put("type", jsonProperty.get("type")); jsonRuntimeProperty.put("name", jsonProperty.get("name")); if (jsonProperty.optString("getPropertyFunction", null) != null) jsonRuntimeProperty.put("input", true); if (jsonProperty.optString("setPropertyJavaScript", null) != null) jsonRuntimeProperty.put("output", true); if (jsonProperty.optBoolean("canBeUsedForFormPageVisibilty")) jsonRuntimeProperty.put("visibility", true); // add to the collection - note further check for dialogue controls having to add in jsonRunTimeProperties.put(jsonRuntimeProperty); // increment the index index ++; // get the next item if there's one there if (index < count) jsonProperty = jsonProperties.getJSONObject(index); } while (index < count); // add the properties to what we're returning jsonControl.put("runtimeProperties", jsonRunTimeProperties); } // property loop // if we are including from dialogue if (includeFromDialogue) { // set the 
other pages property so we see it in the designer jsonControl.put("otherPages", true); // get any events for this control List<Event> events = control.getEvents(); // if we got some if (events != null) { // an array of events JSONArray jsonEvents = new JSONArray(); // loop them for (Event event : events) { // get any actions List<Action> actions = event.getActions(); // if there were some if (actions != null) { // if there were some if (actions.size() > 0) { // make a jsonArray for the actions JSONArray jsonActions = new JSONArray(); // loop the actions for (Action action : actions) { // make a json object for the action JSONObject jsonAction = new JSONObject(); // add id jsonAction.put("id", action.getId()); // add type jsonAction.put("type", action.getType()); // add to array jsonActions.put(jsonAction); } // make a jsonObject for this event JSONObject jsonEvent = new JSONObject(); // add the event id jsonEvent.put("type", event.getType()); // add the jsonActions to the event jsonEvent.put("actions", jsonActions); // as jsonEvent to collection jsonEvents.put(jsonEvent); } } } // add the jsonEvents to the control if we got some if (jsonEvents.length() > 0) jsonControl.put("events",jsonEvents); } } // add it to the collection we are returning straight away jsonControls.put(jsonControl); } // name check } // control class check } // other page or visibility check // run for any child controls getOtherPageControls(rapidServlet, jsonControls, control.getChildControls(), includePageVisibiltyControls, includeFromDialogue); } } } // uses the above iterative method to return an object with flat array of controls in this page that can be used from other pages, for use in the designer public JSONArray getOtherPageComponents(RapidHttpServlet rapidServlet, boolean includePageVisibiltyControls, boolean includeFromDialogue) throws JSONException { // the list of controls we're about to return JSONArray controls = new JSONArray(); if (includeFromDialogue) { // make a JSON object 
with what we need about this page JSONObject jsonPageControl = new JSONObject(); jsonPageControl.put("id", _id); jsonPageControl.put("type", "page"); jsonPageControl.put("name", _name); // if we have events, add them too if (_events != null && _events.size() > 0) jsonPageControl.put("events", getJsonEvents(_events)); // add the page control to the list controls.put(jsonPageControl); } // start building the array using the page controls getOtherPageControls(rapidServlet, controls, _controls, includePageVisibiltyControls, includeFromDialogue); // return the components return controls; } private static JSONArray getJsonEvents(List<Event> events) throws JSONException { // an array of events JSONArray jsonEvents = new JSONArray(); // if we got some if (events != null) { // loop them for (Event event : events) { // make a jsonObject for this event JSONObject jsonEvent = new JSONObject(); // get any actions List<Action> actions = event.getActions(); // if there were some if (actions != null) { // if there were some if (actions.size() > 0) { // make a jsonArray for the actions JSONArray jsonActions = new JSONArray(); // loop the actions for (Action action : actions) { // make a json object for the action JSONObject jsonAction = new JSONObject(); // add id jsonAction.put("id", action.getId()); // add type jsonAction.put("type", action.getType()); // add comments jsonAction.put("type", action.getProperty("comments")); // add to array jsonActions.put(jsonAction); } // add the event id jsonEvent.put("type", event.getType()); // add the jsonActions to the event jsonEvent.put("actions", jsonActions); // as jsonEvent to collection jsonEvents.put(jsonEvent); } } } } return jsonEvents; } // used to turn either a page or control style into text for the css file public String getStyleCSS(Style style) { // start the text we are going to return String css = ""; // get the style rules ArrayList<String> rules = style.getRules(); // check we have some if (rules != null) { if (rules.size() 
> 0) { // add the style css = style.getAppliesTo().trim() + " {\n"; // check we have // loop and add the rules for (String rule : rules) { css += "\t" + rule.trim() + "\n"; } css += "}\n\n"; } } // return the css return css; } // an iterative function for tree-walking child controls when building the page styles public void getChildControlStyles(List<Control> controls, StringBuilder stringBuilder) { if (controls != null) { for (Control control : controls) { // look for styles ArrayList<Style> controlStyles = control.getStyles(); if (controlStyles != null) { // loop the styles for (Style style : controlStyles) { // get some nice text for the css stringBuilder.append(getStyleCSS(style)); } } // try and call on any child controls getChildControlStyles(control.getChildControls(), stringBuilder); } } } public String getAllCSS(ServletContext servletContext, Application application) { // the stringbuilder we're going to use StringBuilder stringBuilder = new StringBuilder(); // check if the page has styles if (_styles != null) { // loop for (Style style: _styles) { stringBuilder.append(getStyleCSS(style)); } } // use the iterative tree-walking function to add all of the control styles getChildControlStyles(_controls, stringBuilder); // return it with inserted parameters return application.insertParameters(servletContext, stringBuilder.toString()); } public List<String> getAllActionTypes() { List<String> actionTypes = new ArrayList<>(); List<Action> actions = getAllActions(); if (actions != null) { for (Action action : actions) { String actionType = action.getType(); if (!actionTypes.contains(actionType)) actionTypes.add(actionType); } } return actionTypes; } public List<String> getAllControlTypes() { List<String> controlTypes = new ArrayList<>(); controlTypes.add("page"); List<Control> controls = getAllControls(); if (controls != null) { for (Control control : controls) { String controlType = control.getType(); if (!controlTypes.contains(controlType)) 
controlTypes.add(controlType); } } return controlTypes; } // removes the page lock if it is more than 1 hour old public void checkLock() { // only check if there is one present if (_lock != null) { // get the time now Date now = new Date(); // get the time an hour after the lock time Date lockExpiry = new Date(_lock.getDateTime().getTime() + 1000 * 60 * 60); // if the lock expiry has passed set the lock to null; if (now.after(lockExpiry)) _lock = null; } } public void backup(RapidHttpServlet rapidServlet, RapidRequest rapidRequest, Application application, File pageFile, boolean delete) throws IOException { // get the user name String userName = Files.safeName(rapidRequest.getUserName()); // create folders to archive the pages String archivePath = application.getBackupFolder(rapidServlet.getServletContext(), delete); File archiveFolder = new File(archivePath); if (!archiveFolder.exists()) archiveFolder.mkdirs(); SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd_HHmmss"); String dateString = formatter.format(new Date()); // create a file object for the archive file File archiveFile = new File(archivePath + "/" + Files.safeName(_name + "_" + dateString + "_" + userName + ".page.xml")); // copy the existing new file to the archive file Files.copyFile(pageFile, archiveFile); } public void deleteBackup(RapidHttpServlet rapidServlet, Application application, String backupId) { // create the path String backupPath = application.getBackupFolder(rapidServlet.getServletContext(), false) + "/" + backupId; // create the file File backupFile = new File(backupPath); // delete Files.deleteRecurring(backupFile); } public long save(RapidHttpServlet rapidServlet, RapidRequest rapidRequest, Application application, boolean backup) throws JAXBException, IOException { // create folders to save the pages String pagePath = application.getConfigFolder(rapidServlet.getServletContext()) + "/pages"; File pageFolder = new File(pagePath); if (!pageFolder.exists()) 
pageFolder.mkdirs(); // create a file object for the new file File newFile = new File(pagePath + "/" + Files.safeName(getName() + ".page.xml")); // if we want a backup and the new file already exists it needs archiving if (backup && newFile.exists()) backup(rapidServlet, rapidRequest, application, newFile, false); // create a file for the temp file File tempFile = new File(pagePath + "/" + Files.safeName(getName() + "-saving.page.xml")); // update the modified by and date _modifiedBy = rapidRequest.getUserName(); _modifiedDate = new Date(); // get a buffered writer for our page with UTF-8 file format BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(tempFile), "UTF-8")); try { // marshall the page object into the temp file rapidServlet.getMarshaller().marshal(this, bw); } catch (JAXBException ex) { // close the file writer bw.close(); // re throw the exception throw ex; } // close the file writer bw.close(); // copy the tempFile to the newFile Files.copyFile(tempFile, newFile); // store the size of the file writter long fileSize = tempFile.length(); // delete the temp file tempFile.delete(); // replace the old page with the new page application.getPages().addPage(this, newFile, application.getIsForm()); // empty the cached page html _cachedHeadLinks = null; _cachedHeadCSS = null; _cachedHeadReadyJS = null; _cachedHeadJS = null; // empty the cached action types _actionTypes = null; // empty the cached control types _controlTypes = null; // empty the page variables so they are rebuilt the next time application.emptyPageVariables(); return fileSize; } public void delete(RapidHttpServlet rapidServlet, RapidRequest rapidRequest, Application application) throws JAXBException, IOException { // create folders to delete the page String pagePath = application.getConfigFolder(rapidServlet.getServletContext()) + "/pages"; // create a file object for the delete file File delFile = new File(pagePath + "/" + Files.safeName(getName() + 
".page.xml")); // if the new file already exists it needs archiving if (delFile.exists()) { // archive the page file backup(rapidServlet, rapidRequest, application, delFile, false); // delete the page file delFile.delete(); // remove it from the current list of pages application.getPages().removePage(_id); } // get the resources path String resourcesPath = application.getWebFolder(rapidServlet.getServletContext()); // create a file object for deleting the page css file File delCssFile = new File(resourcesPath + "/" + Files.safeName(getName() + ".css")); // delete if it exists if (delCssFile.exists()) delCssFile.delete(); // create a file object for deleting the page css file File delCssFileMin = new File(resourcesPath + "/" + Files.safeName(getName() + ".min.css")); // delete if it exists if (delCssFileMin.exists()) delCssFileMin.delete(); } // this includes functions to iteratively call any control initJavaScript and set up any event listeners private void getPageLoadLines(List<String> pageloadLines, List<Control> controls) throws JSONException { // if we have controls if (controls != null) { // loop controls for (Control control : controls) { // check for any initJavaScript to call if (control.hasInitJavaScript()) { // get any details we may have String details = control.getDetails(); // set to empty string or clean up if (details == null) { details = ""; } else { details = ", " + control.getId() + "details"; } // write an init call method with support for older controls that may not have had the init method pageloadLines.add("Init_" + control.getType() + "('" + control.getId() + "'" + details + ");\n"); } // check event actions if (control.getEvents() != null) { // loop events for (Event event : control.getEvents()) { // only if event is non-custom and there are actually some actions to invoke if (!event.isCustomType() && event.getActions() != null) { if (event.getActions().size() > 0) { // add any page load lines from this 
pageloadLines.add(event.getPageLoadJavaScript(control)); } } } } // now call iteratively for child controls (of this [child] control, etc.) if (control.getChildControls() != null) getPageLoadLines(pageloadLines, control.getChildControls()); } } } // the html for a specific resource public String getResourceHtml(Application application, Resource resource) { // assume we couldn't make the resource html String resourceHtml = null; // set the link according to the type switch (resource.getType()) { case Resource.JAVASCRIPT: if (application.getStatus() == Application.STATUS_LIVE) { try { resourceHtml = " <script type='text/javascript'>" + Minify.toString(resource.getContent(),Minify.JAVASCRIPT, "JavaScript resource " + resource.getName()) + "</script>"; } catch (IOException ex) { resourceHtml = " <script type='text/javascript'>/* Failed to minify resource " + resource.getName() + " JavaScript : " + ex.getMessage() + "*/</script>"; } } else { resourceHtml = " <script type='text/javascript'>\n" + resource.getContent() + "\n </script>"; } break; case Resource.CSS: if (application.getStatus() == Application.STATUS_LIVE) { try { resourceHtml = " <style>" + Minify.toString(resource.getContent(), Minify.CSS, "") + "</style>"; } catch (IOException ex) { resourceHtml = " <style>/* Failed to minify resource " + resource.getName() + " CSS : " + ex.getMessage() + "*/<style>"; } } else { resourceHtml = " <style>" + resource.getContent() + "<style>"; } break; case Resource.JAVASCRIPTFILE : case Resource.JAVASCRIPTLINK : resourceHtml = " <script type='text/javascript' src='" + resource.getContent() + "'></script>"; break; case Resource.CSSFILE : case Resource.CSSLINK : resourceHtml = " <link rel='stylesheet' type='text/css' href='" + resource.getContent() + "'></link>"; break; } // return it return resourceHtml; } // the resources for the page public String getResourcesHtml(Application application, boolean allResources) { StringBuilder stringBuilder = new StringBuilder(); // get all 
// ...action types used in this page (comment continues from the previous line)
if (_actionTypes == null) _actionTypes = getAllActionTypes();
// get all control types used in this page
if (_controlTypes == null) _controlTypes = getAllControlTypes();
// manage the resources links added already so we don't add twice
ArrayList<String> addedResources = new ArrayList<>();
// if this application has resources add during initialisation
if (application.getResources() != null) {
    // loop and add the resources required by this application's controls and actions (created when application loads)
    for (Resource resource : application.getResources()) {
        // if we want all the resources (for the designer) or there is a dependency for this resource
        if (allResources || resource.hasDependency(ResourceDependency.RAPID) || resource.hasDependency(ResourceDependency.ACTION, _actionTypes) || resource.hasDependency(ResourceDependency.CONTROL, _controlTypes)) {
            // the html we're hoping to get
            String resourceHtml = getResourceHtml(application, resource);
            // if we got some html and don't have it already (de-duplicates identical links)
            if (resourceHtml != null && !addedResources.contains(resourceHtml)) {
                // append it
                stringBuilder.append(resourceHtml + "\n");
                // remember we've added it
                addedResources.add(resourceHtml);
            }
        } // dependency check
    } // resource loop
} // has resources
return stringBuilder.toString();
}

// writes an Event_<type>_<id> client-side JavaScript function for one event into stringBuilder,
// plus any redundancy-avoiding Action_<id> functions which are emitted before the event function
private void getEventJavaScriptFunction(RapidRequest rapidRequest, StringBuilder stringBuilder, Application application, Control control, Event event) {
    // check actions are initialised
    if (event.getActions() != null) {
        // check there are some to loop
        if (event.getActions().size() > 0) {
            // create actions separately to avoid redundancy
            StringBuilder actionStringBuilder = new StringBuilder();
            StringBuilder eventStringBuilder = new StringBuilder();
            // start the function name
            String functionName = "Event_" + event.getType() + "_";
            // if this is the page (no control) use the page id, otherwise use the controlId
            if (control == null) {
                // append the page id
                functionName += _id;
            } else {
                // append the control id
                functionName += control.getId();
            }
            // create a function for running the actions for this controls events
            eventStringBuilder.append("function " + functionName + "(ev) {\n");
            // open a try/catch
            eventStringBuilder.append(" try {\n");
            // get any filter javascript
            String filter = event.getFilter();
            // if we have any add it now
            if (filter != null) {
                // only bother if not an empty string
                if (!"".equals(filter)) {
                    eventStringBuilder.append(" " + filter.trim().replace("\n", "\n ") + "\n");
                }
            }
            // loop the actions and produce the handling JavaScript
            for (Action action : event.getActions()) {
                try {
                    // get the action client-side java script from the action object (it's generated there as it can contain values stored in the object on the server side)
                    String actionJavaScript = action.getJavaScriptWithHeader(rapidRequest, application, this, control, null);
                    // if non null
                    if (actionJavaScript != null) {
                        // trim it to avoid tabs and line breaks that might sneak in
                        actionJavaScript = actionJavaScript.trim();
                        // only if what we got is not an empty string
                        if (!("").equals(actionJavaScript)) {
                            // if this action has been marked for redundancy avoidance
                            if (action.getAvoidRedundancy()) {
                                // add the action function to the action stringbuilder so it's before the event
                                actionStringBuilder.append("function Action_" + action.getId() + "(ev) {\n" + " " + actionJavaScript.trim().replace("\n", "\n ") + "\n" + " return true;\n" + "}\n\n");
                                // add an action function call to the event string builder - a false return short-circuits the remaining actions
                                eventStringBuilder.append(" if (!Action_" + action.getId() + "(ev)) return false;\n");
                                //eventStringBuilder.append(" Action_" + action.getId() + "(ev);\n");
                            } else {
                                // go straight into the event
                                eventStringBuilder.append(" " + actionJavaScript.trim().replace("\n", "\n ") + "\n");
                            }
                        }
                    }
                } catch (Exception ex) {
                    // print a commented message into the generated script rather than failing the whole page
                    eventStringBuilder.append(" // Error creating JavaScript for " + action.getType() + " action " + action.getId() + " : " + ex.getMessage() + "\n");
                }
            }
            // close the try/catch
            if (control == null) {
                // page
                eventStringBuilder.append(" } catch(ex) { Event_error('" + event.getType() + "',null,ex); }\n");
            } else {
                // control
                eventStringBuilder.append(" } catch(ex) { Event_error('" + event.getType() + "','" + control.getId() + "',ex); }\n");
            }
            // close event function
            eventStringBuilder.append("}\n\n");
            // add the action functions
            stringBuilder.append(actionStringBuilder);
            // add the event function
            stringBuilder.append(eventStringBuilder);
        }
    }
}

// build the event handling page JavaScript iteratively
private void getEventHandlersJavaScript(RapidRequest rapidRequest, StringBuilder stringBuilder, Application application, List<Control> controls) throws JSONException {
    // check there are some controls
    if (controls != null) {
        // if we're at the root of the page
        if (controls.equals(_controls)) {
            // check for page events
            if (_events != null) {
                // loop page events and get js functions
                for (Event event : _events) getEventJavaScriptFunction(rapidRequest, stringBuilder, application, null, event);
            }
        }
        for (Control control : controls) {
            // check event actions
            if (control.getEvents() != null) {
                // loop control events and get js functions
                for (Event event : control.getEvents()) getEventJavaScriptFunction(rapidRequest, stringBuilder, application, control, event);
            }
            // now call iteratively for child controls (of this [child] control, etc.)
if (control.getChildControls() != null) getEventHandlersJavaScript(rapidRequest, stringBuilder, application, control.getChildControls()); } } } // this method produces the start of the head (which is shared by the no permission response) private String getHeadStart(RapidHttpServlet rapidServlet, Application application) { // look for the page title suffix String pageTitleSuffix = rapidServlet.getServletContext().getInitParameter("pageTitleSuffix"); // if null make default if (pageTitleSuffix == null) pageTitleSuffix = " - by Rapid"; // create start of head html and return return " <head>\n" + " <title>" + Html.escape(_title) + pageTitleSuffix + "</title>\n" + " <meta description=\"Created using Rapid - www.rapid-is.co.uk\"/>\n" + " <meta charset=\"utf-8\"/>\n" + " <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no\" />\n" + (application != null ? " <meta name=\"theme-color\" content=\"" + application.getStatusBarColour() + "\" />\n" : "" ) + " <link rel=\"icon\" href=\"favicon.ico\"></link>\n"; } // this private method produces the head of the page which is often cached, if resourcesOnly is true only page resources are included which is used when sending no permission private String getHeadLinks(RapidHttpServlet rapidServlet, Application application, boolean isDialogue) throws JSONException { // create a string builder containing the head links StringBuilder stringBuilder = new StringBuilder(getHeadStart(rapidServlet, application)); // if you're looking for where the jquery link is added it's the first resource in the page.control.xml file stringBuilder.append(" " + getResourcesHtml(application, false).trim() + "\n"); // add a JavaScript block with important global variables - this removed by the pagePanel loader and navigation action when showing dialogues, by matching to the various variables so be careful changing anything below stringBuilder.append(" <script type='text/javascript'>\n"); if (application 
!= null) { stringBuilder.append("var _appId = '" + application.getId() + "';\n"); stringBuilder.append("var _appVersion = '" + application.getVersion() + "';\n"); } stringBuilder.append("var _pageId = '" + _id + "';\n"); stringBuilder.append("var _pageName = '" + _name.replace("'", "\\'") + "';\n"); stringBuilder.append("var _pageTitle = '" + _title.replace("'", "\\'") + "';\n"); // this flag indicates if the Rapid Mobile client is regaining the foreground stringBuilder.append("var _mobileResume = false;\n"); // this flag indicates if any controls are loading asynchronously and the page load method can't be called stringBuilder.append("var _loadingControls = 0;\n"); stringBuilder.append("var _loadingPages = [];\n"); stringBuilder.append(" </script>\n"); return stringBuilder.toString(); } // this private method writes JavaScript specific to the user private void writeUserJS(Writer writer, RapidRequest rapidRequest, Application application, User user, Boolean download) throws RapidLoadingException, IOException, JSONException { // open js writer.write(" <script type='text/javascript'>\n"); // if download if (download) { // just write RapidMobile //writer.write("var _userName = 'RapidMobile';\n"); // print user - the above is turned off for now until enough people download Rapid Mobile 2.4.1.3 which can send the authenticated user if (user != null) writer.write("var _userName = '" + user.getName() + "';\n"); } else { // print user if (user != null) writer.write("var _userName = '" + user.getName() + "';\n"); // get app page variables List<String> pageVariables = application.getPageVariables(rapidRequest.getRapidServlet().getServletContext()); // if we got some if (pageVariables != null) { // prepare json to hold page variables JSONObject jsonPageVariables = new JSONObject(); // loop them for (String pageVariable : pageVariables) { // null safety if (pageVariable != null) { // look for a value in the session String value = (String) 
rapidRequest.getSessionAttribute(pageVariable);
                    // if we got one add it
                    if (value != null) jsonPageVariables.put(pageVariable, value);
                }
            }
            // write page variables
            writer.write("var _pageVariables_" + _id + " = " + jsonPageVariables + ";\n");
        }
    }
    // close js
    writer.write(" </script>\n");
}

// this private method produces the head of the page which is often cached, if resourcesOnly is true only page resources are included which is used when sending no permission
private String getHeadCSS(RapidRequest rapidRequest, Application application, boolean isDialogue) throws JSONException {
    // create an empty string builder
    StringBuilder stringBuilder = new StringBuilder();
    // fetch all page control styles
    String pageCss = getAllCSS(rapidRequest.getRapidServlet().getServletContext(), application);
    // only if there is some
    if (pageCss.length() > 0) {
        // open style blocks
        stringBuilder.append(" <style>\n");
        // if live we're going to try and minify
        if (application.getStatus() == Application.STATUS_LIVE) {
            try {
                // get string to itself minified
                pageCss = Minify.toString(pageCss, Minify.CSS, "Page head CSS");
            } catch (IOException ex) {
                // add error and resort to unminified
                pageCss = "\n/*\n\n Failed to minify the css : " + ex.getMessage() + "\n\n*/\n\n" + pageCss;
            }
        } else {
            // prefix with minify message
            pageCss = "\n/* The code below is minified for live applications */\n\n" + pageCss;
        }
        // add it to the page
        stringBuilder.append(pageCss);
        // close the style block
        stringBuilder.append(" </style>\n");
    }
    // return it
    return stringBuilder.toString();
}

// this private method produces the head of the page which is often cached, if resourcesOnly is true only page resources are included which is used when sending no permission
private String getHeadReadyJS(RapidRequest rapidRequest, Application application, boolean isDialogue, FormAdapter formAdapter) throws JSONException {
    // make a new string builder just for the js (so we can minify it independently)
    StringBuilder jsStringBuilder = new StringBuilder();
    // add an extra line break for non-live applications
    if (application.getStatus() != Application.STATUS_LIVE) jsStringBuilder.append("\n");
    // get all controls
    List<Control> pageControls = getAllControls();
    // if we got some
    if (pageControls != null) {
        // loop them
        for (Control control : pageControls) {
            // get the details
            String details = control.getDetails();
            // check if null
            if (details != null) {
                // create a global variable for it's details
                jsStringBuilder.append("var " + control.getId() + "details = " + details + ";\n");
            }
        }
        // add a line break again if we printed anything
        if (jsStringBuilder.length() > 0) jsStringBuilder.append("\n");
    }
    // initialise the form controls that need their values added in the dynamic part of the page script
    _formControlValues = new ArrayList<>();
    // get all actions
    List<Action> actions = getAllActions();
    // loop them
    for (Action action : actions) {
        // if form type
        if ("form".equals(action.getType())) {
            // get the action type
            String type = action.getProperty("actionType");
            // if value copy
            if ("val".equals(type)) {
                // get control id
                String controlId = action.getProperty("dataSource");
                // add to collection if all in order
                if (controlId != null) _formControlValues.add(controlId);
            } else if ("sub".equals(type)) {
                // add sub as id
                _formControlValues.add("sub");
            } else if ("err".equals(type)) {
                // add err as id
                _formControlValues.add("err");
            } else if ("res".equals(type)) {
                // add res as id
                _formControlValues.add("res");
            }
        }
    }
    // initialise our pageload lines collections
    _pageloadLines = new ArrayList<>();
    // get any control initJavaScript event listeners into the pageloadLines (goes into $(document).ready function)
    getPageLoadLines(_pageloadLines, _controls);
    // get a synchronised list to avoid concurrency exception in sort
    List<String> pageLoadLines = Collections.synchronizedList(_pageloadLines);
    // synchronised block for sorting in thread-safe manner
    synchronized (this) {
        // sort the page load lines
        Collections.sort(pageLoadLines, new Comparator<String>() {
            @Override
            public int compare(String l1, String l2) {
                // bug fix: the original returned -1 for null/empty regardless of argument order,
                // which violates the Comparator contract (sgn(compare(x,y)) == -sgn(compare(y,x)))
                // and can make TimSort throw "Comparison method violates its general contract!"
                boolean empty1 = (l1 == null || l1.isEmpty());
                boolean empty2 = (l2 == null || l2.isEmpty());
                if (empty1 && empty2) return 0;
                if (empty1) return -1;
                if (empty2) return 1;
                // descending by first character, as before
                return Character.compare(l2.charAt(0), l1.charAt(0));
            }
        });
    }
    // if there is a form adapter in place
    if (formAdapter != null) {
        // add a line to set any form values before the load event is run
        pageLoadLines.add("Event_setFormValues($.Event('setValues'));\n");
        // add an init form function - in extras.js
        pageLoadLines.add("Event_initForm('" + _id + "');\n");
    }
    // check for page events (this is here so all listeners are registered by now) and controls (there should not be none but nothing happens without them)
    if (_events != null && _controls != null) {
        // loop page events
        for (Event event : _events) {
            // only if there are actually some actions to invoke
            if (event.getActions() != null) {
                if (event.getActions().size() > 0) {
                    // page is a special animal so we need to do each of it's event types differently
                    if ("pageload".equals(event.getType())) {
                        // call the page load if safe to do so - controls with asynchronous loading will need to check and call this method themselves
                        pageLoadLines.add("if (!_mobileResume) { if (_loadingControls < 1) { Event_pageload_" + _id + "($.Event('pageload')) } else { _loadingPages.push('" + _id + "');} };\n");
                    }
                    // resume is also a special animal
                    if ("resume".equals(event.getType())) {
                        // fire the resume event immediately if there is no rapidMobile (it will be done by the Rapid Mobile app if present)
                        pageLoadLines.add("if (!window['_rapidmobile']) Event_resume_" + _id + "($.Event('resume'));\n");
                    }
                    // reusable action is only invoked via reusable actions on other events - there is no listener
                }
            }
        }
    }
    // if there is a form adapter in place
    if (formAdapter != null) {
        // add a line to check the form now all load events have been run
        pageLoadLines.add("Event_checkForm();\n");
    }
    // if this is not a dialogue or there are any load lines
    if (!isDialogue || pageLoadLines.size() > 0) {
        // open the page loaded function
        jsStringBuilder.append("$(document).ready( function() {\n");
        // add a try
        jsStringBuilder.append(" try {\n");
        // print any page load lines such as initialising controls
        for (String line : pageLoadLines) jsStringBuilder.append(" " + line);
        // close the try
        jsStringBuilder.append(" } catch(ex) { $('body').html(ex.message || ex); }\n");
        // after 200 milliseconds show and trigger a window resize for any controls that might be listening (this also cuts out any flicker), we also call focus on the elements we marked for focus while invisible (in extras.js)
        jsStringBuilder.append(" window.setTimeout( function() {\n $(window).resize();\n $('body').css('visibility','visible');\n $('[data-focus]').focus();\n }, 200);\n");
        // end of page loaded function
        jsStringBuilder.append("});\n\n");
    }
    // return it
    return jsStringBuilder.toString();
}

// this private method produces the head of the page which is often cached, if resourcesOnly is true only page resources are included which is used when sending no permission
private String getHeadJS(RapidRequest rapidRequest, Application application, boolean isDialogue) throws JSONException {
    // a string builder
    StringBuilder stringBuilder = new StringBuilder();
    // make a new string builder just for the js (so we can minify it independently)
    StringBuilder jsStringBuilder = new StringBuilder();
    // get all actions in the page
    List<Action> pageActions = getAllActions();
    // only proceed if there are actions in this page
    if (pageActions != null) {
        // loop the list of all actions looking for redundancy
        for (Action action : pageActions) {
            // if this action adds redundancy to any others
            if (action.getRedundantActions() != null) {
                // loop them
                for (String actionId : action.getRedundantActions()) {
                    // try and find the action
                    Action redundantAction = getAction(actionId);
                    // if we got one
                    if (redundantAction != null) {
                        // update the redundancy avoidance flag
redundantAction.avoidRedundancy(true);
                    }
                }
            } // redundantActions != null
        } // action loop to check redundancy
        // loop the list of actions
        for (Action action : pageActions) {
            try {
                // look for any javascript to print into the page that this action may have
                String actionPageJavaScript = action.getPageJavaScript(rapidRequest, application, this, null);
                // print it here if so
                if (actionPageJavaScript != null) jsStringBuilder.append(actionPageJavaScript.trim() + "\n\n");
            } catch (Exception ex) {
                // print the exception as a comment in the generated script rather than failing the whole page
                jsStringBuilder.append("// Error producing page JavaScript : " + ex.getMessage() + "\n\n");
            }
        } // action loop
        // add event handlers, starting at the root controls
        getEventHandlersJavaScript(rapidRequest, jsStringBuilder, application, _controls);
    } // page actions check
    // if there was any js
    if (jsStringBuilder.length() > 0) {
        // check the application status
        if (application.getStatus() == Application.STATUS_LIVE) {
            try {
                // minify the js before adding
                stringBuilder.append(Minify.toString(jsStringBuilder.toString(),Minify.JAVASCRIPT, "Page JavaScript"));
            } catch (IOException ex) {
                // add the error
                stringBuilder.append("\n\n/* Failed to minify JavaScript : " + ex.getMessage() + " */\n\n");
                // add the js as is
                stringBuilder.append(jsStringBuilder);
            }
        } else {
            // add the js as is
            stringBuilder.append("/* The code below is minified for live applications */\n\n" + jsStringBuilder.toString().trim() + "\n");
        }
    }
    // get it into a string and insert any parameters
    String headJS = application.insertParameters(rapidRequest.getRapidServlet().getServletContext(), stringBuilder.toString());
    // return it
    return headJS;
}

// this function iteratively checks permission and writes control role html
private void writeRoleControlHtml(Writer writer, List<String> userRoles, RoleControlHtml roleControlHtml) throws IOException {
    // if we have a roleControlHtml
    if (roleControlHtml != null) {
        // assume we haven't passed
        boolean passed = false;
        // check if it has roles
        if
(roleControlHtml.getRoles() == null) { // no roles it passes passed = true; } else { // loop the control roles first - likely to be smaller for (String controlRole : roleControlHtml.getRoles()) { // loop the user roles for (String userRole : userRoles) { // if they match if (controlRole.equalsIgnoreCase(userRole)) { // we've passed passed = true; // don't check any further break; } } // don't loop further if passed if (passed) break; } } // if we passed if (passed) { // write the start html if there is any if (roleControlHtml.getStartHtml() != null) writer.write(roleControlHtml.getStartHtml()); // if there are children if (roleControlHtml.getChildren() != null) { // loop the children for (RoleControlHtml childRoleControlHtml : roleControlHtml.getChildren()) { // print them writeRoleControlHtml(writer, userRoles, childRoleControlHtml); } } // write the end html if there is any if (roleControlHtml.getEndHtml() != null) writer.write(roleControlHtml.getEndHtml()); } // control roles check } // roleControlHtml check } // this routine produces the entire page public void writeHtml(RapidHttpServlet rapidServlet, HttpServletResponse response, RapidRequest rapidRequest, Application application, User user, Writer writer, boolean designerLink, boolean download) throws JSONException, IOException, RapidLoadingException { // get the servlet context ServletContext servletContext = rapidServlet.getServletContext(); // this doctype is necessary (amongst other things) to stop the "user agent stylesheet" overriding styles writer.write("<!DOCTYPE html>\n"); // open the html writer.write("<html lang=\"en\">\n"); // get any theme Theme theme = application.getTheme(servletContext); // check for undermaintenance status if (application.getStatus() == Application.STATUS_MAINTENANCE) { rapidServlet.writeMessage(writer, "Under maintenance", "This application is currently under maintenance. 
Please try again in a few minutes."); } else { // get the security SecurityAdapter security = application.getSecurityAdapter(); // get any form adapter FormAdapter formAdapter = application.getFormAdapter(); // assume the user has permission to access the page boolean gotPagePermission = true; try { // if this page has roles if (_roles != null) { if (_roles.size() > 0) { // check if the user has any of them gotPagePermission = security.checkUserRole(rapidRequest, _roles); } } } catch (SecurityAdapaterException ex) { rapidServlet.getLogger().error("Error checking for page roles", ex); } // check that there's permission if (gotPagePermission) { // whether we're rebulding the page for each request boolean rebuildPages = Boolean.parseBoolean(servletContext.getInitParameter("rebuildPages")); // check whether or not we rebuild if (rebuildPages) { // get fresh head links writer.write(getHeadLinks(rapidServlet, application, !designerLink)); // write the user-specific JS writeUserJS(writer, rapidRequest, application, user, download); // get fresh js and css writer.write(getHeadCSS(rapidRequest, application, !designerLink)); // open the script writer.write(" <script type='text/javascript'>\n"); // write the ready JS writer.write(getHeadReadyJS(rapidRequest, application, !designerLink, formAdapter)); } else { // rebuild any uncached if (_cachedHeadLinks == null) _cachedHeadLinks = getHeadLinks(rapidServlet, application, !designerLink); if (_cachedHeadCSS == null) _cachedHeadCSS = getHeadCSS(rapidRequest, application, !designerLink); if (_cachedHeadReadyJS == null) _cachedHeadReadyJS = getHeadReadyJS(rapidRequest, application, !designerLink, formAdapter); // get the cached head links writer.write(_cachedHeadLinks); // write the user-specific JS writeUserJS(writer, rapidRequest, application, user, download); // get the cached head js and css writer.write(_cachedHeadCSS); // open the script writer.write(" <script type='text/javascript'>\n"); // write the ready JS 
writer.write(_cachedHeadReadyJS); } // if there is a form if (formAdapter != null) { // set no cache on this page RapidFilter.noCache(response); // a placeholder for any form id String formId = null; // a placeholder for any form values StringBuilder formValues = null; // first do the actions that could result in an exception try { // get the form details UserFormDetails formDetails = formAdapter.getUserFormDetails(rapidRequest); // if we got some if (formDetails != null) { // set the form id formId = formDetails.getId(); // create the values string builder formValues = new StringBuilder(); // set whether submitted formValues.append("var _formSubmitted = " + formDetails.getSubmitted() + ";\n\n"); // start the form values object (to supply previous form values) formValues.append("var _formValues = {"); // if form control values to set if (_formControlValues != null) { // loop then for (int i = 0; i < _formControlValues.size(); i++) { // get the control id String id = _formControlValues.get(i); // place holder for the value String value = null; // some id's are special if ("id".equals(id)) { // the submission message value = formDetails.getId(); } else if ("sub".equals(id)) { // the submission message value = formDetails.getSubmitMessage(); } else if ("err".equals(id)) { // the submission message value = formDetails.getErrorMessage(); } else if ("res".equals(id)) { // the submission message value = formDetails.getPassword(); } else { // lookup the value value = formAdapter.getFormControlValue(rapidRequest, formId, id, false); } // if we got one if (value != null) { // escape it and enclose it value = value.replace("\\", "\\\\").replace("'", "\\'").replace("\r\n", "\\n").replace("\n", "\\n").replace("\r", ""); // add to object formValues.append("'" + id + "':'" + value + "'"); // add comma formValues.append(","); } } } // close it formValues.append("'id':_formId};\n\n"); // start the set form values function formValues.append("function Event_setFormValues(ev) {"); // 
get any form page values FormPageControlValues formControlValues = formAdapter.getFormPageControlValues(rapidRequest, formId, _id); // if there are any if (formControlValues != null) { if (formControlValues.size() > 0) { // add a line break formValues.append("\n"); // loop the values for (FormControlValue formControlValue : formControlValues) { // get the control Control pageControl = getControl(formControlValue.getId()); // if we got one if (pageControl != null) { // get the value String value = formControlValue.getValue(); // assume using setData String function = "setData_" + pageControl.getType(); // get the json properties for the control JSONObject jsonControl = rapidServlet.getJsonControl(pageControl.getType()); // if we got some if (jsonControl != null) { // look for the formSetRunTimePropertyType String formSetRunTimePropertyType = jsonControl.optString("formSetRunTimePropertyType", null); // if we got one update the function to use it if (formSetRunTimePropertyType != null) function = "setProperty_" + pageControl.getType() + "_" + formSetRunTimePropertyType; } // get any control details String details = pageControl.getDetailsJavaScript(application, this); // if null update to string if (details == null) details = null; // if there is a value use the standard setData for it (this might change to something more sophisticated at some point) if (value != null) formValues.append(" if (window[\""+ function + "\"]) " + function + "(ev, '" + pageControl.getId() + "', null, " + details + ", '" + value.replace("\\", "\\\\").replace("'", "\\'").replace("\r\n", "\\n").replace("\n", "\\n").replace("\r", "") + "');\n"); } } } } // close the function formValues.append("};\n\n"); // write the form values writer.write(formValues.toString()); } else { // set whether submitted writer.write("var _formSubmitted = false;\n\n"); // a dummy setFormValues method writer.write("function Event_setFormValues(ev) {}\n\n"); } // write the form id into the page - not necessary for 
dialogues if (designerLink) writer.write("var _formId = '" + formId + "';\n\n"); } catch (Exception ex) { // log the error rapidServlet.getLogger().error("Error create page form values", ex); // write a dummy Event_setFormValues with alert and redirect to start page writer.write("var _formSubmitted = false;\nfunction Event_setFormValues() {\n alert('Error with form values : " + ex.getMessage().replace("'", "\\'") + "');\n window.location.href='~?a=" + application.getId() + "'\n};\n\n"); } } if (rebuildPages) { // write the ready JS writer.write(getHeadJS(rapidRequest, application, !designerLink)); } else { // get the rest of the cached JS if (_cachedHeadJS == null) _cachedHeadJS = getHeadJS(rapidRequest, application, !designerLink); // write the ready JS writer.write(_cachedHeadJS); } // close the script writer.write("\n </script>\n"); // close the head writer.write(" </head>\n"); // start the body writer.write(" <body id='" + _id + "' style='visibility:hidden;'" + (_bodyStyleClasses == null ? "" : " class='" + _bodyStyleClasses + "'") + ">\n"); // if there was a theme and we're not hiding the header / footer if (theme != null && !_hideHeaderFooter) { // get any header html String headerHtml = theme.getHeaderHtml(); // write the header html if there is something to write if (headerHtml != null) if (headerHtml.length() > 0) writer.write(headerHtml); } // start the form if in use (but not for dialogues and other cases where the page is partial) if (formAdapter != null && designerLink) { writer.write(" <form id='" + _id + "_form' action='~?a=" + application.getId() + "&v=" + application.getVersion() + "&p=" + _id + "' method='POST'" + (application.getFormDisableAutoComplete() ? 
" autocomplete='off'" : "") + ">\n"); writer.write(" <input type='hidden' name='csrfToken' value='" + rapidRequest.getCSRFToken() + "' />\n"); writer.write(" <input type='hidden' id='" + _id + "_hiddenControls' name='" + _id + "_hiddenControls' />\n"); } // a reference for the body html String bodyHtml = null; // check we have _rolesHtml - this has been depreciated since 2.3.5.3 but older page files may still have it this way if (_rolesHtml != null) { // get the users roles List<String> userRoles = user.getRoles(); if (userRoles != null) { // loop each roles html entry for (RoleHtml roleHtml : _rolesHtml) { // get the roles from this combination List<String> roles = roleHtml.getRoles(); // assume not roles are required (this will be updated if roles are present) int rolesRequired = 0; // keep a running count for the roles we have int gotRoleCount = 0; // if there are roles to check if (roles != null) { // update how many roles we need our user to have rolesRequired = roles.size(); // check whether we need any roles and that our user has any at all if (rolesRequired > 0) { // check the user has as many roles as this combination requires if (userRoles.size() >= rolesRequired) { // loop the roles we need for this combination for (String role : roleHtml.getRoles()) { // check this role if (userRoles.contains(role)) { // increment the got role count gotRoleCount ++; } // increment the count of required roles } // loop roles } // user has enough roles to bother checking this combination } // if any roles are required } // add roles to check // if we have all the roles we need if (gotRoleCount == rolesRequired) { // use this html bodyHtml = roleHtml.getHtml(); // no need to check any further break; } } // html role combo loop } // got userRoles } else { // check if this page has role control html if (_roleControlHtml == null) { // check for _htmlBody if (_htmlBody == null) { // if _htmlBody is null, which happens for new pages which have not been saved yet, set to empty 
string to avoid no permission later bodyHtml = ""; } else { // set this to the whole html body bodyHtml = _htmlBody; } } else { // get the users roles List<String> userRoles = user.getRoles(); // if the user has roles if (userRoles != null) { // if the application is live if (application.getStatus() == Application.STATUS_LIVE) { // write straight to the page writer writeRoleControlHtml(writer, userRoles, _roleControlHtml); // set bodyHtml to empty string indicating we had permission bodyHtml = ""; } else { // make a StringWriter StringWriter swriter = new StringWriter(); // write straight to the StringWriter writeRoleControlHtml(swriter, userRoles, _roleControlHtml); // set bodyHtml to what what written so it will be pretty printed bodyHtml = swriter.toString(); } } // user has roles } // this page has role control html } // if our users have roles and we have different html for roles // check if we got any body html via the roles if (bodyHtml == null) { // didn't get any body html, show no permission rapidServlet.writeMessage(writer, "No permission", "You do not have permssion to view this page"); } else { // check there is something to write - will be an empty string if already written by newer user roles code if (bodyHtml.length() > 0) { // check the status of the application if (application.getStatus() == Application.STATUS_DEVELOPMENT) { // pretty print writer.write(Html.getPrettyHtml(bodyHtml.trim())); } else { // no pretty print writer.write(bodyHtml.trim()); } } // close the form if (formAdapter != null && designerLink) writer.write(" </form>\n"); } // got body html check } else { // no page permission rapidServlet.writeMessage(writer, "No permission", "You do not have permssion to view this page"); } // page permission check try { // whether to include the designer link - dialogues and files in the .zip do not so no need to even check permission if (designerLink) { // assume not admin link boolean adminLinkPermission = false; // check for the design role, 
super is required as well if the rapid app if ("rapid".equals(application.getId())) { if (security.checkUserRole(rapidRequest, Rapid.DESIGN_ROLE) && security.checkUserRole(rapidRequest, Rapid.SUPER_ROLE)) adminLinkPermission = true; } else { if (security.checkUserRole(rapidRequest, Rapid.DESIGN_ROLE)) adminLinkPermission = true; } // if we had the admin link if (adminLinkPermission) { // create string builder for the links StringBuilder designLinkStringBuilder = new StringBuilder(); // create a string builder for the jquery StringBuilder designLinkJQueryStringBuilder = new StringBuilder(); // loop all of the controls for (Control control : getAllControls()) { // get the json control definition JSONObject jsonControl = rapidServlet.getJsonControl(control.getType()); // definition check if ( jsonControl != null) { // look for the design link jquery String designLinkJQuery = jsonControl.optString("designLinkJQuery", null); // if we got any design link jquery if (designLinkJQuery != null) { // get the image title from the control name String title = control.getName(); // escape any apostrophes if (title != null) title = title.replace("'", "&apos;"); // add the link into the string builder designLinkStringBuilder.append("<a id='designLink_" + control.getId() + "' data-id='" + control.getId() + "' href='#'><img src='" + jsonControl.optString("image","images/penknife_24x24.png") + "' title='" + title + "'/></a>\n"); // trim the JQuery designLinkJQuery = designLinkJQuery.trim(); // start with a . if not if (!designLinkJQuery.startsWith(".")) designLinkJQuery = "." + designLinkJQuery; // end with ; if not if (!designLinkJQuery.endsWith(";")) designLinkJQuery += ";"; // add the jquery after the object reference designLinkJQueryStringBuilder.append(" $('#designLink_" + control.getId() + "')" + designLinkJQuery.replace("\n", "\n ") + "\n"); } } } // using attr href was the weirdest thing. 
Some part of jQuery seemed to be setting the url back to v=1&p=P1 when v=2&p=P2 was printed in the html // we also now use a JavaScript function getDesignerUrl in designlinks.js to make the url whilst checking for pretty urls writer.write( "<link rel='stylesheet' type='text/css' href='styles/designlinks.css'></link>\n" + "<script type='text/javascript' src='scripts/designlinks.js'></script>\n" + "<div id='designShow'></div>\n" + "<div id='designLinks' style='display:none;'>\n" + "<a id='designLink' href='#'><img src='images/tool.svg' title='Open Rapid Design'/></a>\n" + "<a id='designLinkNewTab' style='padding:5px;' href='#'><img src='images/right.svg' title='Open Rapid Design in a new tab'/></a>\n" + designLinkStringBuilder.toString() + "</div>\n" + "<script type='text/javascript'>\n" + "/* designLink */\n" + "var _onDesignLink = false;\n" + "var _onDesignTable = false;\n" + "var designerUrl = getDesignerUrl();\n" + "$(document).ready( function() {\n" + " $('#designShow').mouseover( function(ev) {\n $('#designLink').attr('href', designerUrl);\n $('#designLinkNewTab').attr('target','_blank').attr('href', designerUrl);\n $('#designLinks').show(); _onDesignLink = true;\n });\n" + " $('#designLinks').mouseleave( function(ev) {\n _onDesignLink = false;\n setTimeout( function() {\n if(!_onDesignLink && !_onDesignTable) $('#designLinks').fadeOut();\n }, 1000);\n });\n" + " $('#designLinks').mouseover(function(ev) {\n _onDesignLink = true;\n });\n" + " $('html').click(function(){\n if(!_onDesignLink && !_onDesignTable) {\n $('div.designData').fadeOut();\n $('#designLinks').fadeOut();\n }\n });\n" + designLinkJQueryStringBuilder.toString() + "});\n" + "</script>\n"); } } } catch (SecurityAdapaterException ex) { rapidServlet.getLogger().error("Error checking for the designer link", ex); } // design permssion check } // design link check // if there was a theme and we're not hiding the header / footer if (theme != null && !_hideHeaderFooter) { // get any header html String 
footerHtml = theme.getFooterHtml();
			// write the footer html if there is something to write
			if (footerHtml != null) if (footerHtml.length() > 0) writer.write(footerHtml);
		}

		// add the remaining elements
		writer.write(" </body>\n</html>");

	}

	// gets the value of a condition used in the page visibility rules
	// value ids are dot-separated: "System.xxx" and "Session.xxx" are resolved here,
	// anything else is treated as a control id and fetched from the form adapter
	private String getConditionValue(RapidRequest rapidRequest, String formId, FormAdapter formAdapter, Application application, Value value) throws Exception {
		String[] idParts = value.getId().split("\\.");
		if (idParts[0].equals("System")) {
			// just check that there is a type
			if (idParts.length > 1) {
				// get the type from the second part
				String type = idParts[1];
				// the available system values are specified above getDataOptions in designer.js
				if ("app id".equals(type)) {
					// the application id
					return application.getId();
				} else if ("app version".equals(type)) {
					// the application version
					return application.getVersion();
				} else if ("page id".equals(type)) {
					// the page
					return _id;
				} else if ("mobile".equals(type)) {
					// whether rapid mobile is present - always false when resolved server-side
					return "false";
				} else if ("online".equals(type)) {
					// whether we are online (presumed true if no rapid mobile)
					return "true";
				} else if ("user".equals(type) || "user name".equals(idParts[1])) {
					// the current user name
					return rapidRequest.getUserName();
				} else if ("field".equals(type)) {
					// pass the field as a value
					return value.getField();
				} else {
					// pass through as literal
					return idParts[1];
				}
			} else {
				// no type part - return null
				return null;
			}
		} else if (idParts[0].equals("Session")) {
			// if there are enough id parts
			if (idParts.length > 1) {
				return (String) rapidRequest.getSessionAttribute(idParts[1]);
			} else {
				return null;
			}
		} else {
			// get the id of the value object (should be a control id)
			String valueId = value.getId();
			// retrieve and return it from the form adapter, but not if it's hidden
			return formAdapter.getFormControlValue(rapidRequest, formId, valueId, true);
		}
	}

	// return a boolean for page visibility - pages can be hidden on forms by their type
	// (simple / save / resume / submitted / error) or by user-defined visibility conditions
	public boolean isVisible(RapidRequest rapidRequest, Application application, UserFormDetails userFormDetails) throws Exception {
		// get a logger
		Logger logger = rapidRequest.getRapidServlet().getLogger();
		// get the form adapter
		FormAdapter formAdapter = application.getFormAdapter();
		// if we have a form adapter and visibility conditions
		if (formAdapter == null) {
			// no form adapter always visible
			logger.debug("Page " + _id + " no form adapter, always visible ");
			return true;
		} else {
			if (userFormDetails == null) {
				// no user form details
				logger.debug("No user form details");
				return false;
			} else if (_simple) {
				// simple page always invisible on forms
				logger.debug("Page " + _id + " is a simple page, always hidden on forms");
				return false;
			} else if (_formPageType == FORM_PAGE_TYPE_SAVE) {
				// save page always invisible on forms
				logger.debug("Page " + _id + " is a save page, always hidden on forms");
				return false;
			} else if (_formPageType == FORM_PAGE_TYPE_RESUME) {
				// resume page always invisible on forms
				logger.debug("Page " + _id + " is a resume page, always hidden on forms");
				return false;
			} else if (_formPageType == FORM_PAGE_TYPE_SUBMITTED && !userFormDetails.getShowSubmitPage()) {
				// requests for submitted page are denied if show submission is not true
				logger.debug("Page " + _id + " is a submitted page but the form has not been submitted yet");
				return false;
			} else if (_formPageType == FORM_PAGE_TYPE_ERROR && !userFormDetails.getError()) {
				// requests for error page are denied if no error
				logger.debug("Page " + _id + " is an error page but the form has not had an error yet");
				return false;
			} else if (_visibilityConditions == null) {
				// no _visibilityConditions always visible
				logger.debug("Page " + _id + " _visibilityConditions is null, always visible on forms");
				return true;
			} else if (_visibilityConditions.size() == 0) {
				// no _visibilityConditions always visible
				logger.debug("Page " + _id + " _visibilityConditions size is zero, always visible on forms");
				return true;
			} else {
				// log
				logger.trace("Page " + _id + " " + _visibilityConditions.size() + " visibility condition(s) " + " : " + _conditionsType);
				// assume we have failed all conditions
				boolean pass = false;
				// loop them
				for (Condition condition : _visibilityConditions) {
					// assume we have failed this condition
					pass = false;
					logger.trace("Page " + _id + " visibility condition " + " : " + condition);
					String value1 = getConditionValue(rapidRequest, userFormDetails.getId(), formAdapter, application, condition.getValue1());
					logger.trace("Value 1 = " + value1);
					String value2 = getConditionValue(rapidRequest, userFormDetails.getId(), formAdapter, application, condition.getValue2());
					logger.trace("Value 2 = " + value2);
					String operation = condition.getOperation();
					// nulls are compared as empty strings
					if (value1 == null) value1 = "";
					if (value2 == null) value2 = "";
					// pass is updated from false to true if conditions match
					if ("==".equals(operation)) {
						if (value1.equals(value2)) pass = true;
					} else if ("!=".equals(operation)) {
						if (!value1.equals(value2)) pass = true;
					} else {
						// the remaining conditions all work with numbers and must not be empty strings
						if (value1.length() > 0 && value2.length() > 0) {
							try {
								// convert to floats
								float num1 = Float.parseFloat(value1);
								float num2 = Float.parseFloat(value2);
								// check the conditions
								if (">".equals(operation)) { if ((num1 > num2)) pass = true; }
								else if (">=".equals(operation)) { if ((num1 >= num2)) pass = true; }
								else if ("<".equals(operation)) { if ((num1 < num2)) pass = true; }
								else if ("<=".equals(operation)) { if ((num1 <= num2)) pass = true; }
							} catch (Exception ex) {
								// something went wrong - generally in the conversion - leave pass false
								logger.error("Error assessing page visibility page " + _id + " " + condition);
							} // try
						} // empty string check
					} // operation check
					// log
					logger.debug("Visibility condition for page " + _id + " : " + value1 + " " + condition.getOperation()+ " " + value2 + " , (" + condition + ") result is " + pass);
					// for the fast fail check whether we have an or
					if ("or".equals(_conditionsType)) {
						// if the conditions are or and we've just passed, we can stop checking further as we've passed in total
						if (pass) break;
					} else {
						// if the conditions are and and we've just failed, we can stop checking further as we've failed in total
						if (!pass) break;
					}
				} // condition loop
				// log result
				logger.debug("Page " + _id + " visibility check, " + _visibilityConditions.size() + " conditions, pass = " + pass);
				// if we failed set the page values to null
				if (!pass) formAdapter.setFormPageControlValues(rapidRequest, userFormDetails.getId(), _id, null);
				// return the pass
				return pass;
			} // simple, conditions, condition checks
		} // form adapter check
	}

	// return any reCaptcha controls in the page - result is cached in _reCaptchaControls after the first call
	public List<Control> getRecaptchaControls() {
		if (_reCaptchaControls == null) {
			// make a new list
			_reCaptchaControls = new ArrayList<>();
			// loop page controls
			for (Control control : getAllControls()) {
				// if this is a recaptcha add it
				if ("recaptcha".equals(control.getType())) {
					_reCaptchaControls.add(control);
				}
			}
		}
		return _reCaptchaControls;
	}

	// overrides

	@Override
	public String toString() {
		return "Page " + _id + " " + _name + " - " + _title;
	}

	// static methods

	// static function to load a new page from its .page.xml file, upgrading the xml version first if required
	public static Page load(ServletContext servletContext, File file) throws JAXBException, ParserConfigurationException, SAXException, IOException, TransformerFactoryConfigurationError, TransformerException {

		// get the logger
		Logger logger = (Logger) servletContext.getAttribute("logger");

		// trace log that we're about to load a page
		logger.trace("Loading page from " + file);

		// open the xml file into a document
		Document pageDocument = XML.openDocument(file);

		// specify the xmlVersion as -1
		int xmlVersion = -1;

		// look for a version node
		Node xmlVersionNode = XML.getChildElement(pageDocument.getFirstChild(), "XMLVersion");

		// if we got one update the version
		if (xmlVersionNode != null) xmlVersion =
Integer.parseInt(xmlVersionNode.getTextContent()); // if the version of this xml isn't the same as this class we have some work to do! if (xmlVersion != XML_VERSION) { // get the page name String name = XML.getChildElementValue(pageDocument.getFirstChild(), "name"); // log the difference logger.debug("Page " + name + " with version " + xmlVersion + ", current version is " + XML_VERSION); // // Here we would have code to update from known versions of the file to the current version // // check whether there was a version node in the file to start with if (xmlVersionNode == null) { // create the version node xmlVersionNode = pageDocument.createElement("XMLVersion"); // add it to the root of the document pageDocument.getFirstChild().appendChild(xmlVersionNode); } // set the xml to the latest version xmlVersionNode.setTextContent(Integer.toString(XML_VERSION)); // // Here we would use xpath to find all controls and run the Control.upgrade method // // // Here we would use xpath to find all actions, each class has it's own upgrade method so // we need to identify the class, instantiate it and call it's upgrade method // it's probably worthwhile maintaining a map of instantiated classes to avoid unnecessary re-instantiation // // save it XML.saveDocument(pageDocument, file); logger.debug("Updated " + name + " page version to " + XML_VERSION); } // get the unmarshaller from the context Unmarshaller unmarshaller = RapidHttpServlet.getUnmarshaller(); // get a buffered reader for our page with UTF-8 file format BufferedReader br = new BufferedReader( new InputStreamReader( new FileInputStream(file), "UTF-8")); // try the unmarshalling try { // unmarshall the page Page page = (Page) unmarshaller.unmarshal(br); // log that the page was loaded logger.debug("Loaded page " + page.getId() + " - " + page.getName() + " from " + file); // close the buffered reader br.close(); // return the page return page; } catch (JAXBException ex) { // close the buffered reader br.close(); // log 
that the page had an error logger.error("Error loading page from " + file); // re-throw throw ex; } } }
src/com/rapid/core/Page.java
/*

Copyright (C) 2020 - Gareth Edwards / Rapid Information Systems

[email protected]


This file is part of the Rapid Application Platform

Rapid is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version. The terms require you to include the original
copyright, and the license notice in all redistributions.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.

You should have received a copy of the GNU Affero General Public License in a
file named "COPYING". If not, see <http://www.gnu.org/licenses/>.

*/

package com.rapid.core;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletResponse;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactoryConfigurationError;

import org.apache.logging.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;

import com.rapid.actions.Logic.Condition;
import com.rapid.actions.Logic.Value;
import com.rapid.core.Application.RapidLoadingException;
import com.rapid.core.Application.Resource;
import com.rapid.core.Application.ResourceDependency;
import com.rapid.forms.FormAdapter;
import com.rapid.forms.FormAdapter.FormControlValue;
import com.rapid.forms.FormAdapter.FormPageControlValues;
import com.rapid.forms.FormAdapter.UserFormDetails;
import com.rapid.security.SecurityAdapter;
import com.rapid.security.SecurityAdapter.SecurityAdapaterException;
import com.rapid.security.SecurityAdapter.User;
import com.rapid.server.Rapid;
import com.rapid.server.RapidHttpServlet;
import com.rapid.server.RapidRequest;
import com.rapid.server.filter.RapidFilter;
import com.rapid.utils.Files;
import com.rapid.utils.Html;
import com.rapid.utils.Minify;
import com.rapid.utils.XML;

@XmlRootElement
@XmlType(namespace="http://rapid-is.co.uk/core")
public class Page {

	// the version of this class's xml structure when marshalled (if we have any significant changes down the line we can upgrade the xml files before unmarshalling)
	public static final int XML_VERSION = 1;

	// form page types
	public static final int FORM_PAGE_TYPE_NORMAL = 0;
	public static final int FORM_PAGE_TYPE_SUBMITTED = 1;
	public static final int FORM_PAGE_TYPE_ERROR = 2;
	public static final int FORM_PAGE_TYPE_SAVE = 3;
	public static final int FORM_PAGE_TYPE_RESUME = 4;

	// a class for retaining page html for a set of user roles - this structure is now deprecated as of Rapid 2.3.5.2 in favour of a more efficient tree structure
	public static class RoleHtml {

		// instance variables
		private List<String> _roles;
		private String _html;

		// properties
		public List<String> getRoles() { return _roles; }
		public void setRoles(List<String> roles) { _roles = roles; }

		public String getHtml() { return _html; }
		public void setHtml(String html) { _html = html; }

		// constructors
		public RoleHtml() {}

		public RoleHtml(List<String> roles, String html) { _roles
= roles; _html = html; }

	}

	// a class for retaining control html that has user roles
	public static class RoleControlHtml {

		// instance variables
		private String _startHtml, _endHtml;
		private List<String> _roles;
		private List<RoleControlHtml> _children;

		// properties
		public String getStartHtml() { return _startHtml; }
		public void setStartHtml(String startHtml) { _startHtml = startHtml; }

		public String getEndHtml() { return _endHtml; }
		public void setEndHtml(String endHtml) { _endHtml = endHtml; }

		public List<String> getRoles() { return _roles; }
		public void setRoles(List<String> roles) { _roles = roles; }

		public List<RoleControlHtml> getChildren() { return _children; }
		public void setChildren(List<RoleControlHtml> children) { _children = children; }

		// constructors
		public RoleControlHtml() {}

		// builds the tree recursively from its json representation
		public RoleControlHtml(JSONObject jsonRoleControlHtml) throws JSONException {
			_startHtml = jsonRoleControlHtml.optString("startHtml", null);
			_endHtml = jsonRoleControlHtml.optString("endHtml", null);
			JSONArray jsonRoles = jsonRoleControlHtml.optJSONArray("roles");
			if (jsonRoles != null) {
				_roles = new ArrayList<>();
				for (int i = 0; i < jsonRoles.length(); i++) _roles.add(jsonRoles.getString(i));
			}
			JSONArray jsonChildren = jsonRoleControlHtml.optJSONArray("children");
			if (jsonChildren != null) {
				_children = new ArrayList<>();
				for (int i = 0; i < jsonChildren.length(); i++) _children.add( new RoleControlHtml(jsonChildren.getJSONObject(i)));
			}
		}

	}

	// details of a lock that might be on this page
	public static class Lock {

		private String _userName, _userDescription;
		private Date _dateTime;

		public String getUserName() { return _userName; }
		public void setUserName(String userName) { _userName = userName; }

		public String getUserDescription() { return _userDescription; }
		public void setUserDescription(String userDescription) { _userDescription = userDescription; }

		public Date getDateTime() { return _dateTime; }
		public void setDateTime(Date dateTime) { _dateTime = dateTime; }

		// constructors
		public Lock() {}

		public Lock(String userName, String userDescription, Date dateTime) {
			_userName = userName;
			_userDescription = userDescription;
			_dateTime = dateTime;
		}

	}

	// instance variables
	private int _xmlVersion, _formPageType;
	private String _id, _name, _title, _label, _description, _createdBy, _modifiedBy, _htmlBody, _bodyStyleClasses, _cachedHeadLinks, _cachedHeadCSS, _cachedHeadReadyJS, _cachedHeadJS, _eTag;
	private boolean _simple, _hideHeaderFooter;
	private Date _createdDate, _modifiedDate;
	private List<Control> _controls, _reCaptchaControls;
	private List<Event> _events;
	private List<Style> _styles;
	private List<String> _controlTypes, _actionTypes, _sessionVariables, _roles;
	private List<RoleHtml> _rolesHtml;
	private RoleControlHtml _roleControlHtml;
	private List<Condition> _visibilityConditions;
	private String _conditionsType;
	private Lock _lock;
	private List<String> _formControlValues;
	private List<String> _dialoguePageIds;
	// this array is used to collect all of the lines needed in the pageload before sorting them
	private List<String> _pageloadLines;

	// properties

	// the xml version is used to upgrade xml files before unmarshalling (we use a property so it's written into xml)
	public int getXMLVersion() { return _xmlVersion; }
	public void setXMLVersion(int xmlVersion) { _xmlVersion = xmlVersion; }

	// the id uniquely identifies the page (it is quite short and is concatenated to control ids so more than one page's controls can be working in a document at one time)
	public String getId() { return _id; }
	public void setId(String id) { _id = id; }

	// this is expected to be a short name, probably even a code that is used by users to simply identify pages (also becomes the file name)
	public String getName() { return _name; }
	public void setName(String name) { _name = name; }

	// this is a user-friendly, long title
	public String getTitle() { return _title; }
	public void setTitle(String title) { _title = title; }

	// the form page type, most will be normal but we show special pages for after submission, error, and saved
	public int getFormPageType() { return _formPageType; }
	public void setFormPageType(int formPageType) { _formPageType = formPageType; }

	// this is the label to use in the form summary
	public String getLabel() { return _label; }
	public void setLabel(String label) { _label = label; }

	// an even longer description of what this page does
	public String getDescription() { return _description; }
	public void setDescription(String description) { _description = description; }

	// simple pages do not have any events and can be used in page panels without dynamically loading them via ajax
	public boolean getSimple() { return _simple; }
	public void setSimple(boolean simple) { _simple = simple; }

	// whether to hide any theme header / footer
	public boolean getHideHeaderFooter() { return _hideHeaderFooter; }
	public void setHideHeaderFooter(boolean hideHeaderFooter) { _hideHeaderFooter = hideHeaderFooter; }

	// the user that created this page (or archived page)
	public String getCreatedBy() { return _createdBy; }
	public void setCreatedBy(String createdBy) { _createdBy = createdBy; }

	// the date this page (or archive) was created
	public Date getCreatedDate() { return _createdDate; }
	public void setCreatedDate(Date createdDate) { _createdDate = createdDate; }

	// the last user to save this page
	public String getModifiedBy() { return _modifiedBy; }
	public void setModifiedBy(String modifiedBy) { _modifiedBy = modifiedBy; }

	// the date this page was last saved
	public Date getModifiedDate() { return _modifiedDate; }
	public void setModifiedDate(Date modifiedDate) { _modifiedDate = modifiedDate; }

	// the html for this page
	public String getHtmlBody() { return _htmlBody; }
	public void setHtmlBody(String htmlBody) { _htmlBody = htmlBody; }

	// any style classes for this page's body element
	public String getBodyStyleClasses() { return _bodyStyleClasses; }
	public void setBodyStyleClasses(String bodyStyleClasses) {
_bodyStyleClasses = bodyStyleClasses; }

	// the child controls of the page
	public List<Control> getControls() { return _controls; }
	public void setControls(List<Control> controls) { _controls = controls; }

	// the page events and actions
	public List<Event> getEvents() { return _events; }
	public void setEvents(List<Event> events) { _events = events; }

	// the page styles
	public List<Style> getStyles() { return _styles; }
	public void setStyles(List<Style> styles) { _styles = styles; }

	// session variables used by this page (navigation actions are expected to pass them in)
	public List<String> getSessionVariables() { return _sessionVariables; }
	public void setSessionVariables(List<String> sessionVariables) { _sessionVariables = sessionVariables; }

	// the roles required to view this page
	public List<String> getRoles() { return _roles; }
	public void setRoles(List<String> roles) { _roles = roles; }

	// list of different page html for different possible role combinations - this is deprecated from Rapid 2.3.5.3
	public List<RoleHtml> getRolesHtml() { return _rolesHtml; }
	public void setRolesHtml(List<RoleHtml> rolesHtml) { _rolesHtml = rolesHtml; }

	// page html for different possible role combinations - this is deprecated from Rapid 2.3.5.3
	public RoleControlHtml getRoleControlHtml() { return _roleControlHtml; }
	public void setRoleControlHtml(RoleControlHtml roleControlHtml) { _roleControlHtml = roleControlHtml; }

	// any lock that might be on this page
	public Lock getLock() { return _lock; }
	public void setLock(Lock lock) { _lock = lock; }

	// the page visibility rule conditions
	public List<Condition> getVisibilityConditions() { return _visibilityConditions; }
	public void setVisibilityConditions(List<Condition> visibilityConditions) { _visibilityConditions = visibilityConditions; }

	// the type (and/or) of the page visibility conditions - named so can be shared with logic action
	public String getConditionsType() { return _conditionsType; }
	public void setConditionsType(String conditionsType) { _conditionsType = conditionsType; }

	// the etag used to send 304 not modified if caching is turned off
	public String getETag() { return _eTag; }

	// constructor
	public Page() {
		// set the xml version
		_xmlVersion = XML_VERSION;
		// set the eTag
		_eTag = Long.toString(new Date().getTime());
	}

	// instance methods

	// the path of this page's .page.xml file within the application config folder
	// NOTE(review): "/" + "/pages/" produces a double slash in the path - presumably tolerated by the file system, confirm before changing
	public String getFile(ServletContext servletContext, Application application) {
		return application.getConfigFolder(servletContext) + "/" + "/pages/" + Files.safeName(_name + ".page.xml");
	}

	public void addControl(Control control) {
		if (_controls == null) _controls = new ArrayList<>();
		_controls.add(control);
	}

	public Control getControl(int index) {
		if (_controls == null) return null;
		return _controls.get(index);
	}

	// an iterative function for tree-walking child controls when searching for one
	public Control getChildControl(List<Control> controls, String controlId) {
		Control foundControl = null;
		if (controls != null) {
			for (Control control : controls) {
				if (controlId.equals(control.getId())) {
					foundControl = control;
					break;
				} else {
					foundControl = getChildControl(control.getChildControls(), controlId);
					if (foundControl != null) break;
				}
			}
		}
		return foundControl;
	}

	// uses the tree walking function above to find a particular control
	public Control getControl(String id) {
		return getChildControl(_controls, id);
	}

	// appends each control and, recursively, its descendants to childControls (depth-first)
	public void getChildControls(List<Control> controls, List<Control> childControls) {
		if (controls != null) {
			for (Control control : controls) {
				childControls.add(control);
				getChildControls(control.getChildControls(), childControls);
			}
		}
	}

	public List<Control> getAllControls() {
		ArrayList<Control> controls = new ArrayList<>();
		getChildControls(_controls, controls);
		return controls;
	}

	// find an action from a list of actions, including checking child actions
	public Action getChildAction(List<Action> actions, String actionId) {
		Action foundAction = null;
		if (actions != null) {
			for (Action action : actions) {
				if (action != null) {
					if (actionId.equals(action.getId())) return action;
					foundAction = getChildAction(action.getChildActions(), actionId);
					if (foundAction != null) break;
				}
			}
		}
		return foundAction;
	}

	// find an action amongst a controls events - faster to use if we have the control already
	public Action getChildEventsAction(List<Event> events, String actionId) {
		Action foundAction = null;
		if (events != null) {
			for (Event event : events) {
				if (event.getActions() != null) {
					foundAction = getChildAction(event.getActions(), actionId);
					if (foundAction != null) break;
				}
			}
		}
		return foundAction;
	}

	// an iterative function for tree-walking child controls when searching for a specific action
	public Action getChildControlsAction(List<Control> controls, String actionId) {
		Action foundAction = null;
		if (controls != null) {
			for (Control control : controls) {
				// look in the control events for the action
				foundAction = getChildEventsAction(control.getEvents(), actionId);
				// if we didn't get the action
				if (foundAction == null) {
					// look in the child controls
					foundAction = getChildControlsAction(control.getChildControls(), actionId);
				}
				// we're done!
if (foundAction != null) break; } } return foundAction; } // find an action in the page by its id public Action getAction(String id) { // check the page actions first if (_events != null) { for (Event event : _events) { if (event.getActions() != null) { Action action = getChildAction(event.getActions(), id); if (action != null) return action; } } } // uses the tree walking function above to the find a particular action return getChildControlsAction(_controls, id); } // recursively append to a list of actions from an action and it's children public void getChildActions(List<Action> actions, Action action, String type, boolean isWebserviceOnly) { // check if web service actions only if (!isWebserviceOnly || action.isWebService()) { // check there is a type if (type == null) { // no type so add this action actions.add(action); } else { // if types match if (type.equals(action.getType())) { // add action actions.add(action); } } // check there are child actions if (action.getChildActions() != null) { // loop them for (Action childAction : action.getChildActions()) { // add their actions too if (childAction != null) getChildActions(actions, childAction, type, isWebserviceOnly); } } } } // overide for the above public void getChildActions(List<Action> actions, Action action,boolean isWebserviceOnly) { getChildActions(actions, action, null, isWebserviceOnly); } // recursively append to a list of actions from a control and it's children public void getChildActions(List<Action> actions, Control control, String type, boolean isWebserviceOnly) { // check this control has events if (control.getEvents() != null) { for (Event event : control.getEvents()) { // add any actions to the list if (event.getActions() != null) { // loop the actions for (Action action : event.getActions()) { // add any child actions too if (action != null) getChildActions(actions, action, type, isWebserviceOnly); } } } } // check if we have any child controls if (control.getChildControls() != null) { // 
loop the child controls for (Control childControl : control.getChildControls()) { // add their actions too getChildActions(actions, childControl, type, isWebserviceOnly); } } } // override for the above public void getChildActions(List<Action> actions, Control control, boolean isWebserviceOnly) { getChildActions(actions, control, null, isWebserviceOnly); } // add actions and child actions public void addAction(List<Action> actions, Action action, boolean webServiceOnly) { // if we have an action if (action != null) { // add it if (!webServiceOnly || action.isWebService()) actions.add(action); // get any child actions List<Action> childActions = action.getChildActions(); // if there where some if (childActions != null) { // loop the children for (Action childAction : childActions) { // add this action recursively addAction(actions, childAction, webServiceOnly); } } } } // get all actions in the page of a specified type public List<Action> getAllActions(String type, boolean webServiceOnly) { // instantiate the list we're going to return List<Action> actions = new ArrayList<>(); // check the page events first if (_events != null) { for (Event event : _events) { // get any event actions List<Action> eventActions = event.getActions(); // if we got some if (eventActions != null) { // if type is null if (type == null) { // loop actions for (Action eventAction : eventActions) { // add this action, including it's children addAction(actions, eventAction, webServiceOnly); } } else { // loop them for (Action eventAction : eventActions) { // if right type if (type.equals(eventAction.getType())) { // add this action, including it's children addAction(actions, eventAction, webServiceOnly); } // Child actions List<Action> eventActionChildren = eventAction.getChildActions(); if (eventActionChildren != null) { for (Action childAction : eventActionChildren) { if (type.equals(childAction.getType())) { addAction(actions, childAction, webServiceOnly); } } } } } } } } // uses the tree 
walking function above to add all actions if (_controls != null) { for (Control control : _controls) { getChildActions(actions, control, type, webServiceOnly); } } // sort them by action id Collections.sort(actions, new Comparator<Action>() { @Override public int compare(Action obj1, Action obj2) { if (obj1 == null) return -1; if (obj2 == null) return 1; if (obj1.equals(obj2)) return 0; String id1 = obj1.getId(); String id2 = obj2.getId(); if (id1 == null) return -1; if (id2 == null) return -1; int startPos = id1.lastIndexOf("_A"); if (startPos < 0) return -1; int endPos = id1.indexOf("_", startPos + 2); if (endPos < 0) endPos = id1.length(); id1 = id1.substring(startPos + 2, endPos); startPos = id2.lastIndexOf("_A"); if (startPos < 0) return 1; endPos = id2.indexOf("_", startPos + 2); if (endPos < 0) endPos = id2.length(); id2 = id2.substring(startPos + 2, endPos); return (Integer.parseInt(id1) - Integer.parseInt(id2)); } }); return actions; } // get all actions in the page public List<Action> getAllActions() { // override for the above return getAllActions(null, false); } // get all actions in the page of a certain type public List<Action> getAllActions(String type) { // override for the above return getAllActions(type, false); } // get all web-service actions in the page public List<Action> getAllWebServiceActions() { // override for the above return getAllActions(null, true); } // an iterative function for tree-walking child controls when searching for a specific action's control public Control getChildControlActionControl(List<Control> controls, String actionId) { Control foundControl = null; if (controls != null) { for (Control control : controls) { if (control.getEvents() != null) { for (Event event : control.getEvents()) { if (event.getActions() != null) { for (Action action : event.getActions()) { if (actionId.equals(action.getId())) return control; } } } } foundControl = getChildControlActionControl(control.getChildControls(), actionId); if (foundControl 
!= null) break; } } return foundControl; } // find an action's control in the page by its id public Control getActionControl(String actionId) { // uses the tree walking function above to the find a particular action return getChildControlActionControl(_controls, actionId); } // an iterative function for tree-walking child controls when searching for a specific action's control public Event getChildControlActionEvent(List<Control> controls, String actionId) { Event foundEvent = null; if (controls != null) { for (Control control : controls) { if (control.getEvents() != null) { for (Event event : control.getEvents()) { if (event.getActions() != null) { for (Action action : event.getActions()) { if (actionId.equals(action.getId())) return event; } } } } foundEvent = getChildControlActionEvent(control.getChildControls(), actionId); if (foundEvent != null) break; } } return foundEvent; } // find an action in the page by its id public Event getActionEvent(String actionId) { // check the page actions first if (_events != null) { for (Event event : _events) { if (event.getActions() != null) { for (Action action : event.getActions()) { if (actionId.equals(action.getId())) return event; } } } } // uses the tree walking function above to the find a particular action return getChildControlActionEvent(_controls, actionId); } // gets the pages that this page can navigate to as a dialogue - we check all pages to see which can come back public List<String> getDialoguePageIds() { // if the internal variable has not been initialised yet if (_dialoguePageIds == null) { // initialise _dialoguePageIds = new ArrayList<>(); // get all navigation actions on this page List<Action> actions = getAllActions("navigate"); // loop them for (Action action : actions) { // if this is a dialogue if (Boolean.parseBoolean(action.getProperty("dialogue"))) { // get the page id String pageId = action.getProperty("page"); // if we got one if (pageId != null) { // add if it is something if (pageId.length() 
> 0) _dialoguePageIds.add(pageId); } } } } return _dialoguePageIds; } // iterative function for building a flat JSONArray of controls that can be used on other pages, will also add events if including from a dialogue private void getOtherPageControls(RapidHttpServlet rapidServlet, JSONArray jsonControls, List<Control> controls, boolean includePageVisibiltyControls, Boolean includeFromDialogue) throws JSONException { // check we were given some controls if (controls != null) { // loop the controls for (Control control : controls) { // get if this control can be used from other pages boolean canBeUsedFromOtherPages = control.getCanBeUsedFromOtherPages(); // get if this control can be used for page visibility boolean canBeUsedForFormPageVisibilty = control.getCanBeUsedForFormPageVisibilty() && includePageVisibiltyControls; // if this control can be used from other pages if (canBeUsedFromOtherPages || canBeUsedForFormPageVisibilty || includeFromDialogue) { // get the control details JSONObject jsonControlClass = rapidServlet.getJsonControl(control.getType()); // check we got one if (jsonControlClass != null) { // get the name String controlName = control.getName(); // no need to include if we don't have one if (controlName != null && controlName.trim().length() > 0) { // make a JSON object with what we need about this control JSONObject jsonControl = new JSONObject(); jsonControl.put("id", control.getId()); jsonControl.put("type", control.getType()); jsonControl.put("name", controlName); if (jsonControlClass.optString("getDataFunction", null) != null) jsonControl.put("input", true); if (jsonControlClass.optString("setDataJavaScript", null) != null) jsonControl.put("output", true); if (canBeUsedFromOtherPages) jsonControl.put("otherPages", true); if (canBeUsedForFormPageVisibilty) jsonControl.put("pageVisibility", true); if (control.getProperty("formObjectAddressNumber") != null) jsonControl.put("formObjectAddressNumber", control.getProperty("formObjectAddressNumber")); 
if (control.getProperty("formObjectPartyNumber") != null) jsonControl.put("formObjectPartyNumber", control.getProperty("formObjectPartyNumber")); // look for any runtimeProperties JSONObject jsonProperty = jsonControlClass.optJSONObject("runtimeProperties"); // if we got some if (jsonProperty != null) { // create an array to hold the properties JSONArray jsonRunTimeProperties = new JSONArray(); // look for an array too JSONArray jsonProperties = jsonProperty.optJSONArray("runtimeProperty"); // assume int index = 0; int count = 0; // if an array if (jsonProperties != null) { // get the first item jsonProperty = jsonProperties.getJSONObject(index); // set the count count = jsonProperties.length(); } // look for a single object JSONObject jsonPropertySingle = jsonProperty.optJSONObject("runtimeProperty"); // assume this one if not null if (jsonPropertySingle != null) jsonProperty = jsonPropertySingle; // do once and loop until no more left do { // create a json object for this runtime property JSONObject jsonRuntimeProperty = new JSONObject(); jsonRuntimeProperty.put("type", jsonProperty.get("type")); jsonRuntimeProperty.put("name", jsonProperty.get("name")); if (jsonProperty.optString("getPropertyFunction", null) != null) jsonRuntimeProperty.put("input", true); if (jsonProperty.optString("setPropertyJavaScript", null) != null) jsonRuntimeProperty.put("output", true); if (jsonProperty.optBoolean("canBeUsedForFormPageVisibilty")) jsonRuntimeProperty.put("visibility", true); // add to the collection - note further check for dialogue controls having to add in jsonRunTimeProperties.put(jsonRuntimeProperty); // increment the index index ++; // get the next item if there's one there if (index < count) jsonProperty = jsonProperties.getJSONObject(index); } while (index < count); // add the properties to what we're returning jsonControl.put("runtimeProperties", jsonRunTimeProperties); } // property loop // if we are including from dialogue if (includeFromDialogue) { // set the 
other pages property so we see it in the designer jsonControl.put("otherPages", true); // get any events for this control List<Event> events = control.getEvents(); // if we got some if (events != null) { // an array of events JSONArray jsonEvents = new JSONArray(); // loop them for (Event event : events) { // get any actions List<Action> actions = event.getActions(); // if there were some if (actions != null) { // if there were some if (actions.size() > 0) { // make a jsonArray for the actions JSONArray jsonActions = new JSONArray(); // loop the actions for (Action action : actions) { // make a json object for the action JSONObject jsonAction = new JSONObject(); // add id jsonAction.put("id", action.getId()); // add type jsonAction.put("type", action.getType()); // add to array jsonActions.put(jsonAction); } // make a jsonObject for this event JSONObject jsonEvent = new JSONObject(); // add the event id jsonEvent.put("type", event.getType()); // add the jsonActions to the event jsonEvent.put("actions", jsonActions); // as jsonEvent to collection jsonEvents.put(jsonEvent); } } } // add the jsonEvents to the control if we got some if (jsonEvents.length() > 0) jsonControl.put("events",jsonEvents); } } // add it to the collection we are returning straight away jsonControls.put(jsonControl); } // name check } // control class check } // other page or visibility check // run for any child controls getOtherPageControls(rapidServlet, jsonControls, control.getChildControls(), includePageVisibiltyControls, includeFromDialogue); } } } // uses the above iterative method to return an object with flat array of controls in this page that can be used from other pages, for use in the designer public JSONArray getOtherPageComponents(RapidHttpServlet rapidServlet, boolean includePageVisibiltyControls, boolean includeFromDialogue) throws JSONException { // the list of controls we're about to return JSONArray controls = new JSONArray(); // start building the array using the page 
controls getOtherPageControls(rapidServlet, controls, _controls, includePageVisibiltyControls, includeFromDialogue); // return the components return controls; } // used to turn either a page or control style into text for the css file public String getStyleCSS(Style style) { // start the text we are going to return String css = ""; // get the style rules ArrayList<String> rules = style.getRules(); // check we have some if (rules != null) { if (rules.size() > 0) { // add the style css = style.getAppliesTo().trim() + " {\n"; // check we have // loop and add the rules for (String rule : rules) { css += "\t" + rule.trim() + "\n"; } css += "}\n\n"; } } // return the css return css; } // an iterative function for tree-walking child controls when building the page styles public void getChildControlStyles(List<Control> controls, StringBuilder stringBuilder) { if (controls != null) { for (Control control : controls) { // look for styles ArrayList<Style> controlStyles = control.getStyles(); if (controlStyles != null) { // loop the styles for (Style style : controlStyles) { // get some nice text for the css stringBuilder.append(getStyleCSS(style)); } } // try and call on any child controls getChildControlStyles(control.getChildControls(), stringBuilder); } } } public String getAllCSS(ServletContext servletContext, Application application) { // the stringbuilder we're going to use StringBuilder stringBuilder = new StringBuilder(); // check if the page has styles if (_styles != null) { // loop for (Style style: _styles) { stringBuilder.append(getStyleCSS(style)); } } // use the iterative tree-walking function to add all of the control styles getChildControlStyles(_controls, stringBuilder); // return it with inserted parameters return application.insertParameters(servletContext, stringBuilder.toString()); } public List<String> getAllActionTypes() { List<String> actionTypes = new ArrayList<>(); List<Action> actions = getAllActions(); if (actions != null) { for (Action action : 
actions) { String actionType = action.getType(); if (!actionTypes.contains(actionType)) actionTypes.add(actionType); } } return actionTypes; } public List<String> getAllControlTypes() { List<String> controlTypes = new ArrayList<>(); controlTypes.add("page"); List<Control> controls = getAllControls(); if (controls != null) { for (Control control : controls) { String controlType = control.getType(); if (!controlTypes.contains(controlType)) controlTypes.add(controlType); } } return controlTypes; } // removes the page lock if it is more than 1 hour old public void checkLock() { // only check if there is one present if (_lock != null) { // get the time now Date now = new Date(); // get the time an hour after the lock time Date lockExpiry = new Date(_lock.getDateTime().getTime() + 1000 * 60 * 60); // if the lock expiry has passed set the lock to null; if (now.after(lockExpiry)) _lock = null; } } public void backup(RapidHttpServlet rapidServlet, RapidRequest rapidRequest, Application application, File pageFile, boolean delete) throws IOException { // get the user name String userName = Files.safeName(rapidRequest.getUserName()); // create folders to archive the pages String archivePath = application.getBackupFolder(rapidServlet.getServletContext(), delete); File archiveFolder = new File(archivePath); if (!archiveFolder.exists()) archiveFolder.mkdirs(); SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd_HHmmss"); String dateString = formatter.format(new Date()); // create a file object for the archive file File archiveFile = new File(archivePath + "/" + Files.safeName(_name + "_" + dateString + "_" + userName + ".page.xml")); // copy the existing new file to the archive file Files.copyFile(pageFile, archiveFile); } public void deleteBackup(RapidHttpServlet rapidServlet, Application application, String backupId) { // create the path String backupPath = application.getBackupFolder(rapidServlet.getServletContext(), false) + "/" + backupId; // create the file File 
backupFile = new File(backupPath); // delete Files.deleteRecurring(backupFile); } public long save(RapidHttpServlet rapidServlet, RapidRequest rapidRequest, Application application, boolean backup) throws JAXBException, IOException { // create folders to save the pages String pagePath = application.getConfigFolder(rapidServlet.getServletContext()) + "/pages"; File pageFolder = new File(pagePath); if (!pageFolder.exists()) pageFolder.mkdirs(); // create a file object for the new file File newFile = new File(pagePath + "/" + Files.safeName(getName() + ".page.xml")); // if we want a backup and the new file already exists it needs archiving if (backup && newFile.exists()) backup(rapidServlet, rapidRequest, application, newFile, false); // create a file for the temp file File tempFile = new File(pagePath + "/" + Files.safeName(getName() + "-saving.page.xml")); // update the modified by and date _modifiedBy = rapidRequest.getUserName(); _modifiedDate = new Date(); // get a buffered writer for our page with UTF-8 file format BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(tempFile), "UTF-8")); try { // marshall the page object into the temp file rapidServlet.getMarshaller().marshal(this, bw); } catch (JAXBException ex) { // close the file writer bw.close(); // re throw the exception throw ex; } // close the file writer bw.close(); // copy the tempFile to the newFile Files.copyFile(tempFile, newFile); // store the size of the file writter long fileSize = tempFile.length(); // delete the temp file tempFile.delete(); // replace the old page with the new page application.getPages().addPage(this, newFile, application.getIsForm()); // empty the cached page html _cachedHeadLinks = null; _cachedHeadCSS = null; _cachedHeadReadyJS = null; _cachedHeadJS = null; // empty the cached action types _actionTypes = null; // empty the cached control types _controlTypes = null; // empty the page variables so they are rebuilt the next time 
application.emptyPageVariables(); return fileSize; } public void delete(RapidHttpServlet rapidServlet, RapidRequest rapidRequest, Application application) throws JAXBException, IOException { // create folders to delete the page String pagePath = application.getConfigFolder(rapidServlet.getServletContext()) + "/pages"; // create a file object for the delete file File delFile = new File(pagePath + "/" + Files.safeName(getName() + ".page.xml")); // if the new file already exists it needs archiving if (delFile.exists()) { // archive the page file backup(rapidServlet, rapidRequest, application, delFile, false); // delete the page file delFile.delete(); // remove it from the current list of pages application.getPages().removePage(_id); } // get the resources path String resourcesPath = application.getWebFolder(rapidServlet.getServletContext()); // create a file object for deleting the page css file File delCssFile = new File(resourcesPath + "/" + Files.safeName(getName() + ".css")); // delete if it exists if (delCssFile.exists()) delCssFile.delete(); // create a file object for deleting the page css file File delCssFileMin = new File(resourcesPath + "/" + Files.safeName(getName() + ".min.css")); // delete if it exists if (delCssFileMin.exists()) delCssFileMin.delete(); } // this includes functions to iteratively call any control initJavaScript and set up any event listeners private void getPageLoadLines(List<String> pageloadLines, List<Control> controls) throws JSONException { // if we have controls if (controls != null) { // loop controls for (Control control : controls) { // check for any initJavaScript to call if (control.hasInitJavaScript()) { // get any details we may have String details = control.getDetails(); // set to empty string or clean up if (details == null) { details = ""; } else { details = ", " + control.getId() + "details"; } // write an init call method with support for older controls that may not have had the init method pageloadLines.add("Init_" + 
control.getType() + "('" + control.getId() + "'" + details + ");\n"); } // check event actions if (control.getEvents() != null) { // loop events for (Event event : control.getEvents()) { // only if event is non-custom and there are actually some actions to invoke if (!event.isCustomType() && event.getActions() != null) { if (event.getActions().size() > 0) { // add any page load lines from this pageloadLines.add(event.getPageLoadJavaScript(control)); } } } } // now call iteratively for child controls (of this [child] control, etc.) if (control.getChildControls() != null) getPageLoadLines(pageloadLines, control.getChildControls()); } } } // the html for a specific resource public String getResourceHtml(Application application, Resource resource) { // assume we couldn't make the resource html String resourceHtml = null; // set the link according to the type switch (resource.getType()) { case Resource.JAVASCRIPT: if (application.getStatus() == Application.STATUS_LIVE) { try { resourceHtml = " <script type='text/javascript'>" + Minify.toString(resource.getContent(),Minify.JAVASCRIPT, "JavaScript resource " + resource.getName()) + "</script>"; } catch (IOException ex) { resourceHtml = " <script type='text/javascript'>/* Failed to minify resource " + resource.getName() + " JavaScript : " + ex.getMessage() + "*/</script>"; } } else { resourceHtml = " <script type='text/javascript'>\n" + resource.getContent() + "\n </script>"; } break; case Resource.CSS: if (application.getStatus() == Application.STATUS_LIVE) { try { resourceHtml = " <style>" + Minify.toString(resource.getContent(), Minify.CSS, "") + "</style>"; } catch (IOException ex) { resourceHtml = " <style>/* Failed to minify resource " + resource.getName() + " CSS : " + ex.getMessage() + "*/<style>"; } } else { resourceHtml = " <style>" + resource.getContent() + "<style>"; } break; case Resource.JAVASCRIPTFILE : case Resource.JAVASCRIPTLINK : resourceHtml = " <script type='text/javascript' src='" + 
resource.getContent() + "'></script>"; break; case Resource.CSSFILE : case Resource.CSSLINK : resourceHtml = " <link rel='stylesheet' type='text/css' href='" + resource.getContent() + "'></link>"; break; } // return it return resourceHtml; } // the resources for the page public String getResourcesHtml(Application application, boolean allResources) { StringBuilder stringBuilder = new StringBuilder(); // get all action types used in this page if (_actionTypes == null) _actionTypes = getAllActionTypes(); // get all control types used in this page if (_controlTypes == null) _controlTypes = getAllControlTypes(); // manage the resources links added already so we don't add twice ArrayList<String> addedResources = new ArrayList<>(); // if this application has resources add during initialisation if (application.getResources() != null) { // loop and add the resources required by this application's controls and actions (created when application loads) for (Resource resource : application.getResources()) { // if we want all the resources (for the designer) or there is a dependency for this resource if (allResources || resource.hasDependency(ResourceDependency.RAPID) || resource.hasDependency(ResourceDependency.ACTION, _actionTypes) || resource.hasDependency(ResourceDependency.CONTROL, _controlTypes)) { // the html we're hoping to get String resourceHtml = getResourceHtml(application, resource); // if we got some html and don't have it already if (resourceHtml != null && !addedResources.contains(resourceHtml)) { // append it stringBuilder.append(resourceHtml + "\n"); // remember we've added it addedResources.add(resourceHtml); } } // dependency check } // resource loop } // has resources return stringBuilder.toString(); } private void getEventJavaScriptFunction(RapidRequest rapidRequest, StringBuilder stringBuilder, Application application, Control control, Event event) { // check actions are initialised if (event.getActions() != null) { // check there are some to loop if 
(event.getActions().size() > 0) { // create actions separately to avoid redundancy StringBuilder actionStringBuilder = new StringBuilder(); StringBuilder eventStringBuilder = new StringBuilder(); // start the function name String functionName = "Event_" + event.getType() + "_"; // if this is the page (no control) use the page id, otherwise use the controlId if (control == null) { // append the page id functionName += _id; } else { // append the control id functionName += control.getId(); } // create a function for running the actions for this controls events eventStringBuilder.append("function " + functionName + "(ev) {\n"); // open a try/catch eventStringBuilder.append(" try {\n"); // get any filter javascript String filter = event.getFilter(); // if we have any add it now if (filter != null) { // only bother if not an empty string if (!"".equals(filter)) { eventStringBuilder.append(" " + filter.trim().replace("\n", "\n ") + "\n"); } } // loop the actions and produce the handling JavaScript for (Action action : event.getActions()) { try { // get the action client-side java script from the action object (it's generated there as it can contain values stored in the object on the server side) String actionJavaScript = action.getJavaScriptWithHeader(rapidRequest, application, this, control, null); // if non null if (actionJavaScript != null) { // trim it to avoid tabs and line breaks that might sneak in actionJavaScript = actionJavaScript.trim(); // only if what we got is not an empty string if (!("").equals(actionJavaScript)) { // if this action has been marked for redundancy avoidance if (action.getAvoidRedundancy()) { // add the action function to the action stringbuilder so it's before the event actionStringBuilder.append("function Action_" + action.getId() + "(ev) {\n" + " " + actionJavaScript.trim().replace("\n", "\n ") + "\n" + " return true;\n" + "}\n\n"); // add an action function call to the event string builder eventStringBuilder.append(" if (!Action_" + 
action.getId() + "(ev)) return false;\n"); //eventStringBuilder.append(" Action_" + action.getId() + "(ev);\n"); } else { // go straight into the event eventStringBuilder.append(" " + actionJavaScript.trim().replace("\n", "\n ") + "\n"); } } } } catch (Exception ex) { // print a commented message eventStringBuilder.append(" // Error creating JavaScript for " + action.getType() + " action " + action.getId() + " : " + ex.getMessage() + "\n"); } } // close the try/catch if (control == null) { // page eventStringBuilder.append(" } catch(ex) { Event_error('" + event.getType() + "',null,ex); }\n"); } else { // control eventStringBuilder.append(" } catch(ex) { Event_error('" + event.getType() + "','" + control.getId() + "',ex); }\n"); } // close event function eventStringBuilder.append("}\n\n"); // add the action functions stringBuilder.append(actionStringBuilder); // add the event function stringBuilder.append(eventStringBuilder); } } } // build the event handling page JavaScript iteratively private void getEventHandlersJavaScript(RapidRequest rapidRequest, StringBuilder stringBuilder, Application application, List<Control> controls) throws JSONException { // check there are some controls if (controls != null) { // if we're at the root of the page if (controls.equals(_controls)) { // check for page events if (_events != null) { // loop page events and get js functions for (Event event : _events) getEventJavaScriptFunction(rapidRequest, stringBuilder, application, null, event); } } for (Control control : controls) { // check event actions if (control.getEvents() != null) { // loop page events and get js functions for (Event event : control.getEvents()) getEventJavaScriptFunction(rapidRequest, stringBuilder, application, control, event); } // now call iteratively for child controls (of this [child] control, etc.) 
if (control.getChildControls() != null) getEventHandlersJavaScript(rapidRequest, stringBuilder, application, control.getChildControls()); } } } // this method produces the start of the head (which is shared by the no permission response) private String getHeadStart(RapidHttpServlet rapidServlet, Application application) { // look for the page title suffix String pageTitleSuffix = rapidServlet.getServletContext().getInitParameter("pageTitleSuffix"); // if null make default if (pageTitleSuffix == null) pageTitleSuffix = " - by Rapid"; // create start of head html and return return " <head>\n" + " <title>" + Html.escape(_title) + pageTitleSuffix + "</title>\n" + " <meta description=\"Created using Rapid - www.rapid-is.co.uk\"/>\n" + " <meta charset=\"utf-8\"/>\n" + " <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no\" />\n" + (application != null ? " <meta name=\"theme-color\" content=\"" + application.getStatusBarColour() + "\" />\n" : "" ) + " <link rel=\"icon\" href=\"favicon.ico\"></link>\n"; } // this private method produces the head of the page which is often cached, if resourcesOnly is true only page resources are included which is used when sending no permission private String getHeadLinks(RapidHttpServlet rapidServlet, Application application, boolean isDialogue) throws JSONException { // create a string builder containing the head links StringBuilder stringBuilder = new StringBuilder(getHeadStart(rapidServlet, application)); // if you're looking for where the jquery link is added it's the first resource in the page.control.xml file stringBuilder.append(" " + getResourcesHtml(application, false).trim() + "\n"); // add a JavaScript block with important global variables - this removed by the pagePanel loader and navigation action when showing dialogues, by matching to the various variables so be careful changing anything below stringBuilder.append(" <script type='text/javascript'>\n"); if (application 
!= null) { stringBuilder.append("var _appId = '" + application.getId() + "';\n"); stringBuilder.append("var _appVersion = '" + application.getVersion() + "';\n"); } stringBuilder.append("var _pageId = '" + _id + "';\n"); stringBuilder.append("var _pageName = '" + _name.replace("'", "\\'") + "';\n"); stringBuilder.append("var _pageTitle = '" + _title.replace("'", "\\'") + "';\n"); // this flag indicates if the Rapid Mobile client is regaining the foreground stringBuilder.append("var _mobileResume = false;\n"); // this flag indicates if any controls are loading asynchronously and the page load method can't be called stringBuilder.append("var _loadingControls = 0;\n"); stringBuilder.append("var _loadingPages = [];\n"); stringBuilder.append(" </script>\n"); return stringBuilder.toString(); } // this private method writes JavaScript specific to the user private void writeUserJS(Writer writer, RapidRequest rapidRequest, Application application, User user, Boolean download) throws RapidLoadingException, IOException, JSONException { // open js writer.write(" <script type='text/javascript'>\n"); // if download if (download) { // just write RapidMobile //writer.write("var _userName = 'RapidMobile';\n"); // print user - the above is turned off for now until enough people download Rapid Mobile 2.4.1.3 which can send the authenticated user if (user != null) writer.write("var _userName = '" + user.getName() + "';\n"); } else { // print user if (user != null) writer.write("var _userName = '" + user.getName() + "';\n"); // get app page variables List<String> pageVariables = application.getPageVariables(rapidRequest.getRapidServlet().getServletContext()); // if we got some if (pageVariables != null) { // prepare json to hold page variables JSONObject jsonPageVariables = new JSONObject(); // loop them for (String pageVariable : pageVariables) { // null safety if (pageVariable != null) { // look for a value in the session String value = (String) 
rapidRequest.getSessionAttribute(pageVariable);
					// if we got one add it
					if (value != null) jsonPageVariables.put(pageVariable, value);
				}
			}
			// write page variables
			writer.write("var _pageVariables_" + _id + " = " + jsonPageVariables + ";\n");
		}
	}
	// close js
	writer.write(" </script>\n");
}

// this private method produces the page CSS for the head, minified when the application status is live
// NOTE(review): isDialogue is not used in this method - presumably kept for signature consistency with the other getHead* methods; confirm before removing
private String getHeadCSS(RapidRequest rapidRequest, Application application, boolean isDialogue) throws JSONException {
	// create an empty string builder
	StringBuilder stringBuilder = new StringBuilder();
	// fetch all page control styles
	String pageCss = getAllCSS(rapidRequest.getRapidServlet().getServletContext(), application);
	// only if there is some
	if (pageCss.length() > 0) {
		// open style blocks
		stringBuilder.append(" <style>\n");
		// if live we're going to try and minify
		if (application.getStatus() == Application.STATUS_LIVE) {
			try {
				// get string to itself minified
				pageCss = Minify.toString(pageCss, Minify.CSS, "Page head CSS");
			} catch (IOException ex) {
				// add error and resort to unminified
				pageCss = "\n/*\n\n Failed to minify the css : " + ex.getMessage() + "\n\n*/\n\n" + pageCss;
			}
		} else {
			// prefix with minify message
			pageCss = "\n/* The code below is minified for live applications */\n\n" + pageCss;
		}
		// add it to the page
		stringBuilder.append(pageCss);
		// close the style block
		stringBuilder.append(" </style>\n");
	}
	// return it
	return stringBuilder.toString();
}

// this private method produces the dynamic $(document).ready JavaScript for the page - control details, form value lines, and page event bootstrapping
private String getHeadReadyJS(RapidRequest rapidRequest, Application application, boolean isDialogue, FormAdapter formAdapter) throws JSONException {
	// make a new string builder just for the js (so we can minify it independently)
	StringBuilder jsStringBuilder =
new StringBuilder(); // add an extra line break for non-live applications if (application.getStatus() != Application.STATUS_LIVE) jsStringBuilder.append("\n"); // get all controls List<Control> pageControls = getAllControls(); // if we got some if (pageControls != null) { // loop them for (Control control : pageControls) { // get the details String details = control.getDetails(); // check if null if (details != null) { // create a gloabl variable for it's details jsStringBuilder.append("var " + control.getId() + "details = " + details + ";\n"); } } // add a line break again if we printed anything if (jsStringBuilder.length() > 0) jsStringBuilder.append("\n"); } // initialise the form controls that need their values added in the dynamic part of the page script _formControlValues = new ArrayList<>(); // get all actions List<Action> actions = getAllActions(); // loop them for (Action action : actions) { // if form type if ("form".equals(action.getType())) { // get the action type String type = action.getProperty("actionType"); // if value copy if ("val".equals(type)) { // get control id String controlId = action.getProperty("dataSource"); // add to collection if all in order if (controlId != null) _formControlValues.add(controlId); } else if ("sub".equals(type)) { // add sub as id _formControlValues.add("sub"); } else if ("err".equals(type)) { // add err as id _formControlValues.add("err"); } else if ("res".equals(type)) { // add res as id _formControlValues.add("res"); } } } // initialise our pageload lines collections _pageloadLines = new ArrayList<>(); // get any control initJavaScript event listeners into he pageloadLine (goes into $(document).ready function) getPageLoadLines(_pageloadLines, _controls); // get a synchronised list to avoid concurrency exception in sort List<String> pageLoadLines = Collections.synchronizedList(_pageloadLines); // synchronised block for sorting in thread-safe manner synchronized (this) { // sort the page load lines 
Collections.sort(pageLoadLines, new Comparator<String>() {
			@Override
			public int compare(String l1, String l2) {
				// null/empty lines sort first; when BOTH sides are null/empty we must return 0 -
				// the original returned -1 for both compare(a,b) and compare(b,a) in that case,
				// which violates the Comparator contract and can make TimSort throw
				// "Comparison method violates its general contract!"
				boolean e1 = (l1 == null || l1.isEmpty());
				boolean e2 = (l2 == null || l2.isEmpty());
				if (e1 || e2) return Boolean.compare(e2, e1);
				// otherwise order by first character, descending (unchanged behaviour)
				return Character.compare(l2.charAt(0), l1.charAt(0));
			}
		});
	}
	// if there is a form adapter in place
	if (formAdapter != null) {
		// add a line to set any form values before the load event is run
		pageLoadLines.add("Event_setFormValues($.Event('setValues'));\n");
		// add an init form function - in extras.js
		pageLoadLines.add("Event_initForm('" + _id + "');\n");
	}
	// check for page events (this is here so all listeners are registered by now) and controls (there should not be none but nothing happens without them)
	if (_events != null && _controls != null) {
		// loop page events
		for (Event event : _events) {
			// only if there are actually some actions to invoke
			if (event.getActions() != null) {
				if (event.getActions().size() > 0) {
					// page is a special animal so we need to do each of it's event types differently
					if ("pageload".equals(event.getType())) {
						// call the page load if safe to do so - controls with asynchronous loading will need to check and call this method themselves
						pageLoadLines.add("if (!_mobileResume) { if (_loadingControls < 1) { Event_pageload_" + _id + "($.Event('pageload')) } else { _loadingPages.push('" + _id + "');} };\n");
					}
					// resume is also a special animal
					if ("resume".equals(event.getType())) {
						// fire the resume event immediately if there is no rapidMobile (it will be done by the Rapid Mobile app if present)
						pageLoadLines.add("if (!window['_rapidmobile']) Event_resume_" + _id + "($.Event('resume'));\n");
					}
					// reusable action is only invoked via reusable actions on other events - there is no listener
				}
			}
		}
	}
	// if there is a form adapter in place
	if (formAdapter != null) {
		// add a line to check the form now all load events have been run
		pageLoadLines.add("Event_checkForm();\n");
	}
	// if this is not a dialogue or there are any load lines
	if
(!isDialogue || pageLoadLines.size() > 0) { // open the page loaded function jsStringBuilder.append("$(document).ready( function() {\n"); // add a try jsStringBuilder.append(" try {\n"); // print any page load lines such as initialising controls for (String line : pageLoadLines) jsStringBuilder.append(" " + line); // close the try jsStringBuilder.append(" } catch(ex) { $('body').html(ex.message || ex); }\n"); // after 200 milliseconds show and trigger a window resize for any controls that might be listening (this also cuts out any flicker), we also call focus on the elements we marked for focus while invisible (in extras.js) jsStringBuilder.append(" window.setTimeout( function() {\n $(window).resize();\n $('body').css('visibility','visible');\n $('[data-focus]').focus();\n }, 200);\n"); // end of page loaded function jsStringBuilder.append("});\n\n"); } // return it return jsStringBuilder.toString(); } // this private method produces the head of the page which is often cached, if resourcesOnly is true only page resources are included which is used when sending no permission private String getHeadJS(RapidRequest rapidRequest, Application application, boolean isDialogue) throws JSONException { // a string builder StringBuilder stringBuilder = new StringBuilder(); // make a new string builder just for the js (so we can minify it independently) StringBuilder jsStringBuilder = new StringBuilder(); // get all actions in the page List<Action> pageActions = getAllActions(); // only proceed if there are actions in this page if (pageActions != null) { // loop the list of all actions looking for redundancy for (Action action : pageActions) { // if this action adds redundancy to any others if (action.getRedundantActions() != null) { // loop them for (String actionId : action.getRedundantActions()) { // try and find the action Action redundantAction = getAction(actionId); // if we got one if (redundantAction != null) { // update the redundancy avoidance flag 
redundantAction.avoidRedundancy(true); } } } // redundantActions != null } // action loop to check redundancy // loop the list of actions for (Action action : pageActions) { try { // look for any javascript to print into the page that this action may have String actionPageJavaScript = action.getPageJavaScript(rapidRequest, application, this, null); // print it here if so if (actionPageJavaScript != null) jsStringBuilder.append(actionPageJavaScript.trim() + "\n\n"); } catch (Exception ex) { // print the exception as a comment jsStringBuilder.append("// Error producing page JavaScript : " + ex.getMessage() + "\n\n"); } } // action loop // add event handlers, staring at the root controls getEventHandlersJavaScript(rapidRequest, jsStringBuilder, application, _controls); } // page actions check // if there was any js if (jsStringBuilder.length() > 0) { // check the application status if (application.getStatus() == Application.STATUS_LIVE) { try { // minify the js before adding stringBuilder.append(Minify.toString(jsStringBuilder.toString(),Minify.JAVASCRIPT, "Page JavaScript")); } catch (IOException ex) { // add the error stringBuilder.append("\n\n/* Failed to minify JavaScript : " + ex.getMessage() + " */\n\n"); // add the js as is stringBuilder.append(jsStringBuilder); } } else { // add the js as is stringBuilder.append("/* The code below is minified for live applications */\n\n" + jsStringBuilder.toString().trim() + "\n"); } } // get it into a string and insert any parameters String headJS = application.insertParameters(rapidRequest.getRapidServlet().getServletContext(), stringBuilder.toString()); // return it return headJS; } // this function interatively checks permission and writes control role html private void writeRoleControlHtml(Writer writer, List<String> userRoles, RoleControlHtml roleControlHtml) throws IOException { // if we have a roleControlHtml if (roleControlHtml != null) { // assume we haven't passed boolean passed = false; // check if it has roles if 
(roleControlHtml.getRoles() == null) { // no roles it passes passed = true; } else { // loop the control roles first - likely to be smaller for (String controlRole : roleControlHtml.getRoles()) { // loop the user roles for (String userRole : userRoles) { // if they match if (controlRole.equalsIgnoreCase(userRole)) { // we've passed passed = true; // don't check any further break; } } // don't loop further if passed if (passed) break; } } // if we passed if (passed) { // write the start html if there is any if (roleControlHtml.getStartHtml() != null) writer.write(roleControlHtml.getStartHtml()); // if there are children if (roleControlHtml.getChildren() != null) { // loop the children for (RoleControlHtml childRoleControlHtml : roleControlHtml.getChildren()) { // print them writeRoleControlHtml(writer, userRoles, childRoleControlHtml); } } // write the end html if there is any if (roleControlHtml.getEndHtml() != null) writer.write(roleControlHtml.getEndHtml()); } // control roles check } // roleControlHtml check } // this routine produces the entire page public void writeHtml(RapidHttpServlet rapidServlet, HttpServletResponse response, RapidRequest rapidRequest, Application application, User user, Writer writer, boolean designerLink, boolean download) throws JSONException, IOException, RapidLoadingException { // get the servlet context ServletContext servletContext = rapidServlet.getServletContext(); // this doctype is necessary (amongst other things) to stop the "user agent stylesheet" overriding styles writer.write("<!DOCTYPE html>\n"); // open the html writer.write("<html lang=\"en\">\n"); // get any theme Theme theme = application.getTheme(servletContext); // check for undermaintenance status if (application.getStatus() == Application.STATUS_MAINTENANCE) { rapidServlet.writeMessage(writer, "Under maintenance", "This application is currently under maintenance. 
Please try again in a few minutes."); } else { // get the security SecurityAdapter security = application.getSecurityAdapter(); // get any form adapter FormAdapter formAdapter = application.getFormAdapter(); // assume the user has permission to access the page boolean gotPagePermission = true; try { // if this page has roles if (_roles != null) { if (_roles.size() > 0) { // check if the user has any of them gotPagePermission = security.checkUserRole(rapidRequest, _roles); } } } catch (SecurityAdapaterException ex) { rapidServlet.getLogger().error("Error checking for page roles", ex); } // check that there's permission if (gotPagePermission) { // whether we're rebulding the page for each request boolean rebuildPages = Boolean.parseBoolean(servletContext.getInitParameter("rebuildPages")); // check whether or not we rebuild if (rebuildPages) { // get fresh head links writer.write(getHeadLinks(rapidServlet, application, !designerLink)); // write the user-specific JS writeUserJS(writer, rapidRequest, application, user, download); // get fresh js and css writer.write(getHeadCSS(rapidRequest, application, !designerLink)); // open the script writer.write(" <script type='text/javascript'>\n"); // write the ready JS writer.write(getHeadReadyJS(rapidRequest, application, !designerLink, formAdapter)); } else { // rebuild any uncached if (_cachedHeadLinks == null) _cachedHeadLinks = getHeadLinks(rapidServlet, application, !designerLink); if (_cachedHeadCSS == null) _cachedHeadCSS = getHeadCSS(rapidRequest, application, !designerLink); if (_cachedHeadReadyJS == null) _cachedHeadReadyJS = getHeadReadyJS(rapidRequest, application, !designerLink, formAdapter); // get the cached head links writer.write(_cachedHeadLinks); // write the user-specific JS writeUserJS(writer, rapidRequest, application, user, download); // get the cached head js and css writer.write(_cachedHeadCSS); // open the script writer.write(" <script type='text/javascript'>\n"); // write the ready JS 
writer.write(_cachedHeadReadyJS); } // if there is a form if (formAdapter != null) { // set no cache on this page RapidFilter.noCache(response); // a placeholder for any form id String formId = null; // a placeholder for any form values StringBuilder formValues = null; // first do the actions that could result in an exception try { // get the form details UserFormDetails formDetails = formAdapter.getUserFormDetails(rapidRequest); // if we got some if (formDetails != null) { // set the form id formId = formDetails.getId(); // create the values string builder formValues = new StringBuilder(); // set whether submitted formValues.append("var _formSubmitted = " + formDetails.getSubmitted() + ";\n\n"); // start the form values object (to supply previous form values) formValues.append("var _formValues = {"); // if form control values to set if (_formControlValues != null) { // loop then for (int i = 0; i < _formControlValues.size(); i++) { // get the control id String id = _formControlValues.get(i); // place holder for the value String value = null; // some id's are special if ("id".equals(id)) { // the submission message value = formDetails.getId(); } else if ("sub".equals(id)) { // the submission message value = formDetails.getSubmitMessage(); } else if ("err".equals(id)) { // the submission message value = formDetails.getErrorMessage(); } else if ("res".equals(id)) { // the submission message value = formDetails.getPassword(); } else { // lookup the value value = formAdapter.getFormControlValue(rapidRequest, formId, id, false); } // if we got one if (value != null) { // escape it and enclose it value = value.replace("\\", "\\\\").replace("'", "\\'").replace("\r\n", "\\n").replace("\n", "\\n").replace("\r", ""); // add to object formValues.append("'" + id + "':'" + value + "'"); // add comma formValues.append(","); } } } // close it formValues.append("'id':_formId};\n\n"); // start the set form values function formValues.append("function Event_setFormValues(ev) {"); // 
get any form page values FormPageControlValues formControlValues = formAdapter.getFormPageControlValues(rapidRequest, formId, _id); // if there are any if (formControlValues != null) { if (formControlValues.size() > 0) { // add a line break formValues.append("\n"); // loop the values for (FormControlValue formControlValue : formControlValues) { // get the control Control pageControl = getControl(formControlValue.getId()); // if we got one if (pageControl != null) { // get the value String value = formControlValue.getValue(); // assume using setData String function = "setData_" + pageControl.getType(); // get the json properties for the control JSONObject jsonControl = rapidServlet.getJsonControl(pageControl.getType()); // if we got some if (jsonControl != null) { // look for the formSetRunTimePropertyType String formSetRunTimePropertyType = jsonControl.optString("formSetRunTimePropertyType", null); // if we got one update the function to use it if (formSetRunTimePropertyType != null) function = "setProperty_" + pageControl.getType() + "_" + formSetRunTimePropertyType; } // get any control details String details = pageControl.getDetailsJavaScript(application, this); // if null update to string if (details == null) details = null; // if there is a value use the standard setData for it (this might change to something more sophisticated at some point) if (value != null) formValues.append(" if (window[\""+ function + "\"]) " + function + "(ev, '" + pageControl.getId() + "', null, " + details + ", '" + value.replace("\\", "\\\\").replace("'", "\\'").replace("\r\n", "\\n").replace("\n", "\\n").replace("\r", "") + "');\n"); } } } } // close the function formValues.append("};\n\n"); // write the form values writer.write(formValues.toString()); } else { // set whether submitted writer.write("var _formSubmitted = false;\n\n"); // a dummy setFormValues method writer.write("function Event_setFormValues(ev) {}\n\n"); } // write the form id into the page - not necessary for 
dialogues if (designerLink) writer.write("var _formId = '" + formId + "';\n\n"); } catch (Exception ex) { // log the error rapidServlet.getLogger().error("Error create page form values", ex); // write a dummy Event_setFormValues with alert and redirect to start page writer.write("var _formSubmitted = false;\nfunction Event_setFormValues() {\n alert('Error with form values : " + ex.getMessage().replace("'", "\\'") + "');\n window.location.href='~?a=" + application.getId() + "'\n};\n\n"); } } if (rebuildPages) { // write the ready JS writer.write(getHeadJS(rapidRequest, application, !designerLink)); } else { // get the rest of the cached JS if (_cachedHeadJS == null) _cachedHeadJS = getHeadJS(rapidRequest, application, !designerLink); // write the ready JS writer.write(_cachedHeadJS); } // close the script writer.write("\n </script>\n"); // close the head writer.write(" </head>\n"); // start the body writer.write(" <body id='" + _id + "' style='visibility:hidden;'" + (_bodyStyleClasses == null ? "" : " class='" + _bodyStyleClasses + "'") + ">\n"); // if there was a theme and we're not hiding the header / footer if (theme != null && !_hideHeaderFooter) { // get any header html String headerHtml = theme.getHeaderHtml(); // write the header html if there is something to write if (headerHtml != null) if (headerHtml.length() > 0) writer.write(headerHtml); } // start the form if in use (but not for dialogues and other cases where the page is partial) if (formAdapter != null && designerLink) { writer.write(" <form id='" + _id + "_form' action='~?a=" + application.getId() + "&v=" + application.getVersion() + "&p=" + _id + "' method='POST'" + (application.getFormDisableAutoComplete() ? 
" autocomplete='off'" : "") + ">\n"); writer.write(" <input type='hidden' name='csrfToken' value='" + rapidRequest.getCSRFToken() + "' />\n"); writer.write(" <input type='hidden' id='" + _id + "_hiddenControls' name='" + _id + "_hiddenControls' />\n"); } // a reference for the body html String bodyHtml = null; // check we have _rolesHtml - this has been depreciated since 2.3.5.3 but older page files may still have it this way if (_rolesHtml != null) { // get the users roles List<String> userRoles = user.getRoles(); if (userRoles != null) { // loop each roles html entry for (RoleHtml roleHtml : _rolesHtml) { // get the roles from this combination List<String> roles = roleHtml.getRoles(); // assume not roles are required (this will be updated if roles are present) int rolesRequired = 0; // keep a running count for the roles we have int gotRoleCount = 0; // if there are roles to check if (roles != null) { // update how many roles we need our user to have rolesRequired = roles.size(); // check whether we need any roles and that our user has any at all if (rolesRequired > 0) { // check the user has as many roles as this combination requires if (userRoles.size() >= rolesRequired) { // loop the roles we need for this combination for (String role : roleHtml.getRoles()) { // check this role if (userRoles.contains(role)) { // increment the got role count gotRoleCount ++; } // increment the count of required roles } // loop roles } // user has enough roles to bother checking this combination } // if any roles are required } // add roles to check // if we have all the roles we need if (gotRoleCount == rolesRequired) { // use this html bodyHtml = roleHtml.getHtml(); // no need to check any further break; } } // html role combo loop } // got userRoles } else { // check if this page has role control html if (_roleControlHtml == null) { // check for _htmlBody if (_htmlBody == null) { // if _htmlBody is null, which happens for new pages which have not been saved yet, set to empty 
// set to empty string to avoid no permission later
bodyHtml = "";
					} else {
						// set this to the whole html body
						bodyHtml = _htmlBody;
					}
				} else {
					// get the users roles
					List<String> userRoles = user.getRoles();
					// if the user has roles
					if (userRoles != null) {
						// if the application is live
						if (application.getStatus() == Application.STATUS_LIVE) {
							// write straight to the page writer
							writeRoleControlHtml(writer, userRoles, _roleControlHtml);
							// set bodyHtml to empty string indicating we had permission
							bodyHtml = "";
						} else {
							// make a StringWriter
							StringWriter swriter = new StringWriter();
							// write straight to the StringWriter
							writeRoleControlHtml(swriter, userRoles, _roleControlHtml);
							// set bodyHtml to what was written so it will be pretty printed
							bodyHtml = swriter.toString();
						}
					} // user has roles
				} // this page has role control html
			} // if our users have roles and we have different html for roles
			// check if we got any body html via the roles
			if (bodyHtml == null) {
				// didn't get any body html, show no permission (typo fix: message previously read "permssion")
				rapidServlet.writeMessage(writer, "No permission", "You do not have permission to view this page");
			} else {
				// check there is something to write - will be an empty string if already written by newer user roles code
				if (bodyHtml.length() > 0) {
					// check the status of the application
					if (application.getStatus() == Application.STATUS_DEVELOPMENT) {
						// pretty print
						writer.write(Html.getPrettyHtml(bodyHtml.trim()));
					} else {
						// no pretty print
						writer.write(bodyHtml.trim());
					}
				}
				// close the form
				if (formAdapter != null && designerLink) writer.write(" </form>\n");
			} // got body html check
		} else {
			// no page permission (typo fix: message previously read "permssion")
			rapidServlet.writeMessage(writer, "No permission", "You do not have permission to view this page");
		} // page permission check
		try {
			// whether to include the designer link - dialogues and files in the .zip do not so no need to even check permission
			if (designerLink) {
				// assume not admin link
				boolean adminLinkPermission = false;
				// check for the design role,
super is required as well if the rapid app if ("rapid".equals(application.getId())) { if (security.checkUserRole(rapidRequest, Rapid.DESIGN_ROLE) && security.checkUserRole(rapidRequest, Rapid.SUPER_ROLE)) adminLinkPermission = true; } else { if (security.checkUserRole(rapidRequest, Rapid.DESIGN_ROLE)) adminLinkPermission = true; } // if we had the admin link if (adminLinkPermission) { // create string builder for the links StringBuilder designLinkStringBuilder = new StringBuilder(); // create a string builder for the jquery StringBuilder designLinkJQueryStringBuilder = new StringBuilder(); // loop all of the controls for (Control control : getAllControls()) { // get the json control definition JSONObject jsonControl = rapidServlet.getJsonControl(control.getType()); // definition check if ( jsonControl != null) { // look for the design link jquery String designLinkJQuery = jsonControl.optString("designLinkJQuery", null); // if we got any design link jquery if (designLinkJQuery != null) { // get the image title from the control name String title = control.getName(); // escape any apostrophes if (title != null) title = title.replace("'", "&apos;"); // add the link into the string builder designLinkStringBuilder.append("<a id='designLink_" + control.getId() + "' data-id='" + control.getId() + "' href='#'><img src='" + jsonControl.optString("image","images/penknife_24x24.png") + "' title='" + title + "'/></a>\n"); // trim the JQuery designLinkJQuery = designLinkJQuery.trim(); // start with a . if not if (!designLinkJQuery.startsWith(".")) designLinkJQuery = "." + designLinkJQuery; // end with ; if not if (!designLinkJQuery.endsWith(";")) designLinkJQuery += ";"; // add the jquery after the object reference designLinkJQueryStringBuilder.append(" $('#designLink_" + control.getId() + "')" + designLinkJQuery.replace("\n", "\n ") + "\n"); } } } // using attr href was the weirdest thing. 
Some part of jQuery seemed to be setting the url back to v=1&p=P1 when v=2&p=P2 was printed in the html // we also now use a JavaScript function getDesignerUrl in designlinks.js to make the url whilst checking for pretty urls writer.write( "<link rel='stylesheet' type='text/css' href='styles/designlinks.css'></link>\n" + "<script type='text/javascript' src='scripts/designlinks.js'></script>\n" + "<div id='designShow'></div>\n" + "<div id='designLinks' style='display:none;'>\n" + "<a id='designLink' href='#'><img src='images/tool.svg' title='Open Rapid Design'/></a>\n" + "<a id='designLinkNewTab' style='padding:5px;' href='#'><img src='images/right.svg' title='Open Rapid Design in a new tab'/></a>\n" + designLinkStringBuilder.toString() + "</div>\n" + "<script type='text/javascript'>\n" + "/* designLink */\n" + "var _onDesignLink = false;\n" + "var _onDesignTable = false;\n" + "var designerUrl = getDesignerUrl();\n" + "$(document).ready( function() {\n" + " $('#designShow').mouseover( function(ev) {\n $('#designLink').attr('href', designerUrl);\n $('#designLinkNewTab').attr('target','_blank').attr('href', designerUrl);\n $('#designLinks').show(); _onDesignLink = true;\n });\n" + " $('#designLinks').mouseleave( function(ev) {\n _onDesignLink = false;\n setTimeout( function() {\n if(!_onDesignLink && !_onDesignTable) $('#designLinks').fadeOut();\n }, 1000);\n });\n" + " $('#designLinks').mouseover(function(ev) {\n _onDesignLink = true;\n });\n" + " $('html').click(function(){\n if(!_onDesignLink && !_onDesignTable) {\n $('div.designData').fadeOut();\n $('#designLinks').fadeOut();\n }\n });\n" + designLinkJQueryStringBuilder.toString() + "});\n" + "</script>\n"); } } } catch (SecurityAdapaterException ex) { rapidServlet.getLogger().error("Error checking for the designer link", ex); } // design permssion check } // design link check // if there was a theme and we're not hiding the header / footer if (theme != null && !_hideHeaderFooter) { // get any header html String 
footerHtml = theme.getFooterHtml();
			// write the footer html if there is something to write
			if (footerHtml != null) if (footerHtml.length() > 0) writer.write(footerHtml);
		}
		// add the remaining elements
		writer.write(" </body>\n</html>");
	}

	// gets the value of a condition used in the page visibility rules
	// "System.*" ids resolve to application/page/request values, "Session.*" ids to session attributes,
	// anything else is treated as a control id and looked up via the form adapter (hidden values excluded)
	private String getConditionValue(RapidRequest rapidRequest, String formId, FormAdapter formAdapter, Application application, Value value) throws Exception {
		String[] idParts = value.getId().split("\\.");
		if (idParts[0].equals("System")) {
			// just check that there is a type
			if (idParts.length > 1) {
				// get the type from the second part
				String type = idParts[1];
				// the available system values are specified above getDataOptions in designer.js
				if ("app id".equals(type)) {
					// the application id
					return application.getId();
				} else if ("app version".equals(type)) {
					// the application version
					return application.getVersion();
				} else if ("page id".equals(type)) {
					// the page
					return _id;
				} else if ("mobile".equals(type)) {
					// whether rapid mobile is present - always false when evaluated server-side
					return "false";
				} else if ("online".equals(type)) {
					// whether we are online (presumed true if no rapid mobile)
					return "true";
				} else if ("user".equals(type) || "user name".equals(idParts[1])) {
					// the current user name
					return rapidRequest.getUserName();
				} else if ("field".equals(type)) {
					// pass the field as a value
					return value.getField();
				} else {
					// pass through as literal
					return idParts[1];
				}
			} else {
				// no type part - return null
				return null;
			}
		} else if (idParts[0].equals("Session")) {
			// if there are enough id parts
			if (idParts.length > 1) {
				return (String) rapidRequest.getSessionAttribute(idParts[1]);
			} else {
				return null;
			}
		} else {
			// get the id of the value object (should be a control Id)
			String valueId = value.getId();
			// retrieve and return it from the form adapater, but not if it's hidden
			return formAdapter.getFormControlValue(rapidRequest, formId, valueId, true);
		}
	}

	// return a boolean for page
// return a boolean for page visibility - forms only: simple/save/resume pages are always hidden,
// submitted/error pages require the matching form state, otherwise the visibility conditions decide
public boolean isVisible(RapidRequest rapidRequest, Application application, UserFormDetails userFormDetails) throws Exception {
	// get a logger
	Logger logger = rapidRequest.getRapidServlet().getLogger();
	// get the form adapter
	FormAdapter formAdapter = application.getFormAdapter();
	// if we have a form adapter and visibility conditions
	if (formAdapter == null) {
		// no form adapter always visible
		logger.debug("Page " + _id + " no form adapter, always visible ");
		return true;
	} else {
		if (userFormDetails == null) {
			// no user form details
			logger.debug("No user form details");
			return false;
		} else if (_simple) {
			// simple page always invisible on forms
			logger.debug("Page " + _id + " is a simple page, always hidden on forms");
			return false;
		} else if (_formPageType == FORM_PAGE_TYPE_SAVE) {
			// save page always invisible on forms
			logger.debug("Page " + _id + " is a save page, always hidden on forms");
			return false;
		} else if (_formPageType == FORM_PAGE_TYPE_RESUME) {
			// resume page always invisible on forms
			logger.debug("Page " + _id + " is a resume page, always hidden on forms");
			return false;
		} else if (_formPageType == FORM_PAGE_TYPE_SUBMITTED && !userFormDetails.getShowSubmitPage()) {
			// requests for submitted page are denied if show submission is not true
			logger.debug("Page " + _id + " is a submitted page but the form has not been submitted yet");
			return false;
		} else if (_formPageType == FORM_PAGE_TYPE_ERROR && !userFormDetails.getError()) {
			// requests for error page are denied if no error
			logger.debug("Page " + _id + " is an error page but the form has not had an error yet");
			return false;
		} else if (_visibilityConditions == null) {
			// no _visibilityConditions always visible
			logger.debug("Page " + _id + " _visibilityConditions is null, always visible on forms");
			return true;
		} else if (_visibilityConditions.size() == 0) {
			// no _visibilityConditions always visible
			logger.debug("Page " + _id + " _visibilityConditions size is zero, always visible on
forms"); return true; } else { // log logger.trace("Page " + _id + " " + _visibilityConditions.size() + " visibility condition(s) " + " : " + _conditionsType); // assume we have failed all conditions boolean pass = false; // loop them for (Condition condition : _visibilityConditions) { // assume we have failed this condition pass = false; logger.trace("Page " + _id + " visibility condition " + " : " + condition); String value1 = getConditionValue(rapidRequest, userFormDetails.getId(), formAdapter, application, condition.getValue1()); logger.trace("Value 1 = " + value1); String value2 = getConditionValue(rapidRequest, userFormDetails.getId(), formAdapter, application, condition.getValue2()); logger.trace("Value 2 = " + value2); String operation = condition.getOperation(); if (value1 == null) value1 = ""; if (value2 == null) value2 = ""; // pass is updated from false to true if conditions match if ("==".equals(operation)) { if (value1.equals(value2)) pass = true; } else if ("!=".equals(operation)) { if (!value1.equals(value2)) pass = true; } else { // the remaining conditions all work with numbers and must not be empty strings if (value1.length() > 0 && value2.length() > 0) { try { // convert to floats float num1 = Float.parseFloat(value1); float num2 = Float.parseFloat(value2); // check the conditions if (">".equals(operation)) { if ((num1 > num2)) pass = true; } else if (">=".equals(operation)) { if ((num1 >= num2)) pass = true; } else if ("<".equals(operation)) { if ((num1 < num2)) pass = true; } else if ("<=".equals(operation)) { if ((num1 <= num2)) pass = true; } } catch (Exception ex) { // something went wrong - generally in the conversion - return false logger.error("Error assessing page visibility page " + _id + " " + condition); } // try } // empty string check } // operation check // log logger.debug("Visibility condition for page " + _id + " : " + value1 + " " + condition.getOperation()+ " " + value2 + " , (" + condition + ") result is " + pass); // for 
the fast fail check whether we have an or if ("or".equals(_conditionsType)) { // if the conditions are or and we've just passed, we can stop checking further as we've passed in total if (pass) break; } else { // if the conditions are and and we've just failed, we can stop checking further as we've failed in total if (!pass) break; } } // condition loop // log result logger.debug("Page " + _id + " visibility check, " + _visibilityConditions.size() + " conditions, pass = " + pass); // if we failed set the page values to null if (!pass) formAdapter.setFormPageControlValues(rapidRequest, userFormDetails.getId(), _id, null); // return the pass return pass; } // simple, conditions, condition checks } // form adapter check } // return any reCaptcha controls in the page public List<Control> getRecaptchaControls() { if (_reCaptchaControls == null) { // make a new list _reCaptchaControls = new ArrayList<>(); // loop page controls for (Control control : getAllControls()) { // if this is a recapthca add it if ("recaptcha".equals(control.getType())) { _reCaptchaControls.add(control); } } } return _reCaptchaControls; } // overrides @Override public String toString() { return "Page " + _id + " " + _name + " - " + _title; } // static methods // static function to load a new page public static Page load(ServletContext servletContext, File file) throws JAXBException, ParserConfigurationException, SAXException, IOException, TransformerFactoryConfigurationError, TransformerException { // get the logger Logger logger = (Logger) servletContext.getAttribute("logger"); // trace log that we're about to load a page logger.trace("Loading page from " + file); // open the xml file into a document Document pageDocument = XML.openDocument(file); // specify the xmlVersion as -1 int xmlVersion = -1; // look for a version node Node xmlVersionNode = XML.getChildElement(pageDocument.getFirstChild(), "XMLVersion"); // if we got one update the version if (xmlVersionNode != null) xmlVersion = 
Integer.parseInt(xmlVersionNode.getTextContent()); // if the version of this xml isn't the same as this class we have some work to do! if (xmlVersion != XML_VERSION) { // get the page name String name = XML.getChildElementValue(pageDocument.getFirstChild(), "name"); // log the difference logger.debug("Page " + name + " with version " + xmlVersion + ", current version is " + XML_VERSION); // // Here we would have code to update from known versions of the file to the current version // // check whether there was a version node in the file to start with if (xmlVersionNode == null) { // create the version node xmlVersionNode = pageDocument.createElement("XMLVersion"); // add it to the root of the document pageDocument.getFirstChild().appendChild(xmlVersionNode); } // set the xml to the latest version xmlVersionNode.setTextContent(Integer.toString(XML_VERSION)); // // Here we would use xpath to find all controls and run the Control.upgrade method // // // Here we would use xpath to find all actions, each class has it's own upgrade method so // we need to identify the class, instantiate it and call it's upgrade method // it's probably worthwhile maintaining a map of instantiated classes to avoid unnecessary re-instantiation // // save it XML.saveDocument(pageDocument, file); logger.debug("Updated " + name + " page version to " + XML_VERSION); } // get the unmarshaller from the context Unmarshaller unmarshaller = RapidHttpServlet.getUnmarshaller(); // get a buffered reader for our page with UTF-8 file format BufferedReader br = new BufferedReader( new InputStreamReader( new FileInputStream(file), "UTF-8")); // try the unmarshalling try { // unmarshall the page Page page = (Page) unmarshaller.unmarshal(br); // log that the page was loaded logger.debug("Loaded page " + page.getId() + " - " + page.getName() + " from " + file); // close the buffered reader br.close(); // return the page return page; } catch (JAXBException ex) { // close the buffered reader br.close(); // log 
that the page had an error logger.error("Error loading page from " + file); // re-throw throw ex; } } }
Updated page to allow access to background actions from dialogue
src/com/rapid/core/Page.java
Updated page to allow access to background actions from dialogue
Java
lgpl-2.1
5a7d47212e3f0c85afa89f00bc8e70e5a040cf47
0
luminwin/beast-mcmc,JifengJiang/beast-mcmc,luminwin/beast-mcmc,danieljue/beast-mcmc,luminwin/beast-mcmc,luminwin/beast-mcmc,danieljue/beast-mcmc,evolvedmicrobe/beast-mcmc,JifengJiang/beast-mcmc,JifengJiang/beast-mcmc,JifengJiang/beast-mcmc,danieljue/beast-mcmc,evolvedmicrobe/beast-mcmc,evolvedmicrobe/beast-mcmc,evolvedmicrobe/beast-mcmc,evolvedmicrobe/beast-mcmc,danieljue/beast-mcmc,luminwin/beast-mcmc,JifengJiang/beast-mcmc,danieljue/beast-mcmc
/* * MCMCCriterion.java * * Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.inference.mcmc; import dr.math.MathUtils; import dr.inference.markovchain.Acceptor; /** * This class encapsulates the acception criterion for an MCMC proposal. 
* * @author Alexei Drummond * * @version $Id: MCMCCriterion.java,v 1.12 2005/05/24 20:25:59 rambaut Exp $ */ public class MCMCCriterion implements Acceptor { protected double temperature = 1.0; public MCMCCriterion() { temperature = 1.0; } public MCMCCriterion(double t) { temperature = t; } public double getAcceptanceValue(double oldScore, double hastingsRatio) { double acceptanceValue = (MathUtils.randomLogDouble() + (oldScore * temperature) - hastingsRatio) / temperature; return acceptanceValue; } public boolean accept(double oldScore, double newScore, double hastingsRatio, double[] logr) { logr[0] = (newScore - oldScore) * temperature + hastingsRatio; // for coercedAcceptanceProbability if (logr[0] > 0) logr[0] = 0.0; boolean accept = MathUtils.randomLogDouble() < logr[0]; return accept; } public double getTemperature() { return temperature; } public void setTemperature(double temperature) { this.temperature = temperature; } }
src/dr/inference/mcmc/MCMCCriterion.java
/* * MCMCCriterion.java * * Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.inference.mcmc; import dr.math.MathUtils; import dr.inference.markovchain.Acceptor; /** * This class encapsulates the acception criterion for an MCMC proposal. 
* * @author Alexei Drummond * * @version $Id: MCMCCriterion.java,v 1.12 2005/05/24 20:25:59 rambaut Exp $ */ public class MCMCCriterion implements Acceptor { protected double temperature = 1.0; public MCMCCriterion() { temperature = 1.0; } public MCMCCriterion(double t) { temperature = t; } public double getAcceptanceValue(double oldScore, double hastingsRatio) { double acceptanceValue = (Math.log(MathUtils.nextDouble()) + (oldScore * temperature) - hastingsRatio) / temperature; return acceptanceValue; } public boolean accept(double oldScore, double newScore, double hastingsRatio, double[] logr) { logr[0] = (newScore - oldScore) * temperature + hastingsRatio; // for coercedAcceptanceProbability if (logr[0] > 0) logr[0] = 0.0; boolean accept = (Math.log(MathUtils.nextDouble()) < logr[0]); return accept; } public double getTemperature() { return temperature; } public void setTemperature(double temperature) { this.temperature = temperature; } }
use randomLogDouble
src/dr/inference/mcmc/MCMCCriterion.java
use randomLogDouble
Java
lgpl-2.1
28c66d14a45ad806b34b2b71f40dbc1974627777
0
beast-dev/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc
/* * NormalGammaPrecisionGibbsOperator.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.inference.operators; import dr.inference.distribution.*; import dr.inference.model.Parameter; import dr.inference.operators.repeatedMeasures.GammaGibbsProvider; import dr.math.MathUtils; import dr.math.distributions.Distribution; import dr.math.distributions.GammaDistribution; import dr.math.matrixAlgebra.Vector; import dr.xml.*; /** * @author Marc A. 
Suchard * @author Philippe Lemey */ public class NormalGammaPrecisionGibbsOperator extends SimpleMCMCOperator implements GibbsOperator, Reportable { public static final String OPERATOR_NAME = "normalGammaPrecisionGibbsOperator"; public static final String LIKELIHOOD = "likelihood"; public static final String PRIOR = "prior"; private static final String WORKING = "workingDistribution"; public NormalGammaPrecisionGibbsOperator(GammaGibbsProvider gammaGibbsProvider, GammaStatisticsProvider prior, double weight) { this(gammaGibbsProvider, prior, null, weight); } public NormalGammaPrecisionGibbsOperator(GammaGibbsProvider gammaGibbsProvider, GammaStatisticsProvider prior, GammaStatisticsProvider working, double weight) { this.gammaGibbsProvider = gammaGibbsProvider; this.precisionParameter = gammaGibbsProvider.getPrecisionParameter(); this.prior = prior; this.working = working; setWeight(weight); } /** * @return a short descriptive message of the performance of this operator. */ public String getPerformanceSuggestion() { return null; } public String getOperatorName() { return OPERATOR_NAME; } @Override public String getReport() { int dimTrait = precisionParameter.getDimension(); double[] obsCounts = new double[dimTrait]; double[] sumSquaredErrors = new double[dimTrait]; gammaGibbsProvider.drawValues(); for (int i = 0; i < dimTrait; i++) { final GammaGibbsProvider.SufficientStatistics statistics = gammaGibbsProvider.getSufficientStatistics(i); obsCounts[i] = statistics.observationCount; sumSquaredErrors[i] = statistics.sumOfSquaredErrors; } StringBuilder sb = new StringBuilder(OPERATOR_NAME + " report:\n"); sb.append("Observation counts:\t"); sb.append(new Vector(obsCounts)); sb.append("\n"); sb.append("Sum of squared errors:\t"); sb.append(new Vector(sumSquaredErrors)); return sb.toString(); } static class GammaParametrization implements GammaStatisticsProvider { private final double rate; private final double shape; GammaParametrization(double mean, double variance) { 
if (mean == 0) { rate = 0; shape = -0.5; // Uninformative prior } else { rate = mean / variance; shape = mean * rate; } } GammaParametrization(Distribution distribution) { this(distribution.mean(), distribution.variance()); } double getRate() { return rate; } double getShape() { return shape; } @Override public double getShape(int dim) { return getShape(); } @Override public double getRate(int dim) { return getRate(); } } private double weigh(double working, double prior) { return (1.0 - pathParameter) * working + pathParameter * prior; } public double doOperation() { gammaGibbsProvider.drawValues(); for (int dim = 0; dim < precisionParameter.getDimension(); ++dim) { final GammaGibbsProvider.SufficientStatistics statistics = gammaGibbsProvider.getSufficientStatistics(dim); double shape = pathParameter * statistics.observationCount / 2; double rate = pathParameter * statistics.sumOfSquaredErrors / 2; if (working == null) { shape += prior.getShape(dim); rate += prior.getRate(dim); } else { shape += weigh(prior.getShape(dim), prior.getShape(dim)); //TODO: shouldn't these include the working? rate += weigh(prior.getRate(dim), prior.getShape(dim)); } final double draw = MathUtils.nextGamma(shape, rate); // Gamma( \alpha + n/2 , \beta + (1/2)*SSE ) precisionParameter.setParameterValue(dim, draw); } return 0; } @Override public void setPathParameter(double beta) { if (beta < 0.0 || beta > 1.0) { throw new IllegalArgumentException("Invalid pathParameter value"); } this.pathParameter = beta; } /** * @return the number of steps the operator performs in one go. 
*/ public int getStepCount() { return 1; } public static dr.xml.XMLObjectParser PARSER = new dr.xml.AbstractXMLObjectParser() { public String getParserName() { return OPERATOR_NAME; } private void checkGammaDistribution(DistributionLikelihood distribution) throws XMLParseException { if (!((distribution.getDistribution() instanceof GammaDistribution) || (distribution.getDistribution() instanceof GammaDistributionModel))) { throw new XMLParseException("Gibbs operator assumes normal-gamma model"); } } private GammaStatisticsProvider getGammaStatisticsProvider(Object obj) throws XMLParseException { final GammaStatisticsProvider gammaStats; if (obj instanceof DistributionLikelihood) { DistributionLikelihood priorLike = (DistributionLikelihood) obj; checkGammaDistribution(priorLike); gammaStats = new GammaParametrization(priorLike.getDistribution()); } else if (obj instanceof GammaStatisticsProvider) { gammaStats = (GammaStatisticsProvider) obj; } else { throw new XMLParseException("Prior must be gamma"); } return gammaStats; } public Object parseXMLObject(XMLObject xo) throws XMLParseException { final double weight = xo.getDoubleAttribute(WEIGHT); final Object prior = xo.getElementFirstChild(PRIOR); GammaStatisticsProvider priorDistribution = getGammaStatisticsProvider(prior); final Object working = xo.hasChildNamed(WORKING) ? 
xo.getElementFirstChild(WORKING) : null; GammaStatisticsProvider workingDistribution = null; if (working != null) { workingDistribution = getGammaStatisticsProvider(working); } final GammaGibbsProvider gammaGibbsProvider; if (xo.hasChildNamed(LIKELIHOOD)) { DistributionLikelihood likelihood = (DistributionLikelihood) xo.getElementFirstChild(LIKELIHOOD); if (!((likelihood.getDistribution() instanceof NormalDistributionModel) || (likelihood.getDistribution() instanceof LogNormalDistributionModel) )) { throw new XMLParseException("Gibbs operator assumes normal-gamma model"); } gammaGibbsProvider = new GammaGibbsProvider.Default(likelihood); } else { gammaGibbsProvider = (GammaGibbsProvider) xo.getChild(GammaGibbsProvider.class); } return new NormalGammaPrecisionGibbsOperator(gammaGibbsProvider, priorDistribution, workingDistribution, weight); } //************************************************************************ // AbstractXMLObjectParser implementation //************************************************************************ public String getParserDescription() { return "This element returns a operator on the precision parameter of a normal model with gamma prior."; } public Class getReturnType() { return MCMCOperator.class; } public XMLSyntaxRule[] getSyntaxRules() { return rules; } private final XMLSyntaxRule[] rules = { AttributeRule.newDoubleRule(WEIGHT), new XORRule( new ElementRule(LIKELIHOOD, new XMLSyntaxRule[]{ new XORRule( new ElementRule(DistributionLikelihood.class), new ElementRule(GammaStatisticsProvider.class) ) }), new ElementRule(GammaGibbsProvider.class) ), new ElementRule(PRIOR, new XMLSyntaxRule[]{ new ElementRule(DistributionLikelihood.class) }), new ElementRule(WORKING, new XMLSyntaxRule[]{ new ElementRule(DistributionLikelihood.class) }, true), }; }; private final GammaGibbsProvider gammaGibbsProvider; private final Parameter precisionParameter; private final GammaStatisticsProvider prior; private final GammaStatisticsProvider working; 
private double pathParameter = 1.0; }
src/dr/inference/operators/NormalGammaPrecisionGibbsOperator.java
/* * NormalGammaPrecisionGibbsOperator.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.inference.operators; import dr.inference.distribution.*; import dr.inference.model.Parameter; import dr.inference.operators.repeatedMeasures.GammaGibbsProvider; import dr.math.MathUtils; import dr.math.distributions.Distribution; import dr.math.distributions.GammaDistribution; import dr.math.matrixAlgebra.Vector; import dr.xml.*; /** * @author Marc A. 
Suchard * @author Philippe Lemey */ public class NormalGammaPrecisionGibbsOperator extends SimpleMCMCOperator implements GibbsOperator, Reportable { public static final String OPERATOR_NAME = "normalGammaPrecisionGibbsOperator"; public static final String LIKELIHOOD = "likelihood"; public static final String PRIOR = "prior"; private static final String WORKING = "workingDistribution"; public NormalGammaPrecisionGibbsOperator(GammaGibbsProvider gammaGibbsProvider, GammaStatisticsProvider prior, double weight) { this(gammaGibbsProvider, prior, null, weight); } public NormalGammaPrecisionGibbsOperator(GammaGibbsProvider gammaGibbsProvider, GammaStatisticsProvider prior, GammaStatisticsProvider working, double weight) { this.gammaGibbsProvider = gammaGibbsProvider; this.precisionParameter = gammaGibbsProvider.getPrecisionParameter(); this.prior = prior; this.working = working; setWeight(weight); } /** * @return a short descriptive message of the performance of this operator. */ public String getPerformanceSuggestion() { return null; } public String getOperatorName() { return OPERATOR_NAME; } @Override public String getReport() { int dimTrait = precisionParameter.getDimension(); double[] obsCounts = new double[dimTrait]; double[] sumSquaredErrors = new double[dimTrait]; gammaGibbsProvider.drawValues(); for (int i = 0; i < dimTrait; i++) { final GammaGibbsProvider.SufficientStatistics statistics = gammaGibbsProvider.getSufficientStatistics(i); obsCounts[i] = statistics.observationCount; sumSquaredErrors[i] = statistics.sumOfSquaredErrors; } StringBuilder sb = new StringBuilder(OPERATOR_NAME + " report:\n"); sb.append("Observation counts:\t"); sb.append(new Vector(obsCounts)); sb.append("\n"); sb.append("Sum of squared errors:\t"); sb.append(new Vector(sumSquaredErrors)); return sb.toString(); } static class GammaParametrization implements GammaStatisticsProvider { private final double rate; private final double shape; GammaParametrization(double mean, double variance) { 
if (mean == 0) { rate = 0; shape = -0.5; // Uninformative prior } else { rate = mean / variance; shape = mean * rate; } } GammaParametrization(Distribution distribution) { this(distribution.mean(), distribution.variance()); } double getRate() { return rate; } double getShape() { return shape; } @Override public double getShape(int dim) { return getShape(); } @Override public double getRate(int dim) { return getRate(); } } private double weigh(double working, double prior) { return (1.0 - pathParameter) * working + pathParameter * prior; } public double doOperation() { gammaGibbsProvider.drawValues(); for (int dim = 0; dim < precisionParameter.getDimension(); ++dim) { final GammaGibbsProvider.SufficientStatistics statistics = gammaGibbsProvider.getSufficientStatistics(dim); double shape = pathParameter * statistics.observationCount / 2; double rate = pathParameter * statistics.sumOfSquaredErrors / 2; if (working == null) { shape += prior.getShape(dim); rate += prior.getRate(dim); } else { shape += weigh(prior.getShape(dim), prior.getShape(dim)); //TODO: shouldn't these include the working? rate += weigh(prior.getRate(dim), prior.getShape(dim)); } final double draw = MathUtils.nextGamma(shape, rate); // Gamma( \alpha + n/2 , \beta + (1/2)*SSE ) precisionParameter.setParameterValue(dim, draw); } return 0; } @Override public void setPathParameter(double beta) { if (beta < 0.0 || beta > 1.0) { throw new IllegalArgumentException("Invalid pathParameter value"); } this.pathParameter = beta; } /** * @return the number of steps the operator performs in one go. 
*/ public int getStepCount() { return 1; } public static dr.xml.XMLObjectParser PARSER = new dr.xml.AbstractXMLObjectParser() { public String getParserName() { return OPERATOR_NAME; } private void checkGammaDistribution(DistributionLikelihood distribution) throws XMLParseException { if (!((distribution.getDistribution() instanceof GammaDistribution) || (distribution.getDistribution() instanceof GammaDistributionModel))) { throw new XMLParseException("Gibbs operator assumes normal-gamma model"); } } public Object parseXMLObject(XMLObject xo) throws XMLParseException { final double weight = xo.getDoubleAttribute(WEIGHT); final DistributionLikelihood prior = (DistributionLikelihood) xo.getElementFirstChild(PRIOR); checkGammaDistribution(prior); GammaParametrization priorDistribution = new GammaParametrization(prior.getDistribution()); final DistributionLikelihood working = (xo.hasChildNamed(WORKING) ? (DistributionLikelihood) xo.getElementFirstChild(WORKING) : null); GammaParametrization workingDistribution = null; if (working != null) { checkGammaDistribution(working); workingDistribution = new GammaParametrization(working.getDistribution()); } final GammaGibbsProvider gammaGibbsProvider; if (xo.hasChildNamed(LIKELIHOOD)) { DistributionLikelihood likelihood = (DistributionLikelihood) xo.getElementFirstChild(LIKELIHOOD); if (!((likelihood.getDistribution() instanceof NormalDistributionModel) || (likelihood.getDistribution() instanceof LogNormalDistributionModel) )) { throw new XMLParseException("Gibbs operator assumes normal-gamma model"); } gammaGibbsProvider = new GammaGibbsProvider.Default(likelihood); } else { gammaGibbsProvider = (GammaGibbsProvider) xo.getChild(GammaGibbsProvider.class); } return new NormalGammaPrecisionGibbsOperator(gammaGibbsProvider, priorDistribution, workingDistribution, weight); } //************************************************************************ // AbstractXMLObjectParser implementation 
//************************************************************************ public String getParserDescription() { return "This element returns a operator on the precision parameter of a normal model with gamma prior."; } public Class getReturnType() { return MCMCOperator.class; } public XMLSyntaxRule[] getSyntaxRules() { return rules; } private final XMLSyntaxRule[] rules = { AttributeRule.newDoubleRule(WEIGHT), new XORRule( new ElementRule(LIKELIHOOD, new XMLSyntaxRule[]{ new ElementRule(DistributionLikelihood.class) }), new ElementRule(GammaGibbsProvider.class) ), new ElementRule(PRIOR, new XMLSyntaxRule[]{ new ElementRule(DistributionLikelihood.class) }), new ElementRule(WORKING, new XMLSyntaxRule[]{ new ElementRule(DistributionLikelihood.class) }, true), }; }; private final GammaGibbsProvider gammaGibbsProvider; private final Parameter precisionParameter; private final GammaStatisticsProvider prior; private final GammaStatisticsProvider working; private double pathParameter = 1.0; }
NormalGammaPrecisionGibbsOperator can now take a broader range of prior
src/dr/inference/operators/NormalGammaPrecisionGibbsOperator.java
NormalGammaPrecisionGibbsOperator can now take a broader range of prior
Java
apache-2.0
368289aa122c33afdb2a433b829a0075f32a083d
0
jcshen007/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,argv0/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,resmo/cloudstack,mufaddalq/cloudstack-datera-driver,argv0/cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,cinderella/incubator-cloudstack,cinderella/incubator-cloudstack,wido/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,resmo/cloudstack,argv0/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,wido/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,wido/cloudstack,resmo/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,resmo/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,argv0/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,cinderella/incubator-cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,wido/cloudstack
/** * Copyright (C) 2010 Cloud.com, Inc. All rights reserved. * * This software is licensed under the GNU General Public License v3 or later. * * It is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package com.cloud.network.lb; import java.security.InvalidParameterException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import com.cloud.api.commands.ListLoadBalancerRuleInstancesCmd; import com.cloud.api.commands.ListLoadBalancerRulesCmd; import com.cloud.api.commands.UpdateLoadBalancerRuleCmd; import com.cloud.dc.dao.VlanDao; import com.cloud.domain.dao.DomainDao; import com.cloud.event.ActionEvent; import com.cloud.event.EventTypes; import com.cloud.event.UsageEventVO; import com.cloud.event.dao.EventDao; import com.cloud.event.dao.UsageEventDao; import com.cloud.exception.InvalidParameterValueException; import com.cloud.exception.NetworkRuleConflictException; import com.cloud.exception.PermissionDeniedException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.network.IPAddressVO; import com.cloud.network.LoadBalancerVMMapVO; import com.cloud.network.LoadBalancerVO; import com.cloud.network.NetworkManager; import com.cloud.network.dao.FirewallRulesDao; import com.cloud.network.dao.IPAddressDao; 
import com.cloud.network.dao.LoadBalancerDao; import com.cloud.network.dao.LoadBalancerVMMapDao; import com.cloud.network.lb.LoadBalancingRule.LbDestination; import com.cloud.network.rules.FirewallRule; import com.cloud.network.rules.FirewallRule.Purpose; import com.cloud.network.rules.FirewallRuleVO; import com.cloud.network.rules.LoadBalancer; import com.cloud.network.rules.RulesManager; import com.cloud.user.Account; import com.cloud.user.AccountManager; import com.cloud.user.UserContext; import com.cloud.user.dao.AccountDao; import com.cloud.uservm.UserVm; import com.cloud.utils.component.Inject; import com.cloud.utils.component.Manager; import com.cloud.utils.db.DB; import com.cloud.utils.db.Filter; import com.cloud.utils.db.JoinBuilder; import com.cloud.utils.db.SearchBuilder; import com.cloud.utils.db.SearchCriteria; import com.cloud.utils.db.Transaction; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.net.NetUtils; import com.cloud.vm.Nic; import com.cloud.vm.UserVmVO; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.dao.NicDao; import com.cloud.vm.dao.UserVmDao; @Local(value = { LoadBalancingRulesManager.class, LoadBalancingRulesService.class }) public class LoadBalancingRulesManagerImpl implements LoadBalancingRulesManager, LoadBalancingRulesService, Manager { private static final Logger s_logger = Logger.getLogger(LoadBalancingRulesManagerImpl.class); String _name; @Inject NetworkManager _networkMgr; @Inject RulesManager _rulesMgr; @Inject AccountManager _accountMgr; @Inject IPAddressDao _ipAddressDao; @Inject FirewallRulesDao _rulesDao; @Inject LoadBalancerDao _lbDao; @Inject VlanDao _vlanDao; @Inject EventDao _eventDao; @Inject LoadBalancerVMMapDao _lb2VmMapDao; @Inject UserVmDao _vmDao; @Inject AccountDao _accountDao; @Inject DomainDao _domainDao; @Inject NicDao _nicDao; @Inject UsageEventDao _usageEventDao; @Override @DB @ActionEvent (eventType=EventTypes.EVENT_ASSIGN_TO_LOAD_BALANCER_RULE, 
eventDescription="assigning to load balancer", async=true) public boolean assignToLoadBalancer(long loadBalancerId, List<Long> instanceIds) { UserContext ctx = UserContext.current(); Account caller = ctx.getCaller(); LoadBalancerVO loadBalancerLock = null; LoadBalancerVO loadBalancer = _lbDao.findById(loadBalancerId); if (loadBalancer == null) { throw new InvalidParameterValueException("Failed to assign to load balancer " + loadBalancerId + ", the load balancer was not found."); } List<LoadBalancerVMMapVO> mappedInstances = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId, false); Set<Long> mappedInstanceIds = new HashSet<Long>(); for (LoadBalancerVMMapVO mappedInstance : mappedInstances) { mappedInstanceIds.add(Long.valueOf(mappedInstance.getInstanceId())); } List<UserVm> vmsToAdd = new ArrayList<UserVm>(); for (Long instanceId : instanceIds) { if (mappedInstanceIds.contains(instanceId)) { throw new InvalidParameterValueException("VM " + instanceId + " is already mapped to load balancer."); } UserVm vm = _vmDao.findById(instanceId); if (vm == null || vm.getState() == State.Destroyed || vm.getState() == State.Expunging) { throw new InvalidParameterValueException("Invalid instance id: " + instanceId); } _rulesMgr.checkRuleAndUserVm(loadBalancer, vm, caller); if (vm.getAccountId() != loadBalancer.getAccountId()) { throw new PermissionDeniedException("Cannot add virtual machines that do not belong to the same owner."); } // Let's check to make sure the vm has a nic in the same network as the load balancing rule. List<? 
extends Nic> nics = _networkMgr.getNics(vm); Nic nicInSameNetwork = null; for (Nic nic : nics) { if (nic.getNetworkId() == loadBalancer.getNetworkId()) { nicInSameNetwork = nic; break; } } if (nicInSameNetwork == null) { throw new InvalidParameterValueException("VM " + instanceId + " cannot be added because it doesn't belong in the same network."); } if (s_logger.isDebugEnabled()) { s_logger.debug("Adding " + vm + " to the load balancer pool"); } vmsToAdd.add(vm); } try { loadBalancerLock = _lbDao.acquireInLockTable(loadBalancerId); if(loadBalancerLock == null) { s_logger.warn("Failed to acquire lock to assign VM to load balance rule id " + loadBalancerId); return false; } Transaction txn = Transaction.currentTxn(); txn.start(); for (UserVm vm : vmsToAdd) { LoadBalancerVMMapVO map = new LoadBalancerVMMapVO(loadBalancer.getId(), vm.getId(), false); map = _lb2VmMapDao.persist(map); } txn.commit(); loadBalancer.setState(FirewallRule.State.Add); _lbDao.persist(loadBalancer); applyLoadBalancerConfig(loadBalancerId); } catch (ResourceUnavailableException e) { s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e); return false; } finally { if (loadBalancerLock != null) { _lbDao.releaseFromLockTable(loadBalancerId); } } return true; } @Override @ActionEvent (eventType=EventTypes.EVENT_REMOVE_FROM_LOAD_BALANCER_RULE, eventDescription="removing from load balancer", async=true) public boolean removeFromLoadBalancer(long loadBalancerId, List<Long> instanceIds) { UserContext caller = UserContext.current(); LoadBalancerVO loadBalancerLock = null; LoadBalancerVO loadBalancer = _lbDao.findById(Long.valueOf(loadBalancerId)); if (loadBalancer == null) { throw new InvalidParameterException("Invalid load balancer value: " + loadBalancerId); } _accountMgr.checkAccess(caller.getCaller(), loadBalancer); try { loadBalancerLock = _lbDao.acquireInLockTable(loadBalancerId); if (loadBalancerLock == null) { s_logger.warn("Failed to acquire lock to delete 
load balance rule id " + loadBalancerId); return false; } loadBalancer.setState(FirewallRule.State.Add); _lbDao.persist(loadBalancer); for (long instanceId : instanceIds) { LoadBalancerVMMapVO map = _lb2VmMapDao.findByLoadBalancerIdAndVmId(loadBalancerId, instanceId); map.setRevoke(true); _lb2VmMapDao.persist(map); s_logger.debug("Set load balancer rule for revoke: rule id " + loadBalancerId + ", vmId " + instanceId); } if (applyLoadBalancerConfig(loadBalancerId)) { _lb2VmMapDao.remove(loadBalancerId, instanceIds, null); s_logger.debug("Load balancer rule id " + loadBalancerId + " is removed for vms " + instanceIds); } else { s_logger.warn("Failed to remove load balancer rule id " + loadBalancerId + " for vms " + instanceIds); throw new CloudRuntimeException("Failed to remove load balancer rule id " + loadBalancerId + " for vms " + instanceIds); } } catch (ResourceUnavailableException e) { s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e); return false; } finally { if (loadBalancerLock != null) { _lbDao.releaseFromLockTable(loadBalancerId); } } return true; } @Override public boolean removeVmFromLoadBalancers(long instanceId) { boolean success = true; List<LoadBalancerVMMapVO> maps = _lb2VmMapDao.listByInstanceId(instanceId); if (maps == null || maps.isEmpty()) { return true; } Map<Long, List<Long>> lbsToReconfigure = new HashMap<Long, List<Long>>(); //first set all existing lb mappings with Revoke state for (LoadBalancerVMMapVO map: maps) { long lbId = map.getLoadBalancerId(); List<Long> instances = lbsToReconfigure.get(lbId); if (instances == null) { instances = new ArrayList<Long>(); } instances.add(map.getInstanceId()); lbsToReconfigure.put(lbId, instances); map.setRevoke(true); _lb2VmMapDao.persist(map); s_logger.debug("Set load balancer rule for revoke: rule id " + map.getLoadBalancerId() + ", vmId " + instanceId); } //Reapply all lbs that had the vm assigned if (lbsToReconfigure != null) { for (Map.Entry<Long, 
List<Long>> lb : lbsToReconfigure.entrySet()) { if (!removeFromLoadBalancer(lb.getKey(), lb.getValue())) { success = false; } } } return success; } @Override @ActionEvent (eventType=EventTypes.EVENT_LOAD_BALANCER_DELETE, eventDescription="deleting load balancer", async=true) public boolean deleteLoadBalancerRule(long loadBalancerId, boolean apply) { UserContext caller = UserContext.current(); LoadBalancerVO loadBalancerLock = null; LoadBalancerVO lb = _lbDao.findById(loadBalancerId); if (lb == null) { throw new InvalidParameterException("Invalid load balancer value: " + loadBalancerId); } _accountMgr.checkAccess(caller.getCaller(), lb); loadBalancerLock = _lbDao.acquireInLockTable(loadBalancerId); if(loadBalancerLock == null) { s_logger.warn("Failed to acquire lock to delete load balance rule id " + loadBalancerId); return false; } lb.setState(FirewallRule.State.Revoke); _lbDao.persist(lb); List<LoadBalancerVMMapVO> maps = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId); if (maps != null) { for (LoadBalancerVMMapVO map : maps) { map.setRevoke(true); _lb2VmMapDao.persist(map); s_logger.debug("Set load balancer rule for revoke: rule id " + loadBalancerId + ", vmId " + map.getInstanceId()); } } if (apply) { try { applyLoadBalancerConfig(loadBalancerId); } catch (ResourceUnavailableException e) { s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e); return false; } finally { if (loadBalancerLock != null) _lbDao.releaseFromLockTable(loadBalancerId); } } _rulesDao.remove(lb.getId()); UsageEventVO usageEvent = new UsageEventVO(EventTypes.EVENT_LOAD_BALANCER_DELETE, lb.getAccountId(), 0 , lb.getId(), null); _usageEventDao.persist(usageEvent); s_logger.debug("Load balancer with id " + lb.getId() + " is removed successfully"); return true; } @Override @ActionEvent (eventType=EventTypes.EVENT_LOAD_BALANCER_CREATE, eventDescription="creating load balancer") public LoadBalancer createLoadBalancerRule(LoadBalancer lb) throws 
NetworkRuleConflictException { UserContext caller = UserContext.current(); long ipId = lb.getSourceIpAddressId(); // make sure ip address exists IPAddressVO ipAddr = _ipAddressDao.findById(ipId); if (ipAddr == null || !ipAddr.readyToUse()) { throw new InvalidParameterValueException("Unable to create load balancer rule, invalid IP address id" + ipId); } int srcPortStart = lb.getSourcePortStart(); int srcPortEnd = lb.getSourcePortEnd(); int defPortStart = lb.getDefaultPortStart(); int defPortEnd = lb.getDefaultPortEnd(); if (!NetUtils.isValidPort(srcPortStart)) { throw new InvalidParameterValueException("publicPort is an invalid value: " + srcPortStart); } if (!NetUtils.isValidPort(srcPortEnd)) { throw new InvalidParameterValueException("Public port range is an invalid value: " + srcPortEnd); } if (srcPortStart > srcPortEnd) { throw new InvalidParameterValueException("Public port range is an invalid value: " + srcPortStart + "-" + srcPortEnd); } if (!NetUtils.isValidPort(defPortStart)) { throw new InvalidParameterValueException("privatePort is an invalid value: " + defPortStart); } if (!NetUtils.isValidPort(defPortEnd)) { throw new InvalidParameterValueException("privatePort is an invalid value: " + defPortEnd); } if (defPortStart > defPortEnd) { throw new InvalidParameterValueException("private port range is invalid: " + defPortStart + "-" + defPortEnd); } if ((lb.getAlgorithm() == null) || !NetUtils.isValidAlgorithm(lb.getAlgorithm())) { throw new InvalidParameterValueException("Invalid algorithm: " + lb.getAlgorithm()); } Long networkId = lb.getNetworkId(); if (networkId == -1 ) { networkId = ipAddr.getAssociatedWithNetworkId(); } _accountMgr.checkAccess(caller.getCaller(), ipAddr); LoadBalancerVO newRule = new LoadBalancerVO(lb.getXid(), lb.getName(), lb.getDescription(), lb.getSourceIpAddressId(), lb.getSourcePortEnd(), lb.getDefaultPortStart(), lb.getAlgorithm(), networkId, ipAddr.getAccountId(), ipAddr.getDomainId()); newRule = _lbDao.persist(newRule); try { 
_rulesMgr.detectRulesConflict(newRule, ipAddr); if (!_rulesDao.setStateToAdd(newRule)) { throw new CloudRuntimeException("Unable to update the state to add for " + newRule); } s_logger.debug("Load balancer " + newRule.getId() + " for Ip address id=" + ipId + ", public port " + srcPortStart + ", private port " + defPortStart+ " is added successfully."); UsageEventVO usageEvent = new UsageEventVO(EventTypes.EVENT_LOAD_BALANCER_CREATE, ipAddr.getAllocatedToAccountId(), ipAddr.getDataCenterId(), newRule.getId(), null); _usageEventDao.persist(usageEvent); return newRule; } catch (Exception e) { _lbDao.remove(newRule.getId()); if (e instanceof NetworkRuleConflictException) { throw (NetworkRuleConflictException) e; } throw new CloudRuntimeException("Unable to add rule for ip address id=" + newRule.getSourceIpAddressId(), e); } } @Override public boolean applyLoadBalancerConfig(long lbRuleId) throws ResourceUnavailableException { List<LoadBalancerVO> lbs = new ArrayList<LoadBalancerVO>(1); lbs.add(_lbDao.findById(lbRuleId)); return applyLoadBalancerRules(lbs); } @Override public boolean applyLoadBalancersForNetwork(long networkId) throws ResourceUnavailableException { List<LoadBalancerVO> lbs = _lbDao.listByNetworkId(networkId); if (lbs != null) { return applyLoadBalancerRules(lbs); } else { s_logger.info("Network id=" + networkId + " doesn't have load balancer rules, nothing to apply"); return true; } } private boolean applyLoadBalancerRules(List<LoadBalancerVO> lbs) throws ResourceUnavailableException{ List<LoadBalancingRule> rules = new ArrayList<LoadBalancingRule>(); for (LoadBalancerVO lb : lbs) { List<LbDestination> dstList = getExistingDestinations(lb.getId()); if (dstList != null && !dstList.isEmpty()) { LoadBalancingRule loadBalancing = new LoadBalancingRule(lb, dstList); rules.add(loadBalancing); } } if (!_networkMgr.applyRules(rules, false)) { s_logger.debug("LB rules are not completely applied"); return false; } for (LoadBalancerVO lb : lbs) { if (lb.getState() 
== FirewallRule.State.Revoke) { _lbDao.remove(lb.getId()); s_logger.debug("LB " + lb.getId() + " is successfully removed"); } else if (lb.getState() == FirewallRule.State.Add) { lb.setState(FirewallRule.State.Active); s_logger.debug("LB rule " + lb.getId() + " state is set to Active"); _lbDao.persist(lb); } } return true; } @Override public boolean removeAllLoadBalanacers(long ipId) { List<FirewallRuleVO> rules = _rulesDao.listByIpAndNotRevoked(ipId, null); if (rules != null) s_logger.debug("Found " + rules.size() + " lb rules to cleanup"); for (FirewallRule rule : rules) { if (rule.getPurpose() == Purpose.LoadBalancing) { boolean result = deleteLoadBalancerRule(rule.getId(), true); if (result == false) { s_logger.warn("Unable to remove load balancer rule " + rule.getId()); return false; } } } return true; } @Override public List<LbDestination> getExistingDestinations(long lbId) { List<LbDestination> dstList = new ArrayList<LbDestination>(); List<LoadBalancerVMMapVO> lbVmMaps = _lb2VmMapDao.listByLoadBalancerId(lbId); LoadBalancerVO lb = _lbDao.findById(lbId); String dstIp = null; for (LoadBalancerVMMapVO lbVmMap : lbVmMaps) { UserVm vm = _vmDao.findById(lbVmMap.getInstanceId()); Nic nic = _nicDao.findByInstanceIdAndNetworkIdIncludingRemoved(lb.getNetworkId(), vm.getId()); dstIp = nic.getIp4Address(); LbDestination lbDst = new LbDestination(lb.getDefaultPortStart(), lb.getDefaultPortEnd(), dstIp, lbVmMap.isRevoke()); dstList.add(lbDst); } return dstList; } @Override public boolean configure(String name, Map<String, Object> params) throws ConfigurationException { _name = name; return true; } @Override public boolean start() { return true; } @Override public boolean stop() { return true; } @Override public String getName() { return _name; } @Override @ActionEvent (eventType=EventTypes.EVENT_LOAD_BALANCER_UPDATE, eventDescription="updating load balancer", async=true) public LoadBalancer updateLoadBalancerRule(UpdateLoadBalancerRuleCmd cmd) { Long lbRuleId = 
cmd.getId(); String name = cmd.getLoadBalancerName(); String description = cmd.getDescription(); String algorithm = cmd.getAlgorithm(); LoadBalancerVO lb = _lbDao.findById(lbRuleId); if (name != null) { lb.setName(name); } if (description != null) { lb.setDescription(description); } if (algorithm != null) { lb.setAlgorithm(algorithm); } _lbDao.update(lbRuleId, lb); //If algorithm is changed, have to reapply the lb config if (algorithm != null) { try { lb.setState(FirewallRule.State.Add); _lbDao.persist(lb); applyLoadBalancerConfig(lbRuleId); } catch (ResourceUnavailableException e) { s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e); } } return lb; } // @Override @DB // public boolean removeFromLoadBalancer(RemoveFromLoadBalancerRuleCmd cmd) throws InvalidParameterValueException { // // Long userId = UserContext.current().getUserId(); // Account account = UserContext.current().getAccount(); // Long loadBalancerId = cmd.getId(); // Long vmInstanceId = cmd.getVirtualMachineId(); // List<Long> instanceIds = cmd.getVirtualMachineIds(); // // if ((vmInstanceId == null) && (instanceIds == null)) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "No virtual machine id specified."); // } // // // if a single instanceId was given, add it to the list so we can always just process the list if instanceIds // if (instanceIds == null) { // instanceIds = new ArrayList<Long>(); // instanceIds.add(vmInstanceId); // } // // if (userId == null) { // userId = Long.valueOf(1); // } // // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(Long.valueOf(loadBalancerId)); // // if (loadBalancer == null) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "Unable to find load balancer rule with id " + loadBalancerId); // } else if (account != null) { // if (!isAdmin(account.getType()) && (loadBalancer.getAccountId() != account.getId())) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "Account " + account.getAccountName() + " 
does not own load balancer rule " + loadBalancer.getName() + // " (id:" + loadBalancer.getId() + ")"); // } else if (!_domainDao.isChildDomain(account.getDomainId(), loadBalancer.getDomainId())) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid load balancer rule id (" + loadBalancer.getId() + ") given, unable to remove virtual machine instances."); // } // } // // Transaction txn = Transaction.currentTxn(); // LoadBalancerVO loadBalancerLock = null; // boolean success = true; // try { // // IPAddressVO ipAddress = _ipAddressDao.findById(loadBalancer.getIpAddress()); // if (ipAddress == null) { // return false; // } // // DomainRouterVO router = _routerMgr.getRouter(ipAddress.getAccountId(), ipAddress.getDataCenterId()); // if (router == null) { // return false; // } // // txn.start(); // for (Long instanceId : instanceIds) { // UserVm userVm = _userVmDao.findById(instanceId); // if (userVm == null) { // s_logger.warn("Unable to find virtual machine with id " + instanceId); // throw new InvalidParameterValueException("Unable to find virtual machine with id " + instanceId); // } // PortForwardingRuleVO fwRule = _rulesDao.findByGroupAndPrivateIp(loadBalancerId, userVm.getGuestIpAddress(), false); // if (fwRule != null) { // fwRule.setEnabled(false); // _rulesDao.update(fwRule.getId(), fwRule); // } // } // // List<PortForwardingRuleVO> allLbRules = new ArrayList<PortForwardingRuleVO>(); // IPAddressVO ipAddr = _ipAddressDao.findById(loadBalancer.getIpAddress()); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(loadBalancer.getAccountId(), ipAddr.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _rulesDao.listIPForwarding(ipv.getAddress(), false); // allLbRules.addAll(rules); // } // // updateFirewallRules(loadBalancer.getIpAddress(), allLbRules, router); // // // firewall rules are updated, lock the load balancer as mappings are updated // loadBalancerLock = 
_loadBalancerDao.acquireInLockTable(loadBalancerId); // if (loadBalancerLock == null) { // s_logger.warn("removeFromLoadBalancer: failed to lock load balancer " + loadBalancerId + ", deleting mappings anyway..."); // } // // // remove all the loadBalancer->VM mappings // _loadBalancerVMMapDao.remove(loadBalancerId, instanceIds, Boolean.FALSE); // // // Save and create the event // String description; // String type = EventTypes.EVENT_NET_RULE_DELETE; // String level = EventVO.LEVEL_INFO; // // for (PortForwardingRuleVO updatedRule : allLbRules) { // if (!updatedRule.isEnabled()) { // _rulesDao.remove(updatedRule.getId()); // // description = "deleted load balancer rule [" + updatedRule.getSourceIpAddress() + ":" + updatedRule.getSourcePort() + "]->[" // + updatedRule.getDestinationIpAddress() + ":" + updatedRule.getDestinationPort() + "]" + " " + updatedRule.getProtocol(); // // EventUtils.saveEvent(userId, loadBalancer.getAccountId(), level, type, description); // } // } // txn.commit(); // } catch (Exception ex) { // s_logger.warn("Failed to delete load balancing rule with exception: ", ex); // success = false; // txn.rollback(); // } finally { // if (loadBalancerLock != null) { // _loadBalancerDao.releaseFromLockTable(loadBalancerId); // } // } // return success; // } // // @Override @DB // public boolean deleteLoadBalancerRule(DeleteLoadBalancerRuleCmd cmd) throws InvalidParameterValueException, PermissionDeniedException{ // Long loadBalancerId = cmd.getId(); // Long userId = UserContext.current().getUserId(); // Account account = UserContext.current().getAccount(); // // ///verify input parameters // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(loadBalancerId); // if (loadBalancer == null) { // throw new InvalidParameterValueException ("Unable to find load balancer rule with id " + loadBalancerId); // } // // if (account != null) { // if (!isAdmin(account.getType())) { // if (loadBalancer.getAccountId() != account.getId()) { // throw new 
PermissionDeniedException("Account " + account.getAccountName() + " does not own load balancer rule " + loadBalancer.getName() + " (id:" + loadBalancerId + "), permission denied"); // } // } else if (!_domainDao.isChildDomain(account.getDomainId(), loadBalancer.getDomainId())) { // throw new PermissionDeniedException("Unable to delete load balancer rule " + loadBalancer.getName() + " (id:" + loadBalancerId + "), permission denied."); // } // } // // if (userId == null) { // userId = Long.valueOf(1); // } // // Transaction txn = Transaction.currentTxn(); // LoadBalancerVO loadBalancerLock = null; // try { // // IPAddressVO ipAddress = _ipAddressDao.findById(loadBalancer.getIpAddress()); // if (ipAddress == null) { // return false; // } // // DomainRouterVO router = _routerMgr.getRouter(ipAddress.getAccountId(), ipAddress.getDataCenterId()); // List<PortForwardingRuleVO> fwRules = _firewallRulesDao.listByLoadBalancerId(loadBalancerId); // // txn.start(); // // if ((fwRules != null) && !fwRules.isEmpty()) { // for (PortForwardingRuleVO fwRule : fwRules) { // fwRule.setEnabled(false); // _firewallRulesDao.update(fwRule.getId(), fwRule); // } // // List<PortForwardingRuleVO> allLbRules = new ArrayList<PortForwardingRuleVO>(); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(loadBalancer.getAccountId(), ipAddress.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _firewallRulesDao.listIPForwarding(ipv.getAddress(), false); // allLbRules.addAll(rules); // } // // updateFirewallRules(loadBalancer.getIpAddress(), allLbRules, router); // // // firewall rules are updated, lock the load balancer as the mappings are updated // loadBalancerLock = _loadBalancerDao.acquireInLockTable(loadBalancerId); // if (loadBalancerLock == null) { // s_logger.warn("deleteLoadBalancer: failed to lock load balancer " + loadBalancerId + ", deleting mappings anyway..."); // } // // // remove all loadBalancer->VM mappings // 
List<LoadBalancerVMMapVO> lbVmMap = _loadBalancerVMMapDao.listByLoadBalancerId(loadBalancerId); // if (lbVmMap != null && !lbVmMap.isEmpty()) { // for (LoadBalancerVMMapVO lb : lbVmMap) { // _loadBalancerVMMapDao.remove(lb.getId()); // } // } // // // Save and create the event // String description; // String type = EventTypes.EVENT_NET_RULE_DELETE; // String ruleName = "load balancer"; // String level = EventVO.LEVEL_INFO; // Account accountOwner = _accountDao.findById(loadBalancer.getAccountId()); // // for (PortForwardingRuleVO updatedRule : fwRules) { // _firewallRulesDao.remove(updatedRule.getId()); // // description = "deleted " + ruleName + " rule [" + updatedRule.getSourceIpAddress() + ":" + updatedRule.getSourcePort() + "]->[" // + updatedRule.getDestinationIpAddress() + ":" + updatedRule.getDestinationPort() + "]" + " " + updatedRule.getProtocol(); // // EventUtils.saveEvent(userId, accountOwner.getId(), level, type, description); // } // } // // txn.commit(); // } catch (Exception ex) { // txn.rollback(); // s_logger.error("Unexpected exception deleting load balancer " + loadBalancerId, ex); // return false; // } finally { // if (loadBalancerLock != null) { // _loadBalancerDao.releaseFromLockTable(loadBalancerId); // } // } // // boolean success = _loadBalancerDao.remove(loadBalancerId); // // // save off an event for removing the load balancer // EventVO event = new EventVO(); // event.setUserId(userId); // event.setAccountId(loadBalancer.getAccountId()); // event.setType(EventTypes.EVENT_LOAD_BALANCER_DELETE); // if (success) { // event.setLevel(EventVO.LEVEL_INFO); // String params = "id="+loadBalancer.getId(); // event.setParameters(params); // event.setDescription("Successfully deleted load balancer " + loadBalancer.getName() + " (id:" + loadBalancer.getId() + ")"); // } else { // event.setLevel(EventVO.LEVEL_ERROR); // event.setDescription("Failed to delete load balancer " + loadBalancer.getName() + " (id:" + loadBalancer.getId() + ")"); // } // 
_eventDao.persist(event); // return success; // } // @Override @DB // public boolean assignToLoadBalancer(AssignToLoadBalancerRuleCmd cmd) throws NetworkRuleConflictException { // Long loadBalancerId = cmd.getLoadBalancerId(); // Long instanceIdParam = cmd.getVirtualMachineId(); // List<Long> instanceIds = cmd.getVirtualMachineIds(); // // if ((instanceIdParam == null) && (instanceIds == null)) { // throw new InvalidParameterValueException("Unable to assign to load balancer " + loadBalancerId + ", no instance id is specified."); // } // // if ((instanceIds == null) && (instanceIdParam != null)) { // instanceIds = new ArrayList<Long>(); // instanceIds.add(instanceIdParam); // } // // // FIXME: We should probably lock the load balancer here to prevent multiple updates... // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(loadBalancerId); // if (loadBalancer == null) { // throw new InvalidParameterValueException("Failed to assign to load balancer " + loadBalancerId + ", the load balancer was not found."); // } // // // // Permission check... 
// Account account = UserContext.current().getAccount(); // if (account != null) { // if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) { // if (!_domainDao.isChildDomain(account.getDomainId(), loadBalancer.getDomainId())) { // throw new PermissionDeniedException("Failed to assign to load balancer " + loadBalancerId + ", permission denied."); // } // } else if (account.getType() != Account.ACCOUNT_TYPE_ADMIN && account.getId() != loadBalancer.getAccountId()) { // throw new PermissionDeniedException("Failed to assign to load balancer " + loadBalancerId + ", permission denied."); // } // } // // Transaction txn = Transaction.currentTxn(); // List<PortForwardingRuleVO> firewallRulesToApply = new ArrayList<PortForwardingRuleVO>(); // long accountId = 0; // DomainRouterVO router = null; // // List<LoadBalancerVMMapVO> mappedInstances = _loadBalancerVMMapDao.listByLoadBalancerId(loadBalancerId, false); // Set<Long> mappedInstanceIds = new HashSet<Long>(); // if (mappedInstances != null) { // for (LoadBalancerVMMapVO mappedInstance : mappedInstances) { // mappedInstanceIds.add(Long.valueOf(mappedInstance.getInstanceId())); // } // } // // List<Long> finalInstanceIds = new ArrayList<Long>(); // for (Long instanceId : instanceIds) { // if (mappedInstanceIds.contains(instanceId)) { // continue; // } else { // finalInstanceIds.add(instanceId); // } // // UserVmVO userVm = _vmDao.findById(instanceId); // if (userVm == null) { // s_logger.warn("Unable to find virtual machine with id " + instanceId); // throw new InvalidParameterValueException("Unable to find virtual machine with id " + instanceId); // } else { // // sanity check that the vm can be applied to the load balancer // ServiceOfferingVO offering = _serviceOfferingDao.findById(userVm.getServiceOfferingId()); // if ((offering == null) || !GuestIpType.Virtualized.equals(offering.getGuestIpType())) { // // we previously added these instanceIds to the loadBalancerVMMap, so remove them here as we are rejecting the 
API request // // without actually modifying the load balancer // _loadBalancerVMMapDao.remove(loadBalancerId, instanceIds, Boolean.TRUE); // // if (s_logger.isDebugEnabled()) { // s_logger.debug("Unable to add virtual machine " + userVm.toString() + " to load balancer " + loadBalancerId + ", bad network type (" + ((offering == null) ? "null" : offering.getGuestIpType()) + ")"); // } // // throw new InvalidParameterValueException("Unable to add virtual machine " + userVm.toString() + " to load balancer " + loadBalancerId + ", bad network type (" + ((offering == null) ? "null" : offering.getGuestIpType()) + ")"); // } // } // // if (accountId == 0) { // accountId = userVm.getAccountId(); // } else if (accountId != userVm.getAccountId()) { // s_logger.warn("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") belongs to account " + userVm.getAccountId() // + ", previous vm in list belongs to account " + accountId); // throw new InvalidParameterValueException("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") belongs to account " + userVm.getAccountId() // + ", previous vm in list belongs to account " + accountId); // } // // DomainRouterVO nextRouter = null; // if (userVm.getDomainRouterId() != null) { // nextRouter = _routerMgr.getRouter(userVm.getDomainRouterId()); // } // if (nextRouter == null) { // s_logger.warn("Unable to find router (" + userVm.getDomainRouterId() + ") for virtual machine with id " + instanceId); // throw new InvalidParameterValueException("Unable to find router (" + userVm.getDomainRouterId() + ") for virtual machine with id " + instanceId); // } // // if (router == null) { // router = nextRouter; // // // Make sure owner of router is owner of load balancer. Since we are already checking that all VMs belong to the same router, by checking router // // ownership once we'll make sure all VMs belong to the owner of the load balancer. 
// if (router.getAccountId() != loadBalancer.getAccountId()) { // throw new InvalidParameterValueException("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") does not belong to the owner of load balancer " + // loadBalancer.getName() + " (owner is account id " + loadBalancer.getAccountId() + ")"); // } // } else if (router.getId() != nextRouter.getId()) { // throw new InvalidParameterValueException("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") belongs to router " + nextRouter.getHostName() // + ", previous vm in list belongs to router " + router.getHostName()); // } // // // check for ip address/port conflicts by checking exising forwarding and loadbalancing rules // String ipAddress = loadBalancer.getIpAddress(); // String privateIpAddress = userVm.getGuestIpAddress(); // List<PortForwardingRuleVO> existingRulesOnPubIp = _rulesDao.listIPForwarding(ipAddress); // // if (existingRulesOnPubIp != null) { // for (PortForwardingRuleVO fwRule : existingRulesOnPubIp) { // if (!( (fwRule.isForwarding() == false) && // (fwRule.getGroupId() != null) && // (fwRule.getGroupId() == loadBalancer.getId()) )) { // // if the rule is not for the current load balancer, check to see if the private IP is our target IP, // // in which case we have a conflict // if (fwRule.getSourcePort().equals(loadBalancer.getPublicPort())) { // throw new NetworkRuleConflictException("An existing port forwarding service rule for " + ipAddress + ":" + loadBalancer.getPublicPort() // + " exists, found while trying to apply load balancer " + loadBalancer.getName() + " (id:" + loadBalancer.getId() + ") to instance " // + userVm.getHostName() + "."); // } // } else if (fwRule.getDestinationIpAddress().equals(privateIpAddress) && fwRule.getDestinationPort().equals(loadBalancer.getPrivatePort()) && fwRule.isEnabled()) { // // for the current load balancer, don't add the same instance to the load balancer more than once // continue; // } // } // } // // PortForwardingRuleVO 
newFwRule = new PortForwardingRuleVO(); // newFwRule.setAlgorithm(loadBalancer.getAlgorithm()); // newFwRule.setEnabled(true); // newFwRule.setForwarding(false); // newFwRule.setPrivatePort(loadBalancer.getPrivatePort()); // newFwRule.setPublicPort(loadBalancer.getPublicPort()); // newFwRule.setPublicIpAddress(loadBalancer.getIpAddress()); // newFwRule.setPrivateIpAddress(userVm.getGuestIpAddress()); // newFwRule.setGroupId(loadBalancer.getId()); // // firewallRulesToApply.add(newFwRule); // } // // // if there's no work to do, bail out early rather than reconfiguring the proxy with the existing rules // if (firewallRulesToApply.isEmpty()) { // return true; // } // // //Sync on domR // if(router == null){ // throw new InvalidParameterValueException("Failed to assign to load balancer " + loadBalancerId + ", the domain router was not found at " + loadBalancer.getIpAddress()); // } // else{ // cmd.synchronizeCommand("Router", router.getId()); // } // // IPAddressVO ipAddr = _ipAddressDao.findById(loadBalancer.getIpAddress()); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(accountId, ipAddr.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _rulesDao.listIpForwardingRulesForLoadBalancers(ipv.getAddress()); // firewallRulesToApply.addAll(rules); // } // // txn.start(); // // List<PortForwardingRuleVO> updatedRules = null; // if (router.getState().equals(State.Starting)) { // // Starting is a special case...if the router is starting that means the IP address hasn't yet been assigned to the domR and the update firewall rules script will fail. // // In this case, just store the rules and they will be applied when the router state is resent (after the router is started). 
// updatedRules = firewallRulesToApply; // } else { // updatedRules = updateFirewallRules(loadBalancer.getIpAddress(), firewallRulesToApply, router); // } // // // Save and create the event // String description; // String type = EventTypes.EVENT_NET_RULE_ADD; // String ruleName = "load balancer"; // String level = EventVO.LEVEL_INFO; // // LoadBalancerVO loadBalancerLock = null; // try { // loadBalancerLock = _loadBalancerDao.acquireInLockTable(loadBalancerId); // if (loadBalancerLock == null) { // s_logger.warn("assignToLoadBalancer: Failed to lock load balancer " + loadBalancerId + ", proceeding with updating loadBalancerVMMappings..."); // } // if ((updatedRules != null) && (updatedRules.size() == firewallRulesToApply.size())) { // // flag the instances as mapped to the load balancer // for (Long addedInstanceId : finalInstanceIds) { // LoadBalancerVMMapVO mappedVM = new LoadBalancerVMMapVO(loadBalancerId, addedInstanceId); // _loadBalancerVMMapDao.persist(mappedVM); // } // // /* We used to add these instances as pending when the API command is received on the server, and once they were applied, // * the pending status was removed. 
In the 2.2 API framework, this is no longer done and instead the new mappings just // * need to be persisted // List<LoadBalancerVMMapVO> pendingMappedVMs = _loadBalancerVMMapDao.listByLoadBalancerId(loadBalancerId, true); // for (LoadBalancerVMMapVO pendingMappedVM : pendingMappedVMs) { // if (instanceIds.contains(pendingMappedVM.getInstanceId())) { // LoadBalancerVMMapVO pendingMappedVMForUpdate = _loadBalancerVMMapDao.createForUpdate(); // pendingMappedVMForUpdate.setPending(false); // _loadBalancerVMMapDao.update(pendingMappedVM.getId(), pendingMappedVMForUpdate); // } // } // */ // // for (PortForwardingRuleVO updatedRule : updatedRules) { // _rulesDao.persist(updatedRule); // // description = "created new " + ruleName + " rule [" + updatedRule.getSourceIpAddress() + ":" // + updatedRule.getSourcePort() + "]->[" + updatedRule.getDestinationIpAddress() + ":" // + updatedRule.getDestinationPort() + "]" + " " + updatedRule.getProtocol(); // // EventUtils.saveEvent(UserContext.current().getUserId(), loadBalancer.getAccountId(), level, type, description); // } // txn.commit(); // return true; // } else { // // Remove the instanceIds from the load balancer since there was a failure. Make sure to commit the // // transaction here, otherwise the act of throwing the internal error exception will cause this // // remove operation to be rolled back. 
// _loadBalancerVMMapDao.remove(loadBalancerId, instanceIds, null); // txn.commit(); // // s_logger.warn("Failed to apply load balancer " + loadBalancer.getName() + " (id:" + loadBalancerId + ") to guest virtual machines " + StringUtils.join(instanceIds, ",")); // throw new CloudRuntimeException("Failed to apply load balancer " + loadBalancer.getName() + " (id:" + loadBalancerId + ") to guest virtual machine " + StringUtils.join(instanceIds, ",")); // } // } finally { // if (loadBalancerLock != null) { // _loadBalancerDao.releaseFromLockTable(loadBalancerId); // } // } // } // @Override @DB // public LoadBalancer createLoadBalancerRule(CreateLoadBalancerRuleCmd cmd) throws InvalidParameterValueException, PermissionDeniedException { // String publicIp = cmd.getPublicIp(); // // // make sure ip address exists // IPAddressVO ipAddr = _ipAddressDao.findById(cmd.getPublicIp()); // if (ipAddr == null) { // throw new InvalidParameterValueException("Unable to create load balancer rule, invalid IP address " + publicIp); // } // // VlanVO vlan = _vlanDao.findById(ipAddr.getVlanDbId()); // if (vlan != null) { // if (!VlanType.VirtualNetwork.equals(vlan.getVlanType())) { // throw new InvalidParameterValueException("Unable to create load balancer rule for IP address " + publicIp + ", only VirtualNetwork type IP addresses can be used for load balancers."); // } // } // else ERROR? 
// // // Verify input parameters // if ((ipAddr.getAccountId() == null) || (ipAddr.getAllocated() == null)) { // throw new InvalidParameterValueException("Unable to create load balancer rule, cannot find account owner for ip " + publicIp); // } // // Account account = UserContext.current().getAccount(); // if (account != null) { // if ((account.getType() == Account.ACCOUNT_TYPE_ADMIN) || (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)) { // if (!_domainDao.isChildDomain(account.getDomainId(), ipAddr.getDomainId())) { // throw new PermissionDeniedException("Unable to create load balancer rule on IP address " + publicIp + ", permission denied."); // } // } else if (account.getId() != ipAddr.getAccountId().longValue()) { // throw new PermissionDeniedException("Unable to create load balancer rule, account " + account.getAccountName() + " doesn't own ip address " + publicIp); // } // } // // String loadBalancerName = cmd.getLoadBalancerRuleName(); // LoadBalancerVO existingLB = _loadBalancerDao.findByAccountAndName(ipAddr.getAccountId(), loadBalancerName); // if (existingLB != null) { // throw new InvalidParameterValueException("Unable to create load balancer rule, an existing load balancer rule with name " + loadBalancerName + " already exists."); // } // // // validate params // String publicPort = cmd.getPublicPort(); // String privatePort = cmd.getPrivatePort(); // String algorithm = cmd.getAlgorithm(); // // if (!NetUtils.isValidPort(publicPort)) { // throw new InvalidParameterValueException("publicPort is an invalid value"); // } // if (!NetUtils.isValidPort(privatePort)) { // throw new InvalidParameterValueException("privatePort is an invalid value"); // } // if ((algorithm == null) || !NetUtils.isValidAlgorithm(algorithm)) { // throw new InvalidParameterValueException("Invalid algorithm"); // } // // boolean locked = false; // try { // LoadBalancerVO exitingLB = _loadBalancerDao.findByIpAddressAndPublicPort(publicIp, publicPort); // if (exitingLB != 
null) { // throw new InvalidParameterValueException("IP Address/public port already load balanced by an existing load balancer rule"); // } // // List<PortForwardingRuleVO> existingFwRules = _rulesDao.listIPForwarding(publicIp, publicPort, true); // if ((existingFwRules != null) && !existingFwRules.isEmpty()) { // throw new InvalidParameterValueException("IP Address (" + publicIp + ") and port (" + publicPort + ") already in use"); // } // // ipAddr = _ipAddressDao.acquireInLockTable(publicIp); // if (ipAddr == null) { // throw new PermissionDeniedException("User does not own ip address " + publicIp); // } // // locked = true; // // LoadBalancerVO loadBalancer = new LoadBalancerVO(loadBalancerName, cmd.getDescription(), ipAddr.getAccountId(), publicIp, publicPort, privatePort, algorithm); // loadBalancer = _loadBalancerDao.persist(loadBalancer); // Long id = loadBalancer.getId(); // // // Save off information for the event that the security group was applied // Long userId = UserContext.current().getUserId(); // if (userId == null) { // userId = Long.valueOf(User.UID_SYSTEM); // } // // EventVO event = new EventVO(); // event.setUserId(userId); // event.setAccountId(ipAddr.getAccountId()); // event.setType(EventTypes.EVENT_LOAD_BALANCER_CREATE); // // if (id == null) { // event.setDescription("Failed to create load balancer " + loadBalancer.getName() + " on ip address " + publicIp + "[" + publicPort + "->" + privatePort + "]"); // event.setLevel(EventVO.LEVEL_ERROR); // } else { // event.setDescription("Successfully created load balancer " + loadBalancer.getName() + " on ip address " + publicIp + "[" + publicPort + "->" + privatePort + "]"); // String params = "id="+loadBalancer.getId()+"\ndcId="+ipAddr.getDataCenterId(); // event.setParameters(params); // event.setLevel(EventVO.LEVEL_INFO); // } // _eventDao.persist(event); // // return _loadBalancerDao.findById(id); // } finally { // if (locked) { // _ipAddressDao.releaseFromLockTable(publicIp); // } // } // } // 
@Override // public boolean updateLoadBalancerRules(final List<PortForwardingRuleVO> fwRules, final DomainRouterVO router, Long hostId) { // // for (PortForwardingRuleVO rule : fwRules) { // // Determine the the VLAN ID and netmask of the rule's public IP address // IPAddressVO ip = _ipAddressDao.findById(rule.getSourceIpAddress()); // VlanVO vlan = _vlanDao.findById(new Long(ip.getVlanDbId())); // String vlanNetmask = vlan.getVlanNetmask(); // // rule.setVlanNetmask(vlanNetmask); // } // // final LoadBalancerConfigurator cfgrtr = new HAProxyConfigurator(); // final String [] cfg = cfgrtr.generateConfiguration(fwRules); // final String [][] addRemoveRules = cfgrtr.generateFwRules(fwRules); // final LoadBalancerCfgCommand cmd = new LoadBalancerCfgCommand(cfg, addRemoveRules, router.getInstanceName(), router.getPrivateIpAddress()); // final Answer ans = _agentMgr.easySend(hostId, cmd); // if (ans == null) { // return false; // } else { // return ans.getResult(); // } // } // @Override @DB // public LoadBalancerVO updateLoadBalancerRule(UpdateLoadBalancerRuleCmd cmd) throws InvalidParameterValueException, PermissionDeniedException{ // Long loadBalancerId = cmd.getId(); // String privatePort = cmd.getPrivatePort(); // String algorithm = cmd.getAlgorithm(); // String name = cmd.getLoadBalancerName(); // String description = cmd.getDescription(); // Account account = UserContext.current().getAccount(); // // //Verify input parameters // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(loadBalancerId); // if (loadBalancer == null) { // throw new InvalidParameterValueException("Unable to find load balancer rule " + loadBalancerId + " for update."); // } // // // make sure the name's not already in use // if (name != null) { // LoadBalancerVO existingLB = _loadBalancerDao.findByAccountAndName(loadBalancer.getAccountId(), name); // if ((existingLB != null) && (existingLB.getId() != loadBalancer.getId())) { // throw new InvalidParameterValueException("Unable to update 
load balancer " + loadBalancer.getName() + " with new name " + name + ", the name is already in use."); // } // } // // Account lbOwner = _accountDao.findById(loadBalancer.getAccountId()); // if (lbOwner == null) { // throw new InvalidParameterValueException("Unable to update load balancer rule, cannot find owning account"); // } // // Long accountId = lbOwner.getId(); // if (account != null) { // if (!isAdmin(account.getType())) { // if (account.getId() != accountId.longValue()) { // throw new PermissionDeniedException("Unable to update load balancer rule, permission denied"); // } // } else if (!_domainDao.isChildDomain(account.getDomainId(), lbOwner.getDomainId())) { // throw new PermissionDeniedException("Unable to update load balancer rule, permission denied."); // } // } // // String updatedPrivatePort = ((privatePort == null) ? loadBalancer.getPrivatePort() : privatePort); // String updatedAlgorithm = ((algorithm == null) ? loadBalancer.getAlgorithm() : algorithm); // String updatedName = ((name == null) ? loadBalancer.getName() : name); // String updatedDescription = ((description == null) ? 
// loadBalancer.getDescription() : description);
//
// Transaction txn = Transaction.currentTxn();
// try {
// txn.start();
// loadBalancer.setPrivatePort(updatedPrivatePort);
// loadBalancer.setAlgorithm(updatedAlgorithm);
// loadBalancer.setName(updatedName);
// loadBalancer.setDescription(updatedDescription);
// _loadBalancerDao.update(loadBalancer.getId(), loadBalancer);
//
// List<PortForwardingRuleVO> fwRules = _firewallRulesDao.listByLoadBalancerId(loadBalancer.getId());
// if ((fwRules != null) && !fwRules.isEmpty()) {
// for (PortForwardingRuleVO fwRule : fwRules) {
// fwRule.setPrivatePort(updatedPrivatePort);
// fwRule.setAlgorithm(updatedAlgorithm);
// _firewallRulesDao.update(fwRule.getId(), fwRule);
// }
// }
// txn.commit();
// } catch (RuntimeException ex) {
// s_logger.warn("Unhandled exception trying to update load balancer rule", ex);
// txn.rollback();
// throw ex;
// } finally {
// txn.close();
// }
//
// // now that the load balancer has been updated, reconfigure the HA Proxy on the router with all the LB rules
// List<PortForwardingRuleVO> allLbRules = new ArrayList<PortForwardingRuleVO>();
// IPAddressVO ipAddress = _ipAddressDao.findById(loadBalancer.getIpAddress());
// List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(loadBalancer.getAccountId(), ipAddress.getDataCenterId(), null);
// for (IPAddressVO ipv : ipAddrs) {
// List<PortForwardingRuleVO> rules = _firewallRulesDao.listIPForwarding(ipv.getAddress(), false);
// allLbRules.addAll(rules);
// }
//
// IPAddressVO ip = _ipAddressDao.findById(loadBalancer.getIpAddress());
// DomainRouterVO router = _routerMgr.getRouter(ip.getAccountId(), ip.getDataCenterId());
// updateFirewallRules(loadBalancer.getIpAddress(), allLbRules, router);
// return _loadBalancerDao.findById(loadBalancer.getId());
// }

    /**
     * Lists the user VMs associated with a load balancer rule.
     *
     * <p>When {@code cmd.isApplied()} is {@code true} (the default when the flag is
     * absent), returns only the VMs currently mapped to the rule. When {@code false},
     * returns the owner's VMs in the rule's zone/network that are NOT yet mapped,
     * i.e. the candidates that could still be assigned.
     *
     * @param cmd API command carrying the rule id and the optional {@code applied} flag
     * @return the matching VMs, or {@code null} if no rule exists with the given id
     * @throws PermissionDeniedException if the caller is a domain admin outside the
     *         owner's domain tree, or a normal user who does not own the rule
     */
    @Override
    public List<UserVmVO> listLoadBalancerInstances(ListLoadBalancerRuleInstancesCmd cmd) throws PermissionDeniedException {
        Account account = UserContext.current().getCaller();
        Long loadBalancerId = cmd.getId();
        Boolean applied = cmd.isApplied();
        // Default: list the instances already applied to the rule.
        if (applied == null) {
            applied = Boolean.TRUE;
        }

        LoadBalancerVO loadBalancer = _lbDao.findById(loadBalancerId);
        if (loadBalancer == null) {
            // Unknown rule id -> nothing to list (caller is expected to handle null).
            return null;
        }

        long lbAcctId = loadBalancer.getAccountId();
        // Access control: domain admins may only see rules owned by accounts within a
        // child domain of their own; normal users may only see their own rules.
        // (Other account types fall through both checks unchecked.)
        if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) {
            Account userAccount = _accountDao.findById(lbAcctId);
            if (!_domainDao.isChildDomain(account.getDomainId(), userAccount.getDomainId())) {
                throw new PermissionDeniedException("Invalid load balancer rule id (" + loadBalancerId + ") given, unable to list load balancer instances.");
            }
        } else if (account.getType() == Account.ACCOUNT_TYPE_NORMAL && account.getId() != lbAcctId) {
            throw new PermissionDeniedException("Unable to list load balancer instances, account " + account.getAccountName() + " does not own load balancer rule " + loadBalancer.getName());
        }

        List<UserVmVO> loadBalancerInstances = new ArrayList<UserVmVO>();
        List<LoadBalancerVMMapVO> vmLoadBalancerMappings = null;
        if (applied) {
            // List only the instances that have actually been applied to the load balancer (pending is false).
            vmLoadBalancerMappings = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId, false);
        } else {
            // List all instances applied, even pending ones that are currently being assigned, so that the semantics
            // of "what instances can I apply to this load balancer" are maintained.
            vmLoadBalancerMappings = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId);
        }

        // Collect the ids of every instance currently mapped to this rule.
        List<Long> appliedInstanceIdList = new ArrayList<Long>();
        if ((vmLoadBalancerMappings != null) && !vmLoadBalancerMappings.isEmpty()) {
            for (LoadBalancerVMMapVO vmLoadBalancerMapping : vmLoadBalancerMappings) {
                appliedInstanceIdList.add(vmLoadBalancerMapping.getInstanceId());
            }
        }

        // Candidate set: the rule owner's virtual-network instances in the same zone
        // as the rule's source IP, restricted to the rule's network.
        IPAddressVO addr = _ipAddressDao.findById(loadBalancer.getSourceIpAddressId());
        List<UserVmVO> userVms = _vmDao.listVirtualNetworkInstancesByAcctAndZone(loadBalancer.getAccountId(), addr.getDataCenterId(), loadBalancer.getNetworkId());

        for (UserVmVO userVm : userVms) {
            // if the VM is destroyed, being expunged, in an error state, or in an unknown state, skip it
            switch (userVm.getState()) {
            case Destroyed:
            case Expunging:
            case Error:
            case Unknown:
                continue;
            }

            boolean isApplied = appliedInstanceIdList.contains(userVm.getId());
            // applied=true  -> return only VMs already mapped to the rule;
            // applied=false -> return only VMs not yet mapped (assignable candidates).
            if (!applied && !isApplied) {
                loadBalancerInstances.add(userVm);
            } else if (applied && isApplied) {
                loadBalancerInstances.add(userVm);
            }
        }

        return loadBalancerInstances;
    }

    /**
     * Searches for load balancer rules matching the criteria in the list command.
     *
     * <p>Supported filters: rule id, rule name (LIKE), source IP id, keyword
     * (matched against name OR description), assigned VM instance id (via a join
     * on the rule-to-VM mapping table), plus account/domain scoping derived from
     * the caller's account type.
     *
     * @param cmd the list command with paging and filter parameters
     * @return the page of matching {@link LoadBalancerVO}s (possibly empty)
     * @throws InvalidParameterValueException declared for interface compatibility
     * @throws PermissionDeniedException if a domain admin requests an account
     *         outside their domain tree (via {@code _accountMgr.checkAccess})
     */
    @Override
    public List<LoadBalancerVO> searchForLoadBalancers(ListLoadBalancerRulesCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
        Account caller = UserContext.current().getCaller();
        Account owner = null;
        Long domainId = cmd.getDomainId();
        String accountName = cmd.getAccountName();
        Long accountId = null;
        Long ipId = cmd.getPublicIpId();

        if (accountName != null && domainId != null) {
            owner = _accountDao.findActiveAccount(accountName, domainId);
            if (owner == null) {
                // Sentinel: no active account matches the name/domain pair, so the
                // accountId filter below is guaranteed to match no rules.
                accountId = -1L;
            }
        }

        // Scope the search by caller type: normal users see only their own rules;
        // full admins searching for a specific owner are narrowed to that owner;
        // domain admins are permission-checked against the requested owner instead.
        if (caller.getType() == Account.ACCOUNT_TYPE_NORMAL) {
            accountId = caller.getAccountId();
        } else if (caller.getType() == Account.ACCOUNT_TYPE_ADMIN && owner != null) {
            accountId = owner.getId();
        } else if (owner != null && caller.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN){
            _accountMgr.checkAccess(caller, owner);
        }

        // Page by id ascending.
        Filter searchFilter = new Filter(LoadBalancerVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());

        Object id = cmd.getId();
        Object name = cmd.getLoadBalancerRuleName();
        Object keyword = cmd.getKeyword();
        Object instanceId = cmd.getVirtualMachineId();

        SearchBuilder<LoadBalancerVO> sb = _lbDao.createSearchBuilder();
        sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
        sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
        sb.and("sourceIpAddress", sb.entity().getSourceIpAddressId(), SearchCriteria.Op.EQ);
        sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
        sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);

        if (instanceId != null) {
            // Filtering by VM requires an inner join against the LB-to-VM mapping table.
            SearchBuilder<LoadBalancerVMMapVO> lbVMSearch = _lb2VmMapDao.createSearchBuilder();
            lbVMSearch.and("instanceId", lbVMSearch.entity().getInstanceId(), SearchCriteria.Op.EQ);
            sb.join("lbVMSearch", lbVMSearch, sb.entity().getId(), lbVMSearch.entity().getLoadBalancerId(), JoinBuilder.JoinType.INNER);
        }

        SearchCriteria<LoadBalancerVO> sc = sb.create();
        if (keyword != null) {
            // Keyword matches either name or description, OR-ed together and AND-ed
            // into the outer criteria.
            SearchCriteria<LoadBalancerVO> ssc = _lbDao.createSearchCriteria();
            ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            sc.addAnd("name", SearchCriteria.Op.SC, ssc);
        }

        if (name != null) {
            sc.setParameters("name", "%" + name + "%");
        }

        if (id != null) {
            sc.setParameters("id", id);
        }

        if (ipId != null) {
            sc.setParameters("sourceIpAddress", ipId);
        }

        if (instanceId != null) {
            sc.setJoinParameters("lbVMSearch", "instanceId", instanceId);
        }

        // accountId takes precedence over domainId when both could apply.
        if (accountId != null) {
            sc.setParameters("accountId", accountId);
        } else if (domainId != null) {
            sc.setParameters("domainId", domainId);
        }

        return _lbDao.search(sc, searchFilter);
    }

    /**
     * Builds the in-memory load-balancing rules for every load balancer defined
     * on the given network, pairing each {@link LoadBalancerVO} with its current
     * destinations (presumably the mapped VM endpoints returned by
     * {@code getExistingDestinations} — defined elsewhere in this class).
     *
     * @param networkId id of the network whose LB rules are wanted
     * @return one {@link LoadBalancingRule} per rule on the network (possibly empty)
     */
    @Override
    public List<LoadBalancingRule> listByNetworkId(long networkId) {
        List<LoadBalancerVO> lbs = _lbDao.listByNetworkId(networkId);
        List<LoadBalancingRule> lbRules = new ArrayList<LoadBalancingRule>();
        for (LoadBalancerVO lb : lbs) {
            List<LbDestination> dstList = getExistingDestinations(lb.getId());
            LoadBalancingRule loadBalancing = new LoadBalancingRule(lb, dstList);
            lbRules.add(loadBalancing);
        }
        return lbRules;
    }
}
server/src/com/cloud/network/lb/LoadBalancingRulesManagerImpl.java
/** * Copyright (C) 2010 Cloud.com, Inc. All rights reserved. * * This software is licensed under the GNU General Public License v3 or later. * * It is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package com.cloud.network.lb; import java.security.InvalidParameterException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import com.cloud.api.commands.ListLoadBalancerRuleInstancesCmd; import com.cloud.api.commands.ListLoadBalancerRulesCmd; import com.cloud.api.commands.UpdateLoadBalancerRuleCmd; import com.cloud.dc.dao.VlanDao; import com.cloud.domain.dao.DomainDao; import com.cloud.event.ActionEvent; import com.cloud.event.EventTypes; import com.cloud.event.UsageEventVO; import com.cloud.event.dao.EventDao; import com.cloud.event.dao.UsageEventDao; import com.cloud.exception.InvalidParameterValueException; import com.cloud.exception.NetworkRuleConflictException; import com.cloud.exception.PermissionDeniedException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.network.IPAddressVO; import com.cloud.network.LoadBalancerVMMapVO; import com.cloud.network.LoadBalancerVO; import com.cloud.network.NetworkManager; import com.cloud.network.dao.FirewallRulesDao; import com.cloud.network.dao.IPAddressDao; 
import com.cloud.network.dao.LoadBalancerDao; import com.cloud.network.dao.LoadBalancerVMMapDao; import com.cloud.network.lb.LoadBalancingRule.LbDestination; import com.cloud.network.rules.FirewallRule; import com.cloud.network.rules.FirewallRule.Purpose; import com.cloud.network.rules.FirewallRuleVO; import com.cloud.network.rules.LoadBalancer; import com.cloud.network.rules.RulesManager; import com.cloud.user.Account; import com.cloud.user.AccountManager; import com.cloud.user.UserContext; import com.cloud.user.dao.AccountDao; import com.cloud.uservm.UserVm; import com.cloud.utils.component.Inject; import com.cloud.utils.component.Manager; import com.cloud.utils.db.DB; import com.cloud.utils.db.Filter; import com.cloud.utils.db.JoinBuilder; import com.cloud.utils.db.SearchBuilder; import com.cloud.utils.db.SearchCriteria; import com.cloud.utils.db.Transaction; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.net.NetUtils; import com.cloud.vm.Nic; import com.cloud.vm.UserVmVO; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.dao.NicDao; import com.cloud.vm.dao.UserVmDao; @Local(value = { LoadBalancingRulesManager.class, LoadBalancingRulesService.class }) public class LoadBalancingRulesManagerImpl implements LoadBalancingRulesManager, LoadBalancingRulesService, Manager { private static final Logger s_logger = Logger.getLogger(LoadBalancingRulesManagerImpl.class); String _name; @Inject NetworkManager _networkMgr; @Inject RulesManager _rulesMgr; @Inject AccountManager _accountMgr; @Inject IPAddressDao _ipAddressDao; @Inject FirewallRulesDao _rulesDao; @Inject LoadBalancerDao _lbDao; @Inject VlanDao _vlanDao; @Inject EventDao _eventDao; @Inject LoadBalancerVMMapDao _lb2VmMapDao; @Inject UserVmDao _vmDao; @Inject AccountDao _accountDao; @Inject DomainDao _domainDao; @Inject NicDao _nicDao; @Inject UsageEventDao _usageEventDao; @Override @DB @ActionEvent (eventType=EventTypes.EVENT_ASSIGN_TO_LOAD_BALANCER_RULE, 
eventDescription="assigning to load balancer", async=true)
    // Assigns the given user VM instances to an existing load balancer rule.
    // Validates each VM (exists, not destroyed/expunging, same owner, has a nic in the
    // LB's network), persists the new lb->vm mappings in one transaction, then flags the
    // rule as Add and re-applies the LB config on the backend.
    // Returns false when the backend resource is unavailable; throws
    // InvalidParameterValueException / PermissionDeniedException on bad input.
    public boolean assignToLoadBalancer(long loadBalancerId, List<Long> instanceIds) {
        UserContext ctx = UserContext.current();
        Account caller = ctx.getCaller();

        LoadBalancerVO loadBalancer = _lbDao.findById(loadBalancerId);
        if (loadBalancer == null) {
            throw new InvalidParameterValueException("Failed to assign to load balancer " + loadBalancerId + ", the load balancer was not found.");
        }

        // Collect the ids already mapped (non-revoked) so duplicates are rejected below.
        List<LoadBalancerVMMapVO> mappedInstances = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId, false);
        Set<Long> mappedInstanceIds = new HashSet<Long>();
        for (LoadBalancerVMMapVO mappedInstance : mappedInstances) {
            mappedInstanceIds.add(Long.valueOf(mappedInstance.getInstanceId()));
        }

        List<UserVm> vmsToAdd = new ArrayList<UserVm>();

        for (Long instanceId : instanceIds) {
            if (mappedInstanceIds.contains(instanceId)) {
                throw new InvalidParameterValueException("VM " + instanceId + " is already mapped to load balancer.");
            }

            UserVm vm = _vmDao.findById(instanceId);
            if (vm == null || vm.getState() == State.Destroyed || vm.getState() == State.Expunging) {
                throw new InvalidParameterValueException("Invalid instance id: " + instanceId);
            }

            _rulesMgr.checkRuleAndUserVm(loadBalancer, vm, caller);

            if (vm.getAccountId() != loadBalancer.getAccountId()) {
                throw new PermissionDeniedException("Cannot add virtual machines that do not belong to the same owner.");
            }

            // Let's check to make sure the vm has a nic in the same network as the load balancing rule.
            List<? extends Nic> nics = _networkMgr.getNics(vm);
            Nic nicInSameNetwork = null;
            for (Nic nic : nics) {
                if (nic.getNetworkId() == loadBalancer.getNetworkId()) {
                    nicInSameNetwork = nic;
                    break;
                }
            }
            if (nicInSameNetwork == null) {
                throw new InvalidParameterValueException("VM " + instanceId + " cannot be added because it doesn't belong in the same network.");
            }

            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Adding " + vm + " to the load balancer pool");
            }
            vmsToAdd.add(vm);
        }

        // Persist all new mappings atomically; backend application happens after commit,
        // so a backend failure below leaves the mappings in place (reported via 'false').
        Transaction txn = Transaction.currentTxn();
        txn.start();

        for (UserVm vm : vmsToAdd) {
            LoadBalancerVMMapVO map = new LoadBalancerVMMapVO(loadBalancer.getId(), vm.getId(), false);
            map = _lb2VmMapDao.persist(map);
        }
        txn.commit();

        try {
            loadBalancer.setState(FirewallRule.State.Add);
            _lbDao.persist(loadBalancer);
            applyLoadBalancerConfig(loadBalancerId);
        } catch (ResourceUnavailableException e) {
            s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e);
            return false;
        }

        return true;
    }

    /**
     * Removes the given VM instances from a load balancer rule: marks each mapping
     * for revoke, re-applies the LB config, and deletes the mappings on success.
     *
     * @return false when the backend resource is unavailable
     * @throws InvalidParameterException      when the rule does not exist
     * @throws InvalidParameterValueException when a VM is not mapped to the rule
     * @throws CloudRuntimeException          when the backend refuses the updated config
     */
    @Override
    @ActionEvent (eventType=EventTypes.EVENT_REMOVE_FROM_LOAD_BALANCER_RULE, eventDescription="removing from load balancer", async=true)
    public boolean removeFromLoadBalancer(long loadBalancerId, List<Long> instanceIds) {
        UserContext caller = UserContext.current();

        LoadBalancerVO loadBalancer = _lbDao.findById(Long.valueOf(loadBalancerId));
        if (loadBalancer == null) {
            throw new InvalidParameterException("Invalid load balancer value: " + loadBalancerId);
        }

        _accountMgr.checkAccess(caller.getCaller(), loadBalancer);

        try {
            loadBalancer.setState(FirewallRule.State.Add);
            _lbDao.persist(loadBalancer);

            for (long instanceId : instanceIds) {
                LoadBalancerVMMapVO map = _lb2VmMapDao.findByLoadBalancerIdAndVmId(loadBalancerId, instanceId);
                // Fix: the lookup returns null when the VM was never mapped to this rule;
                // previously that produced a bare NullPointerException on map.setRevoke().
                if (map == null) {
                    throw new InvalidParameterValueException("VM " + instanceId + " is not mapped to load balancer " + loadBalancerId);
                }
                map.setRevoke(true);
                _lb2VmMapDao.persist(map);
                s_logger.debug("Set load balancer rule for revoke: rule id " + loadBalancerId + ", vmId " + instanceId);
            }

            if (applyLoadBalancerConfig(loadBalancerId)) {
                _lb2VmMapDao.remove(loadBalancerId, instanceIds, null);
                s_logger.debug("Load balancer rule id " + loadBalancerId + " is removed for vms " + instanceIds);
            } else {
                s_logger.warn("Failed to remove load balancer rule id " + loadBalancerId + " for vms " + instanceIds);
                throw new CloudRuntimeException("Failed to remove load balancer rule id " + loadBalancerId + " for vms " + instanceIds);
            }
        } catch (ResourceUnavailableException e) {
            s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e);
            return false;
        }
        return true;
    }

    /**
     * Detaches a VM from every load balancer rule it participates in
     * (e.g. when the VM is being destroyed).
     *
     * Marks all of the VM's lb mappings for revoke, then calls
     * {@link #removeFromLoadBalancer(long, List)} per affected rule.
     *
     * @return true when the VM had no mappings or every rule was reconfigured successfully
     */
    @Override
    public boolean removeVmFromLoadBalancers(long instanceId) {
        boolean success = true;

        List<LoadBalancerVMMapVO> maps = _lb2VmMapDao.listByInstanceId(instanceId);
        if (maps == null || maps.isEmpty()) {
            return true;
        }

        // rule id -> instance ids to remove from that rule
        Map<Long, List<Long>> lbsToReconfigure = new HashMap<Long, List<Long>>();

        // first set all existing lb mappings with Revoke state
        for (LoadBalancerVMMapVO map : maps) {
            long lbId = map.getLoadBalancerId();
            List<Long> instances = lbsToReconfigure.get(lbId);
            if (instances == null) {
                instances = new ArrayList<Long>();
            }
            instances.add(map.getInstanceId());
            lbsToReconfigure.put(lbId, instances);

            map.setRevoke(true);
            _lb2VmMapDao.persist(map);
            s_logger.debug("Set load balancer rule for revoke: rule id " + map.getLoadBalancerId() + ", vmId " + instanceId);
        }

        // Reapply all lbs that had the vm assigned.
        // (The previous 'lbsToReconfigure != null' guard was removed: the map is
        // constructed unconditionally above and can never be null here.)
        for (Map.Entry<Long, List<Long>> lb : lbsToReconfigure.entrySet()) {
            if (!removeFromLoadBalancer(lb.getKey(), lb.getValue())) {
                success = false;
            }
        }
        return success;
    }

    @Override
    @ActionEvent (eventType=EventTypes.EVENT_LOAD_BALANCER_DELETE, eventDescription="deleting load balancer", async=true)
    public boolean deleteLoadBalancerRule(long loadBalancerId, boolean apply) {
        UserContext caller = UserContext.current();

        LoadBalancerVO lb = _lbDao.findById(loadBalancerId);
        if (lb == null) {
            throw new InvalidParameterException("Invalid load balancer value: " + loadBalancerId);
        }

        _accountMgr.checkAccess(caller.getCaller(), lb);
        // Tail of deleteLoadBalancerRule: flag the rule and all of its vm mappings for
        // revoke, optionally push the change to the backend, then remove the rule row
        // and emit a usage event.
        lb.setState(FirewallRule.State.Revoke);
        _lbDao.persist(lb);

        List<LoadBalancerVMMapVO> maps = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId);
        if (maps != null) {
            for (LoadBalancerVMMapVO map : maps) {
                map.setRevoke(true);
                _lb2VmMapDao.persist(map);
                s_logger.debug("Set load balancer rule for revoke: rule id " + loadBalancerId + ", vmId " + map.getInstanceId());
            }
        }

        if (apply) {
            try {
                applyLoadBalancerConfig(loadBalancerId);
            } catch (ResourceUnavailableException e) {
                // Backend unreachable: rule stays in Revoke state; caller gets false.
                s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e);
                return false;
            }
        }

        _rulesDao.remove(lb.getId());
        // NOTE(review): zone id is hard-coded to 0 in this usage event — confirm whether
        // the data center id should be recorded here as it is on the create path below.
        UsageEventVO usageEvent = new UsageEventVO(EventTypes.EVENT_LOAD_BALANCER_DELETE, lb.getAccountId(), 0, lb.getId(), null);
        _usageEventDao.persist(usageEvent);
        s_logger.debug("Load balancer with id " + lb.getId() + " is removed successfully");

        return true;
    }

    /**
     * Creates a new load balancing rule on an allocated public IP.
     *
     * Validates the IP, the public/private port ranges and the algorithm, persists the
     * rule, checks it for conflicts with existing rules on the same IP, and emits a
     * usage event. On any failure after persistence the rule row is removed again.
     *
     * @throws NetworkRuleConflictException when the new rule conflicts with an existing one
     * @throws InvalidParameterValueException on bad IP/ports/algorithm
     */
    @Override
    @ActionEvent (eventType=EventTypes.EVENT_LOAD_BALANCER_CREATE, eventDescription="creating load balancer")
    public LoadBalancer createLoadBalancerRule(LoadBalancer lb) throws NetworkRuleConflictException {
        UserContext caller = UserContext.current();

        long ipId = lb.getSourceIpAddressId();
        // make sure ip address exists
        IPAddressVO ipAddr = _ipAddressDao.findById(ipId);
        if (ipAddr == null || !ipAddr.readyToUse()) {
            throw new InvalidParameterValueException("Unable to create load balancer rule, invalid IP address id" + ipId);
        }

        int srcPortStart = lb.getSourcePortStart();
        int srcPortEnd = lb.getSourcePortEnd();
        int defPortStart = lb.getDefaultPortStart();
        int defPortEnd = lb.getDefaultPortEnd();

        if (!NetUtils.isValidPort(srcPortStart)) {
            throw new InvalidParameterValueException("publicPort is an invalid value: " + srcPortStart);
        }
        if (!NetUtils.isValidPort(srcPortEnd)) {
            throw new InvalidParameterValueException("Public port range is an invalid value: " + srcPortEnd);
        }
        if (srcPortStart > srcPortEnd) {
            throw new InvalidParameterValueException("Public port range is an invalid value: " + srcPortStart + "-" + srcPortEnd);
        }
        if (!NetUtils.isValidPort(defPortStart)) {
            throw new InvalidParameterValueException("privatePort is an invalid value: " + defPortStart);
        }
        if (!NetUtils.isValidPort(defPortEnd)) {
            throw new InvalidParameterValueException("privatePort is an invalid value: " + defPortEnd);
        }
        if (defPortStart > defPortEnd) {
            throw new InvalidParameterValueException("private port range is invalid: " + defPortStart + "-" + defPortEnd);
        }
        if ((lb.getAlgorithm() == null) || !NetUtils.isValidAlgorithm(lb.getAlgorithm())) {
            throw new InvalidParameterValueException("Invalid algorithm: " + lb.getAlgorithm());
        }

        // -1 acts as "not specified": fall back to the network the IP is associated with.
        // NOTE(review): networkId is a Long compared with == to an int literal; this
        // unboxes and would NPE if getNetworkId() could return null — confirm it cannot.
        Long networkId = lb.getNetworkId();
        if (networkId == -1 ) {
            networkId = ipAddr.getAssociatedWithNetworkId();
        }

        _accountMgr.checkAccess(caller.getCaller(), ipAddr);

        // NOTE(review): the constructor is passed getSourcePortEnd() and
        // getDefaultPortStart() even though both ends of both ranges were validated
        // above — presumably the VO stores a single port per side; verify against the
        // LoadBalancerVO constructor signature.
        LoadBalancerVO newRule = new LoadBalancerVO(lb.getXid(), lb.getName(), lb.getDescription(), lb.getSourceIpAddressId(),
                lb.getSourcePortEnd(), lb.getDefaultPortStart(), lb.getAlgorithm(), networkId, ipAddr.getAccountId(), ipAddr.getDomainId());

        newRule = _lbDao.persist(newRule);

        try {
            _rulesMgr.detectRulesConflict(newRule, ipAddr);
            if (!_rulesDao.setStateToAdd(newRule)) {
                throw new CloudRuntimeException("Unable to update the state to add for " + newRule);
            }
            s_logger.debug("Load balancer " + newRule.getId() + " for Ip address id=" + ipId + ", public port " + srcPortStart + ", private port " + defPortStart+ " is added successfully.");
            UsageEventVO usageEvent = new UsageEventVO(EventTypes.EVENT_LOAD_BALANCER_CREATE, ipAddr.getAllocatedToAccountId(), ipAddr.getDataCenterId(), newRule.getId(), null);
            _usageEventDao.persist(usageEvent);
            return newRule;
        } catch (Exception e) {
            // Roll back the persisted row on any failure; rethrow conflicts unchanged.
            _lbDao.remove(newRule.getId());
            if (e instanceof NetworkRuleConflictException) {
                throw (NetworkRuleConflictException) e;
            }
            throw new CloudRuntimeException("Unable to add rule for ip address id=" + newRule.getSourceIpAddressId(), e);
        }
    }

    // Re-applies the configuration of a single LB rule (body continues below).
    @Override
    public boolean applyLoadBalancerConfig(long lbRuleId) throws ResourceUnavailableException {
List<LoadBalancerVO> lbs = new ArrayList<LoadBalancerVO>(1); lbs.add(_lbDao.findById(lbRuleId)); return applyLoadBalancerRules(lbs); } @Override public boolean applyLoadBalancersForNetwork(long networkId) throws ResourceUnavailableException { List<LoadBalancerVO> lbs = _lbDao.listByNetworkId(networkId); if (lbs != null) { return applyLoadBalancerRules(lbs); } else { s_logger.info("Network id=" + networkId + " doesn't have load balancer rules, nothing to apply"); return true; } } private boolean applyLoadBalancerRules(List<LoadBalancerVO> lbs) throws ResourceUnavailableException{ List<LoadBalancingRule> rules = new ArrayList<LoadBalancingRule>(); for (LoadBalancerVO lb : lbs) { List<LbDestination> dstList = getExistingDestinations(lb.getId()); if (dstList != null && !dstList.isEmpty()) { LoadBalancingRule loadBalancing = new LoadBalancingRule(lb, dstList); rules.add(loadBalancing); } } if (!_networkMgr.applyRules(rules, false)) { s_logger.debug("LB rules are not completely applied"); return false; } for (LoadBalancerVO lb : lbs) { if (lb.getState() == FirewallRule.State.Revoke) { _lbDao.remove(lb.getId()); s_logger.debug("LB " + lb.getId() + " is successfully removed"); } else if (lb.getState() == FirewallRule.State.Add) { lb.setState(FirewallRule.State.Active); s_logger.debug("LB rule " + lb.getId() + " state is set to Active"); _lbDao.persist(lb); } } return true; } @Override public boolean removeAllLoadBalanacers(long ipId) { List<FirewallRuleVO> rules = _rulesDao.listByIpAndNotRevoked(ipId, null); if (rules != null) s_logger.debug("Found " + rules.size() + " lb rules to cleanup"); for (FirewallRule rule : rules) { if (rule.getPurpose() == Purpose.LoadBalancing) { boolean result = deleteLoadBalancerRule(rule.getId(), true); if (result == false) { s_logger.warn("Unable to remove load balancer rule " + rule.getId()); return false; } } } return true; } @Override public List<LbDestination> getExistingDestinations(long lbId) { List<LbDestination> dstList = new 
ArrayList<LbDestination>(); List<LoadBalancerVMMapVO> lbVmMaps = _lb2VmMapDao.listByLoadBalancerId(lbId); LoadBalancerVO lb = _lbDao.findById(lbId); String dstIp = null; for (LoadBalancerVMMapVO lbVmMap : lbVmMaps) { UserVm vm = _vmDao.findById(lbVmMap.getInstanceId()); Nic nic = _nicDao.findByInstanceIdAndNetworkIdIncludingRemoved(lb.getNetworkId(), vm.getId()); dstIp = nic.getIp4Address(); LbDestination lbDst = new LbDestination(lb.getDefaultPortStart(), lb.getDefaultPortEnd(), dstIp, lbVmMap.isRevoke()); dstList.add(lbDst); } return dstList; } @Override public boolean configure(String name, Map<String, Object> params) throws ConfigurationException { _name = name; return true; } @Override public boolean start() { return true; } @Override public boolean stop() { return true; } @Override public String getName() { return _name; } @Override @ActionEvent (eventType=EventTypes.EVENT_LOAD_BALANCER_UPDATE, eventDescription="updating load balancer", async=true) public LoadBalancer updateLoadBalancerRule(UpdateLoadBalancerRuleCmd cmd) { Long lbRuleId = cmd.getId(); String name = cmd.getLoadBalancerName(); String description = cmd.getDescription(); String algorithm = cmd.getAlgorithm(); LoadBalancerVO lb = _lbDao.findById(lbRuleId); if (name != null) { lb.setName(name); } if (description != null) { lb.setDescription(description); } if (algorithm != null) { lb.setAlgorithm(algorithm); } _lbDao.update(lbRuleId, lb); //If algorithm is changed, have to reapply the lb config if (algorithm != null) { try { lb.setState(FirewallRule.State.Add); _lbDao.persist(lb); applyLoadBalancerConfig(lbRuleId); } catch (ResourceUnavailableException e) { s_logger.warn("Unable to apply the load balancer config because resource is unavaliable.", e); } } return lb; } // @Override @DB // public boolean removeFromLoadBalancer(RemoveFromLoadBalancerRuleCmd cmd) throws InvalidParameterValueException { // // Long userId = UserContext.current().getUserId(); // Account account = 
UserContext.current().getAccount(); // Long loadBalancerId = cmd.getId(); // Long vmInstanceId = cmd.getVirtualMachineId(); // List<Long> instanceIds = cmd.getVirtualMachineIds(); // // if ((vmInstanceId == null) && (instanceIds == null)) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "No virtual machine id specified."); // } // // // if a single instanceId was given, add it to the list so we can always just process the list if instanceIds // if (instanceIds == null) { // instanceIds = new ArrayList<Long>(); // instanceIds.add(vmInstanceId); // } // // if (userId == null) { // userId = Long.valueOf(1); // } // // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(Long.valueOf(loadBalancerId)); // // if (loadBalancer == null) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "Unable to find load balancer rule with id " + loadBalancerId); // } else if (account != null) { // if (!isAdmin(account.getType()) && (loadBalancer.getAccountId() != account.getId())) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "Account " + account.getAccountName() + " does not own load balancer rule " + loadBalancer.getName() + // " (id:" + loadBalancer.getId() + ")"); // } else if (!_domainDao.isChildDomain(account.getDomainId(), loadBalancer.getDomainId())) { // throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid load balancer rule id (" + loadBalancer.getId() + ") given, unable to remove virtual machine instances."); // } // } // // Transaction txn = Transaction.currentTxn(); // LoadBalancerVO loadBalancerLock = null; // boolean success = true; // try { // // IPAddressVO ipAddress = _ipAddressDao.findById(loadBalancer.getIpAddress()); // if (ipAddress == null) { // return false; // } // // DomainRouterVO router = _routerMgr.getRouter(ipAddress.getAccountId(), ipAddress.getDataCenterId()); // if (router == null) { // return false; // } // // txn.start(); // for (Long instanceId : instanceIds) { // UserVm userVm = _userVmDao.findById(instanceId); // if 
(userVm == null) { // s_logger.warn("Unable to find virtual machine with id " + instanceId); // throw new InvalidParameterValueException("Unable to find virtual machine with id " + instanceId); // } // PortForwardingRuleVO fwRule = _rulesDao.findByGroupAndPrivateIp(loadBalancerId, userVm.getGuestIpAddress(), false); // if (fwRule != null) { // fwRule.setEnabled(false); // _rulesDao.update(fwRule.getId(), fwRule); // } // } // // List<PortForwardingRuleVO> allLbRules = new ArrayList<PortForwardingRuleVO>(); // IPAddressVO ipAddr = _ipAddressDao.findById(loadBalancer.getIpAddress()); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(loadBalancer.getAccountId(), ipAddr.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _rulesDao.listIPForwarding(ipv.getAddress(), false); // allLbRules.addAll(rules); // } // // updateFirewallRules(loadBalancer.getIpAddress(), allLbRules, router); // // // firewall rules are updated, lock the load balancer as mappings are updated // loadBalancerLock = _loadBalancerDao.acquireInLockTable(loadBalancerId); // if (loadBalancerLock == null) { // s_logger.warn("removeFromLoadBalancer: failed to lock load balancer " + loadBalancerId + ", deleting mappings anyway..."); // } // // // remove all the loadBalancer->VM mappings // _loadBalancerVMMapDao.remove(loadBalancerId, instanceIds, Boolean.FALSE); // // // Save and create the event // String description; // String type = EventTypes.EVENT_NET_RULE_DELETE; // String level = EventVO.LEVEL_INFO; // // for (PortForwardingRuleVO updatedRule : allLbRules) { // if (!updatedRule.isEnabled()) { // _rulesDao.remove(updatedRule.getId()); // // description = "deleted load balancer rule [" + updatedRule.getSourceIpAddress() + ":" + updatedRule.getSourcePort() + "]->[" // + updatedRule.getDestinationIpAddress() + ":" + updatedRule.getDestinationPort() + "]" + " " + updatedRule.getProtocol(); // // EventUtils.saveEvent(userId, 
loadBalancer.getAccountId(), level, type, description); // } // } // txn.commit(); // } catch (Exception ex) { // s_logger.warn("Failed to delete load balancing rule with exception: ", ex); // success = false; // txn.rollback(); // } finally { // if (loadBalancerLock != null) { // _loadBalancerDao.releaseFromLockTable(loadBalancerId); // } // } // return success; // } // // @Override @DB // public boolean deleteLoadBalancerRule(DeleteLoadBalancerRuleCmd cmd) throws InvalidParameterValueException, PermissionDeniedException{ // Long loadBalancerId = cmd.getId(); // Long userId = UserContext.current().getUserId(); // Account account = UserContext.current().getAccount(); // // ///verify input parameters // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(loadBalancerId); // if (loadBalancer == null) { // throw new InvalidParameterValueException ("Unable to find load balancer rule with id " + loadBalancerId); // } // // if (account != null) { // if (!isAdmin(account.getType())) { // if (loadBalancer.getAccountId() != account.getId()) { // throw new PermissionDeniedException("Account " + account.getAccountName() + " does not own load balancer rule " + loadBalancer.getName() + " (id:" + loadBalancerId + "), permission denied"); // } // } else if (!_domainDao.isChildDomain(account.getDomainId(), loadBalancer.getDomainId())) { // throw new PermissionDeniedException("Unable to delete load balancer rule " + loadBalancer.getName() + " (id:" + loadBalancerId + "), permission denied."); // } // } // // if (userId == null) { // userId = Long.valueOf(1); // } // // Transaction txn = Transaction.currentTxn(); // LoadBalancerVO loadBalancerLock = null; // try { // // IPAddressVO ipAddress = _ipAddressDao.findById(loadBalancer.getIpAddress()); // if (ipAddress == null) { // return false; // } // // DomainRouterVO router = _routerMgr.getRouter(ipAddress.getAccountId(), ipAddress.getDataCenterId()); // List<PortForwardingRuleVO> fwRules = 
_firewallRulesDao.listByLoadBalancerId(loadBalancerId); // // txn.start(); // // if ((fwRules != null) && !fwRules.isEmpty()) { // for (PortForwardingRuleVO fwRule : fwRules) { // fwRule.setEnabled(false); // _firewallRulesDao.update(fwRule.getId(), fwRule); // } // // List<PortForwardingRuleVO> allLbRules = new ArrayList<PortForwardingRuleVO>(); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(loadBalancer.getAccountId(), ipAddress.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _firewallRulesDao.listIPForwarding(ipv.getAddress(), false); // allLbRules.addAll(rules); // } // // updateFirewallRules(loadBalancer.getIpAddress(), allLbRules, router); // // // firewall rules are updated, lock the load balancer as the mappings are updated // loadBalancerLock = _loadBalancerDao.acquireInLockTable(loadBalancerId); // if (loadBalancerLock == null) { // s_logger.warn("deleteLoadBalancer: failed to lock load balancer " + loadBalancerId + ", deleting mappings anyway..."); // } // // // remove all loadBalancer->VM mappings // List<LoadBalancerVMMapVO> lbVmMap = _loadBalancerVMMapDao.listByLoadBalancerId(loadBalancerId); // if (lbVmMap != null && !lbVmMap.isEmpty()) { // for (LoadBalancerVMMapVO lb : lbVmMap) { // _loadBalancerVMMapDao.remove(lb.getId()); // } // } // // // Save and create the event // String description; // String type = EventTypes.EVENT_NET_RULE_DELETE; // String ruleName = "load balancer"; // String level = EventVO.LEVEL_INFO; // Account accountOwner = _accountDao.findById(loadBalancer.getAccountId()); // // for (PortForwardingRuleVO updatedRule : fwRules) { // _firewallRulesDao.remove(updatedRule.getId()); // // description = "deleted " + ruleName + " rule [" + updatedRule.getSourceIpAddress() + ":" + updatedRule.getSourcePort() + "]->[" // + updatedRule.getDestinationIpAddress() + ":" + updatedRule.getDestinationPort() + "]" + " " + updatedRule.getProtocol(); // // 
EventUtils.saveEvent(userId, accountOwner.getId(), level, type, description); // } // } // // txn.commit(); // } catch (Exception ex) { // txn.rollback(); // s_logger.error("Unexpected exception deleting load balancer " + loadBalancerId, ex); // return false; // } finally { // if (loadBalancerLock != null) { // _loadBalancerDao.releaseFromLockTable(loadBalancerId); // } // } // // boolean success = _loadBalancerDao.remove(loadBalancerId); // // // save off an event for removing the load balancer // EventVO event = new EventVO(); // event.setUserId(userId); // event.setAccountId(loadBalancer.getAccountId()); // event.setType(EventTypes.EVENT_LOAD_BALANCER_DELETE); // if (success) { // event.setLevel(EventVO.LEVEL_INFO); // String params = "id="+loadBalancer.getId(); // event.setParameters(params); // event.setDescription("Successfully deleted load balancer " + loadBalancer.getName() + " (id:" + loadBalancer.getId() + ")"); // } else { // event.setLevel(EventVO.LEVEL_ERROR); // event.setDescription("Failed to delete load balancer " + loadBalancer.getName() + " (id:" + loadBalancer.getId() + ")"); // } // _eventDao.persist(event); // return success; // } // @Override @DB // public boolean assignToLoadBalancer(AssignToLoadBalancerRuleCmd cmd) throws NetworkRuleConflictException { // Long loadBalancerId = cmd.getLoadBalancerId(); // Long instanceIdParam = cmd.getVirtualMachineId(); // List<Long> instanceIds = cmd.getVirtualMachineIds(); // // if ((instanceIdParam == null) && (instanceIds == null)) { // throw new InvalidParameterValueException("Unable to assign to load balancer " + loadBalancerId + ", no instance id is specified."); // } // // if ((instanceIds == null) && (instanceIdParam != null)) { // instanceIds = new ArrayList<Long>(); // instanceIds.add(instanceIdParam); // } // // // FIXME: We should probably lock the load balancer here to prevent multiple updates... 
// LoadBalancerVO loadBalancer = _loadBalancerDao.findById(loadBalancerId); // if (loadBalancer == null) { // throw new InvalidParameterValueException("Failed to assign to load balancer " + loadBalancerId + ", the load balancer was not found."); // } // // // // Permission check... // Account account = UserContext.current().getAccount(); // if (account != null) { // if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) { // if (!_domainDao.isChildDomain(account.getDomainId(), loadBalancer.getDomainId())) { // throw new PermissionDeniedException("Failed to assign to load balancer " + loadBalancerId + ", permission denied."); // } // } else if (account.getType() != Account.ACCOUNT_TYPE_ADMIN && account.getId() != loadBalancer.getAccountId()) { // throw new PermissionDeniedException("Failed to assign to load balancer " + loadBalancerId + ", permission denied."); // } // } // // Transaction txn = Transaction.currentTxn(); // List<PortForwardingRuleVO> firewallRulesToApply = new ArrayList<PortForwardingRuleVO>(); // long accountId = 0; // DomainRouterVO router = null; // // List<LoadBalancerVMMapVO> mappedInstances = _loadBalancerVMMapDao.listByLoadBalancerId(loadBalancerId, false); // Set<Long> mappedInstanceIds = new HashSet<Long>(); // if (mappedInstances != null) { // for (LoadBalancerVMMapVO mappedInstance : mappedInstances) { // mappedInstanceIds.add(Long.valueOf(mappedInstance.getInstanceId())); // } // } // // List<Long> finalInstanceIds = new ArrayList<Long>(); // for (Long instanceId : instanceIds) { // if (mappedInstanceIds.contains(instanceId)) { // continue; // } else { // finalInstanceIds.add(instanceId); // } // // UserVmVO userVm = _vmDao.findById(instanceId); // if (userVm == null) { // s_logger.warn("Unable to find virtual machine with id " + instanceId); // throw new InvalidParameterValueException("Unable to find virtual machine with id " + instanceId); // } else { // // sanity check that the vm can be applied to the load balancer // 
ServiceOfferingVO offering = _serviceOfferingDao.findById(userVm.getServiceOfferingId()); // if ((offering == null) || !GuestIpType.Virtualized.equals(offering.getGuestIpType())) { // // we previously added these instanceIds to the loadBalancerVMMap, so remove them here as we are rejecting the API request // // without actually modifying the load balancer // _loadBalancerVMMapDao.remove(loadBalancerId, instanceIds, Boolean.TRUE); // // if (s_logger.isDebugEnabled()) { // s_logger.debug("Unable to add virtual machine " + userVm.toString() + " to load balancer " + loadBalancerId + ", bad network type (" + ((offering == null) ? "null" : offering.getGuestIpType()) + ")"); // } // // throw new InvalidParameterValueException("Unable to add virtual machine " + userVm.toString() + " to load balancer " + loadBalancerId + ", bad network type (" + ((offering == null) ? "null" : offering.getGuestIpType()) + ")"); // } // } // // if (accountId == 0) { // accountId = userVm.getAccountId(); // } else if (accountId != userVm.getAccountId()) { // s_logger.warn("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") belongs to account " + userVm.getAccountId() // + ", previous vm in list belongs to account " + accountId); // throw new InvalidParameterValueException("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") belongs to account " + userVm.getAccountId() // + ", previous vm in list belongs to account " + accountId); // } // // DomainRouterVO nextRouter = null; // if (userVm.getDomainRouterId() != null) { // nextRouter = _routerMgr.getRouter(userVm.getDomainRouterId()); // } // if (nextRouter == null) { // s_logger.warn("Unable to find router (" + userVm.getDomainRouterId() + ") for virtual machine with id " + instanceId); // throw new InvalidParameterValueException("Unable to find router (" + userVm.getDomainRouterId() + ") for virtual machine with id " + instanceId); // } // // if (router == null) { // router = nextRouter; // // // Make sure owner of 
router is owner of load balancer. Since we are already checking that all VMs belong to the same router, by checking router // // ownership once we'll make sure all VMs belong to the owner of the load balancer. // if (router.getAccountId() != loadBalancer.getAccountId()) { // throw new InvalidParameterValueException("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") does not belong to the owner of load balancer " + // loadBalancer.getName() + " (owner is account id " + loadBalancer.getAccountId() + ")"); // } // } else if (router.getId() != nextRouter.getId()) { // throw new InvalidParameterValueException("guest vm " + userVm.getHostName() + " (id:" + userVm.getId() + ") belongs to router " + nextRouter.getHostName() // + ", previous vm in list belongs to router " + router.getHostName()); // } // // // check for ip address/port conflicts by checking exising forwarding and loadbalancing rules // String ipAddress = loadBalancer.getIpAddress(); // String privateIpAddress = userVm.getGuestIpAddress(); // List<PortForwardingRuleVO> existingRulesOnPubIp = _rulesDao.listIPForwarding(ipAddress); // // if (existingRulesOnPubIp != null) { // for (PortForwardingRuleVO fwRule : existingRulesOnPubIp) { // if (!( (fwRule.isForwarding() == false) && // (fwRule.getGroupId() != null) && // (fwRule.getGroupId() == loadBalancer.getId()) )) { // // if the rule is not for the current load balancer, check to see if the private IP is our target IP, // // in which case we have a conflict // if (fwRule.getSourcePort().equals(loadBalancer.getPublicPort())) { // throw new NetworkRuleConflictException("An existing port forwarding service rule for " + ipAddress + ":" + loadBalancer.getPublicPort() // + " exists, found while trying to apply load balancer " + loadBalancer.getName() + " (id:" + loadBalancer.getId() + ") to instance " // + userVm.getHostName() + "."); // } // } else if (fwRule.getDestinationIpAddress().equals(privateIpAddress) && 
fwRule.getDestinationPort().equals(loadBalancer.getPrivatePort()) && fwRule.isEnabled()) { // // for the current load balancer, don't add the same instance to the load balancer more than once // continue; // } // } // } // // PortForwardingRuleVO newFwRule = new PortForwardingRuleVO(); // newFwRule.setAlgorithm(loadBalancer.getAlgorithm()); // newFwRule.setEnabled(true); // newFwRule.setForwarding(false); // newFwRule.setPrivatePort(loadBalancer.getPrivatePort()); // newFwRule.setPublicPort(loadBalancer.getPublicPort()); // newFwRule.setPublicIpAddress(loadBalancer.getIpAddress()); // newFwRule.setPrivateIpAddress(userVm.getGuestIpAddress()); // newFwRule.setGroupId(loadBalancer.getId()); // // firewallRulesToApply.add(newFwRule); // } // // // if there's no work to do, bail out early rather than reconfiguring the proxy with the existing rules // if (firewallRulesToApply.isEmpty()) { // return true; // } // // //Sync on domR // if(router == null){ // throw new InvalidParameterValueException("Failed to assign to load balancer " + loadBalancerId + ", the domain router was not found at " + loadBalancer.getIpAddress()); // } // else{ // cmd.synchronizeCommand("Router", router.getId()); // } // // IPAddressVO ipAddr = _ipAddressDao.findById(loadBalancer.getIpAddress()); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(accountId, ipAddr.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _rulesDao.listIpForwardingRulesForLoadBalancers(ipv.getAddress()); // firewallRulesToApply.addAll(rules); // } // // txn.start(); // // List<PortForwardingRuleVO> updatedRules = null; // if (router.getState().equals(State.Starting)) { // // Starting is a special case...if the router is starting that means the IP address hasn't yet been assigned to the domR and the update firewall rules script will fail. 
// // In this case, just store the rules and they will be applied when the router state is resent (after the router is started). // updatedRules = firewallRulesToApply; // } else { // updatedRules = updateFirewallRules(loadBalancer.getIpAddress(), firewallRulesToApply, router); // } // // // Save and create the event // String description; // String type = EventTypes.EVENT_NET_RULE_ADD; // String ruleName = "load balancer"; // String level = EventVO.LEVEL_INFO; // // LoadBalancerVO loadBalancerLock = null; // try { // loadBalancerLock = _loadBalancerDao.acquireInLockTable(loadBalancerId); // if (loadBalancerLock == null) { // s_logger.warn("assignToLoadBalancer: Failed to lock load balancer " + loadBalancerId + ", proceeding with updating loadBalancerVMMappings..."); // } // if ((updatedRules != null) && (updatedRules.size() == firewallRulesToApply.size())) { // // flag the instances as mapped to the load balancer // for (Long addedInstanceId : finalInstanceIds) { // LoadBalancerVMMapVO mappedVM = new LoadBalancerVMMapVO(loadBalancerId, addedInstanceId); // _loadBalancerVMMapDao.persist(mappedVM); // } // // /* We used to add these instances as pending when the API command is received on the server, and once they were applied, // * the pending status was removed. 
In the 2.2 API framework, this is no longer done and instead the new mappings just // * need to be persisted // List<LoadBalancerVMMapVO> pendingMappedVMs = _loadBalancerVMMapDao.listByLoadBalancerId(loadBalancerId, true); // for (LoadBalancerVMMapVO pendingMappedVM : pendingMappedVMs) { // if (instanceIds.contains(pendingMappedVM.getInstanceId())) { // LoadBalancerVMMapVO pendingMappedVMForUpdate = _loadBalancerVMMapDao.createForUpdate(); // pendingMappedVMForUpdate.setPending(false); // _loadBalancerVMMapDao.update(pendingMappedVM.getId(), pendingMappedVMForUpdate); // } // } // */ // // for (PortForwardingRuleVO updatedRule : updatedRules) { // _rulesDao.persist(updatedRule); // // description = "created new " + ruleName + " rule [" + updatedRule.getSourceIpAddress() + ":" // + updatedRule.getSourcePort() + "]->[" + updatedRule.getDestinationIpAddress() + ":" // + updatedRule.getDestinationPort() + "]" + " " + updatedRule.getProtocol(); // // EventUtils.saveEvent(UserContext.current().getUserId(), loadBalancer.getAccountId(), level, type, description); // } // txn.commit(); // return true; // } else { // // Remove the instanceIds from the load balancer since there was a failure. Make sure to commit the // // transaction here, otherwise the act of throwing the internal error exception will cause this // // remove operation to be rolled back. 
// _loadBalancerVMMapDao.remove(loadBalancerId, instanceIds, null); // txn.commit(); // // s_logger.warn("Failed to apply load balancer " + loadBalancer.getName() + " (id:" + loadBalancerId + ") to guest virtual machines " + StringUtils.join(instanceIds, ",")); // throw new CloudRuntimeException("Failed to apply load balancer " + loadBalancer.getName() + " (id:" + loadBalancerId + ") to guest virtual machine " + StringUtils.join(instanceIds, ",")); // } // } finally { // if (loadBalancerLock != null) { // _loadBalancerDao.releaseFromLockTable(loadBalancerId); // } // } // } // @Override @DB // public LoadBalancer createLoadBalancerRule(CreateLoadBalancerRuleCmd cmd) throws InvalidParameterValueException, PermissionDeniedException { // String publicIp = cmd.getPublicIp(); // // // make sure ip address exists // IPAddressVO ipAddr = _ipAddressDao.findById(cmd.getPublicIp()); // if (ipAddr == null) { // throw new InvalidParameterValueException("Unable to create load balancer rule, invalid IP address " + publicIp); // } // // VlanVO vlan = _vlanDao.findById(ipAddr.getVlanDbId()); // if (vlan != null) { // if (!VlanType.VirtualNetwork.equals(vlan.getVlanType())) { // throw new InvalidParameterValueException("Unable to create load balancer rule for IP address " + publicIp + ", only VirtualNetwork type IP addresses can be used for load balancers."); // } // } // else ERROR? 
// // // Verify input parameters // if ((ipAddr.getAccountId() == null) || (ipAddr.getAllocated() == null)) { // throw new InvalidParameterValueException("Unable to create load balancer rule, cannot find account owner for ip " + publicIp); // } // // Account account = UserContext.current().getAccount(); // if (account != null) { // if ((account.getType() == Account.ACCOUNT_TYPE_ADMIN) || (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)) { // if (!_domainDao.isChildDomain(account.getDomainId(), ipAddr.getDomainId())) { // throw new PermissionDeniedException("Unable to create load balancer rule on IP address " + publicIp + ", permission denied."); // } // } else if (account.getId() != ipAddr.getAccountId().longValue()) { // throw new PermissionDeniedException("Unable to create load balancer rule, account " + account.getAccountName() + " doesn't own ip address " + publicIp); // } // } // // String loadBalancerName = cmd.getLoadBalancerRuleName(); // LoadBalancerVO existingLB = _loadBalancerDao.findByAccountAndName(ipAddr.getAccountId(), loadBalancerName); // if (existingLB != null) { // throw new InvalidParameterValueException("Unable to create load balancer rule, an existing load balancer rule with name " + loadBalancerName + " already exists."); // } // // // validate params // String publicPort = cmd.getPublicPort(); // String privatePort = cmd.getPrivatePort(); // String algorithm = cmd.getAlgorithm(); // // if (!NetUtils.isValidPort(publicPort)) { // throw new InvalidParameterValueException("publicPort is an invalid value"); // } // if (!NetUtils.isValidPort(privatePort)) { // throw new InvalidParameterValueException("privatePort is an invalid value"); // } // if ((algorithm == null) || !NetUtils.isValidAlgorithm(algorithm)) { // throw new InvalidParameterValueException("Invalid algorithm"); // } // // boolean locked = false; // try { // LoadBalancerVO exitingLB = _loadBalancerDao.findByIpAddressAndPublicPort(publicIp, publicPort); // if (exitingLB != 
null) { // throw new InvalidParameterValueException("IP Address/public port already load balanced by an existing load balancer rule"); // } // // List<PortForwardingRuleVO> existingFwRules = _rulesDao.listIPForwarding(publicIp, publicPort, true); // if ((existingFwRules != null) && !existingFwRules.isEmpty()) { // throw new InvalidParameterValueException("IP Address (" + publicIp + ") and port (" + publicPort + ") already in use"); // } // // ipAddr = _ipAddressDao.acquireInLockTable(publicIp); // if (ipAddr == null) { // throw new PermissionDeniedException("User does not own ip address " + publicIp); // } // // locked = true; // // LoadBalancerVO loadBalancer = new LoadBalancerVO(loadBalancerName, cmd.getDescription(), ipAddr.getAccountId(), publicIp, publicPort, privatePort, algorithm); // loadBalancer = _loadBalancerDao.persist(loadBalancer); // Long id = loadBalancer.getId(); // // // Save off information for the event that the security group was applied // Long userId = UserContext.current().getUserId(); // if (userId == null) { // userId = Long.valueOf(User.UID_SYSTEM); // } // // EventVO event = new EventVO(); // event.setUserId(userId); // event.setAccountId(ipAddr.getAccountId()); // event.setType(EventTypes.EVENT_LOAD_BALANCER_CREATE); // // if (id == null) { // event.setDescription("Failed to create load balancer " + loadBalancer.getName() + " on ip address " + publicIp + "[" + publicPort + "->" + privatePort + "]"); // event.setLevel(EventVO.LEVEL_ERROR); // } else { // event.setDescription("Successfully created load balancer " + loadBalancer.getName() + " on ip address " + publicIp + "[" + publicPort + "->" + privatePort + "]"); // String params = "id="+loadBalancer.getId()+"\ndcId="+ipAddr.getDataCenterId(); // event.setParameters(params); // event.setLevel(EventVO.LEVEL_INFO); // } // _eventDao.persist(event); // // return _loadBalancerDao.findById(id); // } finally { // if (locked) { // _ipAddressDao.releaseFromLockTable(publicIp); // } // } // } // 
@Override // public boolean updateLoadBalancerRules(final List<PortForwardingRuleVO> fwRules, final DomainRouterVO router, Long hostId) { // // for (PortForwardingRuleVO rule : fwRules) { // // Determine the the VLAN ID and netmask of the rule's public IP address // IPAddressVO ip = _ipAddressDao.findById(rule.getSourceIpAddress()); // VlanVO vlan = _vlanDao.findById(new Long(ip.getVlanDbId())); // String vlanNetmask = vlan.getVlanNetmask(); // // rule.setVlanNetmask(vlanNetmask); // } // // final LoadBalancerConfigurator cfgrtr = new HAProxyConfigurator(); // final String [] cfg = cfgrtr.generateConfiguration(fwRules); // final String [][] addRemoveRules = cfgrtr.generateFwRules(fwRules); // final LoadBalancerCfgCommand cmd = new LoadBalancerCfgCommand(cfg, addRemoveRules, router.getInstanceName(), router.getPrivateIpAddress()); // final Answer ans = _agentMgr.easySend(hostId, cmd); // if (ans == null) { // return false; // } else { // return ans.getResult(); // } // } // @Override @DB // public LoadBalancerVO updateLoadBalancerRule(UpdateLoadBalancerRuleCmd cmd) throws InvalidParameterValueException, PermissionDeniedException{ // Long loadBalancerId = cmd.getId(); // String privatePort = cmd.getPrivatePort(); // String algorithm = cmd.getAlgorithm(); // String name = cmd.getLoadBalancerName(); // String description = cmd.getDescription(); // Account account = UserContext.current().getAccount(); // // //Verify input parameters // LoadBalancerVO loadBalancer = _loadBalancerDao.findById(loadBalancerId); // if (loadBalancer == null) { // throw new InvalidParameterValueException("Unable to find load balancer rule " + loadBalancerId + " for update."); // } // // // make sure the name's not already in use // if (name != null) { // LoadBalancerVO existingLB = _loadBalancerDao.findByAccountAndName(loadBalancer.getAccountId(), name); // if ((existingLB != null) && (existingLB.getId() != loadBalancer.getId())) { // throw new InvalidParameterValueException("Unable to update 
load balancer " + loadBalancer.getName() + " with new name " + name + ", the name is already in use."); // } // } // // Account lbOwner = _accountDao.findById(loadBalancer.getAccountId()); // if (lbOwner == null) { // throw new InvalidParameterValueException("Unable to update load balancer rule, cannot find owning account"); // } // // Long accountId = lbOwner.getId(); // if (account != null) { // if (!isAdmin(account.getType())) { // if (account.getId() != accountId.longValue()) { // throw new PermissionDeniedException("Unable to update load balancer rule, permission denied"); // } // } else if (!_domainDao.isChildDomain(account.getDomainId(), lbOwner.getDomainId())) { // throw new PermissionDeniedException("Unable to update load balancer rule, permission denied."); // } // } // // String updatedPrivatePort = ((privatePort == null) ? loadBalancer.getPrivatePort() : privatePort); // String updatedAlgorithm = ((algorithm == null) ? loadBalancer.getAlgorithm() : algorithm); // String updatedName = ((name == null) ? loadBalancer.getName() : name); // String updatedDescription = ((description == null) ? 
loadBalancer.getDescription() : description); // // Transaction txn = Transaction.currentTxn(); // try { // txn.start(); // loadBalancer.setPrivatePort(updatedPrivatePort); // loadBalancer.setAlgorithm(updatedAlgorithm); // loadBalancer.setName(updatedName); // loadBalancer.setDescription(updatedDescription); // _loadBalancerDao.update(loadBalancer.getId(), loadBalancer); // // List<PortForwardingRuleVO> fwRules = _firewallRulesDao.listByLoadBalancerId(loadBalancer.getId()); // if ((fwRules != null) && !fwRules.isEmpty()) { // for (PortForwardingRuleVO fwRule : fwRules) { // fwRule.setPrivatePort(updatedPrivatePort); // fwRule.setAlgorithm(updatedAlgorithm); // _firewallRulesDao.update(fwRule.getId(), fwRule); // } // } // txn.commit(); // } catch (RuntimeException ex) { // s_logger.warn("Unhandled exception trying to update load balancer rule", ex); // txn.rollback(); // throw ex; // } finally { // txn.close(); // } // // // now that the load balancer has been updated, reconfigure the HA Proxy on the router with all the LB rules // List<PortForwardingRuleVO> allLbRules = new ArrayList<PortForwardingRuleVO>(); // IPAddressVO ipAddress = _ipAddressDao.findById(loadBalancer.getIpAddress()); // List<IPAddressVO> ipAddrs = listPublicIpAddressesInVirtualNetwork(loadBalancer.getAccountId(), ipAddress.getDataCenterId(), null); // for (IPAddressVO ipv : ipAddrs) { // List<PortForwardingRuleVO> rules = _firewallRulesDao.listIPForwarding(ipv.getAddress(), false); // allLbRules.addAll(rules); // } // // IPAddressVO ip = _ipAddressDao.findById(loadBalancer.getIpAddress()); // DomainRouterVO router = _routerMgr.getRouter(ip.getAccountId(), ip.getDataCenterId()); // updateFirewallRules(loadBalancer.getIpAddress(), allLbRules, router); // return _loadBalancerDao.findById(loadBalancer.getId()); // } @Override public List<UserVmVO> listLoadBalancerInstances(ListLoadBalancerRuleInstancesCmd cmd) throws PermissionDeniedException { Account account = UserContext.current().getCaller(); 
Long loadBalancerId = cmd.getId(); Boolean applied = cmd.isApplied(); if (applied == null) { applied = Boolean.TRUE; } LoadBalancerVO loadBalancer = _lbDao.findById(loadBalancerId); if (loadBalancer == null) { return null; } long lbAcctId = loadBalancer.getAccountId(); if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) { Account userAccount = _accountDao.findById(lbAcctId); if (!_domainDao.isChildDomain(account.getDomainId(), userAccount.getDomainId())) { throw new PermissionDeniedException("Invalid load balancer rule id (" + loadBalancerId + ") given, unable to list load balancer instances."); } } else if (account.getType() == Account.ACCOUNT_TYPE_NORMAL && account.getId() != lbAcctId) { throw new PermissionDeniedException("Unable to list load balancer instances, account " + account.getAccountName() + " does not own load balancer rule " + loadBalancer.getName()); } List<UserVmVO> loadBalancerInstances = new ArrayList<UserVmVO>(); List<LoadBalancerVMMapVO> vmLoadBalancerMappings = null; if (applied) { // List only the instances that have actually been applied to the load balancer (pending is false). vmLoadBalancerMappings = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId, false); } else { // List all instances applied, even pending ones that are currently being assigned, so that the semantics // of "what instances can I apply to this load balancer" are maintained. 
vmLoadBalancerMappings = _lb2VmMapDao.listByLoadBalancerId(loadBalancerId); } List<Long> appliedInstanceIdList = new ArrayList<Long>(); if ((vmLoadBalancerMappings != null) && !vmLoadBalancerMappings.isEmpty()) { for (LoadBalancerVMMapVO vmLoadBalancerMapping : vmLoadBalancerMappings) { appliedInstanceIdList.add(vmLoadBalancerMapping.getInstanceId()); } } IPAddressVO addr = _ipAddressDao.findById(loadBalancer.getSourceIpAddressId()); List<UserVmVO> userVms = _vmDao.listVirtualNetworkInstancesByAcctAndZone(loadBalancer.getAccountId(), addr.getDataCenterId(), loadBalancer.getNetworkId()); for (UserVmVO userVm : userVms) { // if the VM is destroyed, being expunged, in an error state, or in an unknown state, skip it switch (userVm.getState()) { case Destroyed: case Expunging: case Error: case Unknown: continue; } boolean isApplied = appliedInstanceIdList.contains(userVm.getId()); if (!applied && !isApplied) { loadBalancerInstances.add(userVm); } else if (applied && isApplied) { loadBalancerInstances.add(userVm); } } return loadBalancerInstances; } @Override public List<LoadBalancerVO> searchForLoadBalancers(ListLoadBalancerRulesCmd cmd) throws InvalidParameterValueException, PermissionDeniedException { Account caller = UserContext.current().getCaller(); Account owner = null; Long domainId = cmd.getDomainId(); String accountName = cmd.getAccountName(); Long accountId = null; Long ipId = cmd.getPublicIpId(); if (accountName != null && domainId != null) { owner = _accountDao.findActiveAccount(accountName, domainId); if (owner == null) { accountId = -1L; } } if (caller.getType() == Account.ACCOUNT_TYPE_NORMAL) { accountId = caller.getAccountId(); } else if (caller.getType() == Account.ACCOUNT_TYPE_ADMIN && owner != null) { accountId = owner.getId(); } else if (owner != null && caller.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN){ _accountMgr.checkAccess(caller, owner); } Filter searchFilter = new Filter(LoadBalancerVO.class, "id", true, cmd.getStartIndex(), 
cmd.getPageSizeVal()); Object id = cmd.getId(); Object name = cmd.getLoadBalancerRuleName(); Object keyword = cmd.getKeyword(); Object instanceId = cmd.getVirtualMachineId(); SearchBuilder<LoadBalancerVO> sb = _lbDao.createSearchBuilder(); sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ); sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE); sb.and("sourceIpAddress", sb.entity().getSourceIpAddressId(), SearchCriteria.Op.EQ); sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ); sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ); if (instanceId != null) { SearchBuilder<LoadBalancerVMMapVO> lbVMSearch = _lb2VmMapDao.createSearchBuilder(); lbVMSearch.and("instanceId", lbVMSearch.entity().getInstanceId(), SearchCriteria.Op.EQ); sb.join("lbVMSearch", lbVMSearch, sb.entity().getId(), lbVMSearch.entity().getLoadBalancerId(), JoinBuilder.JoinType.INNER); } SearchCriteria<LoadBalancerVO> sc = sb.create(); if (keyword != null) { SearchCriteria<LoadBalancerVO> ssc = _lbDao.createSearchCriteria(); ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%"); ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%"); sc.addAnd("name", SearchCriteria.Op.SC, ssc); } if (name != null) { sc.setParameters("name", "%" + name + "%"); } if (id != null) { sc.setParameters("id", id); } if (ipId != null) { sc.setParameters("sourceIpAddress", ipId); } if (instanceId != null) { sc.setJoinParameters("lbVMSearch", "instanceId", instanceId); } if (accountId != null) { sc.setParameters("accountId", accountId); } else if (domainId != null) { sc.setParameters("domainId", domainId); } return _lbDao.search(sc, searchFilter); } @Override public List<LoadBalancingRule> listByNetworkId(long networkId) { List<LoadBalancerVO> lbs = _lbDao.listByNetworkId(networkId); List<LoadBalancingRule> lbRules = new ArrayList<LoadBalancingRule>(); for (LoadBalancerVO lb : lbs) { List<LbDestination> dstList = getExistingDestinations(lb.getId()); 
LoadBalancingRule loadBalancing = new LoadBalancingRule(lb, dstList); lbRules.add(loadBalancing); } return lbRules; } }
bug 8327: successive removeFromLoadBalancerRule(s) renders LB rule dysfunctional status 8327: resolved fixed
server/src/com/cloud/network/lb/LoadBalancingRulesManagerImpl.java
bug 8327: successive removeFromLoadBalancerRule(s) renders LB rule dysfunctional status 8327: resolved fixed
Java
apache-2.0
87ffdb5a02ab6ed1d5e2bbbdffcddfe0d1af6ccf
0
ohjongin/android-log-viewer,KartiKeya123/android-log-viewer,vdiskmobile/android-log-viewer,CCDMK/android-log-viewer,ujiro99/android-log-viewer,ujiro99/android-log-viewer,ohjongin/android-log-viewer,KartiKeya123/android-log-viewer,vdiskmobile/android-log-viewer,CCDMK/android-log-viewer
/* * Copyright 2011 Mikhail Lopatkin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bitbucket.mlopatkin.android.liblogcat; import java.text.ParseException; import java.util.Date; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.bitbucket.mlopatkin.android.liblogcat.LogRecord.Buffer; import org.bitbucket.mlopatkin.android.liblogcat.LogRecord.Priority; /** * Utility class to parse log record lines in different formats. 
*/ public class LogRecordParser { private LogRecordParser() { } private static final String TIMESTAMP_REGEX = "(\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d)"; private static final String ID_REGEX = "(\\d+)"; private static final String PID_REGEX = ID_REGEX; private static final String PID_BRACKETS = "\\(\\s*" + PID_REGEX + "\\)"; private static final String TID_REGEX = ID_REGEX; private static final String TAG_REGEX = "(.*?)"; private static final String PRIORITY_REGEX = "([AVDIWEF])"; private static final String MESSAGE_REGEX = "(.*)"; private static final String SEP = "\\s+"; private static final String SEP_OPT = "\\s*"; private static class ThreadTime { private static final String TAG = TAG_REGEX + "\\s*: "; private static final String[] LOG_RECORD_FIELDS = { TIMESTAMP_REGEX, SEP, PID_REGEX, SEP, TID_REGEX, SEP, PRIORITY_REGEX, SEP, TAG, MESSAGE_REGEX }; private static final Pattern threadTimeRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return threadTimeRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } try { Date dateTime = TimeFormatUtils.getTimeFromString(m.group(1)); int pid = Integer.parseInt(m.group(2)); int tid = Integer.parseInt(m.group(3)); Priority priority = getPriorityFromChar(m.group(4)); String tag = m.group(5); String message = m.group(6); return new LogRecord(dateTime, pid, tid, priority, tag, message, buffer); } catch (ParseException e) { return new LogRecord(new Date(), -1, -1, Priority.ERROR, "Parse Error", m.group()); } } } private static class Brief { private static final String[] LOG_RECORD_FIELDS = { PRIORITY_REGEX, "/", TAG_REGEX, SEP_OPT, PID_BRACKETS, ": ", MESSAGE_REGEX }; private static final Pattern briefRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return briefRecordPattern.matcher(line); } static 
LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } Priority priority = getPriorityFromChar(m.group(1)); String tag = m.group(2); int pid = Integer.parseInt(m.group(3)); String message = m.group(4); return new LogRecord(null, pid, LogRecord.NO_ID, priority, tag, message, buffer); } } private static class Process { private static final String TAG_BRACKETS = "\\(" + TAG_REGEX + "\\)"; private static final String[] LOG_RECORD_FIELDS = { PRIORITY_REGEX, PID_BRACKETS, " ", MESSAGE_REGEX, " ", TAG_BRACKETS }; private static final Pattern processRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return processRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } Priority priority = getPriorityFromChar(m.group(1)); int pid = Integer.parseInt(m.group(2)); String message = m.group(3); String tag = m.group(4); return new LogRecord(null, pid, LogRecord.NO_ID, priority, tag, message, buffer); } } private static class Tag { private static final String[] LOG_RECORD_FIELDS = { PRIORITY_REGEX, "/", TAG_REGEX, ": ", MESSAGE_REGEX }; private static final Pattern tagRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return tagRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } Priority priority = getPriorityFromChar(m.group(1)); String tag = m.group(2); String message = m.group(3); return new LogRecord(null, LogRecord.NO_ID, LogRecord.NO_ID, priority, tag, message, buffer); } } private static class Time { private static final String[] LOG_RECORD_FIELDS = { TIMESTAMP_REGEX, SEP, PRIORITY_REGEX, "/", TAG_REGEX, SEP_OPT, PID_BRACKETS, ": ", MESSAGE_REGEX }; private static final Pattern timeRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + 
"$"); static Matcher matchLine(String line) { return timeRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } try { Date dateTime = TimeFormatUtils.getTimeFromString(m.group(1)); Priority priority = getPriorityFromChar(m.group(2)); String tag = m.group(3); int pid = Integer.parseInt(m.group(4)); String message = m.group(5); return new LogRecord(dateTime, pid, LogRecord.NO_ID, priority, tag, message, buffer); } catch (ParseException e) { return new LogRecord(new Date(), -1, -1, Priority.ERROR, "Parse Error", m.group()); } } } private static Priority getPriorityFromChar(String next) { next = next.trim(); for (Priority val : Priority.values()) { if (val.getLetter().equalsIgnoreCase(next)) { return val; } } throw new IllegalArgumentException("Symbol '" + next + "' doesn't correspond to valid priority value"); } public static LogRecord parseThreadTime(Buffer buffer, String line) { return ThreadTime.createFromGroups(buffer, ThreadTime.matchLine(line)); } public static LogRecord parseBrief(Buffer buffer, String line) { return Brief.createFromGroups(buffer, Brief.matchLine(line)); } public static LogRecord parseProcess(Buffer buffer, String line) { return Process.createFromGroups(buffer, Process.matchLine(line)); } public static LogRecord parseTag(Buffer buffer, String line) { return Tag.createFromGroups(buffer, Tag.matchLine(line)); } private static final String LOG_BEGIN = "--------- beginning of "; public static boolean isLogBeginningLine(String line) { return (line != null) && line.startsWith(LOG_BEGIN); } public static LogRecord parseTime(Buffer buffer, String line) { return Time.createFromGroups(buffer, Time.matchLine(line)); } }
src/org/bitbucket/mlopatkin/android/liblogcat/LogRecordParser.java
/* * Copyright 2011 Mikhail Lopatkin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bitbucket.mlopatkin.android.liblogcat; import java.text.ParseException; import java.util.Date; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.bitbucket.mlopatkin.android.liblogcat.LogRecord.Buffer; import org.bitbucket.mlopatkin.android.liblogcat.LogRecord.Priority; /** * Utility class to parse log record lines in different formats. 
*/ public class LogRecordParser { private LogRecordParser() { } private static final String TIMESTAMP_REGEX = "(\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d)"; private static final String ID_REGEX = "(\\d+)"; private static final String PID_REGEX = ID_REGEX; private static final String PID_BRACKETS = "\\(\\s*" + PID_REGEX + "\\)"; private static final String TID_REGEX = ID_REGEX; private static final String TAG_REGEX = "(.*?)"; private static final String PRIORITY_REGEX = "([AVDIWEF])"; private static final String MESSAGE_REGEX = "(.*)"; private static final String SEP = "\\s+"; private static class ThreadTime { private static final String TAG = TAG_REGEX + "\\s*: "; private static final String[] LOG_RECORD_FIELDS = { TIMESTAMP_REGEX, SEP, PID_REGEX, SEP, TID_REGEX, SEP, PRIORITY_REGEX, SEP, TAG, MESSAGE_REGEX }; private static final Pattern threadTimeRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return threadTimeRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } try { Date dateTime = TimeFormatUtils.getTimeFromString(m.group(1)); int pid = Integer.parseInt(m.group(2)); int tid = Integer.parseInt(m.group(3)); Priority priority = getPriorityFromChar(m.group(4)); String tag = m.group(5); String message = m.group(6); return new LogRecord(dateTime, pid, tid, priority, tag, message, buffer); } catch (ParseException e) { return new LogRecord(new Date(), -1, -1, Priority.ERROR, "Parse Error", m.group()); } } } private static class Brief { private static final String[] LOG_RECORD_FIELDS = { PRIORITY_REGEX, "/", TAG_REGEX, SEP, PID_BRACKETS, ": ", MESSAGE_REGEX }; private static final Pattern briefRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return briefRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { 
if (!m.matches()) { return null; } Priority priority = getPriorityFromChar(m.group(1)); String tag = m.group(2); int pid = Integer.parseInt(m.group(3)); String message = m.group(4); return new LogRecord(null, pid, LogRecord.NO_ID, priority, tag, message, buffer); } } private static class Process { private static final String TAG_BRACKETS = "\\(" + TAG_REGEX + "\\)"; private static final String[] LOG_RECORD_FIELDS = { PRIORITY_REGEX, PID_BRACKETS, " ", MESSAGE_REGEX, " ", TAG_BRACKETS }; private static final Pattern processRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return processRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } Priority priority = getPriorityFromChar(m.group(1)); int pid = Integer.parseInt(m.group(2)); String message = m.group(3); String tag = m.group(4); return new LogRecord(null, pid, LogRecord.NO_ID, priority, tag, message, buffer); } } private static class Tag { private static final String[] LOG_RECORD_FIELDS = { PRIORITY_REGEX, "/", TAG_REGEX, ": ", MESSAGE_REGEX }; private static final Pattern tagRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return tagRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } Priority priority = getPriorityFromChar(m.group(1)); String tag = m.group(2); String message = m.group(3); return new LogRecord(null, LogRecord.NO_ID, LogRecord.NO_ID, priority, tag, message, buffer); } } private static class Time { private static final String[] LOG_RECORD_FIELDS = { TIMESTAMP_REGEX, SEP, PRIORITY_REGEX, "/", TAG_REGEX, SEP, PID_BRACKETS, ": ", MESSAGE_REGEX }; private static final Pattern timeRecordPattern = Pattern.compile("^" + StringUtils.join(LOG_RECORD_FIELDS) + "$"); static Matcher matchLine(String line) { return 
timeRecordPattern.matcher(line); } static LogRecord createFromGroups(Buffer buffer, Matcher m) { if (!m.matches()) { return null; } try { Date dateTime = TimeFormatUtils.getTimeFromString(m.group(1)); Priority priority = getPriorityFromChar(m.group(2)); String tag = m.group(3); int pid = Integer.parseInt(m.group(4)); String message = m.group(5); return new LogRecord(dateTime, pid, LogRecord.NO_ID, priority, tag, message, buffer); } catch (ParseException e) { return new LogRecord(new Date(), -1, -1, Priority.ERROR, "Parse Error", m.group()); } } } private static Priority getPriorityFromChar(String next) { next = next.trim(); for (Priority val : Priority.values()) { if (val.getLetter().equalsIgnoreCase(next)) { return val; } } throw new IllegalArgumentException("Symbol '" + next + "' doesn't correspond to valid priority value"); } public static LogRecord parseThreadTime(Buffer buffer, String line) { return ThreadTime.createFromGroups(buffer, ThreadTime.matchLine(line)); } public static LogRecord parseBrief(Buffer buffer, String line) { return Brief.createFromGroups(buffer, Brief.matchLine(line)); } public static LogRecord parseProcess(Buffer buffer, String line) { return Process.createFromGroups(buffer, Process.matchLine(line)); } public static LogRecord parseTag(Buffer buffer, String line) { return Tag.createFromGroups(buffer, Tag.matchLine(line)); } private static final String LOG_BEGIN = "--------- beginning of "; public static boolean isLogBeginningLine(String line) { return (line != null) && line.startsWith(LOG_BEGIN); } public static LogRecord parseTime(Buffer buffer, String line) { return Time.createFromGroups(buffer, Time.matchLine(line)); } }
Fixed to pass tests
src/org/bitbucket/mlopatkin/android/liblogcat/LogRecordParser.java
Fixed to pass tests
Java
apache-2.0
7d5e298aa2829c9253902bd3704aca7cea6b7625
0
nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch
// increases or decreases a single global value called the accumulator by the value given in the argument. public class Accumulator extends OpCode { public Accumulator (int amount) { super(OpCode.ACCUMULATOR); _amount = amount; } private int _amount; }
AdventOfCode/2020/day8/Accumulator.java
// increases or decreases a single global value called the accumulator by the value given in the argument. public class Accumlator extends OpCode { public Accumulator (int amount) { super(OpCode.ACCUMULATOR); _amount = amount; } private int _amount; }
Update Accumulator.java
AdventOfCode/2020/day8/Accumulator.java
Update Accumulator.java
Java
apache-2.0
d8d9d900354c6d9c0d81034d58124bae7f4bec11
0
chrismattmann/lucene-geo-gazetteer,chrismattmann/lucene-geo-gazetteer
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.usc.ir.geo.gazetteer; import java.io.BufferedReader; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.PriorityQueue; import java.util.logging.Level; import java.util.logging.Logger; import edu.usc.ir.geo.gazetteer.domain.Location; import edu.usc.ir.geo.gazetteer.service.Launcher; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleField; import 
org.apache.lucene.document.Field; import org.apache.lucene.document.IntField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.queryparser.classic.MultiFieldQueryParser; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import com.google.gson.Gson; public class GeoNameResolver implements Closeable { private static final String JSON_OPT = "json"; /** * Below constants define name of field in lucene index */ public static final String FIELD_NAME_ID = "ID"; public static final String FIELD_NAME_NAME = "name"; public static final String FIELD_NAME_LONGITUDE = "longitude"; public static final String FIELD_NAME_LATITUDE = "latitude"; public static final String FIELD_NAME_ALTERNATE_NAMES = "alternatenames"; public static final String FIELD_NAME_FEATURE_CODE = "featureCode"; public static final String FIELD_NAME_COUNTRY_CODE = "countryCode"; public static final String FIELD_NAME_ADMIN1_CODE = "admin1Code"; public static final String FIELD_NAME_ADMIN2_CODE = "admin2Code"; public static final String FIELD_NAME_POPULATION = "population"; /** * Below constants define weight multipliers used for result relevance. 
*/ private static final int WEIGHT_SORT_ORDER = 20; private static final int WEIGHT_SIZE_ALT_NAME = 50; private static final int WEIGHT_NAME_MATCH = 20000; private static final int WEIGHT_NAME_PART_MATCH = 15000; private static final Logger LOG = Logger.getLogger(GeoNameResolver.class .getName()); private static final Double OUT_OF_BOUNDS = 999999.0; private static Analyzer analyzer = new StandardAnalyzer(); private static IndexWriter indexWriter; private static Directory indexDir; private static int hitsPerPage = 8; private IndexReader indexReader; public GeoNameResolver(){ } /** * Creates a GeoNameResolver for given path * @param indexPath the path to lucene index * @throws IOException */ public GeoNameResolver(String indexPath) throws IOException { this.indexReader = createIndexReader(indexPath); } /** * * @param locationNames List of location na,es * @param count Number of results per location * @return resolved Geo Names * @throws IOException */ public HashMap<String, List<Location>> searchGeoName(List<String> locationNames, int count) throws IOException { return resolveEntities(locationNames, count, this.indexReader); } /** * Search corresponding GeoName for each location entity * @param count * Number of results for one locations * @param querystr * it's the NER actually * * @return HashMap each name has a list of resolved entities * @throws IOException * @throws RuntimeException */ public HashMap<String, List<Location>> searchGeoName(String indexerPath, List<String> locationNameEntities, int count) throws IOException { if (locationNameEntities.size() == 0 || locationNameEntities.get(0).length() == 0) return new HashMap<String, List<Location>>(); IndexReader reader = createIndexReader(indexerPath); HashMap<String, List<Location>> resolvedEntities = resolveEntities(locationNameEntities, count, reader); reader.close(); return resolvedEntities; } private IndexReader createIndexReader(String indexerPath) throws IOException { File indexfile = new 
File(indexerPath); indexDir = FSDirectory.open(indexfile.toPath()); if (!DirectoryReader.indexExists(indexDir)) { LOG.log(Level.SEVERE, "No Lucene Index Dierctory Found, Invoke indexBuild() First !"); System.exit(1); } return DirectoryReader.open(indexDir); } private HashMap<String, List<Location>> resolveEntities(List<String> locationNames, int count, IndexReader reader) throws IOException { if (locationNames.size() >= 200) hitsPerPage = 5; // avoid heavy computation IndexSearcher searcher = new IndexSearcher(reader); Query q = null; HashMap<String, List<Location>> allCandidates = new HashMap<String, List<Location>>(); for (String name : locationNames) { if (!allCandidates.containsKey(name)) { try { //query is wrapped in additional quotes (") to avoid query tokenization on space q = new MultiFieldQueryParser(new String[] { FIELD_NAME_NAME, FIELD_NAME_ALTERNATE_NAMES }, analyzer).parse(String.format("\"%s\"", name) ); //sort descending on population SortField populationSort = new SortedNumericSortField(FIELD_NAME_POPULATION, SortField.Type.LONG, true); Sort sort = new Sort(populationSort); //Fetch 3 times desired values, these will be sorted on code and only desired number will be kept ScoreDoc[] hits = searcher.search(q, hitsPerPage * 3 , sort).scoreDocs; List<Location> topHits = new ArrayList<Location>(); for (int i = 0; i < hits.length; ++i) { Location tmpLocObj = new Location(); int docId = hits[i].doc; Document d; try { d = searcher.doc(docId); tmpLocObj.setName(d.get(FIELD_NAME_NAME)); tmpLocObj.setLongitude(d.get(FIELD_NAME_LONGITUDE)); tmpLocObj.setLatitude(d.get(FIELD_NAME_LATITUDE)); //If alternate names are empty put name as actual name //This covers missing data and equals weight for later computation if (d.get(FIELD_NAME_ALTERNATE_NAMES).isEmpty()){ tmpLocObj.setAlternateNames(d.get(FIELD_NAME_NAME)); }else{ tmpLocObj.setAlternateNames(d.get(FIELD_NAME_ALTERNATE_NAMES)); } tmpLocObj.setCountryCode(d.get(FIELD_NAME_COUNTRY_CODE)); 
tmpLocObj.setAdmin1Code(d.get(FIELD_NAME_ADMIN1_CODE)); tmpLocObj.setAdmin2Code(d.get(FIELD_NAME_ADMIN2_CODE)); tmpLocObj.setFeatureCode(d.get(FIELD_NAME_FEATURE_CODE)); } catch (IOException e) { e.printStackTrace(); } topHits.add(tmpLocObj); } //Picking hitsPerPage number of locations from feature code sorted list allCandidates.put(name, pickTopSortedByCode(topHits,hitsPerPage)); } catch (org.apache.lucene.queryparser.classic.ParseException e) { e.printStackTrace(); } } } HashMap<String, List<Location>> resolvedEntities = new HashMap<String, List<Location>>(); pickBestCandidates(resolvedEntities, allCandidates, count); return resolvedEntities; } /** * Sorts inputLocations as per FeatureCodeComparator and returns at most topCount locations * @param inputLocations List of locations to be sorted * @param topCount Number of locations to be kept in curtailed list * @return List of at most topCount locations sorted by edu.usc.ir.geo.gazetteer.CustomLuceneGeoGazetteerComparator.FeatureCodeComparator */ private List<Location> pickTopSortedByCode(List<Location> inputLocations, int topCount) { if(inputLocations == null || inputLocations.size()==0){ return new ArrayList<>(); } Collections.sort(inputLocations, new CustomLuceneGeoGazetteerComparator.FeatureCodeComparator()); return inputLocations.subList(0, inputLocations.size() > topCount ? topCount : inputLocations.size() - 1); } /** * Select the best match for each location name extracted from a document, * choosing from among a list of lists of candidate matches. 
Filter uses the * following features: 1) edit distance between name and the resolved name, * choose smallest one 2) content (haven't implemented) * * @param resolvedEntities * final result for the input stream * @param allCandidates * each location name may hits several documents, this is the * collection for all hitted documents * @param count * Number of results for one locations * @throws IOException * @throws RuntimeException */ private void pickBestCandidates( HashMap<String, List<Location>> resolvedEntities, HashMap<String, List<Location>> allCandidates, int count) { for (String extractedName : allCandidates.keySet()) { List<Location> cur = allCandidates.get(extractedName); if(cur.isEmpty()) continue;//continue if no results found int maxWeight = Integer.MIN_VALUE ; //In case weight is equal for all return top element int bestIndex = 0; //Priority queue to return top elements PriorityQueue<Location> pq = new PriorityQueue<>(cur.size(), new Comparator<Location>() { @Override public int compare(Location o1, Location o2) { return Integer.compare(o2.getWeight(), o1.getWeight()); } }); for (int i = 0; i < cur.size(); ++i) { int weight = 0; // get cur's ith resolved entry's name String resolvedName = String.format(" %s ", cur.get(i).getName()); if (resolvedName.contains(String.format(" %s ", extractedName))) { // Assign a weight as per configuration if extracted name is found as a exact word in name weight = WEIGHT_NAME_MATCH; } else if (resolvedName.contains(extractedName)) { // Assign a weight as per configuration if extracted name is found partly in name weight = WEIGHT_NAME_PART_MATCH; } // get all alternate names of cur's ith resolved entry's String[] altNames = cur.get(i).getAlternateNames().split(","); float altEditDist = 0; for(String altName : altNames){ if(altName.contains(extractedName)){ altEditDist+=StringUtils.getLevenshteinDistance(extractedName, altName); } } //lesser the edit distance more should be the weight weight += 
getCalibratedWeight(altNames.length, altEditDist); //Give preference to sorted results. 0th result should have more priority weight += (cur.size()-i) * WEIGHT_SORT_ORDER; cur.get(i).setWeight(weight); if (weight > maxWeight) { maxWeight = weight; bestIndex = i; } pq.add(cur.get(i)) ; } if (bestIndex == -1) continue; List<Location> resultList = new ArrayList<>(); for(int i =0 ; i< count && !pq.isEmpty() ; i++){ resultList.add(pq.poll()); } resolvedEntities.put(extractedName, resultList); } } /** * Returns a weight for average edit distance for set of alternate name<br/><br/> * altNamesSize * WEIGHT_SIZE_ALT_NAME - (altEditDist/altNamesSize) ;<br/><br/> * altNamesSize * WEIGHT_SIZE_ALT_NAME ensure more priority for results with more alternate names.<br/> * altEditDist/altNamesSize is average edit distance. <br/> * Lesser the average, higher the over all expression * @param altNamesSize - Count of altNames * @param altEditDist - sum of individual edit distances * @return */ public float getCalibratedWeight(int altNamesSize, float altEditDist) { return altNamesSize * WEIGHT_SIZE_ALT_NAME - (altEditDist/altNamesSize) ; } /** * Build the gazetteer index line by line * * @param gazetteerPath * path of the gazetteer file * @param indexerPath * path to the created Lucene index directory. 
* @throws IOException * @throws RuntimeException */ public void buildIndex(String gazetteerPath, String indexerPath) throws IOException { File indexfile = new File(indexerPath); indexDir = FSDirectory.open(indexfile.toPath()); if (!DirectoryReader.indexExists(indexDir)) { IndexWriterConfig config = new IndexWriterConfig(analyzer); indexWriter = new IndexWriter(indexDir, config); Logger logger = Logger.getLogger(this.getClass().getName()); logger.log(Level.WARNING, "Start Building Index for Gazatteer"); BufferedReader filereader = new BufferedReader( new InputStreamReader(new FileInputStream(gazetteerPath), "UTF-8")); String line; int count = 0; while ((line = filereader.readLine()) != null) { try { count += 1; if (count % 100000 == 0) { logger.log(Level.INFO, "Indexed Row Count: " + count); } addDoc(indexWriter, line); } catch (RuntimeException re) { logger.log(Level.WARNING, "Skipping... Error on line: {}", line); } } logger.log(Level.WARNING, "Building Finished"); filereader.close(); indexWriter.close(); } } /** * Index gazetteer's one line data by built-in Lucene Index functions * * @param indexWriter * Lucene indexWriter to be loaded * @param line * a line from the gazetteer file * @throws IOException * @throws NumberFormatException */ private static void addDoc(IndexWriter indexWriter, final String line) { String[] tokens = line.split("\t"); int ID = Integer.parseInt(tokens[0]); String name = tokens[1]; String alternatenames = tokens[3]; Double latitude = -999999.0; try { latitude = Double.parseDouble(tokens[4]); } catch (NumberFormatException e) { latitude = OUT_OF_BOUNDS; } Double longitude = -999999.0; try { longitude = Double.parseDouble(tokens[5]); } catch (NumberFormatException e) { longitude = OUT_OF_BOUNDS; } int population = 0; try { population = Integer.parseInt(tokens[14]); } catch (NumberFormatException e) { population = 0;// Treat as population does not exists } // Additional fields to rank more known locations higher // All available codes can be 
viewed on www.geonames.org String featureCode = tokens[7];// more granular category String countryCode = tokens[8]; String admin1Code = tokens[10];// eg US State String admin2Code = tokens[11];// eg county Document doc = new Document(); doc.add(new IntField(FIELD_NAME_ID, ID, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_NAME, name, Field.Store.YES)); doc.add(new DoubleField(FIELD_NAME_LONGITUDE, longitude, Field.Store.YES)); doc.add(new DoubleField(FIELD_NAME_LATITUDE, latitude, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_ALTERNATE_NAMES, alternatenames, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_FEATURE_CODE, featureCode, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_COUNTRY_CODE, countryCode, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_ADMIN1_CODE, admin1Code, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_ADMIN2_CODE, admin2Code, Field.Store.YES)); doc.add(new NumericDocValuesField(FIELD_NAME_POPULATION, population));//sort enabled field try { indexWriter.addDocument(doc); } catch (IOException e) { e.printStackTrace(); } } @Override public void close() throws IOException { if (indexReader != null) { this.indexReader.close(); } } /** * Writes the result as formatted json to given PrintStream * @param resolvedEntities map of resolved entities * @param out the print stream for writing output */ public static void writeResultJson(Map<String, List<Location>> resolvedEntities, PrintStream out) { out.println(new Gson().toJson(resolvedEntities) ); } /** * Writes the result to given PrintStream * @deprecated Use writeResultJson instead * @param resolvedEntities map of resolved entities * @param out the print stream for writing output */ @Deprecated public static void writeResult(Map<String, List<Location>> resolvedEntities, PrintStream out) { out.println("["); List<String> keys = (List<String>)(List<?>) Arrays.asList(resolvedEntities.keySet().toArray()); //TODO: use org.json.JSONArray and remove this custom formatting code for 
(int j=0; j < keys.size(); j++) { String n = keys.get(j); out.println("{\"" + n + "\" : ["); List<Location> terms = resolvedEntities.get(n); for (int i = 0; i < terms.size(); i++) { Location res = terms.get(i); if (i < terms.size() - 1) { out.println(res + ","); } else { out.println(res); } } if (j < keys.size() -1){ out.println("]},"); } else{ out.println("]}"); } } out.println("]"); } public static void main(String[] args) throws Exception { Option buildOpt = OptionBuilder.withArgName("gazetteer file").hasArg().withLongOpt("build") .withDescription("The Path to the Geonames allCountries.txt") .create('b'); Option searchOpt = OptionBuilder.withArgName("set of location names").withLongOpt("search").hasArgs() .withDescription("Location names to search the Gazetteer for") .create('s'); Option indexOpt = OptionBuilder .withArgName("directoryPath") .withLongOpt("index") .hasArgs() .withDescription( "The path to the Lucene index directory to either create or read") .create('i'); Option helpOpt = OptionBuilder.withLongOpt("help") .withDescription("Print this message.").create('h'); Option resultCountOpt = OptionBuilder.withArgName("number of results").withLongOpt("count").hasArgs() .withDescription("Number of best results to be returned for one location").withType(Integer.class) .create('c'); Option serverOption = OptionBuilder.withArgName("Launch Server") .withLongOpt("server") .withDescription("Launches Geo Gazetteer Service") .create("server"); Option jsonOption = OptionBuilder.withArgName("outputs json") .withLongOpt(JSON_OPT) .withDescription("Formats output in well defined json structure") .create(JSON_OPT); String indexPath = null; String gazetteerPath = null; Options options = new Options(); options.addOption(buildOpt); options.addOption(searchOpt); options.addOption(indexOpt); options.addOption(helpOpt); options.addOption(resultCountOpt); options.addOption(serverOption); options.addOption(jsonOption); // create the parser CommandLineParser parser = new 
DefaultParser(); GeoNameResolver resolver = new GeoNameResolver(); try { // parse the command line arguments CommandLine line = parser.parse(options, args); if (line.hasOption("index")) { indexPath = line.getOptionValue("index"); } if (line.hasOption("build")) { gazetteerPath = line.getOptionValue("build"); } if (line.hasOption("help")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("lucene-geo-gazetteer", options); System.exit(1); } if (indexPath != null && gazetteerPath != null) { LOG.info("Building Lucene index at path: [" + indexPath + "] with geoNames.org file: [" + gazetteerPath + "]"); resolver.buildIndex(gazetteerPath, indexPath); } if (line.hasOption("search")) { List<String> geoTerms = new ArrayList<String>(Arrays.asList(line .getOptionValues("search"))); String countStr = line.getOptionValue("count", "1"); int count = 1; if (countStr.matches("\\d+")) count = Integer.parseInt(countStr); Map<String, List<Location>> resolved = resolver .searchGeoName(indexPath, geoTerms, count); if(line.hasOption(JSON_OPT)){ writeResultJson(resolved, System.out); }else{ writeResult(resolved, System.out); } } else if (line.hasOption("server")){ if (indexPath == null) { System.err.println("Index path is required"); System.exit(-2); } //TODO: get port from CLI args int port = 8765; Launcher.launchService(port, indexPath); } else { System.err.println("Sub command not recognised"); System.exit(-1); } } catch (ParseException exp) { // oops, something went wrong System.err.println("Parsing failed. Reason: " + exp.getMessage()); } } }
src/main/java/edu/usc/ir/geo/gazetteer/GeoNameResolver.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.usc.ir.geo.gazetteer; import java.io.BufferedReader; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.PriorityQueue; import java.util.logging.Level; import java.util.logging.Logger; import edu.usc.ir.geo.gazetteer.domain.Location; import edu.usc.ir.geo.gazetteer.service.Launcher; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.Field; 
import org.apache.lucene.document.IntField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.queryparser.classic.MultiFieldQueryParser; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import com.google.gson.Gson; public class GeoNameResolver implements Closeable { private static final String JSON_OPT = "json"; /** * Below constants define name of field in lucene index */ public static final String FIELD_NAME_ID = "ID"; public static final String FIELD_NAME_NAME = "name"; public static final String FIELD_NAME_LONGITUDE = "longitude"; public static final String FIELD_NAME_LATITUDE = "latitude"; public static final String FIELD_NAME_ALTERNATE_NAMES = "alternatenames"; public static final String FIELD_NAME_FEATURE_CODE = "featureCode"; public static final String FIELD_NAME_COUNTRY_CODE = "countryCode"; public static final String FIELD_NAME_ADMIN1_CODE = "admin1Code"; public static final String FIELD_NAME_ADMIN2_CODE = "admin2Code"; public static final String FIELD_NAME_POPULATION = "population"; /** * Below constants define weight multipliers used for result relevance. 
*/ private static final int WEIGHT_SORT_ORDER = 20; private static final int WEIGHT_SIZE_ALT_NAME = 50; private static final int WEIGHT_NAME_MATCH = 20000; private static final int WEIGHT_NAME_PART_MATCH = 15000; private static final Logger LOG = Logger.getLogger(GeoNameResolver.class .getName()); private static final Double OUT_OF_BOUNDS = 999999.0; private static Analyzer analyzer = new StandardAnalyzer(); private static IndexWriter indexWriter; private static Directory indexDir; private static int hitsPerPage = 8; private IndexReader indexReader; public GeoNameResolver(){ } /** * Creates a GeoNameResolver for given path * @param indexPath the path to lucene index * @throws IOException */ public GeoNameResolver(String indexPath) throws IOException { this.indexReader = createIndexReader(indexPath); } /** * * @param locationNames List of location na,es * @param count Number of results per location * @return resolved Geo Names * @throws IOException */ public HashMap<String, List<Location>> searchGeoName(List<String> locationNames, int count) throws IOException { return resolveEntities(locationNames, count, this.indexReader); } /** * Search corresponding GeoName for each location entity * @param count * Number of results for one locations * @param querystr * it's the NER actually * * @return HashMap each name has a list of resolved entities * @throws IOException * @throws RuntimeException */ public HashMap<String, List<Location>> searchGeoName(String indexerPath, List<String> locationNameEntities, int count) throws IOException { if (locationNameEntities.size() == 0 || locationNameEntities.get(0).length() == 0) return new HashMap<String, List<Location>>(); IndexReader reader = createIndexReader(indexerPath); HashMap<String, List<Location>> resolvedEntities = resolveEntities(locationNameEntities, count, reader); reader.close(); return resolvedEntities; } private IndexReader createIndexReader(String indexerPath) throws IOException { File indexfile = new 
File(indexerPath); indexDir = FSDirectory.open(indexfile.toPath()); if (!DirectoryReader.indexExists(indexDir)) { LOG.log(Level.SEVERE, "No Lucene Index Dierctory Found, Invoke indexBuild() First !"); System.exit(1); } return DirectoryReader.open(indexDir); } private HashMap<String, List<Location>> resolveEntities(List<String> locationNames, int count, IndexReader reader) throws IOException { if (locationNames.size() >= 200) hitsPerPage = 5; // avoid heavy computation IndexSearcher searcher = new IndexSearcher(reader); Query q = null; HashMap<String, List<Location>> allCandidates = new HashMap<String, List<Location>>(); for (String name : locationNames) { if (!allCandidates.containsKey(name)) { try { //query is wrapped in additional quotes (") to avoid query tokenization on space q = new MultiFieldQueryParser(new String[] { FIELD_NAME_NAME, FIELD_NAME_ALTERNATE_NAMES }, analyzer).parse(String.format("\"%s\"", name) ); //sort descending on population SortField populationSort = new SortedNumericSortField(FIELD_NAME_POPULATION, SortField.Type.LONG, true); Sort sort = new Sort(populationSort); //Fetch 3 times desired values, these will be sorted on code and only desired number will be kept ScoreDoc[] hits = searcher.search(q, hitsPerPage * 3 , sort).scoreDocs; List<Location> topHits = new ArrayList<Location>(); for (int i = 0; i < hits.length; ++i) { Location tmpLocObj = new Location(); int docId = hits[i].doc; Document d; try { d = searcher.doc(docId); tmpLocObj.setName(d.get(FIELD_NAME_NAME)); tmpLocObj.setLongitude(d.get(FIELD_NAME_LONGITUDE)); tmpLocObj.setLatitude(d.get(FIELD_NAME_LATITUDE)); //If alternate names are empty put name as actual name //This covers missing data and equals weight for later computation if (d.get(FIELD_NAME_ALTERNATE_NAMES).isEmpty()){ tmpLocObj.setAlternateNames(d.get(FIELD_NAME_NAME)); }else{ tmpLocObj.setAlternateNames(d.get(FIELD_NAME_ALTERNATE_NAMES)); } tmpLocObj.setCountryCode(d.get(FIELD_NAME_COUNTRY_CODE)); 
tmpLocObj.setAdmin1Code(d.get(FIELD_NAME_ADMIN1_CODE)); tmpLocObj.setAdmin2Code(d.get(FIELD_NAME_ADMIN2_CODE)); tmpLocObj.setFeatureCode(d.get(FIELD_NAME_FEATURE_CODE)); } catch (IOException e) { e.printStackTrace(); } topHits.add(tmpLocObj); } //Picking hitsPerPage number of locations from feature code sorted list allCandidates.put(name, pickTopSortedByCode(topHits,hitsPerPage)); } catch (org.apache.lucene.queryparser.classic.ParseException e) { e.printStackTrace(); } } } HashMap<String, List<Location>> resolvedEntities = new HashMap<String, List<Location>>(); pickBestCandidates(resolvedEntities, allCandidates, count); return resolvedEntities; } /** * Sorts inputLocations as per FeatureCodeComparator and returns at most topCount locations * @param inputLocations List of locations to be sorted * @param topCount Number of locations to be kept in curtailed list * @return List of at most topCount locations sorted by edu.usc.ir.geo.gazetteer.CustomLuceneGeoGazetteerComparator.FeatureCodeComparator */ private List<Location> pickTopSortedByCode(List<Location> inputLocations, int topCount) { if(inputLocations == null || inputLocations.size()==0){ return new ArrayList<>(); } inputLocations.sort(new CustomLuceneGeoGazetteerComparator.FeatureCodeComparator()); return inputLocations.subList(0, inputLocations.size() > topCount ? topCount : inputLocations.size() - 1); } /** * Select the best match for each location name extracted from a document, * choosing from among a list of lists of candidate matches. 
Filter uses the * following features: 1) edit distance between name and the resolved name, * choose smallest one 2) content (haven't implemented) * * @param resolvedEntities * final result for the input stream * @param allCandidates * each location name may hits several documents, this is the * collection for all hitted documents * @param count * Number of results for one locations * @throws IOException * @throws RuntimeException */ private void pickBestCandidates( HashMap<String, List<Location>> resolvedEntities, HashMap<String, List<Location>> allCandidates, int count) { for (String extractedName : allCandidates.keySet()) { List<Location> cur = allCandidates.get(extractedName); if(cur.isEmpty()) continue;//continue if no results found int maxWeight = Integer.MIN_VALUE ; //In case weight is equal for all return top element int bestIndex = 0; //Priority queue to return top elements PriorityQueue<Location> pq = new PriorityQueue<>(cur.size(), new Comparator<Location>() { @Override public int compare(Location o1, Location o2) { return Integer.compare(o2.getWeight(), o1.getWeight()); } }); for (int i = 0; i < cur.size(); ++i) { int weight = 0; // get cur's ith resolved entry's name String resolvedName = String.format(" %s ", cur.get(i).getName()); if (resolvedName.contains(String.format(" %s ", extractedName))) { // Assign a weight as per configuration if extracted name is found as a exact word in name weight = WEIGHT_NAME_MATCH; } else if (resolvedName.contains(extractedName)) { // Assign a weight as per configuration if extracted name is found partly in name weight = WEIGHT_NAME_PART_MATCH; } // get all alternate names of cur's ith resolved entry's String[] altNames = cur.get(i).getAlternateNames().split(","); float altEditDist = 0; for(String altName : altNames){ if(altName.contains(extractedName)){ altEditDist+=StringUtils.getLevenshteinDistance(extractedName, altName); } } //lesser the edit distance more should be the weight weight += 
getCalibratedWeight(altNames.length, altEditDist); //Give preference to sorted results. 0th result should have more priority weight += (cur.size()-i) * WEIGHT_SORT_ORDER; cur.get(i).setWeight(weight); if (weight > maxWeight) { maxWeight = weight; bestIndex = i; } pq.add(cur.get(i)) ; } if (bestIndex == -1) continue; List<Location> resultList = new ArrayList<>(); for(int i =0 ; i< count && !pq.isEmpty() ; i++){ resultList.add(pq.poll()); } resolvedEntities.put(extractedName, resultList); } } /** * Returns a weight for average edit distance for set of alternate name<br/><br/> * altNamesSize * WEIGHT_SIZE_ALT_NAME - (altEditDist/altNamesSize) ;<br/><br/> * altNamesSize * WEIGHT_SIZE_ALT_NAME ensure more priority for results with more alternate names.<br/> * altEditDist/altNamesSize is average edit distance. <br/> * Lesser the average, higher the over all expression * @param altNamesSize - Count of altNames * @param altEditDist - sum of individual edit distances * @return */ public float getCalibratedWeight(int altNamesSize, float altEditDist) { return altNamesSize * WEIGHT_SIZE_ALT_NAME - (altEditDist/altNamesSize) ; } /** * Build the gazetteer index line by line * * @param gazetteerPath * path of the gazetteer file * @param indexerPath * path to the created Lucene index directory. 
* @throws IOException * @throws RuntimeException */ public void buildIndex(String gazetteerPath, String indexerPath) throws IOException { File indexfile = new File(indexerPath); indexDir = FSDirectory.open(indexfile.toPath()); if (!DirectoryReader.indexExists(indexDir)) { IndexWriterConfig config = new IndexWriterConfig(analyzer); indexWriter = new IndexWriter(indexDir, config); Logger logger = Logger.getLogger(this.getClass().getName()); logger.log(Level.WARNING, "Start Building Index for Gazatteer"); BufferedReader filereader = new BufferedReader( new InputStreamReader(new FileInputStream(gazetteerPath), "UTF-8")); String line; int count = 0; while ((line = filereader.readLine()) != null) { try { count += 1; if (count % 100000 == 0) { logger.log(Level.INFO, "Indexed Row Count: " + count); } addDoc(indexWriter, line); } catch (RuntimeException re) { logger.log(Level.WARNING, "Skipping... Error on line: {}", line); } } logger.log(Level.WARNING, "Building Finished"); filereader.close(); indexWriter.close(); } } /** * Index gazetteer's one line data by built-in Lucene Index functions * * @param indexWriter * Lucene indexWriter to be loaded * @param line * a line from the gazetteer file * @throws IOException * @throws NumberFormatException */ private static void addDoc(IndexWriter indexWriter, final String line) { String[] tokens = line.split("\t"); int ID = Integer.parseInt(tokens[0]); String name = tokens[1]; String alternatenames = tokens[3]; Double latitude = -999999.0; try { latitude = Double.parseDouble(tokens[4]); } catch (NumberFormatException e) { latitude = OUT_OF_BOUNDS; } Double longitude = -999999.0; try { longitude = Double.parseDouble(tokens[5]); } catch (NumberFormatException e) { longitude = OUT_OF_BOUNDS; } int population = 0; try { population = Integer.parseInt(tokens[14]); } catch (NumberFormatException e) { population = 0;// Treat as population does not exists } // Additional fields to rank more known locations higher // All available codes can be 
viewed on www.geonames.org String featureCode = tokens[7];// more granular category String countryCode = tokens[8]; String admin1Code = tokens[10];// eg US State String admin2Code = tokens[11];// eg county Document doc = new Document(); doc.add(new IntField(FIELD_NAME_ID, ID, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_NAME, name, Field.Store.YES)); doc.add(new DoubleField(FIELD_NAME_LONGITUDE, longitude, Field.Store.YES)); doc.add(new DoubleField(FIELD_NAME_LATITUDE, latitude, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_ALTERNATE_NAMES, alternatenames, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_FEATURE_CODE, featureCode, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_COUNTRY_CODE, countryCode, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_ADMIN1_CODE, admin1Code, Field.Store.YES)); doc.add(new TextField(FIELD_NAME_ADMIN2_CODE, admin2Code, Field.Store.YES)); doc.add(new NumericDocValuesField(FIELD_NAME_POPULATION, population));//sort enabled field try { indexWriter.addDocument(doc); } catch (IOException e) { e.printStackTrace(); } } @Override public void close() throws IOException { if (indexReader != null) { this.indexReader.close(); } } /** * Writes the result as formatted json to given PrintStream * @param resolvedEntities map of resolved entities * @param out the print stream for writing output */ public static void writeResultJson(Map<String, List<Location>> resolvedEntities, PrintStream out) { out.println(new Gson().toJson(resolvedEntities) ); } /** * Writes the result to given PrintStream * @deprecated Use writeResultJson instead * @param resolvedEntities map of resolved entities * @param out the print stream for writing output */ @Deprecated public static void writeResult(Map<String, List<Location>> resolvedEntities, PrintStream out) { out.println("["); List<String> keys = (List<String>)(List<?>) Arrays.asList(resolvedEntities.keySet().toArray()); //TODO: use org.json.JSONArray and remove this custom formatting code for 
(int j=0; j < keys.size(); j++) { String n = keys.get(j); out.println("{\"" + n + "\" : ["); List<Location> terms = resolvedEntities.get(n); for (int i = 0; i < terms.size(); i++) { Location res = terms.get(i); if (i < terms.size() - 1) { out.println(res + ","); } else { out.println(res); } } if (j < keys.size() -1){ out.println("]},"); } else{ out.println("]}"); } } out.println("]"); } public static void main(String[] args) throws Exception { Option buildOpt = OptionBuilder.withArgName("gazetteer file").hasArg().withLongOpt("build") .withDescription("The Path to the Geonames allCountries.txt") .create('b'); Option searchOpt = OptionBuilder.withArgName("set of location names").withLongOpt("search").hasArgs() .withDescription("Location names to search the Gazetteer for") .create('s'); Option indexOpt = OptionBuilder .withArgName("directoryPath") .withLongOpt("index") .hasArgs() .withDescription( "The path to the Lucene index directory to either create or read") .create('i'); Option helpOpt = OptionBuilder.withLongOpt("help") .withDescription("Print this message.").create('h'); Option resultCountOpt = OptionBuilder.withArgName("number of results").withLongOpt("count").hasArgs() .withDescription("Number of best results to be returned for one location").withType(Integer.class) .create('c'); Option serverOption = OptionBuilder.withArgName("Launch Server") .withLongOpt("server") .withDescription("Launches Geo Gazetteer Service") .create("server"); Option jsonOption = OptionBuilder.withArgName("outputs json") .withLongOpt(JSON_OPT) .withDescription("Formats output in well defined json structure") .create(JSON_OPT); String indexPath = null; String gazetteerPath = null; Options options = new Options(); options.addOption(buildOpt); options.addOption(searchOpt); options.addOption(indexOpt); options.addOption(helpOpt); options.addOption(resultCountOpt); options.addOption(serverOption); options.addOption(jsonOption); // create the parser CommandLineParser parser = new 
DefaultParser(); GeoNameResolver resolver = new GeoNameResolver(); try { // parse the command line arguments CommandLine line = parser.parse(options, args); if (line.hasOption("index")) { indexPath = line.getOptionValue("index"); } if (line.hasOption("build")) { gazetteerPath = line.getOptionValue("build"); } if (line.hasOption("help")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("lucene-geo-gazetteer", options); System.exit(1); } if (indexPath != null && gazetteerPath != null) { LOG.info("Building Lucene index at path: [" + indexPath + "] with geoNames.org file: [" + gazetteerPath + "]"); resolver.buildIndex(gazetteerPath, indexPath); } if (line.hasOption("search")) { List<String> geoTerms = new ArrayList<String>(Arrays.asList(line .getOptionValues("search"))); String countStr = line.getOptionValue("count", "1"); int count = 1; if (countStr.matches("\\d+")) count = Integer.parseInt(countStr); Map<String, List<Location>> resolved = resolver .searchGeoName(indexPath, geoTerms, count); if(line.hasOption(JSON_OPT)){ writeResultJson(resolved, System.out); }else{ writeResult(resolved, System.out); } } else if (line.hasOption("server")){ if (indexPath == null) { System.err.println("Index path is required"); System.exit(-2); } //TODO: get port from CLI args int port = 8765; Launcher.launchService(port, indexPath); } else { System.err.println("Sub command not recognised"); System.exit(-1); } } catch (ParseException exp) { // oops, something went wrong System.err.println("Parsing failed. Reason: " + exp.getMessage()); } } }
- fix for java7
src/main/java/edu/usc/ir/geo/gazetteer/GeoNameResolver.java
- fix for java7
Java
apache-2.0
e688cb5da685cf9ebfacf24d9df78bb894aa4501
0
pentaho/pentaho-kettle,birdtsai/pentaho-kettle,MikhailHubanau/pentaho-kettle,nicoben/pentaho-kettle,skofra0/pentaho-kettle,rfellows/pentaho-kettle,ccaspanello/pentaho-kettle,DFieldFL/pentaho-kettle,MikhailHubanau/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,ViswesvarSekar/pentaho-kettle,ivanpogodin/pentaho-kettle,YuryBY/pentaho-kettle,eayoungs/pentaho-kettle,matthewtckr/pentaho-kettle,denisprotopopov/pentaho-kettle,pedrofvteixeira/pentaho-kettle,YuryBY/pentaho-kettle,matthewtckr/pentaho-kettle,birdtsai/pentaho-kettle,stepanovdg/pentaho-kettle,ma459006574/pentaho-kettle,codek/pentaho-kettle,stevewillcock/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,skofra0/pentaho-kettle,GauravAshara/pentaho-kettle,rmansoor/pentaho-kettle,stepanovdg/pentaho-kettle,birdtsai/pentaho-kettle,emartin-pentaho/pentaho-kettle,yshakhau/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,alina-ipatina/pentaho-kettle,pedrofvteixeira/pentaho-kettle,eayoungs/pentaho-kettle,mdamour1976/pentaho-kettle,SergeyTravin/pentaho-kettle,zlcnju/kettle,mkambol/pentaho-kettle,pavel-sakun/pentaho-kettle,pymjer/pentaho-kettle,mattyb149/pentaho-kettle,jbrant/pentaho-kettle,codek/pentaho-kettle,nicoben/pentaho-kettle,cjsonger/pentaho-kettle,nantunes/pentaho-kettle,graimundo/pentaho-kettle,HiromuHota/pentaho-kettle,emartin-pentaho/pentaho-kettle,DFieldFL/pentaho-kettle,nanata1115/pentaho-kettle,graimundo/pentaho-kettle,brosander/pentaho-kettle,bmorrise/pentaho-kettle,gretchiemoran/pentaho-kettle,zlcnju/kettle,tmcsantos/pentaho-kettle,e-cuellar/pentaho-kettle,ma459006574/pentaho-kettle,graimundo/pentaho-kettle,marcoslarsen/pentaho-kettle,airy-ict/pentaho-kettle,SergeyTravin/pentaho-kettle,tkafalas/pentaho-kettle,emartin-pentaho/pentaho-kettle,yshakhau/pentaho-kettle,marcoslarsen/pentaho-kettle,tkafalas/pentaho-kettle,brosander/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,mkambol/pentaho-kettle,stevewillcock/pentaho-kettle,kurtwalker/pentaho-kettle,ivanpogodin/pentaho-kettle,sajeetharan/pentaho-kettle,cjson
ger/pentaho-kettle,aminmkhan/pentaho-kettle,SergeyTravin/pentaho-kettle,birdtsai/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,eayoungs/pentaho-kettle,airy-ict/pentaho-kettle,CapeSepias/pentaho-kettle,mattyb149/pentaho-kettle,rmansoor/pentaho-kettle,dkincade/pentaho-kettle,ddiroma/pentaho-kettle,cjsonger/pentaho-kettle,rmansoor/pentaho-kettle,GauravAshara/pentaho-kettle,drndos/pentaho-kettle,aminmkhan/pentaho-kettle,ccaspanello/pentaho-kettle,andrei-viaryshka/pentaho-kettle,aminmkhan/pentaho-kettle,e-cuellar/pentaho-kettle,alina-ipatina/pentaho-kettle,roboguy/pentaho-kettle,akhayrutdinov/pentaho-kettle,brosander/pentaho-kettle,airy-ict/pentaho-kettle,andrei-viaryshka/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,hudak/pentaho-kettle,akhayrutdinov/pentaho-kettle,pymjer/pentaho-kettle,dkincade/pentaho-kettle,e-cuellar/pentaho-kettle,codek/pentaho-kettle,ddiroma/pentaho-kettle,Advent51/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,kurtwalker/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,pminutillo/pentaho-kettle,mbatchelor/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,pentaho/pentaho-kettle,matrix-stone/pentaho-kettle,pymjer/pentaho-kettle,flbrino/pentaho-kettle,pedrofvteixeira/pentaho-kettle,pminutillo/pentaho-kettle,codek/pentaho-kettle,denisprotopopov/pentaho-kettle,Advent51/pentaho-kettle,wseyler/pentaho-kettle,dkincade/pentaho-kettle,flbrino/pentaho-kettle,MikhailHubanau/pentaho-kettle,akhayrutdinov/pentaho-kettle,mbatchelor/pentaho-kettle,yshakhau/pentaho-kettle,nicoben/pentaho-kettle,ccaspanello/pentaho-kettle,pavel-sakun/pentaho-kettle,sajeetharan/pentaho-kettle,alina-ipatina/pentaho-kettle,YuryBY/pentaho-kettle,bmorrise/pentaho-kettle,emartin-pentaho/pentaho-kettle,tmcsantos/pentaho-kettle,matthewtckr/pentaho-kettle,mdamour1976/pentaho-kettle,matrix-stone/pentaho-kettle,pavel-sakun/pentaho-kettle,pavel-sakun/pentaho-kettle,CapeSepias/pentaho-kettle,pentaho/pentaho-kettle,bmorrise/pentaho-kettle,Advent51/pentaho-kettle,marcoslarsen/pentaho-kettle,stepa
novdg/pentaho-kettle,stepanovdg/pentaho-kettle,wseyler/pentaho-kettle,zlcnju/kettle,mdamour1976/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,YuryBY/pentaho-kettle,Advent51/pentaho-kettle,EcoleKeine/pentaho-kettle,tkafalas/pentaho-kettle,drndos/pentaho-kettle,CapeSepias/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,yshakhau/pentaho-kettle,ivanpogodin/pentaho-kettle,nanata1115/pentaho-kettle,jbrant/pentaho-kettle,roboguy/pentaho-kettle,mattyb149/pentaho-kettle,jbrant/pentaho-kettle,nantunes/pentaho-kettle,nanata1115/pentaho-kettle,mkambol/pentaho-kettle,kurtwalker/pentaho-kettle,stevewillcock/pentaho-kettle,eayoungs/pentaho-kettle,andrei-viaryshka/pentaho-kettle,drndos/pentaho-kettle,hudak/pentaho-kettle,matrix-stone/pentaho-kettle,HiromuHota/pentaho-kettle,DFieldFL/pentaho-kettle,wseyler/pentaho-kettle,EcoleKeine/pentaho-kettle,cjsonger/pentaho-kettle,graimundo/pentaho-kettle,gretchiemoran/pentaho-kettle,ccaspanello/pentaho-kettle,pedrofvteixeira/pentaho-kettle,ViswesvarSekar/pentaho-kettle,HiromuHota/pentaho-kettle,ddiroma/pentaho-kettle,pentaho/pentaho-kettle,ma459006574/pentaho-kettle,wseyler/pentaho-kettle,lgrill-pentaho/pentaho-kettle,marcoslarsen/pentaho-kettle,ViswesvarSekar/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,zlcnju/kettle,bmorrise/pentaho-kettle,lgrill-pentaho/pentaho-kettle,kurtwalker/pentaho-kettle,hudak/pentaho-kettle,pminutillo/pentaho-kettle,mdamour1976/pentaho-kettle,pminutillo/pentaho-kettle,stevewillcock/pentaho-kettle,nanata1115/pentaho-kettle,sajeetharan/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,HiromuHota/pentaho-kettle,nantunes/pentaho-kettle,lgrill-pentaho/pentaho-kettle,ivanpogodin/pentaho-kettle,ma459006574/pentaho-kettle,matthewtckr/pentaho-kettle,rfellows/pentaho-kettle,nantunes/pentaho-kettle,flbrino/pentaho-kettle,gretchiemoran/pentaho-kettle,skofra0/pentaho-kettle,denisprotopopov/pentaho-kettle,jbrant/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,denisprotopopov/pentaho-kettle,brosander/pentaho-kettle,gretchiemo
ran/pentaho-kettle,rmansoor/pentaho-kettle,CapeSepias/pentaho-kettle,tmcsantos/pentaho-kettle,flbrino/pentaho-kettle,roboguy/pentaho-kettle,dkincade/pentaho-kettle,e-cuellar/pentaho-kettle,EcoleKeine/pentaho-kettle,GauravAshara/pentaho-kettle,matrix-stone/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,ddiroma/pentaho-kettle,mbatchelor/pentaho-kettle,skofra0/pentaho-kettle,tkafalas/pentaho-kettle,mbatchelor/pentaho-kettle,roboguy/pentaho-kettle,hudak/pentaho-kettle,mkambol/pentaho-kettle,ViswesvarSekar/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,mattyb149/pentaho-kettle,akhayrutdinov/pentaho-kettle,alina-ipatina/pentaho-kettle,GauravAshara/pentaho-kettle,nicoben/pentaho-kettle,lgrill-pentaho/pentaho-kettle,sajeetharan/pentaho-kettle,pymjer/pentaho-kettle,airy-ict/pentaho-kettle,DFieldFL/pentaho-kettle,tmcsantos/pentaho-kettle,rfellows/pentaho-kettle,aminmkhan/pentaho-kettle,SergeyTravin/pentaho-kettle,drndos/pentaho-kettle,EcoleKeine/pentaho-kettle
// // Google Analytics Plugin for Pentaho PDI a.k.a. Kettle // // Copyright (C) 2010 Slawomir Chodnicki // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA package org.pentaho.di.trans.steps.googleanalytics; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Map; import org.pentaho.di.core.*; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.*; import org.pentaho.di.trans.*; import org.pentaho.di.trans.step.*; import org.w3c.dom.Node; public class GaInputStepMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = GaInputStepMeta.class; // for i18n purposes public final static String GA_ACCOUNTS_URL = "https://www.google.com/analytics/feeds/accounts/default"; public final static String GA_DATA_URL = "https://www.google.com/analytics/feeds/data"; public final static String FIELD_TYPE_CONFIDENCE_INTERVAL = "Confidence Interval for Metric"; public final static String FIELD_TYPE_DIMENSION = "Dimension"; public final static String 
FIELD_TYPE_METRIC = "Metric"; public final static String FIELD_TYPE_DATA_SOURCE_PROPERTY = "Data Source Property"; public final static String FIELD_TYPE_DATA_SOURCE_FIELD = "Data Source Field"; public final static String FIELD_DATA_SOURCE_TABLE_ID = "dxp:tableId"; public final static String FIELD_DATA_SOURCE_TABLE_NAME = "dxp:tableName"; public final static String DEFAULT_GA_APPLICATION_NAME = "type-exit.org kettle plugin"; private String gaAppName; private String gaEmail; private String gaPassword; private String gaProfileTableId; private String gaProfileName; private boolean useCustomTableId; private String gaCustomTableId; private String startDate; private String endDate; private String dimensions; private String metrics; private String filters; private String sort; private boolean useCustomSegment; private int rowLimit; private String customSegment; private String segmentName; private String segmentId; private String feedField[]; private String feedFieldType[]; private String outputField[]; private int outputType[]; private String conversionMask[]; public GaInputStepMeta() { super(); } public int getRowLimit() { return rowLimit; } public void setRowLimit(int rowLimit) { if (rowLimit < 0){ rowLimit = 0; } this.rowLimit = rowLimit; } public String[] getConversionMask() { return conversionMask; } public String getGaAppName() { return gaAppName; } public void setGaAppName(String gaAppName) { this.gaAppName = gaAppName; } public boolean isUseCustomTableId() { return useCustomTableId; } public void setUseCustomTableId(boolean useCustomTableId) { this.useCustomTableId = useCustomTableId; } public String getGaCustomTableId() { return gaCustomTableId; } public void setGaCustomTableId(String gaCustomTableId) { this.gaCustomTableId = gaCustomTableId; } public String getSegmentName() { return segmentName; } public void setSegmentName(String segmentName) { this.segmentName = segmentName; } public String getSegmentId() { return segmentId; } public void setSegmentId(String 
segmentId) { this.segmentId = segmentId; } public boolean isUseCustomSegment() { return useCustomSegment; } public void setUseCustomSegment(boolean useCustomSegment) { this.useCustomSegment = useCustomSegment; } public String getCustomSegment() { return customSegment; } public void setCustomSegment(String customSegment) { this.customSegment = customSegment; } public String getDimensions() { return dimensions; } public void setDimensions(String dimensions) { this.dimensions = dimensions; } public String getMetrics() { return metrics; } public void setMetrics(String metrics) { this.metrics = metrics; } public String getFilters() { return filters; } public void setFilters(String filters) { this.filters = filters; } public String getSort() { return sort; } public void setSort(String sort) { this.sort = sort; } public String getStartDate() { return startDate; } public void setStartDate(String startDate) { this.startDate = startDate; } public String getEndDate() { return endDate; } public void setEndDate(String endDate) { this.endDate = endDate; } public String getGaEmail() { return gaEmail; } public void setGaEmail(String email) { this.gaEmail = email; } public String getGaPassword() { return gaPassword; } public void setGaPassword(String gaPassword) { this.gaPassword = gaPassword; } public String getGaProfileTableId() { return gaProfileTableId; } public void setGaProfileTableId(String gaProfile) { this.gaProfileTableId = gaProfile; } public String getGaProfileName() { return gaProfileName; } public void setGaProfileName(String gaProfileName) { this.gaProfileName = gaProfileName; } public String[] getFeedFieldType() { return feedFieldType; } public String[] getFeedField() { return feedField; } public String[] getOutputField() { return outputField; } public int[] getOutputType() { return outputType; } // set sensible defaults for a new step public void setDefault() { gaEmail = "[email protected]"; segmentId = "gaid::-1"; segmentName = "All Visits"; dimensions = 
"ga:browser"; metrics = "ga:visits"; startDate = new SimpleDateFormat("yyyy-MM-dd").format(new Date()); endDate = new String(startDate); sort = "-ga:visits"; gaAppName = DEFAULT_GA_APPLICATION_NAME; rowLimit = 0; // default is to have no key lookup settings allocate(0); } // helper method to allocate the arrays public void allocate(int nrkeys) { feedField = new String[nrkeys]; outputField = new String[nrkeys]; outputType = new int[nrkeys]; feedFieldType = new String[nrkeys]; conversionMask = new String[nrkeys]; } public void getFields(RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) { // clear the output r.clear(); // append the outputFields to the output for (int i = 0; i < outputField.length; i++) { ValueMetaInterface v = new ValueMeta(outputField[i], outputType[i]); // that would influence the output //v.setConversionMask(conversionMask[i]); v.setOrigin(origin); r.addValueMeta(v); } } public Object clone() { // field by field copy is default GaInputStepMeta retval = (GaInputStepMeta) super.clone(); // add proper deep copy for the collections int nrKeys = feedField.length; retval.allocate(nrKeys); for (int i = 0; i < nrKeys; i++) { retval.feedField[i] = feedField[i]; retval.outputField[i] = outputField[i]; retval.outputType[i] = outputType[i]; retval.feedFieldType[i] = feedFieldType[i]; retval.conversionMask[i] = conversionMask[i]; } return retval; } private boolean getBooleanAttributeFromNode(Node node, String tag) { String sValue = XMLHandler.getTagValue(node, tag); return (sValue != null && sValue.equalsIgnoreCase("Y")); } public String getXML() throws KettleValueException { StringBuffer retval = new StringBuffer(800); retval.append(" ").append(XMLHandler.addTagValue("user", gaEmail)); retval.append(" ").append(XMLHandler.addTagValue("pass", "Encrypted " + Encr.encryptPassword(gaPassword))); retval.append(" ").append(XMLHandler.addTagValue("appName", gaAppName)); retval.append(" 
").append(XMLHandler.addTagValue("profileName", gaProfileName)); retval.append(" ").append(XMLHandler.addTagValue("profileTableId", gaProfileTableId)); retval.append(" ").append(XMLHandler.addTagValue("customTableId", gaCustomTableId)); retval.append(" ").append(XMLHandler.addTagValue("useCustomTableId", useCustomTableId)); retval.append(" ").append(XMLHandler.addTagValue("startDate", startDate)); retval.append(" ").append(XMLHandler.addTagValue("endDate", endDate)); retval.append(" ").append(XMLHandler.addTagValue("dimensions", dimensions)); retval.append(" ").append(XMLHandler.addTagValue("metrics", metrics)); retval.append(" ").append(XMLHandler.addTagValue("filters", filters)); retval.append(" ").append(XMLHandler.addTagValue("sort", sort)); retval.append(" ").append(XMLHandler.addTagValue("useCustomSegment", useCustomSegment)); retval.append(" ").append(XMLHandler.addTagValue("customSegment", customSegment)); retval.append(" ").append(XMLHandler.addTagValue("segmentId", segmentId)); retval.append(" ").append(XMLHandler.addTagValue("segmentName", segmentName)); retval.append(" ").append(XMLHandler.addTagValue("rowLimit", rowLimit)); for (int i = 0; i < feedField.length; i++) { retval.append(" <feedField>").append(Const.CR); retval.append(" ").append(XMLHandler.addTagValue("feedFieldType", feedFieldType[i])); retval.append(" ").append(XMLHandler.addTagValue("feedField", feedField[i])); retval.append(" ").append(XMLHandler.addTagValue("outField", outputField[i])); retval.append(" ").append(XMLHandler.addTagValue("type", ValueMeta.getTypeDesc(outputType[i]))); retval.append(" ").append(XMLHandler.addTagValue("conversionMask", conversionMask[i])); retval.append(" </feedField>").append(Const.CR); } return retval.toString(); } public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException { try { gaEmail = XMLHandler.getTagValue(stepnode, "user"); gaPassword = 
Encr.decryptPasswordOptionallyEncrypted(XMLHandler.getTagValue(stepnode, "pass")); gaAppName = XMLHandler.getTagValue(stepnode, "appName"); gaProfileName = XMLHandler.getTagValue(stepnode, "profileName"); gaProfileTableId = XMLHandler.getTagValue(stepnode, "profileTableId"); gaCustomTableId = XMLHandler.getTagValue(stepnode, "customTableId"); useCustomTableId = getBooleanAttributeFromNode(stepnode, "useCustomTableId"); startDate = XMLHandler.getTagValue(stepnode, "startDate"); endDate = XMLHandler.getTagValue(stepnode, "endDate"); dimensions = XMLHandler.getTagValue(stepnode, "dimensions"); metrics = XMLHandler.getTagValue(stepnode, "metrics"); filters = XMLHandler.getTagValue(stepnode, "filters"); sort = XMLHandler.getTagValue(stepnode, "sort"); useCustomSegment = getBooleanAttributeFromNode(stepnode, "useCustomSegment"); customSegment = XMLHandler.getTagValue(stepnode, "customSegment"); segmentId = XMLHandler.getTagValue(stepnode, "segmentId"); segmentName = XMLHandler.getTagValue(stepnode, "segmentName"); rowLimit = Const.toInt(XMLHandler.getTagValue(stepnode, "rowLimit"), 0); allocate(0); int nrFields = XMLHandler.countNodes(stepnode, "feedField"); allocate(nrFields); for (int i = 0; i < nrFields; i++) { Node knode = XMLHandler.getSubNodeByNr(stepnode, "feedField", i); feedFieldType[i] = XMLHandler.getTagValue(knode, "feedFieldType"); feedField[i] = XMLHandler.getTagValue(knode, "feedField"); outputField[i] = XMLHandler.getTagValue(knode, "outField"); outputType[i] = ValueMeta.getType(XMLHandler.getTagValue(knode, "type")); conversionMask[i] = XMLHandler.getTagValue(knode, "conversionMask"); if (outputType[i] < 0) { outputType[i] = ValueMetaInterface.TYPE_STRING; } } } catch (Exception e) { throw new KettleXMLException(BaseMessages.getString(PKG, "GoogleAnalytics.Error.UnableToReadFromXML"), e); } } public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException { try { gaEmail = 
rep.getStepAttributeString(id_step, "user"); gaPassword = Encr.decryptPasswordOptionallyEncrypted(rep.getStepAttributeString(id_step, "pass")); gaProfileName = rep.getStepAttributeString(id_step, "profileName"); gaAppName = rep.getStepAttributeString(id_step, "appName"); gaProfileTableId = rep.getStepAttributeString(id_step, "profileTableId"); gaCustomTableId = rep.getStepAttributeString(id_step, "customTableId"); useCustomTableId = rep.getStepAttributeBoolean(id_step, "useCustomTableId"); startDate = rep.getStepAttributeString(id_step, "startDate"); endDate = rep.getStepAttributeString(id_step, "endDate"); dimensions = rep.getStepAttributeString(id_step, "dimensions"); metrics = rep.getStepAttributeString(id_step, "metrics"); filters = rep.getStepAttributeString(id_step, "filters"); sort = rep.getStepAttributeString(id_step, "sort"); useCustomSegment = rep.getStepAttributeBoolean(id_step, "useCustomSegment"); customSegment = rep.getStepAttributeString(id_step, "customSegment"); segmentId = rep.getStepAttributeString(id_step, "segmentId"); segmentName = rep.getStepAttributeString(id_step, "segmentName"); rowLimit = (int)rep.getStepAttributeInteger(id_step, "rowLimit"); int nrFields = rep.countNrStepAttributes(id_step, "feedField"); allocate(nrFields); for (int i = 0; i < nrFields; i++) { feedFieldType[i] = rep.getStepAttributeString(id_step, i, "feedFieldType"); feedField[i] = rep.getStepAttributeString(id_step, i, "feedField"); outputField[i] = rep.getStepAttributeString(id_step, i, "outField"); outputType[i] = ValueMeta.getType(rep.getStepAttributeString(id_step, i, "type")); conversionMask[i] = rep.getStepAttributeString(id_step, i, "conversionMask"); if (outputType[i] < 0) { outputType[i] = ValueMetaInterface.TYPE_STRING; } } } catch (Exception e) { throw new KettleException(BaseMessages.getString(PKG, "GoogleAnalytics.Error.UnableToReadFromRep"), e); } } public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException { 
try { rep.saveStepAttribute(id_transformation, id_step, "user", gaEmail); rep.saveStepAttribute(id_transformation, id_step, "pass", "Encrypted " + Encr.encryptPassword(gaPassword)); rep.saveStepAttribute(id_transformation, id_step, "appName", gaAppName); rep.saveStepAttribute(id_transformation, id_step, "profileName", gaProfileName); rep.saveStepAttribute(id_transformation, id_step, "profileTableId", gaProfileTableId); rep.saveStepAttribute(id_transformation, id_step, "customTableId", gaCustomTableId); rep.saveStepAttribute(id_transformation, id_step, "useCustomTableId", useCustomTableId); rep.saveStepAttribute(id_transformation, id_step, "startDate", startDate); rep.saveStepAttribute(id_transformation, id_step, "endDate", endDate); rep.saveStepAttribute(id_transformation, id_step, "dimensions", dimensions); rep.saveStepAttribute(id_transformation, id_step, "metrics", metrics); rep.saveStepAttribute(id_transformation, id_step, "filters", filters); rep.saveStepAttribute(id_transformation, id_step, "sort", sort); rep.saveStepAttribute(id_transformation, id_step, "useCustomSegment", useCustomSegment); rep.saveStepAttribute(id_transformation, id_step, "customSegment", customSegment); rep.saveStepAttribute(id_transformation, id_step, "segmentId", segmentId); rep.saveStepAttribute(id_transformation, id_step, "segmentName", segmentName); rep.saveStepAttribute(id_transformation, id_step, "rowLimit", rowLimit); for (int i = 0; i < feedField.length; i++) { rep.saveStepAttribute(id_transformation, id_step, i, "feedFieldType", feedFieldType[i]); rep.saveStepAttribute(id_transformation, id_step, i, "feedField", feedField[i]); rep.saveStepAttribute(id_transformation, id_step, i, "outField", outputField[i]); rep.saveStepAttribute(id_transformation, id_step, i, "conversionMask", conversionMask[i]); rep.saveStepAttribute(id_transformation, id_step, i, "type", ValueMeta.getTypeDesc(outputType[i])); } } catch (Exception e) { throw new KettleException(BaseMessages.getString(PKG, 
"GoogleAnalytics.Error.UnableToSaveToRep") + id_step, e); } } public void check(List<CheckResultInterface> remarks, TransMeta transmeta, StepMeta stepinfo, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) { CheckResult cr; if (prev==null || prev.size()==0) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.NotReceivingFields"), stepinfo); //$NON-NLS-1$ remarks.add(cr); } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.StepRecevingData",prev.size()+""), stepinfo); //$NON-NLS-1$ //$NON-NLS-2$ remarks.add(cr); } // See if we have input streams leading to this step! if (input.length>0) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.StepRecevingData2"), stepinfo); //$NON-NLS-1$ remarks.add(cr); } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.NoInputReceivedFromOtherSteps"), stepinfo); //$NON-NLS-1$ remarks.add(cr); } } public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans disp) { return new GaInputStep(stepMeta, stepDataInterface, cnr, transMeta, disp); } public StepDataInterface getStepData() { return new GaInputStepData(); } }
src/org/pentaho/di/trans/steps/googleanalytics/GaInputStepMeta.java
// // Google Analytics Plugin for Pentaho PDI a.k.a. Kettle // // Copyright (C) 2010 Slawomir Chodnicki // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA package org.pentaho.di.trans.steps.googleanalytics; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Map; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.*; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.*; import org.pentaho.di.trans.*; import org.pentaho.di.trans.step.*; import org.pentaho.di.ui.trans.steps.googleanalytics.GaInputStepDialog; import org.w3c.dom.Node; public class GaInputStepMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = GaInputStepMeta.class; // for i18n purposes public final static String GA_ACCOUNTS_URL = "https://www.google.com/analytics/feeds/accounts/default"; public final static String GA_DATA_URL = "https://www.google.com/analytics/feeds/data"; public final static String FIELD_TYPE_CONFIDENCE_INTERVAL = "Confidence Interval 
for Metric"; public final static String FIELD_TYPE_DIMENSION = "Dimension"; public final static String FIELD_TYPE_METRIC = "Metric"; public final static String FIELD_TYPE_DATA_SOURCE_PROPERTY = "Data Source Property"; public final static String FIELD_TYPE_DATA_SOURCE_FIELD = "Data Source Field"; public final static String FIELD_DATA_SOURCE_TABLE_ID = "dxp:tableId"; public final static String FIELD_DATA_SOURCE_TABLE_NAME = "dxp:tableName"; public final static String DEFAULT_GA_APPLICATION_NAME = "type-exit.org kettle plugin"; private String gaAppName; private String gaEmail; private String gaPassword; private String gaProfileTableId; private String gaProfileName; private boolean useCustomTableId; private String gaCustomTableId; private String startDate; private String endDate; private String dimensions; private String metrics; private String filters; private String sort; private boolean useCustomSegment; private int rowLimit; private String customSegment; private String segmentName; private String segmentId; private String feedField[]; private String feedFieldType[]; private String outputField[]; private int outputType[]; private String conversionMask[]; public GaInputStepMeta() { super(); } public int getRowLimit() { return rowLimit; } public void setRowLimit(int rowLimit) { if (rowLimit < 0){ rowLimit = 0; } this.rowLimit = rowLimit; } public String[] getConversionMask() { return conversionMask; } public String getGaAppName() { return gaAppName; } public void setGaAppName(String gaAppName) { this.gaAppName = gaAppName; } public boolean isUseCustomTableId() { return useCustomTableId; } public void setUseCustomTableId(boolean useCustomTableId) { this.useCustomTableId = useCustomTableId; } public String getGaCustomTableId() { return gaCustomTableId; } public void setGaCustomTableId(String gaCustomTableId) { this.gaCustomTableId = gaCustomTableId; } public String getSegmentName() { return segmentName; } public void setSegmentName(String segmentName) { this.segmentName 
= segmentName; } public String getSegmentId() { return segmentId; } public void setSegmentId(String segmentId) { this.segmentId = segmentId; } public boolean isUseCustomSegment() { return useCustomSegment; } public void setUseCustomSegment(boolean useCustomSegment) { this.useCustomSegment = useCustomSegment; } public String getCustomSegment() { return customSegment; } public void setCustomSegment(String customSegment) { this.customSegment = customSegment; } public String getDimensions() { return dimensions; } public void setDimensions(String dimensions) { this.dimensions = dimensions; } public String getMetrics() { return metrics; } public void setMetrics(String metrics) { this.metrics = metrics; } public String getFilters() { return filters; } public void setFilters(String filters) { this.filters = filters; } public String getSort() { return sort; } public void setSort(String sort) { this.sort = sort; } public String getStartDate() { return startDate; } public void setStartDate(String startDate) { this.startDate = startDate; } public String getEndDate() { return endDate; } public void setEndDate(String endDate) { this.endDate = endDate; } public String getGaEmail() { return gaEmail; } public void setGaEmail(String email) { this.gaEmail = email; } public String getGaPassword() { return gaPassword; } public void setGaPassword(String gaPassword) { this.gaPassword = gaPassword; } public String getGaProfileTableId() { return gaProfileTableId; } public void setGaProfileTableId(String gaProfile) { this.gaProfileTableId = gaProfile; } public String getGaProfileName() { return gaProfileName; } public void setGaProfileName(String gaProfileName) { this.gaProfileName = gaProfileName; } public String[] getFeedFieldType() { return feedFieldType; } public String[] getFeedField() { return feedField; } public String[] getOutputField() { return outputField; } public int[] getOutputType() { return outputType; } // set sensible defaults for a new step public void setDefault() { 
gaEmail = "[email protected]"; segmentId = "gaid::-1"; segmentName = "All Visits"; dimensions = "ga:browser"; metrics = "ga:visits"; startDate = new SimpleDateFormat("yyyy-MM-dd").format(new Date()); endDate = new String(startDate); sort = "-ga:visits"; gaAppName = DEFAULT_GA_APPLICATION_NAME; rowLimit = 0; // default is to have no key lookup settings allocate(0); } // helper method to allocate the arrays public void allocate(int nrkeys) { feedField = new String[nrkeys]; outputField = new String[nrkeys]; outputType = new int[nrkeys]; feedFieldType = new String[nrkeys]; conversionMask = new String[nrkeys]; } public void getFields(RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) { // clear the output r.clear(); // append the outputFields to the output for (int i = 0; i < outputField.length; i++) { ValueMetaInterface v = new ValueMeta(outputField[i], outputType[i]); // that would influence the output //v.setConversionMask(conversionMask[i]); v.setOrigin(origin); r.addValueMeta(v); } } public Object clone() { // field by field copy is default GaInputStepMeta retval = (GaInputStepMeta) super.clone(); // add proper deep copy for the collections int nrKeys = feedField.length; retval.allocate(nrKeys); for (int i = 0; i < nrKeys; i++) { retval.feedField[i] = feedField[i]; retval.outputField[i] = outputField[i]; retval.outputType[i] = outputType[i]; retval.feedFieldType[i] = feedFieldType[i]; retval.conversionMask[i] = conversionMask[i]; } return retval; } private boolean getBooleanAttributeFromNode(Node node, String tag) { String sValue = XMLHandler.getTagValue(node, tag); return (sValue != null && sValue.equalsIgnoreCase("Y")); } public String getXML() throws KettleValueException { StringBuffer retval = new StringBuffer(800); retval.append(" ").append(XMLHandler.addTagValue("user", gaEmail)); retval.append(" ").append(XMLHandler.addTagValue("pass", "Encrypted " + Encr.encryptPassword(gaPassword))); retval.append(" 
").append(XMLHandler.addTagValue("appName", gaAppName)); retval.append(" ").append(XMLHandler.addTagValue("profileName", gaProfileName)); retval.append(" ").append(XMLHandler.addTagValue("profileTableId", gaProfileTableId)); retval.append(" ").append(XMLHandler.addTagValue("customTableId", gaCustomTableId)); retval.append(" ").append(XMLHandler.addTagValue("useCustomTableId", useCustomTableId)); retval.append(" ").append(XMLHandler.addTagValue("startDate", startDate)); retval.append(" ").append(XMLHandler.addTagValue("endDate", endDate)); retval.append(" ").append(XMLHandler.addTagValue("dimensions", dimensions)); retval.append(" ").append(XMLHandler.addTagValue("metrics", metrics)); retval.append(" ").append(XMLHandler.addTagValue("filters", filters)); retval.append(" ").append(XMLHandler.addTagValue("sort", sort)); retval.append(" ").append(XMLHandler.addTagValue("useCustomSegment", useCustomSegment)); retval.append(" ").append(XMLHandler.addTagValue("customSegment", customSegment)); retval.append(" ").append(XMLHandler.addTagValue("segmentId", segmentId)); retval.append(" ").append(XMLHandler.addTagValue("segmentName", segmentName)); retval.append(" ").append(XMLHandler.addTagValue("rowLimit", rowLimit)); for (int i = 0; i < feedField.length; i++) { retval.append(" <feedField>").append(Const.CR); retval.append(" ").append(XMLHandler.addTagValue("feedFieldType", feedFieldType[i])); retval.append(" ").append(XMLHandler.addTagValue("feedField", feedField[i])); retval.append(" ").append(XMLHandler.addTagValue("outField", outputField[i])); retval.append(" ").append(XMLHandler.addTagValue("type", ValueMeta.getTypeDesc(outputType[i]))); retval.append(" ").append(XMLHandler.addTagValue("conversionMask", conversionMask[i])); retval.append(" </feedField>").append(Const.CR); } return retval.toString(); } public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException { try { gaEmail = 
XMLHandler.getTagValue(stepnode, "user"); gaPassword = Encr.decryptPasswordOptionallyEncrypted(XMLHandler.getTagValue(stepnode, "pass")); gaAppName = XMLHandler.getTagValue(stepnode, "appName"); gaProfileName = XMLHandler.getTagValue(stepnode, "profileName"); gaProfileTableId = XMLHandler.getTagValue(stepnode, "profileTableId"); gaCustomTableId = XMLHandler.getTagValue(stepnode, "customTableId"); useCustomTableId = getBooleanAttributeFromNode(stepnode, "useCustomTableId"); startDate = XMLHandler.getTagValue(stepnode, "startDate"); endDate = XMLHandler.getTagValue(stepnode, "endDate"); dimensions = XMLHandler.getTagValue(stepnode, "dimensions"); metrics = XMLHandler.getTagValue(stepnode, "metrics"); filters = XMLHandler.getTagValue(stepnode, "filters"); sort = XMLHandler.getTagValue(stepnode, "sort"); useCustomSegment = getBooleanAttributeFromNode(stepnode, "useCustomSegment"); customSegment = XMLHandler.getTagValue(stepnode, "customSegment"); segmentId = XMLHandler.getTagValue(stepnode, "segmentId"); segmentName = XMLHandler.getTagValue(stepnode, "segmentName"); rowLimit = Const.toInt(XMLHandler.getTagValue(stepnode, "rowLimit"), 0); allocate(0); int nrFields = XMLHandler.countNodes(stepnode, "feedField"); allocate(nrFields); for (int i = 0; i < nrFields; i++) { Node knode = XMLHandler.getSubNodeByNr(stepnode, "feedField", i); feedFieldType[i] = XMLHandler.getTagValue(knode, "feedFieldType"); feedField[i] = XMLHandler.getTagValue(knode, "feedField"); outputField[i] = XMLHandler.getTagValue(knode, "outField"); outputType[i] = ValueMeta.getType(XMLHandler.getTagValue(knode, "type")); conversionMask[i] = XMLHandler.getTagValue(knode, "conversionMask"); if (outputType[i] < 0) { outputType[i] = ValueMetaInterface.TYPE_STRING; } } } catch (Exception e) { throw new KettleXMLException(BaseMessages.getString(PKG, "GoogleAnalytics.Error.UnableToReadFromXML"), e); } } public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> 
counters) throws KettleException { try { gaEmail = rep.getStepAttributeString(id_step, "user"); gaPassword = Encr.decryptPasswordOptionallyEncrypted(rep.getStepAttributeString(id_step, "pass")); gaProfileName = rep.getStepAttributeString(id_step, "profileName"); gaAppName = rep.getStepAttributeString(id_step, "appName"); gaProfileTableId = rep.getStepAttributeString(id_step, "profileTableId"); gaCustomTableId = rep.getStepAttributeString(id_step, "customTableId"); useCustomTableId = rep.getStepAttributeBoolean(id_step, "useCustomTableId"); startDate = rep.getStepAttributeString(id_step, "startDate"); endDate = rep.getStepAttributeString(id_step, "endDate"); dimensions = rep.getStepAttributeString(id_step, "dimensions"); metrics = rep.getStepAttributeString(id_step, "metrics"); filters = rep.getStepAttributeString(id_step, "filters"); sort = rep.getStepAttributeString(id_step, "sort"); useCustomSegment = rep.getStepAttributeBoolean(id_step, "useCustomSegment"); customSegment = rep.getStepAttributeString(id_step, "customSegment"); segmentId = rep.getStepAttributeString(id_step, "segmentId"); segmentName = rep.getStepAttributeString(id_step, "segmentName"); rowLimit = (int)rep.getStepAttributeInteger(id_step, "rowLimit"); int nrFields = rep.countNrStepAttributes(id_step, "feedField"); allocate(nrFields); for (int i = 0; i < nrFields; i++) { feedFieldType[i] = rep.getStepAttributeString(id_step, i, "feedFieldType"); feedField[i] = rep.getStepAttributeString(id_step, i, "feedField"); outputField[i] = rep.getStepAttributeString(id_step, i, "outField"); outputType[i] = ValueMeta.getType(rep.getStepAttributeString(id_step, i, "type")); conversionMask[i] = rep.getStepAttributeString(id_step, i, "conversionMask"); if (outputType[i] < 0) { outputType[i] = ValueMetaInterface.TYPE_STRING; } } } catch (Exception e) { throw new KettleException(BaseMessages.getString(PKG, "GoogleAnalytics.Error.UnableToReadFromRep"), e); } } public void saveRep(Repository rep, ObjectId 
id_transformation, ObjectId id_step) throws KettleException { try { rep.saveStepAttribute(id_transformation, id_step, "user", gaEmail); rep.saveStepAttribute(id_transformation, id_step, "pass", "Encrypted " + Encr.encryptPassword(gaPassword)); rep.saveStepAttribute(id_transformation, id_step, "appName", gaAppName); rep.saveStepAttribute(id_transformation, id_step, "profileName", gaProfileName); rep.saveStepAttribute(id_transformation, id_step, "profileTableId", gaProfileTableId); rep.saveStepAttribute(id_transformation, id_step, "customTableId", gaCustomTableId); rep.saveStepAttribute(id_transformation, id_step, "useCustomTableId", useCustomTableId); rep.saveStepAttribute(id_transformation, id_step, "startDate", startDate); rep.saveStepAttribute(id_transformation, id_step, "endDate", endDate); rep.saveStepAttribute(id_transformation, id_step, "dimensions", dimensions); rep.saveStepAttribute(id_transformation, id_step, "metrics", metrics); rep.saveStepAttribute(id_transformation, id_step, "filters", filters); rep.saveStepAttribute(id_transformation, id_step, "sort", sort); rep.saveStepAttribute(id_transformation, id_step, "useCustomSegment", useCustomSegment); rep.saveStepAttribute(id_transformation, id_step, "customSegment", customSegment); rep.saveStepAttribute(id_transformation, id_step, "segmentId", segmentId); rep.saveStepAttribute(id_transformation, id_step, "segmentName", segmentName); rep.saveStepAttribute(id_transformation, id_step, "rowLimit", rowLimit); for (int i = 0; i < feedField.length; i++) { rep.saveStepAttribute(id_transformation, id_step, i, "feedFieldType", feedFieldType[i]); rep.saveStepAttribute(id_transformation, id_step, i, "feedField", feedField[i]); rep.saveStepAttribute(id_transformation, id_step, i, "outField", outputField[i]); rep.saveStepAttribute(id_transformation, id_step, i, "conversionMask", conversionMask[i]); rep.saveStepAttribute(id_transformation, id_step, i, "type", ValueMeta.getTypeDesc(outputType[i])); } } catch (Exception e) 
{ throw new KettleException(BaseMessages.getString(PKG, "GoogleAnalytics.Error.UnableToSaveToRep") + id_step, e); } } public void check(List<CheckResultInterface> remarks, TransMeta transmeta, StepMeta stepinfo, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) { CheckResult cr; if (prev==null || prev.size()==0) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.NotReceivingFields"), stepinfo); //$NON-NLS-1$ remarks.add(cr); } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.StepRecevingData",prev.size()+""), stepinfo); //$NON-NLS-1$ //$NON-NLS-2$ remarks.add(cr); } // See if we have input streams leading to this step! if (input.length>0) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.StepRecevingData2"), stepinfo); //$NON-NLS-1$ remarks.add(cr); } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "GoogleAnalytics.CheckResult.NoInputReceivedFromOtherSteps"), stepinfo); //$NON-NLS-1$ remarks.add(cr); } } public StepDialogInterface getDialog(Shell shell, StepMetaInterface meta, TransMeta transMeta, String name) { return new GaInputStepDialog(shell, meta, transMeta, name); } public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans disp) { return new GaInputStep(stepMeta, stepDataInterface, cnr, transMeta, disp); } public StepDataInterface getStepData() { return new GaInputStepData(); } }
fixing build: removed explicit dependency on dialog class git-svn-id: 51b39fcfd0d3a6ea7caa15377cad4af13b9d2664@14502 5fb7f6ec-07c1-534a-b4ca-9155e429e800
src/org/pentaho/di/trans/steps/googleanalytics/GaInputStepMeta.java
fixing build: removed explicit dependency on dialog class
Java
apache-2.0
0b7e85e090bc34a29a7733276ed36a37824e13c0
0
apache/commons-configuration,apache/commons-configuration,mohanaraosv/commons-configuration,mohanaraosv/commons-configuration,mohanaraosv/commons-configuration,apache/commons-configuration
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.configuration.reloading; /** * Interface that allows other objects to synchronize on a root lock. * * @author <a * href="http://commons.apache.org/configuration/team-list.html">Commons * Configuration team</a> * @version $Id$ */ public interface Reloadable { Object getReloadLock(); }
src/java/org/apache/commons/configuration/reloading/Reloadable.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.configuration.reloading; /** * Interface that allows other objects to synchronize on a root lock. */ public interface Reloadable { Object getReloadLock(); }
Checkstyle git-svn-id: 0d31da9e303333003508381311333cf78a25d41b@1158890 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/commons/configuration/reloading/Reloadable.java
Checkstyle
Java
apache-2.0
9c964f30fca692f07f1807b0179fc74f29ae404a
0
bingo-open-source/bingo-core
/* * Copyright 2002-2007 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package bingo.lang; import java.util.Collection; import java.util.Map; /** * Assertion utility class that assists in validating arguments. Useful for identifying programmer errors early and * clearly at runtime. * * <p> * For example, if the contract of a public method states it does not allow <code>null</code> arguments, Assert can be * used to validate that contract. Doing this clearly indicates a contract violation when it occurs and protects the * class's invariants. * * <p> * Typically used to validate method arguments rather than configuration properties, to check for cases that are usually * programmer errors rather than configuration errors. In contrast to config initialization code, there is usally no * point in falling back to defaults in such methods. * * <p> * This class is similar to JUnit's assertion library. If an argument value is deemed invalid, an * {@link IllegalArgumentException} is thrown (typically). For example: * * <pre class="code"> * Assert.notNull(clazz, &quot;The class must not be null&quot;); * Assert.isTrue(i &gt; 0, &quot;The value must be greater than zero&quot;); * </pre> * * Mainly for internal use within the framework; consider Jakarta's Commons Lang >= 2.0 for a more comprehensive suite * of assertion utilities. 
* * @author Keith Donald * @author Juergen Hoeller * @author Colin Sampaleanu * @author Rob Harrop * @since 1.1.2 */ //From spring framework, under Apache License 2.0 public abstract class Assert { /** * Assert a boolean expression, throwing <code>IllegalArgumentException</code> if the test result is * <code>false</code>. * * <pre class="code"> * Assert.isTrue(i &gt; 0, &quot;The value must be greater than zero&quot;); * </pre> * * @param expression a boolean expression * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if expression is <code>false</code> */ public static void isTrue(boolean expression, String message) { if (!expression) { throw new IllegalArgumentException(message); } } /** * Assert a boolean expression, throwing <code>IllegalArgumentException</code> if the test result is * <code>false</code>. * * <pre class="code"> * Assert.isTrue(i &gt; 0); * </pre> * * @param expression a boolean expression * @throws IllegalArgumentException if expression is <code>false</code> */ public static void isTrue(boolean expression) { isTrue(expression, "[Assertion failed] - this expression must be true"); } /** * Assert a boolean expression, throwing <code>IllegalArgumentException</code> if the test result is * <code>true</code>. * * <pre class="code"> * Assert.isFalse(i &gt; 0); * </pre> * * @param expression a boolean expression * @throws IllegalArgumentException if expression is <code>true</code> */ public static void isFalse(boolean expression) { isFalse(expression, "[Assertion failed] - this expression must be false"); } /** * Assert a boolean expression, throwing <code>IllegalArgumentException</code> if the test result is * <code>true</code>. 
* * <pre class="code"> * Assert.isFalse(i &gt; 0, &quot;The value must not be greater than zero&quot;); * </pre> * * @param expression a boolean expression * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if expression is <code>true</code> */ public static void isFalse(boolean expression, String message) { if (expression) { throw new IllegalArgumentException(message); } } /** * Assert that an object is <code>null</code> . * * <pre class="code"> * Assert.isNull(value, &quot;The value must be null&quot;); * </pre> * * @param object the object to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object is not <code>null</code> */ public static void isNull(Object object, String message) { if (object != null) { throw new IllegalArgumentException(message); } } /** * Assert that an object is <code>null</code> . * * <pre class="code"> * Assert.isNull(value); * </pre> * * @param object the object to check * @throws IllegalArgumentException if the object is not <code>null</code> */ public static void isNull(Object object) { isNull(object, "[Assertion failed] - the object argument must be null"); } /** * Assert that an object is not <code>null</code> . * * <pre class="code"> * Assert.notNull(clazz, &quot;The class must not be null&quot;); * </pre> * * @param object the object to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object is <code>null</code> */ public static <T> T notNull(T object, String message) { if (object == null) { throw new IllegalArgumentException(message); } return object; } /** * Assert that an object is not <code>null</code> . 
* * <pre class="code"> * Assert.notNull(clazz); * </pre> * * @param object the object to check * @throws IllegalArgumentException if the object is <code>null</code> */ public static <T> T notNull(T object) { return notNull(object, "[Assertion failed] - this argument is required; it must not be null"); } /** * Assert that the given String is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty(name, &quot;Name must not be empty&quot;); * </pre> * * @param text the String to check * @param message the exception message to use if the assertion fails * @see StringUtils#notEmpty */ public static String notEmpty(String text, String message) { if (Strings.isEmpty(text)) { throw new IllegalArgumentException(message); } return text; } /** * Assert that the given String is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty(name); * </pre> * * @param text the String to check * @see StringUtils#notEmpty */ public static String notEmpty(String text) { return notEmpty(text, "[Assertion failed] - this String argument must have length; it must not be null or empty"); } /** * Assert that the given String has valid text content; that is, it must not be <code>null</code> and must contain * at least one non-whitespace character. * * <pre class="code"> * Assert.notBlank(name, &quot;'name' must not be empty&quot;); * </pre> * * @param text the String to check * @param message the exception message to use if the assertion fails * @see StringUtils#notBlank */ public static String notBlank(String text, String message) { if (Strings.isBlank(text)) { throw new IllegalArgumentException(message); } return text; } /** * Assert that the given String has valid text content; that is, it must not be <code>null</code> and must contain * at least one non-whitespace character. 
* * <pre class="code"> * Assert.notBlank(name, &quot;'name' must not be empty&quot;); * </pre> * * @param text the String to check * @see StringUtils#notBlank */ public static String notBlank(String text) { return notBlank(text, "[Assertion failed] - this String argument must have text; it must not be null, empty, or blank"); } /** * Assert that the given text does not contain the given substring. * * <pre class="code"> * Assert.notContains(name, &quot;rod&quot;, &quot;Name must not contain 'rod'&quot;); * </pre> * * @param textToSearch the text to search * @param substring the substring to find within the text * @param message the exception message to use if the assertion fails */ public static void notContains(String textToSearch, String substring, String message) { if (Strings.isNotBlank(textToSearch) && Strings.isNotBlank(substring) && textToSearch.indexOf(substring) != -1) { throw new IllegalArgumentException(message); } } /** * Assert that the given text does not contain the given substring. * * <pre class="code"> * Assert.notContains(name, &quot;rod&quot;); * </pre> * * @param textToSearch the text to search * @param substring the substring to find within the text */ public static void notContains(String textToSearch, String substring) { notContains(textToSearch, substring, "[Assertion failed] - this String argument must not contain the substring [" + substring + "]"); } /** * Assert that an array has elements; that is, it must not be <code>null</code> and must have at least one element. 
* * <pre class="code"> * Assert.notEmpty(array, &quot;The array must have elements&quot;); * </pre> * * @param array the array to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object array is <code>null</code> or has no elements */ public static <T> T[] notEmpty(T[] array, String message) { if (Arrays.isEmpty(array)) { throw new IllegalArgumentException(message); } return array; } /** * Assert that an array has elements; that is, it must not be <code>null</code> and must have at least one element. * * <pre class="code"> * Assert.notEmpty(array); * </pre> * * @param array the array to check * @throws IllegalArgumentException if the object array is <code>null</code> or has no elements */ public static <T> T[] notEmpty(T[] array) { return notEmpty(array, "[Assertion failed] - this array must not be empty: it must contain at least 1 element"); } /** * Assert that an array has no null elements. Note: Does not complain if the array is empty! * * <pre class="code"> * Assert.noNullElements(array, &quot;The array must have non-null elements&quot;); * </pre> * * @param array the array to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object array contains a <code>null</code> element */ public static <T> T[] noNullElements(T[] array, String message) { if (array != null) { for (int i = 0; i < array.length; i++) { if (array[i] == null) { throw new IllegalArgumentException(message); } } } return array; } /** * Assert that an array has no null elements. Note: Does not complain if the array is empty! 
* * <pre class="code"> * Assert.noNullElements(array); * </pre> * * @param array the array to check * @throws IllegalArgumentException if the object array contains a <code>null</code> element */ public static <T> T[] noNullElements(T[] array) { return noNullElements(array, "[Assertion failed] - this array must not contain any null elements"); } /** * Assert that a collection has elements; that is, it must not be <code>null</code> and must have at least one * element. * * <pre class="code"> * Assert.notEmpty(collection, &quot;Collection must have elements&quot;); * </pre> * * @param collection the collection to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the collection is <code>null</code> or has no elements */ public static <C extends Collection<E>,E> C notEmpty(C collection, String message) { if (Collections.isEmpty(collection)) { throw new IllegalArgumentException(message); } return collection; } /** * Assert that a collection has elements; that is, it must not be <code>null</code> and must have at least one * element. * * <pre class="code"> * Assert.notEmpty(collection, &quot;Collection must have elements&quot;); * </pre> * * @param collection the collection to check * @throws IllegalArgumentException if the collection is <code>null</code> or has no elements */ public static <C extends Collection<E>,E> C notEmpty(C collection) { return notEmpty(collection, "[Assertion failed] - this collection must not be empty: it must contain at least 1 element"); } /** * Assert that a Map has entries; that is, it must not be <code>null</code> and must have at least one entry. 
* * <pre class="code"> * Assert.notEmpty(map, &quot;Map must have entries&quot;); * </pre> * * @param map the map to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the map is <code>null</code> or has no entries */ public static <M extends Map<K,V>,K,V> M notEmpty(M map, String message) { if (Maps.isEmpty(map)) { throw new IllegalArgumentException(message); } return map; } /** * Assert that a Map has entries; that is, it must not be <code>null</code> and must have at least one entry. * * <pre class="code"> * Assert.notEmpty(map); * </pre> * * @param map the map to check * @throws IllegalArgumentException if the map is <code>null</code> or has no entries */ public static <M extends Map<K,V>,K,V> M notEmpty(M map) { return notEmpty(map, "[Assertion failed] - this map must not be empty; it must contain at least one entry"); } /** * Assert that the provided object is an instance of the provided class. * * <pre class="code"> * Assert.instanceOf(Foo.class, foo); * </pre> * * @param clazz the required class * @param obj the object to check * @throws IllegalArgumentException if the object is not an instance of clazz * @see Class#isInstance */ public static <T> T isInstanceOf(Class<?> clazz, T obj) { return isInstanceOf(clazz, obj, ""); } /** * Assert that the provided object is an instance of the provided class. * * <pre class="code"> * Assert.instanceOf(Foo.class, foo); * </pre> * * @param type the type to check against * @param obj the object to check * @param message a message which will be prepended to the message produced by the function itself, and which may be * used to provide context. It should normally end in a ": " or ". " so that the function generate * message looks ok when prepended to it. 
* @throws IllegalArgumentException if the object is not an instance of clazz * @see Class#isInstance */ public static <T> T isInstanceOf(Class<?> type, T obj, String message) { notNull(type, "Type to check against must not be null"); if (!type.isInstance(obj)) { throw new IllegalArgumentException(message + "Object of class [" + (obj != null ? obj.getClass().getName() : "null") + "] must be an instance of " + type); } return obj; } /** * Assert that <code>superType.isAssignableFrom(subType)</code> is <code>true</code>. * * <pre class="code"> * Assert.isAssignableFrom(Number.class, myClass); * </pre> * * @param superType the super type to check * @param subType the sub type to check * @throws IllegalArgumentException if the classes are not assignable */ public static void isAssignableFrom(Class<?> superType, Class<?> subType) { isAssignableFrom(superType, subType, ""); } /** * Assert that <code>superType.isAssignableFrom(subType)</code> is <code>true</code>. * * <pre class="code"> * Assert.isAssignableFrom(Number.class, myClass); * </pre> * * @param superType the super type to check against * @param subType the sub type to check * @param message a message which will be prepended to the message produced by the function itself, and which may be * used to provide context. It should normally end in a ": " or ". " so that the function generate * message looks ok when prepended to it. * @throws IllegalArgumentException if the classes are not assignable */ public static void isAssignableFrom(Class<?> superType, Class<?> subType, String message) { notNull(superType, "Type to check against must not be null"); if (subType == null || !superType.isAssignableFrom(subType)) { throw new IllegalArgumentException(message + subType + " is not assignable to " + superType); } } /** * Assert a boolean expression, throwing <code>IllegalStateException</code> if the test result is <code>false</code> * . Call isTrue if you wish to throw IllegalArgumentException on an assertion failure. 
* * <pre class="code"> * Assert.state(id == null, &quot;The id property must not already be initialized&quot;); * </pre> * * @param expression a boolean expression * @param message the exception message to use if the assertion fails * @throws IllegalStateException if expression is <code>false</code> */ public static void stateValid(boolean expression, String message) { if (!expression) { throw new IllegalStateException(message); } } /** * Assert a boolean expression, throwing {@link IllegalStateException} if the test result is <code>false</code>. * <p> * Call {@link #isTrue(boolean)} if you wish to throw {@link IllegalArgumentException} on an assertion failure. * * <pre class="code"> * Assert.state(id == null); * </pre> * * @param expression a boolean expression * @throws IllegalStateException if the supplied expression is <code>false</code> */ public static void stateValid(boolean expression) { stateValid(expression, "[Assertion failed] - this state invariant must be true"); } /** * Assert that an argument value is not <code>null</code> . * * <pre class="code"> * Assert.notNull("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the object to check * * @throws IllegalArgumentException if the value is <code>null</code> */ public static void argNotNull(String argName,Object argValue) { if (argValue == null) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null",argName)); } } /** * Assert that the given argument String is not empty; that is, it must not be <code>null</code> and not the empty String. 
* * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,String argValue) { if (Strings.isEmpty(argValue)) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } /** * Assert that the given argument Map is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,Map<?, ?> argValue) { if (null == argValue || argValue.isEmpty()) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } /** * Assert that the given argument Collection is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,Collection<?> argValue) { if (null == argValue || argValue.isEmpty()) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } /** * Assert that the given argument Array is not empty; that is, it must not be <code>null</code> and not the empty String. 
* * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,Object[] argValue) { if (null == argValue || argValue.length == 0) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } }
core-lang/src/main/java/bingo/lang/Assert.java
/* * Copyright 2002-2007 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package bingo.lang; import java.util.Collection; import java.util.Map; /** * Assertion utility class that assists in validating arguments. Useful for identifying programmer errors early and * clearly at runtime. * * <p> * For example, if the contract of a public method states it does not allow <code>null</code> arguments, Assert can be * used to validate that contract. Doing this clearly indicates a contract violation when it occurs and protects the * class's invariants. * * <p> * Typically used to validate method arguments rather than configuration properties, to check for cases that are usually * programmer errors rather than configuration errors. In contrast to config initialization code, there is usally no * point in falling back to defaults in such methods. * * <p> * This class is similar to JUnit's assertion library. If an argument value is deemed invalid, an * {@link IllegalArgumentException} is thrown (typically). For example: * * <pre class="code"> * Assert.notNull(clazz, &quot;The class must not be null&quot;); * Assert.isTrue(i &gt; 0, &quot;The value must be greater than zero&quot;); * </pre> * * Mainly for internal use within the framework; consider Jakarta's Commons Lang >= 2.0 for a more comprehensive suite * of assertion utilities. 
* * @author Keith Donald * @author Juergen Hoeller * @author Colin Sampaleanu * @author Rob Harrop * @since 1.1.2 */ //From spring framework, under Apache License 2.0 public abstract class Assert { /** * Assert a boolean expression, throwing <code>IllegalArgumentException</code> if the test result is * <code>false</code>. * * <pre class="code"> * Assert.isTrue(i &gt; 0, &quot;The value must be greater than zero&quot;); * </pre> * * @param expression a boolean expression * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if expression is <code>false</code> */ public static void isTrue(boolean expression, String message) { if (!expression) { throw new IllegalArgumentException(message); } } /** * Assert a boolean expression, throwing <code>IllegalArgumentException</code> if the test result is * <code>false</code>. * * <pre class="code"> * Assert.isTrue(i &gt; 0); * </pre> * * @param expression a boolean expression * @throws IllegalArgumentException if expression is <code>false</code> */ public static void isTrue(boolean expression) { isTrue(expression, "[Assertion failed] - this expression must be true"); } public static void isFalse(boolean expression) { isFalse(expression, "[Assertion failed] - this expression must be false"); } public static void isFalse(boolean expression, String message) { if (expression) { throw new IllegalArgumentException(message); } } /** * Assert that an object is <code>null</code> . * * <pre class="code"> * Assert.isNull(value, &quot;The value must be null&quot;); * </pre> * * @param object the object to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object is not <code>null</code> */ public static void isNull(Object object, String message) { if (object != null) { throw new IllegalArgumentException(message); } } /** * Assert that an object is <code>null</code> . 
* * <pre class="code"> * Assert.isNull(value); * </pre> * * @param object the object to check * @throws IllegalArgumentException if the object is not <code>null</code> */ public static void isNull(Object object) { isNull(object, "[Assertion failed] - the object argument must be null"); } /** * Assert that an object is not <code>null</code> . * * <pre class="code"> * Assert.notNull(clazz, &quot;The class must not be null&quot;); * </pre> * * @param object the object to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object is <code>null</code> */ public static <T> T notNull(T object, String message) { if (object == null) { throw new IllegalArgumentException(message); } return object; } /** * Assert that an object is not <code>null</code> . * * <pre class="code"> * Assert.notNull(clazz); * </pre> * * @param object the object to check * @throws IllegalArgumentException if the object is <code>null</code> */ public static <T> T notNull(T object) { return notNull(object, "[Assertion failed] - this argument is required; it must not be null"); } /** * Assert that the given String is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty(name, &quot;Name must not be empty&quot;); * </pre> * * @param text the String to check * @param message the exception message to use if the assertion fails * @see StringUtils#notEmpty */ public static String notEmpty(String text, String message) { if (Strings.isEmpty(text)) { throw new IllegalArgumentException(message); } return text; } /** * Assert that the given String is not empty; that is, it must not be <code>null</code> and not the empty String. 
* * <pre class="code"> * Assert.notEmpty(name); * </pre> * * @param text the String to check * @see StringUtils#notEmpty */ public static String notEmpty(String text) { return notEmpty(text, "[Assertion failed] - this String argument must have length; it must not be null or empty"); } /** * Assert that the given String has valid text content; that is, it must not be <code>null</code> and must contain * at least one non-whitespace character. * * <pre class="code"> * Assert.notBlank(name, &quot;'name' must not be empty&quot;); * </pre> * * @param text the String to check * @param message the exception message to use if the assertion fails * @see StringUtils#notBlank */ public static String notBlank(String text, String message) { if (Strings.isBlank(text)) { throw new IllegalArgumentException(message); } return text; } /** * Assert that the given String has valid text content; that is, it must not be <code>null</code> and must contain * at least one non-whitespace character. * * <pre class="code"> * Assert.notBlank(name, &quot;'name' must not be empty&quot;); * </pre> * * @param text the String to check * @see StringUtils#notBlank */ public static String notBlank(String text) { return notBlank(text, "[Assertion failed] - this String argument must have text; it must not be null, empty, or blank"); } /** * Assert that the given text does not contain the given substring. * * <pre class="code"> * Assert.notContains(name, &quot;rod&quot;, &quot;Name must not contain 'rod'&quot;); * </pre> * * @param textToSearch the text to search * @param substring the substring to find within the text * @param message the exception message to use if the assertion fails */ public static void notContains(String textToSearch, String substring, String message) { if (Strings.isNotBlank(textToSearch) && Strings.isNotBlank(substring) && textToSearch.indexOf(substring) != -1) { throw new IllegalArgumentException(message); } } /** * Assert that the given text does not contain the given substring. 
* * <pre class="code"> * Assert.notContains(name, &quot;rod&quot;); * </pre> * * @param textToSearch the text to search * @param substring the substring to find within the text */ public static void notContains(String textToSearch, String substring) { notContains(textToSearch, substring, "[Assertion failed] - this String argument must not contain the substring [" + substring + "]"); } /** * Assert that an array has elements; that is, it must not be <code>null</code> and must have at least one element. * * <pre class="code"> * Assert.notEmpty(array, &quot;The array must have elements&quot;); * </pre> * * @param array the array to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object array is <code>null</code> or has no elements */ public static <T> T[] notEmpty(T[] array, String message) { if (Arrays.isEmpty(array)) { throw new IllegalArgumentException(message); } return array; } /** * Assert that an array has elements; that is, it must not be <code>null</code> and must have at least one element. * * <pre class="code"> * Assert.notEmpty(array); * </pre> * * @param array the array to check * @throws IllegalArgumentException if the object array is <code>null</code> or has no elements */ public static <T> T[] notEmpty(T[] array) { return notEmpty(array, "[Assertion failed] - this array must not be empty: it must contain at least 1 element"); } /** * Assert that an array has no null elements. Note: Does not complain if the array is empty! 
* * <pre class="code"> * Assert.noNullElements(array, &quot;The array must have non-null elements&quot;); * </pre> * * @param array the array to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the object array contains a <code>null</code> element */ public static <T> T[] noNullElements(T[] array, String message) { if (array != null) { for (int i = 0; i < array.length; i++) { if (array[i] == null) { throw new IllegalArgumentException(message); } } } return array; } /** * Assert that an array has no null elements. Note: Does not complain if the array is empty! * * <pre class="code"> * Assert.noNullElements(array); * </pre> * * @param array the array to check * @throws IllegalArgumentException if the object array contains a <code>null</code> element */ public static <T> T[] noNullElements(T[] array) { return noNullElements(array, "[Assertion failed] - this array must not contain any null elements"); } /** * Assert that a collection has elements; that is, it must not be <code>null</code> and must have at least one * element. * * <pre class="code"> * Assert.notEmpty(collection, &quot;Collection must have elements&quot;); * </pre> * * @param collection the collection to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the collection is <code>null</code> or has no elements */ public static <C extends Collection<E>,E> C notEmpty(C collection, String message) { if (Collections.isEmpty(collection)) { throw new IllegalArgumentException(message); } return collection; } /** * Assert that a collection has elements; that is, it must not be <code>null</code> and must have at least one * element. 
* * <pre class="code"> * Assert.notEmpty(collection, &quot;Collection must have elements&quot;); * </pre> * * @param collection the collection to check * @throws IllegalArgumentException if the collection is <code>null</code> or has no elements */ public static <C extends Collection<E>,E> C notEmpty(C collection) { return notEmpty(collection, "[Assertion failed] - this collection must not be empty: it must contain at least 1 element"); } /** * Assert that a Map has entries; that is, it must not be <code>null</code> and must have at least one entry. * * <pre class="code"> * Assert.notEmpty(map, &quot;Map must have entries&quot;); * </pre> * * @param map the map to check * @param message the exception message to use if the assertion fails * @throws IllegalArgumentException if the map is <code>null</code> or has no entries */ public static <M extends Map<K,V>,K,V> M notEmpty(M map, String message) { if (Maps.isEmpty(map)) { throw new IllegalArgumentException(message); } return map; } /** * Assert that a Map has entries; that is, it must not be <code>null</code> and must have at least one entry. * * <pre class="code"> * Assert.notEmpty(map); * </pre> * * @param map the map to check * @throws IllegalArgumentException if the map is <code>null</code> or has no entries */ public static <M extends Map<K,V>,K,V> M notEmpty(M map) { return notEmpty(map, "[Assertion failed] - this map must not be empty; it must contain at least one entry"); } /** * Assert that the provided object is an instance of the provided class. * * <pre class="code"> * Assert.instanceOf(Foo.class, foo); * </pre> * * @param clazz the required class * @param obj the object to check * @throws IllegalArgumentException if the object is not an instance of clazz * @see Class#isInstance */ public static <T> T isInstanceOf(Class<?> clazz, T obj) { return isInstanceOf(clazz, obj, ""); } /** * Assert that the provided object is an instance of the provided class. 
* * <pre class="code"> * Assert.instanceOf(Foo.class, foo); * </pre> * * @param type the type to check against * @param obj the object to check * @param message a message which will be prepended to the message produced by the function itself, and which may be * used to provide context. It should normally end in a ": " or ". " so that the function generate * message looks ok when prepended to it. * @throws IllegalArgumentException if the object is not an instance of clazz * @see Class#isInstance */ public static <T> T isInstanceOf(Class<?> type, T obj, String message) { notNull(type, "Type to check against must not be null"); if (!type.isInstance(obj)) { throw new IllegalArgumentException(message + "Object of class [" + (obj != null ? obj.getClass().getName() : "null") + "] must be an instance of " + type); } return obj; } /** * Assert that <code>superType.isAssignableFrom(subType)</code> is <code>true</code>. * * <pre class="code"> * Assert.isAssignableFrom(Number.class, myClass); * </pre> * * @param superType the super type to check * @param subType the sub type to check * @throws IllegalArgumentException if the classes are not assignable */ public static void isAssignableFrom(Class<?> superType, Class<?> subType) { isAssignableFrom(superType, subType, ""); } /** * Assert that <code>superType.isAssignableFrom(subType)</code> is <code>true</code>. * * <pre class="code"> * Assert.isAssignableFrom(Number.class, myClass); * </pre> * * @param superType the super type to check against * @param subType the sub type to check * @param message a message which will be prepended to the message produced by the function itself, and which may be * used to provide context. It should normally end in a ": " or ". " so that the function generate * message looks ok when prepended to it. 
* @throws IllegalArgumentException if the classes are not assignable */ public static void isAssignableFrom(Class<?> superType, Class<?> subType, String message) { notNull(superType, "Type to check against must not be null"); if (subType == null || !superType.isAssignableFrom(subType)) { throw new IllegalArgumentException(message + subType + " is not assignable to " + superType); } } /** * Assert a boolean expression, throwing <code>IllegalStateException</code> if the test result is <code>false</code> * . Call isTrue if you wish to throw IllegalArgumentException on an assertion failure. * * <pre class="code"> * Assert.state(id == null, &quot;The id property must not already be initialized&quot;); * </pre> * * @param expression a boolean expression * @param message the exception message to use if the assertion fails * @throws IllegalStateException if expression is <code>false</code> */ public static void stateValid(boolean expression, String message) { if (!expression) { throw new IllegalStateException(message); } } /** * Assert a boolean expression, throwing {@link IllegalStateException} if the test result is <code>false</code>. * <p> * Call {@link #isTrue(boolean)} if you wish to throw {@link IllegalArgumentException} on an assertion failure. * * <pre class="code"> * Assert.state(id == null); * </pre> * * @param expression a boolean expression * @throws IllegalStateException if the supplied expression is <code>false</code> */ public static void stateValid(boolean expression) { stateValid(expression, "[Assertion failed] - this state invariant must be true"); } /** * Assert that an argument value is not <code>null</code> . 
* * <pre class="code"> * Assert.notNull("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the object to check * * @throws IllegalArgumentException if the value is <code>null</code> */ public static void argNotNull(String argName,Object argValue) { if (argValue == null) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null",argName)); } } /** * Assert that the given argument String is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,String argValue) { if (Strings.isEmpty(argValue)) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } /** * Assert that the given argument Map is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,Map<?, ?> argValue) { if (null == argValue || argValue.isEmpty()) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } /** * Assert that the given argument Collection is not empty; that is, it must not be <code>null</code> and not the empty String. 
* * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,Collection<?> argValue) { if (null == argValue || argValue.isEmpty()) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } /** * Assert that the given argument Array is not empty; that is, it must not be <code>null</code> and not the empty String. * * <pre class="code"> * Assert.notEmpty("id",id); * </pre> * * @param argName the name of argument to check * @param argValue the String value to check * * @throws IllegalArgumentException if the value is <code>null</code> or empty */ public static void argNotEmpty(String argName,Object[] argValue) { if (null == argValue || argValue.length == 0) { throw new IllegalArgumentException(Strings.format("[Assertion failed] - the argument '{0}' is required, it must not be null or empty",argName)); } } }
add a few javadoc to some methods of Assert.
core-lang/src/main/java/bingo/lang/Assert.java
add a few javadoc to some methods of Assert.
Java
apache-2.0
d0fc6a0d16d89f903859f69be3d08666f2fa50ed
0
saucam/incubator-parquet-mr,nkhuyu/parquet-mr,tsdeng/incubator-parquet-mr,winningsix/incubator-parquet-mr,nkhuyu/parquet-mr,tsdeng/incubator-parquet-mr,spena/parquet-mr,rdblue/parquet-mr,cchang738/parquet-mr,cloudera/parquet-mr,nezihyigitbasi-nflx/parquet-mr,forcedotcom/incubator-parquet-mr,HyukjinKwon/parquet-mr,davidgin/parquet-mr,SinghAsDev/parquet-mr,hassyma/parquet-mr,davidgin/parquet-mr,nkhuyu/parquet-mr,cchang738/parquet-mr,pronix/parquet-mr,nitin2goyal/parquet-mr,DataDog/parquet-mr,pronix/parquet-mr,saucam/incubator-parquet-mr,sircodesalotOfTheRound/parquet-mr,Zariel/parquet-mr,sircodesalotOfTheRound/parquet-mr,laurentgo/parquet-mr,spena/parquet-mr,zhenxiao/parquet-mr,jaltekruse/parquet-mr-1,piyushnarang/parquet-mr,SaintBacchus/parquet-mr,spena/parquet-mr,HyukjinKwon/parquet-mr,nezihyigitbasi-nflx/parquet-mr,saucam/incubator-parquet-mr,coughman/incubator-parquet-mr,danielcweeks/incubator-parquet-mr,nitin2goyal/parquet-mr,dlanza1/parquet-mr,davidgin/parquet-mr,HyukjinKwon/parquet-mr-1,HyukjinKwon/parquet-mr-1,dongche/incubator-parquet-mr,nguyenvanthan/parquet-mr,cloudera/parquet-mr,laurentgo/parquet-mr,hassyma/parquet-mr,cchang738/parquet-mr,MickDavies/incubator-parquet-mr,nitin2goyal/parquet-mr-1,piyushnarang/parquet-mr,sircodesalotOfTheRound/parquet-mr,nguyenvanthan/parquet-mr,zhenxiao/parquet-mr,apache/parquet-mr,MickDavies/incubator-parquet-mr,dlanza1/parquet-mr,piyushnarang/parquet-mr,nezihyigitbasi-nflx/parquet-mr,coughman/incubator-parquet-mr,DataDog/parquet-mr,winningsix/incubator-parquet-mr,nitin2goyal/parquet-mr-1,DataDog/parquet-mr,danielcweeks/incubator-parquet-mr,apache/parquet-mr,SinghAsDev/parquet-mr,forcedotcom/incubator-parquet-mr,dongche/incubator-parquet-mr,nguyenvanthan/parquet-mr,sworisbreathing/parquet-mr,hassyma/parquet-mr,dlanza1/parquet-mr,tomwhite/parquet-mr-old,sworisbreathing/parquet-mr,nitin2goyal/parquet-mr-1,dongche/incubator-parquet-mr,jaltekruse/parquet-mr-1,Zariel/parquet-mr,nevillelyh/parquet-mr,HyukjinKwon/parquet-mr,rdblue
/parquet-mr,winningsix/incubator-parquet-mr,Zariel/parquet-mr,jaltekruse/parquet-mr-1,forcedotcom/incubator-parquet-mr,sworisbreathing/parquet-mr,nevillelyh/parquet-mr,laurentgo/parquet-mr,SinghAsDev/parquet-mr,tomwhite/parquet-mr-old,nitin2goyal/parquet-mr,SaintBacchus/parquet-mr,coughman/incubator-parquet-mr,danielcweeks/incubator-parquet-mr,HyukjinKwon/parquet-mr-1,apache/parquet-mr,MickDavies/incubator-parquet-mr,nevillelyh/parquet-mr,rdblue/parquet-mr,SaintBacchus/parquet-mr,pronix/parquet-mr,tsdeng/incubator-parquet-mr,zhenxiao/parquet-mr
/** * Copyright 2013 Criteo. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package parquet.hive; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.junit.Test; import parquet.hive.convert.HiveSchemaConverter; import parquet.schema.MessageType; import parquet.schema.MessageTypeParser; /** * * TestHiveSchemaConverter * * * @author Mickaël Lacour <[email protected]> * */ public class TestHiveSchemaConverter { private List<String> createHiveColumnsFrom(final String columnNamesStr) { List<String> columnNames; if (columnNamesStr.length() == 0) { columnNames = new ArrayList<String>(); } else { columnNames = Arrays.asList(columnNamesStr.split(",")); } return columnNames; } private List<TypeInfo> createHiveTypeInfoFrom(final String columnsTypeStr) { List<TypeInfo> columnTypes; if (columnsTypeStr.length() == 0) { columnTypes = new ArrayList<TypeInfo>(); } else { columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnsTypeStr); } return columnTypes; } private void testConversion(final String columnNamesStr, final String columnsTypeStr, final String expectedSchema) throws Exception { final List<String> columnNames = createHiveColumnsFrom(columnNamesStr); final List<TypeInfo> columnTypes = createHiveTypeInfoFrom(columnsTypeStr); final MessageType messageTypeFound = 
HiveSchemaConverter.convert(columnNames, columnTypes); final MessageType expectedMT = MessageTypeParser.parseMessageType(expectedSchema); assertEquals("converting " + columnNamesStr + ": " + columnsTypeStr + " to " + expectedSchema, expectedMT, messageTypeFound); } @Test public void testSimpleType() throws Exception { testConversion( "a,b,c", "int,double,boolean", "message hive_schema {\n" + " optional int32 a;\n" + " optional double b;\n" + " optional boolean c;\n" + "}\n"); } @Test public void testArray() throws Exception { testConversion("arrayCol", "array<int>", "message hive_schema {\n" + " optional group arrayCol (LIST) {\n" + " repeated group bag {\n" + " optional int32 array_element;\n" + " }\n" + " }\n" + "}\n"); } @Test public void testStruct() throws Exception { testConversion("structCol", "struct<a:int,b:double,c:boolean>", "message hive_schema {\n" + " optional group structCol {\n" + " optional int32 a;\n" + " optional double b;\n" + " optional boolean c;\n" + " }\n" + "}\n"); } @Test public void testMap() throws Exception { testConversion("mapCol", "map<string,string>", "message hive_schema {\n" + " optional group mapCol (MAP) {\n" + " repeated group map (MAP_KEY_VALUE) {\n" + " required binary key;\n" + " optional binary value;\n" + " }\n" + " }\n" + "}\n"); } }
parquet-hive/src/test/java/parquet/hive/TestHiveSchemaConverter.java
/** * Copyright 2013 Criteo. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package parquet.hive; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.junit.Test; import parquet.hive.convert.HiveSchemaConverter; import parquet.schema.MessageType; import parquet.schema.MessageTypeParser; /** * * TestHiveSchemaConverter * * * @author Mickaël Lacour <[email protected]> * */ public class TestHiveSchemaConverter { private List<String> createHiveColumnsFrom(final String columnNamesStr) { List<String> columnNames; if (columnNamesStr.length() == 0) { columnNames = new ArrayList<String>(); } else { columnNames = Arrays.asList(columnNamesStr.split(",")); } return columnNames; } private List<TypeInfo> createHiveTypeInfoFrom(final String columnsTypeStr) { List<TypeInfo> columnTypes; if (columnsTypeStr.length() == 0) { columnTypes = new ArrayList<TypeInfo>(); } else { columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnsTypeStr); } return columnTypes; } private void testConversion(final String columnNamesStr, final String columnsTypeStr, final String expectedSchema) throws Exception { final List<String> columnNames = createHiveColumnsFrom(columnNamesStr); final List<TypeInfo> columnTypes = createHiveTypeInfoFrom(columnsTypeStr); final MessageType messageTypeFound = 
HiveSchemaConverter.convert(columnNames, columnTypes); final MessageType expectedMT = MessageTypeParser.parseMessageType(expectedSchema); assertEquals("converting " + columnNamesStr + ": " + columnsTypeStr + " to " + expectedSchema, expectedMT, messageTypeFound); } @Test public void testSimpleType() throws Exception { testConversion( "a,b,c", "int,double,boolean", "message hive_schema {\n" + " optional int32 a;\n" + " optional double b;\n" + " optional boolean c;\n" + " }\n" + "}\n"); } @Test public void testArray() throws Exception { testConversion("arrayCol", "array<int>", "message hive_schema {\n" + " optional group arrayCol (LIST) {\n" + " repeated group bag {\n" + " optional int32 array_element;\n" + " }\n" + " }\n" + "}\n"); } @Test public void testStruct() throws Exception { testConversion("structCol", "struct<a:int,b:double,c:boolean>", "message hive_schema {\n" + " optional group structCol {\n" + " optional int32 a;\n" + " optional double b;\n" + " optional boolean c;\n" + " }\n" + "}\n"); } @Test public void testMap() throws Exception { testConversion("mapCol", "map<string,string>", "message hive_schema {\n" + " optional group mapCol (MAP_KEY_VALUE) {\n" + " repeated group map {\n" + " required binary key;\n" + " optional binary value;\n" + " }\n" + " }\n" + "}\n"); } }
Correct schema syntaxes for TestHiveSchemaConverter.
parquet-hive/src/test/java/parquet/hive/TestHiveSchemaConverter.java
Correct schema syntaxes for TestHiveSchemaConverter.
Java
apache-2.0
8e4dd21ef39646a7e1434eda77f1972fda3a0c94
0
consulo/consulo-ui-designer,consulo/consulo-ui-designer
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.uiDesigner; import javax.annotation.Nullable; import com.intellij.designer.DesignerEditorPanelFacade; import com.intellij.designer.LightToolWindowManager; import com.intellij.designer.ToggleEditorModeAction; import com.intellij.ide.palette.impl.PaletteToolWindowManager; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.wm.ToolWindowAnchor; import com.intellij.uiDesigner.editor.UIFormEditor; import com.intellij.uiDesigner.propertyInspector.DesignerToolWindowManager; import consulo.disposer.Disposable; /** * @author Alexander Lobas */ public abstract class AbstractToolWindowManager extends LightToolWindowManager implements Disposable { public AbstractToolWindowManager(Project project, FileEditorManager fileEditorManager) { super(project, fileEditorManager); } @Override public void dispose() { projectClosed(); } @Nullable @Override protected DesignerEditorPanelFacade getDesigner(FileEditor editor) { if(editor instanceof UIFormEditor) { UIFormEditor formEditor = (UIFormEditor) editor; return formEditor.getEditor(); } return null; } @Override protected ToggleEditorModeAction createToggleAction(ToolWindowAnchor anchor) { return new ToggleEditorModeAction(this, myProject, anchor) { @Override protected LightToolWindowManager getOppositeManager() { 
AbstractToolWindowManager designerManager = DesignerToolWindowManager.getInstance(myProject); AbstractToolWindowManager paletteManager = PaletteToolWindowManager.getInstance(myProject); return myManager == designerManager ? paletteManager : designerManager; } }; } }
src/main/java/com/intellij/uiDesigner/AbstractToolWindowManager.java
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.uiDesigner; import javax.annotation.Nullable; import com.intellij.designer.DesignerEditorPanelFacade; import com.intellij.designer.LightToolWindowManager; import com.intellij.designer.ToggleEditorModeAction; import com.intellij.ide.palette.impl.PaletteToolWindowManager; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.wm.ToolWindowAnchor; import com.intellij.uiDesigner.editor.UIFormEditor; import com.intellij.uiDesigner.propertyInspector.DesignerToolWindowManager; import com.sun.media.jfxmediaimpl.MediaDisposer; /** * @author Alexander Lobas */ public abstract class AbstractToolWindowManager extends LightToolWindowManager implements MediaDisposer.Disposable { public AbstractToolWindowManager(Project project, FileEditorManager fileEditorManager) { super(project, fileEditorManager); } @Override public void dispose() { projectClosed(); } @Nullable @Override protected DesignerEditorPanelFacade getDesigner(FileEditor editor) { if(editor instanceof UIFormEditor) { UIFormEditor formEditor = (UIFormEditor) editor; return formEditor.getEditor(); } return null; } @Override protected ToggleEditorModeAction createToggleAction(ToolWindowAnchor anchor) { return new ToggleEditorModeAction(this, 
myProject, anchor) { @Override protected LightToolWindowManager getOppositeManager() { AbstractToolWindowManager designerManager = DesignerToolWindowManager.getInstance(myProject); AbstractToolWindowManager paletteManager = PaletteToolWindowManager.getInstance(myProject); return myManager == designerManager ? paletteManager : designerManager; } }; } }
wrong import
src/main/java/com/intellij/uiDesigner/AbstractToolWindowManager.java
wrong import
Java
apache-2.0
c2ee64ce3c6394e63d8c8d7d0a38e1f0c7f4ffde
0
BrunoEberhard/minimal-j,BrunoEberhard/minimal-j,BrunoEberhard/minimal-j
package org.minimalj.model; /** * A class implementing View is a model class that holds only a part * of the fields of an other class.<p> * * The objects of this class are never saved if they referenced * by other objects.<p> * * Note that while the concept is similar to database views this * interface does <i>not</i> represent views on the database but * views on a java model entity. * * @param <T> the class on which this view is based. Mandatory. */ public interface View<T> { }
src/main/java/org/minimalj/model/View.java
package org.minimalj.model; /** * A class implementing View is a model class that holds only a part * of the fields of an other class. * * The objects of this class are never saved if they referenced * by other objects * * @param <T> the class on which this view is based. Mandatory. */ public interface View<T> { }
View: added javadoc
src/main/java/org/minimalj/model/View.java
View: added javadoc
Java
apache-2.0
f8a910ca37a55e6a30b2fcfb38f452dc19455b66
0
gotmyjobs/couchbase-lite-java-core,mariosotil/couchbase-lite-java-core,netsense-sas/couchbase-lite-java-core,couchbase/couchbase-lite-java-core,Spotme/couchbase-lite-java-core,4u7/couchbase-lite-java-core,mariosotil/couchbase-lite-java-core,mariosotil/couchbase-lite-java-core
package com.couchbase.lite.replicator; import com.couchbase.lite.AsyncTask; import com.couchbase.lite.CouchbaseLiteException; import com.couchbase.lite.Database; import com.couchbase.lite.Manager; import com.couchbase.lite.Misc; import com.couchbase.lite.NetworkReachabilityListener; import com.couchbase.lite.RevisionList; import com.couchbase.lite.Status; import com.couchbase.lite.auth.Authenticator; import com.couchbase.lite.auth.AuthenticatorImpl; import com.couchbase.lite.auth.Authorizer; import com.couchbase.lite.auth.FacebookAuthorizer; import com.couchbase.lite.auth.PersonaAuthorizer; import com.couchbase.lite.internal.InterfaceAudience; import com.couchbase.lite.internal.RevisionInternal; import com.couchbase.lite.support.BatchProcessor; import com.couchbase.lite.support.Batcher; import com.couchbase.lite.support.CouchbaseLiteHttpClientFactory; import com.couchbase.lite.support.HttpClientFactory; import com.couchbase.lite.support.PersistentCookieStore; import com.couchbase.lite.support.RemoteMultipartDownloaderRequest; import com.couchbase.lite.support.RemoteMultipartRequest; import com.couchbase.lite.support.RemoteRequest; import com.couchbase.lite.support.RemoteRequestCompletionBlock; import com.couchbase.lite.util.CollectionUtils; import com.couchbase.lite.util.Log; import com.couchbase.lite.util.TextUtils; import com.couchbase.lite.util.URIUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.CookieStore; import org.apache.http.client.HttpResponseException; import org.apache.http.cookie.Cookie; import org.apache.http.entity.mime.MultipartEntity; import org.apache.http.impl.cookie.BasicClientCookie2; import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import 
java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; /** * A Couchbase Lite pull or push Replication between a local and a remote Database. */ public abstract class Replication implements NetworkReachabilityListener { private static int lastSessionID = 0; protected boolean continuous; protected String filterName; protected ScheduledExecutorService workExecutor; protected Database db; protected URL remote; protected String lastSequence; protected boolean lastSequenceChanged; protected Map<String, Object> remoteCheckpoint; protected boolean savingCheckpoint; protected boolean overdueForSave; protected boolean running; protected boolean active; protected Throwable error; protected String sessionID; protected Batcher<RevisionInternal> batcher; protected int asyncTaskCount; protected AtomicInteger completedChangesCount; private AtomicInteger changesCount; protected boolean online; protected HttpClientFactory clientFactory; private final List<ChangeListener> changeListeners; protected List<String> documentIDs; protected Map<String, Object> filterParams; protected ExecutorService remoteRequestExecutor; protected Authenticator authenticator; private ReplicationStatus status = ReplicationStatus.REPLICATION_STOPPED; protected Map<String, Object> requestHeaders; private int revisionsFailed; private ScheduledFuture retryIfReadyFuture; private final Map<RemoteRequest, Future> requests; private String serverType; private String remoteCheckpointDocID; private CollectionUtils.Functor<Map<String,Object>,Map<String,Object>> propertiesTransformationBlock; protected 
CollectionUtils.Functor<RevisionInternal,RevisionInternal> revisionBodyTransformationBlock; protected static final int PROCESSOR_DELAY = 500; protected static final int INBOX_CAPACITY = 100; protected static final int RETRY_DELAY = 60; protected static final int EXECUTOR_THREAD_POOL_SIZE = 5; /** * @exclude */ public static final String BY_CHANNEL_FILTER_NAME = "sync_gateway/bychannel"; /** * @exclude */ public static final String CHANNELS_QUERY_PARAM = "channels"; /** * @exclude */ public static final String REPLICATOR_DATABASE_NAME = "_replicator"; /** * Options for what metadata to include in document bodies */ public enum ReplicationStatus { /** The replication is finished or hit a fatal error. */ REPLICATION_STOPPED, /** The remote host is currently unreachable. */ REPLICATION_OFFLINE, /** Continuous replication is caught up and waiting for more changes.*/ REPLICATION_IDLE, /** The replication is actively transferring data. */ REPLICATION_ACTIVE } /** * Private Constructor * @exclude */ @InterfaceAudience.Private /* package */ Replication(Database db, URL remote, boolean continuous, ScheduledExecutorService workExecutor) { this(db, remote, continuous, null, workExecutor); } /** * Private Constructor * @exclude */ @InterfaceAudience.Private /* package */ Replication(Database db, URL remote, boolean continuous, HttpClientFactory clientFactory, ScheduledExecutorService workExecutor) { this.db = db; this.continuous = continuous; this.workExecutor = workExecutor; this.remote = remote; this.remoteRequestExecutor = Executors.newFixedThreadPool(EXECUTOR_THREAD_POOL_SIZE); this.changeListeners = new CopyOnWriteArrayList<ChangeListener>(); this.online = true; this.requestHeaders = new HashMap<String, Object>(); this.requests = new ConcurrentHashMap<RemoteRequest, Future>(); this.completedChangesCount = new AtomicInteger(0); this.changesCount = new AtomicInteger(0); if (remote.getQuery() != null && !remote.getQuery().isEmpty()) { URI uri = 
URI.create(remote.toExternalForm()); String personaAssertion = URIUtils.getQueryParameter(uri, PersonaAuthorizer.QUERY_PARAMETER); if (personaAssertion != null && !personaAssertion.isEmpty()) { String email = PersonaAuthorizer.registerAssertion(personaAssertion); PersonaAuthorizer authorizer = new PersonaAuthorizer(email); setAuthenticator(authorizer); } String facebookAccessToken = URIUtils.getQueryParameter(uri, FacebookAuthorizer.QUERY_PARAMETER); if (facebookAccessToken != null && !facebookAccessToken.isEmpty()) { String email = URIUtils.getQueryParameter(uri, FacebookAuthorizer.QUERY_PARAMETER_EMAIL); FacebookAuthorizer authorizer = new FacebookAuthorizer(email); URL remoteWithQueryRemoved = null; try { remoteWithQueryRemoved = new URL(remote.getProtocol(), remote.getHost(), remote.getPort(), remote.getPath()); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } authorizer.registerAccessToken(facebookAccessToken, email, remoteWithQueryRemoved.toExternalForm()); setAuthenticator(authorizer); } // we need to remove the query from the URL, since it will cause problems when // communicating with sync gw / couchdb try { this.remote = new URL(remote.getProtocol(), remote.getHost(), remote.getPort(), remote.getPath()); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } } batcher = new Batcher<RevisionInternal>(workExecutor, INBOX_CAPACITY, PROCESSOR_DELAY, new BatchProcessor<RevisionInternal>() { @Override public void process(List<RevisionInternal> inbox) { try { Log.v(Log.TAG_SYNC, "*** %s: BEGIN processInbox (%d sequences)", this, inbox.size()); processInbox(new RevisionList(inbox)); Log.v(Log.TAG_SYNC, "*** %s: END processInbox (lastSequence=%s)", this, lastSequence); Log.v(Log.TAG_SYNC, "%s: batcher calling updateActive()", this); updateActive(); } catch (Exception e) { Log.e(Log.TAG_SYNC,"ERROR: processInbox failed: ",e); throw new RuntimeException(e); } } }); setClientFactory(clientFactory); } /** * Set the 
HTTP client factory if one was passed in, or use the default * set in the manager if available. * @param clientFactory */ @InterfaceAudience.Private protected void setClientFactory(HttpClientFactory clientFactory) { Manager manager = null; if (this.db != null) { manager = this.db.getManager(); } HttpClientFactory managerClientFactory = null; if (manager != null) { managerClientFactory = manager.getDefaultHttpClientFactory(); } if (clientFactory != null) { this.clientFactory = clientFactory; } else { if (managerClientFactory != null) { this.clientFactory = managerClientFactory; } else { PersistentCookieStore cookieStore = db.getPersistentCookieStore(); this.clientFactory = new CouchbaseLiteHttpClientFactory(cookieStore); } } } /** * Get the local database which is the source or target of this replication */ @InterfaceAudience.Public public Database getLocalDatabase() { return db; } /** * Get the remote URL which is the source or target of this replication */ @InterfaceAudience.Public public URL getRemoteUrl() { return remote; } /** * Is this a pull replication? (Eg, it pulls data from Sync Gateway -> Device running CBL?) */ @InterfaceAudience.Public public abstract boolean isPull(); /** * Should the target database be created if it doesn't already exist? (Defaults to NO). */ @InterfaceAudience.Public public abstract boolean shouldCreateTarget(); /** * Set whether the target database be created if it doesn't already exist? */ @InterfaceAudience.Public public abstract void setCreateTarget(boolean createTarget); /** * Should the replication operate continuously, copying changes as soon as the * source database is modified? (Defaults to NO). */ @InterfaceAudience.Public public boolean isContinuous() { return continuous; } /** * Set whether the replication should operate continuously. 
*/ @InterfaceAudience.Public public void setContinuous(boolean continuous) { if (!isRunning()) { this.continuous = continuous; } } /** * Name of an optional filter function to run on the source server. Only documents for * which the function returns true are replicated. * * For a pull replication, the name looks like "designdocname/filtername". * For a push replication, use the name under which you registered the filter with the Database. */ @InterfaceAudience.Public public String getFilter() { return filterName; } /** * Set the filter to be used by this replication */ @InterfaceAudience.Public public void setFilter(String filterName) { this.filterName = filterName; } /** * Parameters to pass to the filter function. Should map strings to strings. */ @InterfaceAudience.Public public Map<String, Object> getFilterParams() { return filterParams; } /** * Set parameters to pass to the filter function. */ @InterfaceAudience.Public public void setFilterParams(Map<String, Object> filterParams) { this.filterParams = filterParams; } /** * List of Sync Gateway channel names to filter by; a nil value means no filtering, i.e. all * available channels will be synced. Only valid for pull replications whose source database * is on a Couchbase Sync Gateway server. (This is a convenience that just reads or * changes the values of .filter and .query_params.) 
*/ @InterfaceAudience.Public public List<String> getChannels() { if (filterParams == null || filterParams.isEmpty()) { return new ArrayList<String>(); } String params = (String) filterParams.get(CHANNELS_QUERY_PARAM); if (!isPull() || getFilter() == null || !getFilter().equals(BY_CHANNEL_FILTER_NAME) || params == null || params.isEmpty()) { return new ArrayList<String>(); } String[] paramsArray = params.split(","); return new ArrayList<String>(Arrays.asList(paramsArray)); } /** * Set the list of Sync Gateway channel names */ @InterfaceAudience.Public public void setChannels(List<String> channels) { if (channels != null && !channels.isEmpty()) { if (!isPull()) { Log.w(Log.TAG_SYNC, "filterChannels can only be set in pull replications"); return; } setFilter(BY_CHANNEL_FILTER_NAME); Map<String, Object> filterParams = new HashMap<String, Object>(); filterParams.put(CHANNELS_QUERY_PARAM, TextUtils.join(",", channels)); setFilterParams(filterParams); } else if (getFilter().equals(BY_CHANNEL_FILTER_NAME)) { setFilter(null); setFilterParams(null); } } /** * Extra HTTP headers to send in all requests to the remote server. * Should map strings (header names) to strings. */ @InterfaceAudience.Public public Map<String, Object> getHeaders() { return requestHeaders; } /** * Set Extra HTTP headers to be sent in all requests to the remote server. */ @InterfaceAudience.Public public void setHeaders(Map<String, Object> requestHeadersParam) { if (requestHeadersParam != null && !requestHeaders.equals(requestHeadersParam)) { requestHeaders = requestHeadersParam; } } /** * Gets the documents to specify as part of the replication. */ @InterfaceAudience.Public public List<String> getDocIds() { return documentIDs; } /** * Sets the documents to specify as part of the replication. */ @InterfaceAudience.Public public void setDocIds(List<String> docIds) { documentIDs = docIds; } /** * The replication's current state, one of {stopped, offline, idle, active}. 
*/ @InterfaceAudience.Public public ReplicationStatus getStatus() { return status; } /** * The number of completed changes processed, if the task is active, else 0 (observable). */ @InterfaceAudience.Public public int getCompletedChangesCount() { return completedChangesCount.get(); } /** * The total number of changes to be processed, if the task is active, else 0 (observable). */ @InterfaceAudience.Public public int getChangesCount() { return changesCount.get(); } /** * True while the replication is running, False if it's stopped. * Note that a continuous replication never actually stops; it only goes idle waiting for new * data to appear. */ @InterfaceAudience.Public public boolean isRunning() { return running; } /** * The error status of the replication, or null if there have not been any errors since * it started. */ @InterfaceAudience.Public public Throwable getLastError() { return error; } /** * Starts the replication, asynchronously. */ @InterfaceAudience.Public public void start() { if (!db.isOpen()) { // Race condition: db closed before replication starts Log.w(Log.TAG_SYNC, "Not starting replication because db.isOpen() returned false."); return; } if (running) { return; } db.addReplication(this); db.addActiveReplication(this); final CollectionUtils.Functor<Map<String,Object>,Map<String,Object>> xformer = propertiesTransformationBlock; if (xformer != null) { revisionBodyTransformationBlock = new CollectionUtils.Functor<RevisionInternal, RevisionInternal>() { @Override public RevisionInternal invoke(RevisionInternal rev) { Map<String,Object> properties = rev.getProperties(); Map<String, Object> xformedProperties = xformer.invoke(properties); if (xformedProperties == null) { rev = null; } else if (xformedProperties != properties) { assert(xformedProperties != null); assert(xformedProperties.get("_id").equals(properties.get("_id"))); assert(xformedProperties.get("_rev").equals(properties.get("_rev"))); RevisionInternal nuRev = new 
RevisionInternal(rev.getProperties(), db); nuRev.setProperties(xformedProperties); rev = nuRev; } return rev; } }; } this.sessionID = String.format("repl%03d", ++lastSessionID); Log.v(Log.TAG_SYNC, "%s: STARTING ...", this); running = true; lastSequence = null; checkSession(); db.getManager().getContext().getNetworkReachabilityManager().addNetworkReachabilityListener(this); } /** * Stops replication, asynchronously. */ @InterfaceAudience.Public public void stop() { if (!running) { return; } Log.v(Log.TAG_SYNC, "%s: STOPPING...", this); if (batcher != null) { batcher.clear(); // no sense processing any pending changes } else { Log.v(Log.TAG_SYNC, "%s: stop() called, not calling batcher.clear() since it's null", this); } continuous = false; stopRemoteRequests(); cancelPendingRetryIfReady(); if (db != null) { db.forgetReplication(this); } else { Log.v(Log.TAG_SYNC, "%s: stop() called, not calling db.forgetReplication() since it's null", this); } if (running && asyncTaskCount <= 0) { Log.v(Log.TAG_SYNC, "%s: calling stopped()", this); stopped(); } else { Log.v(Log.TAG_SYNC, "%s: not calling stopped(). running: %s asyncTaskCount: %d", this, running, asyncTaskCount); } } /** * Restarts a completed or failed replication. */ @InterfaceAudience.Public public void restart() { // TODO: add the "started" flag and check it here stop(); start(); } /** * Adds a change delegate that will be called whenever the Replication changes. */ @InterfaceAudience.Public public void addChangeListener(ChangeListener changeListener) { changeListeners.add(changeListener); } /** * Return a string representation of this replication. * * The credentials will be masked in order to avoid passwords leaking into logs. */ @Override @InterfaceAudience.Public public String toString() { String maskedRemoteWithoutCredentials = (remote != null ? 
remote.toExternalForm() : ""); maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials.replaceAll("://.*:.*@", "://---:---@"); String name = getClass().getSimpleName() + "@" + Integer.toHexString(hashCode()) + "[" + maskedRemoteWithoutCredentials + "]"; return name; } /** * Sets an HTTP cookie for the Replication. * * @param name The name of the cookie. * @param value The value of the cookie. * @param path The path attribute of the cookie. If null or empty, will use remote.getPath() * @param maxAge The maxAge, in milliseconds, that this cookie should be valid for. * @param secure Whether the cookie should only be sent using a secure protocol (e.g. HTTPS). * @param httpOnly (ignored) Whether the cookie should only be used when transmitting HTTP, or HTTPS, requests thus restricting access from other, non-HTTP APIs. */ @InterfaceAudience.Public public void setCookie(String name, String value, String path, long maxAge, boolean secure, boolean httpOnly) { Date now = new Date(); Date expirationDate = new Date(now.getTime() + maxAge); setCookie(name, value, path, expirationDate, secure, httpOnly); } /** * Sets an HTTP cookie for the Replication. * * @param name The name of the cookie. * @param value The value of the cookie. * @param path The path attribute of the cookie. If null or empty, will use remote.getPath() * @param expirationDate The expiration date of the cookie. * @param secure Whether the cookie should only be sent using a secure protocol (e.g. HTTPS). * @param httpOnly (ignored) Whether the cookie should only be used when transmitting HTTP, or HTTPS, requests thus restricting access from other, non-HTTP APIs. 
*/ @InterfaceAudience.Public public void setCookie(String name, String value, String path, Date expirationDate, boolean secure, boolean httpOnly) { if (remote == null) { throw new IllegalStateException("Cannot setCookie since remote == null"); } BasicClientCookie2 cookie = new BasicClientCookie2(name, value); cookie.setDomain(remote.getHost()); if (path != null && path.length() > 0) { cookie.setPath(path); } else { cookie.setPath(remote.getPath()); } cookie.setExpiryDate(expirationDate); cookie.setSecure(secure); List<Cookie> cookies = Arrays.asList((Cookie)cookie); this.clientFactory.addCookies(cookies); } /** * Deletes an HTTP cookie for the Replication. * * @param name The name of the cookie. */ @InterfaceAudience.Public public void deleteCookie(String name) { this.clientFactory.deleteCookie(name); } /** * The type of event raised by a Replication when any of the following * properties change: mode, running, error, completed, total. */ @InterfaceAudience.Public public static class ChangeEvent { private Replication source; public ChangeEvent(Replication source) { this.source = source; } public Replication getSource() { return source; } } /** * A delegate that can be used to listen for Replication changes. */ @InterfaceAudience.Public public static interface ChangeListener { public void changed(ChangeEvent event); } /** * Removes the specified delegate as a listener for the Replication change event. 
*/ @InterfaceAudience.Public public void removeChangeListener(ChangeListener changeListener) { changeListeners.remove(changeListener); } /** * Set the Authenticator used for authenticating with the Sync Gateway */ @InterfaceAudience.Public public void setAuthenticator(Authenticator authenticator) { this.authenticator = authenticator; } /** * Get the Authenticator used for authenticating with the Sync Gateway */ @InterfaceAudience.Public public Authenticator getAuthenticator() { return authenticator; } /** * @exclude */ @InterfaceAudience.Private public void databaseClosing() { saveLastSequence(); stop(); clearDbRef(); } /** * If we're in the middle of saving the checkpoint and waiting for a response, by the time the * response arrives _db will be nil, so there won't be any way to save the checkpoint locally. * To avoid that, pre-emptively save the local checkpoint now. * * @exclude */ private void clearDbRef() { if (savingCheckpoint && lastSequence != null && db != null) { db.setLastSequence(lastSequence, remoteCheckpointDocID(), !isPull()); db = null; } } /** * @exclude */ @InterfaceAudience.Private public String getLastSequence() { return lastSequence; } /** * @exclude */ @InterfaceAudience.Private public void setLastSequence(String lastSequenceIn) { if (lastSequenceIn != null && !lastSequenceIn.equals(lastSequence)) { Log.v(Log.TAG_SYNC, "%s: Setting lastSequence to %s from(%s)", this, lastSequenceIn, lastSequence ); lastSequence = lastSequenceIn; if (!lastSequenceChanged) { lastSequenceChanged = true; workExecutor.schedule(new Runnable() { @Override public void run() { saveLastSequence(); } }, 2 * 1000, TimeUnit.MILLISECONDS); } } } @InterfaceAudience.Private /* package */ void addToCompletedChangesCount(int delta) { int previousVal = this.completedChangesCount.getAndAdd(delta); Log.v(Log.TAG_SYNC, "%s: Incrementing completedChangesCount count from %s by adding %d -> %d", this, previousVal, delta, completedChangesCount.get()); notifyChangeListeners(); } 
@InterfaceAudience.Private /* package */ void addToChangesCount(int delta) { int previousVal = this.changesCount.getAndAdd(delta); if (changesCount.get() < 0) { Log.w(Log.TAG_SYNC, "Changes count is negative, this could indicate an error"); } Log.v(Log.TAG_SYNC, "%s: Incrementing changesCount count from %s by adding %d -> %d", this, previousVal, delta, changesCount.get()); notifyChangeListeners(); } /** * @exclude */ @InterfaceAudience.Private public String getSessionID() { return sessionID; } @InterfaceAudience.Private protected void checkSession() { // REVIEW : This is not in line with the iOS implementation if (getAuthenticator() != null && ((AuthenticatorImpl)getAuthenticator()).usesCookieBasedLogin()) { checkSessionAtPath("/_session"); } else { fetchRemoteCheckpointDoc(); } } @InterfaceAudience.Private protected void checkSessionAtPath(final String sessionPath) { Log.v(Log.TAG_SYNC, "%s | %s: checkSessionAtPath() calling asyncTaskStarted()", this, Thread.currentThread()); asyncTaskStarted(); sendAsyncRequest("GET", sessionPath, null, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable error) { try { if (error != null) { // If not at /db/_session, try CouchDB location /_session if (error instanceof HttpResponseException && ((HttpResponseException) error).getStatusCode() == 404 && sessionPath.equalsIgnoreCase("/_session")) { checkSessionAtPath("_session"); return; } Log.e(Log.TAG_SYNC, this + ": Session check failed", error); setError(error); } else { Map<String, Object> response = (Map<String, Object>) result; Map<String, Object> userCtx = (Map<String, Object>) response.get("userCtx"); String username = (String) userCtx.get("name"); if (username != null && username.length() > 0) { Log.d(Log.TAG_SYNC, "%s Active session, logged in as %s", this, username); fetchRemoteCheckpointDoc(); } else { Log.d(Log.TAG_SYNC, "%s No active session, going to login", this); login(); } } } finally { Log.v(Log.TAG_SYNC, "%s | %s: 
checkSessionAtPath() calling asyncTaskFinished()", this, Thread.currentThread()); asyncTaskFinished(1); } } }); } /** * @exclude */ @InterfaceAudience.Private public abstract void beginReplicating(); @InterfaceAudience.Private protected void stopped() { Log.v(Log.TAG_SYNC, "%s: STOPPED", this); running = false; notifyChangeListeners(); saveLastSequence(); batcher = null; if (db != null) { db.getManager().getContext().getNetworkReachabilityManager().removeNetworkReachabilityListener(this); } clearDbRef(); // db no longer tracks me so it won't notify me when it closes; clear ref now } @InterfaceAudience.Private private void notifyChangeListeners() { updateProgress(); for (ChangeListener listener : changeListeners) { ChangeEvent changeEvent = new ChangeEvent(this); listener.changed(changeEvent); } } @InterfaceAudience.Private protected void login() { Map<String, String> loginParameters = ((AuthenticatorImpl)getAuthenticator()).loginParametersForSite(remote); if (loginParameters == null) { Log.d(Log.TAG_SYNC, "%s: %s has no login parameters, so skipping login", this, getAuthenticator()); fetchRemoteCheckpointDoc(); return; } final String loginPath = ((AuthenticatorImpl)getAuthenticator()).loginPathForSite(remote); Log.d(Log.TAG_SYNC, "%s: Doing login with %s at %s", this, getAuthenticator().getClass(), loginPath); Log.v(Log.TAG_SYNC, "%s | %s: login() calling asyncTaskStarted()", this, Thread.currentThread()); asyncTaskStarted(); sendAsyncRequest("POST", loginPath, loginParameters, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { try { if (e != null) { Log.d(Log.TAG_SYNC, "%s: Login failed for path: %s", this, loginPath); setError(e); } else { Log.v(Log.TAG_SYNC, "%s: Successfully logged in!", this); fetchRemoteCheckpointDoc(); } } finally { Log.v(Log.TAG_SYNC, "%s | %s: login() calling asyncTaskFinished()", this, Thread.currentThread()); asyncTaskFinished(1); } } }); } /** * @exclude */ @InterfaceAudience.Private 
/**
 * Increments the outstanding async-task counter; on the 0 -> 1 transition the
 * replication re-evaluates its active state.
 * (Annotation for this method appears at the end of the preceding span.)
 */
public synchronized void asyncTaskStarted() {
    Log.v(Log.TAG_SYNC, "%s: asyncTaskStarted %d -> %d",
            this, this.asyncTaskCount, this.asyncTaskCount + 1);
    if (asyncTaskCount++ == 0) {
        Log.v(Log.TAG_SYNC, "%s: asyncTaskStarted() calling updateActive()", this);
        updateActive();
    }
}

/**
 * Decrements the outstanding async-task counter by numTasks; when it reaches zero
 * the replication re-evaluates its active state.
 * NOTE(review): the invariant check uses assert, which is disabled unless the JVM
 * runs with -ea — confirm this is intentional.
 * @exclude
 */
@InterfaceAudience.Private
public synchronized void asyncTaskFinished(int numTasks) {
    Log.v(Log.TAG_SYNC, "%s: asyncTaskFinished %d -> %d",
            this, this.asyncTaskCount, this.asyncTaskCount - numTasks);
    this.asyncTaskCount -= numTasks;
    assert (asyncTaskCount >= 0);
    if (asyncTaskCount == 0) {
        Log.v(Log.TAG_SYNC, "%s: asyncTaskFinished() calling updateActive()", this);
        updateActive();
    }
}

/**
 * Recomputes whether the replication is "active" (pending batcher items or outstanding
 * async tasks). On the active -> idle transition: a one-shot replication stops; a
 * continuous one with a recorded error schedules a retry.
 * @exclude
 */
@InterfaceAudience.Private
public void updateActive() {
    try {
        int batcherCount = 0;
        if (batcher != null) {
            batcherCount = batcher.count();
        } else {
            Log.w(Log.TAG_SYNC, "%s: batcher object is null.", this);
        }
        boolean newActive = batcherCount > 0 || asyncTaskCount > 0;
        Log.d(Log.TAG_SYNC, "%s: updateActive() called. active: %s, newActive: %s batcherCount: %d, asyncTaskCount: %d",
                this, active, newActive, batcherCount, asyncTaskCount);
        if (active != newActive) {
            Log.d(Log.TAG_SYNC, "%s: Progress: set active = %s asyncTaskCount: %d batcherCount: %d",
                    this, newActive, asyncTaskCount, batcherCount);
            active = newActive;
            notifyChangeListeners();
            if (!active) {
                if (!continuous) {
                    Log.d(Log.TAG_SYNC, "%s since !continuous, calling stopped()", this);
                    stopped();
                } else if (error != null) /*(revisionsFailed > 0)*/ {
                    Log.d(Log.TAG_SYNC, "%s: Failed to xfer %d revisions, will retry in %d sec",
                            this, revisionsFailed, RETRY_DELAY);
                    cancelPendingRetryIfReady();
                    scheduleRetryIfReady();
                }
            }
        } else {
            Log.d(Log.TAG_SYNC, "%s: active == newActive.", this);
        }
    } catch (Exception e) {
        Log.e(Log.TAG_SYNC, "Exception in updateActive()", e);
    } finally {
        Log.d(Log.TAG_SYNC, "%s: exit updateActive()", this);
    }
}

/**
 * Queues a revision into the batcher for later processing by processInbox(), then
 * re-evaluates the active state.
 * @exclude
 */
@InterfaceAudience.Private
public void addToInbox(RevisionInternal rev) {
    Log.v(Log.TAG_SYNC, "%s: addToInbox() called, rev: %s", this, rev);
    batcher.queueObject(rev);
    Log.v(Log.TAG_SYNC, "%s: addToInbox() calling updateActive()", this);
    updateActive();
}

/** No-op here; push/pull subclasses override to process a batch of queued revisions. */
@InterfaceAudience.Private
protected void processInbox(RevisionList inbox) {
}

/**
 * Resolves relativePath against the remote URL and dispatches an async HTTP request;
 * a malformed resulting URL is logged and the request is silently dropped.
 * @exclude
 */
@InterfaceAudience.Private
public void sendAsyncRequest(String method, String relativePath, Object body, RemoteRequestCompletionBlock onCompletion) {
    try {
        String urlStr = buildRelativeURLString(relativePath);
        URL url = new URL(urlStr);
        sendAsyncRequest(method, url, body, onCompletion);
    } catch (MalformedURLException e) {
        Log.e(Log.TAG_SYNC, "Malformed URL for async request", e);
    }
}

/**
 * Joins the remote base URL with relativePath, collapsing the duplicate slash that
 * arises when the base ends with "/" and the path starts with "/".
 */
@InterfaceAudience.Private
/* package */ String buildRelativeURLString(String relativePath) {
    // the following code is a band-aid for a system problem in the codebase
    // where it is appending "relative paths" that start with a slash, eg:
    //     http://dotcom/db/ + /relpart == http://dotcom/db/relpart
    // which is not compatible with the way the java url concatenation works.
    String remoteUrlString = remote.toExternalForm();
    if (remoteUrlString.endsWith("/") && relativePath.startsWith("/")) {
        remoteUrlString = remoteUrlString.substring(0, remoteUrlString.length() - 1);
    }
    return remoteUrlString + relativePath;
}

/**
 * Submits a RemoteRequest on the request executor. A pre-completion hook captures the
 * server type from the first "Server" response header; a post-completion hook removes
 * the request from the tracking map. Throws IllegalStateException if the executor has
 * already been terminated.
 * @exclude
 */
@InterfaceAudience.Private
public void sendAsyncRequest(String method, URL url, Object body, final RemoteRequestCompletionBlock onCompletion) {
    final RemoteRequest request = new RemoteRequest(workExecutor, clientFactory, method, url,
            body, getLocalDatabase(), getHeaders(), onCompletion);

    request.setAuthenticator(getAuthenticator());

    request.setOnPreCompletion(new RemoteRequestCompletionBlock() {
        @Override
        public void onCompletion(Object result, Throwable e) {
            if (serverType == null && result instanceof HttpResponse) {
                HttpResponse response = (HttpResponse) result;
                Header serverHeader = response.getFirstHeader("Server");
                if (serverHeader != null) {
                    String serverVersion = serverHeader.getValue();
                    Log.v(Log.TAG_SYNC, "serverVersion: %s", serverVersion);
                    serverType = serverVersion;
                }
            }
        }
    });

    request.setOnPostCompletion(new RemoteRequestCompletionBlock() {
        @Override
        public void onCompletion(Object result, Throwable e) {
            requests.remove(request);
        }
    });

    if (remoteRequestExecutor.isTerminated()) {
        String msg = "sendAsyncRequest called, but remoteRequestExecutor has been terminated";
        throw new IllegalStateException(msg);
    }
    Future future = remoteRequestExecutor.submit(request);
    requests.put(request, future);
}

/**
 * Like sendAsyncRequest but downloads a multipart response (attachments) straight
 * into the given Database; a malformed URL is logged and the request dropped.
 * @exclude
 */
@InterfaceAudience.Private
public void sendAsyncMultipartDownloaderRequest(String method, String relativePath, Object body, Database db, RemoteRequestCompletionBlock onCompletion) {
    try {
        String urlStr = buildRelativeURLString(relativePath);
        URL url = new URL(urlStr);

        RemoteMultipartDownloaderRequest request = new RemoteMultipartDownloaderRequest(
                workExecutor,
                clientFactory,
                method,
                url,
                body,
                db,
                getHeaders(),
                onCompletion);

        request.setAuthenticator(getAuthenticator());

        remoteRequestExecutor.execute(request);
    } catch (MalformedURLException e) {
        Log.e(Log.TAG_SYNC, "Malformed URL for async request", e);
    }
}

/**
 * Uploads a multipart body (document + attachments). Unlike the other senders, a
 * malformed URL here is a programming error and throws IllegalArgumentException.
 * @exclude
 */
@InterfaceAudience.Private
public void sendAsyncMultipartRequest(String method, String relativePath, MultipartEntity multiPartEntity, RemoteRequestCompletionBlock onCompletion) {
    URL url = null;
    try {
        String urlStr = buildRelativeURLString(relativePath);
        url = new URL(urlStr);
    } catch (MalformedURLException e) {
        throw new IllegalArgumentException(e);
    }
    RemoteMultipartRequest request = new RemoteMultipartRequest(
            workExecutor,
            clientFactory,
            method,
            url,
            multiPartEntity,
            getLocalDatabase(),
            getHeaders(),
            onCompletion);

    request.setAuthenticator(getAuthenticator());

    remoteRequestExecutor.execute(request);
}

/**
 * CHECKPOINT STORAGE:
 */

@InterfaceAudience.Private
/* package */ void maybeCreateRemoteDB() {
    // Pusher overrides this to implement the .createTarget option
}

/**
 * This is the _local document ID stored on the remote server to keep track of state.
 * Its ID is based on the local database ID (the private one, to make the result unguessable)
 * and the remote database's URL.
 *
 * @exclude
 */
@InterfaceAudience.Private
public String remoteCheckpointDocID() {

    if (remoteCheckpointDocID != null) {
        return remoteCheckpointDocID;
    } else {

        // TODO: Needs to be consistent with -hasSameSettingsAs: --
        // TODO: If a.remoteCheckpointID == b.remoteCheckpointID then [a hasSameSettingsAs: b]

        if (db == null) {
            return null;
        }

        // canonicalization: make sure it produces the same checkpoint id regardless of
        // ordering of filterparams / docids
        Map<String, Object> filterParamsCanonical = null;
        if (getFilterParams() != null) {
            filterParamsCanonical = new TreeMap<String, Object>(getFilterParams());
        }

        List<String> docIdsSorted = null;
        if (getDocIds() != null) {
            docIdsSorted = new ArrayList<String>(getDocIds());
            Collections.sort(docIdsSorted);
        }

        // use a treemap rather than a dictionary for purposes of canonicalization
        Map<String, Object> spec = new TreeMap<String, Object>();
        spec.put("localUUID", db.privateUUID());
        spec.put("remoteURL", remote.toExternalForm());
        spec.put("push", !isPull());
        spec.put("continuous", isContinuous());
        if (getFilter() != null) {
            spec.put("filter", getFilter());
        }
        if (filterParamsCanonical != null) {
            spec.put("filterParams", filterParamsCanonical);
        }
        if (docIdsSorted != null) {
            spec.put("docids", docIdsSorted);
        }

        byte[] inputBytes = null;
        try {
            inputBytes = db.getManager().getObjectMapper().writeValueAsBytes(spec);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        // SHA-1 of the canonical JSON spec; cached for subsequent calls.
        remoteCheckpointDocID = Misc.TDHexSHA1Digest(inputBytes);
        return remoteCheckpointDocID;
    }
}

/** True iff the throwable is an HTTP 404 response. */
@InterfaceAudience.Private
private boolean is404(Throwable e) {
    if (e instanceof HttpResponseException) {
        return ((HttpResponseException) e).getStatusCode() == 404;
    }
    return false;
}

/**
 * GETs the remote _local checkpoint document, compares its lastSequence against the
 * locally stored one, and starts replicating (from the matching sequence if they agree,
 * from scratch otherwise). A 404 triggers maybeCreateRemoteDB() for pushers.
 * Balances asyncTaskStarted()/asyncTaskFinished(1).
 * @exclude
 */
@InterfaceAudience.Private
public void fetchRemoteCheckpointDoc() {
    lastSequenceChanged = false;
    String checkpointId = remoteCheckpointDocID();
    final String localLastSequence = db.lastSequenceWithCheckpointId(checkpointId);

    Log.v(Log.TAG_SYNC, "%s | %s: fetchRemoteCheckpointDoc() calling asyncTaskStarted()",
            this, Thread.currentThread());
    asyncTaskStarted();
    sendAsyncRequest("GET", "/_local/" + checkpointId, null, new RemoteRequestCompletionBlock() {

        @Override
        public void onCompletion(Object result, Throwable e) {
            try {
                if (e != null && !is404(e)) {
                    Log.w(Log.TAG_SYNC, "%s: error getting remote checkpoint", e, this);
                    setError(e);
                } else {
                    if (e != null && is404(e)) {
                        Log.d(Log.TAG_SYNC, "%s: 404 error getting remote checkpoint %s, calling maybeCreateRemoteDB",
                                this, remoteCheckpointDocID());
                        maybeCreateRemoteDB();
                    }
                    Map<String, Object> response = (Map<String, Object>) result;
                    remoteCheckpoint = response;
                    String remoteLastSequence = null;
                    if (response != null) {
                        remoteLastSequence = (String) response.get("lastSequence");
                    }
                    if (remoteLastSequence != null && remoteLastSequence.equals(localLastSequence)) {
                        lastSequence = localLastSequence;
                        Log.d(Log.TAG_SYNC, "%s: Replicating from lastSequence=%s", this, lastSequence);
                    } else {
                        Log.d(Log.TAG_SYNC, "%s: lastSequence mismatch: I had: %s, remote had: %s",
                                this, localLastSequence, remoteLastSequence);
                    }
                    beginReplicating();
                }
            } finally {
                Log.v(Log.TAG_SYNC, "%s | %s: fetchRemoteCheckpointDoc() calling asyncTaskFinished()",
                        this, Thread.currentThread());
                asyncTaskFinished(1);
            }
        }

    });
}

/**
 * PUTs the current lastSequence into the remote _local checkpoint document.
 * Re-entrancy-safe: if a save is already in flight, sets overdueForSave and returns;
 * the in-flight completion triggers another save. On 404 the checkpoint is reset and
 * re-queued; on 409 the checkpoint is refreshed to get its current _rev.
 * @exclude
 */
@InterfaceAudience.Private
public void saveLastSequence() {
    if (!lastSequenceChanged) {
        return;
    }
    if (savingCheckpoint) {
        // If a save is already in progress, don't do anything. (The completion block will trigger
        // another save after the first one finishes.)
        overdueForSave = true;
        return;
    }

    lastSequenceChanged = false;
    overdueForSave = false;

    Log.d(Log.TAG_SYNC, "%s: saveLastSequence() called. lastSequence: %s", this, lastSequence);

    final Map<String, Object> body = new HashMap<String, Object>();
    if (remoteCheckpoint != null) {
        body.putAll(remoteCheckpoint);
    }
    body.put("lastSequence", lastSequence);

    String remoteCheckpointDocID = remoteCheckpointDocID();
    if (remoteCheckpointDocID == null) {
        Log.w(Log.TAG_SYNC, "%s: remoteCheckpointDocID is null, aborting saveLastSequence()", this);
        return;
    }

    savingCheckpoint = true;
    final String checkpointID = remoteCheckpointDocID;
    Log.d(Log.TAG_SYNC, "%s: put remote _local document.  checkpointID: %s", this, checkpointID);
    sendAsyncRequest("PUT", "/_local/" + checkpointID, body, new RemoteRequestCompletionBlock() {

        @Override
        public void onCompletion(Object result, Throwable e) {
            savingCheckpoint = false;
            if (e != null) {
                Log.w(Log.TAG_SYNC, "%s: Unable to save remote checkpoint", e, this);
            }
            if (db == null) {
                Log.w(Log.TAG_SYNC, "%s: Database is null, ignoring remote checkpoint response", this);
                return;
            }
            if (!db.isOpen()) {
                Log.w(Log.TAG_SYNC, "%s: Database is closed, ignoring remote checkpoint response", this);
                return;
            }
            if (e != null) {
                // Failed to save checkpoint:
                switch (getStatusFromError(e)) {
                    case Status.NOT_FOUND:
                        remoteCheckpoint = null;  // doc deleted or db reset
                        overdueForSave = true;  // try saving again
                        break;
                    case Status.CONFLICT:
                        refreshRemoteCheckpointDoc();
                        break;
                    default:
                        // TODO: On 401 or 403, and this is a pull, remember that remote
                        // TODO: is read-only & don't attempt to read its checkpoint next time.
                        break;
                }
            } else {
                // Saved checkpoint:
                Map<String, Object> response = (Map<String, Object>) result;
                body.put("_rev", response.get("rev"));
                remoteCheckpoint = body;
                db.setLastSequence(lastSequence, checkpointID, !isPull());
            }
            if (overdueForSave) {
                saveLastSequence();
            }
        }

    });
}

@InterfaceAudience.Public
public boolean goOffline() {
    if (!online) {
        return false;
    }
    if (db == null) {
        return false;
    }
    // State flip and request cancellation happen asynchronously on the db thread.
    db.runAsync(new AsyncTask() {
        @Override
        public void run(Database database) {
            Log.d(Log.TAG_SYNC, "%s: Going offline", this);
            online = false;
            stopRemoteRequests();
            updateProgress();
            notifyChangeListeners();
        }
    });
    return true;
}

@InterfaceAudience.Public
public boolean goOnline() {
    if (online) {
        return false;
    }
    if (db == null) {
        return false;
    }
    // Re-enters the online state on the db thread: clears any prior error, rebuilds
    // the request executor, and restarts the session check.
    db.runAsync(new AsyncTask() {
        @Override
        public void run(Database database) {
            Log.d(Log.TAG_SYNC, "%s: Going online", this);
            online = true;

            if (running) {
                lastSequence = null;
                setError(null);
            }

            /*
            Log.d(Log.TAG_SYNC, "%s: Shutting down remoteRequestExecutor", this);
            List<Runnable> tasksAwaitingExecution = remoteRequestExecutor.shutdownNow();
            for (Runnable runnable : tasksAwaitingExecution) {
                Log.d(Log.TAG_SYNC, "%s: runnable: %s", this, runnable);
                if (runnable instanceof RemoteRequest) {
                    RemoteRequest remoteRequest = (RemoteRequest) runnable;
                    Log.v(Log.TAG_SYNC, "%s: request awaiting execution: %s underlying req: %s", this, remoteRequest, remoteRequest.getRequest().getURI());
                }
            }

            boolean succeeded = false;
            try {
                succeeded = remoteRequestExecutor.awaitTermination(30, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Log.e(Log.TAG_SYNC, "%s: timeout remoteRequestExecutor.awaitTermination", this, e);
            }
            Log.d(Log.TAG_SYNC, "%s: remoteRequestExecutor.awaitTermination succeeded: %s", this, succeeded);
            */

            remoteRequestExecutor = Executors.newCachedThreadPool();
            checkSession();
            notifyChangeListeners();
        }
    });
    return true;
}

/** Aborts every in-flight RemoteRequest tracked in the requests map. */
@InterfaceAudience.Private
private void stopRemoteRequests() {
    Log.v(Log.TAG_SYNC, "%s: stopRemoteRequests() cancelling: %d requests", this, requests.size());
    for (RemoteRequest request : requests.keySet()) {
        Log.v(Log.TAG_SYNC, "%s: aborting request: %s underlying req: %s", this, request, request.getRequest().getURI());
        request.abort();
        Log.v(Log.TAG_SYNC, "%s: aborted request", this);
    }
}

/** Derives the public ReplicationStatus from running/online/active flags. */
@InterfaceAudience.Private
/* package */ void updateProgress() {
    if (!isRunning()) {
        status = ReplicationStatus.REPLICATION_STOPPED;
    } else if (!online) {
        status = ReplicationStatus.REPLICATION_OFFLINE;
    } else {
        if (active) {
            status = ReplicationStatus.REPLICATION_ACTIVE;
        } else {
            status = ReplicationStatus.REPLICATION_IDLE;
        }
    }
}

/** Records a new error (reference-compared against the previous one) and notifies listeners. */
@InterfaceAudience.Private
protected void setError(Throwable throwable) {

    // TODO
    /*
    if (error.code == NSURLErrorCancelled && $equal(error.domain, NSURLErrorDomain))
        return;
    */

    if (throwable != error) {
        Log.e(Log.TAG_SYNC, "%s: Progress: set error = %s", this, throwable);
        error = throwable;
        notifyChangeListeners();
    }
}

@InterfaceAudience.Private
protected void revisionFailed() {
    // Remember that some revisions failed to transfer, so we can later retry.
    ++revisionsFailed;
}

/**
 * Runs the configured revisionBodyTransformationBlock on rev, if any. A null result
 * filters the revision out. When the transformer adds attachments, 'revpos' entries
 * are inserted for them. Any transformer exception is logged and the original
 * revision passed through unchanged.
 */
protected RevisionInternal transformRevision(RevisionInternal rev) {
    if(revisionBodyTransformationBlock != null) {
        try {
            final int generation = rev.getGeneration();
            RevisionInternal xformed = revisionBodyTransformationBlock.invoke(rev);
            if (xformed == null)
                return null;
            if (xformed != rev) {
                // Transformer must not change identity or revision history.
                assert(xformed.getDocId().equals(rev.getDocId()));
                assert(xformed.getRevId().equals(rev.getRevId()));
                assert(xformed.getProperties().get("_revisions").equals(rev.getProperties().get("_revisions")));

                if (xformed.getProperties().get("_attachments") != null) {
                    // Insert 'revpos' properties into any attachments added by the callback:
                    RevisionInternal mx = new RevisionInternal(xformed.getProperties(), db);
                    xformed = mx;
                    mx.mutateAttachments(new CollectionUtils.Functor<Map<String,Object>,Map<String,Object>>() {
                        public Map<String, Object> invoke(Map<String, Object> info) {
                            if (info.get("revpos") != null) {
                                return info;
                            }
                            if(info.get("data") == null) {
                                throw new IllegalStateException("Transformer added attachment without adding data");
                            }
                            Map<String,Object> nuInfo = new HashMap<String, Object>(info);
                            nuInfo.put("revpos",generation);
                            return nuInfo;
                        }
                    });
                }
                rev = xformed;
            }
        }catch (Exception e) {
            // NOTE(review): format string has an unmatched quote ("'%s") — likely a typo
            // in the log message; confirm before changing.
            Log.w(Log.TAG_SYNC,"%s: Exception transforming a revision of doc '%s", e, this, rev.getDocId());
        }
    }
    return rev;
}

/**
 * Called after a continuous replication has gone idle, but it failed to transfer some revisions
 * and so wants to try again in a minute. Should be overridden by subclasses.
*/ @InterfaceAudience.Private protected void retry() { setError(null); } @InterfaceAudience.Private protected void retryIfReady() { if (!running) { return; } if (online) { Log.d(Log.TAG_SYNC, "%s: RETRYING, to transfer missed revisions", this); revisionsFailed = 0; cancelPendingRetryIfReady(); retry(); } else { scheduleRetryIfReady(); } } @InterfaceAudience.Private protected void cancelPendingRetryIfReady() { if (retryIfReadyFuture != null && retryIfReadyFuture.isCancelled() == false) { retryIfReadyFuture.cancel(true); } } @InterfaceAudience.Private protected void scheduleRetryIfReady() { retryIfReadyFuture = workExecutor.schedule(new Runnable() { @Override public void run() { retryIfReady(); } }, RETRY_DELAY, TimeUnit.SECONDS); } @InterfaceAudience.Private private int getStatusFromError(Throwable t) { if (t instanceof CouchbaseLiteException) { CouchbaseLiteException couchbaseLiteException = (CouchbaseLiteException) t; return couchbaseLiteException.getCBLStatus().getCode(); } return Status.UNKNOWN; } /** * Variant of -fetchRemoveCheckpointDoc that's used while replication is running, to reload the * checkpoint to get its current revision number, if there was an error saving it. */ @InterfaceAudience.Private private void refreshRemoteCheckpointDoc() { Log.d(Log.TAG_SYNC, "%s: Refreshing remote checkpoint to get its _rev...", this); savingCheckpoint = true; Log.v(Log.TAG_SYNC, "%s | %s: refreshRemoteCheckpointDoc() calling asyncTaskStarted()", this, Thread.currentThread()); asyncTaskStarted(); sendAsyncRequest("GET", "/_local/" + remoteCheckpointDocID(), null, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { try { if (db == null) { Log.w(Log.TAG_SYNC, "%s: db == null while refreshing remote checkpoint. 
aborting", this); return; } savingCheckpoint = false; if (e != null && getStatusFromError(e) != Status.NOT_FOUND) { Log.e(Log.TAG_SYNC, "%s: Error refreshing remote checkpoint", e, this); } else { Log.d(Log.TAG_SYNC, "%s: Refreshed remote checkpoint: %s", this, result); remoteCheckpoint = (Map<String, Object>) result; lastSequenceChanged = true; saveLastSequence(); // try saving again } } finally { Log.v(Log.TAG_SYNC, "%s | %s: refreshRemoteCheckpointDoc() calling asyncTaskFinished()", this, Thread.currentThread()); asyncTaskFinished(1); } } }); } @InterfaceAudience.Private protected Status statusFromBulkDocsResponseItem(Map<String, Object> item) { try { if (!item.containsKey("error")) { return new Status(Status.OK); } String errorStr = (String) item.get("error"); if (errorStr == null || errorStr.isEmpty()) { return new Status(Status.OK); } // 'status' property is nonstandard; TouchDB returns it, others don't. String statusString = (String) item.get("status"); int status = Integer.parseInt(statusString); if (status >= 400) { return new Status(status); } // If no 'status' present, interpret magic hardcoded CouchDB error strings: if (errorStr.equalsIgnoreCase("unauthorized")) { return new Status(Status.UNAUTHORIZED); } else if (errorStr.equalsIgnoreCase("forbidden")) { return new Status(Status.FORBIDDEN); } else if (errorStr.equalsIgnoreCase("conflict")) { return new Status(Status.CONFLICT); } else { return new Status(Status.UPSTREAM_ERROR); } } catch (Exception e) { Log.e(Database.TAG, "Exception getting status from " + item, e); } return new Status(Status.OK); } @Override @InterfaceAudience.Private public void networkReachable() { goOnline(); } @Override @InterfaceAudience.Private public void networkUnreachable() { goOffline(); } @InterfaceAudience.Private /* package */ boolean serverIsSyncGatewayVersion(String minVersion) { String prefix = "Couchbase Sync Gateway/"; if (serverType == null) { return false; } else { if (serverType.startsWith(prefix)) { String 
versionString = serverType.substring(prefix.length()); return versionString.compareTo(minVersion) >= 0; } } return false; } @InterfaceAudience.Private /* package */ void setServerType(String serverType) { this.serverType = serverType; } @InterfaceAudience.Private /* package */ HttpClientFactory getClientFactory() { return clientFactory; } }
src/main/java/com/couchbase/lite/replicator/Replication.java
package com.couchbase.lite.replicator; import com.couchbase.lite.AsyncTask; import com.couchbase.lite.CouchbaseLiteException; import com.couchbase.lite.Database; import com.couchbase.lite.Manager; import com.couchbase.lite.Misc; import com.couchbase.lite.NetworkReachabilityListener; import com.couchbase.lite.RevisionList; import com.couchbase.lite.Status; import com.couchbase.lite.auth.Authenticator; import com.couchbase.lite.auth.AuthenticatorImpl; import com.couchbase.lite.auth.Authorizer; import com.couchbase.lite.auth.FacebookAuthorizer; import com.couchbase.lite.auth.PersonaAuthorizer; import com.couchbase.lite.internal.InterfaceAudience; import com.couchbase.lite.internal.RevisionInternal; import com.couchbase.lite.support.BatchProcessor; import com.couchbase.lite.support.Batcher; import com.couchbase.lite.support.CouchbaseLiteHttpClientFactory; import com.couchbase.lite.support.HttpClientFactory; import com.couchbase.lite.support.PersistentCookieStore; import com.couchbase.lite.support.RemoteMultipartDownloaderRequest; import com.couchbase.lite.support.RemoteMultipartRequest; import com.couchbase.lite.support.RemoteRequest; import com.couchbase.lite.support.RemoteRequestCompletionBlock; import com.couchbase.lite.util.CollectionUtils; import com.couchbase.lite.util.Log; import com.couchbase.lite.util.TextUtils; import com.couchbase.lite.util.URIUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.CookieStore; import org.apache.http.client.HttpResponseException; import org.apache.http.cookie.Cookie; import org.apache.http.entity.mime.MultipartEntity; import org.apache.http.impl.cookie.BasicClientCookie2; import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import 
java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; /** * A Couchbase Lite pull or push Replication between a local and a remote Database. */ public abstract class Replication implements NetworkReachabilityListener { private static int lastSessionID = 0; protected boolean continuous; protected String filterName; protected ScheduledExecutorService workExecutor; protected Database db; protected URL remote; protected String lastSequence; protected boolean lastSequenceChanged; protected Map<String, Object> remoteCheckpoint; protected boolean savingCheckpoint; protected boolean overdueForSave; protected boolean running; protected boolean active; protected Throwable error; protected String sessionID; protected Batcher<RevisionInternal> batcher; protected int asyncTaskCount; protected AtomicInteger completedChangesCount; private AtomicInteger changesCount; protected boolean online; protected HttpClientFactory clientFactory; private final List<ChangeListener> changeListeners; protected List<String> documentIDs; protected Map<String, Object> filterParams; protected ExecutorService remoteRequestExecutor; protected Authenticator authenticator; private ReplicationStatus status = ReplicationStatus.REPLICATION_STOPPED; protected Map<String, Object> requestHeaders; private int revisionsFailed; private ScheduledFuture retryIfReadyFuture; private final Map<RemoteRequest, Future> requests; private String serverType; private String remoteCheckpointDocID; private CollectionUtils.Functor<Map<String,Object>,Map<String,Object>> propertiesTransformationBlock; protected 
CollectionUtils.Functor<RevisionInternal,RevisionInternal> revisionBodyTransformationBlock; protected static final int PROCESSOR_DELAY = 500; protected static final int INBOX_CAPACITY = 100; protected static final int RETRY_DELAY = 60; protected static final int EXECUTOR_THREAD_POOL_SIZE = 5; /** * @exclude */ public static final String BY_CHANNEL_FILTER_NAME = "sync_gateway/bychannel"; /** * @exclude */ public static final String CHANNELS_QUERY_PARAM = "channels"; /** * @exclude */ public static final String REPLICATOR_DATABASE_NAME = "_replicator"; /** * Options for what metadata to include in document bodies */ public enum ReplicationStatus { /** The replication is finished or hit a fatal error. */ REPLICATION_STOPPED, /** The remote host is currently unreachable. */ REPLICATION_OFFLINE, /** Continuous replication is caught up and waiting for more changes.*/ REPLICATION_IDLE, /** The replication is actively transferring data. */ REPLICATION_ACTIVE } /** * Private Constructor * @exclude */ @InterfaceAudience.Private /* package */ Replication(Database db, URL remote, boolean continuous, ScheduledExecutorService workExecutor) { this(db, remote, continuous, null, workExecutor); } /** * Private Constructor * @exclude */ @InterfaceAudience.Private /* package */ Replication(Database db, URL remote, boolean continuous, HttpClientFactory clientFactory, ScheduledExecutorService workExecutor) { this.db = db; this.continuous = continuous; this.workExecutor = workExecutor; this.remote = remote; this.remoteRequestExecutor = Executors.newFixedThreadPool(EXECUTOR_THREAD_POOL_SIZE); this.changeListeners = new CopyOnWriteArrayList<ChangeListener>(); this.online = true; this.requestHeaders = new HashMap<String, Object>(); this.requests = new ConcurrentHashMap<RemoteRequest, Future>(); this.completedChangesCount = new AtomicInteger(0); this.changesCount = new AtomicInteger(0); if (remote.getQuery() != null && !remote.getQuery().isEmpty()) { URI uri = 
URI.create(remote.toExternalForm()); String personaAssertion = URIUtils.getQueryParameter(uri, PersonaAuthorizer.QUERY_PARAMETER); if (personaAssertion != null && !personaAssertion.isEmpty()) { String email = PersonaAuthorizer.registerAssertion(personaAssertion); PersonaAuthorizer authorizer = new PersonaAuthorizer(email); setAuthenticator(authorizer); } String facebookAccessToken = URIUtils.getQueryParameter(uri, FacebookAuthorizer.QUERY_PARAMETER); if (facebookAccessToken != null && !facebookAccessToken.isEmpty()) { String email = URIUtils.getQueryParameter(uri, FacebookAuthorizer.QUERY_PARAMETER_EMAIL); FacebookAuthorizer authorizer = new FacebookAuthorizer(email); URL remoteWithQueryRemoved = null; try { remoteWithQueryRemoved = new URL(remote.getProtocol(), remote.getHost(), remote.getPort(), remote.getPath()); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } authorizer.registerAccessToken(facebookAccessToken, email, remoteWithQueryRemoved.toExternalForm()); setAuthenticator(authorizer); } // we need to remove the query from the URL, since it will cause problems when // communicating with sync gw / couchdb try { this.remote = new URL(remote.getProtocol(), remote.getHost(), remote.getPort(), remote.getPath()); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } } batcher = new Batcher<RevisionInternal>(workExecutor, INBOX_CAPACITY, PROCESSOR_DELAY, new BatchProcessor<RevisionInternal>() { @Override public void process(List<RevisionInternal> inbox) { try { Log.v(Log.TAG_SYNC, "*** %s: BEGIN processInbox (%d sequences)", this, inbox.size()); processInbox(new RevisionList(inbox)); Log.v(Log.TAG_SYNC, "*** %s: END processInbox (lastSequence=%s)", this, lastSequence); Log.v(Log.TAG_SYNC, "%s: batcher calling updateActive()", this); updateActive(); } catch (Exception e) { Log.e(Log.TAG_SYNC,"ERROR: processInbox failed: ",e); throw new RuntimeException(e); } } }); setClientFactory(clientFactory); } /** * Set the 
HTTP client factory if one was passed in, or use the default * set in the manager if available. * @param clientFactory */ @InterfaceAudience.Private protected void setClientFactory(HttpClientFactory clientFactory) { Manager manager = null; if (this.db != null) { manager = this.db.getManager(); } HttpClientFactory managerClientFactory = null; if (manager != null) { managerClientFactory = manager.getDefaultHttpClientFactory(); } if (clientFactory != null) { this.clientFactory = clientFactory; } else { if (managerClientFactory != null) { this.clientFactory = managerClientFactory; } else { PersistentCookieStore cookieStore = db.getPersistentCookieStore(); this.clientFactory = new CouchbaseLiteHttpClientFactory(cookieStore); } } } /** * Get the local database which is the source or target of this replication */ @InterfaceAudience.Public public Database getLocalDatabase() { return db; } /** * Get the remote URL which is the source or target of this replication */ @InterfaceAudience.Public public URL getRemoteUrl() { return remote; } /** * Is this a pull replication? (Eg, it pulls data from Sync Gateway -> Device running CBL?) */ @InterfaceAudience.Public public abstract boolean isPull(); /** * Should the target database be created if it doesn't already exist? (Defaults to NO). */ @InterfaceAudience.Public public abstract boolean shouldCreateTarget(); /** * Set whether the target database be created if it doesn't already exist? */ @InterfaceAudience.Public public abstract void setCreateTarget(boolean createTarget); /** * Should the replication operate continuously, copying changes as soon as the * source database is modified? (Defaults to NO). */ @InterfaceAudience.Public public boolean isContinuous() { return continuous; } /** * Set whether the replication should operate continuously. 
*/ @InterfaceAudience.Public public void setContinuous(boolean continuous) { if (!isRunning()) { this.continuous = continuous; } } /** * Name of an optional filter function to run on the source server. Only documents for * which the function returns true are replicated. * * For a pull replication, the name looks like "designdocname/filtername". * For a push replication, use the name under which you registered the filter with the Database. */ @InterfaceAudience.Public public String getFilter() { return filterName; } /** * Set the filter to be used by this replication */ @InterfaceAudience.Public public void setFilter(String filterName) { this.filterName = filterName; } /** * Parameters to pass to the filter function. Should map strings to strings. */ @InterfaceAudience.Public public Map<String, Object> getFilterParams() { return filterParams; } /** * Set parameters to pass to the filter function. */ @InterfaceAudience.Public public void setFilterParams(Map<String, Object> filterParams) { this.filterParams = filterParams; } /** * List of Sync Gateway channel names to filter by; a nil value means no filtering, i.e. all * available channels will be synced. Only valid for pull replications whose source database * is on a Couchbase Sync Gateway server. (This is a convenience that just reads or * changes the values of .filter and .query_params.) 
*/ @InterfaceAudience.Public public List<String> getChannels() { if (filterParams == null || filterParams.isEmpty()) { return new ArrayList<String>(); } String params = (String) filterParams.get(CHANNELS_QUERY_PARAM); if (!isPull() || getFilter() == null || !getFilter().equals(BY_CHANNEL_FILTER_NAME) || params == null || params.isEmpty()) { return new ArrayList<String>(); } String[] paramsArray = params.split(","); return new ArrayList<String>(Arrays.asList(paramsArray)); } /** * Set the list of Sync Gateway channel names */ @InterfaceAudience.Public public void setChannels(List<String> channels) { if (channels != null && !channels.isEmpty()) { if (!isPull()) { Log.w(Log.TAG_SYNC, "filterChannels can only be set in pull replications"); return; } setFilter(BY_CHANNEL_FILTER_NAME); Map<String, Object> filterParams = new HashMap<String, Object>(); filterParams.put(CHANNELS_QUERY_PARAM, TextUtils.join(",", channels)); setFilterParams(filterParams); } else if (getFilter().equals(BY_CHANNEL_FILTER_NAME)) { setFilter(null); setFilterParams(null); } } /** * Extra HTTP headers to send in all requests to the remote server. * Should map strings (header names) to strings. */ @InterfaceAudience.Public public Map<String, Object> getHeaders() { return requestHeaders; } /** * Set Extra HTTP headers to be sent in all requests to the remote server. */ @InterfaceAudience.Public public void setHeaders(Map<String, Object> requestHeadersParam) { if (requestHeadersParam != null && !requestHeaders.equals(requestHeadersParam)) { requestHeaders = requestHeadersParam; } } /** * Gets the documents to specify as part of the replication. */ @InterfaceAudience.Public public List<String> getDocIds() { return documentIDs; } /** * Sets the documents to specify as part of the replication. */ @InterfaceAudience.Public public void setDocIds(List<String> docIds) { documentIDs = docIds; } /** * The replication's current state, one of {stopped, offline, idle, active}. 
*/ @InterfaceAudience.Public public ReplicationStatus getStatus() { return status; } /** * The number of completed changes processed, if the task is active, else 0 (observable). */ @InterfaceAudience.Public public int getCompletedChangesCount() { return completedChangesCount.get(); } /** * The total number of changes to be processed, if the task is active, else 0 (observable). */ @InterfaceAudience.Public public int getChangesCount() { return changesCount.get(); } /** * True while the replication is running, False if it's stopped. * Note that a continuous replication never actually stops; it only goes idle waiting for new * data to appear. */ @InterfaceAudience.Public public boolean isRunning() { return running; } /** * The error status of the replication, or null if there have not been any errors since * it started. */ @InterfaceAudience.Public public Throwable getLastError() { return error; } /** * Starts the replication, asynchronously. */ @InterfaceAudience.Public public void start() { if (!db.isOpen()) { // Race condition: db closed before replication starts Log.w(Log.TAG_SYNC, "Not starting replication because db.isOpen() returned false."); return; } if (running) { return; } db.addReplication(this); db.addActiveReplication(this); final CollectionUtils.Functor<Map<String,Object>,Map<String,Object>> xformer = propertiesTransformationBlock; if (xformer != null) { revisionBodyTransformationBlock = new CollectionUtils.Functor<RevisionInternal, RevisionInternal>() { @Override public RevisionInternal invoke(RevisionInternal rev) { Map<String,Object> properties = rev.getProperties(); Map<String, Object> xformedProperties = xformer.invoke(properties); if (xformedProperties == null) { rev = null; } else if (xformedProperties != properties) { assert(xformedProperties != null); assert(xformedProperties.get("_id").equals(properties.get("_id"))); assert(xformedProperties.get("_rev").equals(properties.get("_rev"))); RevisionInternal nuRev = new 
RevisionInternal(rev.getProperties(), db); nuRev.setProperties(xformedProperties); rev = nuRev; } return rev; } }; } this.sessionID = String.format("repl%03d", ++lastSessionID); Log.v(Log.TAG_SYNC, "%s: STARTING ...", this); running = true; lastSequence = null; checkSession(); db.getManager().getContext().getNetworkReachabilityManager().addNetworkReachabilityListener(this); } /** * Stops replication, asynchronously. */ @InterfaceAudience.Public public void stop() { if (!running) { return; } Log.v(Log.TAG_SYNC, "%s: STOPPING...", this); if (batcher != null) { batcher.clear(); // no sense processing any pending changes } else { Log.v(Log.TAG_SYNC, "%s: stop() called, not calling batcher.clear() since it's null"); } continuous = false; stopRemoteRequests(); cancelPendingRetryIfReady(); if (db != null) { db.forgetReplication(this); } else { Log.v(Log.TAG_SYNC, "%s: stop() called, not calling db.forgetReplication() since it's null"); } if (running && asyncTaskCount <= 0) { Log.v(Log.TAG_SYNC, "%s: calling stopped()", this); stopped(); } else { Log.v(Log.TAG_SYNC, "%s: not calling stopped(). running: %s asyncTaskCount: %d", this, running, asyncTaskCount); } } /** * Restarts a completed or failed replication. */ @InterfaceAudience.Public public void restart() { // TODO: add the "started" flag and check it here stop(); start(); } /** * Adds a change delegate that will be called whenever the Replication changes. */ @InterfaceAudience.Public public void addChangeListener(ChangeListener changeListener) { changeListeners.add(changeListener); } /** * Return a string representation of this replication. * * The credentials will be masked in order to avoid passwords leaking into logs. */ @Override @InterfaceAudience.Public public String toString() { String maskedRemoteWithoutCredentials = (remote != null ? 
remote.toExternalForm() : ""); maskedRemoteWithoutCredentials = maskedRemoteWithoutCredentials.replaceAll("://.*:.*@", "://---:---@"); String name = getClass().getSimpleName() + "@" + Integer.toHexString(hashCode()) + "[" + maskedRemoteWithoutCredentials + "]"; return name; } /** * Sets an HTTP cookie for the Replication. * * @param name The name of the cookie. * @param value The value of the cookie. * @param path The path attribute of the cookie. If null or empty, will use remote.getPath() * @param maxAge The maxAge, in milliseconds, that this cookie should be valid for. * @param secure Whether the cookie should only be sent using a secure protocol (e.g. HTTPS). * @param httpOnly (ignored) Whether the cookie should only be used when transmitting HTTP, or HTTPS, requests thus restricting access from other, non-HTTP APIs. */ @InterfaceAudience.Public public void setCookie(String name, String value, String path, long maxAge, boolean secure, boolean httpOnly) { Date now = new Date(); Date expirationDate = new Date(now.getTime() + maxAge); setCookie(name, value, path, expirationDate, secure, httpOnly); } /** * Sets an HTTP cookie for the Replication. * * @param name The name of the cookie. * @param value The value of the cookie. * @param path The path attribute of the cookie. If null or empty, will use remote.getPath() * @param expirationDate The expiration date of the cookie. * @param secure Whether the cookie should only be sent using a secure protocol (e.g. HTTPS). * @param httpOnly (ignored) Whether the cookie should only be used when transmitting HTTP, or HTTPS, requests thus restricting access from other, non-HTTP APIs. 
*/ @InterfaceAudience.Public public void setCookie(String name, String value, String path, Date expirationDate, boolean secure, boolean httpOnly) { if (remote == null) { throw new IllegalStateException("Cannot setCookie since remote == null"); } BasicClientCookie2 cookie = new BasicClientCookie2(name, value); cookie.setDomain(remote.getHost()); if (path != null && path.length() > 0) { cookie.setPath(path); } else { cookie.setPath(remote.getPath()); } cookie.setExpiryDate(expirationDate); cookie.setSecure(secure); List<Cookie> cookies = Arrays.asList((Cookie)cookie); this.clientFactory.addCookies(cookies); } /** * Deletes an HTTP cookie for the Replication. * * @param name The name of the cookie. */ @InterfaceAudience.Public public void deleteCookie(String name) { this.clientFactory.deleteCookie(name); } /** * The type of event raised by a Replication when any of the following * properties change: mode, running, error, completed, total. */ @InterfaceAudience.Public public static class ChangeEvent { private Replication source; public ChangeEvent(Replication source) { this.source = source; } public Replication getSource() { return source; } } /** * A delegate that can be used to listen for Replication changes. */ @InterfaceAudience.Public public static interface ChangeListener { public void changed(ChangeEvent event); } /** * Removes the specified delegate as a listener for the Replication change event. 
*/ @InterfaceAudience.Public public void removeChangeListener(ChangeListener changeListener) { changeListeners.remove(changeListener); } /** * Set the Authenticator used for authenticating with the Sync Gateway */ @InterfaceAudience.Public public void setAuthenticator(Authenticator authenticator) { this.authenticator = authenticator; } /** * Get the Authenticator used for authenticating with the Sync Gateway */ @InterfaceAudience.Public public Authenticator getAuthenticator() { return authenticator; } /** * @exclude */ @InterfaceAudience.Private public void databaseClosing() { saveLastSequence(); stop(); clearDbRef(); } /** * If we're in the middle of saving the checkpoint and waiting for a response, by the time the * response arrives _db will be nil, so there won't be any way to save the checkpoint locally. * To avoid that, pre-emptively save the local checkpoint now. * * @exclude */ private void clearDbRef() { if (savingCheckpoint && lastSequence != null && db != null) { db.setLastSequence(lastSequence, remoteCheckpointDocID(), !isPull()); db = null; } } /** * @exclude */ @InterfaceAudience.Private public String getLastSequence() { return lastSequence; } /** * @exclude */ @InterfaceAudience.Private public void setLastSequence(String lastSequenceIn) { if (lastSequenceIn != null && !lastSequenceIn.equals(lastSequence)) { Log.v(Log.TAG_SYNC, "%s: Setting lastSequence to %s from(%s)", this, lastSequenceIn, lastSequence ); lastSequence = lastSequenceIn; if (!lastSequenceChanged) { lastSequenceChanged = true; workExecutor.schedule(new Runnable() { @Override public void run() { saveLastSequence(); } }, 2 * 1000, TimeUnit.MILLISECONDS); } } } @InterfaceAudience.Private /* package */ void addToCompletedChangesCount(int delta) { int previousVal = this.completedChangesCount.getAndAdd(delta); Log.v(Log.TAG_SYNC, "%s: Incrementing completedChangesCount count from %s by adding %d -> %d", this, previousVal, delta, completedChangesCount.get()); notifyChangeListeners(); } 
@InterfaceAudience.Private /* package */ void addToChangesCount(int delta) { int previousVal = this.changesCount.getAndAdd(delta); if (changesCount.get() < 0) { Log.w(Log.TAG_SYNC, "Changes count is negative, this could indicate an error"); } Log.v(Log.TAG_SYNC, "%s: Incrementing changesCount count from %s by adding %d -> %d", this, previousVal, delta, changesCount.get()); notifyChangeListeners(); } /** * @exclude */ @InterfaceAudience.Private public String getSessionID() { return sessionID; } @InterfaceAudience.Private protected void checkSession() { // REVIEW : This is not in line with the iOS implementation if (getAuthenticator() != null && ((AuthenticatorImpl)getAuthenticator()).usesCookieBasedLogin()) { checkSessionAtPath("/_session"); } else { fetchRemoteCheckpointDoc(); } } @InterfaceAudience.Private protected void checkSessionAtPath(final String sessionPath) { Log.v(Log.TAG_SYNC, "%s | %s: checkSessionAtPath() calling asyncTaskStarted()", this, Thread.currentThread()); asyncTaskStarted(); sendAsyncRequest("GET", sessionPath, null, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable error) { try { if (error != null) { // If not at /db/_session, try CouchDB location /_session if (error instanceof HttpResponseException && ((HttpResponseException) error).getStatusCode() == 404 && sessionPath.equalsIgnoreCase("/_session")) { checkSessionAtPath("_session"); return; } Log.e(Log.TAG_SYNC, this + ": Session check failed", error); setError(error); } else { Map<String, Object> response = (Map<String, Object>) result; Map<String, Object> userCtx = (Map<String, Object>) response.get("userCtx"); String username = (String) userCtx.get("name"); if (username != null && username.length() > 0) { Log.d(Log.TAG_SYNC, "%s Active session, logged in as %s", this, username); fetchRemoteCheckpointDoc(); } else { Log.d(Log.TAG_SYNC, "%s No active session, going to login", this); login(); } } } finally { Log.v(Log.TAG_SYNC, "%s | %s: 
checkSessionAtPath() calling asyncTaskFinished()", this, Thread.currentThread()); asyncTaskFinished(1); } } }); } /** * @exclude */ @InterfaceAudience.Private public abstract void beginReplicating(); @InterfaceAudience.Private protected void stopped() { Log.v(Log.TAG_SYNC, "%s: STOPPED", this); running = false; notifyChangeListeners(); saveLastSequence(); batcher = null; if (db != null) { db.getManager().getContext().getNetworkReachabilityManager().removeNetworkReachabilityListener(this); } clearDbRef(); // db no longer tracks me so it won't notify me when it closes; clear ref now } @InterfaceAudience.Private private void notifyChangeListeners() { updateProgress(); for (ChangeListener listener : changeListeners) { ChangeEvent changeEvent = new ChangeEvent(this); listener.changed(changeEvent); } } @InterfaceAudience.Private protected void login() { Map<String, String> loginParameters = ((AuthenticatorImpl)getAuthenticator()).loginParametersForSite(remote); if (loginParameters == null) { Log.d(Log.TAG_SYNC, "%s: %s has no login parameters, so skipping login", this, getAuthenticator()); fetchRemoteCheckpointDoc(); return; } final String loginPath = ((AuthenticatorImpl)getAuthenticator()).loginPathForSite(remote); Log.d(Log.TAG_SYNC, "%s: Doing login with %s at %s", this, getAuthenticator().getClass(), loginPath); Log.v(Log.TAG_SYNC, "%s | %s: login() calling asyncTaskStarted()", this, Thread.currentThread()); asyncTaskStarted(); sendAsyncRequest("POST", loginPath, loginParameters, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { try { if (e != null) { Log.d(Log.TAG_SYNC, "%s: Login failed for path: %s", this, loginPath); setError(e); } else { Log.v(Log.TAG_SYNC, "%s: Successfully logged in!", this); fetchRemoteCheckpointDoc(); } } finally { Log.v(Log.TAG_SYNC, "%s | %s: login() calling asyncTaskFinished()", this, Thread.currentThread()); asyncTaskFinished(1); } } }); } /** * @exclude */ @InterfaceAudience.Private 
public synchronized void asyncTaskStarted() { Log.v(Log.TAG_SYNC, "%s: asyncTaskStarted %d -> %d", this, this.asyncTaskCount, this.asyncTaskCount + 1); if (asyncTaskCount++ == 0) { Log.v(Log.TAG_SYNC, "%s: asyncTaskStarted() calling updateActive()", this); updateActive(); } } /** * @exclude */ @InterfaceAudience.Private public synchronized void asyncTaskFinished(int numTasks) { Log.v(Log.TAG_SYNC, "%s: asyncTaskFinished %d -> %d", this, this.asyncTaskCount, this.asyncTaskCount - numTasks); this.asyncTaskCount -= numTasks; assert(asyncTaskCount >= 0); if (asyncTaskCount == 0) { Log.v(Log.TAG_SYNC, "%s: asyncTaskFinished() calling updateActive()", this); updateActive(); } } /** * @exclude */ @InterfaceAudience.Private public void updateActive() { try { int batcherCount = 0; if (batcher != null) { batcherCount = batcher.count(); } else { Log.w(Log.TAG_SYNC, "%s: batcher object is null.", this); } boolean newActive = batcherCount > 0 || asyncTaskCount > 0; Log.d(Log.TAG_SYNC, "%s: updateActive() called. 
active: %s, newActive: %s batcherCount: %d, asyncTaskCount: %d", this, active, newActive, batcherCount, asyncTaskCount); if (active != newActive) { Log.d(Log.TAG_SYNC, "%s: Progress: set active = %s asyncTaskCount: %d batcherCount: %d", this, newActive, asyncTaskCount, batcherCount); active = newActive; notifyChangeListeners(); if (!active) { if (!continuous) { Log.d(Log.TAG_SYNC, "%s since !continuous, calling stopped()", this); stopped(); } else if (error != null) /*(revisionsFailed > 0)*/ { Log.d(Log.TAG_SYNC, "%s: Failed to xfer %d revisions, will retry in %d sec", this, revisionsFailed, RETRY_DELAY); cancelPendingRetryIfReady(); scheduleRetryIfReady(); } } } else { Log.d(Log.TAG_SYNC, "%s: active == newActive.", this); } } catch (Exception e) { Log.e(Log.TAG_SYNC, "Exception in updateActive()", e); } finally { Log.d(Log.TAG_SYNC, "%s: exit updateActive()", this); } } /** * @exclude */ @InterfaceAudience.Private public void addToInbox(RevisionInternal rev) { Log.v(Log.TAG_SYNC, "%s: addToInbox() called, rev: %s", this, rev); batcher.queueObject(rev); Log.v(Log.TAG_SYNC, "%s: addToInbox() calling updateActive()", this); updateActive(); } @InterfaceAudience.Private protected void processInbox(RevisionList inbox) { } /** * @exclude */ @InterfaceAudience.Private public void sendAsyncRequest(String method, String relativePath, Object body, RemoteRequestCompletionBlock onCompletion) { try { String urlStr = buildRelativeURLString(relativePath); URL url = new URL(urlStr); sendAsyncRequest(method, url, body, onCompletion); } catch (MalformedURLException e) { Log.e(Log.TAG_SYNC, "Malformed URL for async request", e); } } @InterfaceAudience.Private /* package */ String buildRelativeURLString(String relativePath) { // the following code is a band-aid for a system problem in the codebase // where it is appending "relative paths" that start with a slash, eg: // http://dotcom/db/ + /relpart == http://dotcom/db/relpart // which is not compatible with the way the java url 
concatonation works. String remoteUrlString = remote.toExternalForm(); if (remoteUrlString.endsWith("/") && relativePath.startsWith("/")) { remoteUrlString = remoteUrlString.substring(0, remoteUrlString.length() - 1); } return remoteUrlString + relativePath; } /** * @exclude */ @InterfaceAudience.Private public void sendAsyncRequest(String method, URL url, Object body, final RemoteRequestCompletionBlock onCompletion) { final RemoteRequest request = new RemoteRequest(workExecutor, clientFactory, method, url, body, getLocalDatabase(), getHeaders(), onCompletion); request.setAuthenticator(getAuthenticator()); request.setOnPreCompletion(new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { if (serverType == null && result instanceof HttpResponse) { HttpResponse response = (HttpResponse) result; Header serverHeader = response.getFirstHeader("Server"); if (serverHeader != null) { String serverVersion = serverHeader.getValue(); Log.v(Log.TAG_SYNC, "serverVersion: %s", serverVersion); serverType = serverVersion; } } } }); request.setOnPostCompletion(new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { requests.remove(request); } }); if (remoteRequestExecutor.isTerminated()) { String msg = "sendAsyncRequest called, but remoteRequestExecutor has been terminated"; throw new IllegalStateException(msg); } Future future = remoteRequestExecutor.submit(request); requests.put(request, future); } /** * @exclude */ @InterfaceAudience.Private public void sendAsyncMultipartDownloaderRequest(String method, String relativePath, Object body, Database db, RemoteRequestCompletionBlock onCompletion) { try { String urlStr = buildRelativeURLString(relativePath); URL url = new URL(urlStr); RemoteMultipartDownloaderRequest request = new RemoteMultipartDownloaderRequest( workExecutor, clientFactory, method, url, body, db, getHeaders(), onCompletion); request.setAuthenticator(getAuthenticator()); 
remoteRequestExecutor.execute(request); } catch (MalformedURLException e) { Log.e(Log.TAG_SYNC, "Malformed URL for async request", e); } } /** * @exclude */ @InterfaceAudience.Private public void sendAsyncMultipartRequest(String method, String relativePath, MultipartEntity multiPartEntity, RemoteRequestCompletionBlock onCompletion) { URL url = null; try { String urlStr = buildRelativeURLString(relativePath); url = new URL(urlStr); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } RemoteMultipartRequest request = new RemoteMultipartRequest( workExecutor, clientFactory, method, url, multiPartEntity, getLocalDatabase(), getHeaders(), onCompletion); request.setAuthenticator(getAuthenticator()); remoteRequestExecutor.execute(request); } /** * CHECKPOINT STORAGE: * */ @InterfaceAudience.Private /* package */ void maybeCreateRemoteDB() { // Pusher overrides this to implement the .createTarget option } /** * This is the _local document ID stored on the remote server to keep track of state. * Its ID is based on the local database ID (the private one, to make the result unguessable) * and the remote database's URL. 
* * @exclude */ @InterfaceAudience.Private public String remoteCheckpointDocID() { if (remoteCheckpointDocID != null) { return remoteCheckpointDocID; } else { // TODO: Needs to be consistent with -hasSameSettingsAs: -- // TODO: If a.remoteCheckpointID == b.remoteCheckpointID then [a hasSameSettingsAs: b] if (db == null) { return null; } // canonicalization: make sure it produces the same checkpoint id regardless of // ordering of filterparams / docids Map<String, Object> filterParamsCanonical = null; if (getFilterParams() != null) { filterParamsCanonical = new TreeMap<String, Object>(getFilterParams()); } List<String> docIdsSorted = null; if (getDocIds() != null) { docIdsSorted = new ArrayList<String>(getDocIds()); Collections.sort(docIdsSorted); } // use a treemap rather than a dictionary for purposes of canonicalization Map<String, Object> spec = new TreeMap<String, Object>(); spec.put("localUUID", db.privateUUID()); spec.put("remoteURL", remote.toExternalForm()); spec.put("push", !isPull()); spec.put("continuous", isContinuous()); if (getFilter() != null) { spec.put("filter", getFilter()); } if (filterParamsCanonical != null) { spec.put("filterParams", filterParamsCanonical); } if (docIdsSorted != null) { spec.put("docids", docIdsSorted); } byte[] inputBytes = null; try { inputBytes = db.getManager().getObjectMapper().writeValueAsBytes(spec); } catch (IOException e) { throw new RuntimeException(e); } remoteCheckpointDocID = Misc.TDHexSHA1Digest(inputBytes); return remoteCheckpointDocID; } } @InterfaceAudience.Private private boolean is404(Throwable e) { if (e instanceof HttpResponseException) { return ((HttpResponseException) e).getStatusCode() == 404; } return false; } /** * @exclude */ @InterfaceAudience.Private public void fetchRemoteCheckpointDoc() { lastSequenceChanged = false; String checkpointId = remoteCheckpointDocID(); final String localLastSequence = db.lastSequenceWithCheckpointId(checkpointId); Log.v(Log.TAG_SYNC, "%s | %s: 
fetchRemoteCheckpointDoc() calling asyncTaskStarted()", this, Thread.currentThread()); asyncTaskStarted(); sendAsyncRequest("GET", "/_local/" + checkpointId, null, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { try { if (e != null && !is404(e)) { Log.w(Log.TAG_SYNC, "%s: error getting remote checkpoint", e, this); setError(e); } else { if (e != null && is404(e)) { Log.d(Log.TAG_SYNC, "%s: 404 error getting remote checkpoint %s, calling maybeCreateRemoteDB", this, remoteCheckpointDocID()); maybeCreateRemoteDB(); } Map<String, Object> response = (Map<String, Object>) result; remoteCheckpoint = response; String remoteLastSequence = null; if (response != null) { remoteLastSequence = (String) response.get("lastSequence"); } if (remoteLastSequence != null && remoteLastSequence.equals(localLastSequence)) { lastSequence = localLastSequence; Log.d(Log.TAG_SYNC, "%s: Replicating from lastSequence=%s", this, lastSequence); } else { Log.d(Log.TAG_SYNC, "%s: lastSequence mismatch: I had: %s, remote had: %s", this, localLastSequence, remoteLastSequence); } beginReplicating(); } } finally { Log.v(Log.TAG_SYNC, "%s | %s: fetchRemoteCheckpointDoc() calling asyncTaskFinished()", this, Thread.currentThread()); asyncTaskFinished(1); } } }); } /** * @exclude */ @InterfaceAudience.Private public void saveLastSequence() { if (!lastSequenceChanged) { return; } if (savingCheckpoint) { // If a save is already in progress, don't do anything. (The completion block will trigger // another save after the first one finishes.) overdueForSave = true; return; } lastSequenceChanged = false; overdueForSave = false; Log.d(Log.TAG_SYNC, "%s: saveLastSequence() called. 
lastSequence: %s", this, lastSequence); final Map<String, Object> body = new HashMap<String, Object>(); if (remoteCheckpoint != null) { body.putAll(remoteCheckpoint); } body.put("lastSequence", lastSequence); String remoteCheckpointDocID = remoteCheckpointDocID(); if (remoteCheckpointDocID == null) { Log.w(Log.TAG_SYNC, "%s: remoteCheckpointDocID is null, aborting saveLastSequence()", this); return; } savingCheckpoint = true; final String checkpointID = remoteCheckpointDocID; Log.d(Log.TAG_SYNC, "%s: put remote _local document. checkpointID: %s", this, checkpointID); sendAsyncRequest("PUT", "/_local/" + checkpointID, body, new RemoteRequestCompletionBlock() { @Override public void onCompletion(Object result, Throwable e) { savingCheckpoint = false; if (e != null) { Log.w(Log.TAG_SYNC, "%s: Unable to save remote checkpoint", e, this); } if (db == null) { Log.w(Log.TAG_SYNC, "%s: Database is null, ignoring remote checkpoint response", this); return; } if (!db.isOpen()) { Log.w(Log.TAG_SYNC, "%s: Database is closed, ignoring remote checkpoint response", this); return; } if (e != null) { // Failed to save checkpoint: switch (getStatusFromError(e)) { case Status.NOT_FOUND: remoteCheckpoint = null; // doc deleted or db reset overdueForSave = true; // try saving again break; case Status.CONFLICT: refreshRemoteCheckpointDoc(); break; default: // TODO: On 401 or 403, and this is a pull, remember that remote // TODo: is read-only & don't attempt to read its checkpoint next time. 
break; } } else { // Saved checkpoint: Map<String, Object> response = (Map<String, Object>) result; body.put("_rev", response.get("rev")); remoteCheckpoint = body; db.setLastSequence(lastSequence, checkpointID, !isPull()); } if (overdueForSave) { saveLastSequence(); } } }); } @InterfaceAudience.Public public boolean goOffline() { if (!online) { return false; } if (db == null) { return false; } db.runAsync(new AsyncTask() { @Override public void run(Database database) { Log.d(Log.TAG_SYNC, "%s: Going offline", this); online = false; stopRemoteRequests(); updateProgress(); notifyChangeListeners(); } }); return true; } @InterfaceAudience.Public public boolean goOnline() { if (online) { return false; } if (db == null) { return false; } db.runAsync(new AsyncTask() { @Override public void run(Database database) { Log.d(Log.TAG_SYNC, "%s: Going online", this); online = true; if (running) { lastSequence = null; setError(null); } /* Log.d(Log.TAG_SYNC, "%s: Shutting down remoteRequestExecutor", this); List<Runnable> tasksAwaitingExecution = remoteRequestExecutor.shutdownNow(); for (Runnable runnable : tasksAwaitingExecution) { Log.d(Log.TAG_SYNC, "%s: runnable: %s", this, runnable); if (runnable instanceof RemoteRequest) { RemoteRequest remoteRequest = (RemoteRequest) runnable; Log.v(Log.TAG_SYNC, "%s: request awaiting execution: %s underlying req: %s", this, remoteRequest, remoteRequest.getRequest().getURI()); } } boolean succeeded = false; try { succeeded = remoteRequestExecutor.awaitTermination(30, TimeUnit.SECONDS); } catch (InterruptedException e) { Log.e(Log.TAG_SYNC, "%s: timeout remoteRequestExecutor.awaitTermination", this, e); } Log.d(Log.TAG_SYNC, "%s: remoteRequestExecutor.awaitTermination succeeded: %s", this, succeeded); */ remoteRequestExecutor = Executors.newCachedThreadPool(); checkSession(); notifyChangeListeners(); } }); return true; } @InterfaceAudience.Private private void stopRemoteRequests() { Log.v(Log.TAG_SYNC, "%s: stopRemoteRequests() cancelling: 
%d requests", this, requests.size()); for (RemoteRequest request : requests.keySet()) { Log.v(Log.TAG_SYNC, "%s: aborting request: %s underlying req: %s", this, request, request.getRequest().getURI()); request.abort(); Log.v(Log.TAG_SYNC, "%s: aborted request", this); } } @InterfaceAudience.Private /* package */ void updateProgress() { if (!isRunning()) { status = ReplicationStatus.REPLICATION_STOPPED; } else if (!online) { status = ReplicationStatus.REPLICATION_OFFLINE; } else { if (active) { status = ReplicationStatus.REPLICATION_ACTIVE; } else { status = ReplicationStatus.REPLICATION_IDLE; } } } @InterfaceAudience.Private protected void setError(Throwable throwable) { // TODO /* if (error.code == NSURLErrorCancelled && $equal(error.domain, NSURLErrorDomain)) return; */ if (throwable != error) { Log.e(Log.TAG_SYNC, "%s: Progress: set error = %s", this, throwable); error = throwable; notifyChangeListeners(); } } @InterfaceAudience.Private protected void revisionFailed() { // Remember that some revisions failed to transfer, so we can later retry. 
++revisionsFailed; } protected RevisionInternal transformRevision(RevisionInternal rev) { if(revisionBodyTransformationBlock != null) { try { final int generation = rev.getGeneration(); RevisionInternal xformed = revisionBodyTransformationBlock.invoke(rev); if (xformed == null) return null; if (xformed != rev) { assert(xformed.getDocId().equals(rev.getDocId())); assert(xformed.getRevId().equals(rev.getRevId())); assert(xformed.getProperties().get("_revisions").equals(rev.getProperties().get("_revisions"))); if (xformed.getProperties().get("_attachments") != null) { // Insert 'revpos' properties into any attachments added by the callback: RevisionInternal mx = new RevisionInternal(xformed.getProperties(), db); xformed = mx; mx.mutateAttachments(new CollectionUtils.Functor<Map<String,Object>,Map<String,Object>>() { public Map<String, Object> invoke(Map<String, Object> info) { if (info.get("revpos") != null) { return info; } if(info.get("data") == null) { throw new IllegalStateException("Transformer added attachment without adding data"); } Map<String,Object> nuInfo = new HashMap<String, Object>(info); nuInfo.put("revpos",generation); return nuInfo; } }); } rev = xformed; } }catch (Exception e) { Log.w(Log.TAG_SYNC,"%s: Exception transforming a revision of doc '%s", e, this, rev.getDocId()); } } return rev; } /** * Called after a continuous replication has gone idle, but it failed to transfer some revisions * and so wants to try again in a minute. Should be overridden by subclasses. 
*/
    @InterfaceAudience.Private
    protected void retry() {
        // Clear the last error before re-attempting the transfer.
        setError(null);
    }

    /**
     * Retries the replication immediately if it is running and online;
     * otherwise schedules a deferred retry for when connectivity returns.
     */
    @InterfaceAudience.Private
    protected void retryIfReady() {
        if (!running) {
            return;
        }
        if (online) {
            Log.d(Log.TAG_SYNC, "%s: RETRYING, to transfer missed revisions", this);
            revisionsFailed = 0;
            cancelPendingRetryIfReady();
            retry();
        } else {
            scheduleRetryIfReady();
        }
    }

    /** Cancels a scheduled retry, if one is pending and not already cancelled. */
    @InterfaceAudience.Private
    protected void cancelPendingRetryIfReady() {
        if (retryIfReadyFuture != null && !retryIfReadyFuture.isCancelled()) {
            retryIfReadyFuture.cancel(true);
        }
    }

    /** Schedules {@link #retryIfReady()} to run after RETRY_DELAY seconds. */
    @InterfaceAudience.Private
    protected void scheduleRetryIfReady() {
        retryIfReadyFuture = workExecutor.schedule(new Runnable() {
            @Override
            public void run() {
                retryIfReady();
            }
        }, RETRY_DELAY, TimeUnit.SECONDS);
    }

    /**
     * Extracts an HTTP-style status code from a throwable, or
     * {@code Status.UNKNOWN} when none is available.
     */
    @InterfaceAudience.Private
    private int getStatusFromError(Throwable t) {
        if (t instanceof CouchbaseLiteException) {
            CouchbaseLiteException couchbaseLiteException = (CouchbaseLiteException) t;
            return couchbaseLiteException.getCBLStatus().getCode();
        }
        return Status.UNKNOWN;
    }

    /**
     * Variant of -fetchRemoveCheckpointDoc that's used while replication is running, to reload the
     * checkpoint to get its current revision number, if there was an error saving it.
     */
    @InterfaceAudience.Private
    private void refreshRemoteCheckpointDoc() {
        Log.d(Log.TAG_SYNC, "%s: Refreshing remote checkpoint to get its _rev...", this);
        savingCheckpoint = true;
        Log.v(Log.TAG_SYNC, "%s | %s: refreshRemoteCheckpointDoc() calling asyncTaskStarted()",
                this, Thread.currentThread());
        asyncTaskStarted();
        sendAsyncRequest("GET", "/_local/" + remoteCheckpointDocID(), null,
                new RemoteRequestCompletionBlock() {
            @Override
            public void onCompletion(Object result, Throwable e) {
                try {
                    if (db == null) {
                        Log.w(Log.TAG_SYNC, "%s: db == null while refreshing remote checkpoint. aborting", this);
                        return;
                    }
                    savingCheckpoint = false;
                    if (e != null && getStatusFromError(e) != Status.NOT_FOUND) {
                        Log.e(Log.TAG_SYNC, "%s: Error refreshing remote checkpoint", e, this);
                    } else {
                        Log.d(Log.TAG_SYNC, "%s: Refreshed remote checkpoint: %s", this, result);
                        remoteCheckpoint = (Map<String, Object>) result;
                        lastSequenceChanged = true;
                        saveLastSequence();  // try saving again
                    }
                } finally {
                    Log.v(Log.TAG_SYNC, "%s | %s: refreshRemoteCheckpointDoc() calling asyncTaskFinished()",
                            this, Thread.currentThread());
                    asyncTaskFinished(1);
                }
            }
        });
    }

    /**
     * Maps one item of a _bulk_docs response to a {@code Status}.
     *
     * <p>Returns OK when the item carries no error, the numeric status when the
     * nonstandard 'status' property is present and >= 400, and otherwise
     * interprets well-known CouchDB error strings.
     */
    @InterfaceAudience.Private
    protected Status statusFromBulkDocsResponseItem(Map<String, Object> item) {
        try {
            if (!item.containsKey("error")) {
                return new Status(Status.OK);
            }
            String errorStr = (String) item.get("error");
            if (errorStr == null || errorStr.isEmpty()) {
                return new Status(Status.OK);
            }

            // 'status' property is nonstandard; TouchDB returns it, others don't.
            String statusString = (String) item.get("status");
            if (statusString != null) {
                // FIX: previously Integer.parseInt(statusString) ran unconditionally;
                // when 'status' was absent, parseInt(null) threw NumberFormatException,
                // which the catch below swallowed, so the method returned OK and the
                // error-string interpretation below was unreachable.
                int status = Integer.parseInt(statusString);
                if (status >= 400) {
                    return new Status(status);
                }
            }

            // If no 'status' present, interpret magic hardcoded CouchDB error strings:
            if (errorStr.equalsIgnoreCase("unauthorized")) {
                return new Status(Status.UNAUTHORIZED);
            } else if (errorStr.equalsIgnoreCase("forbidden")) {
                return new Status(Status.FORBIDDEN);
            } else if (errorStr.equalsIgnoreCase("conflict")) {
                return new Status(Status.CONFLICT);
            } else {
                return new Status(Status.UPSTREAM_ERROR);
            }
        } catch (Exception e) {
            Log.e(Database.TAG, "Exception getting status from " + item, e);
        }
        return new Status(Status.OK);
    }

    @Override
    @InterfaceAudience.Private
    public void networkReachable() {
        goOnline();
    }

    @Override
    @InterfaceAudience.Private
    public void networkUnreachable() {
        goOffline();
    }

    /**
     * Returns true when the remote server identified itself as a Couchbase
     * Sync Gateway of at least {@code minVersion}.
     */
    @InterfaceAudience.Private
    /* package */ boolean serverIsSyncGatewayVersion(String minVersion) {
        String prefix = "Couchbase Sync Gateway/";
        if (serverType == null) {
            return false;
        } else {
            if (serverType.startsWith(prefix)) {
                String versionString = serverType.substring(prefix.length());
                // NOTE(review): lexicographic comparison; assumes single-digit
                // version components ("10.0" would compare less than "2.0").
                return versionString.compareTo(minVersion) >= 0;
            }
        }
        return false;
    }

    @InterfaceAudience.Private
    /* package */ void setServerType(String serverType) {
        this.serverType = serverType;
    }

    @InterfaceAudience.Private
    /* package */ HttpClientFactory getClientFactory() {
        return clientFactory;
    }
}
Issue #305 fix format string issue https://github.com/couchbase/couchbase-lite-android/issues/305
src/main/java/com/couchbase/lite/replicator/Replication.java
Issue #305 fix format string issue
Java
apache-2.0
7c568c0fa6ffd9d7d6ec68e99e51e6f992dc9ae8
0
px3/bulldog,xjaros1/bulldog,Datenheld/Bulldog,Datenheld/Bulldog,px3/bulldog,SilverThings/bulldog,SilverThings/bulldog,xjaros1/bulldog
package org.bulldog.linux.io;

import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.bulldog.core.Parity;
import org.bulldog.core.io.serial.SerialDataEventArgs;
import org.bulldog.core.io.serial.SerialDataListener;
import org.bulldog.core.io.serial.SerialPort;
import org.bulldog.core.util.BulldogUtil;
import org.bulldog.linux.jni.NativePollResult;
import org.bulldog.linux.jni.NativeSerial;
import org.bulldog.linux.jni.NativeTools;

/**
 * Serial port backed by native Linux serial I/O. Data events are dispatched
 * from an epoll-based listener thread to registered {@link SerialDataListener}s.
 */
public class LinuxSerialPort implements SerialPort, LinuxEpollListener {

    private static final String ERROR_CLOSING_PORT =
            "Port could not be closed. Invalid file descriptor?";
    private static final String ERROR_PORT_NOT_OPEN = "Serial port is not open!";
    private static final String ERROR_PORT_ALREADY_OPEN =
            "Serial port has already been opened! Please close it first and reopen it!";

    private static final int DEFAULT_BAUD_RATE = 9600;
    private static final int DEFAULT_READ_TIMEOUT = 5;
    private static final int DEFAULT_DATA_BITS = 8;
    private static final int DEFAULT_STOP_BITS = 1;

    private String deviceFilePath;
    private int baudRate = DEFAULT_BAUD_RATE;
    private boolean isOpen = false;
    private int fileDescriptor = 0;
    private String alias = "";
    private Parity parity = Parity.None;
    private int dataBits = DEFAULT_DATA_BITS;
    private int stopBits = DEFAULT_STOP_BITS;
    private FileDescriptor streamDescriptor;
    private OutputStream outputStream;
    private InputStream inputStream;
    private boolean blocking = true;
    private LinuxEpollThread listenerThread;
    // Synchronized wrapper: iteration still requires holding the list's monitor.
    private List<SerialDataListener> listeners =
            Collections.synchronizedList(new ArrayList<SerialDataListener>());

    /**
     * Creates a serial port for the given device file (e.g. "/dev/ttyO1").
     * The epoll listener thread is created but not started.
     */
    public LinuxSerialPort(String filename) {
        this.deviceFilePath = filename;
        listenerThread = new LinuxEpollThread(filename);
        listenerThread.addListener(this);
    }

    /** Registers a data listener and lazily starts the dispatch thread. */
    @Override
    public void addListener(SerialDataListener listener) {
        this.listeners.add(listener);
        if (!listenerThread.isRunning()) {
            listenerThread.start();
        }
    }

    /**
     * Closes the native port and the Java streams. No-op when already closed.
     *
     * @throws IOException if the native close fails
     */
    public void close() throws IOException {
        if (!isOpen()) {
            return;
        }
        listenerThread.stop();
        try {
            int returnValue = NativeSerial.serialClose(fileDescriptor);
            if (returnValue < 0) {
                throw new IOException(ERROR_CLOSING_PORT);
            }
        } finally {
            finalizeStreams();
        }
        isOpen = false;
    }

    /** Two ports are equal when they refer to the same device file. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        LinuxSerialPort other = (LinuxSerialPort) obj;
        if (deviceFilePath == null) {
            if (other.deviceFilePath != null)
                return false;
        } else if (!deviceFilePath.equals(other.deviceFilePath))
            return false;
        return true;
    }

    /** Best-effort close of both streams; close errors are deliberately ignored. */
    private void finalizeStreams() throws IOException {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (Exception ex) {
            } finally {
                inputStream = null;
            }
        }
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (Exception ex) {
            } finally {
                outputStream = null;
            }
        }
    }

    /** Delivers raw bytes to all listeners; synchronized for safe iteration. */
    public void fireSerialDataEvent(byte[] data) {
        synchronized (listeners) {
            for (SerialDataListener listener : listeners) {
                listener.onSerialDataAvailable(new SerialDataEventArgs(this, data));
            }
        }
    }

    public String getAlias() {
        return alias;
    }

    @Override
    public int getBaudRate() {
        return this.baudRate;
    }

    @Override
    public boolean getBlocking() {
        return blocking;
    }

    public int getDataBits() {
        return dataBits;
    }

    public String getDeviceFilePath() {
        return deviceFilePath;
    }

    @Override
    public InputStream getInputStream() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return inputStream;
    }

    @Override
    public String getName() {
        return deviceFilePath;
    }

    @Override
    public OutputStream getOutputStream() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return outputStream;
    }

    @Override
    public Parity getParity() {
        return this.parity;
    }

    /** Maps the configured {@link Parity} to native termios flag bits. */
    private int getParityCode() {
        if (parity == Parity.Even) {
            return NativeSerial.PARENB;
        } else if (parity == Parity.Odd) {
            return NativeSerial.PARENB | NativeSerial.PARODD;
        } else if (parity == Parity.Mark) {
            return NativeSerial.PARENB | NativeSerial.PARODD | NativeSerial.CMSPAR;
        } else if (parity == Parity.Space) {
            return NativeSerial.PARENB | NativeSerial.CMSPAR;
        }
        return 0;
    }

    @Override
    public int getStopBits() {
        return stopBits;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((deviceFilePath == null) ? 0 : deviceFilePath.hashCode());
        return result;
    }

    public boolean isOpen() {
        return isOpen;
    }

    /**
     * Opens the native port with the configured settings and wires Java
     * streams to its file descriptor. Starts the dispatch thread when
     * listeners are already registered.
     */
    public void open() throws IOException {
        fileDescriptor = NativeSerial.serialOpen(deviceFilePath, baudRate, getParityCode(),
                getBlocking(), DEFAULT_READ_TIMEOUT, dataBits, stopBits);
        streamDescriptor = NativeTools.getJavaDescriptor(fileDescriptor);
        outputStream = new FileOutputStream(streamDescriptor);
        inputStream = new FileInputStream(streamDescriptor);
        isOpen = true;
        listenerThread.setup();
        if (listeners.size() > 0) {
            listenerThread.start();
        }
    }

    public byte readByte() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return NativeSerial.serialRead(fileDescriptor);
    }

    /**
     * Reads up to {@code buffer.length} bytes into {@code buffer}.
     *
     * @return the number of bytes actually read by the native layer
     */
    @Override
    public int readBytes(byte[] buffer) throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(buffer.length);
        int bytesRead = NativeSerial.serialReadBuffer(fileDescriptor, byteBuffer, buffer.length);
        byteBuffer.get(buffer);
        return bytesRead;
    }

    @Override
    public String readString() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return BulldogUtil.convertStreamToString(getInputStream());
    }

    /**
     * Unregisters a listener.
     *
     * <p>FIX: previously the dispatch thread was stopped whenever it was
     * running, which silenced any listeners that were still registered. The
     * thread is now stopped only once the last listener has been removed.
     */
    @Override
    public void removeListener(SerialDataListener listener) {
        this.listeners.remove(listener);
        if (listeners.isEmpty() && listenerThread.isRunning()) {
            listenerThread.stop();
        }
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }

    @Override
    public void setBaudRate(int baudRate) {
        if (isOpen()) {
            throw new IllegalStateException(ERROR_PORT_ALREADY_OPEN);
        }
        this.baudRate = baudRate;
    }

    @Override
    public void setBlocking(boolean blocking) {
        if (isOpen()) {
            throw new IllegalStateException(ERROR_PORT_ALREADY_OPEN);
        }
        this.blocking = blocking;
    }

    public void setDataBits(int dataBits) {
        if (dataBits < 5 || dataBits > 8) {
            throw new IllegalArgumentException("The amount of databits must be between 5 and 8");
        }
        this.dataBits = dataBits;
    }

    @Override
    public void setParity(Parity parity) {
        if (isOpen()) {
            throw new IllegalStateException(ERROR_PORT_ALREADY_OPEN);
        }
        this.parity = parity;
    }

    @Override
    public void setStopBits(int stopBits) {
        if (stopBits != 1 && stopBits != 2) {
            throw new IllegalArgumentException("You can only have 1 or 2 stop bits");
        }
        this.stopBits = stopBits;
    }

    public void writeByte(byte data) throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        NativeSerial.serialWrite(fileDescriptor, data);
    }

    @Override
    public void writeBytes(byte[] bytes) throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        outputStream.write(bytes);
    }

    @Override
    public void writeString(String string) throws IOException {
        writeBytes(string.getBytes());
    }

    /** Epoll callback: fan each poll result out to the listeners. */
    @Override
    public void processEpollResults(NativePollResult[] results) {
        for (NativePollResult result : results) {
            fireSerialDataEvent(result);
        }
    }

    /** Delivers one poll result to all listeners; synchronized for safe iteration. */
    protected void fireSerialDataEvent(NativePollResult result) {
        synchronized (listeners) {
            for (SerialDataListener listener : listeners) {
                listener.onSerialDataAvailable(new SerialDataEventArgs(this, result.getData()));
            }
        }
    }
}
bulldog.linux/src/main/java/org/bulldog/linux/io/LinuxSerialPort.java
package org.bulldog.linux.io;

import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.bulldog.core.Parity;
import org.bulldog.core.io.serial.SerialDataEventArgs;
import org.bulldog.core.io.serial.SerialDataListener;
import org.bulldog.core.io.serial.SerialPort;
import org.bulldog.core.util.BulldogUtil;
import org.bulldog.linux.jni.NativePollResult;
import org.bulldog.linux.jni.NativeSerial;
import org.bulldog.linux.jni.NativeTools;

/**
 * Serial port backed by native Linux serial I/O. Data events are dispatched
 * from an epoll-based listener thread to registered {@link SerialDataListener}s.
 */
public class LinuxSerialPort implements SerialPort, LinuxEpollListener {

    private static final String ERROR_CLOSING_PORT =
            "Port could not be closed. Invalid file descriptor?";
    private static final String ERROR_PORT_NOT_OPEN = "Serial port is not open!";
    private static final String ERROR_PORT_ALREADY_OPEN =
            "Serial port has already been opened! Please close it first and reopen it!";

    private static final int DEFAULT_BAUD_RATE = 9600;
    private static final int DEFAULT_READ_TIMEOUT = 5;
    private static final int DEFAULT_DATA_BITS = 8;
    private static final int DEFAULT_STOP_BITS = 1;

    private String deviceFilePath;
    private int baudRate = DEFAULT_BAUD_RATE;
    private boolean isOpen = false;
    private int fileDescriptor = 0;
    private String alias = "";
    private Parity parity = Parity.None;
    private int dataBits = DEFAULT_DATA_BITS;
    private int stopBits = DEFAULT_STOP_BITS;
    private FileDescriptor streamDescriptor;
    private OutputStream outputStream;
    private InputStream inputStream;
    private boolean blocking = true;
    private LinuxEpollThread listenerThread;
    // Synchronized wrapper: iteration still requires holding the list's monitor.
    private List<SerialDataListener> listeners =
            Collections.synchronizedList(new ArrayList<SerialDataListener>());

    /**
     * Creates a serial port for the given device file (e.g. "/dev/ttyO1").
     * The epoll listener thread is created but not started.
     */
    public LinuxSerialPort(String filename) {
        this.deviceFilePath = filename;
        listenerThread = new LinuxEpollThread(filename);
        listenerThread.addListener(this);
    }

    /** Registers a data listener and lazily starts the dispatch thread. */
    @Override
    public void addListener(SerialDataListener listener) {
        this.listeners.add(listener);
        if (!listenerThread.isRunning()) {
            listenerThread.start();
        }
    }

    /**
     * Closes the native port and the Java streams. No-op when already closed.
     *
     * @throws IOException if the native close fails
     */
    public void close() throws IOException {
        if (!isOpen()) {
            return;
        }
        listenerThread.stop();
        try {
            int returnValue = NativeSerial.serialClose(fileDescriptor);
            if (returnValue < 0) {
                throw new IOException(ERROR_CLOSING_PORT);
            }
        } finally {
            finalizeStreams();
        }
        isOpen = false;
    }

    /** Two ports are equal when they refer to the same device file. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        LinuxSerialPort other = (LinuxSerialPort) obj;
        if (deviceFilePath == null) {
            if (other.deviceFilePath != null)
                return false;
        } else if (!deviceFilePath.equals(other.deviceFilePath))
            return false;
        return true;
    }

    /** Best-effort close of both streams; close errors are deliberately ignored. */
    private void finalizeStreams() throws IOException {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (Exception ex) {
            } finally {
                inputStream = null;
            }
        }
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (Exception ex) {
            } finally {
                outputStream = null;
            }
        }
    }

    /** Delivers raw bytes to all listeners; synchronized for safe iteration. */
    public void fireSerialDataEvent(byte[] data) {
        synchronized (listeners) {
            for (SerialDataListener listener : listeners) {
                listener.onSerialDataAvailable(new SerialDataEventArgs(this, data));
            }
        }
    }

    public String getAlias() {
        return alias;
    }

    @Override
    public int getBaudRate() {
        return this.baudRate;
    }

    @Override
    public boolean getBlocking() {
        return blocking;
    }

    public int getDataBits() {
        return dataBits;
    }

    public String getDeviceFilePath() {
        return deviceFilePath;
    }

    @Override
    public InputStream getInputStream() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return inputStream;
    }

    @Override
    public String getName() {
        return deviceFilePath;
    }

    @Override
    public OutputStream getOutputStream() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return outputStream;
    }

    @Override
    public Parity getParity() {
        return this.parity;
    }

    /** Maps the configured {@link Parity} to native termios flag bits. */
    private int getParityCode() {
        if (parity == Parity.Even) {
            return NativeSerial.PARENB;
        } else if (parity == Parity.Odd) {
            return NativeSerial.PARENB | NativeSerial.PARODD;
        } else if (parity == Parity.Mark) {
            return NativeSerial.PARENB | NativeSerial.PARODD | NativeSerial.CMSPAR;
        } else if (parity == Parity.Space) {
            return NativeSerial.PARENB | NativeSerial.CMSPAR;
        }
        return 0;
    }

    @Override
    public int getStopBits() {
        return stopBits;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((deviceFilePath == null) ? 0 : deviceFilePath.hashCode());
        return result;
    }

    public boolean isOpen() {
        return isOpen;
    }

    /**
     * Opens the native port with the configured settings and wires Java
     * streams to its file descriptor. Starts the dispatch thread when
     * listeners are already registered.
     */
    public void open() throws IOException {
        fileDescriptor = NativeSerial.serialOpen(deviceFilePath, baudRate, getParityCode(),
                getBlocking(), DEFAULT_READ_TIMEOUT, dataBits, stopBits);
        streamDescriptor = NativeTools.getJavaDescriptor(fileDescriptor);
        outputStream = new FileOutputStream(streamDescriptor);
        inputStream = new FileInputStream(streamDescriptor);
        isOpen = true;
        listenerThread.setup();
        if (listeners.size() > 0) {
            listenerThread.start();
        }
    }

    public byte readByte() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return NativeSerial.serialRead(fileDescriptor);
    }

    /**
     * Reads up to {@code buffer.length} bytes into {@code buffer}.
     *
     * @return the number of bytes actually read by the native layer
     */
    @Override
    public int readBytes(byte[] buffer) throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(buffer.length);
        int bytesRead = NativeSerial.serialReadBuffer(fileDescriptor, byteBuffer, buffer.length);
        byteBuffer.get(buffer);
        return bytesRead;
    }

    @Override
    public String readString() throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        return BulldogUtil.convertStreamToString(getInputStream());
    }

    /**
     * Unregisters a listener.
     *
     * <p>FIX: previously the dispatch thread was stopped whenever it was
     * running, which silenced any listeners that were still registered. The
     * thread is now stopped only once the last listener has been removed.
     */
    @Override
    public void removeListener(SerialDataListener listener) {
        this.listeners.remove(listener);
        if (listeners.isEmpty() && listenerThread.isRunning()) {
            listenerThread.stop();
        }
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }

    @Override
    public void setBaudRate(int baudRate) {
        if (isOpen()) {
            throw new IllegalStateException(ERROR_PORT_ALREADY_OPEN);
        }
        this.baudRate = baudRate;
    }

    @Override
    public void setBlocking(boolean blocking) {
        if (isOpen()) {
            throw new IllegalStateException(ERROR_PORT_ALREADY_OPEN);
        }
        this.blocking = blocking;
    }

    public void setDataBits(int dataBits) {
        if (dataBits < 5 || dataBits > 8) {
            throw new IllegalArgumentException("The amount of databits must be between 5 and 8");
        }
        this.dataBits = dataBits;
    }

    @Override
    public void setParity(Parity parity) {
        if (isOpen()) {
            throw new IllegalStateException(ERROR_PORT_ALREADY_OPEN);
        }
        this.parity = parity;
    }

    @Override
    public void setStopBits(int stopBits) {
        if (stopBits != 1 && stopBits != 2) {
            throw new IllegalArgumentException("You can only have 1 or 2 stop bits");
        }
        this.stopBits = stopBits;
    }

    public void writeByte(byte data) throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        NativeSerial.serialWrite(fileDescriptor, data);
    }

    @Override
    public void writeBytes(byte[] bytes) throws IOException {
        if (!isOpen()) {
            throw new IllegalStateException(ERROR_PORT_NOT_OPEN);
        }
        outputStream.write(bytes);
    }

    @Override
    public void writeString(String string) throws IOException {
        writeBytes(string.getBytes());
    }

    /**
     * Epoll callback: fan each poll result out to the listeners.
     *
     * <p>FIX: the listener list is a {@code Collections.synchronizedList};
     * iterating it without holding its monitor risks a
     * ConcurrentModificationException when listeners are added or removed
     * concurrently. The iteration is now synchronized on the list.
     */
    @Override
    public void processEpollResults(NativePollResult[] results) {
        for (NativePollResult result : results) {
            synchronized (listeners) {
                for (SerialDataListener listener : listeners) {
                    listener.onSerialDataAvailable(new SerialDataEventArgs(this, result.getData()));
                }
            }
        }
    }
}
Small refactoring;
bulldog.linux/src/main/java/org/bulldog/linux/io/LinuxSerialPort.java
Small refactoring;
Java
bsd-2-clause
385ba394837ba16747630ff5addc1cba20503b7e
0
laffer1/justjournal,laffer1/justjournal,laffer1/justjournal,laffer1/justjournal
/*
 * Copyright (c) 2013 Lucas Holt
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
package com.justjournal.ctl.api;

import com.justjournal.Login;
import com.justjournal.core.Settings;
import com.justjournal.model.*;
import com.justjournal.model.api.NewUser;
import com.justjournal.repository.*;
import com.justjournal.utility.StringUtil;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;

import javax.servlet.http.HttpServletResponse;
import javax.transaction.Transactional;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;

/**
 * Create new accounts in Just Journal. To delete accounts, use AccountController.
 *
 * @author Lucas Holt
 * @see com.justjournal.ctl.api.AccountController
 */
@Transactional
@Controller
@RequestMapping("/api/signup")
public class SignUpController {

    private static final Logger log = Logger.getLogger(SignUpController.class);

    @Autowired
    private UserRepository userRepository;

    @Autowired
    private UserBioDao userBioDao;

    @Autowired
    private UserContactRepository userContactRepository;

    @Autowired
    private UserPrefRepository userPrefRepository;

    @Autowired
    private JournalRepository journalRepository;

    @Autowired
    private Settings settings;

    /**
     * Validates the sign-up request and creates the account.
     *
     * @param user     incoming account details
     * @param response servlet response (status is set on failure)
     * @return map with the new user's "id", or an "error" entry on failure
     * @throws IllegalArgumentException when email, username, or password is invalid
     */
    @RequestMapping(method = RequestMethod.POST, produces = "application/json")
    public
    @ResponseBody
    Map<String, String> post(@RequestBody final NewUser user, final HttpServletResponse response) {
        if (!settings.isUserAllowNew()) {
            response.setStatus(HttpServletResponse.SC_FORBIDDEN);
            return java.util.Collections.singletonMap("error", "Could not add user");
        }

        if (!StringUtil.lengthCheck(user.getEmail(), 6, 100)) {
            throw new IllegalArgumentException("Invalid email address");
        }

        if (!Login.isUserName(user.getUsername())) {
            throw new IllegalArgumentException(
                    "Username must be letters and numbers only");
        }

        if (!Login.isPassword(user.getPassword())) {
            throw new IllegalArgumentException(
                    "Password must be 5-18 characters.");
        }

        return newUser(user, response);
    }

    /**
     * Persists the user plus its journal, preferences, contact, and bio rows.
     * Any persistence failure is reported as a 403 with an "error" entry.
     */
    private Map<String, String> newUser(final NewUser newUser, final HttpServletResponse response) {
        try {
            User user = new User();
            user.setName(newUser.getFirstName());
            user.setLastName(newUser.getLastName());
            user.setUsername(newUser.getUsername());
            // NOTE(review): SHA-1 is a weak password hash; consider migrating to
            // bcrypt/scrypt/argon2.
            user.setPassword(Login.SHA1(newUser.getPassword()));
            user.setType(0);
            user.setSince(Calendar.getInstance().get(Calendar.YEAR));
            user.setLastLogin(new Date());
            user = userRepository.saveAndFlush(user);
            if (user == null)
                throw new Exception("Unable to save user");

            Journal journal = new Journal();
            journal.setAllowSpider(true);
            journal.setOwnerViewOnly(false);
            journal.setPingServices(true);
            journal.setName(user.getName() + "\'s Journal");
            journal.setSince(Calendar.getInstance().getTime());
            journal.setModified(Calendar.getInstance().getTime());
            journal = journalRepository.saveAndFlush(journal);
            if (journal == null)
                throw new Exception("Unable to save journal");

            UserPref userPref = new UserPref();
            userPref.setShowAvatar(PrefBool.N);
            userPref.setUser(user);
            userPref = userPrefRepository.save(userPref);
            if (userPref == null)
                throw new Exception("Unable to save user preferences");

            UserContact userContact = new UserContact();
            userContact.setEmail(newUser.getEmail());
            userContact.setUser(user);
            userContact = userContactRepository.save(userContact);
            if (userContact == null)
                throw new Exception("Unable to save user contact");

            UserBio userBio = new UserBio();
            userBio.setBio("");
            userBio.setUser(user);
            userBio = userBioDao.save(userBio);
            if (userBio == null)
                throw new Exception("Unable to save user bio");

            return java.util.Collections.singletonMap("id", Integer.toString(user.getId()));
        } catch (final Exception e) {
            // FIX: log.error(e) discarded the stack trace; log message + cause.
            log.error("Could not add user", e);
            response.setStatus(HttpServletResponse.SC_FORBIDDEN);
            return java.util.Collections.singletonMap("error", "Could not add user");
        }
    }
}
src/main/java/com/justjournal/ctl/api/SignUpController.java
/*
 * Copyright (c) 2013 Lucas Holt
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
package com.justjournal.ctl.api;

import com.justjournal.Login;
import com.justjournal.core.Settings;
import com.justjournal.model.*;
import com.justjournal.model.api.NewUser;
import com.justjournal.repository.*;
import com.justjournal.utility.StringUtil;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;

import javax.servlet.http.HttpServletResponse;
import javax.transaction.Transactional;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;

/**
 * Create new accounts in Just Journal. To delete accounts, use AccountController.
 *
 * @author Lucas Holt
 * @see com.justjournal.ctl.api.AccountController
 */
@Transactional
@Controller
@RequestMapping("/api/signup")
public class SignUpController {

    private static final Logger log = Logger.getLogger(SignUpController.class);

    @Autowired
    private UserRepository userRepository;

    @Autowired
    private UserBioDao userBioDao;

    @Autowired
    private UserContactRepository userContactRepository;

    @Autowired
    private UserPrefRepository userPrefRepository;

    @Autowired
    private JournalRepository journalRepository;

    @Autowired
    private Settings settings;

    /**
     * Validates the sign-up request and creates the account.
     *
     * @param user     incoming account details
     * @param response servlet response (status is set on failure)
     * @return map with the new user's "id", or an "error" entry on failure
     * @throws IllegalArgumentException when email, username, or password is invalid
     */
    @RequestMapping(method = RequestMethod.POST, produces = "application/json")
    public
    @ResponseBody
    Map<String, String> post(@RequestBody final NewUser user, final HttpServletResponse response) {
        if (!settings.isUserAllowNew()) {
            response.setStatus(HttpServletResponse.SC_FORBIDDEN);
            return java.util.Collections.singletonMap("error", "Could not add user");
        }

        if (!StringUtil.lengthCheck(user.getEmail(), 6, 100)) {
            throw new IllegalArgumentException("Invalid email address");
        }

        if (!Login.isUserName(user.getUsername())) {
            throw new IllegalArgumentException(
                    "Username must be letters and numbers only");
        }

        if (!Login.isPassword(user.getPassword())) {
            throw new IllegalArgumentException(
                    "Password must be 5-18 characters.");
        }

        return newUser(user, response);
    }

    /**
     * Persists the user plus its journal, preferences, contact, and bio rows.
     * Any persistence failure is reported as a 403 with an "error" entry.
     *
     * <p>NOTE(review): the journal row is created without since/modified
     * timestamps here — confirm whether those columns have defaults.
     */
    private Map<String, String> newUser(final NewUser newUser, final HttpServletResponse response) {
        try {
            User user = new User();
            user.setName(newUser.getFirstName());
            user.setLastName(newUser.getLastName());
            user.setUsername(newUser.getUsername());
            // NOTE(review): SHA-1 is a weak password hash; consider migrating to
            // bcrypt/scrypt/argon2.
            user.setPassword(Login.SHA1(newUser.getPassword()));
            user.setType(0);
            user.setSince(Calendar.getInstance().get(Calendar.YEAR));
            user.setLastLogin(new Date());
            user = userRepository.saveAndFlush(user);
            if (user == null)
                throw new Exception("Unable to save user");

            Journal journal = new Journal();
            journal.setAllowSpider(true);
            journal.setOwnerViewOnly(false);
            journal.setPingServices(true);
            journal.setName(user.getName() + "\'s Journal");
            journal = journalRepository.saveAndFlush(journal);
            if (journal == null)
                throw new Exception("Unable to save journal");

            UserPref userPref = new UserPref();
            userPref.setShowAvatar(PrefBool.N);
            userPref.setUser(user);
            userPref = userPrefRepository.save(userPref);
            if (userPref == null)
                throw new Exception("Unable to save user preferences");

            UserContact userContact = new UserContact();
            userContact.setEmail(newUser.getEmail());
            userContact.setUser(user);
            userContact = userContactRepository.save(userContact);
            if (userContact == null)
                throw new Exception("Unable to save user contact");

            UserBio userBio = new UserBio();
            userBio.setBio("");
            userBio.setUser(user);
            userBio = userBioDao.save(userBio);
            if (userBio == null)
                throw new Exception("Unable to save user bio");

            return java.util.Collections.singletonMap("id", Integer.toString(user.getId()));
        } catch (final Exception e) {
            // FIX: log.error(e) discarded the stack trace; log message + cause.
            log.error("Could not add user", e);
            response.setStatus(HttpServletResponse.SC_FORBIDDEN);
            return java.util.Collections.singletonMap("error", "Could not add user");
        }
    }
}
tweak journal creation on signup
src/main/java/com/justjournal/ctl/api/SignUpController.java
tweak journal creation on signup
Java
bsd-3-clause
e50d8f12efaa288e1a68c44c5ceb3157557a2a02
0
ontodev/robot,ontodev/robot,ontodev/robot
package org.obolibrary.robot;

import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory;
import org.semanticweb.owlapi.search.EntitySearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;

/**
 * Implements several variations on MIREOT, as first described in "MIREOT: The minimum information
 * to reference an external ontology term" (<a
 * href="http://dx.doi.org/10.3233/AO-2011-0087">link</a>).
 *
 * @author <a href="mailto:[email protected]">James A. Overton</a>
 */
public class MireotOperation {
  /** Logger. */
  private static final Logger logger = LoggerFactory.getLogger(MireotOperation.class);

  /** Shared data factory. */
  private static OWLDataFactory dataFactory = new OWLDataFactoryImpl();

  /** RDFS isDefinedBy annotation property. */
  private static OWLAnnotationProperty isDefinedBy = dataFactory.getRDFSIsDefinedBy();

  // NOTE(review): the three fields below are mutable static state written by
  // setOptions() and read throughout — concurrent extract calls would race.
  // Confirm single-threaded use or thread these through as parameters.

  /** Specify how to handle intermediates. */
  private static String intermediates;

  /** Specify if source should be annotated. */
  private static boolean annotateSource;

  /** Specify a map of sources. */
  private static Map<IRI, IRI> sourceMap;

  /**
   * Get a set of default annotation properties. Currently includes only RDFS label.
   *
   * @return a set of annotation properties
   */
  public static Set<OWLAnnotationProperty> getDefaultAnnotationProperties() {
    Set<OWLAnnotationProperty> annotationProperties = new HashSet<>();
    annotationProperties.add(dataFactory.getRDFSLabel());
    return annotationProperties;
  }

  /**
   * Given an ontology, a set of upper-level IRIs, a set of lower-level IRIs, and a set of
   * annotation properties, return a new ontology with just the named ancestors of those terms,
   * their subclass relations, and the selected annotations. The input ontology is not changed.
   *
   * @param inputOntology the ontology to extract from
   * @param upperIRIs ancestors will be copied up to and including these terms
   * @param lowerIRIs copy these terms and their superclasses
   * @param annotationProperties the annotation properties to copy; if null, all will be copied
   * @return a new ontology with the target terms and their named ancestors
   * @throws OWLOntologyCreationException on problems creating new ontology
   */
  public static OWLOntology getAncestors(
      OWLOntology inputOntology,
      Set<IRI> upperIRIs,
      Set<IRI> lowerIRIs,
      Set<OWLAnnotationProperty> annotationProperties)
      throws OWLOntologyCreationException {
    // Delegate with default options (intermediates=all) and no source map.
    return getAncestors(inputOntology, upperIRIs, lowerIRIs, annotationProperties, null, null);
  }

  /**
   * Return a set of all OWLEntities from an ontology. If an entity is both an individual and a
   * class, exclude the individual and only include the class.
   *
   * @param inputOntology OWLOntology to get entities from
   * @return set of OWLEntities
   */
  private static Set<OWLEntity> getAllEntities(OWLOntology inputOntology) {
    Set<OWLEntity> entities = new HashSet<>();
    // Filter out any individuals that have the same IRI as a class (we prefer the class in MIREOT)
    Set<OWLClass> classes = inputOntology.getClassesInSignature();
    Set<IRI> classIRIs = classes.stream().map(OWLNamedObject::getIRI).collect(Collectors.toSet());
    Set<OWLNamedIndividual> individuals = inputOntology.getIndividualsInSignature();
    individuals =
        individuals
            .stream()
            .filter(i -> !classIRIs.contains(i.getIRI()))
            .collect(Collectors.toSet());
    entities.addAll(classes);
    entities.addAll(individuals);
    entities.addAll(inputOntology.getAnnotationPropertiesInSignature());
    entities.addAll(inputOntology.getDataPropertiesInSignature());
    entities.addAll(inputOntology.getObjectPropertiesInSignature());
    return entities;
  }

  /**
   * Given an input ontology, a set of upper IRIs, a set of lower IRIs, a set of annotation
   * properties (or null for all), and a map of extract options, get the ancestors of the lower IRIs
   * up to the upper IRIs. Include the specified annotation properties.
   *
   * @param inputOntology OWLOntology to extract from
   * @param upperIRIs top level IRIs
   * @param lowerIRIs bottom level IRIs
   * @param annotationProperties annotation properties to copy, or null for all
   * @param options map of extract options or null
   * @param inputSourceMap map of source IRIs to targets
   * @return extracted module
   * @throws OWLOntologyCreationException on problems creating the new ontology
   */
  public static OWLOntology getAncestors(
      OWLOntology inputOntology,
      Set<IRI> upperIRIs,
      Set<IRI> lowerIRIs,
      Set<OWLAnnotationProperty> annotationProperties,
      Map<String, String> options,
      Map<IRI, IRI> inputSourceMap)
      throws OWLOntologyCreationException {
    logger.debug("Extract with MIREOT ...");
    OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager();

    // Get options (fills the static intermediates/annotateSource/sourceMap fields)
    setOptions(options, inputSourceMap);

    // Get all entities in the ontology (preferring Class over NamedIndividual)
    Set<OWLEntity> entities = getAllEntities(inputOntology);

    // The other OWLAPI extract methods use the source ontology IRI
    // so we'll use it here too.
    OWLOntology outputOntology = outputManager.createOntology(inputOntology.getOntologyID());

    // Directly copy all upper entities
    Set<OWLEntity> upperEntities = new HashSet<>();
    if (upperIRIs != null && upperIRIs.size() > 0) {
      upperEntities =
          entities.stream().filter(e -> upperIRIs.contains(e.getIRI())).collect(Collectors.toSet());
    }
    for (OWLEntity entity : upperEntities) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      if (annotateSource) {
        maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
      }
    }

    // Create a reasoner to get ancestors
    OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory();
    OWLReasoner reasoner = reasonerFactory.createReasoner(inputOntology);

    // For each lower entity, get the ancestors (all or none)
    Set<OWLEntity> lowerEntities =
        entities.stream().filter(e -> lowerIRIs.contains(e.getIRI())).collect(Collectors.toSet());
    for (OWLEntity cls : lowerEntities) {
      OntologyHelper.copy(inputOntology, outputOntology, cls, annotationProperties);
      if ("none".equals(intermediates)) {
        copyAncestorsNoIntermediates(
            inputOntology, outputOntology, reasoner, upperEntities, cls, cls, annotationProperties);
      } else {
        copyAncestorsAllIntermediates(
            inputOntology, outputOntology, reasoner, upperEntities, cls, annotationProperties);
      }
      if (annotateSource) {
        maybeAnnotateSource(outputOntology, outputManager, cls, sourceMap);
      }
    }

    // Maybe remove unnecessary intermediates
    if (intermediates.equalsIgnoreCase("minimal")) {
      Set<IRI> precious = new HashSet<>();
      if (upperIRIs != null) {
        precious.addAll(upperIRIs);
      }
      precious.addAll(lowerIRIs);
      OntologyHelper.collapseOntology(outputOntology, precious);
    }

    return outputOntology;
  }

  /**
   * Given an ontology, a set of upper-level IRIs, a set of lower-level IRIs, a set of annotation
   * properties, and a boolean indicating if rdfs:isDefinedBy should be added to copied classes,
   * return a new ontology with just the named ancestors of those terms, their subclass relations,
   * and the selected annotations. The input ontology is not changed.
   *
   * @deprecated replaced by {@link #getAncestors(OWLOntology, Set, Set, Set)}
   * @param inputOntology the ontology to extract from
   * @param upperIRIs ancestors will be copied up to and including these terms
   * @param lowerIRIs copy these terms and their superclasses
   * @param annotationProperties the annotation properties to copy; if null, all will be copied
   * @param annotateSource if true, annotate copied classes with rdfs:isDefinedBy
   * @param sourceMap map of term IRI to source IRI
   * @return a new ontology with the target terms and their named ancestors
   * @throws OWLOntologyCreationException on problems creating new ontology
   */
  @Deprecated
  public static OWLOntology getAncestors(
      OWLOntology inputOntology,
      Set<IRI> upperIRIs,
      Set<IRI> lowerIRIs,
      Set<OWLAnnotationProperty> annotationProperties,
      boolean annotateSource,
      Map<IRI, IRI> sourceMap)
      throws OWLOntologyCreationException {
    logger.debug("Extract with MIREOT ...");
    OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory();
    OWLReasoner reasoner = reasonerFactory.createReasoner(inputOntology);
    OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager();
    // The other OWLAPI extract methods use the source ontology IRI
    // so we'll use it here too.
    OWLOntology outputOntology = outputManager.createOntology(inputOntology.getOntologyID());

    Set<OWLEntity> upperEntities = OntologyHelper.getEntities(inputOntology, upperIRIs);
    for (OWLEntity entity : upperEntities) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      if (annotateSource) {
        maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
      }
    }

    Set<OWLEntity> lowerEntities = OntologyHelper.getEntities(inputOntology, lowerIRIs);
    for (OWLEntity entity : lowerEntities) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      if (annotateSource) {
        maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
      }
      copyAncestorsAllIntermediates(
          inputOntology, outputOntology, reasoner, upperEntities, entity, annotationProperties);
    }
    return outputOntology;
  }

  /**
   * Given an ontology, a set of upper-level IRIs, and a set of annotation properties, return a new
   * ontology with just those terms and their named descendants, their subclass relations, and the
   * selected annotations. The input ontology is not changed.
   *
   * @param inputOntology the ontology to extract from
   * @param upperIRIs these terms and their descendants will be copied
   * @param annotationProperties the annotation properties to copy; if null, all will be copied
   * @return a new ontology with the target terms and their named ancestors
   * @throws OWLOntologyCreationException on problems creating new ontology
   */
  public static OWLOntology getDescendants(
      OWLOntology inputOntology, Set<IRI> upperIRIs, Set<OWLAnnotationProperty> annotationProperties)
      throws OWLOntologyCreationException {
    // Delegate with default options (intermediates=all) and no source map.
    return getDescendants(inputOntology, upperIRIs, annotationProperties, null, null);
  }

  /**
   * Given an ontology, a set of upper-level IRIs, and a set of annotation properties, return a new
   * ontology with just those terms and their named descendants, their subclass relations, and the
   * selected annotations. The input ontology is not changed.
   *
   * @deprecated replaced by {@link #getDescendants(OWLOntology, Set, Set, Map, Map)}
   * @param inputOntology the ontology to extract from
   * @param upperIRIs these terms and their descendants will be copied
   * @param annotationProperties the annotation properties to copy; if null, all will be copied
   * @param annotateSource if true, annotate copied classes with rdfs:isDefinedBy
   * @param sourceMap map of term IRI to source IRI
   * @return a new ontology with the target terms and their named ancestors
   * @throws OWLOntologyCreationException on problems creating new ontology
   */
  @Deprecated
  public static OWLOntology getDescendants(
      OWLOntology inputOntology,
      Set<IRI> upperIRIs,
      Set<OWLAnnotationProperty> annotationProperties,
      boolean annotateSource,
      Map<IRI, IRI> sourceMap)
      throws OWLOntologyCreationException {
    logger.debug("Extract with MIREOT ...");
    OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager();
    OWLOntology outputOntology = outputManager.createOntology();

    Set<OWLEntity> upperEntities = OntologyHelper.getEntities(inputOntology, upperIRIs);
    for (OWLEntity entity : upperEntities) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      if (annotateSource) {
        maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
      }
      copyDescendantsAllIntermediates(inputOntology, outputOntology, entity, annotationProperties);
    }

    return outputOntology;
  }

  /**
   * Given input and output ontologies, a set of upper entities, a target entity, and a set of
   * annotation properties, copy the target entity and all its named ancestors (recursively) from
   * the input ontology to the output ontology, along with the specified annotations. The input
   * ontology is not changed.
   *
   * @param inputOntology the ontology to copy from
   * @param outputOntology the ontology to copy to
   * @param reasoner OWLReasoner to get superclasses and superproperties while maintaining structure
   * @param upperEntities the top level of entities, or null
   * @param entity the target entity that will have its ancestors copied
   * @param annotationProperties the annotations to copy, or null for all
   */
  private static void copyAncestorsAllIntermediates(
      OWLOntology inputOntology,
      OWLOntology outputOntology,
      OWLReasoner reasoner,
      Set<OWLEntity> upperEntities,
      OWLEntity entity,
      Set<OWLAnnotationProperty> annotationProperties) {
    OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager();

    // If this is an upperEntity, copy it and return.
    if (upperEntities != null && upperEntities.contains(entity)) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      return;
    }

    // Otherwise copy ancestors recursively.
    if (entity.isOWLClass()) {
      Set<OWLClass> superclasses =
          reasoner.getSuperClasses(entity.asOWLClass(), true).getFlattened();
      for (OWLClass superclass : superclasses) {
        OntologyHelper.copy(inputOntology, outputOntology, superclass, annotationProperties);
        outputManager.addAxiom(
            outputOntology, dataFactory.getOWLSubClassOfAxiom(entity.asOWLClass(), superclass));
        copyAncestorsAllIntermediates(
            inputOntology,
            outputOntology,
            reasoner,
            upperEntities,
            superclass,
            annotationProperties);
      }
    } else if (entity.isOWLAnnotationProperty()) {
      Collection<OWLAnnotationProperty> superProperties =
          EntitySearcher.getSuperProperties(entity.asOWLAnnotationProperty(), inputOntology, true);
      for (OWLAnnotationProperty superProperty : superProperties) {
        OntologyHelper.copy(inputOntology, outputOntology, superProperty, annotationProperties);
        outputManager.addAxiom(
            outputOntology,
            dataFactory.getOWLSubAnnotationPropertyOfAxiom(
                entity.asOWLAnnotationProperty(), superProperty));
        copyAncestorsAllIntermediates(
            inputOntology,
            outputOntology,
            reasoner,
            upperEntities,
            superProperty,
            annotationProperties);
      }
    } else if (entity.isOWLObjectProperty()) {
      Set<OWLObjectPropertyExpression> superProperties =
          reasoner.getSuperObjectProperties(entity.asOWLObjectProperty(), true).getFlattened();
      for (OWLObjectPropertyExpression superexpression : superProperties) {
        if (superexpression.isAnonymous()) {
          continue;
        }
        OWLObjectProperty superProperty = superexpression.asOWLObjectProperty();
        OntologyHelper.copy(inputOntology, outputOntology, superProperty, annotationProperties);
        outputManager.addAxiom(
            outputOntology,
            dataFactory.getOWLSubObjectPropertyOfAxiom(
                entity.asOWLObjectProperty(), superProperty));
        copyAncestorsAllIntermediates(
            inputOntology,
            outputOntology,
            reasoner,
            upperEntities,
            superProperty,
            annotationProperties);
      }
    } else if (entity.isOWLDataProperty()) {
      Set<OWLDataProperty> superProperties =
          reasoner.getSuperDataProperties(entity.asOWLDataProperty(), true).getFlattened();
      for (OWLDataProperty superProperty : superProperties) {
        OntologyHelper.copy(inputOntology, outputOntology, superProperty, annotationProperties);
        outputManager.addAxiom(
            outputOntology,
            dataFactory.getOWLSubDataPropertyOfAxiom(entity.asOWLDataProperty(), superProperty));
        copyAncestorsAllIntermediates(
            inputOntology,
            outputOntology,
            reasoner,
            upperEntities,
            superProperty,
            annotationProperties);
      }
    }

    // Annotate with rdfs:isDefinedBy (maybe)
    if (annotateSource) {
      maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
    }
  }

  /**
   * Given input and output ontologies, a set of upper entities, a target entity, a bottom entity
   * (from lower terms), and a set of annotation properties, copy the bottom entity and any
   * superclasses from the upper entities from the input ontology to the output ontology, along with
   * the specified annotations. No intermediate superclasses are included. The input ontology is not
   * changed.
   *
   * @param inputOntology the ontology to copy from
   * @param outputOntology the ontology to copy to
   * @param reasoner OWLReasoner to get superclasses and superproperties while maintaining structure
   * @param upperEntities the top level of entities, or null
   * @param entity the target entity to check if included in upper entities
   * @param bottomEntity the entity from lower terms to include
   * @param annotationProperties the annotations to copy, or null for all
   */
  private static void copyAncestorsNoIntermediates(
      OWLOntology inputOntology,
      OWLOntology outputOntology,
      OWLReasoner reasoner,
      Set<OWLEntity> upperEntities,
      OWLEntity entity,
      OWLEntity bottomEntity,
      Set<OWLAnnotationProperty> annotationProperties) {
    OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager();

    // If there are no upperEntities or if this is an upperEntity, copy it and return
    if (upperEntities == null || upperEntities.contains(entity)) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      return;
    }

    // Otherwise find the highest level ancestor that was included in upper-terms
    // NOTE(review): the copyAnnotations calls below pass null (all annotation
    // properties) instead of annotationProperties, unlike the descendant
    // methods — confirm whether that asymmetry is intentional.
    if (entity.isOWLClass()) {
      Set<OWLClass> superclasses =
          reasoner.getSuperClasses(entity.asOWLClass(), true).getFlattened();
      for (OWLClass superclass : superclasses) {
        if (upperEntities.contains(superclass)) {
          OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubClassOfAxiom(bottomEntity.asOWLClass(), superclass));
        } else {
          copyAncestorsNoIntermediates(
              inputOntology,
              outputOntology,
              reasoner,
              upperEntities,
              superclass,
              bottomEntity,
              annotationProperties);
        }
      }
    } else if (entity.isOWLAnnotationProperty()) {
      Collection<OWLAnnotationProperty> superProperties =
          EntitySearcher.getSuperProperties(entity.asOWLAnnotationProperty(), inputOntology, true);
      for (OWLAnnotationProperty superProperty : superProperties) {
        if (upperEntities.contains(superProperty)) {
          OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubAnnotationPropertyOfAxiom(
                  bottomEntity.asOWLAnnotationProperty(), superProperty));
        } else {
          copyAncestorsNoIntermediates(
              inputOntology,
              outputOntology,
              reasoner,
              upperEntities,
              superProperty,
              bottomEntity,
              annotationProperties);
        }
      }
    } else if (entity.isOWLObjectProperty()) {
      Set<OWLObjectPropertyExpression> superProperties =
          reasoner.getSuperObjectProperties(entity.asOWLObjectProperty(), true).getFlattened();
      for (OWLObjectPropertyExpression superexpression : superProperties) {
        if (superexpression.isAnonymous()) {
          continue;
        }
        OWLObjectProperty superProperty = superexpression.asOWLObjectProperty();
        if (upperEntities.contains(superProperty)) {
          OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubObjectPropertyOfAxiom(
                  bottomEntity.asOWLObjectProperty(), superProperty));
        } else {
          copyAncestorsNoIntermediates(
              inputOntology,
              outputOntology,
              reasoner,
              upperEntities,
              superProperty,
              bottomEntity,
              annotationProperties);
        }
      }
    } else if (entity.isOWLDataProperty()) {
      Set<OWLDataProperty> superProperties =
          reasoner.getSuperDataProperties(entity.asOWLDataProperty(), true).getFlattened();
      for (OWLDataProperty superProperty : superProperties) {
        if (upperEntities.contains(superProperty)) {
          OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubDataPropertyOfAxiom(
                  bottomEntity.asOWLDataProperty(), superProperty));
        } else {
          copyAncestorsNoIntermediates(
              inputOntology,
              outputOntology,
              reasoner,
              upperEntities,
              superProperty,
              bottomEntity,
              annotationProperties);
        }
      }
    }

    // Annotate with rdfs:isDefinedBy (maybe)
    if (annotateSource) {
      maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
    }
  }

  /**
   * Given input and output ontologies, a target entity, and a set of annotation properties, copy
   * the target entity and all its named descendants (recursively) from the input ontology to the
   * output ontology, along with the specified annotations. The input ontology is not changed.
   *
   * @param inputOntology the ontology to copy from
   * @param outputOntology the ontology to copy to
   * @param entity the target entity that will have its descendants copied
   * @param annotationProperties the annotations to copy, or null for all
   */
  private static void copyDescendantsAllIntermediates(
      OWLOntology inputOntology,
      OWLOntology outputOntology,
      OWLEntity entity,
      Set<OWLAnnotationProperty> annotationProperties) {
    OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager();

    // Copy descendants recursively.
    if (entity.isOWLClass()) {
      Collection<OWLClassExpression> subClasses =
          EntitySearcher.getSubClasses(entity.asOWLClass(), inputOntology);
      for (OWLClassExpression subExpression : subClasses) {
        if (subExpression.isAnonymous()) {
          continue;
        }
        OWLClass subClass = subExpression.asOWLClass();
        OntologyHelper.copy(inputOntology, outputOntology, subClass, annotationProperties);
        outputManager.addAxiom(
            outputOntology, dataFactory.getOWLSubClassOfAxiom(subClass, entity.asOWLClass()));
        copyDescendantsAllIntermediates(
            inputOntology, outputOntology, subClass, annotationProperties);
      }
    } else if (entity.isOWLAnnotationProperty()) {
      Collection<OWLAnnotationProperty> subProperties =
          EntitySearcher.getSubProperties(entity.asOWLAnnotationProperty(), inputOntology, true);
      for (OWLAnnotationProperty subProperty : subProperties) {
        OntologyHelper.copy(inputOntology, outputOntology, subProperty, annotationProperties);
        outputManager.addAxiom(
            outputOntology,
            dataFactory.getOWLSubAnnotationPropertyOfAxiom(
                subProperty, entity.asOWLAnnotationProperty()));
        copyDescendantsAllIntermediates(
            inputOntology, outputOntology, subProperty, annotationProperties);
      }
    } else if (entity.isOWLObjectProperty()) {
      // FIX: was EntitySearcher.getSuperProperties, which walked UP the
      // hierarchy inside a descendant-copying method and produced wrong
      // SubObjectPropertyOf axioms; every sibling branch uses getSub*.
      Collection<OWLObjectPropertyExpression> subProperties =
          EntitySearcher.getSubProperties(entity.asOWLObjectProperty(), inputOntology);
      for (OWLObjectPropertyExpression subExpression : subProperties) {
        if (subExpression.isAnonymous()) {
          continue;
        }
        OWLObjectProperty subProperty = subExpression.asOWLObjectProperty();
        OntologyHelper.copy(inputOntology, outputOntology, subProperty, annotationProperties);
        outputManager.addAxiom(
            outputOntology,
            dataFactory.getOWLSubObjectPropertyOfAxiom(subProperty, entity.asOWLObjectProperty()));
        copyDescendantsAllIntermediates(
            inputOntology, outputOntology, subProperty, annotationProperties);
      }
    } else if (entity.isOWLDataProperty()) {
      Collection<OWLDataPropertyExpression> subProperties =
          EntitySearcher.getSubProperties(entity.asOWLDataProperty(), inputOntology);
      for (OWLDataPropertyExpression subExpression : subProperties) {
        OWLDataProperty subProperty = subExpression.asOWLDataProperty();
        OntologyHelper.copy(inputOntology, outputOntology, subProperty, annotationProperties);
        outputManager.addAxiom(
            outputOntology,
            dataFactory.getOWLSubDataPropertyOfAxiom(subProperty, entity.asOWLDataProperty()));
        copyDescendantsAllIntermediates(
            inputOntology, outputOntology, subProperty, annotationProperties);
      }
    }
  }

  /**
   * Given input and output ontologies, a top entity (from upper terms), a target entity, and a set
   * of annotation properties, copy the leaf descendants of the target entity from the input
   * ontology to the output ontology, attached directly to the top entity, along with the specified
   * annotations. No intermediate entities are included. The input ontology is not changed.
   *
   * @param inputOntology the ontology to copy from
   * @param outputOntology the ontology to copy to
   * @param topEntity the entity for upper terms to include
   * @param entity the target entity to check if included in upper entities
   * @param annotationProperties the annotations to copy, or null for all
   */
  private static void copyDescendantsNoIntermediates(
      OWLOntology inputOntology,
      OWLOntology outputOntology,
      OWLEntity topEntity,
      OWLEntity entity,
      Set<OWLAnnotationProperty> annotationProperties) {
    OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager();

    // Find the leaf descendants and attach them directly under topEntity
    if (entity.isOWLClass()) {
      Collection<OWLClassExpression> subClasses =
          EntitySearcher.getSubClasses(entity.asOWLClass(), inputOntology);
      for (OWLClassExpression subExpression : subClasses) {
        if (subExpression.isAnonymous()) {
          continue;
        }
        OWLClass subClass = subExpression.asOWLClass();
        // Find out if this class has any subclasses
        Collection<OWLClassExpression> subSubClasses =
            EntitySearcher.getSubClasses(subClass, inputOntology);
        if (subSubClasses.isEmpty()) {
          OntologyHelper.copyAnnotations(
              inputOntology, outputOntology, entity, annotationProperties);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubClassOfAxiom(subClass, topEntity.asOWLClass()));
        } else {
          copyDescendantsNoIntermediates(
              inputOntology, outputOntology, topEntity, subClass, annotationProperties);
        }
      }
    } else if (entity.isOWLAnnotationProperty()) {
      Collection<OWLAnnotationProperty> subProperties =
          EntitySearcher.getSubProperties(entity.asOWLAnnotationProperty(), inputOntology, true);
      for (OWLAnnotationProperty subProperty : subProperties) {
        // Find out if this property has any subproperties
        Collection<OWLAnnotationProperty> subSubProperties =
            EntitySearcher.getSubProperties(subProperty, inputOntology);
        if (subSubProperties.isEmpty()) {
          OntologyHelper.copyAnnotations(
              inputOntology, outputOntology, entity, annotationProperties);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubAnnotationPropertyOfAxiom(
                  subProperty, topEntity.asOWLAnnotationProperty()));
        } else {
          copyDescendantsNoIntermediates(
              inputOntology, outputOntology, topEntity, subProperty, annotationProperties);
        }
      }
    } else if (entity.isOWLObjectProperty()) {
      Collection<OWLObjectPropertyExpression> subProperties =
          EntitySearcher.getSubProperties(entity.asOWLObjectProperty(), inputOntology);
      for (OWLObjectPropertyExpression subExpression : subProperties) {
        if (subExpression.isAnonymous()) {
          continue;
        }
        OWLObjectProperty subProperty = subExpression.asOWLObjectProperty();
        // Find out if this property has any subproperties
        Collection<OWLObjectPropertyExpression> subSubProperties =
            EntitySearcher.getSubProperties(subProperty, inputOntology);
        if (subSubProperties.isEmpty()) {
          OntologyHelper.copyAnnotations(
              inputOntology, outputOntology, entity, annotationProperties);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubObjectPropertyOfAxiom(
                  subProperty, topEntity.asOWLObjectProperty()));
        } else {
          copyDescendantsNoIntermediates(
              inputOntology, outputOntology, topEntity, subProperty, annotationProperties);
        }
      }
    } else if (entity.isOWLDataProperty()) {
      Collection<OWLDataPropertyExpression> subProperties =
          EntitySearcher.getSubProperties(entity.asOWLDataProperty(), inputOntology);
      for (OWLDataPropertyExpression subExpression : subProperties) {
        OWLDataProperty subProperty = subExpression.asOWLDataProperty();
        // Find out if this property has any subproperties
        Collection<OWLDataPropertyExpression> subSubProperties =
            EntitySearcher.getSubProperties(subProperty, inputOntology);
        if (subSubProperties.isEmpty()) {
          OntologyHelper.copyAnnotations(
              inputOntology, outputOntology, entity, annotationProperties);
          outputManager.addAxiom(
              outputOntology,
              dataFactory.getOWLSubDataPropertyOfAxiom(subProperty, topEntity.asOWLDataProperty()));
        } else {
          copyDescendantsNoIntermediates(
              inputOntology, outputOntology, topEntity, subProperty, annotationProperties);
        }
      }
    }
  }

  /**
   * Given an ontology, a set of upper-level IRIs, and a set of annotation properties, return a new
   * ontology with just those terms and their named descendants, their subclass relations, and the
   * selected annotations. The input ontology is not changed.
   *
   * @param inputOntology the ontology to extract from
   * @param upperIRIs these terms and their descendants will be copied
   * @param annotationProperties the annotation properties to copy; if null, all will be copied
   * @param options map of options
   * @param inputSourceMap map of source IRIs (or null)
   * @return a new ontology with the target terms and their named descendants
   * @throws OWLOntologyCreationException on problems creating new ontology
   */
  public static OWLOntology getDescendants(
      OWLOntology inputOntology,
      Set<IRI> upperIRIs,
      Set<OWLAnnotationProperty> annotationProperties,
      Map<String, String> options,
      Map<IRI, IRI> inputSourceMap)
      throws OWLOntologyCreationException {
    logger.debug("Extract with MIREOT ...");

    // Get options (fills the static intermediates/annotateSource/sourceMap fields)
    setOptions(options, inputSourceMap);

    OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager();
    OWLOntology outputOntology = outputManager.createOntology();

    // Get all entities in the ontology (preferring Class over NamedIndividual)
    Set<OWLEntity> entities = getAllEntities(inputOntology);

    Set<OWLEntity> upperEntities =
        entities.stream().filter(e -> upperIRIs.contains(e.getIRI())).collect(Collectors.toSet());
    for (OWLEntity entity : upperEntities) {
      OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties);
      if ("none".equals(intermediates)) {
        copyDescendantsNoIntermediates(
            inputOntology, outputOntology, entity, entity, annotationProperties);
      } else {
        copyDescendantsAllIntermediates(
            inputOntology, outputOntology, entity, annotationProperties);
      }
      if (annotateSource) {
        maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap);
      }
    }

    // Maybe remove unnecessary intermediates
    if ("minimal".equalsIgnoreCase(intermediates)) {
      OntologyHelper.collapseOntology(outputOntology, upperIRIs);
    }

    return outputOntology;
  }

  /**
   * Given an ontology, a manager for that ontology, an entity to annotate, and a map of source
   * replacements, add the rdfs:isDefinedBy annotation to the entity, unless the entity already
   * carries one.
   *
   * @param ontology OWLOntology to add annotation in
   * @param manager OWLManager for the ontology
   * @param entity OWLEntity to add annotation on
   * @param sourceMap term-to-source map
   */
  private static void maybeAnnotateSource(
      OWLOntology ontology, OWLOntologyManager manager, OWLEntity entity, Map<IRI, IRI> sourceMap) {
    Set<OWLAnnotationValue> existingValues =
        OntologyHelper.getAnnotationValues(ontology, isDefinedBy, entity.getIRI());
    // Only annotate if there is no existing isDefinedBy value.
    if (existingValues == null || existingValues.size() == 0) {
      OWLAnnotationAxiom isDefinedBy = ExtractOperation.getIsDefinedBy(entity, sourceMap);
      if (isDefinedBy != null) {
        manager.addAxiom(ontology, isDefinedBy);
      }
    }
  }

  /**
   * Given a map of options and an optional source map, set the MIREOT options.
   *
   * @param options map of options
   * @param inputSourceMap map of source IRIs (or null)
   */
  private static void setOptions(Map<String, String> options, Map<IRI, IRI> inputSourceMap) {
    if (options == null) {
      options = ExtractOperation.getDefaultOptions();
    }
    // "intermediates" defaults to "all"; other values: "none", "minimal".
    intermediates = OptionsHelper.getOption(options, "intermediates", "all");
    annotateSource = OptionsHelper.optionIsTrue(options, "annotate-with-sources");
    sourceMap = inputSourceMap;
  }
}
robot-core/src/main/java/org/obolibrary/robot/MireotOperation.java
package org.obolibrary.robot; import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory; import org.semanticweb.owlapi.search.EntitySearcher; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; /** * Implements several variations on MIREOT, as first described in "MIREOT: The minimum information * to reference an external ontology term" (<a * href="http://dx.doi.org/10.3233/AO-2011-0087">link</a>). * * @author <a href="mailto:[email protected]">James A. Overton</a> */ public class MireotOperation { /** Logger. */ private static final Logger logger = LoggerFactory.getLogger(MireotOperation.class); /** Shared data factory. */ private static OWLDataFactory dataFactory = new OWLDataFactoryImpl(); /** RDFS isDefinedBy annotation property. */ private static OWLAnnotationProperty isDefinedBy = dataFactory.getRDFSIsDefinedBy(); /** Specify how to handle intermediates. */ private static String intermediates; /** Specify if source should be annotated. */ private static boolean annotateSource; /** Specify a map of sources. */ private static Map<IRI, IRI> sourceMap; /** * Get a set of default annotation properties. Currenly includes only RDFS label. 
* * @return a set of annotation properties */ public static Set<OWLAnnotationProperty> getDefaultAnnotationProperties() { Set<OWLAnnotationProperty> annotationProperties = new HashSet<>(); annotationProperties.add(dataFactory.getRDFSLabel()); return annotationProperties; } /** * Given an ontology, a set of upper-level IRIs, a set of lower-level IRIs, and a set of * annotation properties, return a new ontology with just the named ancestors of those terms, * their subclass relations, and the selected annotations. The input ontology is not changed. * * @param inputOntology the ontology to extract from * @param upperIRIs ancestors will be copied up to and including these terms * @param lowerIRIs copy these terms and their superclasses * @param annotationProperties the annotation properties to copy; if null, all will be copied * @return a new ontology with the target terms and their named ancestors * @throws OWLOntologyCreationException on problems creating new ontology */ public static OWLOntology getAncestors( OWLOntology inputOntology, Set<IRI> upperIRIs, Set<IRI> lowerIRIs, Set<OWLAnnotationProperty> annotationProperties) throws OWLOntologyCreationException { return getAncestors(inputOntology, upperIRIs, lowerIRIs, annotationProperties, null, null); } /** * Given an input ontology, a set of upper IRIs, a set of lower IRIs, a set of annotation * properties (or null for all), and a map of extract options, get the ancestors of the lower IRIs * up to the upper IRIs. Include the specified annotation properties. 
* * @param inputOntology OWLOntology to extract from * @param upperIRIs top level IRIs * @param lowerIRIs bottom level IRIs * @param annotationProperties annotation properties to copy, or null for all * @param options map of extract options or null * @param inputSourceMap map of source IRIs to targets * @return extracted module * @throws OWLOntologyCreationException on problems creating the new ontology */ public static OWLOntology getAncestors( OWLOntology inputOntology, Set<IRI> upperIRIs, Set<IRI> lowerIRIs, Set<OWLAnnotationProperty> annotationProperties, Map<String, String> options, Map<IRI, IRI> inputSourceMap) throws OWLOntologyCreationException { logger.debug("Extract with MIREOT ..."); OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager(); // Get options setOptions(options, inputSourceMap); // The other OWLAPI extract methods use the source ontology IRI // so we'll use it here too. OWLOntology outputOntology = outputManager.createOntology(inputOntology.getOntologyID()); // Directly copy all upper entities Set<OWLEntity> upperEntities = OntologyHelper.getEntities(inputOntology, upperIRIs); for (OWLEntity entity : upperEntities) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } } OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); OWLReasoner reasoner = reasonerFactory.createReasoner(inputOntology); // For each lower entity, get the ancestors (all or none) Set<OWLEntity> lowerEntities = OntologyHelper.getEntities(inputOntology, lowerIRIs); for (OWLEntity entity : lowerEntities) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); if ("none".equals(intermediates)) { copyAncestorsNoIntermediates( inputOntology, outputOntology, reasoner, upperEntities, entity, entity, annotationProperties); } else { copyAncestorsAllIntermediates( inputOntology, outputOntology, 
reasoner, upperEntities, entity, annotationProperties); } if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } } // Maybe remove unnecessary intermediates if (intermediates.equalsIgnoreCase("minimal")) { Set<IRI> precious = new HashSet<>(); if (upperIRIs != null) { precious.addAll(upperIRIs); } precious.addAll(lowerIRIs); OntologyHelper.collapseOntology(outputOntology, precious); } return outputOntology; } /** * Given an ontology, a set of upper-level IRIs, a set of lower-level IRIs, a set of annotation * properties, and a boolean indiciating if rdfs:isDefinedBy should be added to copied classes, * return a new ontology with just the named ancestors of those terms, their subclass relations, * and the selected annotations. The input ontology is not changed. * * @deprecated replaced by {@link #getAncestors(OWLOntology, Set, Set, Set)} * @param inputOntology the ontology to extract from * @param upperIRIs ancestors will be copied up to and including these terms * @param lowerIRIs copy these terms and their superclasses * @param annotationProperties the annotation properties to copy; if null, all will be copied * @param annotateSource if true, annotate copied classes with rdfs:isDefinedBy * @param sourceMap map of term IRI to source IRI * @return a new ontology with the target terms and their named ancestors * @throws OWLOntologyCreationException on problems creating new ontology */ @Deprecated public static OWLOntology getAncestors( OWLOntology inputOntology, Set<IRI> upperIRIs, Set<IRI> lowerIRIs, Set<OWLAnnotationProperty> annotationProperties, boolean annotateSource, Map<IRI, IRI> sourceMap) throws OWLOntologyCreationException { logger.debug("Extract with MIREOT ..."); OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); OWLReasoner reasoner = reasonerFactory.createReasoner(inputOntology); OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager(); // The other OWLAPI extract methods use the 
source ontology IRI // so we'll use it here too. OWLOntology outputOntology = outputManager.createOntology(inputOntology.getOntologyID()); Set<OWLEntity> upperEntities = OntologyHelper.getEntities(inputOntology, upperIRIs); for (OWLEntity entity : upperEntities) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } } Set<OWLEntity> lowerEntities = OntologyHelper.getEntities(inputOntology, lowerIRIs); for (OWLEntity entity : lowerEntities) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } copyAncestorsAllIntermediates( inputOntology, outputOntology, reasoner, upperEntities, entity, annotationProperties); } return outputOntology; } /** * Given an ontology, a set of upper-level IRIs, and a set of annotation properties, return a new * ontology with just those terms and their named descendants, their subclass relations, and the * selected annotations. The input ontology is not changed. * * @param inputOntology the ontology to extract from * @param upperIRIs these terms and their descendants will be copied * @param annotationProperties the annotation properties to copy; if null, all will be copied * @return a new ontology with the target terms and their named ancestors * @throws OWLOntologyCreationException on problems creating new ontology */ public static OWLOntology getDescendants( OWLOntology inputOntology, Set<IRI> upperIRIs, Set<OWLAnnotationProperty> annotationProperties) throws OWLOntologyCreationException { return getDescendants(inputOntology, upperIRIs, annotationProperties, null, null); } /** * Given an ontology, a set of upper-level IRIs, and a set of annotation properties, return a new * ontology with just those terms and their named descendants, their subclass relations, and the * selected annotations. 
The input ontology is not changed. * * @param inputOntology the ontology to extract from * @param upperIRIs these terms and their descendants will be copied * @param annotationProperties the annotation properties to copy; if null, all will be copied * @param options map of options * @param inputSourceMap map of source IRIs (or null) * @return a new ontology with the target terms and their named ancestors * @throws OWLOntologyCreationException on problems creating new ontology */ public static OWLOntology getDescendants( OWLOntology inputOntology, Set<IRI> upperIRIs, Set<OWLAnnotationProperty> annotationProperties, Map<String, String> options, Map<IRI, IRI> inputSourceMap) throws OWLOntologyCreationException { logger.debug("Extract with MIREOT ..."); // Get options setOptions(options, inputSourceMap); OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager(); OWLOntology outputOntology = outputManager.createOntology(); Set<OWLEntity> upperEntities = OntologyHelper.getEntities(inputOntology, upperIRIs); for (OWLEntity entity : upperEntities) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); if ("none".equals(intermediates)) { copyDescendantsNoIntermediates( inputOntology, outputOntology, entity, entity, annotationProperties); } else { copyDescendantsAllIntermediates( inputOntology, outputOntology, entity, annotationProperties); } if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } } if ("minimal".equalsIgnoreCase(intermediates)) { OntologyHelper.collapseOntology(outputOntology, upperIRIs); } return outputOntology; } /** * Given an ontology, a set of upper-level IRIs, and a set of annotation properties, return a new * ontology with just those terms and their named descendants, their subclass relations, and the * selected annotations. The input ontology is not changed. 
* * @deprecated replaced by {@link #getDescendants(OWLOntology, Set, Set, Map, Map)} * @param inputOntology the ontology to extract from * @param upperIRIs these terms and their descendants will be copied * @param annotationProperties the annotation properties to copy; if null, all will be copied * @param annotateSource if true, annotate copied classes with rdfs:isDefinedBy * @param sourceMap map of term IRI to source IRI * @return a new ontology with the target terms and their named ancestors * @throws OWLOntologyCreationException on problems creating new ontology */ @Deprecated public static OWLOntology getDescendants( OWLOntology inputOntology, Set<IRI> upperIRIs, Set<OWLAnnotationProperty> annotationProperties, boolean annotateSource, Map<IRI, IRI> sourceMap) throws OWLOntologyCreationException { logger.debug("Extract with MIREOT ..."); OWLOntologyManager outputManager = OWLManager.createOWLOntologyManager(); OWLOntology outputOntology = outputManager.createOntology(); Set<OWLEntity> upperEntities = OntologyHelper.getEntities(inputOntology, upperIRIs); for (OWLEntity entity : upperEntities) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } copyDescendantsAllIntermediates(inputOntology, outputOntology, entity, annotationProperties); } return outputOntology; } /** * Given input and output ontologies, a set of upper entitities, a target entity, and a set of * annotation properties, copy the target entity and all its named ancestors (recursively) from * the input ontology to the output ontology, along with the specified annotations. The input * ontology is not changed. 
* * @param inputOntology the ontology to copy from * @param outputOntology the ontology to copy to * @param reasoner OWLReasoner to get superclasses and superproperties while maintaining structure * @param upperEntities the top level of entities, or null * @param entity the target entity that will have its ancestors copied * @param annotationProperties the annotations to copy, or null for all */ private static void copyAncestorsAllIntermediates( OWLOntology inputOntology, OWLOntology outputOntology, OWLReasoner reasoner, Set<OWLEntity> upperEntities, OWLEntity entity, Set<OWLAnnotationProperty> annotationProperties) { OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager(); // If this is an upperEntity, copy it and return. if (upperEntities != null && upperEntities.contains(entity)) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); return; } // Otherwise copy ancestors recursively. if (entity.isOWLClass()) { Set<OWLClass> superclasses = reasoner.getSuperClasses(entity.asOWLClass(), true).getFlattened(); for (OWLClass superclass : superclasses) { OntologyHelper.copy(inputOntology, outputOntology, superclass, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubClassOfAxiom(entity.asOWLClass(), superclass)); copyAncestorsAllIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superclass, annotationProperties); } } else if (entity.isOWLAnnotationProperty()) { Collection<OWLAnnotationProperty> superProperties = EntitySearcher.getSuperProperties(entity.asOWLAnnotationProperty(), inputOntology, true); for (OWLAnnotationProperty superProperty : superProperties) { OntologyHelper.copy(inputOntology, outputOntology, superProperty, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubAnnotationPropertyOfAxiom( entity.asOWLAnnotationProperty(), superProperty)); copyAncestorsAllIntermediates( inputOntology, outputOntology, reasoner, upperEntities, 
superProperty, annotationProperties); } } else if (entity.isOWLObjectProperty()) { Set<OWLObjectPropertyExpression> superProperties = reasoner.getSuperObjectProperties(entity.asOWLObjectProperty(), true).getFlattened(); for (OWLObjectPropertyExpression superexpression : superProperties) { if (superexpression.isAnonymous()) { continue; } OWLObjectProperty superProperty = superexpression.asOWLObjectProperty(); OntologyHelper.copy(inputOntology, outputOntology, superProperty, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubObjectPropertyOfAxiom( entity.asOWLObjectProperty(), superProperty)); copyAncestorsAllIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superProperty, annotationProperties); } } else if (entity.isOWLDataProperty()) { Set<OWLDataProperty> superProperties = reasoner.getSuperDataProperties(entity.asOWLDataProperty(), true).getFlattened(); for (OWLDataProperty superProperty : superProperties) { OntologyHelper.copy(inputOntology, outputOntology, superProperty, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubDataPropertyOfAxiom(entity.asOWLDataProperty(), superProperty)); copyAncestorsAllIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superProperty, annotationProperties); } } // Annotate with rdfs:isDefinedBy (maybe) if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } } /** * Given input and output ontologies, a set of upper entities, a target entity, a bottom entity * (from lower terms), and a set of annotation properties, copy the bottom entity and any * superclasses from the upper entities from the input ontology to the output ontology, along with * the specified annotations. No intermediate superclasses are included. The input ontology is not * changed. 
* * @param inputOntology the ontology to copy from * @param outputOntology the ontology to copy to * @param reasoner OWLReasoner to get superclasses and superproperties while maintaining structure * @param upperEntities the top level of entities, or null * @param entity the target entity to check if included in upper entities * @param bottomEntity the entity from lower terms to include * @param annotationProperties the annotations to copy, or null for all */ private static void copyAncestorsNoIntermediates( OWLOntology inputOntology, OWLOntology outputOntology, OWLReasoner reasoner, Set<OWLEntity> upperEntities, OWLEntity entity, OWLEntity bottomEntity, Set<OWLAnnotationProperty> annotationProperties) { OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager(); // If there are no upperEntities or if this is an upperEntity, copy it and return if (upperEntities == null || upperEntities.contains(entity)) { OntologyHelper.copy(inputOntology, outputOntology, entity, annotationProperties); return; } // Otherwise find the highest level ancestor that was included in upper-terms if (entity.isOWLClass()) { Set<OWLClass> superclasses = reasoner.getSuperClasses(entity.asOWLClass(), true).getFlattened(); for (OWLClass superclass : superclasses) { if (upperEntities.contains(superclass)) { OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubClassOfAxiom(bottomEntity.asOWLClass(), superclass)); } else { copyAncestorsNoIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superclass, bottomEntity, annotationProperties); } } } else if (entity.isOWLAnnotationProperty()) { Collection<OWLAnnotationProperty> superProperties = EntitySearcher.getSuperProperties(entity.asOWLAnnotationProperty(), inputOntology, true); for (OWLAnnotationProperty superProperty : superProperties) { if (upperEntities.contains(superProperty)) { OntologyHelper.copyAnnotations(inputOntology, 
outputOntology, entity, null); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubAnnotationPropertyOfAxiom( bottomEntity.asOWLAnnotationProperty(), superProperty)); } else { copyAncestorsNoIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superProperty, bottomEntity, annotationProperties); } } } else if (entity.isOWLObjectProperty()) { Set<OWLObjectPropertyExpression> superProperties = reasoner.getSuperObjectProperties(entity.asOWLObjectProperty(), true).getFlattened(); for (OWLObjectPropertyExpression superexpression : superProperties) { if (superexpression.isAnonymous()) { continue; } OWLObjectProperty superProperty = superexpression.asOWLObjectProperty(); if (upperEntities.contains(superProperty)) { OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubObjectPropertyOfAxiom( bottomEntity.asOWLObjectProperty(), superProperty)); } else { copyAncestorsNoIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superProperty, bottomEntity, annotationProperties); } } } else if (entity.isOWLDataProperty()) { Set<OWLDataProperty> superProperties = reasoner.getSuperDataProperties(entity.asOWLDataProperty(), true).getFlattened(); for (OWLDataProperty superProperty : superProperties) { if (upperEntities.contains(superProperty)) { OntologyHelper.copyAnnotations(inputOntology, outputOntology, entity, null); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubDataPropertyOfAxiom( bottomEntity.asOWLDataProperty(), superProperty)); } else { copyAncestorsNoIntermediates( inputOntology, outputOntology, reasoner, upperEntities, superProperty, bottomEntity, annotationProperties); } } } // Annotate with rdfs:isDefinedBy (maybe) if (annotateSource) { maybeAnnotateSource(outputOntology, outputManager, entity, sourceMap); } } /** * Given input and output ontologies, a target entity, and a set of annotation properties, copy * the target entity and 
all its named ancestors (recursively) from the input ontology to the * output ontology, along with the specified annotations. The input ontology is not changed. * * @param inputOntology the ontology to copy from * @param outputOntology the ontology to copy to * @param entity the target entity that will have its ancestors copied * @param annotationProperties the annotations to copy, or null for all */ private static void copyDescendantsAllIntermediates( OWLOntology inputOntology, OWLOntology outputOntology, OWLEntity entity, Set<OWLAnnotationProperty> annotationProperties) { OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager(); // Otherwise copy ancestors recursively. if (entity.isOWLClass()) { Collection<OWLClassExpression> subClasses = EntitySearcher.getSubClasses(entity.asOWLClass(), inputOntology); for (OWLClassExpression subExpression : subClasses) { if (subExpression.isAnonymous()) { continue; } OWLClass subClass = subExpression.asOWLClass(); OntologyHelper.copy(inputOntology, outputOntology, subClass, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubClassOfAxiom(subClass, entity.asOWLClass())); copyDescendantsAllIntermediates( inputOntology, outputOntology, subClass, annotationProperties); } } else if (entity.isOWLAnnotationProperty()) { Collection<OWLAnnotationProperty> subProperties = EntitySearcher.getSubProperties(entity.asOWLAnnotationProperty(), inputOntology, true); for (OWLAnnotationProperty subProperty : subProperties) { OntologyHelper.copy(inputOntology, outputOntology, subProperty, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubAnnotationPropertyOfAxiom( subProperty, entity.asOWLAnnotationProperty())); copyDescendantsAllIntermediates( inputOntology, outputOntology, subProperty, annotationProperties); } } else if (entity.isOWLObjectProperty()) { Collection<OWLObjectPropertyExpression> superProperties = 
EntitySearcher.getSuperProperties(entity.asOWLObjectProperty(), inputOntology); for (OWLObjectPropertyExpression subExpression : superProperties) { if (subExpression.isAnonymous()) { continue; } OWLObjectProperty subProperty = subExpression.asOWLObjectProperty(); OntologyHelper.copy(inputOntology, outputOntology, subProperty, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubObjectPropertyOfAxiom(subProperty, entity.asOWLObjectProperty())); copyDescendantsAllIntermediates( inputOntology, outputOntology, subProperty, annotationProperties); } } else if (entity.isOWLDataProperty()) { Collection<OWLDataPropertyExpression> subProperties = EntitySearcher.getSubProperties(entity.asOWLDataProperty(), inputOntology); for (OWLDataPropertyExpression subExpression : subProperties) { OWLDataProperty subProperty = subExpression.asOWLDataProperty(); OntologyHelper.copy(inputOntology, outputOntology, subProperty, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubDataPropertyOfAxiom(subProperty, entity.asOWLDataProperty())); copyDescendantsAllIntermediates( inputOntology, outputOntology, subProperty, annotationProperties); } } } /** * Given input and output ontologies, a top entity (from upper terms), a target entity, a and a * set of annotation properties, copy the bottom entity and any superclasses from the upper * entities from the input ontology to the output ontology, along with the specified annotations. * No intermediate superclasses are included. The input ontology is not changed. 
* * @param inputOntology the ontology to copy from * @param outputOntology the ontology to copy to * @param topEntity the entity for upper terms to include * @param entity the target entity to check if included in upper entities * @param annotationProperties the annotations to copy, or null for all */ private static void copyDescendantsNoIntermediates( OWLOntology inputOntology, OWLOntology outputOntology, OWLEntity topEntity, OWLEntity entity, Set<OWLAnnotationProperty> annotationProperties) { OWLOntologyManager outputManager = outputOntology.getOWLOntologyManager(); // Otherwise find the highest level ancestor that was included in upper-terms if (entity.isOWLClass()) { Collection<OWLClassExpression> subClasses = EntitySearcher.getSubClasses(entity.asOWLClass(), inputOntology); for (OWLClassExpression subExpression : subClasses) { if (subExpression.isAnonymous()) { continue; } OWLClass subClass = subExpression.asOWLClass(); // Find out if this class has any subclasses Collection<OWLClassExpression> subSubClasses = EntitySearcher.getSubClasses(subClass, inputOntology); if (subSubClasses.isEmpty()) { OntologyHelper.copyAnnotations( inputOntology, outputOntology, entity, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubClassOfAxiom(subClass, topEntity.asOWLClass())); } else { copyDescendantsNoIntermediates( inputOntology, outputOntology, topEntity, subClass, annotationProperties); } } } else if (entity.isOWLAnnotationProperty()) { Collection<OWLAnnotationProperty> subProperties = EntitySearcher.getSubProperties(entity.asOWLAnnotationProperty(), inputOntology, true); for (OWLAnnotationProperty subProperty : subProperties) { // Find out if this property has any subproperties Collection<OWLAnnotationProperty> subSubProperties = EntitySearcher.getSubProperties(subProperty, inputOntology); if (subSubProperties.isEmpty()) { OntologyHelper.copyAnnotations( inputOntology, outputOntology, entity, annotationProperties); 
outputManager.addAxiom( outputOntology, dataFactory.getOWLSubAnnotationPropertyOfAxiom( subProperty, topEntity.asOWLAnnotationProperty())); } else { copyDescendantsNoIntermediates( inputOntology, outputOntology, topEntity, subProperty, annotationProperties); } } } else if (entity.isOWLObjectProperty()) { Collection<OWLObjectPropertyExpression> subProperties = EntitySearcher.getSubProperties(entity.asOWLObjectProperty(), inputOntology); for (OWLObjectPropertyExpression subExpression : subProperties) { if (subExpression.isAnonymous()) { continue; } OWLObjectProperty subProperty = subExpression.asOWLObjectProperty(); // Find out if this property has any subproperties Collection<OWLObjectPropertyExpression> subSubProperties = EntitySearcher.getSubProperties(subProperty, inputOntology); if (subSubProperties.isEmpty()) { OntologyHelper.copyAnnotations( inputOntology, outputOntology, entity, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubObjectPropertyOfAxiom( subProperty, topEntity.asOWLObjectProperty())); } else { copyDescendantsNoIntermediates( inputOntology, outputOntology, topEntity, subProperty, annotationProperties); } } } else if (entity.isOWLDataProperty()) { Collection<OWLDataPropertyExpression> subProperties = EntitySearcher.getSubProperties(entity.asOWLDataProperty(), inputOntology); for (OWLDataPropertyExpression subExpression : subProperties) { OWLDataProperty subProperty = subExpression.asOWLDataProperty(); // Find out if this property has any subproperties Collection<OWLDataPropertyExpression> subSubProperties = EntitySearcher.getSubProperties(subProperty, inputOntology); if (subSubProperties.isEmpty()) { OntologyHelper.copyAnnotations( inputOntology, outputOntology, entity, annotationProperties); outputManager.addAxiom( outputOntology, dataFactory.getOWLSubDataPropertyOfAxiom(subProperty, topEntity.asOWLDataProperty())); } else { copyDescendantsNoIntermediates( inputOntology, outputOntology, topEntity, subProperty, 
annotationProperties); } } } } /** * Given an ontology, a manager for that ontology, an entity to annotate, and a map of source * replacements, add the rdfs:isDefinedBy annotation to the entity. * * @param ontology OWLOntology to add annotation in * @param manager OWLManager for the ontology * @param entity OWLEntity to add annotation on * @param sourceMap term-to-source map */ private static void maybeAnnotateSource( OWLOntology ontology, OWLOntologyManager manager, OWLEntity entity, Map<IRI, IRI> sourceMap) { Set<OWLAnnotationValue> existingValues = OntologyHelper.getAnnotationValues(ontology, isDefinedBy, entity.getIRI()); if (existingValues == null || existingValues.size() == 0) { manager.addAxiom(ontology, ExtractOperation.getIsDefinedBy(entity, sourceMap)); } } /** * Given a map of options and an optional source map, set the MIREOT options. * * @param options map of options * @param inputSourceMap map of source IRIs (or null) */ private static void setOptions(Map<String, String> options, Map<IRI, IRI> inputSourceMap) { if (options == null) { options = ExtractOperation.getDefaultOptions(); } intermediates = OptionsHelper.getOption(options, "intermediates", "all"); annotateSource = OptionsHelper.optionIsTrue(options, "annotate-with-sources"); sourceMap = inputSourceMap; } }
Fix for overlapping classes/individuals
robot-core/src/main/java/org/obolibrary/robot/MireotOperation.java
Fix for overlapping classes/individuals
Java
mit
064390ee10939c82a965a49296ffa51e52bc4068
0
zingkg/renamer-java
package com.zingkg.renamer; import java.io.File; import java.util.Arrays; import java.util.LinkedList; import java.util.List; /** * Main class for running the program as a command line application. */ public class CommandLine { public static int ERROR = 1; public static int SUCCESS = 0; /** * @param args the command line arguments. * @return Success of operation. */ public static int main(String[] args) { if (args.length < 1) { System.err.println("--help for more information"); return ERROR; } switch (args[0]) { case "--help": printHelp(); return SUCCESS; case FileUtilities.RENAME_COMMAND: return rename(args); case FileUtilities.WIPE_RENAME_NUMBER_COMMAND: return wipeRenameNumber(args); case FileUtilities.NUMBER_PREPEND_COMMAND: return numberPrepend(args); case FileUtilities.NUMBER_APPEND_COMMAND: return numberAppend(args); case FileUtilities.DELETE_PRECEDING_NUM_PREPEND_COMMAND: return deletePrecedingNumPrepend(args); case FileUtilities.DELETE_ENDING_NUM_APPEND_COMMAND: return deleteEndingNumAppend(args); case FileUtilities.PREPEND_STRING_COMMAND: return prepend(args); case FileUtilities.APPEND_STRING_COMMAND: return append(args); default: return ERROR; } } private static void printHelp() { System.out.println("usage: renamer command <args> <files>"); System.out.println("Commands are:"); System.out.println(); System.out.println("Basic renamer program"); System.out.println( '\t' + FileUtilities.RENAME_COMMAND + "\tReplaces a string in the file with another" ); System.out.println( '\t' + FileUtilities.WIPE_RENAME_NUMBER_COMMAND + "\tWipes the file's name, uses " + "the input name, and numbers the files starting at 1" ); System.out.println(); System.out.println("Adding a name and numbering the files"); System.out.println( '\t' + FileUtilities.NUMBER_PREPEND_COMMAND + "\tPrepends a string and a number to " + "the file" ); System.out.println( '\t' + FileUtilities.NUMBER_APPEND_COMMAND + "\tAppends a string and a number to the " + "file" ); System.out.println(); 
System.out.println("Delete a number, add a name, and number the files"); System.out.println( '\t' + FileUtilities.DELETE_PRECEDING_NUM_PREPEND_COMMAND + "\tDeletes a preceding " + "number and prepends a number to the file" ); System.out.println( '\t' + FileUtilities.DELETE_ENDING_NUM_APPEND_COMMAND + "\tDeletes an ending number " + "and appends a number to the file" ); System.out.println(); System.out.println("Add a name to the file name"); System.out.println( '\t' + FileUtilities.PREPEND_STRING_COMMAND + "\tPrepends a string to the file(s)" ); System.out.println( '\t' + FileUtilities.APPEND_STRING_COMMAND + "\tAppends a string to the file(s)" ); } private static String getInputString(String[] args) { return args[1]; } private static int getStartingNumber(String[] args) { return Integer.parseInt(args[2]); } private static int rename(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.RENAME_COMMAND + " requires arguments: <find name> <replace name> " + "<files>" ); return ERROR; } List<String> files = getFiles(3, args); FileUtilities.renameFiles(files, FileUtilities.renameReplace(args[1], args[2], files)); return SUCCESS; } private static int wipeRenameNumber(String[] args) { if (args.length <= 2) { System.out.println( FileUtilities.WIPE_RENAME_NUMBER_COMMAND + " requires arguments: <input string> " + "<files>" ); return ERROR; } List<String> files = getFiles(2, args); List<File> newFiles = FileUtilities.wipeRenameAndNumber(getInputString(args), files); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int numberPrepend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.NUMBER_PREPEND_COMMAND + " requires arguments: <input string> " + "<starting number> <files>" ); return ERROR; } List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.numberPrepend( getInputString(args), getStartingNumber(args), files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } 
private static int numberAppend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.NUMBER_APPEND_COMMAND + " requires arguments: <input string> " + "<starting number> <files>" ); return ERROR; } List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.numberAppend( getInputString(args), getStartingNumber(args), files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int deletePrecedingNumPrepend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.DELETE_PRECEDING_NUM_PREPEND_COMMAND + " requires arguments: " + "<input string> <starting number> <files>" ); return ERROR; } final int startNum = Integer.parseInt(args[2]); List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.deletePrecedingAndNumberPrepend( getInputString(args), startNum, files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int deleteEndingNumAppend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.DELETE_ENDING_NUM_APPEND_COMMAND + " requires arguments: " + "<input string> <starting number> <files>" ); return ERROR; } final int startNum = Integer.parseInt(args[2]); List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.deleteEndingAndNumberAppend( getInputString(args), startNum, files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int prepend(String[] args) { if (args.length <= 2) { System.out.println( FileUtilities.PREPEND_STRING_COMMAND + " requires arguments: <input string> " + "<files>" ); return ERROR; } List<String> files = getFiles(2, args); List<File> newFiles = FileUtilities.prependString(getInputString(args), files); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int append(String[] args) { if (args.length <= 2) { System.out.println( FileUtilities.APPEND_STRING_COMMAND + " requires arguments: " + "<input string> <files>" ); return 
ERROR; } List<String> files = getFiles(2, args); List<File> newFiles = FileUtilities.appendString(getInputString(args), files); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } /** * Gets the files in the command line. * * @param start * The start of the command lines to start accumulating file names. * @param args * The arguments in the command line. * @return A list with all of the files extracted from the command line. */ private static List<String> getFiles(final int start, String[] args) { List<String> files = new LinkedList<>(); files.addAll(start, Arrays.asList(args)); return files; } }
src/main/java/com/zingkg/renamer/CommandLine.java
package com.zingkg.renamer; import java.io.File; import java.util.Arrays; import java.util.LinkedList; import java.util.List; /** * Main class for running the program as a command line application. */ public class CommandLine { public static int ERROR = 1; public static int SUCCESS = 0; /** * @param args the command line arguments. * @return Success of operation. */ public static int main(String[] args) { if (args.length < 1) { System.err.println("--help for more information"); return ERROR; } switch (args[0]) { case "--help": printHelp(); return SUCCESS; case FileUtilities.RENAME_COMMAND: return rename(args); case FileUtilities.WIPE_RENAME_NUMBER_COMMAND: return wipeRenameNumber(args); case FileUtilities.NUMBER_PREPEND_COMMAND: return numberPrepend(args); case FileUtilities.NUMBER_APPEND_COMMAND: return numberAppend(args); case FileUtilities.DELETE_PRECEDING_NUM_PREPEND_COMMAND: return deletePrecedingNumPrepend(args); case FileUtilities.DELETE_ENDING_NUM_APPEND_COMMAND: return deleteEndingNumAppend(args); case FileUtilities.PREPEND_STRING_COMMAND: return prepend(args); case FileUtilities.APPEND_STRING_COMMAND: return append(args); default: return ERROR; } } private static void printHelp() { System.out.println("usage: renamer command <args> <files>"); System.out.println("Commands are:"); System.out.println(); System.out.println("Basic renamer program"); System.out.println( '\t' + FileUtilities.RENAME_COMMAND + "\tReplaces a string in the file with another" ); System.out.println( '\t' + FileUtilities.WIPE_RENAME_NUMBER_COMMAND + "\tWipes the file's name and " + "uses the input name and numbers the files starting at 1" ); System.out.println(); System.out.println("Adding a name and numbering the files"); System.out.println( '\t' + FileUtilities.NUMBER_PREPEND_COMMAND + "\tPrepends a string and a number to " + "the file" ); System.out.println( '\t' + FileUtilities.NUMBER_APPEND_COMMAND + "\tAppends a string and a number to the " + "file" ); System.out.println(); 
System.out.println("Delete a number, add a name, and number the files"); System.out.println( '\t' + FileUtilities.DELETE_PRECEDING_NUM_PREPEND_COMMAND + "\tDeletes a preceding " + "number and prepends a number to the file" ); System.out.println( '\t' + FileUtilities.DELETE_ENDING_NUM_APPEND_COMMAND + "\tDeletes an ending number " + "and appends a number to the file" ); System.out.println(); System.out.println("Add a name to the file name"); System.out.println( '\t' + FileUtilities.PREPEND_STRING_COMMAND + "\tPrepends a string to the file(s)" ); System.out.println( '\t' + FileUtilities.APPEND_STRING_COMMAND + "\tAppends a string to the file(s)" ); } private static String getInputString(String[] args) { return args[1]; } private static int getStartingNumber(String[] args) { return Integer.parseInt(args[2]); } private static int rename(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.RENAME_COMMAND + " requires arguments: <find name> <replace name> " + "<files>" ); return ERROR; } List<String> files = getFiles(3, args); FileUtilities.renameFiles(files, FileUtilities.renameReplace(args[1], args[2], files)); return SUCCESS; } private static int wipeRenameNumber(String[] args) { if (args.length <= 2) { System.out.println( FileUtilities.WIPE_RENAME_NUMBER_COMMAND + " requires arguments: <input string> " + "<files>" ); return ERROR; } List<String> files = getFiles(2, args); List<File> newFiles = FileUtilities.wipeRenameAndNumber(getInputString(args), files); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int numberPrepend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.NUMBER_PREPEND_COMMAND + " requires arguments: <input string> " + "<starting number> <files>" ); return ERROR; } List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.numberPrepend( getInputString(args), getStartingNumber(args), files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } 
private static int numberAppend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.NUMBER_APPEND_COMMAND + " requires arguments: <input string> " + "<starting number> <files>" ); return ERROR; } List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.numberAppend( getInputString(args), getStartingNumber(args), files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int deletePrecedingNumPrepend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.DELETE_PRECEDING_NUM_PREPEND_COMMAND + " requires arguments: " + "<input string> <starting number> <files>" ); return ERROR; } final int startNum = Integer.parseInt(args[2]); List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.deletePrecedingAndNumberPrepend( getInputString(args), startNum, files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int deleteEndingNumAppend(String[] args) { if (args.length <= 3) { System.out.println( FileUtilities.DELETE_ENDING_NUM_APPEND_COMMAND + " requires arguments: " + "<input string> <starting number> <files>" ); return ERROR; } final int startNum = Integer.parseInt(args[2]); List<String> files = getFiles(3, args); List<File> newFiles = FileUtilities.deleteEndingAndNumberAppend( getInputString(args), startNum, files ); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int prepend(String[] args) { if (args.length <= 2) { System.out.println( FileUtilities.PREPEND_STRING_COMMAND + " requires arguments: <input string> " + "<files>" ); return ERROR; } List<String> files = getFiles(2, args); List<File> newFiles = FileUtilities.prependString(getInputString(args), files); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } private static int append(String[] args) { if (args.length <= 2) { System.out.println( FileUtilities.APPEND_STRING_COMMAND + " requires arguments: " + "<input string> <files>" ); return 
ERROR; } List<String> files = getFiles(2, args); List<File> newFiles = FileUtilities.appendString(getInputString(args), files); FileUtilities.renameFiles(files, newFiles); return SUCCESS; } /** * Gets the files in the command line. * * @param start * The start of the command lines to start accumulating file names. * @param args * The arguments in the command line. * @return A list with all of the files extracted from the command line. */ private static List<String> getFiles(final int start, String[] args) { List<String> files = new LinkedList<>(); files.addAll(start, Arrays.asList(args)); return files; } }
Fixed grammar on command line help.
src/main/java/com/zingkg/renamer/CommandLine.java
Fixed grammar on command line help.
Java
mit
8cb5f68dd7f0b050e8d51f4b5f3320c6bad8c9f4
0
domingos86/AntennaPod,drabux/AntennaPod,richq/AntennaPod,gk23/AntennaPod,twiceyuan/AntennaPod,keunes/AntennaPod,wooi/AntennaPod,mxttie/AntennaPod,LTUvac/AntennaPod,twiceyuan/AntennaPod,the100rabh/AntennaPod,waylife/AntennaPod,orelogo/AntennaPod,johnjohndoe/AntennaPod,TimB0/AntennaPod,TomHennen/AntennaPod,mxttie/AntennaPod,queenp/AntennaPod,corecode/AntennaPod,orelogo/AntennaPod,SpicyCurry/AntennaPod,mfietz/AntennaPod,orelogo/AntennaPod,TimB0/AntennaPod,wooi/AntennaPod,wangjun/AntennaPod,TomHennen/AntennaPod,richq/AntennaPod,wangjun/AntennaPod,gaohongyuan/AntennaPod,johnjohndoe/AntennaPod,hgl888/AntennaPod,mfietz/AntennaPod,hgl888/AntennaPod,domingos86/AntennaPod,corecode/AntennaPod,orelogo/AntennaPod,drabux/AntennaPod,LTUvac/AntennaPod,mfietz/AntennaPod,richq/AntennaPod,udif/AntennaPod,udif/AntennaPod,johnjohndoe/AntennaPod,gaohongyuan/AntennaPod,gk23/AntennaPod,TimB0/AntennaPod,wooi/AntennaPod,mfietz/AntennaPod,keunes/AntennaPod,mxttie/AntennaPod,queenp/AntennaPod,keunes/AntennaPod,richq/AntennaPod,LTUvac/AntennaPod,keunes/AntennaPod,corecode/AntennaPod,twiceyuan/AntennaPod,TomHennen/AntennaPod,johnjohndoe/AntennaPod,LTUvac/AntennaPod,mxttie/AntennaPod,drabux/AntennaPod,udif/AntennaPod,waylife/AntennaPod,waylife/AntennaPod,TomHennen/AntennaPod,wooi/AntennaPod,hgl888/AntennaPod,twiceyuan/AntennaPod,udif/AntennaPod,wangjun/AntennaPod,SpicyCurry/AntennaPod,gaohongyuan/AntennaPod,SpicyCurry/AntennaPod,gk23/AntennaPod,gaohongyuan/AntennaPod,TimB0/AntennaPod,drabux/AntennaPod,wangjun/AntennaPod,domingos86/AntennaPod,the100rabh/AntennaPod,queenp/AntennaPod,the100rabh/AntennaPod,the100rabh/AntennaPod,gk23/AntennaPod,domingos86/AntennaPod,SpicyCurry/AntennaPod,corecode/AntennaPod
package de.danoeh.antennapod.core.util.flattr; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.DialogInterface.OnClickListener; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.preference.PreferenceManager; import android.util.Log; import org.apache.commons.lang3.StringUtils; import org.shredzone.flattr4j.FlattrService; import org.shredzone.flattr4j.exception.FlattrException; import org.shredzone.flattr4j.model.Flattr; import org.shredzone.flattr4j.model.Thing; import org.shredzone.flattr4j.oauth.AccessToken; import org.shredzone.flattr4j.oauth.AndroidAuthenticator; import org.shredzone.flattr4j.oauth.Scope; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.EnumSet; import java.util.List; import java.util.TimeZone; import de.danoeh.antennapod.core.ClientConfig; import de.danoeh.antennapod.core.R; import de.danoeh.antennapod.core.asynctask.FlattrTokenFetcher; import de.danoeh.antennapod.core.storage.DBWriter; /** * Utility methods for doing something with flattr. 
*/ public class FlattrUtils { private static final String TAG = "FlattrUtils"; private static final String HOST_NAME = "de.danoeh.antennapod"; private static final String PREF_ACCESS_TOKEN = "de.danoeh.antennapod.preference.flattrAccessToken"; private static volatile AccessToken cachedToken; private static AndroidAuthenticator createAuthenticator() { return new AndroidAuthenticator(HOST_NAME, ClientConfig.flattrCallbacks.getFlattrAppKey(), ClientConfig.flattrCallbacks.getFlattrAppSecret()); } public static void startAuthProcess(Context context) throws FlattrException { AndroidAuthenticator auth = createAuthenticator(); auth.setScope(EnumSet.of(Scope.FLATTR)); Intent intent = auth.createAuthenticateIntent(); context.startActivity(intent); } private static AccessToken retrieveToken() { if (cachedToken == null) { Log.d(TAG, "Retrieving access token"); String token = PreferenceManager.getDefaultSharedPreferences( ClientConfig.applicationCallbacks.getApplicationInstance()) .getString(PREF_ACCESS_TOKEN, null); if (token != null) { Log.d(TAG, "Found access token. 
Caching."); cachedToken = new AccessToken(token); } else { Log.d(TAG, "No access token found"); return null; } } return cachedToken; } /** * Returns true if FLATTR_APP_KEY and FLATTR_APP_SECRET in BuildConfig are not null and not empty */ public static boolean hasAPICredentials() { return StringUtils.isNotEmpty(ClientConfig.flattrCallbacks.getFlattrAppKey()) && StringUtils.isNotEmpty(ClientConfig.flattrCallbacks.getFlattrAppSecret()); } public static boolean hasToken() { return retrieveToken() != null; } public static void storeToken(AccessToken token) { Log.d(TAG, "Storing token"); SharedPreferences.Editor editor = PreferenceManager .getDefaultSharedPreferences(ClientConfig.applicationCallbacks.getApplicationInstance()).edit(); if (token != null) { editor.putString(PREF_ACCESS_TOKEN, token.getToken()); } else { editor.putString(PREF_ACCESS_TOKEN, null); } editor.commit(); cachedToken = token; } public static void deleteToken() { Log.d(TAG, "Deleting flattr token"); storeToken(null); } public static void clickUrl(Context context, String url) throws FlattrException { if (hasToken()) { FlattrService fs = FlattrServiceCreator.getService(retrieveToken()); fs.flattr(url); } else { Log.e(TAG, "clickUrl was called with null access token"); } } public static List<Flattr> retrieveFlattredThings() throws FlattrException { ArrayList<Flattr> myFlattrs = new ArrayList<Flattr>(); if (hasToken()) { FlattrService fs = FlattrServiceCreator.getService(retrieveToken()); Calendar firstOfMonth = Calendar.getInstance(TimeZone.getTimeZone("UTC")); firstOfMonth.set(Calendar.MILLISECOND, 0); firstOfMonth.set(Calendar.SECOND, 0); firstOfMonth.set(Calendar.MINUTE, 0); firstOfMonth.set(Calendar.HOUR_OF_DAY, 0); firstOfMonth.set(Calendar.DAY_OF_MONTH, Calendar.getInstance().getActualMinimum(Calendar.DAY_OF_MONTH)); Date firstOfMonthDate = firstOfMonth.getTime(); // subscriptions some times get flattrd slightly before midnight - give it an hour leeway firstOfMonthDate = new 
Date(firstOfMonthDate.getTime() - 60 * 60 * 1000); final int FLATTR_COUNT = 30; final int FLATTR_MAXPAGE = 5; for (int page = 0; page < FLATTR_MAXPAGE; page++) { for (Flattr fl : fs.getMyFlattrs(FLATTR_COUNT, page)) { if (fl.getCreated().after(firstOfMonthDate)) myFlattrs.add(fl); else break; } } Log.d(TAG, "Got my flattrs list of length " + Integer.toString(myFlattrs.size()) + " comparison date" + firstOfMonthDate); for (Flattr fl : myFlattrs) { Thing thing = fl.getThing(); Log.d(TAG, "Flattr thing: " + fl.getThingId() + " name: " + thing.getTitle() + " url: " + thing.getUrl() + " on: " + fl.getCreated()); } } else { Log.e(TAG, "retrieveFlattrdThings was called with null access token"); } return myFlattrs; } public static void handleCallback(Context context, Uri uri) { AndroidAuthenticator auth = createAuthenticator(); new FlattrTokenFetcher(context, auth, uri).executeAsync(); } public static void revokeAccessToken(Context context) { Log.d(TAG, "Revoking access token"); deleteToken(); FlattrServiceCreator.deleteFlattrService(); showRevokeDialog(context); DBWriter.clearAllFlattrStatus(context); } // ------------------------------------------------ DIALOGS public static void showRevokeDialog(final Context context) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.access_revoked_title); builder.setMessage(R.string.access_revoked_info); builder.setNeutralButton(android.R.string.ok, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } /** * Opens a dialog that ask the user to either connect the app with flattr or to be redirected to * the thing's website. * If no API credentials are available, the user will immediately be redirected to the thing's website. 
*/ public static void showNoTokenDialogOrRedirect(final Context context, final String url) { if (hasAPICredentials()) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.no_flattr_token_title); builder.setMessage(R.string.no_flattr_token_msg); builder.setPositiveButton(R.string.authenticate_now_label, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { context.startActivity( ClientConfig.flattrCallbacks.getFlattrAuthenticationActivityIntent(context)); } } ); builder.setNegativeButton(R.string.visit_website_label, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { Uri uri = Uri.parse(url); context.startActivity(new Intent(Intent.ACTION_VIEW, uri)); } } ); builder.create().show(); } else { Uri uri = Uri.parse(url); context.startActivity(new Intent(Intent.ACTION_VIEW, uri)); } } public static void showErrorDialog(final Context context, final String msg) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.error_label); builder.setMessage(msg); builder.setNeutralButton(android.R.string.ok, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } }
core/src/main/java/de/danoeh/antennapod/core/util/flattr/FlattrUtils.java
package de.danoeh.antennapod.core.util.flattr; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.DialogInterface.OnClickListener; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.preference.PreferenceManager; import android.util.Log; import org.apache.commons.lang3.StringUtils; import org.shredzone.flattr4j.FlattrService; import org.shredzone.flattr4j.exception.FlattrException; import org.shredzone.flattr4j.model.Flattr; import org.shredzone.flattr4j.model.Thing; import org.shredzone.flattr4j.oauth.AccessToken; import org.shredzone.flattr4j.oauth.AndroidAuthenticator; import org.shredzone.flattr4j.oauth.Scope; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.EnumSet; import java.util.List; import java.util.TimeZone; import de.danoeh.antennapod.core.ClientConfig; import de.danoeh.antennapod.core.R; import de.danoeh.antennapod.core.asynctask.FlattrTokenFetcher; import de.danoeh.antennapod.core.storage.DBWriter; /** * Utility methods for doing something with flattr. */ public class FlattrUtils { private static final String TAG = "FlattrUtils"; private static final String HOST_NAME = "de.danoeh.antennapod"; private static final String PREF_ACCESS_TOKEN = "de.danoeh.antennapod.preference.flattrAccessToken"; // Human-readable flattr-page. 
public static final String APP_LINK = "https://flattr.com/thing/745609/"; public static final String APP_THING_ID = "745609"; private static volatile AccessToken cachedToken; private static AndroidAuthenticator createAuthenticator() { return new AndroidAuthenticator(HOST_NAME, ClientConfig.flattrCallbacks.getFlattrAppKey(), ClientConfig.flattrCallbacks.getFlattrAppSecret()); } public static void startAuthProcess(Context context) throws FlattrException { AndroidAuthenticator auth = createAuthenticator(); auth.setScope(EnumSet.of(Scope.FLATTR)); Intent intent = auth.createAuthenticateIntent(); context.startActivity(intent); } private static AccessToken retrieveToken() { if (cachedToken == null) { Log.d(TAG, "Retrieving access token"); String token = PreferenceManager.getDefaultSharedPreferences( ClientConfig.applicationCallbacks.getApplicationInstance()) .getString(PREF_ACCESS_TOKEN, null); if (token != null) { Log.d(TAG, "Found access token. Caching."); cachedToken = new AccessToken(token); } else { Log.d(TAG, "No access token found"); return null; } } return cachedToken; } /** * Returns true if FLATTR_APP_KEY and FLATTR_APP_SECRET in BuildConfig are not null and not empty */ public static boolean hasAPICredentials() { return StringUtils.isNotEmpty(ClientConfig.flattrCallbacks.getFlattrAppKey()) && StringUtils.isNotEmpty(ClientConfig.flattrCallbacks.getFlattrAppSecret()); } public static boolean hasToken() { return retrieveToken() != null; } public static void storeToken(AccessToken token) { Log.d(TAG, "Storing token"); SharedPreferences.Editor editor = PreferenceManager .getDefaultSharedPreferences(ClientConfig.applicationCallbacks.getApplicationInstance()).edit(); if (token != null) { editor.putString(PREF_ACCESS_TOKEN, token.getToken()); } else { editor.putString(PREF_ACCESS_TOKEN, null); } editor.commit(); cachedToken = token; } public static void deleteToken() { Log.d(TAG, "Deleting flattr token"); storeToken(null); } public static Thing getAppThing(Context 
context) { FlattrService fs = FlattrServiceCreator.getService(retrieveToken()); try { Thing thing = fs.getThing(Thing.withId(APP_THING_ID)); return thing; } catch (FlattrException e) { e.printStackTrace(); showErrorDialog(context, e.getMessage()); return null; } } public static void clickUrl(Context context, String url) throws FlattrException { if (hasToken()) { FlattrService fs = FlattrServiceCreator.getService(retrieveToken()); fs.flattr(url); } else { Log.e(TAG, "clickUrl was called with null access token"); } } public static List<Flattr> retrieveFlattredThings() throws FlattrException { ArrayList<Flattr> myFlattrs = new ArrayList<Flattr>(); if (hasToken()) { FlattrService fs = FlattrServiceCreator.getService(retrieveToken()); Calendar firstOfMonth = Calendar.getInstance(TimeZone.getTimeZone("UTC")); firstOfMonth.set(Calendar.MILLISECOND, 0); firstOfMonth.set(Calendar.SECOND, 0); firstOfMonth.set(Calendar.MINUTE, 0); firstOfMonth.set(Calendar.HOUR_OF_DAY, 0); firstOfMonth.set(Calendar.DAY_OF_MONTH, Calendar.getInstance().getActualMinimum(Calendar.DAY_OF_MONTH)); Date firstOfMonthDate = firstOfMonth.getTime(); // subscriptions some times get flattrd slightly before midnight - give it an hour leeway firstOfMonthDate = new Date(firstOfMonthDate.getTime() - 60 * 60 * 1000); final int FLATTR_COUNT = 30; final int FLATTR_MAXPAGE = 5; for (int page = 0; page < FLATTR_MAXPAGE; page++) { for (Flattr fl : fs.getMyFlattrs(FLATTR_COUNT, page)) { if (fl.getCreated().after(firstOfMonthDate)) myFlattrs.add(fl); else break; } } Log.d(TAG, "Got my flattrs list of length " + Integer.toString(myFlattrs.size()) + " comparison date" + firstOfMonthDate); for (Flattr fl : myFlattrs) { Thing thing = fl.getThing(); Log.d(TAG, "Flattr thing: " + fl.getThingId() + " name: " + thing.getTitle() + " url: " + thing.getUrl() + " on: " + fl.getCreated()); } } else { Log.e(TAG, "retrieveFlattrdThings was called with null access token"); } return myFlattrs; } public static void 
handleCallback(Context context, Uri uri) { AndroidAuthenticator auth = createAuthenticator(); new FlattrTokenFetcher(context, auth, uri).executeAsync(); } public static void revokeAccessToken(Context context) { Log.d(TAG, "Revoking access token"); deleteToken(); FlattrServiceCreator.deleteFlattrService(); showRevokeDialog(context); DBWriter.clearAllFlattrStatus(context); } // ------------------------------------------------ DIALOGS public static void showRevokeDialog(final Context context) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.access_revoked_title); builder.setMessage(R.string.access_revoked_info); builder.setNeutralButton(android.R.string.ok, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } /** * Opens a dialog that ask the user to either connect the app with flattr or to be redirected to * the thing's website. * If no API credentials are available, the user will immediately be redirected to the thing's website. 
*/ public static void showNoTokenDialogOrRedirect(final Context context, final String url) { if (hasAPICredentials()) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.no_flattr_token_title); builder.setMessage(R.string.no_flattr_token_msg); builder.setPositiveButton(R.string.authenticate_now_label, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { context.startActivity( ClientConfig.flattrCallbacks.getFlattrAuthenticationActivityIntent(context)); } } ); builder.setNegativeButton(R.string.visit_website_label, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { Uri uri = Uri.parse(url); context.startActivity(new Intent(Intent.ACTION_VIEW, uri)); } } ); builder.create().show(); } else { Uri uri = Uri.parse(url); context.startActivity(new Intent(Intent.ACTION_VIEW, uri)); } } public static void showForbiddenDialog(final Context context, final String url) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.action_forbidden_title); builder.setMessage(R.string.action_forbidden_msg); builder.setPositiveButton(R.string.authenticate_now_label, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { context.startActivity( ClientConfig.flattrCallbacks.getFlattrAuthenticationActivityIntent(context)); } } ); builder.setNegativeButton(R.string.visit_website_label, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { Uri uri = Uri.parse(url); context.startActivity(new Intent(Intent.ACTION_VIEW, uri)); } } ); builder.create().show(); } public static void showErrorDialog(final Context context, final String msg) { AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(R.string.error_label); builder.setMessage(msg); builder.setNeutralButton(android.R.string.ok, new OnClickListener() { @Override public void 
onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } }
removed unused code. fixes AntennaPod/AntennaPod#935
core/src/main/java/de/danoeh/antennapod/core/util/flattr/FlattrUtils.java
removed unused code. fixes AntennaPod/AntennaPod#935
Java
mit
be993103042cd7d15ff6f289f1e970cd061d688b
0
talandar/ProgressiveDifficulty,talandar/ProgressiveDifficulty
package derpatiel.progressivediff.modifiers;

import com.google.common.collect.Lists;
import derpatiel.progressivediff.api.DifficultyModifier;
import derpatiel.progressivediff.util.MobNBTHandler;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.MobEffects;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.common.config.Property;
import net.minecraftforge.event.entity.living.LivingAttackEvent;

import java.util.List;
import java.util.function.Function;

/**
 * Difficulty modifier that applies a potion effect (slowness, mining fatigue,
 * blindness, hunger, weakness, poison or wither) to the entity that a modified
 * mob hits. The effect level is the mob's stored modifier level (blindness is
 * always level 1), and each variant is configured in its own config category.
 */
public class OnHitEffectModifier extends DifficultyModifier {

    private int maxInstances;       // max times this modifier may be applied to one mob
    private int costPerLevel;       // difficulty-point cost per level
    private double selectionWeight; // relative weight when randomly selecting modifiers
    private Potion effect;          // potion effect applied to the hit target
    private int duration;           // effect duration, in ticks
    private String identifier;      // config category / NBT key for this variant

    /**
     * @param effect          potion effect applied on hit
     * @param duration        effect duration in ticks
     * @param maxInstances    maximum level/instances of this modifier per mob
     * @param costPerLevel    difficulty cost per level
     * @param selectionWeight selection weight (higher = chosen more often)
     * @param identifier      unique identifier, also used as the config category
     */
    public OnHitEffectModifier(Potion effect, int duration, int maxInstances, int costPerLevel, double selectionWeight, String identifier){
        this.maxInstances = maxInstances;
        this.costPerLevel = costPerLevel;
        this.selectionWeight = selectionWeight;
        this.effect = effect;
        this.duration = duration;
        this.identifier = identifier;
    }

    @Override
    public int getMaxInstances() {
        return maxInstances;
    }

    @Override
    public int costPerChange() {
        return costPerLevel;
    }

    @Override
    public double getWeight() {
        return selectionWeight;
    }

    @Override
    public String getIdentifier() {
        return identifier;
    }

    /**
     * When a mob carrying this modifier attacks a living entity, applies the
     * configured potion effect to the victim at the mob's stored modifier level.
     */
    @Override
    public void handleDamageEvent(LivingAttackEvent event) {
        super.handleDamageEvent(event);
        if(!(event.getEntity() instanceof EntityLivingBase))
            return;
        // BUGFIX: the old code cast getTrueSource() to EntityLiving unchecked, which
        // could throw (or NPE in getModifierLevel) for projectiles without an owner
        // or non-living damage sources. Skip those cases instead.
        if(!(event.getSource().getTrueSource() instanceof EntityLiving))
            return;
        EntityLivingBase hitEntity = (EntityLivingBase)event.getEntity();
        EntityLiving cause = (EntityLiving)event.getSource().getTrueSource();
        int level = MobNBTHandler.getModifierLevel(cause, identifier);
        hitEntity.addPotionEffect(new PotionEffect(effect, duration, level, false, false));
    }

    /** Reads all seven on-hit effect variants from the config. */
    public static Function<Configuration,List<DifficultyModifier>> getFromConfig = config -> {
        List<DifficultyModifier> returns = Lists.newArrayList();
        addFromConfig(config, returns, "MOD_SLOW_ON_HIT", "EnableSlowOnHitModifier",
                "Enable the slow on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.SLOWNESS, true);
        addFromConfig(config, returns, "MOD_FATIGUE_ON_HIT", "EnableFatigueOnHitModifier",
                "Enable the mining fatigue on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.MINING_FATIGUE, true);
        // BUGFIX: this section previously read its enable flag from the key
        // "EnableFatigueOnHitModifier" (copy/paste from the fatigue section), so
        // blindness could not be toggled independently of mining fatigue. Same bug
        // class as the poison/weakness key fix. Blindness has no configurable max
        // level; it is always applied at level 1.
        addFromConfig(config, returns, "MOD_BLIND_ON_HIT", "EnableBlindOnHitModifier",
                "Enable the blindness on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.BLINDNESS, false);
        addFromConfig(config, returns, "MOD_HUNGER_ON_HIT", "EnableHungerOnHitModifier",
                "Enable the hunger on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.HUNGER, true);
        addFromConfig(config, returns, "MOD_WEAKNESS_ON_HIT", "EnableWeaknessOnHitModifier",
                "Enable the weakness on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.WEAKNESS, true);
        addFromConfig(config, returns, "MOD_POISON_ON_HIT", "EnablePoisonOnHitModifier",
                "Enable the poison on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.POISON, true);
        addFromConfig(config, returns, "MOD_WITHER_ON_HIT", "EnableWitherOnHitModifier",
                "Enable the wither on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.",
                MobEffects.WITHER, true);
        return returns;
    };

    /**
     * Reads one on-hit effect section from the config and, when enabled with sane
     * values (cost and weight positive, max level positive), appends a modifier
     * for {@code effect} to {@code out}. Extracted because the identical block
     * was previously repeated seven times inline.
     *
     * @param category      config category; also used as the modifier identifier
     * @param enableKey     name of the boolean enable property for this variant
     * @param enableComment config comment for the enable property
     * @param hasMaxLevel   when false (blindness) no "ModifierMaxLevel" property
     *                      is created and the level is fixed at 1
     */
    private static void addFromConfig(Configuration config, List<DifficultyModifier> out,
                                      String category, String enableKey, String enableComment,
                                      Potion effect, boolean hasMaxLevel) {
        Property enabledProp = config.get(category, enableKey, true, enableComment);
        boolean enabled = enabledProp.getBoolean();
        int maxLevel = 1;
        if (hasMaxLevel) {
            maxLevel = config.get(category, "ModifierMaxLevel", 3,
                    "Maximum level of this effect added to the target player when this is triggered.").getInt();
        }
        int costPerLevel = config.get(category, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        int duration = config.get(category, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        double weight = config.get(category, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && maxLevel > 0 && costPerLevel > 0 && weight > 0) {
            out.add(new OnHitEffectModifier(effect, duration, maxLevel, costPerLevel, weight, category));
        }
    }
}
src/main/java/derpatiel/progressivediff/modifiers/OnHitEffectModifier.java
package derpatiel.progressivediff.modifiers;

import com.google.common.collect.Lists;
import derpatiel.progressivediff.api.DifficultyModifier;
import derpatiel.progressivediff.util.MobNBTHandler;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.MobEffects;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.common.config.Property;
import net.minecraftforge.event.entity.living.LivingAttackEvent;

import java.util.List;
import java.util.function.Function;

/**
 * Difficulty modifier that applies a potion effect to whatever a modified mob
 * hits. Seven variants are read from config, one category per effect.
 *
 * NOTE(review): as written, the blindness section reads its enable flag from
 * the key "EnableFatigueOnHitModifier" and the poison section from
 * "EnableWeaknessOnHitModifier" — both look like copy/paste slips; confirm
 * before relying on those toggles.
 */
public class OnHitEffectModifier extends DifficultyModifier {

    private int maxInstances;       // cap on instances of this modifier per mob
    private int costPerLevel;       // difficulty-point cost per level
    private double selectionWeight; // relative selection weight
    private Potion effect;          // potion applied to the hit target
    private int duration;           // effect duration in ticks
    private String identifier;      // config category / NBT key

    public OnHitEffectModifier(Potion effect, int duration, int maxInstances, int costPerLevel, double selectionWeight, String identifier){
        this.effect = effect;
        this.duration = duration;
        this.maxInstances = maxInstances;
        this.costPerLevel = costPerLevel;
        this.selectionWeight = selectionWeight;
        this.identifier = identifier;
    }

    @Override
    public int getMaxInstances() {
        return maxInstances;
    }

    @Override
    public int costPerChange() {
        return costPerLevel;
    }

    @Override
    public double getWeight() {
        return selectionWeight;
    }

    @Override
    public String getIdentifier() {
        return identifier;
    }

    /** Applies the configured effect to the victim at the attacker's stored level. */
    @Override
    public void handleDamageEvent(LivingAttackEvent event) {
        super.handleDamageEvent(event);
        if(!(event.getEntity() instanceof EntityLivingBase))
            return;
        EntityLivingBase target = (EntityLivingBase)event.getEntity();
        // NOTE(review): unchecked cast — assumes the true damage source is always
        // an EntityLiving; verify for projectile / environmental damage.
        EntityLiving attacker = (EntityLiving)event.getSource().getTrueSource();
        int level = MobNBTHandler.getModifierLevel(attacker, identifier);
        target.addPotionEffect(new PotionEffect(effect, duration, level, false, false));
    }

    /** Reads every on-hit effect variant from the config, one section each. */
    public static Function<Configuration,List<DifficultyModifier>> getFromConfig = config -> {
        List<DifficultyModifier> result = Lists.newArrayList();

        // SLOWNESS
        String ID_SLOW = "MOD_SLOW_ON_HIT";
        boolean enabled = config.get(ID_SLOW, "EnableSlowOnHitModifier", true,
                "Enable the slow on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        int cap = config.get(ID_SLOW, "ModifierMaxLevel", 3,
                "Maximum level of this effect added to the target player when this is triggered.").getInt();
        int cost = config.get(ID_SLOW, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        int ticks = config.get(ID_SLOW, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        double weight = config.get(ID_SLOW, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cap > 0 && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.SLOWNESS, ticks, cap, cost, weight, ID_SLOW));
        }

        // MINING_FATIGUE
        String ID_FATIGUE = "MOD_FATIGUE_ON_HIT";
        enabled = config.get(ID_FATIGUE, "EnableFatigueOnHitModifier", true,
                "Enable the mining fatigue on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        cap = config.get(ID_FATIGUE, "ModifierMaxLevel", 3,
                "Maximum level of this effect added to the target player when this is triggered.").getInt();
        cost = config.get(ID_FATIGUE, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        ticks = config.get(ID_FATIGUE, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        weight = config.get(ID_FATIGUE, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cap > 0 && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.MINING_FATIGUE, ticks, cap, cost, weight, ID_FATIGUE));
        }

        // BLINDNESS — no configurable max level; always applied at level 1.
        String ID_BLIND = "MOD_BLIND_ON_HIT";
        enabled = config.get(ID_BLIND, "EnableFatigueOnHitModifier", true,
                "Enable the blindness on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        cost = config.get(ID_BLIND, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        ticks = config.get(ID_BLIND, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        weight = config.get(ID_BLIND, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.BLINDNESS, ticks, 1, cost, weight, ID_BLIND));
        }

        // HUNGER
        String ID_HUNGER = "MOD_HUNGER_ON_HIT";
        enabled = config.get(ID_HUNGER, "EnableHungerOnHitModifier", true,
                "Enable the hunger on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        cap = config.get(ID_HUNGER, "ModifierMaxLevel", 3,
                "Maximum level of this effect added to the target player when this is triggered.").getInt();
        cost = config.get(ID_HUNGER, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        ticks = config.get(ID_HUNGER, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        weight = config.get(ID_HUNGER, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cap > 0 && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.HUNGER, ticks, cap, cost, weight, ID_HUNGER));
        }

        // WEAKNESS
        String ID_WEAKNESS = "MOD_WEAKNESS_ON_HIT";
        enabled = config.get(ID_WEAKNESS, "EnableWeaknessOnHitModifier", true,
                "Enable the weakness on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        cap = config.get(ID_WEAKNESS, "ModifierMaxLevel", 3,
                "Maximum level of this effect added to the target player when this is triggered.").getInt();
        cost = config.get(ID_WEAKNESS, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        ticks = config.get(ID_WEAKNESS, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        weight = config.get(ID_WEAKNESS, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cap > 0 && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.WEAKNESS, ticks, cap, cost, weight, ID_WEAKNESS));
        }

        // POISON — enable flag key reads "EnableWeaknessOnHitModifier" (see class note).
        String ID_POISON = "MOD_POISON_ON_HIT";
        enabled = config.get(ID_POISON, "EnableWeaknessOnHitModifier", true,
                "Enable the poison on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        cap = config.get(ID_POISON, "ModifierMaxLevel", 3,
                "Maximum level of this effect added to the target player when this is triggered.").getInt();
        cost = config.get(ID_POISON, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        ticks = config.get(ID_POISON, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        weight = config.get(ID_POISON, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cap > 0 && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.POISON, ticks, cap, cost, weight, ID_POISON));
        }

        // WITHER
        String ID_WITHER = "MOD_WITHER_ON_HIT";
        enabled = config.get(ID_WITHER, "EnableWitherOnHitModifier", true,
                "Enable the wither on hit modifier. Adds the potion effect to targets hit by a mob with this modifier.").getBoolean();
        cap = config.get(ID_WITHER, "ModifierMaxLevel", 3,
                "Maximum level of this effect added to the target player when this is triggered.").getInt();
        cost = config.get(ID_WITHER, "DifficultyCostPerLevel", 10,
                "Cost of each level of the effect applied to the target player.").getInt();
        ticks = config.get(ID_WITHER, "EffectDuration", 40,
                "Duration of the effect when applied to the target.").getInt();
        weight = config.get(ID_WITHER, "ModifierWeight", 1.0d,
                "Weight that affects how often this modifier is selected.").getDouble();
        if (enabled && cap > 0 && cost > 0 && weight > 0) {
            result.add(new OnHitEffectModifier(MobEffects.WITHER, ticks, cap, cost, weight, ID_WITHER));
        }

        return result;
    };
}
fix typo in OnHitEffectModifier that meant the poison on hit modifier was called weakness for its enable property.
src/main/java/derpatiel/progressivediff/modifiers/OnHitEffectModifier.java
fix typo in OnHitEffectModifier that meant the poison on hit modifier was called weakness for its enable property.