code
stringlengths 67
466k
| docstring
stringlengths 1
13.2k
|
---|---|
/**
 * Called from within the constructor to initialize the form: builds the
 * menu bar ("File" > Save), an internal frame with a split pane holding
 * the repository tree (left) and a property scroll pane (right), and
 * wires the tree's mouse and selection listeners.
 * WARNING: Do NOT modify this code. The content of this method is
 * always regenerated by the NetBeans Form Editor.
 */
private void initComponents()//GEN-BEGIN:initComponents
{
    // widget construction
    jMenuBar1 = new javax.swing.JMenuBar();
    mnuFile = new javax.swing.JMenu();
    mnuFileSave = new javax.swing.JMenuItem();
    jSplitPane1 = new javax.swing.JSplitPane();
    jScrollPane1 = new javax.swing.JScrollPane();
    jTree1 = new javax.swing.JTree();
    jScrlProperty = new javax.swing.JScrollPane();
    // "File" menu with a Save action bound to the repository
    mnuFile.setMnemonic('F');
    mnuFile.setText("File");
    mnuFileSave.setAction(new ActionSaveAsOJBRepository(aRepository));
    mnuFile.add(mnuFileSave);
    jMenuBar1.add(mnuFile);
    // standard internal-frame decorations
    setClosable(true);
    setIconifiable(true);
    setMaximizable(true);
    setResizable(true);
    // NOTE(review): both mousePressed and mouseReleased delegate to
    // jTree1MousePressed - presumably to catch the platform-specific
    // popup trigger on either event; confirm before changing.
    jTree1.addMouseListener(new java.awt.event.MouseAdapter()
    {
        public void mousePressed(java.awt.event.MouseEvent evt)
        {
            jTree1MousePressed(evt);
        }
        public void mouseReleased(java.awt.event.MouseEvent evt)
        {
            jTree1MousePressed(evt);
        }
    });
    jTree1.addTreeSelectionListener(new javax.swing.event.TreeSelectionListener()
    {
        public void valueChanged(javax.swing.event.TreeSelectionEvent evt)
        {
            jTree1ValueChanged(evt);
        }
    });
    // assemble the layout and size the frame
    jScrollPane1.setViewportView(jTree1);
    jSplitPane1.setLeftComponent(jScrollPane1);
    jSplitPane1.setRightComponent(jScrlProperty);
    getContentPane().add(jSplitPane1, java.awt.BorderLayout.CENTER);
    pack();
} | This method is called from within the constructor to
initialize the form.
WARNING: Do NOT modify this code. The content of this method is
always regenerated by the Form Editor. |
/**
 * Builds a popup menu containing one entry for each action supplied by the
 * given action target.
 *
 * @param at the target supplying the actions
 * @return the newly assembled popup menu
 */
private javax.swing.JPopupMenu createPopupMenuForActionTarget(org.apache.ojb.tools.mapping.reversedb2.ActionTarget at)
{
    javax.swing.JPopupMenu popup = new javax.swing.JPopupMenu();
    for (java.util.Iterator actions = at.getActions(); actions.hasNext(); )
    {
        popup.add((javax.swing.Action) actions.next());
    }
    return popup;
} | GEN-LAST:event_jTree1ValueChanged |
/**
 * Returns a Logger for the given name, creating and caching it on first
 * use. If logger creation fails, the boot logger is returned (and cached
 * under the requested name) so callers always receive a usable logger.
 *
 * @param loggerName the name of the Logger
 * @return Logger the cached or newly created Logger
 */
public Logger getLogger(String loggerName)
{
    Logger logger;
    //lookup in the cache first
    logger = (Logger) cache.get(loggerName);
    if(logger == null)
    {
        try
        {
            // get the configuration (not from the configurator because this is independent)
            logger = createLoggerInstance(loggerName);
            if(getBootLogger().isDebugEnabled())
            {
                getBootLogger().debug("Using logger class '"
                    + (getConfiguration() != null ? getConfiguration().getLoggerClass() : null)
                    + "' for " + loggerName);
            }
            // configure the logger
            getBootLogger().debug("Initializing logger instance " + loggerName);
            logger.configure(conf);
        }
        catch(Throwable t)
        {
            // do reassign check and signal logger creation failure,
            // then fall back to the boot logger so logging keeps working
            reassignBootLogger(true);
            logger = getBootLogger();
            getBootLogger().error("[" + this.getClass().getName()
                + "] Could not initialize logger " + (conf != null ? conf.getLoggerClass() : null), t);
        }
        //cache it so we can get it faster the next time
        // (note: on failure the boot logger is cached under this name too)
        cache.put(loggerName, logger);
        // do reassign check
        reassignBootLogger(false);
    }
    return logger;
} | returns a Logger.
@param loggerName the name of the Logger
@return Logger the returned Logger |
/**
 * Creates and configures a new Logger instance for the specified name,
 * using the logger class declared in the current configuration.
 *
 * @param loggerName the name of the new logger
 * @return the configured logger instance
 * @throws Exception if instantiation or configuration fails
 */
private Logger createLoggerInstance(String loggerName) throws Exception
{
    Class loggerClass = getConfiguration().getLoggerClass();
    // instantiate via the single-argument (String loggerName) constructor
    Logger log = (Logger) ClassHelper.newInstance(loggerClass, String.class, loggerName);
    // NOTE(review): getLogger() configures the returned logger again with
    // 'conf' - this double configure looks redundant; verify before changing.
    log.configure(getConfiguration());
    return log;
} | Creates a new Logger instance for the specified name. |
/**
 * Looks up a declared field by name in the given class, walking up the
 * class hierarchy until it is found.
 *
 * @param c    the class to start the lookup in
 * @param name the field name
 * @return the declared field
 * @throws NoSuchFieldException if no such field exists anywhere in the
 *         hierarchy (thrown from the topmost class searched)
 */
private Field getFieldRecursive(Class c, String name) throws NoSuchFieldException
{
    Class current = c;
    while (true)
    {
        try
        {
            return current.getDeclaredField(name);
        }
        catch (NoSuchFieldException e)
        {
            // stop once the top of the hierarchy (or an interface) is reached
            if ((current == Object.class) || (current.getSuperclass() == null) || current.isInterface())
            {
                throw e;
            }
            // otherwise continue the search in the superclass
            current = current.getSuperclass();
        }
    }
} | try to find a field in class c, recurse through class hierarchy if necessary
@throws NoSuchFieldException if no Field was found into the class hierarchy |
/**
 * Builds a multi-line String representation of the given arguments, used
 * in error messages when setting a field value fails. All parts are
 * null-safe ("null" is rendered for missing values).
 *
 * @param obj    the target object (may be null)
 * @param value  the value that was to be set (may be null)
 * @param aField the target field (may be null)
 * @return the assembled diagnostic message
 */
protected String buildErrorSetMsg(Object obj, Object value, Field aField)
{
    String eol = SystemUtils.LINE_SEPARATOR;
    StringBuffer buf = new StringBuffer();
    // append the pieces separately instead of concatenating inside
    // append(), so the buffer is actually used as a buffer
    buf.append(eol).append("[try to set 'object value' in 'target object'")
       .append(eol).append("target obj class: ").append(obj != null ? obj.getClass().getName() : null)
       .append(eol).append("target field name: ").append(aField != null ? aField.getName() : null)
       .append(eol).append("target field type: ").append(aField != null ? aField.getType() : null)
       .append(eol).append("target field declared in: ").append(aField != null ? aField.getDeclaringClass().getName() : null)
       .append(eol).append("object value class: ").append(value != null ? value.getClass().getName() : null)
       // FIX: was "(value != null ? value : null)", a no-op ternary
       .append(eol).append("object value: ").append(value)
       .append(eol).append("]");
    return buf.toString();
} | Build a String representation of given arguments. |
/**
 * For internal use! Creates a real new PB instance for the given key:
 * validates the key against the metadata, instantiates the configured
 * implementation class, configures it and wraps it with interceptors.
 *
 * @param key the PBKey identifying the connection configuration (must not be null)
 * @return the new, configured broker instance
 * @throws PBFactoryException if the key is null/unknown or instantiation fails
 */
protected PersistenceBrokerInternal createNewBrokerInstance(PBKey key) throws PBFactoryException
{
    if (key == null) throw new PBFactoryException("Could not create new broker with PBkey argument 'null'");
    // check if the given key really exists in the connection metadata
    if (MetadataManager.getInstance().connectionRepository().getDescriptor(key) == null)
    {
        throw new PBFactoryException("Given PBKey " + key + " does not match in metadata configuration");
    }
    if (log.isEnabledFor(Logger.INFO))
    {
        // only count created instances when INFO-Log-Level
        log.info("Create new PB instance for PBKey " + key +
                ", already created persistence broker instances: " + instanceCount);
        // useful for testing
        ++this.instanceCount;
    }
    PersistenceBrokerInternal instance = null;
    // instantiate via the (PBKey, PersistenceBrokerFactoryIF) constructor
    Class[] types = {PBKey.class, PersistenceBrokerFactoryIF.class};
    Object[] args = {key, this};
    try
    {
        instance = (PersistenceBrokerInternal) ClassHelper.newInstance(implementationClass, types, args);
        OjbConfigurator.getInstance().configure(instance);
        // wrap the broker so configured interceptors see all calls
        instance = (PersistenceBrokerInternal) InterceptorFactory.getInstance().createInterceptorFor(instance);
    }
    catch (Exception e)
    {
        log.error("Creation of a new PB instance failed", e);
        throw new PBFactoryException("Creation of a new PB instance failed", e);
    }
    return instance;
} | For internal use! This method creates real new PB instances |
/**
 * Always returns a newly created {@link PersistenceBroker} instance for
 * the given key - no pooling is performed by this factory.
 *
 * @param pbKey the key of the persistence configuration to use; it is
 *              cross-checked/completed against the known configurations first
 * @return a new PersistenceBroker instance
 * @throws PBFactoryException if the broker could not be created
 */
public PersistenceBrokerInternal createPersistenceBroker(PBKey pbKey) throws PBFactoryException
{
    // FIX: log/error texts previously talked about "pool"/"borrow", which was
    // misleading - this factory always creates a fresh broker instance.
    if (log.isDebugEnabled()) log.debug("Create new broker instance, used PBKey is " + pbKey);
    /*
    try to find a valid PBKey, if given key does not full match
    */
    pbKey = BrokerHelper.crossCheckPBKey(pbKey);
    try
    {
        return createNewBrokerInstance(pbKey);
    }
    catch (Exception e)
    {
        throw new PBFactoryException("Creation of a new broker instance failed, using PBKey " + pbKey, e);
    }
} | Always return a new created {@link PersistenceBroker} instance
@param pbKey the key of the persistence configuration the broker is created for
@return a newly created PersistenceBroker instance
@throws PBFactoryException if no matching configuration exists or creation fails |
/**
 * Invoked when an action occurs: creates a new ClassDescriptor with a
 * placeholder table name and adds it to the repository root node.
 *
 * @param e the triggering action event
 */
public void actionPerformed(java.awt.event.ActionEvent e)
{
    // debug output describing the triggering event
    System.out.println("Action Command: " + e.getActionCommand());
    System.out.println("Action Params : " + e.paramString());
    System.out.println("Action Source : " + e.getSource());
    System.out.println("Action SrcCls : " + e.getSource().getClass().getName());
    org.apache.ojb.broker.metadata.ClassDescriptor cld =
        new org.apache.ojb.broker.metadata.ClassDescriptor(rootNode.getRepository());
    // cld.setClassNameOfObject("New Class");
    cld.setTableName("New Table");
    rootNode.addClassDescriptor(cld);
} | Invoked when an action occurs. |
/**
 * @see ObjectSet#hasNext()
 *
 * Returns true while the cursor has not reached the requested length.
 * Positions below 'scrolled' are already cached locally; beyond that the
 * underlying OJB iterator is consulted. Once 'length' is reached the
 * JDBC resources are released eagerly.
 */
public synchronized boolean hasNext()
{
    if (position < length)
    {
        if (position < scrolled)
        {
            // the next element was fetched before and sits in the cache
            return true;
        }
        else
        {
            boolean result = ojbIterator.hasNext();
            return result;
        }
    }
    else
    {
        // logical end of the set - free the underlying JDBC resources
        releaseJdbcResources();
        return false;
    }
} | /*
@see ObjectSet#hasNext() |
/**
 * @see ObjectSet#next()
 *
 * Returns the element at the current cursor position: replayed from the
 * local cache when the position was scrolled over before, otherwise
 * fetched from the underlying OJB iterator and appended to the cache.
 * NOTE(review): unlike hasNext(), this does not check 'length';
 * presumably callers always call hasNext() first - confirm.
 */
public synchronized Object next()
{
    if (position < scrolled)
    {
        // replay a previously fetched element from the cache
        position++;
        return elements.get(position - 1);
    }
    else
    {
        // fetch a fresh element and remember it for later replays
        Object next = ojbIterator.next();
        elements.add(next);
        position++;
        scrolled++;
        return next;
    }
} | /*
@see ObjectSet#next() |
/**
 * Maps the internally configured authentication method onto the public
 * {@link ProxyAuthenticationMethod} values.
 *
 * @return the matching authentication method, or null for any
 *         unrecognized internal value
 */
@Override
public ProxyAuthenticationMethod getMethod() {
    ProxyAuthenticationMethod mapped;
    switch (authenticationMethod) {
        case BASIC:
            mapped = ProxyAuthenticationMethod.BASIC;
            break;
        case DIGEST:
            mapped = ProxyAuthenticationMethod.DIGEST;
            break;
        case URL:
            mapped = ProxyAuthenticationMethod.URL;
            break;
        default:
            mapped = null;
            break;
    }
    return mapped;
} | Get the authentication method to use.
@return authentication method |
/**
 * Provides a type-specific Meta class for the given TinyType.
 *
 * @param <T> the TinyType class type
 * @param candidate the TinyType class to obtain a Meta for
 * @return a Meta implementation suitable for the candidate
 * @throws IllegalArgumentException for null or a non-TinyType candidate
 */
public static <T> MetaTinyType<T> metaFor(Class<?> candidate) {
    // FIX: iterate with a wildcard instead of the raw type and keep the
    // unavoidable unchecked cast on the narrowest possible scope
    for (MetaTinyType<?> meta : metas) {
        if (meta.isMetaOf(candidate)) {
            @SuppressWarnings("unchecked")
            MetaTinyType<T> typed = (MetaTinyType<T>) meta;
            return typed;
        }
    }
    String candidateName = candidate == null ? "null" : candidate.getCanonicalName();
    throw new IllegalArgumentException(String.format("not a tinytype: %s", candidateName));
} | Provides a type-specific Meta class for the given TinyType.
@param <T> the TinyType class type
@param candidate the TinyType class to obtain a Meta for
@return a Meta implementation suitable for the candidate
@throws IllegalArgumentException for null or a non-TinyType |
/**
 * Checks whether the given class is a TinyType, i.e. whether any
 * registered Meta recognizes it.
 *
 * @param candidate the class to be checked
 * @return true if the candidate is a TinyType, false otherwise
 */
public static boolean isTinyType(Class<?> candidate) {
    boolean recognized = false;
    for (MetaTinyType<?> meta : metas) {
        if (meta.isMetaOf(candidate)) {
            recognized = true;
            break;
        }
    }
    return recognized;
} | Checks whether a class is a TinyType. A class is considered a TinyType if
it is a direct subclass of a tech.anima.tinytypes.*TinyType, is not abstract,
and provides a constructor matching the superclass's.
@param candidate the class to be checked
@return true if the candidate is a TinyType, false otherwise. |
/**
 * Gets a layer style by name.
 *
 * @param name layer style name
 * @return the matching layer style, or null if no style has that name
 */
public NamedStyleInfo getNamedStyleInfo(String name) {
    NamedStyleInfo match = null;
    for (NamedStyleInfo candidate : namedStyleInfos) {
        if (candidate.getName().equals(name)) {
            match = candidate;
            break;
        }
    }
    return match;
} | Get layer style by name.
@param name layer style name
@return layer style |
/**
 * Scans the classpath (packages "com.nds" and "com.cisco") for classes
 * annotated with {@code DefaultFormat} and asynchronously generates a
 * formatter for every FoundationLoggingMarker implementation found.
 * Waits up to 30 seconds for the generation tasks to finish.
 */
public static void scanClassPathForFormattingAnnotations() {
    ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
    // scan classpath and filter out classes that don't begin with "com.nds"/"com.cisco"
    Reflections reflections = new Reflections("com.nds", "com.cisco");
    Set<Class<?>> annotated = reflections.getTypesAnnotatedWith(DefaultFormat.class);
    for (Class<?> markerClass : annotated) {
        // if the marker class is indeed implementing FoundationLoggingMarker
        // interface, generate its formatter on a worker thread
        if (FoundationLoggingMarker.class.isAssignableFrom(markerClass)) {
            @SuppressWarnings("unchecked")
            final Class<? extends FoundationLoggingMarker> clazz = (Class<? extends FoundationLoggingMarker>) markerClass;
            executorService.execute(new Runnable() {
                @Override
                public void run() {
                    if (markersMap.get(clazz) == null) {
                        try {
                            // generate formatter class for this marker class
                            generateAndUpdateFormatterInMap(clazz);
                        } catch (Exception e) {
                            LOGGER.trace("problem generating formatter class from static scan method. error is: " + e.toString());
                        }
                    }
                }
            });
        } else {// if marker class does not implement FoundationLoggingMarker
            // interface, log ERROR.
            // verify the LOGGER was initialized. It might not be as this
            // Method is called in a static block
            if (LOGGER == null) {
                LOGGER = LoggerFactory.getLogger(AbstractFoundationLoggingMarker.class);
            }
            LOGGER.error("Formatter annotations should only appear on foundationLoggingMarker implementations");
        }
    }
    // FIX: previously this slept for a fixed 30 seconds and then called
    // shutdown() without waiting for the tasks. Stop accepting new tasks
    // and wait (up to the same 30 seconds) for the queued work instead.
    executorService.shutdown();
    try {
        executorService.awaitTermination(30, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        // restore the interrupt flag so callers can observe the interruption
        Thread.currentThread().interrupt();
        LOGGER.trace(e.toString(), e);
    }
} | Scan all the class path and look for all classes that have the Format
Annotations. |
/**
 * Performs the actual appender override at runtime: applies default
 * layout/rolling settings, enables archiving support and the roll-event
 * listener for the two supported rolling appender types, and finally
 * initiates async support for any non-async appender.
 *
 * @see org.apache.log4j.spi.HierarchyEventListener#addAppenderEvent(org.apache.log4j.Category, org.apache.log4j.Appender)
 */
public void addAppenderEvent(final Category cat, final Appender appender) {
    updateDefaultLayout(appender);
    if (appender instanceof FoundationFileRollingAppender) {
        final FoundationFileRollingAppender timeSizeRollingAppender = (FoundationFileRollingAppender) appender;
        // update the appender with default values such as logging pattern, file size etc.
        //updateDefaultTimeAndSizeRollingAppender(timeSizeRollingAppender);
        // read the properties and determine if archiving should be enabled
        updateArchivingSupport(timeSizeRollingAppender);
        // by default add the rolling file listener to enable application
        // state.
        timeSizeRollingAppender.setFileRollEventListener(FoundationRollEventListener.class.getName());
        boolean rollOnStartup = true;
        if (FoundationLogger.log4jConfigProps != null && FoundationLogger.log4jConfigProps.containsKey(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())) {
            rollOnStartup = Boolean.valueOf(FoundationLogger.log4jConfigProps.getProperty(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString()));
        }
        timeSizeRollingAppender.setRollOnStartup(rollOnStartup);
        // refresh the appender so the new settings take effect
        timeSizeRollingAppender.activateOptions();
        // timeSizeRollingAppender.setOriginalLayout(); //So application state will not make any problems
    }else if(!(appender instanceof FoundationFileRollingAppender) && (appender instanceof TimeAndSizeRollingAppender)){ //TimeAndSizeRollingAppender
        final TimeAndSizeRollingAppender timeSizeRollingAppender = (TimeAndSizeRollingAppender) appender;
        // update the appender with default values such as logging pattern, file size etc.
        updateDefaultTimeAndSizeRollingAppender(timeSizeRollingAppender);
        // read the properties and determine if archiving should be enabled
        updateArchivingSupport(timeSizeRollingAppender);
        // by default add the rolling file listener to enable application
        // state.
        timeSizeRollingAppender.setFileRollEventListener(FoundationRollEventListener.class.getName());
        boolean rollOnStartup = true;
        if (FoundationLogger.log4jConfigProps != null && FoundationLogger.log4jConfigProps.containsKey(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())) {
            rollOnStartup = Boolean.valueOf(FoundationLogger.log4jConfigProps.getProperty(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString()));
        }
        timeSizeRollingAppender.setRollOnStartup(rollOnStartup);
        // refresh the appender so the new settings take effect
        timeSizeRollingAppender.activateOptions();
        // timeSizeRollingAppender.setOriginalLayout();
    }
    // wrap every non-async appender with async support
    if ( ! (appender instanceof org.apache.log4j.AsyncAppender))
        initiateAsyncSupport(appender);
} | In this method perform the actual override in runtime.
@see org.apache.log4j.spi.HierarchyEventListener#addAppenderEvent(org.apache.log4j.Category, org.apache.log4j.Appender) |
/**
 * Sets default values (date pattern, max file size, max roll file count)
 * for the given rolling appender, reading per-appender overrides from the
 * log4j configuration properties.
 * NOTE(review): getDatePattern() is assumed non-null here, and despite
 * its name this overload takes a FoundationFileRollingAppender - confirm.
 *
 * @param appender the appender to apply defaults to
 */
private void updateDefaultTimeAndSizeRollingAppender(final FoundationFileRollingAppender appender) {
    if (appender.getDatePattern().trim().length() == 0) {
        appender.setDatePattern(FoundationLoggerConstants.DEFAULT_DATE_PATTERN.toString());
    }
    // per-appender override key, e.g. log4j.appender.<name>.MaxFileSize
    String maxFileSizeKey = "log4j.appender."+appender.getName()+".MaxFileSize";
    appender.setMaxFileSize(FoundationLogger.log4jConfigProps.getProperty(maxFileSizeKey, FoundationLoggerConstants.Foundation_MAX_FILE_SIZE.toString()));
    // if (appender.getMaxFileSize() == null || appender.getMaxFileSize().equals(FoundationLoggerConstants.DEFAULT_FILE_SIZE.toString())) {
    // appender.setMaxFileSize(FoundationLoggerConstants.Foundation_MAX_FILE_SIZE.toString());
    // }
    String maxRollCountKey = "log4j.appender."+appender.getName()+".MaxRollFileCount";
    // default roll file count is 100 unless overridden
    appender.setMaxRollFileCount(Integer.parseInt(FoundationLogger.log4jConfigProps.getProperty(maxRollCountKey,"100")));
} | Set default values for the TimeAndSizeRollingAppender appender
@param appender |
/**
 * Performs the first step in generating a template-based printout (for
 * instance of a map view and its legend): converts the template page,
 * adjusts the legend texts and page size, lays out a single-page document
 * and registers it with the print service.
 *
 * @param request (input) specifies the template and the output format of the document
 * @param response (output) on return contains the id of the document
 *        (possibly the rendering step is not yet completed)
 * @param converterService converts print DTOs to internal components
 * @param printService stores the produced document
 * @throws GeomajasException on conversion or layout failure
 */
public static void execute(PrintGetTemplateExtRequest request, PrintGetTemplateExtResponse response,
        PrintDtoConverterService converterService, PrintService printService) throws GeomajasException {
    PrintTemplateInfo template = request.getTemplate();
    PageComponent page = (PageComponent) converterService.toInternal(template.getPage());
    // Adjust the title text of the legend component if present
    updateLegendChildren(request, page);
    if (request.getPageSize() != null) {
        page.setSize(request.getPageSize(), true);
    }
    SinglePageDocument pdfDoc = new SinglePageDocument(page, null);
    pdfDoc.layout();
    // Add document to container so it can be fetched by id later
    String documentId = printService.putDocument(pdfDoc);
    response.setDocumentId(documentId);
} | Perform the first step in generating a template-based printout of for instance the specified map view,
the map's legend,....
@param request
(Input) It specifies the template and the output format of the document
@param response
(Output) On return, it contains the id of the document (possibly the rendering step is not yet
completed)
@param converterService
@param printService
@throws GeomajasException |
/**
 * Convenience method that dispatches this object to the source appender,
 * resulting in the custom message being appended to the new file.
 * A copy is dispatched since events should be treated as immutable.
 *
 * @param message the custom logging message to be appended
 */
final void dispatchToAppender(final String message) {
    final FoundationFileRollingAppender target = this.getSource();
    if (target == null) {
        return;
    }
    target.append(new FileRollEvent(this, message));
} | Convenience method dispatches this object to the source appender, which
will result in the custom message being appended to the new file.
@param message
The custom logging message to be appended. |
/**
 * Convenience method that dispatches the specified event to the source
 * appender, resulting in the custom event data being appended to the new
 * file. The LoggingEvent is wrapped in a FileRollEvent to prevent a
 * recursion bug.
 *
 * @param customLoggingEvent the custom Log4J event to be appended
 */
final void dispatchToAppender(final LoggingEvent customLoggingEvent) {
    final FoundationFileRollingAppender target = this.getSource();
    if (target == null) {
        return;
    }
    target.append(new FileRollEvent(customLoggingEvent, this));
} | Convenience method dispatches the specified event to the source appender,
which will result in the custom event data being appended to the new file.
@param customLoggingEvent
The custom Log4J event to be appended. |
/**
 * Returns the SELECT statement for the object existence call, building
 * and caching it on first use. Only the first primary-key column is
 * selected - the WHERE clause (built by appendWhereClause) matches the key.
 * NOTE(review): the lazy 'sql' caching is not synchronized - confirm
 * single-threaded use or that duplicate computation is acceptable.
 *
 * @return the cached SELECT statement
 * @throws OJBRuntimeException if no PK fields are defined in the metadata
 */
public String getStatement()
{
    if(sql == null)
    {
        StringBuffer stmt = new StringBuffer(128);
        ClassDescriptor cld = getClassDescriptor();
        FieldDescriptor[] fieldDescriptors = cld.getPkFields();
        if(fieldDescriptors == null || fieldDescriptors.length == 0)
        {
            throw new OJBRuntimeException("No PK fields defined in metadata for " + cld.getClassNameOfObject());
        }
        // a single PK column is enough for an existence check
        FieldDescriptor field = fieldDescriptors[0];
        stmt.append(SELECT);
        stmt.append(field.getColumnName());
        stmt.append(FROM);
        stmt.append(cld.getFullTableName());
        appendWhereClause(cld, false, stmt);
        sql = stmt.toString();
    }
    return sql;
} | Return SELECT clause for object existence call |
/**
 * Returns a comparator that allows sorting a Vector of
 * FieldMappingDescriptors by their column number (m_Order / getColNo()),
 * in ascending order.
 */
public static Comparator getComparator()
{
    return new Comparator()
    {
        public int compare(Object left, Object right)
        {
            int colLeft = ((FieldDescriptor) left).getColNo();
            int colRight = ((FieldDescriptor) right).getColNo();
            if (colLeft == colRight)
            {
                return 0;
            }
            return colLeft < colRight ? -1 : 1;
        }
    };
} | Returns a comparator that allows sorting a Vector of FieldMappingDescriptors
according to their m_Order entries. |
/**
 * Determines the JDBC type (as specified by java.sql.Types) of this
 * field descriptor: from the column type declared in the metadata when
 * present, otherwise derived from the java field type via reflection.
 *
 * @return the resolved JdbcType
 * @throws OJBRuntimeException if neither lookup succeeds
 */
private JdbcType lookupJdbcType()
{
    String columnType = getColumnType();
    if (columnType != null)
    {
        // an explicit sql type was declared in the metadata
        try
        {
            return JdbcTypesHelper.getJdbcTypeByName(columnType);
        }
        catch(Exception e)
        {
            String eol = SystemUtils.LINE_SEPARATOR;
            throw new OJBRuntimeException("Can't assign the specified jdbc field-type '"+columnType+"' for field: "
                    + eol + this.toXML() + eol + "in class: " + eol + getClassDescriptor(), e);
        }
    }
    // no sql type in the metadata - derive it from the java type by reflection
    try
    {
        return JdbcTypesHelper.getJdbcTypeByReflection(m_PersistentField.getType().getName());
    }
    catch(Exception e)
    {
        String eol = SystemUtils.LINE_SEPARATOR;
        throw new OJBRuntimeException("Can't automatically assign a jdbc field-type for field: "
                + eol + this.toXML() + eol + "in class: " + eol + getClassDescriptor(), e);
    }
} | determines the JDBC type (represented as an int value as specified
by java.sql.Types) of a FIELDDESCRIPTOR.
@return int the int value representing the Type according to
java.sql.Types. |
/**
 * Instantiates the given FieldConversion implementation via its default
 * constructor and installs it as this field's conversion.
 *
 * @param fieldConversionClassName fully qualified FieldConversion class name
 * @throws MetadataException if the class cannot be instantiated
 */
public void setFieldConversionClassName(String fieldConversionClassName)
{
    FieldConversion conversion;
    try
    {
        conversion = (FieldConversion) ClassHelper.newInstance(fieldConversionClassName);
    }
    catch (Exception e)
    {
        throw new MetadataException(
            "Could not instantiate FieldConversion class using default constructor", e);
    }
    this.fieldConversion = conversion;
} | Sets the fieldConversion.
@param fieldConversionClassName The fieldConversion to set |
/**
 * @see XmlCapable#toXML()
 *
 * Renders this field descriptor as a field-descriptor XML element.
 * Mandatory attributes (name, column, jdbc-type, access) are always
 * written; optional attributes are only written when they differ from
 * their defaults.
 */
public String toXML()
{
    RepositoryTags tags = RepositoryTags.getInstance();
    String eol = SystemUtils.LINE_SEPARATOR;
    //opening tag + attributes
    StringBuffer result = new StringBuffer( 1024 );
    result.append( " " );
    result.append( tags.getOpeningTagNonClosingById( FIELD_DESCRIPTOR ) );
    result.append( " " );
    result.append( eol );
    // // id
    // String id = new Integer(getColNo()).toString();
    // result += /*" " +*/ tags.getAttribute(ID, id) + eol;
    // name
    result.append( " " );
    result.append( tags.getAttribute( FIELD_NAME, this.getAttributeName() ) );
    result.append( eol );
    // table not yet implemented
    // column
    result.append( " " );
    result.append( tags.getAttribute( COLUMN_NAME, this.getColumnName() ) );
    result.append( eol );
    // jdbc-type
    result.append( " " );
    result.append( tags.getAttribute( JDBC_TYPE, this.getColumnType() ) );
    result.append( eol );
    // primarykey (only written when true)
    if( this.isPrimaryKey() )
    {
        result.append( " " );
        result.append( tags.getAttribute( PRIMARY_KEY, "true" ) );
        result.append( eol );
    }
    // nullable (only written when the field is required)
    if( this.isRequired() )
    {
        result.append( " " );
        result.append( tags.getAttribute( NULLABLE, "false" ) );
        result.append( eol );
    }
    // indexed not yet implemented
    // autoincrement
    if( this.isAutoIncrement() )
    {
        result.append( " " );
        result.append( tags.getAttribute( AUTO_INCREMENT, "true" ) );
        result.append( eol );
    }
    // locking
    if( this.isLocking() )
    {
        result.append( " " );
        result.append( tags.getAttribute( LOCKING, "true" ) );
        result.append( eol );
    }
    // updateLock
    // default is true so only write if false
    if( !this.isUpdateLock() )
    {
        result.append( " " );
        result.append( tags.getAttribute( UPDATE_LOCK, "false" ) );
        result.append( eol );
    }
    // default-fetch not yet implemented
    // conversion (only written for non-default conversions)
    if( this.getFieldConversion().getClass() != FieldConversionDefaultImpl.class )
    {
        result.append( " " );
        result.append( tags.getAttribute( FIELD_CONVERSION, getFieldConversion().getClass().getName() ) );
        result.append( eol );
    }
    // length
    if( this.isLengthSpecified() )
    {
        result.append( " " );
        result.append( tags.getAttribute( LENGTH, "" + getLength() ) );
        result.append( eol );
    }
    // precision
    if( this.isPrecisionSpecified() )
    {
        result.append( " " );
        result.append( tags.getAttribute( PRECISION, "" + getPrecision() ) );
        result.append( eol );
    }
    // scale
    if( this.isScaleSpecified() )
    {
        result.append( " " );
        result.append( tags.getAttribute( SCALE, "" + getScale() ) );
        result.append( eol );
    }
    // access
    result.append( " " );
    result.append( tags.getAttribute( ACCESS, this.getAccess() ) );
    result.append( eol );
    // self-closing end of the element
    result.append( " />" );
    result.append( eol );
    return result.toString();
} | /*
@see XmlCapable#toXML() |
/**
 * Performs a show(), blocks until the dialog is disposed and returns the
 * connection. If no valid connection is established, null is returned.
 *
 * @return the established JDBC connection, or null
 */
public java.sql.Connection showAndReturnConnection()
{
    show();
    boolean interrupted = false;
    // FIX: hold the monitor across the isDisposed check and the wait() so a
    // notification between check and wait cannot be lost; the loop also
    // guards against spurious wakeups.
    synchronized (this)
    {
        while (!this.isDisposed)
        {
            try
            {
                wait();
            }
            catch (InterruptedException e)
            {
                // FIX: previously all Throwables were silently swallowed and
                // the interrupt status was lost. Keep waiting for the dialog
                // (wait() has cleared the flag) but remember the interrupt.
                interrupted = true;
            }
        }
    }
    if (interrupted)
    {
        // restore the interrupt status for the caller
        Thread.currentThread().interrupt();
    }
    return theConnection;
} | Performs a show(), blocks until the
dialog is disposed and returns the connection. If no valid connection is
established, null is returned. |
/**
 * Called from within the constructor to initialize the form: a GridBagLayout
 * dialog with JDBC driver/URL combo boxes, username/password fields, a
 * read-only result line and Cancel/Test/Open buttons.
 * WARNING: Do NOT modify this code. The content of this method is
 * always regenerated by the NetBeans Form Editor.
 */
private void initComponents()//GEN-BEGIN:initComponents
{
    java.awt.GridBagConstraints gridBagConstraints;
    // widget construction
    lblJDBCDriver = new javax.swing.JLabel();
    cmbJDBCDriver = new javax.swing.JComboBox();
    lblJDBCURL = new javax.swing.JLabel();
    cmbJDBCURL = new javax.swing.JComboBox();
    lblUsername = new javax.swing.JLabel();
    tfUsername = new javax.swing.JTextField();
    lblPassword = new javax.swing.JLabel();
    tfPassword = new javax.swing.JPasswordField();
    lblResult = new javax.swing.JTextField();
    jPanel1 = new javax.swing.JPanel();
    pbCancel = new javax.swing.JButton();
    pbTest = new javax.swing.JButton();
    pbOpen = new javax.swing.JButton();
    getContentPane().setLayout(new java.awt.GridBagLayout());
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    addWindowListener(new java.awt.event.WindowAdapter()
    {
        public void windowClosing(java.awt.event.WindowEvent evt)
        {
            closeDialog(evt);
        }
    });
    // row 0: JDBC driver class label + combo box
    lblJDBCDriver.setText("JDBC Driver Class:");
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    getContentPane().add(lblJDBCDriver, gridBagConstraints);
    cmbJDBCDriver.setFont(new java.awt.Font("Dialog", 0, 12));
    cmbJDBCDriver.setMinimumSize(new java.awt.Dimension(31, 20));
    cmbJDBCDriver.setPreferredSize(new java.awt.Dimension(31, 20));
    cmbJDBCDriver.addItemListener(new java.awt.event.ItemListener()
    {
        public void itemStateChanged(java.awt.event.ItemEvent evt)
        {
            cmbJDBCDriverItemStateChanged(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    getContentPane().add(cmbJDBCDriver, gridBagConstraints);
    // row 1: JDBC URL label + combo box
    lblJDBCURL.setText("JDBC URL:");
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    getContentPane().add(lblJDBCURL, gridBagConstraints);
    cmbJDBCURL.setFont(new java.awt.Font("Dialog", 0, 12));
    cmbJDBCURL.setMinimumSize(new java.awt.Dimension(31, 20));
    cmbJDBCURL.setPreferredSize(new java.awt.Dimension(31, 20));
    cmbJDBCURL.addItemListener(new java.awt.event.ItemListener()
    {
        public void itemStateChanged(java.awt.event.ItemEvent evt)
        {
            cmbJDBCURLItemStateChanged(evt);
        }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    getContentPane().add(cmbJDBCURL, gridBagConstraints);
    // row 2: username label + text field
    lblUsername.setText("Username:");
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    getContentPane().add(lblUsername, gridBagConstraints);
    tfUsername.setColumns(30);
    tfUsername.setText("jTextField3");
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.weightx = 1.0;
    getContentPane().add(tfUsername, gridBagConstraints);
    // row 3: password label + password field
    lblPassword.setText("Password:");
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    getContentPane().add(lblPassword, gridBagConstraints);
    tfPassword.setColumns(30);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    gridBagConstraints.weightx = 1.0;
    getContentPane().add(tfPassword, gridBagConstraints);
    // row 4: read-only result line styled like a label
    lblResult.setBackground((java.awt.Color) javax.swing.UIManager.getDefaults().get("Label.background"));
    lblResult.setEditable(false);
    lblResult.setText("jTextField1");
    lblResult.setBorder(null);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
    getContentPane().add(lblResult, gridBagConstraints);
    // row 5: button panel (Cancel / Test / Open); Test and Open start
    // disabled until driver and URL are selected
    pbCancel.setMnemonic('c');
    pbCancel.setText("Cancel");
    pbCancel.addActionListener(new java.awt.event.ActionListener()
    {
        public void actionPerformed(java.awt.event.ActionEvent evt)
        {
            pbCancelActionPerformed(evt);
        }
    });
    jPanel1.add(pbCancel);
    pbTest.setMnemonic('t');
    pbTest.setText("Test");
    pbTest.setEnabled(false);
    pbTest.addActionListener(new java.awt.event.ActionListener()
    {
        public void actionPerformed(java.awt.event.ActionEvent evt)
        {
            pbTestActionPerformed(evt);
        }
    });
    jPanel1.add(pbTest);
    pbOpen.setMnemonic('o');
    pbOpen.setText("Open");
    pbOpen.setEnabled(false);
    pbOpen.addActionListener(new java.awt.event.ActionListener()
    {
        public void actionPerformed(java.awt.event.ActionEvent evt)
        {
            pbOpenActionPerformed(evt);
        }
    });
    jPanel1.add(pbOpen);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
    getContentPane().add(jPanel1, gridBagConstraints);
    pack();
} | This method is called from within the constructor to
initialize the form.
WARNING: Do NOT modify this code. The content of this method is
always regenerated by the Form Editor. |
/**
 * Handles the "Open" button: re-runs the connection test and, when a
 * connection was established, persists the driver/URL history and the
 * username into the application properties before disposing the dialog.
 *
 * @param evt the triggering action event
 */
private void pbOpenActionPerformed (java.awt.event.ActionEvent evt)//GEN-FIRST:event_pbOpenActionPerformed
{//GEN-HEADEREND:event_pbOpenActionPerformed
    // run the connection test first; it establishes theConnection on success
    pbTestActionPerformed(evt);
    if (theConnection != null)
    {
        synchronized(Main.getProperties())
        {
            // remove the previously stored driver history entries
            int i = 0;
            while (Main.getProperties().getProperty(Main.PROPERTY_JDBCDRIVER + i) != null)
            {
                Main.getProperties().remove(Main.PROPERTY_JDBCDRIVER + i);
                i++;
            }
            // FIX: the index must start at 0 again for the URL entries;
            // previously it continued where the driver loop stopped, so the
            // URL properties with lower indices were never removed.
            i = 0;
            while (Main.getProperties().getProperty(Main.PROPERTY_JDBCURL + i) != null)
            {
                Main.getProperties().remove(Main.PROPERTY_JDBCURL + i);
                i++;
            }
            // store the current combo-box contents as the new history
            for (i = 0; i < cmbJDBCDriver.getModel().getSize(); i++)
                Main.getProperties().setProperty(Main.PROPERTY_JDBCDRIVER + i, cmbJDBCDriver.getModel().getElementAt(i).toString());
            for (i = 0; i < cmbJDBCURL.getModel().getSize(); i++)
                Main.getProperties().setProperty(Main.PROPERTY_JDBCURL + i, cmbJDBCURL.getModel().getElementAt(i).toString());
            Main.getProperties().setProperty(Main.PROPERTY_JDBCUSER, tfUsername.getText());
            Main.getProperties().storeProperties("");
        }
        dispose();
    }
} | GEN-END:initComponents |
/**
 * Enables the Test and Open buttons only while both a JDBC driver and a
 * JDBC URL are selected.
 *
 * @param evt the combo-box item event
 */
private void cmbJDBCURLItemStateChanged(java.awt.event.ItemEvent evt)//GEN-FIRST:event_cmbJDBCURLItemStateChanged
{//GEN-HEADEREND:event_cmbJDBCURLItemStateChanged
    boolean connectionDataComplete =
        this.cmbJDBCDriver.getSelectedItem() != null
        && this.cmbJDBCURL.getSelectedItem() != null;
    this.pbTest.setEnabled(connectionDataComplete);
    this.pbOpen.setEnabled(connectionDataComplete);
} | GEN-LAST:event_pbOpenActionPerformed |
/**
 * Loads the given JDBC driver class and opens a connection to the
 * database.
 *
 * @param strJDBCDriver fully qualified driver class name
 * @param strJDBCURL    the JDBC URL to connect to
 * @param strUsername   database user name
 * @param strPassword   database password
 * @return the opened connection
 * @throws java.sql.SQLException if the connection cannot be established
 * @throws java.lang.ClassNotFoundException if the driver class is missing
 */
private java.sql.Connection connectToDB(String strJDBCDriver, String strJDBCURL,
                                        String strUsername, String strPassword)
    throws java.sql.SQLException, java.lang.ClassNotFoundException
{
    // loading the class makes the driver register itself with the DriverManager
    Class.forName(strJDBCDriver);
    return java.sql.DriverManager.getConnection(strJDBCURL, strUsername, strPassword);
} | GEN-LAST:event_closeDialog |
/**
 * Sets the jdbc connection descriptor to use and maps its dbms onto the
 * corresponding torque target database, invalidating cached scripts when
 * the target changes.
 *
 * @param jcd the connection to use
 * @throws PlatformException if the target database cannot be handled with torque
 */
public void setConnection(JdbcConnectionDescriptor jcd) throws PlatformException
{
    _jcd = jcd;
    Object torqueDb = _dbmsToTorqueDb.get(_jcd.getDbms().toLowerCase());
    if (torqueDb == null)
    {
        throw new PlatformException("Database "+_jcd.getDbms()+" is not supported by torque");
    }
    String targetDatabase = (String) torqueDb;
    if (targetDatabase.equals(_targetDatabase))
    {
        return;
    }
    // the target changed - previously generated scripts are stale now
    _targetDatabase = targetDatabase;
    _creationScript = null;
    _initScripts.clear();
} | Sets the jdbc connection to use.
@param jcd The connection to use
@throws PlatformException If the target database cannot be handled with torque |
/**
 * Adds the input files (in our case torque schema files) to use,
 * registering each under a generated "schemaN.xml" key.
 *
 * @param srcDir          the directory containing the files
 * @param listOfFilenames the filenames in a comma-separated list
 * @throws IOException if a file cannot be read
 */
public void addDBDefinitionFiles(String srcDir, String listOfFilenames) throws IOException
{
    File dir = new File(srcDir);
    for (StringTokenizer tokenizer = new StringTokenizer(listOfFilenames, ","); tokenizer.hasMoreTokens(); )
    {
        String filename = tokenizer.nextToken().trim();
        if (filename.length() == 0)
        {
            // skip empty entries produced by stray commas
            continue;
        }
        _torqueSchemata.put("schema"+_torqueSchemata.size()+".xml",
                            readTextCompressed(new File(dir, filename)));
    }
} | Adds the input files (in our case torque schema files) to use.
@param srcDir The directory containing the files
@param listOfFilenames The filenames in a comma-separated list |
/**
 * Adds an input stream of a db definition (in our case of a torque schema
 * file), registering it under a generated "schemaN.xml" key.
 *
 * @param schemaStream the input stream
 * @throws IOException if the stream cannot be read
 */
public void addDBDefinitionFile(InputStream schemaStream) throws IOException
{
    String schemaKey = "schema"+_torqueSchemata.size()+".xml";
    _torqueSchemata.put(schemaKey, readStreamCompressed(schemaStream));
} | Adds an input stream of a db definition (in our case of a torque schema file).
@param schemaStream The input stream |
/**
 * Writes the torque schemata to files in the given directory and returns
 * a comma-separated list of the filenames.
 *
 * @param dir the directory to write the files to
 * @return the comma-separated list of filenames
 * @throws IOException if an error occurred
 */
private String writeSchemata(File dir) throws IOException
{
    writeCompressedTexts(dir, _torqueSchemata);
    StringBuffer includes = new StringBuffer();
    boolean first = true;
    for (Iterator it = _torqueSchemata.keySet().iterator(); it.hasNext();)
    {
        if (!first)
        {
            includes.append(",");
        }
        includes.append((String)it.next());
        first = false;
    }
    return includes.toString();
} | Writes the torque schemata to files in the given directory and returns
a comma-separated list of the filenames.
@param dir The directory to write the files to
@return The list of filenames
@throws IOException If an error occurred |
/**
 * Creates the db-creation sql script (but does not perform it). The stored
 * schemata are written to a temporary directory, processed by torque's
 * datamodel ant task, and the resulting script is kept GZIP-compressed in
 * the _creationScript field.
 *
 * @throws PlatformException If some error occurred
 */
public void createCreationScript() throws PlatformException
{
    Project project = new Project();
    TorqueDataModelTask modelTask = new TorqueDataModelTask();
    File tmpDir = null;
    File scriptFile = null;
    _creationScript = null;
    try
    {
        tmpDir = new File(getWorkDir(), "schemas");
        tmpDir.mkdir();
        String includes = writeSchemata(tmpDir);
        scriptFile = new File(tmpDir, CREATION_SCRIPT_NAME);
        project.setBasedir(tmpDir.getAbsolutePath());
        // populating with defaults
        modelTask.setProject(project);
        modelTask.setUseClasspath(true);
        modelTask.setControlTemplate("sql/db-init/Control.vm");
        modelTask.setOutputDirectory(tmpDir);
        modelTask.setOutputFile(CREATION_SCRIPT_NAME);
        modelTask.setTargetDatabase(_targetDatabase);
        FileSet files = new FileSet();
        files.setDir(tmpDir);
        files.setIncludes(includes);
        modelTask.addFileset(files);
        modelTask.execute();
        // keep the generated script in memory (compressed) for later use
        _creationScript = readTextCompressed(scriptFile);
        deleteDir(tmpDir);
    }
    catch (Exception ex)
    {
        // clean-up
        if ((tmpDir != null) && tmpDir.exists())
        {
            deleteDir(tmpDir);
        }
        throw new PlatformException(ex);
    }
}
@throws PlatformException If some error occurred |
public void createDB() throws PlatformException
{
if (_creationScript == null)
{
createCreationScript();
}
Project project = new Project();
TorqueDataModelTask modelTask = new TorqueDataModelTask();
File tmpDir = null;
File scriptFile = null;
try
{
tmpDir = new File(getWorkDir(), "schemas");
tmpDir.mkdir();
scriptFile = new File(tmpDir, CREATION_SCRIPT_NAME);
writeCompressedText(scriptFile, _creationScript);
project.setBasedir(tmpDir.getAbsolutePath());
// we use the ant task 'sql' to perform the creation script
SQLExec sqlTask = new SQLExec();
SQLExec.OnError onError = new SQLExec.OnError();
onError.setValue("continue");
sqlTask.setProject(project);
sqlTask.setAutocommit(true);
sqlTask.setDriver(_jcd.getDriver());
sqlTask.setOnerror(onError);
sqlTask.setUserid(_jcd.getUserName());
sqlTask.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord());
sqlTask.setUrl(getDBCreationUrl());
sqlTask.setSrc(scriptFile);
sqlTask.execute();
deleteDir(tmpDir);
}
catch (Exception ex)
{
// clean-up
if ((tmpDir != null) && tmpDir.exists())
{
try
{
scriptFile.delete();
}
catch (NullPointerException e)
{
LoggerFactory.getLogger(this.getClass()).error("NPE While deleting scriptFile [" + scriptFile.getName() + "]", e);
}
}
throw new PlatformException(ex);
}
} | Creates the database.
@throws PlatformException If some error occurred |
/**
 * Creates the initialization scripts (creation of tables etc.) but does
 * not perform them. The stored schemata are written to a temporary schema
 * directory, processed by torque's sql ant task, and the generated scripts
 * are read back GZIP-compressed into the _initScripts map.
 *
 * @throws PlatformException If some error occurred
 */
public void createInitScripts() throws PlatformException
{
    Project project = new Project();
    TorqueSQLTask sqlTask = new TorqueSQLTask();
    File schemaDir = null;
    File sqlDir = null;
    _initScripts.clear();
    try
    {
        File tmpDir = getWorkDir();
        schemaDir = new File(tmpDir, "schemas");
        sqlDir = new File(tmpDir, "sql");
        schemaDir.mkdir();
        sqlDir.mkdir();
        String includes = writeSchemata(schemaDir);
        // the sql-to-db map file must exist before the task runs
        File sqlDbMapFile = new File(sqlDir, SQL_DB_MAP_NAME);
        sqlDbMapFile.createNewFile();
        project.setBasedir(sqlDir.getAbsolutePath());
        // populating with defaults
        sqlTask.setProject(project);
        sqlTask.setUseClasspath(true);
        sqlTask.setBasePathToDbProps("sql/base/");
        sqlTask.setControlTemplate("sql/base/Control.vm");
        sqlTask.setOutputDirectory(sqlDir);
        // we put the report in the parent directory as we don't want
        // to read it in later on
        sqlTask.setOutputFile("../report.sql.generation");
        sqlTask.setSqlDbMap(SQL_DB_MAP_NAME);
        sqlTask.setTargetDatabase(_targetDatabase);
        FileSet files = new FileSet();
        files.setDir(schemaDir);
        files.setIncludes(includes);
        sqlTask.addFileset(files);
        sqlTask.execute();
        // pick up every generated sql file (compressed) from the output dir
        readTextsCompressed(sqlDir, _initScripts);
        deleteDir(schemaDir);
        deleteDir(sqlDir);
    }
    catch (Exception ex)
    {
        // clean-up
        if ((schemaDir != null) && schemaDir.exists())
        {
            deleteDir(schemaDir);
        }
        if ((sqlDir != null) && sqlDir.exists())
        {
            deleteDir(sqlDir);
        }
        throw new PlatformException(ex);
    }
}
not perform them.
@throws PlatformException If some error occurred |
public void initDB() throws PlatformException
{
if (_initScripts.isEmpty())
{
createInitScripts();
}
Project project = new Project();
TorqueSQLTask sqlTask = new TorqueSQLTask();
File outputDir = null;
try
{
outputDir = new File(getWorkDir(), "sql");
outputDir.mkdir();
writeCompressedTexts(outputDir, _initScripts);
project.setBasedir(outputDir.getAbsolutePath());
// executing the generated sql, but this time with a torque task
TorqueSQLExec sqlExec = new TorqueSQLExec();
TorqueSQLExec.OnError onError = new TorqueSQLExec.OnError();
sqlExec.setProject(project);
onError.setValue("continue");
sqlExec.setAutocommit(true);
sqlExec.setDriver(_jcd.getDriver());
sqlExec.setOnerror(onError);
sqlExec.setUserid(_jcd.getUserName());
sqlExec.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord());
sqlExec.setUrl(getDBManipulationUrl());
sqlExec.setSrcDir(outputDir.getAbsolutePath());
sqlExec.setSqlDbMap(SQL_DB_MAP_NAME);
sqlExec.execute();
deleteDir(outputDir);
}
catch (Exception ex)
{
// clean-up
if (outputDir != null)
{
deleteDir(outputDir);
}
throw new PlatformException(ex);
}
} | Creates the tables according to the schema files.
@throws PlatformException If some error occurred |
protected String getDBCreationUrl()
{
JdbcConnectionDescriptor jcd = getConnection();
// currently I only know about specifics for mysql
if (TORQUE_PLATFORM_MYSQL.equals(getTargetTorquePlatform()))
{
// we have to remove the db name as the jdbc driver would try to connect to
// a non-existing db
// the db-alias has this form: [host&port]/[dbname]?[options]
String dbAliasPrefix = jcd.getDbAlias();
String dbAliasSuffix = "";
int questionPos = dbAliasPrefix.indexOf('?');
if (questionPos > 0)
{
dbAliasSuffix = dbAliasPrefix.substring(questionPos);
dbAliasPrefix = dbAliasPrefix.substring(0, questionPos);
}
int slashPos = dbAliasPrefix.lastIndexOf('/');
if (slashPos > 0)
{
// it is important that the slash at the end is present
dbAliasPrefix = dbAliasPrefix.substring(0, slashPos + 1);
}
return jcd.getProtocol()+":"+jcd.getSubProtocol()+":"+dbAliasPrefix+dbAliasSuffix;
}
else if (TORQUE_PLATFORM_POSTGRESQL.equals(getTargetTorquePlatform()))
{
// we have to replace the db name with 'template1'
// the db-alias has this form: [host&port]/[dbname]?[options]
String dbAliasPrefix = jcd.getDbAlias();
String dbAliasSuffix = "";
int questionPos = dbAliasPrefix.indexOf('?');
if (questionPos > 0)
{
dbAliasSuffix = dbAliasPrefix.substring(questionPos);
dbAliasPrefix = dbAliasPrefix.substring(0, questionPos);
}
int slashPos = dbAliasPrefix.lastIndexOf('/');
if (slashPos > 0)
{
// it is important that the slash at the end is present
dbAliasPrefix = dbAliasPrefix.substring(0, slashPos + 1);
}
else
{
dbAliasPrefix += "/";
}
dbAliasPrefix += "template1";
if (dbAliasSuffix.length() > 0)
{
dbAliasPrefix += "/";
}
return jcd.getProtocol()+":"+jcd.getSubProtocol()+":"+dbAliasPrefix+dbAliasSuffix;
}
else
{
return jcd.getProtocol()+":"+jcd.getSubProtocol()+":"+jcd.getDbAlias();
}
} | Template-and-Hook method for generating the url required by the jdbc driver
to allow for creating a database (as opposed to accessing an already-existing
database). |
protected String getDBManipulationUrl()
{
JdbcConnectionDescriptor jcd = getConnection();
return jcd.getProtocol()+":"+jcd.getSubProtocol()+":"+jcd.getDbAlias();
} | Template-and-Hook method for generating the url required by the jdbc driver
to allow for modifying an existing database. |
private byte[] readStreamCompressed(InputStream stream) throws IOException
{
ByteArrayOutputStream bao = new ByteArrayOutputStream();
GZIPOutputStream gos = new GZIPOutputStream(bao);
OutputStreamWriter output = new OutputStreamWriter(gos);
BufferedReader input = new BufferedReader(new InputStreamReader(stream));
String line;
while ((line = input.readLine()) != null)
{
output.write(line);
output.write('\n');
}
input.close();
stream.close();
output.close();
gos.close();
bao.close();
return bao.toByteArray();
} | Reads the given text stream and compressed its content.
@param stream The input stream
@return A byte array containing the GZIP-compressed content of the stream
@throws IOException If an error occurred
private void writeCompressedText(File file, byte[] compressedContent) throws IOException
{
ByteArrayInputStream bais = new ByteArrayInputStream(compressedContent);
GZIPInputStream gis = new GZIPInputStream(bais);
BufferedReader input = new BufferedReader(new InputStreamReader(gis));
BufferedWriter output = new BufferedWriter(new FileWriter(file));
String line;
while ((line = input.readLine()) != null)
{
output.write(line);
output.write('\n');
}
input.close();
gis.close();
bais.close();
output.close();
} | Uncompresses the given textual content and writes it to the given file.
@param file The file to write to
@param compressedContent The content
@throws IOException If an error occurred |
private void writeCompressedTexts(File dir, HashMap contents) throws IOException
{
String filename;
for (Iterator nameIt = contents.keySet().iterator(); nameIt.hasNext();)
{
filename = (String)nameIt.next();
writeCompressedText(new File(dir, filename), (byte[])contents.get(filename));
}
} | Uncompresses the textual contents in the given map and and writes them to the files
denoted by the keys of the map.
@param dir The base directory into which the files will be written
@param contents The map containing the contents indexed by the filename
@throws IOException If an error occurred |
public void setWorkDir(String dir) throws IOException
{
File workDir = new File(dir);
if (!workDir.exists() || !workDir.canWrite() || !workDir.canRead())
{
throw new IOException("Cannot access directory "+dir);
}
_workDir = workDir;
} | Sets the working directory.
@param dir The directory
@throws IOException If the directory does not exist or cannot be written/read |
private File getWorkDir() throws IOException
{
if (_workDir == null)
{
File dummy = File.createTempFile("dummy", ".log");
String workDir = dummy.getPath().substring(0, dummy.getPath().lastIndexOf(File.separatorChar));
if ((workDir == null) || (workDir.length() == 0))
{
workDir = ".";
}
dummy.delete();
_workDir = new File(workDir);
}
return _workDir;
} | Returns the temporary directory used by java.
@return The temporary directory
@throws IOException If an io error occurred |
private void deleteDir(File dir)
{
if (dir.exists() && dir.isDirectory())
{
File[] files = dir.listFiles();
for (int idx = 0; idx < files.length; idx++)
{
if (!files[idx].exists())
{
continue;
}
if (files[idx].isDirectory())
{
deleteDir(files[idx]);
}
else
{
files[idx].delete();
}
}
dir.delete();
}
} | Little helper function that recursivly deletes a directory.
@param dir The directory |
/**
 * Builds the module dependency graph of the target module and returns the
 * graph as JSON.
 *
 * @param moduleName    The module name (path parameter)
 * @param moduleVersion The module version (path parameter)
 * @param uriInfo       Request context; its query parameters initialize the filters
 * @return 406 when name or version is missing, otherwise 200 with the graph
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/graph/{name}/{version}")
public Response getModuleGraph(@PathParam("name") final String moduleName,
        @PathParam("version") final String moduleVersion,
        @Context final UriInfo uriInfo){
    LOG.info("Dependency Checker got a get module graph export request.");
    if(moduleName == null || moduleVersion == null){
        return Response.serverError().status(HttpStatus.NOT_ACCEPTABLE_406).build();
    }
    final FiltersHolder filters = new FiltersHolder();
    filters.init(uriInfo.getQueryParameters());
    final String moduleId = DbModule.generateID(moduleName, moduleVersion);
    final AbstractGraph moduleGraph = getGraphsHandler(filters).getModuleGraph(moduleId);
    return Response.ok(moduleGraph).build();
}
@param moduleName
@param moduleVersion
@param uriInfo
@return Response |
/**
 * Add a layer with an optional filter expression which should be applied on the given layer.
 * <p/>
 * If the filter contains a geometry, then this needs to be in layer CRS, it is not converted!
 *
 * @param resultTag tag to make the distinction in the response object between the features for the same
 *        serverLayerId but a different filter (e.g. client layer id)
 * @param serverLayerId server layer id of the layer to set this filter on
 * @param filter filter expression for the specified layer, can be null (== true filter)
 * @since 1.10.0
 */
public void addLayerWithFilter(String resultTag, String serverLayerId, String filter) {
    // note: layer filter specification will be overwritten if it already
    // exists
    layerFilters.put(resultTag, new LayerFilterSpecification(serverLayerId, filter));
}
<p/>
If the filter contains a geometry, then this needs to be in layer CRS, it is not converted!
@param resultTag tag to make the distinction in the response object between the features for the same
serverLayerId but a different filter (e.g. client layer id)
@param serverLayerId server layerId layer to set this filter on
@param filter filter expression for the specified layer, can be null (==true filter)no client layer specific
filtering
@since 1.10.0 |
/**
 * Set the filter expression which should be applied on the given server layer. If there has already been
 * specified a filter for that server layer, it will be overwritten.
 * <p/>
 * If the filter contains a geometry, then this needs to be in layer CRS, it is not converted!
 *
 * @param serverLayerId server layer to set this filter on
 * @param filter filter expression
 * @deprecated use {@link #addLayerWithFilter(String, String, String)}.
 * @since 1.9.0
 */
@Deprecated
public void setFilter(String serverLayerId, String filter) {
    // the server layer id doubles as the result tag
    addLayerWithFilter(serverLayerId, serverLayerId, filter);
}
a filter for that server layer, it will be overwritten.
<p/>
If the filter contains a geometry, then this needs to be in layer CRS, it is not converted!
@param serverLayerId server layer to set this filter on
@param filter filter expression
@deprecated use {@link #addLayerWithFilter(String, String, String)}.
@since 1.9.0 |
public String[] getLayerIds() {
Set<String> layerIds = layerFilters.keySet();
if (layerIds == null) {
return new String[0];
}
return layerIds.toArray(new String[layerIds.size()]);
} | Get the result tags for layer specific filtering (e.g. client layer id's).
@return result tags for layer specific filtering (e.g. client layer id's)
@since 1.10.0 |
public void setLayerIds(String[] serverLayerIds) {
for (String serverLayerId : serverLayerIds) {
addLayerWithFilter(serverLayerId, serverLayerId, null);
}
} | Set the server layer ids.
<p/>
Note: use {@link #addLayerWithFilter(String, String, String)} to specify filter expressions
@param serverLayerIds server layer ids
@since 1.9.0 |
/**
 * Finish initializing the layer: resolves the CRS, creates the GeoTools data
 * store (when none was injected) from url/dbtype/parameters/dataSource, and
 * builds the feature model. On recoverable problems the layer is marked
 * unusable (featureModelUsable = false) instead of failing hard.
 *
 * @throws LayerException when the required GeoTools library support is missing
 */
@PostConstruct
protected void initFeatures() throws LayerException {
    if (null == layerInfo) {
        return;
    }
    crs = geoService.getCrs2(layerInfo.getCrs());
    setFeatureSourceName(layerInfo.getFeatureInfo().getDataSourceName());
    try {
        if (null == super.getDataStore()) {
            Map<String, Object> params = new HashMap<String, Object>();
            if (null != url) {
                params.put(ShapefileDataStoreFactory.URLP.key, url);
            }
            if (null != dbtype) {
                params.put(JDBCDataStoreFactory.DBTYPE.key, dbtype);
            }
            if (null != parameters) {
                for (Parameter parameter : parameters) {
                    params.put(parameter.getName(), parameter.getValue());
                }
            }
            if (null != dataSource) {
                params.put(JDBCDataStoreFactory.DATASOURCE.key, dataSource);
                // these are apparently required but not used
                params.put(JDBCDataStoreFactory.DATABASE.key, "some_database");
                params.put(JDBCDataStoreFactory.USER.key, "some_user");
                params.put(JDBCDataStoreFactory.PASSWD.key, "some_password");
                params.put(JDBCDataStoreFactory.HOST.key, "some host");
                params.put(JDBCDataStoreFactory.PORT.key, "0");
            }
            DataStore store = DataStoreFactory.create(params);
            super.setDataStore(store);
        }
        if (null == super.getDataStore()) {
            // no store could be created - leave the layer uninitialized
            return;
        }
        this.featureModel = new GeoToolsFeatureModel(super.getDataStore(), layerInfo.getFeatureInfo()
                .getDataSourceName(), geoService.getSridFromCrs(layerInfo.getCrs()), converterService);
        featureModel.setLayerInfo(layerInfo);
        featureModelUsable = true;
    } catch (IOException ioe) {
        // only the "missing library" marker message is treated as fatal
        if (MAGIC_STRING_LIBRARY_MISSING.equals(ioe.getMessage())) {
            throw new LayerException(ioe, ExceptionCode.LAYER_MODEL_IO_EXCEPTION, url);
        } else {
            featureModelUsable = false;
            log.warn("The layer could not be correctly initialized: " + getId(), ioe);
        }
    } catch (LayerException le) {
        featureModelUsable = false;
        log.warn("The layer could not be correctly initialized: " + getId(), le);
    } catch (RuntimeException e) {
        featureModelUsable = false;
        log.warn("The layer could not be correctly initialized: " + getId(), e);
    }
}
@throws LayerException oops |
/**
 * Update an existing feature. Made package private for testing purposes.
 * <p>
 * All attribute values are written first, then the geometry, both restricted
 * by a feature-id filter selecting the feature being updated. The underlying
 * source must be a SimpleFeatureStore, otherwise updates are not supported.
 *
 * @param feature feature to update
 * @throws LayerException when writing fails or the source is read-only
 */
void update(Object feature) throws LayerException {
    SimpleFeatureSource source = getFeatureSource();
    if (source instanceof SimpleFeatureStore) {
        SimpleFeatureStore store = (SimpleFeatureStore) source;
        String featureId = getFeatureModel().getId(feature);
        Filter filter = filterService.createFidFilter(new String[] { featureId });
        transactionSynchronization.synchTransaction(store);
        List<Name> names = new ArrayList<Name>();
        Map<String, Attribute> attrMap = getFeatureModel().getAttributes(feature);
        List<Object> values = new ArrayList<Object>();
        for (Map.Entry<String, Attribute> entry : attrMap.entrySet()) {
            String name = entry.getKey();
            names.add(store.getSchema().getDescriptor(name).getName());
            values.add(entry.getValue().getValue());
        }
        try {
            store.modifyFeatures(names.toArray(new Name[names.size()]), values.toArray(), filter);
            // geometry is written separately through the schema's geometry descriptor
            store.modifyFeatures(store.getSchema().getGeometryDescriptor().getName(), getFeatureModel()
                    .getGeometry(feature), filter);
            log.debug("Updated feature {} in {}", featureId, getFeatureSourceName());
        } catch (IOException ioe) {
            featureModelUsable = false;
            throw new LayerException(ioe, ExceptionCode.LAYER_MODEL_IO_EXCEPTION);
        }
    } else {
        log.error("Don't know how to create or update " + getFeatureSourceName() + ", class "
                + source.getClass().getName() + " does not implement SimpleFeatureStore");
        throw new LayerException(ExceptionCode.CREATE_OR_UPDATE_NOT_IMPLEMENTED, getFeatureSourceName(), source
                .getClass().getName());
    }
}
@param feature feature to update
@throws LayerException oops |
/**
 * Retrieve the features matching the given filter, starting at the given
 * offset and limited to maxResultSize results (unlimited when <= 0).
 *
 * @param filter        filter to apply
 * @param offset        start index into the result set
 * @param maxResultSize maximum number of features to return, <= 0 for no limit
 * @return iterator over the matching features
 * @throws LayerException wrapping any underlying problem
 */
@Transactional(readOnly = true)
public Iterator<?> getElements(Filter filter, int offset, int maxResultSize) throws LayerException {
    FeatureSource<SimpleFeatureType, SimpleFeature> source = getFeatureSource();
    try {
        if (source instanceof FeatureStore<?, ?>) {
            // writable source: make sure it takes part in the current transaction
            SimpleFeatureStore store = (SimpleFeatureStore) source;
            transactionSynchronization.synchTransaction(store);
        }
        Query query = new Query();
        query.setFilter(filter);
        query.setMaxFeatures(maxResultSize > 0 ? maxResultSize : Integer.MAX_VALUE);
        query.setStartIndex(offset);
        FeatureCollection<SimpleFeatureType, SimpleFeature> fc = source.getFeatures(query);
        FeatureIterator<SimpleFeature> it = fc.features();
        // register the iterator so it gets closed when the transaction ends
        transactionSynchronization.addIterator(it);
        return new JavaIterator(it);
    } catch (Throwable t) { // NOSONAR avoid errors (like NPE) as well
        throw new LayerException(t, ExceptionCode.UNEXPECTED_PROBLEM);
    }
}
@Override
protected String convert(final LoggingEvent event) {
//
// code should be unreachable.
//
final StringBuffer sbuf = new StringBuffer();
format(sbuf, event);
return sbuf.toString();
} | {@inheritDoc} |
@Override
public void format(final StringBuffer sbuf, final LoggingEvent event) {
for (int i = 0; i < patternConverters.length; i++) {
final int startField = sbuf.length();
patternConverters[i].format(event, sbuf);
patternFields[i].format(startField, sbuf);
}
} | Format event to string buffer.
@param sbuf
string buffer to receive formatted event, may not be null.
@param event
event to format, may not be null. |
/**
 * Sets the object to be edited by this property editor and populates the
 * catalog name text field from it. Only DBMetaCatalogNode targets are supported.
 *
 * @param target the editor target
 * @throws UnsupportedOperationException if the target is not a DBMetaCatalogNode
 */
public void setEditorTarget (PropertyEditorTarget target)
{
    if (target instanceof DBMetaCatalogNode)
    {
        super.setEditorTarget(target);
        this.tfCatalogName.setText((String)target.getAttribute(DBMetaCatalogNode.ATT_CATALOG_NAME));
    }
    else
    {
        throw new UnsupportedOperationException("This editor can only edit DBMetaCatalogNode objects");
    }
}
/**
 * Returns the last used sequence value for the given sequence name, or
 * <code>null</code> if no value was stored for it. The lookup is scoped by the
 * connection alias of the calling broker, so each database tracks its
 * sequences independently.
 *
 * @param sequenceName Name of the sequence.
 * @return Last used sequence value or <code>null</code>
 */
private Long getSequence(String sequenceName)
{
    Long result = null;
    // now lookup the sequence map for calling DB
    Map mapForDB = (Map) sequencesDBMap.get(getBrokerForClass()
            .serviceConnectionManager().getConnectionDescriptor().getJcdAlias());
    if(mapForDB != null)
    {
        result = (Long) mapForDB.get(sequenceName);
    }
    return result;
}
was added for the given sequence name.
@param sequenceName Name of the sequence.
@return Last used sequence value or <code>null</code> |
/**
 * Reads the database model from the configured schema input: either the single
 * schema file or the sub filesets (whose schemas are merged into one model).
 * Specifying both at once is an error.
 *
 * @return The (merged) database model, or null when nothing was read
 * @throws BuildException If both input kinds are given or models cannot be merged
 */
protected Database readModel()
{
    DatabaseIO reader = new DatabaseIO();
    Database model = null;
    reader.setUseInternalDtd(_useInternalDtd);
    if ((_singleSchemaFile != null) && !_fileSets.isEmpty())
    {
        throw new BuildException("Please use either the schemafile attribute or the sub fileset element, but not both");
    }
    if (_singleSchemaFile != null)
    {
        model = readSingleSchemaFile(reader, _singleSchemaFile);
    }
    else
    {
        for (Iterator it = _fileSets.iterator(); it.hasNext();)
        {
            FileSet fileSet = (FileSet)it.next();
            File fileSetDir = fileSet.getDir(getProject());
            DirectoryScanner scanner = fileSet.getDirectoryScanner(getProject());
            String[] files = scanner.getIncludedFiles();
            for (int idx = 0; (files != null) && (idx < files.length); idx++)
            {
                Database curModel = readSingleSchemaFile(reader, new File(fileSetDir, files[idx]));
                if (model == null)
                {
                    // first successfully read schema becomes the base model
                    model = curModel;
                }
                else if (curModel != null)
                {
                    try
                    {
                        model.mergeWith(curModel);
                    }
                    catch (IllegalArgumentException ex)
                    {
                        throw new BuildException("Could not merge with schema from file "+files[idx]+": "+ex.getLocalizedMessage(), ex);
                    }
                }
            }
        }
    }
    return model;
}
/**
 * Reads a single schema file. Unreadable or non-file paths are logged as
 * errors and yield null instead of aborting the build; actual parse errors
 * do abort.
 *
 * @param reader     The schema reader
 * @param schemaFile The schema file
 * @return The model, or null when the file could not be accessed
 * @throws BuildException If the file could not be parsed
 */
private Database readSingleSchemaFile(DatabaseIO reader, File schemaFile)
{
    Database model = null;
    if (!schemaFile.isFile())
    {
        log("Path "+schemaFile.getAbsolutePath()+" does not denote a schema file", Project.MSG_ERR);
    }
    else if (!schemaFile.canRead())
    {
        log("Could not read schema file "+schemaFile.getAbsolutePath(), Project.MSG_ERR);
    }
    else
    {
        try
        {
            model = reader.read(schemaFile);
            log("Read schema file "+schemaFile.getAbsolutePath(), Project.MSG_INFO);
        }
        catch (Exception ex)
        {
            throw new BuildException("Could not read schema file "+schemaFile.getAbsolutePath()+": "+ex.getLocalizedMessage(), ex);
        }
    }
    return model;
}
@param reader The schema reader
@param schemaFile The schema file
@return The model |
private MetadataManager initOJB()
{
try
{
if (_ojbPropertiesFile == null)
{
_ojbPropertiesFile = new File("OJB.properties");
if (!_ojbPropertiesFile.exists())
{
throw new BuildException("Could not find OJB.properties, please specify it via the ojbpropertiesfile attribute");
}
}
else
{
if (!_ojbPropertiesFile.exists())
{
throw new BuildException("Could not load the specified OJB properties file "+_ojbPropertiesFile);
}
log("Using properties file "+_ojbPropertiesFile.getAbsolutePath(), Project.MSG_INFO);
System.setProperty("OJB.properties", _ojbPropertiesFile.getAbsolutePath());
}
MetadataManager metadataManager = MetadataManager.getInstance();
RepositoryPersistor persistor = new RepositoryPersistor();
if (_repositoryFile != null)
{
if (!_repositoryFile.exists())
{
throw new BuildException("Could not load the specified repository file "+_repositoryFile);
}
log("Loading repository file "+_repositoryFile.getAbsolutePath(), Project.MSG_INFO);
// this will load the info from the specified repository file
// and merge it with the existing info (if it has been loaded)
metadataManager.mergeConnectionRepository(persistor.readConnectionRepository(_repositoryFile.getAbsolutePath()));
metadataManager.mergeDescriptorRepository(persistor.readDescriptorRepository(_repositoryFile.getAbsolutePath()));
}
else if (metadataManager.connectionRepository().getAllDescriptor().isEmpty() &&
metadataManager.getGlobalRepository().getDescriptorTable().isEmpty())
{
// Seems nothing was loaded, probably because we're not starting in the directory
// that the properties file is in, and the repository file path is relative
// So lets try to resolve this path and load the repository info manually
Properties props = new Properties();
props.load(new FileInputStream(_ojbPropertiesFile));
String repositoryPath = props.getProperty("repositoryFile", "repository.xml");
File repositoryFile = new File(repositoryPath);
if (!repositoryFile.exists())
{
repositoryFile = new File(_ojbPropertiesFile.getParentFile(), repositoryPath);
}
metadataManager.mergeConnectionRepository(persistor.readConnectionRepository(repositoryFile.getAbsolutePath()));
metadataManager.mergeDescriptorRepository(persistor.readDescriptorRepository(repositoryFile.getAbsolutePath()));
}
// we might have to determine the default pb key ourselves
if (metadataManager.getDefaultPBKey() == null)
{
for (Iterator it = metadataManager.connectionRepository().getAllDescriptor().iterator(); it.hasNext();)
{
JdbcConnectionDescriptor descriptor = (JdbcConnectionDescriptor)it.next();
if (descriptor.isDefaultConnection())
{
metadataManager.setDefaultPBKey(new PBKey(descriptor.getJcdAlias(), descriptor.getUserName(), descriptor.getPassWord()));
break;
}
}
}
return metadataManager;
}
catch (Exception ex)
{
if (ex instanceof BuildException)
{
throw (BuildException)ex;
}
else
{
throw new BuildException(ex);
}
}
} | Initializes OJB for the purposes of this task.
@return The metadata manager used by OJB |
/**
 * Executes the configured sub tasks against the database model and the OJB
 * metadata. The thread context class loader is temporarily swapped so that
 * resources on this task's class path can be found, and is restored afterwards.
 *
 * @throws BuildException If no database model is available or a sub task fails
 */
public void execute() throws BuildException
{
    if (_commands.isEmpty())
    {
        log("No sub tasks specified, so there is nothing to do.", Project.MSG_INFO);
        return;
    }
    ClassLoader sysClassLoader = Thread.currentThread().getContextClassLoader();
    AntClassLoader newClassLoader = new AntClassLoader(getClass().getClassLoader(), true);
    // we're changing the thread classloader so that we can access resources
    // from the classpath used to load this task's class
    Thread.currentThread().setContextClassLoader(newClassLoader);
    try
    {
        MetadataManager manager = initOJB();
        Database dbModel = readModel();
        DescriptorRepository objModel = manager.getGlobalRepository();
        if (dbModel == null)
        {
            throw new BuildException("No database model specified");
        }
        for (Iterator it = _commands.iterator(); it.hasNext();)
        {
            Command cmd = (Command)it.next();
            cmd.setPlatform(getPlatform());
            cmd.execute(this, dbModel, objModel);
        }
    }
    finally
    {
        // rollback of our classloader change
        Thread.currentThread().setContextClassLoader(sysClassLoader);
    }
}
private static String getStatusFlagAsString(int status)
{
String statusName = "no match, unknown status!";
try
{
Field[] fields = Status.class.getDeclaredFields();
for (int i = 0; i < fields.length; i++)
{
if (fields[i].getInt(null) == status)
{
statusName = fields[i].getName();
break;
}
}
}
catch (Exception e)
{
statusName = "no match, unknown status!";
}
return statusName;
} | Returns a string representation of the given
{@link javax.transaction.Status} flag. |
/**
 * Build Criteria based on an example object: every non-null persistent field
 * value of the example is added as an EqualTo criterion.
 *
 * @param anExample the example instance
 * @return the criteria matching the example
 */
private static Criteria buildCriteria(Object anExample)
{
    Criteria criteria = new Criteria();
    ClassDescriptor cld = MetadataManager.getInstance().getRepository().getDescriptorFor(anExample.getClass());
    FieldDescriptor[] fds = cld.getFieldDescriptions();
    PersistentField f;
    Object value;
    for (int i = 0; i < fds.length; i++)
    {
        try
        {
            f = fds[i].getPersistentField();
            value = f.get(anExample);
            if (value != null)
            {
                criteria.addEqualTo(f.getName(), value);
            }
        }
        catch (Throwable ex)
        {
            // failures for individual fields are logged and skipped so the other
            // fields still contribute; NOTE(review): catching Throwable is very
            // broad - consider narrowing to Exception
            LoggerFactory.getDefaultLogger().error(ex);
        }
    }
    return criteria;
}
All non-null values are used as EqualTo criteria.
public void addPathClass(String aPath, Class aClass)
{
List pathClasses = (List) m_pathClasses.get(aPath);
if(pathClasses == null)
{
setPathClass(aPath, aClass);
}
else
{
pathClasses.add(aClass);
}
} | Add a hint Class for a path. Used for relationships to extents.<br>
SqlStatment will use these hint classes when resolving the path.
Without these hints SqlStatment will use the base class the
relationship points to ie: Article instead of CdArticle.
@param aPath the path segment ie: allArticlesInGroup
@param aClass the Class ie: CdArticle
@see org.apache.ojb.broker.QueryTest#testInversePathExpression() |
public void setPathClass(String aPath, Class aClass)
{
List pathClasses = new ArrayList();
pathClasses.add(aClass);
m_pathClasses.put(aPath, pathClasses);
} | Set the Class for a path. Used for relationships to extents.<br>
SqlStatment will use this class when resolving the path.
Without this hint SqlStatment will use the base class the
relationship points to ie: Article instead of CdArticle.
Using this method is the same as adding just one hint
@param aPath the path segment ie: allArticlesInGroup
@param aClass the Class ie: CdArticle
@see org.apache.ojb.broker.QueryTest#testInversePathExpression()
@see #addPathClass |
public void addGroupBy(String fieldName)
{
if (fieldName != null)
{
m_groupby.add(new FieldHelper(fieldName, false));
}
} | Adds a groupby fieldName for ReportQueries.
@param fieldName The groupby to set |
public void addOrderBy(String fieldName, boolean sortAscending)
{
if (fieldName != null)
{
m_orderby.add(new FieldHelper(fieldName, sortAscending));
}
} | Adds a field for orderBy
@param fieldName The field name to be used
@param sortAscending true for ASCENDING, false for DESCENDING |
public List getPrefetchedRelationships()
{
// BRJ:
// combine data from query and criteria
// TODO: to be removed when Criteria#addPrefetchedRelationship is removed
ArrayList temp = new ArrayList();
temp.addAll(m_prefetchedRelationships);
if (getCriteria() != null)
{
temp.addAll(getCriteria().getPrefetchedRelationships());
}
return temp;
} | /* (non-Javadoc)
@see org.apache.ojb.broker.query.Query#getPrefetchedRelationships() |
/**
 * Use this method to query some related class by object references,
 * for example query.setObjectProjectionAttribute("ref1.ref2.ref3");
 * <p>
 * The attribute path is resolved against the base class; the projection is
 * only applied when the last path segment is an object reference (otherwise
 * the call is silently a no-op).
 *
 * @param objectProjectionAttribute the attribute path to the referenced class
 */
public void setObjectProjectionAttribute(String objectProjectionAttribute)
{
    ClassDescriptor baseCld = MetadataManager.getInstance().getRepository().getDescriptorFor(m_baseClass);
    ArrayList descs = baseCld.getAttributeDescriptorsForPath(objectProjectionAttribute);
    int pathLen = descs.size();
    if ((pathLen > 0) && (descs.get(pathLen - 1) instanceof ObjectReferenceDescriptor))
    {
        ObjectReferenceDescriptor ord =
                ((ObjectReferenceDescriptor) descs.get(pathLen - 1));
        // delegate with the item class of the reference the path points at
        setObjectProjectionAttribute(objectProjectionAttribute,
                                     ord.getItemClass());
    }
}
for example query.setObjectProjectionAttribute("ref1.ref2.ref3"); |
public Connection getInnermostDelegate()
{
Connection c = _conn;
while (c != null && c instanceof WrappedConnection)
{
c = ((WrappedConnection) c).getDelegate();
if (this == c)
{
return null;
}
}
return c;
} | If my underlying <tt>Connection</tt> is not a
<tt>WrappedConnection</tt>, returns it,
otherwise recursively invokes this method on
my delegate.
<p>
Hence this method will return the first
delegate that is not a <tt>WrappedConnection</tt>,
or <tt>null</tt> when no non-<tt>WrappedConnection</tt>
delegate can be found by transversing this chain.
<p>
This method is useful when you may have nested
<tt>WrappedConnection</tt>s, and you want to make
sure to obtain a "genuine" {@link java.sql.Connection}. |
public String putDocument(Document document) {
String key = UUID.randomUUID().toString();
documentMap.put(key, document);
return key;
} | Puts a new document in the service. The generate key is globally unique.
@param document document
@return key unique key to reference the document |
public Document removeDocument(String key) throws PrintingException {
if (documentMap.containsKey(key)) {
return documentMap.remove(key);
} else {
throw new PrintingException(PrintingException.DOCUMENT_NOT_FOUND, key);
}
} | Gets a document from the service.
@param key
unique key to reference the document
@return the document or null if no such document |
/**
 * Build a Pk-Query based on the ClassDescriptor.
 *
 * @param cld the class descriptor
 * @return a select-by-PK query; the criteria values are null placeholders
 *         (NOTE(review): presumably bound later by the caller - confirm)
 */
private static Query buildQuery(ClassDescriptor cld)
{
    FieldDescriptor[] pkFields = cld.getPkFields();
    Criteria crit = new Criteria();
    for(int i = 0; i < pkFields.length; i++)
    {
        crit.addEqualTo(pkFields[i].getAttributeName(), null);
    }
    return new QueryByCriteria(cld.getClassOfObject(), crit);
}
@param cld
@return a select by PK query |
public String getMessage(Locale locale) {
if (getCause() != null) {
String message = getShortMessage(locale) + ", " + translate("ROOT_CAUSE", locale) + " ";
if (getCause() instanceof GeomajasException) {
return message + ((GeomajasException) getCause()).getMessage(locale);
}
return message + getCause().getMessage();
} else {
return getShortMessage(locale);
}
} | Get the exception message using the requested locale.
@param locale locale for message
@return exception message |
/**
 * Get the short exception message using the requested locale. This does not include the cause
 * exception message.
 * <p>
 * Message parameters come in two flavours: "$${i}" placeholders are translated themselves before
 * substitution, "${i}" placeholders are substituted verbatim. A parameter referenced by neither
 * placeholder is appended to the message in parentheses so no information is lost.
 *
 * @param locale locale for message
 * @return (short) exception message
 */
public String getShortMessage(Locale locale) {
	String message;
	// the exception code is the lookup key for the message template
	message = translate(Integer.toString(exceptionCode), locale);
	if (message != null && msgParameters != null && msgParameters.length > 0) {
		for (int i = 0; i < msgParameters.length; i++) {
			boolean isIncluded = false;
			// "$${i}": the parameter value itself must be translated before substitution
			String needTranslationParam = "$${" + i + "}";
			if (message.contains(needTranslationParam)) {
				String translation = translate(msgParameters[i], locale);
				if (null == translation && null != msgParameters[i]) {
					// no translation available, fall back to the raw parameter value
					translation = msgParameters[i].toString();
				}
				if (null == translation) {
					translation = "[null]";
				}
				message = message.replace(needTranslationParam, translation);
				isIncluded = true;
			}
			// "${i}": the parameter value is substituted as-is
			String verbatimParam = "${" + i + "}";
			String rs = null == msgParameters[i] ? "[null]" : msgParameters[i].toString();
			if (message.contains(verbatimParam)) {
				message = message.replace(verbatimParam, rs);
				isIncluded = true;
			}
			if (!isIncluded) {
				// parameter not referenced in the template: append it for completeness
				message = message + " (" + rs + ")"; // NOSONAR replace/contains makes StringBuilder use difficult
			}
		}
	}
	return message;
}
public PlanarImage toDirectColorModel(RenderedImage img) {
BufferedImage dest = new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
BufferedImage source = new BufferedImage(img.getColorModel(), (WritableRaster) img.getData(), img
.getColorModel().isAlphaPremultiplied(), null);
ColorConvertOp op = new ColorConvertOp(null);
op.filter(source, dest);
return PlanarImage.wrapRenderedImage(dest);
} | Converts an image to a RGBA direct color model using a workaround via buffered image directly calling the
ColorConvert operation fails for unknown reasons ?!
@param img image to convert
@return converted image |
/**
 * Returns a new managed connection. This connection is wrapped around the real connection
 * and delegates to it to get work done.
 *
 * @param subject security subject (not used here)
 * @param info connection request info; assumed to be an OTMConnectionRequestInfo carrying the PBKey
 *             -- TODO confirm callers never pass another ConnectionRequestInfo implementation
 * @return a new OTMJCAManagedConnection wrapping a freshly acquired OTMConnection
 */
public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo info)
{
    Util.log("In OTMJCAManagedConnectionFactory.createManagedConnection");
    try
    {
        Kit kit = getKit();
        PBKey key = ((OTMConnectionRequestInfo) info).getPbKey();
        OTMConnection connection = kit.acquireConnection(key);
        return new OTMJCAManagedConnection(this, connection, key);
    }
    catch (ResourceException e)
    {
        // NOTE(review): only the message is kept, the original stack trace is lost --
        // consider passing 'e' as the cause if OTMConnectionRuntimeException supports it
        throw new OTMConnectionRuntimeException(e.getMessage());
    }
}
public void render(OutputStream outputStream, Format format, int dpi) throws PrintingException {
try {
if (baos == null) {
prepare();
}
writeDocument(outputStream, format, dpi);
} catch (Exception e) { // NOSONAR
throw new PrintingException(e, PrintingException.DOCUMENT_RENDER_PROBLEM);
}
} | Renders the document to the specified output stream. |
/**
 * Prepares the PDF document in the internal byte buffer before rendering.
 * <p>
 * Performs a first layout pass to calculate sizes; when the page constraint has no
 * fixed width/height, the document is restarted with the calculated bounds. A second
 * pass lays out the children, after which the page is rendered into the buffer.
 *
 * @throws DocumentException oops
 * @throws IOException oops
 * @throws PrintingException oops
 */
private void prepare() throws IOException, DocumentException, PrintingException {
	if (baos == null) {
		baos = new ByteArrayOutputStream(); // let it grow as much as needed
	}
	baos.reset();
	// auto-size mode: the final document bounds are only known after the first pass
	boolean resize = false;
	if (page.getConstraint().getWidth() == 0 || page.getConstraint().getHeight() == 0) {
		resize = true;
	}
	// Create a document in the requested ISO scale.
	Document document = new Document(page.getBounds(), 0, 0, 0, 0);
	PdfWriter writer;
	writer = PdfWriter.getInstance(document, baos);
	// Render in correct colors for transparent rasters
	writer.setRgbTransparencyBlending(true);
	// The mapView is not scaled to the document, we assume the mapView
	// has the right ratio.
	// Write document title and metadata
	document.open();
	PdfContext context = new PdfContext(writer);
	context.initSize(page.getBounds());
	// first pass of all children to calculate size
	page.calculateSize(context);
	if (resize) {
		// we now know the bounds of the document
		// round 'm up and restart with a new document
		int width = (int) Math.ceil(page.getBounds().getWidth());
		int height = (int) Math.ceil(page.getBounds().getHeight());
		page.getConstraint().setWidth(width);
		page.getConstraint().setHeight(height);
		document = new Document(new Rectangle(width, height), 0, 0, 0, 0);
		writer = PdfWriter.getInstance(document, baos);
		// Render in correct colors for transparent rasters
		writer.setRgbTransparencyBlending(true);
		document.open();
		// discard everything written during the sizing pass
		baos.reset();
		context = new PdfContext(writer);
		context.initSize(page.getBounds());
	}
	// int compressionLevel = writer.getCompressionLevel(); // For testing
	// writer.setCompressionLevel(0);
	// Actual drawing
	document.addTitle("Geomajas");
	// second pass to layout
	page.layout(context);
	// finally render (uses baos)
	page.render(context);
	document.add(context.getImage());
	// Now close the document
	document.close();
}
private static synchronized boolean isLog4JConfigured()
{
if(!log4jConfigured)
{
Enumeration en = org.apache.log4j.Logger.getRootLogger().getAllAppenders();
if (!(en instanceof org.apache.log4j.helpers.NullEnumeration))
{
log4jConfigured = true;
}
else
{
Enumeration cats = LogManager.getCurrentLoggers();
while (cats.hasMoreElements())
{
org.apache.log4j.Logger c = (org.apache.log4j.Logger) cats.nextElement();
if (!(c.getAllAppenders() instanceof org.apache.log4j.helpers.NullEnumeration))
{
log4jConfigured = true;
}
}
}
if(log4jConfigured)
{
String msg = "Log4J is already configured, will not search for log4j properties file";
LoggerFactory.getBootLogger().info(msg);
}
else
{
LoggerFactory.getBootLogger().info("Log4J is not configured");
}
}
return log4jConfigured;
} | Helper method to check if log4j is already configured |
private static synchronized void initializeLog4JSubSystem(String configFile)
{
LoggerFactory.getBootLogger().info("Initializing Log4J using file: '" + configFile + "'");
if(configFile == null || "".equals(configFile.trim()))
{
// no configuration available
LoggerFactory.getBootLogger().warn("No log4j configuration file specified");
}
else
{
// try resource look in classpath
URL url = ClassHelper.getResource(configFile);
LoggerFactory.getBootLogger().info("Initializing Log4J : resource from config file:" + url);
if (url != null)
{
PropertyConfigurator.configure(url);
}
// if file is not in classpath try ordinary filesystem lookup
else
{
PropertyConfigurator.configure(configFile);
}
}
log4jConfigured = true;
} | Initialization of log4j <br>
<b>NOTE</b> - if log4j property file is called log4j.properties then
log4j will be configured already. |
/**
 * Lazily gets the underlying Log4J logger registered under this adapter's name.
 *
 * @return Returns a Category (org.apache.log4j.Logger)
 */
private org.apache.log4j.Logger getLogger()
{
    /*
    Logger interface extends Serializable, thus Log field is
    declared 'transient' and we have to null-check
    */
    if (logger == null)
    {
        logger = org.apache.log4j.Logger.getLogger(name);
    }
    return logger;
}
/**
 * Generate a message for loglevel DEBUG. The wrapper class name (FQCN) is passed
 * so Log4J can report the original caller's location, not this wrapper's.
 *
 * @param pObject the message Object
 */
public final void debug(Object pObject)
{
    getLogger().log(FQCN, Level.DEBUG, pObject, null);
}
/**
 * Generate a message for loglevel INFO. The wrapper class name (FQCN) is passed
 * so Log4J can report the original caller's location, not this wrapper's.
 *
 * @param pObject the message Object
 */
public final void info(Object pObject)
{
    getLogger().log(FQCN, Level.INFO, pObject, null);
}
/**
 * Generate a message for loglevel WARN. The wrapper class name (FQCN) is passed
 * so Log4J can report the original caller's location, not this wrapper's.
 *
 * @param pObject the message Object
 */
public final void warn(Object pObject)
{
    getLogger().log(FQCN, Level.WARN, pObject, null);
}
/**
 * Generate a message for loglevel ERROR. The wrapper class name (FQCN) is passed
 * so Log4J can report the original caller's location, not this wrapper's.
 *
 * @param pObject the message Object
 */
public final void error(Object pObject)
{
    getLogger().log(FQCN, Level.ERROR, pObject, null);
}
/**
 * Generate a message for loglevel FATAL. The wrapper class name (FQCN) is passed
 * so Log4J can report the original caller's location, not this wrapper's.
 *
 * @param pObject the message Object
 */
public final void fatal(Object pObject)
{
    getLogger().log(FQCN, Level.FATAL, pObject, null);
}
public boolean writeLock(TransactionImpl tx, Object obj)
{
LockEntry writer = getWriter(obj);
if (writer == null)
{
if (setWriter(tx, obj))
return true;
else
return writeLock(tx, obj);
}
if (writer.isOwnedBy(tx))
{
return true; // If I'm the writer, then I can write.
}
return false;
} | acquire a write lock on Object obj for Transaction tx.
@param tx the transaction requesting the lock
@param obj the Object to be locked
@return true if successful, else false |
/*
Use ReferenceIdentityMap (with weak key and hard value setting) instead of
WeakHashMap to hold anonymous field values. Here is an snip of the mail from Andy Malakov:
<snip>
I found that usage of database identity in Java produces quite interesting problem in OJB:
In my application all persistent Java objects use database identity instead of Java reference identity
(i.e. Persistable.equals() is redefined so that two persistent objects are the same if they have the same
primary key and top-level class).
In OJB, for each field declared in repository there is dedicated instance of AnonymousPersistentField that stores
object-to-field-value mapping in WeakHashMap (in fkCache attribute). Despite usage of cache
(ObjectCachePerBrokerImpl in my case) it is possible that identical DB objects will end up as different
Java objects during retrieval of complex objects.
Now imagine what happens when two identical instances are retrieved:
1)
When first instance is retrieved it stores its foreign keys in AnonymousPersistentField.fkCache under instance's
identity. (happens in RowReaderDefaultImpl.buildWithReflection())
2)
When second object is retrieved and stored in fkCache, first instance is probably still cached
[WeakHashMap entries are cleaned up only during GC]. Since keys are identical WeakHashMap only updates entry
value and DOES NOT update entry key.
3)
If Full GC happens after that moment it will dispose fcCache entry if the FIRST reference becomes
soft-referenced only.
</snip>
*/
// Caches a field value under the identity of the given object; a null value clears the entry.
protected void putToFieldCache(Object key, Object value)
{
    if (key != null)
    {
        // lazily create the cache: weak identity keys, hard values (see note above)
        if (fkCache == null)
        {
            fkCache = new ReferenceIdentityMap (ReferenceIdentityMap.WEAK, ReferenceIdentityMap.HARD, true);
        }
        // null value means "remove any cached entry for this key"
        if (value != null)
            fkCache.put(key, value);
        else
            fkCache.remove (key);
    }
}
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
response.setContentType("text/html");
response.setHeader("Pragma", "no-cache");
PrintWriter out = response.getWriter();
out.println("<html><head><title>OJB Distributed Locking Servlet Status Page</title>");
out.println("</head><body><h1>OJB Distributed Locking Servlet</h1>");
out.println("The servlet is running.<p>");
if(lastError == null)
{
out.println("The LockServer is running.<p>");
out.println("LockManager info: " + lockmanager.getLockInfo() + "<p>");
out.println("Processed Lock Request: " + numRequests + "<p>");
}
else
{
out.println("<h2>The LockServer has a problem!</h2>");
out.println("The error message is:<p>");
out.println(lastError.getMessage() + "<p>");
lastError.printStackTrace(out);
lastError = null;
}
out.println("</body></html>");
} | /* (non-Javadoc)
@see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) |
/**
 * Handles a serialized lock request: deserializes a LockInfo command object from the request
 * body, dispatches on its method selector to the local LockManager, and writes the serialized
 * result (or a LockRuntimeException instance on failure) back to the response.
 *
 * @param request HTTP request carrying a serialized LockManagerRemoteImpl.LockInfo
 * @param response HTTP response to which the serialized result object is written
 * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
{
    // update counter
    numRequests++;
    try
    {
        // read request:
        LockManagerRemoteImpl.LockInfo info = (LockManagerRemoteImpl.LockInfo) buildObjectFromRequest(request);
        Object result = null;
        // now execute the command specified by the selector
        try
        {
            switch(info.methodName)
            {
                case LockManagerRemoteImpl.METHOD_READ_LOCK:
                {
                    result = new Boolean(lockmanager.readLock(info.key, info.resourceId, info.isolationLevel));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_RELEASE_SINGLE_LOCK:
                {
                    result = new Boolean(lockmanager.releaseLock(info.key, info.resourceId));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_RELEASE_LOCKS:
                {
                    lockmanager.releaseLocks(info.key);
                    result = Boolean.TRUE;
                    break;
                }
                case LockManagerRemoteImpl.METHOD_WRITE_LOCK:
                {
                    result = new Boolean(lockmanager.writeLock(info.key, info.resourceId,
                            info.isolationLevel));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_UPGRADE_LOCK:
                {
                    result = new Boolean(lockmanager.upgradeLock(info.key, info.resourceId, info.isolationLevel));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_CHECK_READ:
                {
                    result = new Boolean(lockmanager.hasRead(info.key, info.resourceId));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_CHECK_WRITE:
                {
                    result = new Boolean(lockmanager.hasWrite(info.key, info.resourceId));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_CHECK_UPGRADE:
                {
                    result = new Boolean(lockmanager.hasUpgrade(info.key, info.resourceId));
                    break;
                }
                case LockManagerRemoteImpl.METHOD_LOCK_INFO:
                {
                    result = lockmanager.getLockInfo();
                    break;
                }
                case LockManagerRemoteImpl.METHOD_LOCK_TIMEOUT:
                {
                    result = new Long(lockmanager.getLockTimeout());
                    break;
                }
                case LockManagerRemoteImpl.METHOD_BLOCK_TIMEOUT:
                {
                    result = new Long(lockmanager.getBlockTimeout());
                    break;
                }
//                    case LockManagerRemoteImpl.METHOD_LOCK_TIMEOUT_SET:
//                    {
//                        lockmanager.setLockTimeout(info.lockTimeout);
//                        break;
//                    }
//
//                    case LockManagerRemoteImpl.METHOD_BLOCK_TIMEOUT_SET:
//                    {
//                        lockmanager.setBlockTimeout(info.blockTimeout);
//                        break;
//                    }
                default :
                {
                    throw new LockRuntimeException("Unknown command:" + info.methodName);
                }
            }
        }
        catch(RuntimeException e)
        {
            // ship the failure back to the client as the (serialized) result object
            result = new LockRuntimeException("Error while invoke specified method in servlet.", e);
        }
        ObjectOutputStream oos = new ObjectOutputStream(response.getOutputStream());
        oos.writeObject(result);
        oos.flush();
        oos.close();
    }
    catch(Throwable t)
    {
        // remember the error for display on the status page (doGet)
        lastError = t;
        t.printStackTrace();
    }
}
public void setLayerInfo(VectorLayerInfo layerInfo) throws LayerException {
this.layerInfo = layerInfo;
if (null != sessionFactory) {
setSessionFactory(sessionFactory);
}
} | Set the layer configuration.
@param layerInfo layer info
@throws LayerException oops |
/**
 * Return the class of one of the properties of another class from which the Hibernate metadata
 * is given. Nested properties ("a/b/c" or "a.b.c") are resolved recursively, descending into
 * element types for collection-typed intermediate properties.
 *
 * @param meta
 *            The parent class to search a property in.
 * @param propertyName
 *            The name of the property in the parent class (provided by meta); may be a nested
 *            path using either the XPath ("/") or the Hibernate (".") separator.
 * @return Returns the class of the property in question.
 * @throws HibernateLayerException
 *             Throws an exception if the property name could not be retrieved.
 */
protected Class<?> getPropertyClass(ClassMetadata meta, String propertyName) throws HibernateLayerException {
	// try to assure the correct separator is used
	propertyName = propertyName.replace(XPATH_SEPARATOR, SEPARATOR);
	if (propertyName.contains(SEPARATOR)) {
		// nested path: resolve the first segment, then recurse on the remainder
		String directProperty = propertyName.substring(0, propertyName.indexOf(SEPARATOR));
		try {
			Type prop = meta.getPropertyType(directProperty);
			if (prop.isCollectionType()) {
				// for collections, descend into the element type, not the collection itself
				CollectionType coll = (CollectionType) prop;
				prop = coll.getElementType((SessionFactoryImplementor) sessionFactory);
			}
			ClassMetadata propMeta = sessionFactory.getClassMetadata(prop.getReturnedClass());
			return getPropertyClass(propMeta, propertyName.substring(propertyName.indexOf(SEPARATOR) + 1));
		} catch (HibernateException e) {
			throw new HibernateLayerException(e, ExceptionCode.HIBERNATE_COULD_NOT_RESOLVE, propertyName,
					meta.getEntityName());
		}
	} else {
		// simple (non-nested) property
		try {
			return meta.getPropertyType(propertyName).getReturnedClass();
		} catch (HibernateException e) {
			throw new HibernateLayerException(e, ExceptionCode.HIBERNATE_COULD_NOT_RESOLVE, propertyName,
					meta.getEntityName());
		}
	}
}
public void setSessionFactory(SessionFactory sessionFactory) throws HibernateLayerException {
try {
this.sessionFactory = sessionFactory;
if (null != layerInfo) {
entityMetadata = sessionFactory.getClassMetadata(layerInfo.getFeatureInfo().getDataSourceName());
}
} catch (Exception e) { // NOSONAR
throw new HibernateLayerException(e, ExceptionCode.HIBERNATE_NO_SESSION_FACTORY);
}
} | Set session factory.
@param sessionFactory session factory
@throws HibernateLayerException could not get class metadata for data source |
static JDOClass getJDOClass(Class c)
{
JDOClass rc = null;
try
{
JavaModelFactory javaModelFactory = RuntimeJavaModelFactory.getInstance();
JavaModel javaModel = javaModelFactory.getJavaModel(c.getClassLoader());
JDOModel m = JDOModelFactoryImpl.getInstance().getJDOModel(javaModel);
rc = m.getJDOClass(c.getName());
}
catch (RuntimeException ex)
{
throw new JDOFatalInternalException("Not a JDO class: " + c.getName());
}
return rc;
} | this method looks up the appropriate JDOClass for a given persistent Class.
It uses the JDOModel to perfom this lookup.
@param c the persistent Class
@return the JDOCLass object |
/**
 * Obtains the internal JDO lifecycle state of the input StateManagerInternal.
 * This method is helpful to display persistent objects' internal state.
 * <p>
 * NOTE: on reflection failure the caught exception object itself is returned
 * (for display purposes) rather than being rethrown.
 *
 * @param sm the StateManager to be inspected
 * @return the LifeCycleState of a StateManager instance, or the reflective
 *         exception when the 'myLC' field could not be read
 */
static Object getLCState(StateManagerInternal sm)
{
    // unfortunately the LifeCycleState classes are package private.
    // so we have to do some dirty reflection hack to access them
    try
    {
        Field myLC = sm.getClass().getDeclaredField("myLC");
        myLC.setAccessible(true);
        return myLC.get(sm);
    }
    catch (NoSuchFieldException e)
    {
        return e;
    }
    catch (IllegalAccessException e)
    {
        return e;
    }
}
/**
 * Acquire a read lock on Object obj for Transaction tx. At most one reader is allowed
 * at a time; a transaction that already holds the write lock (or is the sole reader)
 * may read again. Races with concurrent lockers are resolved by optimistic retry.
 *
 * @param tx the transaction requesting the lock
 * @param obj the Object to be locked
 * @return true if successful, else false
 */
public boolean readLock(TransactionImpl tx, Object obj)
{
    LockEntry writer = getWriter(obj);
    Collection readers = getReaders(obj);
    if (writer == null)
    {
        // only one reader at a time
        if (readers.size() == 0)
        {
            if (addReader(tx, obj))
            {
                // re-read to detect a concurrent reader added in the meantime
                readers = getReaders(obj);
                if (readers.size() == 1)
                {
                    return true;
                }
                else
                {
                    // lost the race: back out and retry from the start
                    removeReader(tx, obj);
                    return readLock(tx, obj);
                }
            }
            else
                return readLock(tx, obj);
        }
        else if ((readers.size() == 1) && (((LockEntry) readers.iterator().next()).isOwnedBy(tx)))
        {
            // I'm the reader, thus I am allowed to read even more !
            return true;
        }
    }
    else if (writer.isOwnedBy(tx))
    {
        return true; // If I'm the writer, I can read.
    }
    return false;
}
/**
 * Finish initializing the service: validates and normalizes the base TMS URL, fetches
 * the TileMap capabilities to derive the layer info when none was configured, and
 * prepares the tile service state and URL builder. When the capabilities cannot be
 * fetched, the layer is marked unusable instead of failing hard.
 *
 * @throws GeomajasException when the base URL or the tile extension is missing
 */
@PostConstruct
protected void postConstruct() throws GeomajasException {
	if (null == baseTmsUrl) {
		throw new GeomajasException(ExceptionCode.PARAMETER_MISSING, "baseTmsUrl");
	}
	// Make sure we have a base URL we can work with:
	if ((baseTmsUrl.startsWith("http://") || baseTmsUrl.startsWith("https://")) && !baseTmsUrl.endsWith("/")) {
		baseTmsUrl += "/";
	}
	// Make sure there is a correct RasterLayerInfo object:
	if (layerInfo == null || layerInfo == UNUSABLE_LAYER_INFO) {
		try {
			// derive layer configuration from the TMS capabilities document
			tileMap = configurationService.getCapabilities(this);
			version = tileMap.getVersion();
			extension = tileMap.getTileFormat().getExtension();
			layerInfo = configurationService.asLayerInfo(tileMap);
			usable = true;
		} catch (TmsLayerException e) {
			// a layer needs an info object to keep the DtoConfigurationPostProcessor happy !
			layerInfo = UNUSABLE_LAYER_INFO;
			usable = false;
			log.warn("The layer could not be correctly initialized: " + getId(), e);
		}
	} else if (extension == null) {
		// configured layer info without an extension cannot build tile URLs
		throw new GeomajasException(ExceptionCode.PARAMETER_MISSING, "extension");
	}
	if (layerInfo != null) {
		// Finally prepare some often needed values:
		state = new TileServiceState(geoService, layerInfo);
		// when proxying the real url will be resolved later on, just use a simple one for now
		boolean proxying = useCache || useProxy || null != authentication;
		if (tileMap != null && !proxying) {
			urlBuilder = new TileMapUrlBuilder(tileMap);
		} else {
			urlBuilder = new SimpleTmsUrlBuilder(extension);
		}
	}
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.