| code (string, lengths 67–466k) | docstring (string, lengths 1–13.2k) |
|---|---|
public Object javaToSql(Object source)
{
    /*
     * Convert a Java Boolean to its integer database representation.
     * Any non-Boolean value is passed through unchanged.
     */
    if (!(source instanceof Boolean))
    {
        return source;
    }
    return Boolean.TRUE.equals(source) ? I_TRUE : I_FALSE;
} | /*
@see FieldConversion#javaToSql(Object) |
public Object sqlToJava(Object source)
{
    /*
     * Translate the integer database representation back to a Boolean.
     * Non-Integer values are passed through untouched.
     */
    if (source instanceof Integer)
    {
        return source.equals(I_TRUE) ? Boolean.TRUE : Boolean.FALSE;
    }
    return source;
} | /*
@see FieldConversion#sqlToJava(Object) |
protected Field getField()
{
    // Fast path: field graph already built.
    if (isInitialized)
    {
        return field;
    }
    /*
     * Lazily build the graph of fields (nested-field support). For best
     * performance on non-nested fields the last field of the chain is
     * cached separately and a "non nested" flag is computed once.
     */
    fieldsList = getFieldGraph(makeAccessible());
    field = (Field) fieldsList.get(fieldsList.size() - 1);
    nonNested = fieldsList.size() == 1;
    isInitialized = true;
    return field;
} | Returns the underlying field object.
If parameter <tt>setAccessible</tt> is true the
field access checking was suppressed. |
public void set(Object target, Object value) throws MetadataException
{
// Writes 'value' into the (possibly nested) field of 'target',
// creating intermediate objects of the declared field type on demand.
// if target null, we have nothing to do
if(target == null) return;
Object current = target;
if (isNestedField())
{
// Walk all but the last field of the chain, materializing each
// intermediate object so the final setValueFor() has a receiver.
List fields = getFieldsList();
int size = fields.size() - 1;
Field field;
for (int i = 0; i < size; i++)
{
field = (Field) fields.get(i);
Object attribute;
try
{
attribute = getValueFrom(field, current);
}
catch (Exception e)
{
throw new MetadataException("Can't read field '" + field.getName() + "' of type " + field.getType().getName(), e);
}
// Only build missing intermediates when a non-null value is being set;
// a null value with a null path means there is nothing to clear.
if (attribute != null || value != null)
{
// if the intermediary nested object is null, we have to create
// a new instance to set the value
if (attribute == null)
{
try
{
attribute = ClassHelper.newInstance(field.getType());
}
catch (Exception e)
{
throw new MetadataException("Can't create nested object of type '"
+ field.getType() + "' for field '"
+ field.getName() + "'", e);
}
}
try
{
//field.set(current, attribute);
setValueFor(field, current, attribute);
}
//catch (IllegalAccessException e)
catch (Exception e)
{
throw new MetadataException("Can't set nested object of type '"
+ field.getType() + "' for field '"
+ field.getName() + "'", e);
}
}
else
{
// Path is null and value is null: nothing to write, bail out.
return;
}
current = attribute;
}
}
// Finally write the value into the last field of the chain.
setValueFor(getField(), current, value);
} | do not override this method, have a look at {@link #setValueFor(java.lang.reflect.Field, Object, Object)} |
public Object get(Object target) throws MetadataException
{
    // Nested field: walk the whole field chain, stopping early as soon
    // as an intermediate link is null.
    if (isNestedField())
    {
        Object value = target;
        Iterator it = getFieldsList().iterator();
        while (value != null && it.hasNext())
        {
            value = getValueFrom((Field) it.next(), value);
        }
        return value;
    }
    // Plain field: a null target simply yields null.
    return target == null ? null : getValueFrom(getField(), target);
} | do not override this method, have a look at {@link #getValueFrom(java.lang.reflect.Field, Object)} |
@Override
public Object execute(Map<String, Object> args) {
    // Assemble the URL: context path first, then the query parameters
    // appended by the helper.
    StringBuffer url = new StringBuffer();
    url.append(this.getContextPath());
    this.addUrlParameter(args, url);
    return url.toString();
} | {@inheritDoc}
@see com.mitchellbosecke.pebble.extension.Function#execute(java.util.Map) |
public void setStatusBarMessage(final String message)
{
    // listenerList stores (listener class, listener instance) pairs,
    // hence the step of 2; iterate from the end, notifying every
    // registered StatusMessageListener. getListenerList() never
    // returns null.
    final Object[] pairs = listenerList.getListenerList();
    for (int idx = pairs.length - 2; idx >= 0; idx -= 2)
    {
        if (pairs[idx] == StatusMessageListener.class)
        {
            ((StatusMessageListener) pairs[idx + 1]).statusMessageReceived(message);
        }
    }
} | Set a status message in the JTextComponent passed to this
model.
@param message The message that should be displayed. |
public void reportSqlError(String message, java.sql.SQLException sqlEx)
{
    // Collect error code and message of EVERY exception in the chain.
    // BUGFIX: the original loop appended the head exception's
    // code/message on every iteration instead of the current element
    // returned by getNextException().
    StringBuilder messages = new StringBuilder();
    java.sql.SQLException current = sqlEx;
    do
    {
        messages.append("\n").append(current.getErrorCode()).append(":").append(current.getMessage());
        current = current.getNextException();
    } while (current != null);
    System.err.println(message + messages);
    sqlEx.printStackTrace();
} | Method for reporting SQLException. This is used by
the treenodes if retrieving information for a node
is not successful.
@param message The message describing where the error occurred
@param sqlEx The exception to be reported. |
public Object copy(final Object obj, final PersistenceBroker broker)
{
// Delegate to clone() with a fresh IdentityMap so each object in a
// cyclic object graph is visited only once (prevents infinite recursion).
return clone(obj, IdentityMapFactory.getIdentityMap(), broker);
} | Uses an IdentityMap to make sure we don't recurse infinitely on the same object in a cyclic object model.
Proxies
@param obj
@return |
protected Object getObjectFromResultSet() throws PersistenceBrokerException
{
// Fast path first, identity-based lookup as fallback.
try
{
// if all primitive attributes of the object are contained in the ResultSet
// the fast direct mapping can be used
return super.getObjectFromResultSet();
}
// if the full loading failed we assume that at least PK attributes are contained
// in the ResultSet and perform a slower Identity based loading...
// This may of course also fail and can throw another PersistenceBrokerException
catch (PersistenceBrokerException e)
{
// NOTE(review): the caught exception 'e' is deliberately discarded;
// the identity-based lookup below raises its own exception on failure.
Identity oid = getIdentityFromResultSet();
return getBroker().getObjectByIdentity(oid);
}
} | returns a proxy or a fully materialized Object from the current row of the
underlying resultset. |
public void createInsertionSql(Database model, Platform platform, Writer writer) throws IOException
{
    // Emit one INSERT statement per contained bean, separated by a
    // newline (no trailing newline after the last statement).
    for (Iterator beanIter = _beans.iterator(); beanIter.hasNext();)
    {
        DynaBean bean = (DynaBean) beanIter.next();
        writer.write(platform.getInsertSql(model, bean));
        if (beanIter.hasNext())
        {
            writer.write("\n");
        }
    }
} | Generates and writes the sql for inserting the currently contained data objects.
@param model The database model
@param platform The platform
@param writer The output stream |
public void insert(Platform platform, Database model, int batchSize) throws SQLException
{
    if (batchSize <= 1)
    {
        // No batching: insert the beans one by one.
        for (Iterator it = _beans.iterator(); it.hasNext();)
        {
            platform.insert(model, (DynaBean)it.next());
        }
    }
    else
    {
        // Batch mode. BUGFIX: clamp the end index of the final chunk,
        // otherwise subList() throws IndexOutOfBoundsException whenever
        // the number of beans is not an exact multiple of batchSize.
        for (int startIdx = 0; startIdx < _beans.size(); startIdx += batchSize)
        {
            int endIdx = Math.min(startIdx + batchSize, _beans.size());
            platform.insert(model, _beans.subList(startIdx, endIdx));
        }
    }
} | Inserts the currently contained data objects into the database.
@param platform The (connected) database platform for inserting data
@param model The database model
@param batchSize The batch size; use 1 for not using batch mode |
public List<Toponym> search(String q, int maxRows, boolean fuzzy) throws Exception {
    List<Toponym> searchResult = new ArrayList<Toponym>();
    // Build the query string (StringBuilder avoids repeated String copies).
    StringBuilder url = new StringBuilder("q=").append(URLEncoder.encode(q, "UTF8"));
    url.append("&isNameRequired=true");
    if (fuzzy) {
        url.append("&fuzzy=").append(FUZZY_VALUE);
    }
    if (maxRows > 0) {
        url.append("&maxRows=").append(maxRows);
    }
    url.append("&style=").append(Style.SHORT);
    if (null != userName) {
        url.append("&username=").append(userName);
    }
    InputStream inputStream = connect(url.toString());
    if (null != inputStream) {
        try {
            // BUGFIX: the original code called connect(url) a second time
            // here, issuing a duplicate HTTP request and leaking the first
            // stream; parse the stream that was already opened instead.
            SAXBuilder parser = new SAXBuilder();
            Document doc = parser.build(inputStream);
            Element root = doc.getRootElement();
            // A <status> child signals a service-side error.
            Element message = root.getChild("status");
            if (message != null) {
                throw new Exception(message.getAttributeValue("message"));
            }
            for (Object obj : root.getChildren("geoname")) {
                Element toponymElement = (Element) obj;
                searchResult.add(getToponymFromElement(toponymElement));
            }
        } finally {
            // Always release the connection's stream.
            inputStream.close();
        }
    }
    return searchResult;
} | Full text search on the GeoNames database. Modified version from the geonames library to allow using
isNameRequired and fuzzy.
<p/>
This service gets the number of toponyms defined by the 'maxRows' parameter. The parameter 'style' determines
which fields are returned by the service.
@param q search string
@param maxRows max number of rows in result, or 0 for all
@param fuzzy should search be fuzzy?
@return list of found toponyms
@throws Exception oops
@see <a href="http://www.geonames.org/export/geonames-search.html">search web service documentation</a> |
private InputStream connect(String url) throws IOException {
    // Open a connection against the service base URL with the configured
    // timeouts, identifying this client via the User-Agent header.
    final URLConnection connection = new URL(URL_BASE + url).openConnection();
    connection.setConnectTimeout(CONNECT_TIMEOUT);
    connection.setReadTimeout(READ_TIMEOUT);
    connection.setRequestProperty("User-Agent", USER_AGENT);
    return connection.getInputStream();
} | Open the connection to the server.
@param url the url to connect to
@return returns the input stream for the connection
@throws IOException cannot get result |
private void initComponents()//GEN-BEGIN:initComponents
{
// Form-Editor generated code (see GEN-BEGIN/GEN-LAST markers):
// builds a 4x2 grid of label/field pairs inside jPanel1 and wires
// component, hierarchy and action listeners. Do not hand-edit the
// statements below - they are regenerated by the Form Editor.
java.awt.GridBagConstraints gridBagConstraints;
jPanel1 = new javax.swing.JPanel();
lblEnabled = new javax.swing.JLabel();
cbEnabled = new javax.swing.JCheckBox();
lblDisabledByParent = new javax.swing.JLabel();
cbDisabledByParent = new javax.swing.JCheckBox();
jLabel4 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
lblSchemaName = new javax.swing.JLabel();
tfSchemaName = new javax.swing.JTextField();
setLayout(new java.awt.GridBagLayout());
addComponentListener(new java.awt.event.ComponentAdapter()
{
public void componentShown(java.awt.event.ComponentEvent evt)
{
formComponentShown(evt);
}
public void componentHidden(java.awt.event.ComponentEvent evt)
{
formComponentHidden(evt);
}
});
addHierarchyListener(new java.awt.event.HierarchyListener()
{
public void hierarchyChanged(java.awt.event.HierarchyEvent evt)
{
formHierarchyChanged(evt);
}
});
jPanel1.setLayout(new java.awt.GridLayout(4, 2));
lblEnabled.setText("enabled");
lblEnabled.setDisplayedMnemonic('e');
jPanel1.add(lblEnabled);
cbEnabled.setMnemonic('e');
cbEnabled.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(java.awt.event.ActionEvent evt)
{
cbEnabledActionPerformed(evt);
}
});
jPanel1.add(cbEnabled);
lblDisabledByParent.setText("disabled by Parent:");
jPanel1.add(lblDisabledByParent);
cbDisabledByParent.setEnabled(false);
cbDisabledByParent.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(java.awt.event.ActionEvent evt)
{
cbDisabledByParentActionPerformed(evt);
}
});
jPanel1.add(cbDisabledByParent);
jPanel1.add(jLabel4);
jPanel1.add(jLabel3);
lblSchemaName.setText("Schema Name:");
lblSchemaName.setLabelFor(tfSchemaName);
jPanel1.add(lblSchemaName);
tfSchemaName.setEditable(false);
tfSchemaName.setText("jTextField1");
tfSchemaName.setBorder(null);
jPanel1.add(tfSchemaName);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
add(jPanel1, gridBagConstraints);
} | This method is called from within the constructor to
initialize the form.
WARNING: Do NOT modify this code. The content of this method is
always regenerated by the Form Editor. |
public void setModel (PropertySheetModel pm)
{
    // Only DBSchema models are supported by this property sheet.
    if (pm instanceof org.apache.ojb.tools.mapping.reversedb.DBSchema)
    {
        this.aSchema = (org.apache.ojb.tools.mapping.reversedb.DBSchema)pm;
        this.readValuesFromSchema();
    }
    else
    {
        // Tell the caller which type was rejected instead of throwing a
        // bare IllegalArgumentException without any message; also brace
        // the branch per house style.
        throw new IllegalArgumentException("setModel() expects a DBSchema, got: "
            + (pm == null ? "null" : pm.getClass().getName()));
    }
} | GEN-LAST:event_formComponentShown |
public void destroy()
{
// Detach all listeners first so no events fire during teardown.
removeAllListeners();
if (connectionManager != null)
{
// Roll back any open local transaction before handing the
// connection back to the pool.
if(connectionManager.isInLocalTransaction())
{
connectionManager.localRollback();
}
connectionManager.releaseConnection();
}
this.setClosed(true);
// Null out every collaborator so resources can be collected and any
// further use of this instance fails fast with an NPE.
this.descriptorRepository = null;
this.pbKey = null;
this.pbf = null;
this.connectionManager = null;
this.dbAccess = null;
this.objectCache = null;
this.sequenceManager = null;
this.sqlGenerator = null;
this.statementManager = null;
} | Release all resources used by this
class - CAUTION: No further operations can be
done with this instance after calling this method. |
public synchronized void abortTransaction() throws TransactionNotInProgressException
{
// NOTE(review): despite the declared TransactionNotInProgressException,
// calling this while no tx is in progress is a silent no-op here -
// confirm whether the exception should actually be thrown.
if(isInTransaction())
{
fireBrokerEvent(BEFORE_ROLLBACK_EVENT);
// Clear broker tx state before touching the connection so listeners
// observe a consistent "not in tx" broker.
setInTransaction(false);
clearRegistrationLists();
referencesBroker.removePrefetchingListeners();
/*
arminw:
check if we in local tx, before do local rollback
Necessary, because ConnectionManager may do a rollback by itself
or in managed environments the used connection is already be closed
*/
if(connectionManager.isInLocalTransaction()) this.connectionManager.localRollback();
fireBrokerEvent(AFTER_ROLLBACK_EVENT);
}
} | Abort and close the transaction.
Calling abort abandons all persistent object modifications and releases the
associated locks.
If transaction is not in progress a TransactionNotInProgressException is thrown |
public synchronized void beginTransaction() throws TransactionInProgressException, TransactionAbortedException
{
// Nested transactions are not supported: fail fast if already in tx.
if (isInTransaction())
{
throw new TransactionInProgressException("PersistenceBroker is already in transaction");
}
fireBrokerEvent(BEFORE_BEGIN_EVENT);
// Mark the broker in-tx before opening the local connection tx.
setInTransaction(true);
this.connectionManager.localBegin();
fireBrokerEvent(AFTER_BEGIN_EVENT);
} | begin a transaction against the underlying RDBMS.
Calling <code>beginTransaction</code> multiple times,
without an intervening call to <code>commitTransaction</code> or <code>abortTransaction</code>,
causes the exception <code>TransactionInProgressException</code> to be thrown
on the second and subsequent calls. |
public synchronized void commitTransaction() throws TransactionNotInProgressException, TransactionAbortedException
{
if (!isInTransaction())
{
throw new TransactionNotInProgressException("PersistenceBroker is NOT in transaction, can't commit");
}
fireBrokerEvent(BEFORE_COMMIT_EVENT);
// Clear broker tx state and pending registrations before committing
// on the connection level.
setInTransaction(false);
clearRegistrationLists();
referencesBroker.removePrefetchingListeners();
/*
arminw:
In managed environments it should be possible to close a used connection before
the tx was commited, thus it will be possible that the PB instance is in PB-tx, but
the connection is already closed. To avoid problems check if CM is in local tx before
do the CM.commit call
*/
if(connectionManager.isInLocalTransaction())
{
this.connectionManager.localCommit();
}
fireBrokerEvent(AFTER_COMMIT_EVENT);
} | Commit and close the transaction.
Calling <code>commit</code> commits to the database all
UPDATE, INSERT and DELETE statements called within the transaction and
releases any locks held by the transaction.
If beginTransaction() has not been called before a
TransactionNotInProgressException exception is thrown.
If the transaction cannot be commited a TransactionAbortedException exception is thrown. |
public void delete(Object obj, boolean ignoreReferences) throws PersistenceBrokerException
{
// Warn (with stack trace) when deleting outside a PB transaction.
if(isTxCheck() && !isInTransaction())
{
if(logger.isEnabledFor(Logger.ERROR))
{
String msg = "No running PB-tx found. Please, only delete objects in context of a PB-transaction" +
" to avoid side-effects - e.g. when rollback of complex objects.";
// The throw/catch below exists solely to capture the current call
// stack for the log output; the exception is never propagated.
try
{
throw new Exception("** Delete object without active PersistenceBroker transaction **");
}
catch(Exception e)
{
logger.error(msg, e);
}
}
}
try
{
doDelete(obj, ignoreReferences);
}
finally
{
// Always reset the recursion guard, even when the delete failed.
markedForDelete.clear();
}
} | Deletes the concrete representation of the specified object in the underlying
persistence system. This method is intended for use in top-level api or
by internal calls.
@param obj The object to delete.
@param ignoreReferences With this flag the automatic deletion/unlinking
of references can be suppressed (independent of the used auto-delete setting in metadata),
except {@link org.apache.ojb.broker.metadata.SuperReferenceDescriptor}
these kind of reference (descriptor) will always be performed. If <em>true</em>
all "normal" referenced objects will be ignored, only the specified object is handled.
@throws PersistenceBrokerException |
private void doDelete(Object obj, boolean ignoreReferences) throws PersistenceBrokerException
{
//logger.info("DELETING " + obj);
// object is not null
if (obj != null)
{
// Unwrap proxies: we always delete the real subject.
obj = getProxyFactory().getRealObject(obj);
/**
* Kuali Foundation modification -- 8/24/2007
*/
if ( obj == null ) return;
/**
* End of Kuali Foundation modification
*/
/**
* MBAIRD
* 1. if we are marked for delete already, avoid recursing on this object
*
* arminw:
* use object instead Identity object in markedForDelete List,
* because using objects we get a better performance. I can't find
* side-effects in doing so.
*/
if (markedForDelete.contains(obj))
{
return;
}
ClassDescriptor cld = getClassDescriptor(obj.getClass());
//BRJ: check for valid pk
if (!serviceBrokerHelper().assertValidPkForDelete(cld, obj))
{
// Objects without a valid PK are silently skipped (logged only).
String msg = "Cannot delete object without valid PKs. " + obj;
logger.error(msg);
return;
}
/**
* MBAIRD
* 2. register object in markedForDelete map.
*/
markedForDelete.add(obj);
Identity oid = serviceIdentity().buildIdentity(cld, obj);
// Invoke events on PersistenceBrokerAware instances and listeners
// (target is reset to null afterwards because the event object is shared).
BEFORE_DELETE_EVENT.setTarget(obj);
fireBrokerEvent(BEFORE_DELETE_EVENT);
BEFORE_DELETE_EVENT.setTarget(null);
// now perform deletion
performDeletion(cld, obj, oid, ignoreReferences);
// Invoke events on PersistenceBrokerAware instances and listeners
AFTER_DELETE_EVENT.setTarget(obj);
fireBrokerEvent(AFTER_DELETE_EVENT);
AFTER_DELETE_EVENT.setTarget(null);
// let the connection manager to execute batch
connectionManager.executeBatchIfNecessary();
}
} | do delete given object. Should be used by all intern classes to delete
objects. |
private void performDeletion(final ClassDescriptor cld, final Object obj, final Identity oid, final boolean ignoreReferences) throws PersistenceBrokerException
{
// Deletion order matters for FK constraints:
// collections -> own row -> 1:1 referenced objects.
// 1. delete dependend collections
if (!ignoreReferences && cld.getCollectionDescriptors().size() > 0)
{
deleteCollections(obj, cld.getCollectionDescriptors());
}
// 2. delete object from directly mapped table
try
{
dbAccess.executeDelete(cld, obj); // use obj not oid to delete, BRJ
}
catch(OptimisticLockException e)
{
// ensure that the outdated object be removed from cache
objectCache.remove(oid);
throw e;
}
// 3. Add OID to the set of deleted objects
deletedDuringTransaction.add(oid);
// 4. delete dependend upon objects last to avoid FK violations
if (cld.getObjectReferenceDescriptors().size() > 0)
{
deleteReferences(cld, obj, oid, ignoreReferences);
}
// remove obj from the object cache:
objectCache.remove(oid);
} | This method perform the delete of the specified object
based on the {@link org.apache.ojb.broker.metadata.ClassDescriptor}. |
private void deleteByQuery(Query query, ClassDescriptor cld) throws PersistenceBrokerException
{
// Extent-aware bulk delete: raw SQL queries are executed verbatim;
// other query types are run against the class's table and, when the
// class is an extent, against every concrete subclass table as well.
if (logger.isDebugEnabled())
{
logger.debug("deleteByQuery " + cld.getClassNameOfObject() + ", " + query);
}
if (query instanceof QueryBySQL)
{
String sql = ((QueryBySQL) query).getSql();
this.dbAccess.executeUpdateSQL(sql, cld);
}
else
{
// if query is Identity based transform it to a criteria based query first
if (query instanceof QueryByIdentity)
{
QueryByIdentity qbi = (QueryByIdentity) query;
Object oid = qbi.getExampleObject();
// make sure it's an Identity
if (!(oid instanceof Identity))
{
oid = serviceIdentity().buildIdentity(oid);
}
query = referencesBroker.getPKQuery((Identity) oid);
}
// Interfaces have no table of their own - only delete for concrete classes.
if (!cld.isInterface())
{
this.dbAccess.executeDelete(query, cld);
}
// if class is an extent, we have to delete all extent classes too
String lastUsedTable = cld.getFullTableName();
if (cld.isExtent())
{
Iterator extents = getDescriptorRepository().getAllConcreteSubclassDescriptors(cld).iterator();
while (extents.hasNext())
{
ClassDescriptor extCld = (ClassDescriptor) extents.next();
// read same table only once
if (!extCld.getFullTableName().equals(lastUsedTable))
{
lastUsedTable = extCld.getFullTableName();
this.dbAccess.executeDelete(query, extCld);
}
}
}
}
} | Extent aware Delete by Query
@param query
@param cld
@throws PersistenceBrokerException |
private void deleteReferences(ClassDescriptor cld, Object obj, Identity oid, boolean ignoreReferences) throws PersistenceBrokerException
{
List listRds = cld.getObjectReferenceDescriptors();
// get all members of obj that are references and delete them
Iterator i = listRds.iterator();
while (i.hasNext())
{
ObjectReferenceDescriptor rds = (ObjectReferenceDescriptor) i.next();
// Super-references (table-per-subclass inheritance) are deleted
// unconditionally; normal 1:1 references only on cascading delete
// and when references are not being ignored.
if ((!ignoreReferences && rds.getCascadingDelete() == ObjectReferenceDescriptor.CASCADE_OBJECT)
|| rds.isSuperReferenceDescriptor())
{
Object referencedObject = rds.getPersistentField().get(obj);
if (referencedObject != null)
{
if(rds.isSuperReferenceDescriptor())
{
ClassDescriptor base = cld.getSuperClassDescriptor();
/*
arminw: If "table-per-subclass" inheritance is used we have to
guarantee that all super-class table entries are deleted too.
Thus we have to perform the recursive deletion of all super-class
table entries.
*/
performDeletion(base, referencedObject, oid, ignoreReferences);
}
else
{
doDelete(referencedObject, ignoreReferences);
}
}
}
}
} | Deletes references that <b>obj</b> points to.
All objects which we have a FK poiting to (Via ReferenceDescriptors)
will be deleted if auto-delete is true <b>AND</b>
the member field containing the object reference is NOT null.
@param obj Object which we will delete references for
@param listRds list of ObjectRederenceDescriptors
@param ignoreReferences With this flag the automatic deletion/unlinking
of references can be suppressed (independent of the used auto-delete setting in metadata),
except {@link org.apache.ojb.broker.metadata.SuperReferenceDescriptor}
these kind of reference (descriptor) will always be performed.
@throws PersistenceBrokerException if some goes wrong - please see the error message for details |
private void deleteCollections(Object obj, List listCds) throws PersistenceBrokerException
{
// get all members of obj that are collections and delete all their elements
Iterator i = listCds.iterator();
while (i.hasNext())
{
CollectionDescriptor cds = (CollectionDescriptor) i.next();
if(cds.getCascadingDelete() != ObjectReferenceDescriptor.CASCADE_NONE)
{
if(cds.isMtoNRelation())
{
// if this is a m:n mapped table, remove entries from indirection table
mtoNBroker.deleteMtoNImplementor(cds, obj);
}
/*
if cascading delete is on, delete all referenced objects.
NOTE: User has to take care to populate all referenced objects before delete
the main object to avoid referential constraint violation
*/
if (cds.getCascadingDelete() == ObjectReferenceDescriptor.CASCADE_OBJECT)
{
Object col = cds.getPersistentField().get(obj);
if (col != null)
{
// The field may hold a Collection, an array or a
// ManageableCollection - BrokerHelper abstracts the iteration.
Iterator colIterator = BrokerHelper.getCollectionIterator(col);
while (colIterator.hasNext())
{
doDelete(colIterator.next(), false);
}
}
}
}
}
} | Deletes collections of objects poiting to <b>obj</b>.
All object which have a FK poiting to this object (Via CollectionDescriptors)
will be deleted if auto-delete is true <b>AND</b>
the member field containing the object reference if NOT null.
@param obj Object which we will delete collections for
@param listCds list of ObjectReferenceDescriptors
@throws PersistenceBrokerException if some goes wrong - please see the error message for details |
public void store(Object obj) throws PersistenceBrokerException
{
// Unwraps proxies; returns null for unmaterialized ones.
obj = extractObjectToStore(obj);
// only do something if obj != null
if(obj == null) return;
ClassDescriptor cld = getClassDescriptor(obj.getClass());
/*
if one of the PK fields was null, we assume the objects
was new and needs insert
*/
boolean insert = serviceBrokerHelper().hasNullPKField(cld, obj);
Identity oid = serviceIdentity().buildIdentity(cld, obj);
/*
if PK values are set, lookup cache or db to see whether object
needs insert or update
*/
if (!insert)
{
insert = objectCache.lookup(oid) == null
&& !serviceBrokerHelper().doesExist(cld, oid, obj);
}
store(obj, oid, cld, insert);
} | Store an Object.
@see org.apache.ojb.broker.PersistenceBroker#store(Object) |
private Object extractObjectToStore(Object obj)
{
    // Nothing to unwrap for null.
    if (obj == null)
    {
        return null;
    }
    // Proxies only need storing when their real subject has been
    // materialized; an unmaterialized proxy yields null here.
    Object real = getProxyFactory().getRealObjectIfMaterialized(obj);
    if (real == null && logger.isDebugEnabled())
    {
        logger.debug("No materialized object could be found -> nothing to store," +
        " object was " + ObjectUtils.identityToString(obj));
    }
    return real;
} | Check if the given object is <code>null</code> or an unmaterialized proxy object - in
both cases <code>null</code> will be returned, else the given object itself or the
materialized proxy object will be returned. |
public void store(Object obj, Identity oid, ClassDescriptor cld, boolean insert, boolean ignoreReferences)
{
// Guard against null and against re-entrant stores of the same object
// (nowStoring acts as the recursion guard).
if(obj == null || nowStoring.contains(obj))
{
return;
}
/*
if the object has been deleted during this transaction,
then we must insert it
*/
//System.out.println("## insert: " +insert + " / deleted: " + deletedDuringTransaction);
if (!insert)
{
insert = deletedDuringTransaction.contains(oid);
}
//************************************************
// now store it:
// Warn (with captured stack trace) when storing outside a PB tx.
if(isTxCheck() && !isInTransaction())
{
if(logger.isEnabledFor(Logger.ERROR))
{
// The throw/catch exists only to capture the call stack for logging.
try
{
throw new Exception("** Try to store object without active PersistenceBroker transaction **");
}
catch(Exception e)
{
logger.error("No running tx found, please only store in context of an PB-transaction" +
", to avoid side-effects - e.g. when rollback of complex objects", e);
}
}
}
// Invoke events on PersistenceBrokerAware instances and listeners
// (STORE events for insert, UPDATE events for update; the shared event
// object's target is reset to null after firing).
if (insert)
{
BEFORE_STORE_EVENT.setTarget(obj);
fireBrokerEvent(BEFORE_STORE_EVENT);
BEFORE_STORE_EVENT.setTarget(null);
}
else
{
BEFORE_UPDATE_EVENT.setTarget(obj);
fireBrokerEvent(BEFORE_UPDATE_EVENT);
BEFORE_UPDATE_EVENT.setTarget(null);
}
try
{
nowStoring.add(obj);
storeToDb(obj, cld, oid, insert, ignoreReferences);
}
finally
{
// to optimize calls to DB don't remove already stored objects
nowStoring.remove(obj);
}
// Invoke events on PersistenceBrokerAware instances and listeners
if (insert)
{
AFTER_STORE_EVENT.setTarget(obj);
fireBrokerEvent(AFTER_STORE_EVENT);
AFTER_STORE_EVENT.setTarget(null);
}
else
{
AFTER_UPDATE_EVENT.setTarget(obj);
fireBrokerEvent(AFTER_UPDATE_EVENT);
AFTER_UPDATE_EVENT.setTarget(null);
}
// end of store operation
//************************************************
// if the object was stored, remove it from deleted set
if(deletedDuringTransaction.size() > 0) deletedDuringTransaction.remove(oid);
// let the connection manager to execute batch
connectionManager.executeBatchIfNecessary();
} | Method which start the real store work (insert or update)
and is intended for use by top-level api or internal calls.
@param obj The object to store.
@param oid The {@link Identity} of the object to store.
@param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the object.
@param insert If <em>true</em> an insert operation will be performed, else update operation.
@param ignoreReferences With this flag the automatic storing/linking
of references can be suppressed (independent of the used auto-update setting in metadata),
except {@link org.apache.ojb.broker.metadata.SuperReferenceDescriptor}
these kind of reference (descriptor) will always be performed. If <em>true</em>
all "normal" referenced objects will be ignored, only the specified object is handled. |
protected void store(Object obj, Identity oid, ClassDescriptor cld, boolean insert)
{
// Convenience overload: delegate with ignoreReferences = false.
store(obj, oid, cld, insert, false);
} | Internal used method which start the real store work. |
private void storeReferences(Object obj, ClassDescriptor cld, boolean insert, boolean ignoreReferences)
{
// get all members of obj that are references and store them
Collection listRds = cld.getObjectReferenceDescriptors();
// return if nothing to do
if(listRds == null || listRds.size() == 0)
{
return;
}
Iterator i = listRds.iterator();
while (i.hasNext())
{
ObjectReferenceDescriptor rds = (ObjectReferenceDescriptor) i.next();
/*
arminw: the super-references (used for table per subclass inheritance) must
be performed in any case. The "normal" 1:1 references can be ignored when
flag "ignoreReferences" is set
*/
if((!ignoreReferences && rds.getCascadingStore() != ObjectReferenceDescriptor.CASCADE_NONE)
|| rds.isSuperReferenceDescriptor())
{
storeAndLinkOneToOne(false, obj, cld, rds, insert);
}
}
} | Store all object references that <b>obj</b> points to.
All objects which we have a FK pointing to (Via ReferenceDescriptors) will be
stored if auto-update is true <b>AND</b> the member field containing the object
reference is NOT null.
With flag <em>ignoreReferences</em> the storing/linking
of references can be suppressed (independent of the used auto-update setting),
except {@link org.apache.ojb.broker.metadata.SuperReferenceDescriptor}
these kind of reference (descriptor) will always be performed.
@param obj Object which we will store references for |
private void storeAndLinkOneToOne(boolean onlyLink, Object obj, ClassDescriptor cld,
ObjectReferenceDescriptor rds, boolean insert)
{
// Read the referenced object from obj's 1:1 field.
Object ref = rds.getPersistentField().get(obj);
if (!onlyLink && rds.getCascadingStore() == ObjectReferenceDescriptor.CASCADE_OBJECT)
{
if(rds.isSuperReferenceDescriptor())
{
// Table-per-subclass: store the super-class part directly.
// NOTE(review): 'ref' is not null-checked on this path before
// buildIdentity/storeToDb - confirm a super-reference can never be null.
ClassDescriptor superCld = rds.getClassDescriptor().getSuperClassDescriptor();
Identity oid = serviceIdentity().buildIdentity(superCld, ref);
storeToDb(ref, superCld, oid, insert);
}
else store(ref);
}
/**
* Kuali Foundation modification -- 1/10/2008
*/
ref = getProxyFactory().getRealObject(ref);
/**
* End of Kuali Foundation modification
*/
// Link: write the FK values of ref into obj.
link(obj, cld, rds, ref, insert);
} | Store/Link 1:1 reference.
@param obj real object the reference starts
@param rds {@link ObjectReferenceDescriptor} of the real object
@param insert flag for insert operation |
private void storeCollections(Object obj, ClassDescriptor cld, boolean insert) throws PersistenceBrokerException
{
// get all members of obj that are collections and store all their elements
Collection listCods = cld.getCollectionDescriptors();
// return if nothing to do
if (listCods.size() == 0)
{
return;
}
Iterator i = listCods.iterator();
while (i.hasNext())
{
CollectionDescriptor cod = (CollectionDescriptor) i.next();
// if CASCADE_NONE was set, do nothing with referenced objects
if (cod.getCascadingStore() != ObjectReferenceDescriptor.CASCADE_NONE)
{
Object referencedObjects = cod.getPersistentField().get(obj);
// m:n relations additionally maintain the indirection table.
if (cod.isMtoNRelation())
{
storeAndLinkMtoN(false, obj, cod, referencedObjects, insert);
}
else
{
storeAndLinkOneToMany(false, obj, cod, referencedObjects, insert);
}
// BRJ: only when auto-update = object (CASCADE_OBJECT)
//
if ((cod.getCascadingStore() == ObjectReferenceDescriptor.CASCADE_OBJECT)
&& (referencedObjects instanceof ManageableCollection))
{
((ManageableCollection) referencedObjects).afterStore(this);
}
}
}
} | Store/Link collections of objects poiting to <b>obj</b>.
More info please see comments in source.
@param obj real object which we will store collections for
@throws PersistenceBrokerException if some goes wrong - please see the error message for details |
private void storeAndLinkMtoN(boolean onlyLink, Object obj, CollectionDescriptor cod,
Object referencedObjects, boolean insert)
{
/*
- if the collection is a collectionproxy and it's not already loaded
no need to perform an update on the referenced objects
- on insert we link and insert the referenced objects, because the proxy
collection maybe "inherited" from the object before the PK was replaced
*/
if(insert || !(referencedObjects instanceof CollectionProxy
&& !((CollectionProxy) referencedObjects).isLoaded()))
{
// if referenced objects are null, assign empty list
if(referencedObjects == null)
{
referencedObjects = Collections.EMPTY_LIST;
}
/*
NOTE: Take care of referenced objects, they could be of type Collection or
an Array or of type ManageableCollection, thus it is not guaranteed that we
can cast to Collection!!!
if we store an object with m:n reference and no references could be
found, we remove all entires of given object in indirection table
*/
Iterator referencedObjectsIterator;
// 1. cascade-store each referenced object (unless only linking).
if(!onlyLink && cod.getCascadingStore() == ObjectReferenceDescriptor.CASCADE_OBJECT)
{
referencedObjectsIterator = BrokerHelper.getCollectionIterator(referencedObjects);
while (referencedObjectsIterator.hasNext())
{
store(referencedObjectsIterator.next());
}
}
// 2. on update: prune indirection-table rows that no longer have a
// counterpart in the referenced collection.
Collection existingMtoNKeys;
if(!insert)
{
existingMtoNKeys = mtoNBroker.getMtoNImplementor(cod, obj);
// we can't reuse iterator
referencedObjectsIterator = BrokerHelper.getCollectionIterator(referencedObjects);
// remove all entries in indirection table which not be part of referenced objects
mtoNBroker.deleteMtoNImplementor(cod, obj, referencedObjectsIterator, existingMtoNKeys);
}
else
{
existingMtoNKeys = Collections.EMPTY_LIST;
}
// 3. (re-)create the indirection rows for the current references.
// we can't reuse iterator
referencedObjectsIterator = BrokerHelper.getCollectionIterator(referencedObjects);
while (referencedObjectsIterator.hasNext())
{
Object refObj = referencedObjectsIterator.next();
// Now store indirection record
// BRJ: this could cause integrity problems because
// obj may not be stored depending on auto-update
mtoNBroker.storeMtoNImplementor(cod, obj, refObj, existingMtoNKeys);
}
}
} | Store/Link m:n collection references.
@param obj real object the reference starts
@param cod {@link CollectionDescriptor} of the real object
@param referencedObjects the referenced objects ({@link ManageableCollection} or Collection or Array) or null
@param insert flag for insert operation |
/**
 * Store/link 1:n collection references.
 *
 * @param linkOnly if <em>true</em> the referenced objects will only be linked (FK set,
 *        no reference store); the reference store setting in the descriptor is ignored
 * @param obj real object the reference starts from
 * @param cod {@link CollectionDescriptor} of the real object
 * @param referencedObjects the referenced objects ({@link ManageableCollection},
 *        Collection or Array) or <em>null</em>
 * @param insert flag for insert operation
 */
private void storeAndLinkOneToMany(boolean linkOnly, Object obj, CollectionDescriptor cod,
                                   Object referencedObjects, boolean insert)
{
    // nothing to do without referenced objects
    if(referencedObjects == null)
    {
        return;
    }
    /*
    It only makes sense to perform (link or/and store) on real referenced objects
    or materialized collection proxy objects, because on an unmaterialized collection
    nothing has changed.

    - if the collection is a collection proxy and it's not already loaded
    no need to perform an update on the referenced objects
    - on insert we link and insert the referenced objects, because the proxy
    collection may be "inherited" from the object before the PK was replaced
    */
    if(insert || !(referencedObjects instanceof CollectionProxyDefaultImpl
            && !((CollectionProxyDefaultImpl) referencedObjects).isLoaded()))
    {
        Iterator it = BrokerHelper.getCollectionIterator(referencedObjects);
        Object refObj;
        while(it.hasNext())
        {
            refObj = it.next();
            /*
            TODO: Check this!
            arminw:
            When is it necessary to 'link' (set the FK) the 1:n reference objects?
            1. set FK in refObj if it is materialized
            2. if the referenced object is a proxy AND the main object needs insert
            we have to materialize the real object, because the user may move a collection
            of proxy objects from object A to new object B. In this case we have to replace the
            FK in the proxy object with the new key of object B.
            */
            if(insert || getProxyFactory().isMaterialized(refObj))
            {
                ClassDescriptor refCld = getClassDescriptor(getProxyFactory().getRealClass(refObj));
                // get the real object before linking
                refObj = getProxyFactory().getRealObject(refObj);
                link(refObj, refCld, cod, obj, insert);
                // if cascade store is enabled and we shall not only link, store the refObj
                if(!linkOnly && cod.getCascadingStore() == ObjectReferenceDescriptor.CASCADE_OBJECT)
                {
                    store(refObj);
                }
            }
        }
    }
}
@param obj real object the reference starts
@param linkOnly if true the referenced objects will only be linked (FK set, no reference store).
Reference store setting in descriptor will be ignored in this case
@param cod {@link CollectionDescriptor} of the real object
@param referencedObjects the referenced objects ({@link ManageableCollection} or Collection or Array) or null
@param insert flag for insert operation |
public void link(Object targetObject, ClassDescriptor cld, ObjectReferenceDescriptor rds, Object referencedObject, boolean insert)
{
// MBAIRD: we have 'disassociated' this object from the referenced object,
// the object represented by the reference descriptor is now null, so set
// the fk in the target object to null.
// arminw: if an insert was done and ref object was null, we should allow
// to pass FK fields of main object (maybe only the FK fields are set)
if (referencedObject == null)
{
/*
arminw:
if update we set FK fields to 'null', because reference was disassociated
We do nothing on insert, maybe only the FK fields of main object (without
materialization of the reference object) are set by the user
*/
if(!insert)
{
unlinkFK(targetObject, cld, rds);
}
}
else
{
setFKField(targetObject, cld, rds, referencedObject);
}
} | Assign FK value to target object by reading PK values of referenced object.
@param targetObject real (non-proxy) target object
@param cld {@link ClassDescriptor} of the real target object
@param rds An {@link ObjectReferenceDescriptor} or {@link CollectionDescriptor}
associated with the real object.
@param referencedObject referenced object or proxy
@param insert Show if "linking" is done while insert or update. |
/**
 * Unlink the FK fields of the target object, i.e. set them to <em>null</em>
 * (FK fields that are also PK fields of the target are left untouched, see setFKField).
 *
 * @param targetObject real (non-proxy) target object
 * @param cld {@link ClassDescriptor} of the real target object
 * @param rds An {@link ObjectReferenceDescriptor} or {@link CollectionDescriptor}
 *        associated with the real object.
 */
public void unlinkFK(Object targetObject, ClassDescriptor cld, ObjectReferenceDescriptor rds)
{
    setFKField(targetObject, cld, rds, null);
}
@param targetObject real (non-proxy) target object
@param cld {@link ClassDescriptor} of the real target object
@param rds An {@link ObjectReferenceDescriptor} or {@link CollectionDescriptor}
associated with the real object. |
/**
 * Set the FK values on the target object, extracted from the referenced object's
 * PK values. If the referenced object is <em>null</em> the FK fields are set to
 * <em>null</em>, except when the FK field is also declared as a PK field of the
 * target object.
 *
 * @param targetObject real (non-proxy) target object
 * @param cld {@link ClassDescriptor} of the real target object
 * @param rds An {@link ObjectReferenceDescriptor} or {@link CollectionDescriptor}
 * @param referencedObject The referenced object or <em>null</em>
 */
private void setFKField(Object targetObject, ClassDescriptor cld, ObjectReferenceDescriptor rds, Object referencedObject)
{
    ValueContainer[] refPkValues;
    FieldDescriptor fld;
    FieldDescriptor[] objFkFields = rds.getForeignKeyFieldDescriptors(cld);
    if (objFkFields == null)
    {
        throw new PersistenceBrokerException("No foreign key fields defined for class '"+cld.getClassNameOfObject()+"'");
    }
    if(referencedObject == null)
    {
        refPkValues = null;
    }
    else
    {
        // always resolve the real class -- the reference may be a proxy
        Class refClass = proxyFactory.getRealClass(referencedObject);
        ClassDescriptor refCld = getClassDescriptor(refClass);
        refPkValues = brokerHelper.getKeyValues(refCld, referencedObject, false);
    }
    for (int i = 0; i < objFkFields.length; i++)
    {
        fld = objFkFields[i];
        /*
        arminw:
        we set the FK value when the extracted PK fields from the referenced object are not null at all
        or, if null, when the FK field is not a PK field of the target object too.
        Should be ok, because the values of the extracted PK field values should never be null and never
        change, so it doesn't matter if the target field is a PK too.
        */
        if(refPkValues != null || !fld.isPrimaryKey())
        {
            fld.getPersistentField().set(targetObject, refPkValues != null ? refPkValues[i].getValue(): null);
        }
    }
}
<i>null</i> the FK values were set to <i>null</i>, expect when the FK field was declared as PK.
@param targetObject real (non-proxy) target object
@param cld {@link ClassDescriptor} of the real target object
@param rds An {@link ObjectReferenceDescriptor} or {@link CollectionDescriptor}
@param referencedObject The referenced object or <i>null</i> |
/**
 * Assign the FK values of the main object from the PK values of the reference object.
 *
 * @param obj real object with reference (proxy) object (or real object with set FK values on insert)
 * @param cld {@link ClassDescriptor} of the real object
 * @param rds An {@link ObjectReferenceDescriptor} of the real object.
 * @param insert shows whether "linking" is done while insert or update.
 *        NOTE(review): this parameter is currently ignored -- <em>true</em> is always
 *        passed on to storeAndLinkOneToOne; confirm this is intentional.
 */
public void linkOneToOne(Object obj, ClassDescriptor cld, ObjectReferenceDescriptor rds, boolean insert)
{
    storeAndLinkOneToOne(true, obj, cld, rds, true);
}
@param obj real object with reference (proxy) object (or real object with set FK values on insert)
@param cld {@link ClassDescriptor} of the real object
@param rds An {@link ObjectReferenceDescriptor} of real object.
@param insert Show if "linking" is done while insert or update. |
public void linkOneToMany(Object obj, CollectionDescriptor cod, boolean insert)
{
Object referencedObjects = cod.getPersistentField().get(obj);
storeAndLinkOneToMany(true, obj, cod,referencedObjects, insert);
} | Assign FK value to all n-side objects referenced by given object.
@param obj real object with 1:n reference
@param cod {@link CollectionDescriptor} of referenced 1:n objects
@param insert flag signal insert operation, false signals update operation |
public void linkMtoN(Object obj, CollectionDescriptor cod, boolean insert)
{
Object referencedObjects = cod.getPersistentField().get(obj);
storeAndLinkMtoN(true, obj, cod, referencedObjects, insert);
} | Assign FK values and store entries in indirection table
for all objects referenced by given object.
@param obj real object with 1:n reference
@param cod {@link CollectionDescriptor} of referenced 1:n objects
@param insert flag signal insert operation, false signals update operation |
/**
 * Retrieve all references (including collection attributes) of a given instance.
 * Loading is forced, even if the collection- and reference-descriptors differ.
 *
 * @param pInstance the persistent instance to work with
 */
public void retrieveAllReferences(Object pInstance) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled())
    {
        logger.debug("Manually retrieving all references for object " + serviceIdentity().buildIdentity(pInstance));
    }
    ClassDescriptor cld = getClassDescriptor(pInstance.getClass());
    getInternalCache().enableMaterializationCache();
    // to avoid problems with circular references, locally cache the current object instance
    Identity oid = serviceIdentity().buildIdentity(pInstance);
    // boolean needLocalRemove = false;
    if(getInternalCache().doLocalLookup(oid) == null)
    {
        // TYPE_TEMP entries are transient and do not influence object state detection
        getInternalCache().doInternalCache(oid, pInstance, MaterializationCache.TYPE_TEMP);
        // needLocalRemove = true;
    }
    try
    {
        referencesBroker.retrieveReferences(pInstance, cld, true);
        referencesBroker.retrieveCollections(pInstance, cld, true);
        // arminw: should no longer be needed since we use TYPE_TEMP for this kind of objects
        // // do locally remove the object to avoid problems with object state detection (insert/update),
        // // because objects found in the cache detected as 'old' means 'update'
        // if(needLocalRemove) getInternalCache().doLocalRemove(oid);
        getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        // drop the local cache so no half-materialized entries survive the failure
        getInternalCache().doLocalClear();
        throw e;
    }
}
Loading is forced, even if the collection- and reference-descriptors differ.
@param pInstance the persistent instance to work with |
/**
 * Retrieve a single reference- or collection-attribute of a persistent instance.
 * Collection attributes are tried first; if no collection descriptor matches the
 * name, a reference descriptor is looked up.
 *
 * @param pInstance the persistent instance
 * @param pAttributeName the name of the attribute to load
 * @throws PersistenceBrokerException if no descriptor with the given name exists
 */
public void retrieveReference(Object pInstance, String pAttributeName) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled())
    {
        logger.debug("Retrieving reference named ["+pAttributeName+"] on object of type ["+
                pInstance.getClass().getName()+"]");
    }
    ClassDescriptor cld = getClassDescriptor(pInstance.getClass());
    CollectionDescriptor cod = cld.getCollectionDescriptorByName(pAttributeName);
    getInternalCache().enableMaterializationCache();
    // to avoid problems with circular references, locally cache the current object instance
    Identity oid = serviceIdentity().buildIdentity(pInstance);
    boolean needLocalRemove = false;
    if(getInternalCache().doLocalLookup(oid) == null)
    {
        getInternalCache().doInternalCache(oid, pInstance, MaterializationCache.TYPE_TEMP);
        needLocalRemove = true;
    }
    try
    {
        if (cod != null)
        {
            // name matched a collection descriptor: force-load the collection
            referencesBroker.retrieveCollection(pInstance, cld, cod, true);
        }
        else
        {
            ObjectReferenceDescriptor ord = cld.getObjectReferenceDescriptorByName(pAttributeName);
            if (ord != null)
            {
                referencesBroker.retrieveReference(pInstance, cld, ord, true);
            }
            else
            {
                throw new PersistenceBrokerException("did not find attribute " + pAttributeName +
                        " for class " + pInstance.getClass().getName());
            }
        }
        // do locally remove the object to avoid problems with object state detection (insert/update),
        // because objects found in the cache detected as 'old' means 'update'
        if(needLocalRemove) getInternalCache().doLocalRemove(oid);
        getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        // drop the local cache so no half-materialized entries survive the failure
        getInternalCache().doLocalClear();
        throw e;
    }
}
of a persistent instance.
@param pInstance the persistent instance
@param pAttributeName the name of the Attribute to load |
/**
 * Check if the references of the specified object have enabled the <em>refresh</em>
 * attribute and refresh the reference if set <em>true</em>.
 *
 * @param obj The object to check.
 * @param oid The {@link Identity} of the object.
 * @param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the object.
 * @throws PersistenceBrokerException if there is an error refreshing collections or references
 */
public void checkRefreshRelationships(Object obj, Identity oid, ClassDescriptor cld)
{
    Iterator iter;
    CollectionDescriptor cds;
    ObjectReferenceDescriptor rds;
    // to avoid problems with circular references, locally cache the current object instance
    Object tmp = getInternalCache().doLocalLookup(oid);
    if(tmp != null && getInternalCache().isEnabledMaterialisationCache())
    {
        /*
        arminw: This should fix OJB-29, infinite loops on bidirectional 1:1 relations with
        refresh attribute 'true' for both references. OJB now assumes that the object is already
        refreshed when it's cached in the materialisation cache
        */
        return;
    }
    try
    {
        getInternalCache().enableMaterializationCache();
        if(tmp == null)
        {
            getInternalCache().doInternalCache(oid, obj, MaterializationCache.TYPE_TEMP);
        }
        if(logger.isDebugEnabled()) logger.debug("Refresh relationships for " + oid);
        // refresh all 1:n and m:n relations flagged with refresh="true"
        iter = cld.getCollectionDescriptors().iterator();
        while (iter.hasNext())
        {
            cds = (CollectionDescriptor) iter.next();
            if (cds.isRefresh())
            {
                referencesBroker.retrieveCollection(obj, cld, cds, false);
            }
        }
        // refresh all 1:1 relations flagged with refresh="true"
        iter = cld.getObjectReferenceDescriptors().iterator();
        while (iter.hasNext())
        {
            rds = (ObjectReferenceDescriptor) iter.next();
            if (rds.isRefresh())
            {
                referencesBroker.retrieveReference(obj, cld, rds, false);
            }
        }
        getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        // drop the local cache so no half-refreshed entries survive the failure
        getInternalCache().doLocalClear();
        throw e;
    }
}
the <em>refresh</em> attribute and refresh the reference if set <em>true</em>.
@throws PersistenceBrokerException if there is a error refreshing collections or references
@param obj The object to check.
@param oid The {@link Identity} of the object.
@param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the object. |
/**
 * Retrieve a collection of type <code>collectionClass</code> matching the Query.
 * Delegates to the references broker; the trailing <em>false</em> argument is a
 * flag of the internal API -- presumably "lazy retrieval disabled", confirm
 * against the QueryReferenceBroker signature.
 *
 * @see org.apache.ojb.broker.PersistenceBroker#getCollectionByQuery(Class, Query)
 */
public ManageableCollection getCollectionByQuery(Class collectionClass, Query query)
        throws PersistenceBrokerException
{
    return referencesBroker.getCollectionByQuery(collectionClass, query, false);
}
@see org.apache.ojb.broker.PersistenceBroker#getCollectionByQuery(Class, Query) |
private Object getPlainDBObject(ClassDescriptor cld, Identity oid) throws ClassNotPersistenceCapableException
{
Object newObj = null;
// Class is NOT an Interface: it has a directly mapped table and we lookup this table first:
if (!cld.isInterface())
{
// 1. try to retrieve skalar fields from directly mapped table columns
newObj = dbAccess.materializeObject(cld, oid);
}
// if we did not find the object yet AND if the cld represents an Extent,
// we can lookup all tables of the extent classes:
if (newObj == null && cld.isExtent())
{
Iterator extents = getDescriptorRepository().getAllConcreteSubclassDescriptors(cld).iterator();
while (extents.hasNext())
{
ClassDescriptor extCld = (ClassDescriptor) extents.next();
newObj = dbAccess.materializeObject(extCld, oid);
if (newObj != null)
{
break;
}
}
}
return newObj;
} | Retrieve an plain object (without populated references) by it's identity
from the database
@param cld the real {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the object to refresh
@param oid the {@link org.apache.ojb.broker.Identity} of the object
@return A new plain object read from the database or <em>null</em> if not found
@throws ClassNotPersistenceCapableException |
/**
 * Retrieve a fully materialized object (dependent on the metadata settings)
 * by its identity from the database, and cache it.
 *
 * @param oid The {@link org.apache.ojb.broker.Identity} of the object to look up
 * @return A new object read from the database or <em>null</em> if not found
 * @throws ClassNotPersistenceCapableException
 */
private Object getDBObject(Identity oid) throws ClassNotPersistenceCapableException
{
    Class c = oid.getObjectsRealClass();
    if (c == null)
    {
        logger.info("Real class for used Identity object is 'null', use top-level class instead");
        c = oid.getObjectsTopLevelClass();
    }
    ClassDescriptor cld = getClassDescriptor(c);
    Object newObj = getPlainDBObject(cld, oid);
    // loading references is useful only when the object could be found in the db:
    if (newObj != null)
    {
        if (oid.getObjectsRealClass() == null)
        {
            // remember the concrete class we actually materialized
            oid.setObjectsRealClass(newObj.getClass());
        }
        /*
         * synchronize on newObj so the ODMG-layer can take a snapshot only of
         * fully cached (i.e. with all references + collections) objects
         */
        synchronized (newObj)
        {
            objectCache.enableMaterializationCache();
            try
            {
                // cache object immediately, so that references
                // can be established from referenced objects back to this object
                objectCache.doInternalCache(oid, newObj, ObjectCacheInternal.TYPE_NEW_MATERIALIZED);
                /*
                 * Chris Lewington: can cause problems with multiple objects
                 * mapped to one table, as follows:
                 *
                 * if the class searched on does not match the retrieved
                 * class, eg a search on an OID retrieves a row but it could
                 * be a different class (OJB gets all column values),
                 * then trying to resolve references will fail as the object
                 * will not match the class descriptor.
                 *
                 * To be safe, get the descriptor of the retrieved object
                 * BEFORE resolving refs
                 */
                ClassDescriptor newObjCld = getClassDescriptor(newObj.getClass());
                // don't force loading of references:
                final boolean unforced = false;
                // 2. retrieve non-scalar fields that contain objects retrievable from other tables
                referencesBroker.retrieveReferences(newObj, newObjCld, unforced);
                // 3. retrieve collection fields from foreign-key related tables:
                referencesBroker.retrieveCollections(newObj, newObjCld, unforced);
                objectCache.disableMaterializationCache();
            }
            catch(RuntimeException e)
            {
                // drop the local cache so no half-materialized entries survive the failure
                objectCache.doLocalClear();
                throw e;
            }
        }
    }
    return newObj;
}
object by it's identity from the database, as well as caching the
object
@param oid The {@link org.apache.ojb.broker.Identity} of the object to for
@return A new object read from the database or <em>null</em> if not found
@throws ClassNotPersistenceCapableException |
public Iterator getIteratorByQuery(Query query) throws PersistenceBrokerException
{
Class itemClass = query.getSearchClass();
ClassDescriptor cld = getClassDescriptor(itemClass);
return getIteratorFromQuery(query, cld);
} | returns an Iterator that iterates Objects of class c if calling the .next()
method. The Elements returned come from a SELECT ... WHERE Statement
that is defined by the Query query.
If itemProxy is null, no proxies are used. |
protected OJBIterator getIteratorFromQuery(Query query, ClassDescriptor cld) throws PersistenceBrokerException
{
RsIteratorFactory factory = RsIteratorFactoryImpl.getInstance();
OJBIterator result = getRsIteratorFromQuery(query, cld, factory);
if (query.usePaging())
{
result = new PagingIterator(result, query.getStartAtIndex(), query.getEndAtIndex());
}
return result;
} | Get an extent aware Iterator based on the Query
@param query
@param cld the ClassDescriptor
@return OJBIterator |
/**
 * Internal method to retrieve an object by its {@link Identity}: a cache lookup
 * is tried first, the database is only hit on a cache miss. Cached instances may
 * be refreshed depending on the descriptor settings. Fires the AFTER_LOOKUP event
 * in both cases.
 *
 * @param id identity of the object to retrieve
 * @return the cached or freshly materialized object, or <em>null</em> if not found
 * @throws PersistenceBrokerException
 */
public Object doGetObjectByIdentity(Identity id) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled()) logger.debug("getObjectByIdentity " + id);

    // check if object is present in ObjectCache:
    Object obj = objectCache.lookup(id);
    // only perform a db lookup if necessary (object not cached yet)
    if (obj == null)
    {
        obj = getDBObject(id);
    }
    else
    {
        ClassDescriptor cld = getClassDescriptor(obj.getClass());
        // if specified in the ClassDescriptor the instance must be refreshed
        if (cld.isAlwaysRefresh())
        {
            refreshInstance(obj, id, cld);
        }
        // now refresh all references flagged with refresh="true"
        checkRefreshRelationships(obj, id, cld);
    }

    // Invoke events on PersistenceBrokerAware instances and listeners
    AFTER_LOOKUP_EVENT.setTarget(obj);
    fireBrokerEvent(AFTER_LOOKUP_EVENT);
    AFTER_LOOKUP_EVENT.setTarget(null);

    //logger.info("RETRIEVING object " + obj);
    return obj;
}
@param id
@return
@throws PersistenceBrokerException |
private void refreshInstance(Object cachedInstance, Identity oid, ClassDescriptor cld)
{
// read in fresh copy from the db, but do not cache it
Object freshInstance = getPlainDBObject(cld, oid);
// update all primitive typed attributes
FieldDescriptor[] fields = cld.getFieldDescriptions();
FieldDescriptor fmd;
PersistentField fld;
for (int i = 0; i < fields.length; i++)
{
fmd = fields[i];
fld = fmd.getPersistentField();
fld.set(cachedInstance, fld.get(freshInstance));
}
} | refresh all primitive typed attributes of a cached instance
with the current values from the database.
refreshing of reference and collection attributes is not done
here.
@param cachedInstance the cached instance to be refreshed
@param oid the Identity of the cached instance
@param cld the ClassDescriptor of cachedInstance |
/**
 * Retrieve a single object by query, i.e. perform a SELECT ... FROM ... WHERE ...
 * in an RDBMS and return the first non-null result.
 *
 * @param query the query; a {@link QueryByIdentity} is answered via the identity
 *        lookup path, all other queries via an internal iterator over the result set
 * @return the matching object or <em>null</em> if none was found
 */
public Object getObjectByQuery(Query query) throws PersistenceBrokerException
{
    Object result = null;
    if (query instanceof QueryByIdentity)
    {
        // example obj may be an entity or an Identity
        Object obj = query.getExampleObject();
        if (obj instanceof Identity)
        {
            Identity oid = (Identity) obj;
            result = getObjectByIdentity(oid);
        }
        else
        {
            // TODO: This workaround doesn't allow 'null' for PK fields
            if (!serviceBrokerHelper().hasNullPKField(getClassDescriptor(obj.getClass()), obj))
            {
                Identity oid = serviceIdentity().buildIdentity(obj);
                result = getObjectByIdentity(oid);
            }
        }
    }
    else
    {
        Class itemClass = query.getSearchClass();
        ClassDescriptor cld = getClassDescriptor(itemClass);
        /*
        use OJB internal Iterator, thus we are able to close used
        resources instantly
        */
        OJBIterator it = getIteratorFromQuery(query, cld);
        /*
        arminw:
        patch by Andre Clute, instead of taking the first found result
        try to get the first found non-null result.
        He wrote:
        I have a situation where an item with a certain criteria is in my
        database twice -- once deleted, and then a non-deleted version of it.
        When I do a PB.getObjectByQuery(), the RsIterator gets both results
        from the database, but the first row is the deleted row, so my RowReader
        filters it out, and I do not get the right result.
        */
        try
        {
            while (result==null && it.hasNext())
            {
                result = it.next();
            }
        } // make sure that we close the used resources
        finally
        {
            if(it != null) it.releaseDbResources();
        }
    }
    return result;
}
I.e perform a SELECT ... FROM ... WHERE ... in an RDBMS |
public Enumeration getPKEnumerationByQuery(Class primaryKeyClass, Query query) throws PersistenceBrokerException
{
if (logger.isDebugEnabled()) logger.debug("getPKEnumerationByQuery " + query);
query.setFetchSize(1);
ClassDescriptor cld = getClassDescriptor(query.getSearchClass());
return new PkEnumeration(query, cld, primaryKeyClass, this);
} | returns an Enumeration of PrimaryKey Objects for objects of class DataClass.
The Elements returned come from a SELECT ... WHERE Statement
that is defined by the fields and their coresponding values of listFields
and listValues.
Useful for EJB Finder Methods...
@param primaryKeyClass the pk class for the searched objects
@param query the query |
public void store(Object obj, ObjectModification mod) throws PersistenceBrokerException
{
obj = extractObjectToStore(obj);
// null for unmaterialized Proxy
if (obj == null)
{
return;
}
ClassDescriptor cld = getClassDescriptor(obj.getClass());
// this call ensures that all autoincremented primary key attributes are filled
Identity oid = serviceIdentity().buildIdentity(cld, obj);
// select flag for insert / update selection by checking the ObjectModification
if (mod.needsInsert())
{
store(obj, oid, cld, true);
}
else if (mod.needsUpdate())
{
store(obj, oid, cld, false);
}
/*
arminw
TODO: Why we need this behaviour? What about 1:1 relations?
*/
else
{
// just store 1:n and m:n associations
storeCollections(obj, cld, mod.needsInsert());
}
} | Makes object obj persistent in the underlying persistence system.
E.G. by INSERT INTO ... or UPDATE ... in an RDBMS.
The ObjectModification parameter can be used to determine whether INSERT or update is to be used.
This functionality is typically called from transaction managers, that
track which objects have to be stored. If the object is an unmaterialized
proxy the method return immediately. |
/**
 * Central store routine, pulled out of the internal store so that multiple-table
 * inheritance can recurse through it (super classes are stored before the current level).
 *
 * @param obj the object to store
 * @param cld the {@link ClassDescriptor} level to store at (may be a super class
 *        descriptor of obj's class during recursion)
 * @param oid the {@link Identity} of the object; rebuilt after insert when it was transient
 * @param insert if <em>true</em> an INSERT is performed, otherwise an UPDATE
 * @param ignoreReferences if <em>true</em> the 1:n and m:n associations are not stored
 */
private void storeToDb(Object obj, ClassDescriptor cld, Identity oid, boolean insert, boolean ignoreReferences)
{
    // 1. link and store 1:1 references
    storeReferences(obj, cld, insert, ignoreReferences);

    Object[] pkValues = oid.getPrimaryKeyValues();
    if (!serviceBrokerHelper().assertValidPksForStore(cld.getPkFields(), pkValues))
    {
        // BRJ: fk values may be part of the pk, but they are not known during
        // creation of the Identity, so we have to get them here
        pkValues = serviceBrokerHelper().getKeyValues(cld, obj);
        if (!serviceBrokerHelper().assertValidPksForStore(cld.getPkFields(), pkValues))
        {
            String append = insert ? " on insert" : " on update" ;
            throw new PersistenceBrokerException("assertValidPkFields failed for Object of type: " + cld.getClassNameOfObject() + append);
        }
    }

    // get super class cld then store it with the object
    /*
    now for multiple table inheritance
    1. store super classes, topmost parent first
    2. go down through the hierarchy until the current class
    3. todo: store to full extent?

    // arminw: TODO: The extend-attribute feature doesn't work, should we remove this stuff?
    This if-clause will go up the inheritance hierarchy to store all the super classes.
    The id for the topmost super class will be the id for all the subclasses too
    */
    if(cld.getSuperClass() != null)
    {

        ClassDescriptor superCld = getDescriptorRepository().getDescriptorFor(cld.getSuperClass());
        storeToDb(obj, superCld, oid, insert);

        // arminw: why this?? I comment out this section
        // storeCollections(obj, cld.getCollectionDescriptors(), insert);
    }

    // 2. store primitive typed attributes (Or is THIS step 3 ?)
    // if obj is not yet present in the db use INSERT
    if (insert)
    {
        dbAccess.executeInsert(cld, obj);
        if(oid.isTransient())
        {
            // Create a new Identity based on the current set of primary key values.
            oid = serviceIdentity().buildIdentity(cld, obj);
        }
    }
    // else use UPDATE
    else
    {
        try
        {
            dbAccess.executeUpdate(cld, obj);
        }
        catch(OptimisticLockException e)
        {
            // ensure that the outdated object is removed from the cache
            objectCache.remove(oid);
            throw e;
        }
    }
    // cache object for symmetry with getObjectByXXX()
    // Add the object to the cache.
    objectCache.doInternalCache(oid, obj, ObjectCacheInternal.TYPE_WRITE);
    // 3. store 1:n and m:n associations
    if(!ignoreReferences) storeCollections(obj, cld, insert);
}
inheritance, i can recurse this function.
@param obj
@param cld
@param oid BRJ: what is it good for ???
@param insert
@param ignoreReferences |
public Iterator getReportQueryIteratorByQuery(Query query) throws PersistenceBrokerException
{
ClassDescriptor cld = getClassDescriptor(query.getSearchClass());
return getReportQueryIteratorFromQuery(query, cld);
} | Get an Iterator based on the ReportQuery
@param query
@return Iterator |
/**
 * Get an extent-aware RsIterator based on the query: a SQL iterator for
 * {@link QueryBySQL}, a plain iterator when no extents are involved, otherwise
 * a {@link ChainingIterator} covering the base class plus every concrete
 * subclass table (each table read only once).
 *
 * NOTE(review): the incoming query object is mutated here via setFetchSize(1),
 * which forces a row-by-row fetch size for every caller of this query instance --
 * confirm this is intentional.
 *
 * @param query the query to execute
 * @param cld the {@link ClassDescriptor} of the searched class
 * @param factory the factory for the concrete RsIterator flavour
 * @return OJBIterator
 */
private OJBIterator getRsIteratorFromQuery(Query query, ClassDescriptor cld, RsIteratorFactory factory)
        throws PersistenceBrokerException
{
    query.setFetchSize(1);
    if (query instanceof QueryBySQL)
    {
        if(logger.isDebugEnabled()) logger.debug("Creating SQL-RsIterator for class ["+cld.getClassNameOfObject()+"]");
        return factory.createRsIterator((QueryBySQL) query, cld, this);
    }

    if (!cld.isExtent() || !query.getWithExtents())
    {
        // no extents just use the plain vanilla RsIterator
        if(logger.isDebugEnabled()) logger.debug("Creating RsIterator for class ["+cld.getClassNameOfObject()+"]");
        return factory.createRsIterator(query, cld, this);
    }

    if(logger.isDebugEnabled()) logger.debug("Creating ChainingIterator for class ["+cld.getClassNameOfObject()+"]");
    ChainingIterator chainingIter = new ChainingIterator();

    // BRJ: add base class iterator
    if (!cld.isInterface())
    {
        if(logger.isDebugEnabled()) logger.debug("Adding RsIterator for class ["+cld.getClassNameOfObject()+"] to ChainingIterator");
        chainingIter.addIterator(factory.createRsIterator(query, cld, this));
    }

    Iterator extents = getDescriptorRepository().getAllConcreteSubclassDescriptors(cld).iterator();
    while (extents.hasNext())
    {
        ClassDescriptor extCld = (ClassDescriptor) extents.next();

        // read same table only once
        if (chainingIter.containsIteratorForTable(extCld.getFullTableName()))
        {
            if(logger.isDebugEnabled()) logger.debug("Skipping class ["+extCld.getClassNameOfObject()+"]");
        }
        else
        {
            if(logger.isDebugEnabled()) logger.debug("Adding RsIterator of class ["+extCld.getClassNameOfObject()+"] to ChainingIterator");
            // add the iterator to the chaining iterator.
            chainingIter.addIterator(factory.createRsIterator(query, extCld, this));
        }
    }
    return chainingIter;
}
@param query
@param cld
@param factory the Factory for the RsIterator
@return OJBIterator |
private OJBIterator getReportQueryIteratorFromQuery(Query query, ClassDescriptor cld) throws PersistenceBrokerException
{
RsIteratorFactory factory = ReportRsIteratorFactoryImpl.getInstance();
OJBIterator result = getRsIteratorFromQuery(query, cld, factory);
if (query.usePaging())
{
result = new PagingIterator(result, query.getStartAtIndex(), query.getEndAtIndex());
}
return result;
} | Get an extent aware Iterator based on the ReportQuery
@param query
@param cld
@return OJBIterator |
/**
 * Creates a proxy instance.
 *
 * @param baseClassForProxy The base class that the proxy should extend. For dynamic
 *        proxies, the method of generation is dependent on the ProxyFactory implementation.
 * @param realSubjectsIdentity The identity of the real subject
 * @return An instance of the proxy subclass
 * @throws PersistenceBrokerException If there is an error creating the proxy object
 */
public Object createProxy(Class baseClassForProxy, Identity realSubjectsIdentity)
{
    try
    {
        // the invocation handler manages all delegation stuff
        IndirectionHandler handler = getProxyFactory().createIndirectionHandler(pbKey, realSubjectsIdentity);

        // the proxy simply provides the interface of the real subject
        if (VirtualProxy.class.isAssignableFrom(baseClassForProxy))
        {
            // VirtualProxy subclasses are instantiated reflectively via their
            // single-argument (IndirectionHandler) constructor
            Constructor constructor = baseClassForProxy.getDeclaredConstructor(new Class[]{ IndirectionHandler.class });
            return constructor.newInstance(new Object[]{ handler });
        }
        else
        {
            // dynamic proxy: delegate generation to the configured ProxyFactory
            return getProxyFactory().createProxy(baseClassForProxy,handler);
        }
    }
    catch (Exception ex)
    {
        throw new PersistenceBrokerException("Unable to create proxy using class:"+baseClassForProxy.getName(), ex);
    }
}
@param baseClassForProxy The base class that the Proxy should extend. For dynamic Proxies, the method of
generation is dependent on the ProxyFactory implementation.
@param realSubjectsIdentity The identity of the subject
@return An instance of the proxy subclass
@throws PersistenceBrokerException If there is an error creating the proxy object |
private ObjectCache getCachePerClass(Class objectClass, int methodCall)
{
ObjectCache cache = (ObjectCache) cachesByClass.get(objectClass.getName());
if (cache == null && methodCall == AbstractMetaCache.METHOD_CACHE)
{
/**
* the cache wasn't found, and the cachesByClass didn't contain the key with a
* null value, so create a new cache for this classtype
*/
cache = new ObjectCacheJCSImpl(objectClass.getName());
cachesByClass.put(objectClass.getName(), cache);
}
return cache;
} | Gets the cache for the given class
@param objectClass The class to look up the cache for
@return The cache |
/**
 * Serializes, gzips and encodes the given object into a String suitable for
 * storage in a character column.
 *
 * NOTE(review): the gzip/object output streams are created lazily once and then
 * reused across calls, yet finish() is invoked on the gzip stream every call --
 * writing to a finished GZIPOutputStream normally fails, so a second invocation
 * looks like it would throw; confirm how the stream fields (byteOut/uuOut/gzipOut/
 * objOut, declared outside this view) are (re)initialized.
 * NOTE(review): byteOut.toString() uses the platform default charset; the
 * counterpart sqlToJava uses String.getBytes() with the same default, so the pair
 * is self-consistent but not portable across differently configured JVMs.
 *
 * @see FieldConversion#javaToSql(Object)
 */
public Object javaToSql(Object source)
{
    // the stream fields are shared state: guard against concurrent conversions
    synchronized (byteOut)
    {
        try
        {
            // lazily build the output chain: object -> gzip -> base64 -> byte buffer
            if (gzipOut == null)
            {
                gzipOut = new GZIPOutputStream(uuOut);
                objOut = new ObjectOutputStream(gzipOut);
            }

            /*
            ** Clear out the byte array
            */
            byteOut.reset();

            objOut.writeObject(source);
            objOut.flush();
            gzipOut.finish();
            gzipOut.flush();

            return (byteOut.toString());
        }
        catch (Throwable t)
        {
            throw new ConversionException(t);
        }
    }
}
* @see FieldConversion#javaToSql(Object) |
/**
 * Decodes, gunzips and deserializes the String produced by javaToSql back into
 * the original object.
 *
 * NOTE(review): getBytes() uses the platform default charset, matching the
 * toString() call in javaToSql -- consistent, but not portable across JVMs with
 * different default charsets.
 * NOTE(review): objIn is only closed on the success path; all streams are
 * in-memory so no OS resource leaks, but an exception skips the close().
 *
 * @see FieldConversion#sqlToJava(Object)
 */
public Object sqlToJava(Object source)
{
    try
    {
        // build the input chain: string bytes -> base64 decode -> gunzip -> object
        ByteArrayInputStream stringIn =
                new ByteArrayInputStream(((String) source).getBytes());
        Base64.InputStream uuIn =
                new Base64.InputStream(stringIn, Base64.DECODE, false);
        GZIPInputStream gzipIn = new GZIPInputStream(uuIn);
        ObjectInputStream objIn = new ObjectInputStream(gzipIn);

        Object result = objIn.readObject();
        objIn.close();

        return result;
    }
    catch (Throwable t)
    {
        throw new ConversionException(t);
    }
}
* @see FieldConversion#sqlToJava(Object) |
public static List<String> getAllArtifacts(final DbModule module) {
final List<String> gavcs = module.getArtifacts();
for(final DbModule submodule: module.getSubmodules()){
gavcs.addAll(getAllArtifacts(submodule));
}
return gavcs;
} | Return a gavc list of all the artifact contained into the module (an its submodules)
@param module
@return List<String> |
public static Set<Artifact> getAllArtifacts(final Module module) {
final Set<Artifact> artifacts = new HashSet<Artifact>();
artifacts.addAll(module.getArtifacts());
for(final Module submodule: module.getSubmodules()){
artifacts.addAll(getAllArtifacts(submodule));
}
return artifacts;
} | Return a list of all the artifact contained into the module (an its submodules)
@param module
@return Set<Artifact> |
public static Set<Dependency> getAllDependencies(final Module module) {
final Set<Dependency> dependencies = new HashSet<Dependency>();
dependencies.addAll(module.getDependencies());
for(final Module submodule: module.getSubmodules()){
dependencies.addAll(getAllDependencies(submodule));
}
return dependencies;
} | Return a list of all the dependencies contained into the module (an its submodules)
@param module
@return Set<Dependency> |
public static List<DbDependency> getAllDbDependencies(final DbModule module) {
final List<DbDependency> dependencies = new ArrayList<DbDependency>();
dependencies.addAll(module.getDependencies());
for(final DbModule submodule: module.getSubmodules()){
dependencies.addAll(getAllDbDependencies(submodule));
}
return dependencies;
} | Return a list of all the dependencies contained into the module (an its submodules)
@param module
@return List<DbDependency> |
public static String getModuleName(final String moduleId) {
final int splitter = moduleId.indexOf(':');
if(splitter == -1){
return moduleId;
}
return moduleId.substring(0, splitter);
} | Split a module Id to get the module name
@param moduleId
@return String |
public static String getModuleVersion(final String moduleId) {
final int splitter = moduleId.lastIndexOf(':');
if(splitter == -1){
return moduleId;
}
return moduleId.substring(splitter+1);
} | Split a module Id to get the module version
@param moduleId
@return String |
public static String getGroupId(final String gavc) {
final int splitter = gavc.indexOf(':');
if(splitter == -1){
return gavc;
}
return gavc.substring(0, splitter);
} | Split an artifact gavc to get the groupId
@param gavc
@return String |
public static DbArtifact createDbArtifact(final String gavc) {
final DbArtifact artifact = new DbArtifact();
final String[] artifactInfo = gavc.split(":");
if(artifactInfo.length > 0){
artifact.setGroupId(artifactInfo[0]);
}
if(artifactInfo.length > 1){
artifact.setArtifactId(artifactInfo[1]);
}
if(artifactInfo.length > 2){
artifact.setVersion(artifactInfo[2]);
}
if(artifactInfo.length > 3){
artifact.setClassifier(artifactInfo[3]);
}
if(artifactInfo.length > 4){
artifact.setExtension(artifactInfo[4]);
}
if(artifactInfo.length > 5){
artifact.setOrigin(artifactInfo[5]);
}
return artifact;
} | Generates an artifact starting from gavc
WARNING: use this method only if you have a missing reference in the database!!!
@param gavc
@return DbArtifact |
public static Artifact createArtifact(final String gavc) {
String groupId = null, artifactId = null, version = null, classifier = null, extension = null, origin = null;
final String[] artifactInfo = gavc.split(":");
if(artifactInfo.length > 0){
groupId = artifactInfo[0];
}
if(artifactInfo.length > 1){
artifactId = artifactInfo[1];
}
if(artifactInfo.length > 2){
version = artifactInfo[2];
}
if(artifactInfo.length > 3){
classifier = artifactInfo[3];
}
if(artifactInfo.length > 4){
extension = artifactInfo[4];
}
if(artifactInfo.length > 5){
origin = artifactInfo[5];
}
return DataModelFactory.createArtifact(groupId, artifactId, version, classifier, null, extension, origin);
} | Generates an artifact starting from gavc
WARNING: use this method only if you have a missing reference in the database!!!
@param gavc
@return Artifact
public static List<DbModule> getAllSubmodules(final DbModule module) {
final List<DbModule> submodules = new ArrayList<DbModule>();
submodules.addAll(module.getSubmodules());
for(final DbModule submodule: module.getSubmodules()){
submodules.addAll(getAllSubmodules(submodule));
}
return submodules;
} | Return the list of all the module submodules
@param module
@return List<DbModule> |
public static void sort(final List<Artifact> targets) {
int n = targets.size();
while(n != 0){
int newn = 0;
for(int i = 1 ; i <= n-1 ; i++){
if (targets.get(i-1).toString().compareTo(targets.get(i).toString()) > 0){
Collections.swap(targets, i - 1, i);
newn = i;
}
}
n = newn;
}
} | Bubble sort
@param targets |
public void init(final MultivaluedMap<String, String> queryParameters) {
final String scopeCompileParam = queryParameters.getFirst(ServerAPI.SCOPE_COMPILE_PARAM);
if(scopeCompileParam != null){
this.scopeComp = Boolean.valueOf(scopeCompileParam);
}
final String scopeProvidedParam = queryParameters.getFirst(ServerAPI.SCOPE_PROVIDED_PARAM);
if(scopeProvidedParam != null){
this.scopePro = Boolean.valueOf(scopeProvidedParam);
}
final String scopeRuntimeParam = queryParameters.getFirst(ServerAPI.SCOPE_RUNTIME_PARAM);
if(scopeRuntimeParam != null){
this.scopeRun = Boolean.valueOf(scopeRuntimeParam);
}
final String scopeTestParam = queryParameters.getFirst(ServerAPI.SCOPE_TEST_PARAM);
if(scopeTestParam != null){
this.scopeTest = Boolean.valueOf(scopeTestParam);
}
} | The parameter must never be null
@param queryParameters |
private PersistenceBroker obtainBroker()
{
PersistenceBroker _broker;
try
{
if (pbKey == null)
{
//throw new OJBRuntimeException("Not possible to do action, cause no tx runnning and no PBKey is set");
log.warn("No tx runnning and PBKey is null, try to use the default PB");
_broker = PersistenceBrokerFactory.defaultPersistenceBroker();
}
else
{
_broker = PersistenceBrokerFactory.createPersistenceBroker(pbKey);
}
}
catch (PBFactoryException e)
{
log.error("Could not obtain PB for PBKey " + pbKey, e);
throw new OJBRuntimeException("Unexpected micro-kernel exception", e);
}
return _broker;
} | Used to get PB, when no tx is running. |
public void releaseLocks(Object key)
{
LockInfo info = new LockInfo(key, null, METHOD_RELEASE_LOCKS);
try
{
byte[] requestBarr = serialize(info);
performRequest(requestBarr);
}
catch(Throwable t)
{
throw new LockRuntimeException("Cannot release locks using owner key '" + key + "'", t);
}
} | } |
/**
 * Put new sequence object for given sequence name, registered under the
 * jcdAlias of the calling broker's connection.
 *
 * NOTE(review): this mutates the shared sequencesDBMap without the
 * class-level synchronization used by removeSequence — confirm whether
 * concurrent registration is possible here.
 *
 * @param sequenceName Name of the sequence.
 * @param seq The sequence object to add.
 */
private void addSequence(String sequenceName, HighLowSequence seq)
{
    // lookup the sequence map for calling DB
    String jcdAlias = getBrokerForClass()
            .serviceConnectionManager().getConnectionDescriptor().getJcdAlias();
    Map mapForDB = (Map) sequencesDBMap.get(jcdAlias);
    if(mapForDB == null)
    {
        // first sequence for this database: create its map lazily
        mapForDB = new HashMap();
    }
    mapForDB.put(sequenceName, seq);
    // re-putting the map is a no-op after the first call, but harmless
    sequencesDBMap.put(jcdAlias, mapForDB);
}
@param sequenceName Name of the sequence.
@param seq The sequence object to add. |
protected void removeSequence(String sequenceName)
{
// lookup the sequence map for calling DB
Map mapForDB = (Map) sequencesDBMap.get(getBrokerForClass()
.serviceConnectionManager().getConnectionDescriptor().getJcdAlias());
if(mapForDB != null)
{
synchronized(SequenceManagerHighLowImpl.class)
{
mapForDB.remove(sequenceName);
}
}
} | Remove the sequence for given sequence name.
@param sequenceName Name of the sequence to remove. |
public PBKey getPBKey()
{
if (pbKey == null)
{
this.pbKey = new PBKey(this.getJcdAlias(), this.getUserName(), this.getPassWord());
}
return pbKey;
} | Return a key to identify the connection descriptor. |
public void setJdbcLevel(String jdbcLevel)
{
if (jdbcLevel != null)
{
try
{
double intLevel = Double.parseDouble(jdbcLevel);
setJdbcLevel(intLevel);
}
catch(NumberFormatException nfe)
{
setJdbcLevel(2.0);
logger.info("Specified JDBC level was not numeric (Value=" + jdbcLevel + "), used default jdbc level of 2.0 ");
}
}
else
{
setJdbcLevel(2.0);
logger.info("Specified JDBC level was null, used default jdbc level of 2.0 ");
}
} | Sets the jdbcLevel. parse the string setting and check that it is indeed an integer.
@param jdbcLevel The jdbcLevel to set |
/**
 * Serializes this connection descriptor to its repository-XML form:
 * the jdbc-connection-descriptor element with its attributes, followed
 * by the nested connection-pool and (optional) sequence descriptors.
 *
 * @return the XML fragment describing this connection
 */
public String toXML()
{
    RepositoryTags tags = RepositoryTags.getInstance();
    String eol = SystemUtils.LINE_SEPARATOR;
    StringBuffer strReturn = new StringBuffer( 1024 );
    // leading comment naming the connection url parts
    strReturn.append( eol );
    strReturn.append( " <!-- Descriptor for Connection " );
    strReturn.append( getProtocol() );
    strReturn.append( ":" );
    strReturn.append( getSubProtocol() );
    strReturn.append( ":" );
    strReturn.append( getDbAlias() );
    strReturn.append( " -->" );
    strReturn.append( eol );
    // opening tag with its attributes, one per line
    strReturn.append( " " );
    strReturn.append( tags.getOpeningTagNonClosingById( JDBC_CONNECTION_DESCRIPTOR ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( JCD_ALIAS, this.getJcdAlias() ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( DEFAULT_CONNECTION, "" + this.isDefaultConnection() ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( DBMS_NAME, this.getDbms() ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( JDBC_LEVEL, "" + this.getJdbcLevel() ) );
    strReturn.append( eol );
    //username is optional
    String user = getUserName();
    if( user != null )
    {
        strReturn.append( " " );
        strReturn.append( tags.getAttribute( USER_NAME, user ) );
        strReturn.append( eol );
    }
    // password is optional
    String passwd = getPassWord();
    if( passwd != null )
    {
        strReturn.append( " " );
        strReturn.append( tags.getAttribute( USER_PASSWD, passwd ) );
        strReturn.append( eol );
    }
    // JDBC Datasource or DriverManager information are alternatives:
    String dsn = getDatasourceName();
    if( dsn != null )
    {
        strReturn.append( " " );
        strReturn.append( tags.getAttribute( DATASOURCE_NAME, this.getDatasourceName() ) );
        strReturn.append( eol );
    }
    else
    {
        // DriverManager style: driver class plus protocol/subprotocol/dbalias
        strReturn.append( " " );
        strReturn.append( tags.getAttribute( DRIVER_NAME, this.getDriver() ) );
        strReturn.append( eol );
        strReturn.append( " " );
        strReturn.append( tags.getAttribute( URL_PROTOCOL, this.getProtocol() ) );
        strReturn.append( eol );
        strReturn.append( " " );
        strReturn.append( tags.getAttribute( URL_SUBPROTOCOL, this.getSubProtocol() ) );
        strReturn.append( eol );
        strReturn.append( " " );
        // dbalias may contain characters needing XML escaping
        strReturn.append( encode( tags.getAttribute( URL_DBALIAS, this.getDbAlias() ) ) );
        strReturn.append( eol );
    }
    // connection behaviour flags
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( EAGER_RELEASE, "" + this.getEagerRelease() ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( BATCH_MODE, "" + this.getBatchMode() ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( USE_AUTOCOMMIT, "" + this.getUseAutoCommit() ) );
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getAttribute( IGNORE_AUTOCOMMIT_EXCEPTION, "" + this.isIgnoreAutoCommitExceptions() ) );
    strReturn.append( eol );
    strReturn.append( " >" );
    strReturn.append( eol );
    strReturn.append( eol );
    // nested descriptors: pool always present, sequence optional
    strReturn.append( this.getConnectionPoolDescriptor().toXML() );
    strReturn.append( eol );
    if( this.getSequenceDescriptor() != null )
    {
        strReturn.append( this.getSequenceDescriptor().toXML() );
    }
    strReturn.append( eol );
    strReturn.append( " " );
    strReturn.append( tags.getClosingTagById( JDBC_CONNECTION_DESCRIPTOR ) );
    strReturn.append( eol );
    return strReturn.toString();
}
@see XmlCapable#toXML() |
public synchronized boolean checkWrite(TransactionImpl tx, Object obj)
{
if (log.isDebugEnabled()) log.debug("LM.checkWrite(tx-" + tx.getGUID() + ", " + new Identity(obj, tx.getBroker()).toString() + ")");
LockStrategy lockStrategy = LockStrategyFactory.getStrategyFor(obj);
return lockStrategy.checkWrite(tx, obj);
} | checks if there is a writelock for transaction tx on object obj.
Returns true if so, else false. |
/**
 * Checkpoints the envelope in this state: performs the pending delete
 * against the store and downgrades the envelope to the transient state.
 *
 * NOTE(review): the original comment said "rollback the transaction",
 * which does not match the delete performed here — confirm intent.
 *
 * @param mod the object envelope to checkpoint
 * @throws org.apache.ojb.broker.PersistenceBrokerException when the delete fails
 */
public void checkpoint(ObjectEnvelope mod)
    throws org.apache.ojb.broker.PersistenceBrokerException
{
    mod.doDelete();
    mod.setModificationState(StateTransient.getInstance());
}
/**
 * Commits the envelope in this state: performs the pending delete
 * against the store and downgrades the envelope to the transient state.
 *
 * @param mod the object envelope to commit
 * @throws PersistenceBrokerException when the delete fails
 */
public void commit(ObjectEnvelope mod) throws PersistenceBrokerException
{
    mod.doDelete();
    mod.setModificationState(StateTransient.getInstance());
}
/**
 * Checkpoints the envelope in this state: performs the pending insert
 * against the store and promotes the envelope to the old-clean state.
 *
 * @param mod the object envelope to checkpoint
 * @throws PersistenceBrokerException when the insert fails
 */
public void checkpoint(ObjectEnvelope mod) throws PersistenceBrokerException
{
    mod.doInsert();
    mod.setModificationState(StateOldClean.getInstance());
}
/**
 * Commits the envelope in this state: performs the pending insert
 * against the store and promotes the envelope to the old-clean state.
 *
 * @param mod the object envelope to commit
 * @throws PersistenceBrokerException when the insert fails
 */
public void commit(ObjectEnvelope mod) throws PersistenceBrokerException
{
    mod.doInsert();
    mod.setModificationState(StateOldClean.getInstance());
}
public void writeObject(Object o, GraphicsDocument document, boolean asChild) throws RenderException {
GeometryCollection coll = (GeometryCollection) o;
document.writeElement("path", asChild);
document.writeAttribute("fill-rule", "evenodd");
document.writeAttributeStart("d");
for (int i = 0; i < coll.getNumGeometries(); i++) {
document.writeObject(coll.getGeometryN(i), true); // TODO delegate to appropriate writers, is this correct?
}
document.writeAttributeEnd();
} | Writes a <code>GeometryCollection</code> object.
@param o The <code>LineString</code> to be encoded. |
private FieldDescriptor extractIdentityColumnField(ClassDescriptor cld)
{
FieldDescriptor[] pkFields = cld.getPkFields();
for(int i = 0; i < pkFields.length; i++)
{
// to find the identity column we search for a autoincrement
// read-only field
if(pkFields[i].isAutoIncrement() && pkFields[i].isAccessReadOnly())
{
return pkFields[i];
}
}
return null;
} | Gets the identity column descriptor for the given class
or return <code>null</code> if none defined.
@param cld The class descriptor
@return The class's identity column or <code>null</code> if it does not have one |
protected Query[] buildPrefetchQueries(Collection proxies, Collection realSubjects)
{
Collection queries = new ArrayList();
Collection idsSubset;
Object proxy;
IndirectionHandler handler;
Identity id;
Class realClass;
HashMap classToIds = new HashMap();
Class topLevelClass = getItemClassDescriptor().getClassOfObject();
PersistenceBroker pb = getBroker();
ObjectCache cache = pb.serviceObjectCache();
for (Iterator it = proxies.iterator(); it.hasNext(); )
{
proxy = it.next();
handler = ProxyHelper.getIndirectionHandler(proxy);
if (handler == null)
{
continue;
}
id = handler.getIdentity();
if (cache.lookup(id) != null)
{
realSubjects.add(pb.getObjectByIdentity(id));
continue;
}
realClass = id.getObjectsRealClass();
if (realClass == null)
{
realClass = Object.class; // to remember that the real class is unknown
}
idsSubset = (HashSet) classToIds.get(realClass);
if (idsSubset == null)
{
idsSubset = new HashSet();
classToIds.put(realClass, idsSubset);
}
idsSubset.add(id);
if (idsSubset.size() == pkLimit)
{
Query query;
if (realClass == Object.class)
{
query = buildPrefetchQuery(topLevelClass, idsSubset, true);
}
else
{
query = buildPrefetchQuery(realClass, idsSubset, false);
}
queries.add(query);
idsSubset.clear();
}
}
for (Iterator it = classToIds.entrySet().iterator(); it.hasNext(); )
{
Map.Entry entry = (Map.Entry) it.next();
realClass = (Class) entry.getKey();
idsSubset = (HashSet) entry.getValue();
if (idsSubset.size() > 0)
{
Query query;
if (realClass == Object.class)
{
query = buildPrefetchQuery(topLevelClass, idsSubset, true);
}
else
{
query = buildPrefetchQuery(realClass, idsSubset, false);
}
queries.add(query);
}
}
return (Query[]) queries.toArray(new Query[queries.size()]);
} | Build the multiple queries for one relationship because of limitation of IN(...)
@param proxies Collection containing all proxy objects to load
@param realSubjects Collection where real subjects found in the cache should be added. |
public static void main(final String[] args) throws ExceptionInInitializerError {
try {
final GrapesServer grapesServer = new GrapesServer();
grapesServer.run(args);
} catch (Exception e) {
LOG.error("Grapes server failed to start:" + e.getMessage());
throw new ExceptionInInitializerError(e);
}
} | Runs Grapes |
public int getAcceptableActions(Component c)
{
if (c instanceof javax.swing.JTree
&& ((javax.swing.JTree)c).getModel() instanceof OjbMetaDataTreeModel)
return DnDWorkerConstants.DRAG_COPY | DnDWorkerConstants.DRAG_LINK;
else return DnDWorkerConstants.NONE;
} | Return a bitmask of acceptable actions. In most cases you will only support
DRAG_COPY, but sometimes you might support DRAG_LINK or DRAG_MOVE as well.
@param c The component that acts as the drag source
@return A bitmask of possible drag actions for the given Component |
/**
 * Return a Transferable with the currently selected tree nodes for DnD
 * export. Only JTrees backed by an OjbMetaDataTreeModel are supported;
 * any other component (or any failure) yields null.
 *
 * NOTE(review): the descriptor array is sized from getSelectionCount(),
 * but slots are only filled for nodes whose associated descriptor is an
 * AttributeDescriptorBase — other selections leave null entries. Confirm
 * that OjbMetadataTransferable tolerates nulls.
 * NOTE(review): the System.err output and printStackTrace look like debug
 * leftovers; consider routing them through the project's logger. The
 * local 'model' is also unused.
 *
 * @param c The component that acts as the drag source
 * @return a Transferable containing the exported data, or null
 */
public Transferable getTransferable(Component c)
{
    if (c instanceof javax.swing.JTree
        && ((javax.swing.JTree)c).getModel() instanceof OjbMetaDataTreeModel)
    {
        try
        {
            javax.swing.JTree tree = (javax.swing.JTree)c;
            OjbMetaDataTreeModel model = (OjbMetaDataTreeModel)tree.getModel();
            // one slot per selected row; slots may stay null (see class note)
            AttributeDescriptorBase descriptors[] = new AttributeDescriptorBase[tree.getSelectionCount()];
            for (int i = 0; tree.getSelectionPaths() != null && i < tree.getSelectionPaths().length; i++)
            {
                Object o = ((OjbMetaTreeNode)tree.getSelectionPaths()[i].getLastPathComponent()).getAssociatedDescriptor();
                if (o instanceof AttributeDescriptorBase)
                {
                    System.err.println(" adding Node" + o);
                    descriptors[i] = (AttributeDescriptorBase) o;
                }
            }
            return new OjbMetadataTransferable(descriptors);
        }
        catch (Throwable t)
        {
            t.printStackTrace();
        }
    }
    return null;
}
the Component the DnD actions has been started for. If the component
supports selection you must first check which items are selected and
afterwards put those items in the Transferable.
@param c The component that acts as the drag source
@return a Transferable containing the exported data |
/**
 * Get the requested object from the cache. The cache key is either read
 * from the pipeline context (via keyKey) or rebuilt from the given keys;
 * all of that logic lives in the caching support service.
 *
 * @param keyKey key to put the cache key in the pipeline context
 * @param contextKey key to put the cache context in the pipeline context
 * @param keys keys which need to be included in the cache context
 * @param category cache category
 * @param pipelineContext pipeline context
 * @param containerClass container class
 * @param <CONTAINER> container class
 * @return cache container
 */
protected <CONTAINER extends CacheContainer> CONTAINER getContainer(String keyKey, String contextKey,
        String[] keys, CacheCategory category, PipelineContext pipelineContext, Class<CONTAINER> containerClass) {
    // pure delegation; 'this' is passed along — presumably used by the
    // service as part of the cache context, confirm in CachingSupportService
    return cachingSupportService.getContainer(keyKey, contextKey, keys, category, pipelineContext, this,
            containerClass);
}
possible. Alternatively, the {@link CacheContainer} is built to determine the cache key.
@param keyKey key to put the cache key in the pipeline context
@param contextKey key to put the cache context in the pipeline context
@param keys keys which need to be include in the cache context
@param category cache category
@param pipelineContext pipeline context
@param containerClass container class
@param <CONTAINER> container class
@return cache container |
/**
 * Put a {@link CacheContainer} in the cache; the cache key is stored in
 * the pipeline context. All storage logic lives in the caching support
 * service.
 *
 * @param pipelineContext pipeline context
 * @param category cache category
 * @param keys keys which need to be included in the cache context
 * @param keyKey key to put the cache key in the pipeline context
 * @param contextKey key to put the cache context in the pipeline context
 * @param cacheContainer cache container
 * @param envelope envelope
 */
protected void putContainer(PipelineContext pipelineContext, CacheCategory category, String[] keys, String keyKey,
        String contextKey, CacheContainer cacheContainer, Envelope envelope) {
    // pure delegation; 'this' is passed along — presumably used by the
    // service as part of the cache context, confirm in CachingSupportService
    cachingSupportService.putContainer(pipelineContext, this, category, keys, keyKey, contextKey, cacheContainer,
            envelope);
}
@param pipelineContext pipeline context
@param category cache category
@param keys keys which need to be include in the cache context
@param contextKey key to put the cache context in the pipeline context
@param keyKey key to put the cache key in the pipeline context
@param cacheContainer cache container
@param envelope envelope |
public List<String> getModuleVersions(final String name, final FiltersHolder filters) {
final List<String> versions = repositoryHandler.getModuleVersions(name, filters);
if (versions.isEmpty()) {
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity("Module " + name + " does not exist.").build());
}
return versions;
} | Returns the available module names regarding the filters
@param name String
@param filters FiltersHolder
@return List<String> |
public DbModule getModule(final String moduleId) {
final DbModule dbModule = repositoryHandler.getModule(moduleId);
if (dbModule == null) {
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity("Module " + moduleId + " does not exist.").build());
}
return dbModule;
} | Returns a module
@param moduleId String
@return DbModule |
public void deleteModule(final String moduleId) {
final DbModule module = getModule(moduleId);
repositoryHandler.deleteModule(module.getId());
for (final String gavc : DataUtils.getAllArtifacts(module)) {
repositoryHandler.deleteArtifact(gavc);
}
} | Delete a module
@param moduleId String |
public List<DbLicense> getModuleLicenses(final String moduleId,
final LicenseMatcher licenseMatcher) {
final DbModule module = getModule(moduleId);
final List<DbLicense> licenses = new ArrayList<>();
final FiltersHolder filters = new FiltersHolder();
final ArtifactHandler artifactHandler = new ArtifactHandler(repositoryHandler, licenseMatcher);
for (final String gavc : DataUtils.getAllArtifacts(module)) {
licenses.addAll(artifactHandler.getArtifactLicenses(gavc, filters));
}
return licenses;
} | Return a licenses view of the targeted module
@param moduleId String
@return List<DbLicense> |
public void promoteModule(final String moduleId) {
final DbModule module = getModule(moduleId);
for (final String gavc : DataUtils.getAllArtifacts(module)) {
final DbArtifact artifact = repositoryHandler.getArtifact(gavc);
artifact.setPromoted(true);
repositoryHandler.store(artifact);
}
repositoryHandler.promoteModule(module);
} | Perform the module promotion
@param moduleId String |
/**
 * Provides a report about the promotion feasibility of a module: which
 * dependency modules are not yet promoted (recursively), which "do not
 * use" artifacts it depends on, and which third-party dependencies have
 * missing or unaccepted licenses. Results are cached per moduleId.
 *
 * NOTE(review): the cache is keyed by moduleId only — confirm it is
 * invalidated when module/artifact/license data changes.
 *
 * @param moduleId String
 * @return PromotionReportView
 * @throws WebApplicationException 404 when the module does not exist
 */
public PromotionReportView getPromotionReport(final String moduleId) {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format(":: Starting promo report [%s]", moduleId));
    }
    // short-circuit: reuse a previously computed report when available
    final PromotionReportView fromCache = cache.get(moduleId);
    if (null != fromCache) {
        return fromCache;
    }
    final DependencyHandler depHandler = new DependencyHandler(repositoryHandler);
    final ModelMapper modelMapper = new ModelMapper(repositoryHandler);
    final DbModule module = getModule(moduleId);
    final DbOrganization organization = getOrganization(module);
    final PromotionReportView report = new PromotionReportView();
    report.setRootModule(DataModelFactory.createModule(module.getName(), module.getVersion()));
    //        if (!report.isSnapshot()) {
    // filters initialization: only corporate, not-yet-promoted dependencies
    final FiltersHolder filters = new FiltersHolder();
    filters.addFilter(new PromotedFilter(false));
    filters.addFilter(new CorporateFilter(organization));
    // Checks if each dependency module has been promoted
    final List<Dependency> deps = depHandler.getModuleDependencies(moduleId, filters);
    removeDuplicates(deps);
    for (final Dependency dependency : deps) {
        final DbModule depModule = repositoryHandler.getRootModuleOf(dependency.getTarget().getGavc());
        // recurse into un-promoted dependency modules (skipping self)
        if (depModule != null && !depModule.getId().equals(moduleId) && !depModule.isPromoted()) {
            report.addUnPromotedDependency(dependency.getTarget().getGavc());
            report.addDependencyPromotionReport(depModule.getId(), getPromotionReport(depModule.getId()));
        }
    }
    // Checks if the module has dependencies that shouldn't be used
    final List<String> treatedArtifacts = new ArrayList<>();
    for (final DbDependency dependency : DataUtils.getAllDbDependencies(module)) {
        // test-scoped dependencies do not block promotion
        if(dependency.getScope().equals(Scope.TEST)) {
            continue;
        }
        final DbArtifact artifactDep = repositoryHandler.getArtifact(dependency.getTarget());
        if (artifactDep == null) {
            // handle the case of a corporate artifact which is not available in the repository
            continue;
        }
        if (artifactDep.getDoNotUse() && !treatedArtifacts.contains(artifactDep.getGavc())) {
            // attach the latest reviewer comment (if any) to the do-not-use entry
            DbComment dbComment = repositoryHandler.getLatestComment(artifactDep.getGavc(), artifactDep.getClass().getSimpleName());
            report.addDoNotUseArtifact(modelMapper.getArtifact(artifactDep), dbComment == null ? null : modelMapper.getComment(dbComment));
            treatedArtifacts.add(artifactDep.getGavc());
        }
        // Checks if the module has third party dependency license missing
        // filter the corporate dependencies and check the third party
        List<String> artifactLicenses = artifactDep.getLicenses();
        if (!filters.getCorporateFilter().filter(dependency)) {
            if (artifactLicenses.isEmpty()) {
                if(LOG.isWarnEnabled()) {
                    LOG.warn(String.format("Missing license on artifact [%s]", artifactDep.getGavc()));
                }
                report.addMissingThirdPartyDependencyLicenses(modelMapper.getArtifact(artifactDep));
            } else {
                // Check if the existing license name exists in the database
                for (String licenseName : artifactLicenses) {
                    if (null == licenseName) {
                        continue;
                    }
                    //                        DbLicense currentLicense = repositoryHandler.getLicense(licenseName);
                    final Set<DbLicense> matchingLicenses = licenseMatcher.getMatchingLicenses(licenseName);
                    if (matchingLicenses.isEmpty()) {
                        // license string unknown to Grapes: report as missing
                        if(LOG.isWarnEnabled()) {
                            LOG.warn(String.format("Artifact license string [%s] unknown to Grapes", licenseName));
                        }
                        report.addMissingThirdPartyDependencyLicenses(modelMapper.getArtifact(artifactDep));
                    } else {
                        matchingLicenses.forEach(lic -> {
                            //
                            // Check if the third party license is approved. If approved == null it is still valid license
                            // add to a not approved list
                            //
                            if (lic.isApproved() != null && !lic.isApproved()) {
                                report.addUnacceptedLicenseEntry(modelMapper.getArtifact(artifactDep).getGavc(), lic.getName());
                                if (LOG.isWarnEnabled()) {
                                    LOG.warn(String.format("License [%s] is used by [%s], but is not accepted ", lic.getName(), artifactDep.getGavc()));
                                }
                            }
                        });
                    }
                }
            }
        }
    }
    //        }
    report.compute();
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format(":: Done promo report %s", moduleId));
    }
    cache.put(moduleId, report);
    return report;
}
@param moduleId String
@return PromotionReportView |
Subsets and Splits