code
stringlengths 67
466k
| docstring
stringlengths 1
13.2k
|
---|---|
public static JdbcType getJdbcTypeByReflection(String fieldType)
{
JdbcType result;
if (fieldType.equalsIgnoreCase(Character.class.getName()) || fieldType.equalsIgnoreCase("char"))
result = getJdbcTypeByName("char");
else if (fieldType.equalsIgnoreCase(Short.class.getName()) || fieldType.equalsIgnoreCase("short"))
result = getJdbcTypeByName("smallint");
else if (fieldType.equalsIgnoreCase(Integer.class.getName()) || fieldType.equalsIgnoreCase("int"))
result = getJdbcTypeByName("integer");
else if (fieldType.equalsIgnoreCase(Long.class.getName()) || fieldType.equalsIgnoreCase("long"))
result = getJdbcTypeByName("bigint");
else if (fieldType.equalsIgnoreCase(Byte.class.getName()) || fieldType.equalsIgnoreCase("byte"))
result = getJdbcTypeByName("tinyint");
else if (fieldType.equalsIgnoreCase(Float.class.getName()) || fieldType.equalsIgnoreCase("float"))
result = getJdbcTypeByName("real");
else if (fieldType.equalsIgnoreCase(Double.class.getName()) || fieldType.equalsIgnoreCase("double"))
result = getJdbcTypeByName("float");
else if (fieldType.equalsIgnoreCase(String.class.getName()))
result = getJdbcTypeByName("varchar");
/*
TODO: arminw: useful? This only will work in conjunction with a FieldConversion
*/
else if (fieldType.equalsIgnoreCase(java.util.Date.class.getName()))
result = getJdbcTypeByName("date");
else if (fieldType.equalsIgnoreCase(Date.class.getName()))
result = getJdbcTypeByName("date");
else if (fieldType.equalsIgnoreCase(Time.class.getName()))
result = getJdbcTypeByName("time");
else if (fieldType.equalsIgnoreCase(Timestamp.class.getName()))
result = getJdbcTypeByName("timestamp");
else if (fieldType.equalsIgnoreCase(BigDecimal.class.getName()))
result = getJdbcTypeByName("decimal");
else if (fieldType.equalsIgnoreCase(Ref.class.getName()))
result = getJdbcTypeByName("ref");
else if (fieldType.equalsIgnoreCase(Struct.class.getName()))
result = getJdbcTypeByName("struct");
else if (fieldType.equalsIgnoreCase(Boolean.class.getName()) || fieldType.equalsIgnoreCase("boolean"))
result = getJdbcTypeByName("bit");
//#ifdef JDBC30
else if (fieldType.equalsIgnoreCase(URL.class.getName()))
result = getJdbcTypeByName("datalink");
//#endif
else
throw new OJBRuntimeException("The type " + fieldType + " can not be handled by OJB automatically."
+ " Please specify a type as defined by java.sql.Types in your field-descriptor");
return result;
} | Try to automatically assign a jdbc type for the given
java type name. This method is used if e.g. in metadata a
column type was not set.
@see FieldDescriptor#getJdbcType |
public static Object getObjectFromColumn(ResultSet rs, Integer jdbcType, int columnId)
throws SQLException
{
return getObjectFromColumn(rs, null, jdbcType, null, columnId);
} | Returns an java object read from the specified ResultSet column. |
private static Object getObjectFromColumn(ResultSet rs, CallableStatement stmt, Integer jdbcType, String columnName, int columnId)
throws SQLException
{
return getJdbcTypeByTypesIndex(jdbcType).getObjectFromColumn(rs, stmt, columnName, columnId);
} | Returns an java object for the given jdbcType by extract from the given
CallableStatement or ResultSet.
NOTE: Exactly one of the arguments of type CallableStatement or ResultSet
have to be non-null.
If the 'columnId' argument is equals {@link JdbcType#MIN_INT}, then the given 'columnName'
argument is used to lookup column. Else the given 'columnId' is used as column index. |
public static String getSqlTypeAsString(int jdbcType)
{
String statusName = "*can't find String representation for sql type '" + jdbcType + "'*";
try
{
Field[] fields = Types.class.getDeclaredFields();
for (int i = 0; i < fields.length; i++)
{
if (fields[i].getInt(null) == jdbcType)
{
statusName = fields[i].getName();
break;
}
}
}
catch (Exception ignore)
{
// ignore it
}
return statusName;
} | Returns a string representation of the given {@link java.sql.Types} value. |
private List<Object> create(
final Object rowObj,
final ExecutionContext ec,
final Object previousRow) {
if (rowObj instanceof ExcelFixtureRowHandler) {
final ExcelFixtureRowHandler rowHandler = (ExcelFixtureRowHandler) rowObj;
return rowHandler.handleRow(ec, this, previousRow);
} else {
repositoryService.persist(rowObj);
ec.addResult(this, rowObj);
return Collections.singletonList(rowObj);
}
} | endregion |
public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException, java.io.IOException
{
if (flavor.isMimeTypeEqual(OJBMETADATA_FLAVOR))
return selectedDescriptors;
else
throw new UnsupportedFlavorException(flavor);
} | Returns an object which represents the data to be transferred. The class
of the object returned is defined by the representation class of the flavor.
@param flavor the requested flavor for the data
@see DataFlavor#getRepresentationClass
@exception IOException if the data is no longer available
in the requested flavor.
@exception UnsupportedFlavorException if the requested data flavor is
not supported. |
public boolean isDataFlavorSupported(DataFlavor flavor)
{
return java.util.Arrays.asList(_flavors).contains(flavor);
} | Returns whether or not the specified data flavor is supported for
this object.
@param flavor the requested flavor for the data
@return boolean indicating whether or not the data flavor is supported |
/**
 * This method is called from within the constructor to initialize the form:
 * it builds the JDBC connection dialog (driver, URL, username, password
 * fields plus Cancel/Connect buttons) in a GridBagLayout.
 * WARNING: Do NOT modify this code. The content of this method is always
 * regenerated by the Form Editor.
 */
private void initComponents()//GEN-BEGIN:initComponents
{
java.awt.GridBagConstraints gridBagConstraints;
lblJDBCDriver = new javax.swing.JLabel();
tfJDBCDriver = new javax.swing.JTextField();
lblJDBCURL = new javax.swing.JLabel();
tfJDBCURL = new javax.swing.JTextField();
lblUsername = new javax.swing.JLabel();
tfUsername = new javax.swing.JTextField();
lblPassword = new javax.swing.JLabel();
tfPassword = new javax.swing.JPasswordField();
jPanel1 = new javax.swing.JPanel();
pbCancel = new javax.swing.JButton();
pbConnect = new javax.swing.JButton();
getContentPane().setLayout(new java.awt.GridBagLayout());
addWindowListener(new java.awt.event.WindowAdapter()
{
public void windowClosing(java.awt.event.WindowEvent evt)
{
closeDialog(evt);
}
});
lblJDBCDriver.setText("JDBC Driver Class:");
lblJDBCDriver.setLabelFor(tfJDBCDriver);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
getContentPane().add(lblJDBCDriver, gridBagConstraints);
tfJDBCDriver.setText("jTextField1");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.weightx = 1.0;
getContentPane().add(tfJDBCDriver, gridBagConstraints);
lblJDBCURL.setText("JDBC URL:");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
getContentPane().add(lblJDBCURL, gridBagConstraints);
tfJDBCURL.setColumns(40);
tfJDBCURL.setText("jTextField2");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.weightx = 1.0;
getContentPane().add(tfJDBCURL, gridBagConstraints);
lblUsername.setText("Username:");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
getContentPane().add(lblUsername, gridBagConstraints);
tfUsername.setText("jTextField3");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.weightx = 1.0;
getContentPane().add(tfUsername, gridBagConstraints);
lblPassword.setText("Password:");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 3;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
getContentPane().add(lblPassword, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.weightx = 1.0;
getContentPane().add(tfPassword, gridBagConstraints);
pbCancel.setText("Cancel");
pbCancel.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(java.awt.event.ActionEvent evt)
{
pbCancelActionPerformed(evt);
}
});
jPanel1.add(pbCancel);
pbConnect.setText("Connect");
pbConnect.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(java.awt.event.ActionEvent evt)
{
pbConnectActionPerformed(evt);
}
});
jPanel1.add(pbConnect);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 4;
gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
getContentPane().add(jPanel1, gridBagConstraints);
pack();
} | This method is called from within the constructor to
initialize the form.
WARNING: Do NOT modify this code. The content of this method is
always regenerated by the Form Editor. |
/**
 * Handler for the "Connect" button: copies the connection settings entered
 * in the dialog (driver class, URL, user, password) into the main frame's
 * properties, triggers a DBConnectAction, and closes this dialog.
 */
private void pbConnectActionPerformed (java.awt.event.ActionEvent evt)//GEN-FIRST:event_pbConnectActionPerformed
{//GEN-HEADEREND:event_pbConnectActionPerformed
// Add your handling code here:
mainFrame.setProperty(JFrmMainFrame.JDBC_DRIVER, tfJDBCDriver.getText());
mainFrame.setProperty(JFrmMainFrame.JDBC_URL, tfJDBCURL.getText());
mainFrame.setProperty(JFrmMainFrame.JDBC_USER, tfUsername.getText());
mainFrame.setProperty(JFrmMainFrame.JDBC_PASSWORD, new String(this.tfPassword.getPassword()));
new org.apache.ojb.tools.mapping.reversedb.gui.actions.DBConnectAction(mainFrame).actionPerformed(null);
dispose();
} | GEN-LAST:event_pbCancelActionPerformed |
public void applyDefaults() {
if (size == -1) {
size = 8;
}
if (family == null) {
family = "Verdana";
}
if (weight == null) {
weight = "normal";
}
if (style == null) {
style = "normal";
}
if (color == null) {
color = "#000000"; // black
}
if (opacity == -1) {
opacity = 1;
}
} | Applies default values to all properties that have not been set.
@since 1.8.0 |
/**
 * Loads the OJB configuration. The properties file name may be set as a
 * System property; otherwise the default name is used. After the base
 * properties are loaded, each configuration entry (implementation classes,
 * locking flags, OQL collection class, SQL IN limit and the broker pool
 * settings) is read with a sensible default fallback.
 */
protected void load()
{
// properties file may be set as a System property.
// if no property is set take default name.
String fn = System.getProperty(OJB_PROPERTIES_FILE, OJB_PROPERTIES_FILE);
setFilename(fn);
super.load();
// default repository & connection descriptor file
repositoryFilename = getString("repositoryFile", OJB_METADATA_FILE);
// object cache class
objectCacheClass = getClass("ObjectCacheClass", ObjectCacheDefaultImpl.class, ObjectCache.class);
// load PersistentField Class
persistentFieldClass =
getClass("PersistentFieldClass", PersistentFieldDirectImpl.class, PersistentField.class);
// load PersistenceBroker Class
persistenceBrokerClass =
getClass("PersistenceBrokerClass", PersistenceBrokerImpl.class, PersistenceBroker.class);
// load ListProxy Class
listProxyClass = getClass("ListProxyClass", ListProxyDefaultImpl.class);
// load SetProxy Class
setProxyClass = getClass("SetProxyClass", SetProxyDefaultImpl.class);
// load CollectionProxy Class
collectionProxyClass = getClass("CollectionProxyClass", CollectionProxyDefaultImpl.class);
// load IndirectionHandler Class
indirectionHandlerClass =
getClass("IndirectionHandlerClass", IndirectionHandlerJDKImpl.class, IndirectionHandler.class);
// load ProxyFactory Class
proxyFactoryClass =
getClass("ProxyFactoryClass", ProxyFactoryJDKImpl.class, ProxyFactory.class);
// load configuration for ImplicitLocking parameter:
useImplicitLocking = getBoolean("ImplicitLocking", false);
// load configuration for LockAssociations parameter:
lockAssociationAsWrites = (getString("LockAssociations", "WRITE").equalsIgnoreCase("WRITE"));
// load OQL Collection Class
oqlCollectionClass = getClass("OqlCollectionClass", DListImpl.class, ManageableCollection.class);
// set the limit for IN-sql , -1 for no limits
sqlInLimit = getInteger("SqlInLimit", -1);
//load configuration for PB pool
maxActive = getInteger(PoolConfiguration.MAX_ACTIVE,
PoolConfiguration.DEFAULT_MAX_ACTIVE);
maxIdle = getInteger(PoolConfiguration.MAX_IDLE,
PoolConfiguration.DEFAULT_MAX_IDLE);
maxWait = getLong(PoolConfiguration.MAX_WAIT,
PoolConfiguration.DEFAULT_MAX_WAIT);
timeBetweenEvictionRunsMillis = getLong(PoolConfiguration.TIME_BETWEEN_EVICTION_RUNS_MILLIS,
PoolConfiguration.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS);
minEvictableIdleTimeMillis = getLong(PoolConfiguration.MIN_EVICTABLE_IDLE_TIME_MILLIS,
PoolConfiguration.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS);
whenExhaustedAction = getByte(PoolConfiguration.WHEN_EXHAUSTED_ACTION,
PoolConfiguration.DEFAULT_WHEN_EXHAUSTED_ACTION);
useSerializedRepository = getBoolean("useSerializedRepository", false);
} | Loads the configuration from file "OBJ.properties". If the system
property "OJB.properties" is set, then the configuration in that file is
loaded. Otherwise, the file "OJB.properties" is tried. If that is also
unsuccessful, then the configuration is filled with default values. |
public LockEntry getWriter(Object obj)
{
PersistenceBroker broker = getBroker();
Identity oid = new Identity(obj, broker);
LockEntry result = null;
try
{
result = getWriterRemote(oid);
}
catch (Throwable e)
{
log.error(e);
}
return result;
} | returns the LockEntry for the Writer of object obj.
If now writer exists, null is returned. |
public Collection getReaders(Object obj)
{
Collection result = null;
try
{
Identity oid = new Identity(obj, getBroker());
byte selector = (byte) 'r';
byte[] requestBarr = buildRequestArray(oid, selector);
HttpURLConnection conn = getHttpUrlConnection();
//post request
BufferedOutputStream out = new BufferedOutputStream(conn.getOutputStream());
out.write(requestBarr,0,requestBarr.length);
out.flush();
// read result from
InputStream in = conn.getInputStream();
ObjectInputStream ois = new ObjectInputStream(in);
result = (Collection) ois.readObject();
// cleanup
ois.close();
out.close();
conn.disconnect();
}
catch (Throwable t)
{
throw new PersistenceBrokerException(t);
}
return result;
} | returns a collection of Reader LockEntries for object obj.
If now LockEntries could be found an empty Vector is returned. |
public boolean addReader(TransactionImpl tx, Object obj)
{
try
{
LockEntry lock = new LockEntry(new Identity(obj,getBroker()).toString(),
tx.getGUID(),
System.currentTimeMillis(),
LockStrategyFactory.getIsolationLevel(obj),
LockEntry.LOCK_READ);
addReaderRemote(lock);
return true;
}
catch (Throwable t)
{
log.error("Cannot store LockEntry for object " + obj + " in transaction " + tx, t);
return false;
}
} | Add a reader lock entry for transaction tx on object obj
to the persistent storage. |
public void removeReader(TransactionImpl tx, Object obj)
{
try
{
LockEntry lock = new LockEntry(new Identity(obj,getBroker()).toString(), tx.getGUID());
removeReaderRemote(lock);
}
catch (Throwable t)
{
log.error("Cannot remove LockEntry for object " + obj + " in transaction " + tx);
}
} | remove a reader lock entry for transaction tx on object obj
from the persistent storage. |
public void removeWriter(LockEntry writer)
{
try
{
removeWriterRemote(writer);
}
catch (Throwable t)
{
log.error("Cannot remove LockEntry", t);
}
} | remove a writer lock entry for transaction tx on object obj
from the persistent storage. |
public boolean upgradeLock(LockEntry reader)
{
try
{
upgradeLockRemote(reader);
reader.setLockType(LockEntry.LOCK_WRITE);
return true;
}
catch (Throwable t)
{
log.error("Cannot upgrade LockEntry " + reader, t);
return false;
}
} | upgrade a reader lock entry for transaction tx on object obj
and write it to the persistent storage. |
public boolean setWriter(TransactionImpl tx, Object obj)
{
try
{
LockEntry lock = new LockEntry(new Identity(obj,getBroker()).toString(),
tx.getGUID(),
System.currentTimeMillis(),
LockStrategyFactory.getIsolationLevel(obj),
LockEntry.LOCK_WRITE);
setWriterRemote(lock);
return true;
}
catch (Throwable t)
{
log.error("Cannot set LockEntry for object " + obj + " in transaction " + tx);
return false;
}
} | generate a writer lock entry for transaction tx on object obj
and write it to the persistent storage. |
public boolean hasReadLock(TransactionImpl tx, Object obj)
{
try
{
LockEntry lock = new LockEntry(new Identity(obj,getBroker()).toString(), tx.getGUID());
boolean result = hasReadLockRemote(lock);
return result;
}
catch (Throwable t)
{
log.error("Cannot check read lock for object " + obj + " in transaction " + tx, t);
return false;
}
} | check if there is a reader lock entry for transaction tx on object obj
in the persistent storage. |
/**
 * Adds a column pair to this foreignkey. Each side is added only if it is
 * not already present in the corresponding column list.
 *
 * NOTE(review): the local and remote columns are de-duplicated independently,
 * so adding a pair whose local column already exists but whose remote column
 * does not (or vice versa) can leave the two lists misaligned — confirm
 * callers never add such overlapping pairs.
 *
 * @param localColumn The column in the local table
 * @param remoteColumn The column in the remote table
 */
public void addColumnPair(String localColumn, String remoteColumn)
{
if (!_localColumns.contains(localColumn))
{
_localColumns.add(localColumn);
}
if (!_remoteColumns.contains(remoteColumn))
{
_remoteColumns.add(remoteColumn);
}
} | Adds a column pair to this foreignkey.
@param localColumn The column in the local table
@param remoteColumn The column in the remote table |
/**
 * The command's execution method. It goes over all given layers (provided
 * they're vector layers) and fetches the features using the location
 * geometry and the query type. In case the query type is "intersects", the
 * overlapping ratio is also checked. The resulting list of features is added
 * to the command result so it can be sent back to the client.
 *
 * @param request  the search request (layers, location, query/search type,
 *                 optional buffer, ratio and filters)
 * @param response the response to which matching features are added per layer
 * @throws Exception on missing parameters, unknown query type, or layer errors
 */
@Override
public void execute(SearchByLocationRequest request, SearchByLocationResponse response) throws Exception {
if (null == request.getLayerIds()) {
throw new GeomajasException(ExceptionCode.PARAMETER_MISSING, "layerIds");
}
String crsCode = request.getCrs();
if (null == crsCode) {
throw new GeomajasException(ExceptionCode.PARAMETER_MISSING, "crs");
}
Geometry location = converter.toInternal(request.getLocation());
int queryType = request.getQueryType();
double ratio = request.getRatio();
int searchType = request.getSearchType();
Crs crs = geoService.getCrs2(request.getCrs());
// Check if a buffer should be added around the location:
Geometry geometry = location;
if (request.getBuffer() > 0) {
geometry = location.buffer(request.getBuffer());
}
log.debug("search by location " + geometry);
for (String clientLayerId : request.getLayerIds()) {
String serverLayerId = request.getServerLayerId(clientLayerId);
if (null == serverLayerId) {
throw new GeomajasException(ExceptionCode.PARAMETER_MISSING,
"serverLayerId for clientLayerId " + clientLayerId);
}
if (securityContext.isLayerVisible(serverLayerId)) {
VectorLayer vectorLayer = configurationService.getVectorLayer(serverLayerId);
if (vectorLayer != null) {
String geomName = vectorLayer.getLayerInfo().getFeatureInfo().getGeometryType().getName();
// Transform geometry to layer CRS:
Geometry layerGeometry = geoService.transform(geometry, crs, layerService.getCrs(vectorLayer));
log.trace("on layer " + serverLayerId + " use " + layerGeometry);
// Create the correct Filter object:
Filter f = null;
switch (queryType) {
case SearchByLocationRequest.QUERY_INTERSECTS:
f = filterCreator.createIntersectsFilter(layerGeometry, geomName);
break;
case SearchByLocationRequest.QUERY_CONTAINS:
f = filterCreator.createContainsFilter(layerGeometry, geomName);
break;
case SearchByLocationRequest.QUERY_TOUCHES:
f = filterCreator.createTouchesFilter(layerGeometry, geomName);
break;
case SearchByLocationRequest.QUERY_WITHIN:
f = filterCreator.createWithinFilter(layerGeometry, geomName);
break;
default:
throw new IllegalArgumentException("Unknown query type " + queryType);
}
//Set the per layer filter
if (null != request.getFilter(clientLayerId)) {
if (null == f) {
f = filterCreator.parseFilter(request.getFilter(clientLayerId));
} else {
f = filterCreator.createAndFilter(
filterCreator.parseFilter(request.getFilter(clientLayerId)), f);
}
}
//Set the global filter
if (null != request.getFilter()) {
if (null == f) {
f = filterCreator.parseFilter(request.getFilter());
} else {
f = filterCreator.createAndFilter(filterCreator.parseFilter(request.getFilter()), f);
}
}
// Get the features:
List<InternalFeature> temp = layerService.getFeatures(serverLayerId, crs, f, null, request
.getFeatureIncludes());
if (temp.size() > 0) {
List<Feature> features = new ArrayList<Feature>();
// Calculate overlap ratio in case of intersects:
if (queryType == SearchByLocationRequest.QUERY_INTERSECTS && ratio >= 0 && ratio < 1) {
for (InternalFeature feature : temp) {
double minimalOverlap = feature.getGeometry().getArea() * ratio;
Geometry overlap = geometry.intersection(feature.getGeometry());
double effectiveOverlap = overlap.getArea();
if (minimalOverlap <= effectiveOverlap) {
log.trace("found " + feature);
Feature dto = converter.toDto(feature);
dto.setCrs(crsCode);
features.add(dto);
}
}
} else {
for (InternalFeature feature : temp) {
log.trace("found " + feature);
Feature dto = converter.toDto(feature);
dto.setCrs(crsCode);
features.add(dto);
}
}
// features.size can again be 0... so check:
if (features.size() > 0) {
// We have a response for this layer!
response.addLayer(clientLayerId, features);
// If searchType == SEARCH_FIRST_LAYER, we should search no further:
if (searchType == SearchByLocationRequest.SEARCH_FIRST_LAYER) {
break;
}
}
}
}
}
} | The command's execution method. It will go over all given layers (provided they're vector layers), and fetch the
features, using the location geometry and the query type. In case the query type is "intersects", the overlapping
ratio is also checked. The resulting list of features is added to the command result so it can be send back to
the client. |
public int compare(Object objA, Object objB)
{
String idAStr = _table.getColumn((String)objA).getProperty("id");
String idBStr = _table.getColumn((String)objB).getProperty("id");
int idA;
int idB;
try {
idA = Integer.parseInt(idAStr);
}
catch (Exception ex) {
return 1;
}
try {
idB = Integer.parseInt(idBStr);
}
catch (Exception ex) {
return -1;
}
return idA < idB ? -1 : (idA > idB ? 1 : 0);
} | Compares two columns given by their names.
@param objA The name of the first column
@param objB The name of the second column
@return
@see java.util.Comparator#compare(java.lang.Object, java.lang.Object) |
private void initMapping(String attributePath, String aliasPath)
{
Iterator aliasSegmentItr = pathToSegments(aliasPath).iterator();
String currPath = "";
String separator = "";
while (aliasSegmentItr.hasNext())
{
currPath = currPath + separator + (String) aliasSegmentItr.next();
int beginIndex = attributePath.indexOf(currPath);
if (beginIndex == -1)
{
break;
}
int endIndex = beginIndex + currPath.length();
m_mapping.put(attributePath.substring(0, endIndex), m_name);
separator = ".";
}
} | generates the mapping from the aliasPath
@param aliasPath the portion of attributePath which should be aliased |
public String getAlias(String path)
{
if (m_allPathsAliased && m_attributePath.lastIndexOf(path) != -1)
{
return m_name;
}
Object retObj = m_mapping.get(path);
if (retObj != null)
{
return (String) retObj;
}
return null;
} | Returns the name of this alias if path has been added
to the aliased portions of attributePath
@param path the path to test for inclusion in the alias |
public void addClass(ClassDescriptorDef classDef)
{
classDef.setOwner(this);
// Regardless of the format of the class name, we're using the fully qualified format
// This is safe because of the package & class naming constraints of the Java language
_classDefs.put(classDef.getQualifiedName(), classDef);
} | Adds the class descriptor to this model.
@param classDef The class descriptor
@return The class descriptor or <code>null</code> if there is no such class in this model |
public void process() throws ConstraintException
{
ClassDescriptorDef classDef;
// process all classes
for (Iterator it = getClasses(); it.hasNext();)
{
classDef = (ClassDescriptorDef)it.next();
if (!classDef.hasBeenProcessed())
{
classDef.process();
}
}
} | Processes all classes (flattens the hierarchy such that every class has declarations for all fields,
references,collections that it will have in the descriptor) and applies modifications (removes ignored
features, changes declarations).
@throws ConstraintException If a constraint has been violated |
public void checkConstraints(String checkLevel) throws ConstraintException
{
// check constraints now after all classes have been processed
for (Iterator it = getClasses(); it.hasNext();)
{
((ClassDescriptorDef)it.next()).checkConstraints(checkLevel);
}
// additional model constraints that either deal with bigger parts of the model or
// can only be checked after the individual classes have been checked (e.g. specific
// attributes have been ensured)
new ModelConstraints().check(this, checkLevel);
} | Checks constraints on this model.
@param checkLevel The amount of checks to perform
@throws ConstraintException If a constraint has been violated |
public FinishRequest toFinishRequest(boolean includeHeaders) {
if (includeHeaders) {
return new FinishRequest(body, copyHeaders(headers), statusCode);
} else {
String mime = null;
if (body!=null) {
mime = "text/plain";
if (headers!=null && (headers.containsKey("Content-Type") || headers.containsKey("content-type"))) {
mime = headers.get("Content-Type");
if (mime==null) {
mime = headers.get("content-type");
}
}
}
return new FinishRequest(body, mime, statusCode);
}
} | Convert the message to a FinishRequest |
public boolean isJavaInheritance()
{
if(javaInheritance == null)
{
javaInheritance = getClassDescriptor().getSuperClassDescriptor().getClassOfObject()
.isAssignableFrom(getClassDescriptor().getClassOfObject()) ? Boolean.TRUE : Boolean.FALSE;
}
return javaInheritance.booleanValue();
} | If this method returns <em>true</em> the inheritance described by this object
is a <em>normal</em> JAVA inheritance. If <em>false</em> the inheritance is only declared
in the O/R mapping it's a <em>declarative inheritance</em>, the referenced "super class" in <strong>not</strong>
a JAVA super class of the main class. |
/**
 * Get the attributes for a feature, and put them in the feature object.
 * <p/>
 * The feature is filled into the passed feature object. If the feature
 * should not be visible according to security, null is returned (the
 * original (passed) feature should be discarded in that case). The
 * attributes are filtered according to security settings. The editable and
 * deletable states for the feature are also set.
 *
 * @param layer layer which contains the feature
 * @param feature feature for the result
 * @param featureBean plain object for feature
 * @return feature with filled attributes or null when feature not visible
 * @throws LayerException problem converting attributes
 */
public InternalFeature getAttributes(VectorLayer layer, InternalFeature feature, Object featureBean)
throws LayerException {
String layerId = layer.getId();
Map<String, Attribute> featureAttributes = getRealAttributes(layer, featureBean);
feature.setAttributes(featureAttributes); // to allow isAttributeReadable to see full object
addSyntheticAttributes(feature, featureAttributes, layer);
if (securityContext.isFeatureVisible(layerId, feature)) {
feature.setAttributes(filterAttributes(layerId, layer.getLayerInfo().getFeatureInfo().getAttributesMap(),
feature, featureAttributes));
feature.setEditable(securityContext.isFeatureUpdateAuthorized(layerId, feature));
feature.setDeletable(securityContext.isFeatureDeleteAuthorized(layerId, feature));
return feature;
}
return null;
} | Get the attributes for a feature, and put them in the feature object.
<p/>
The attributes are converted lazily if requested by the layer.
<p/>
The feature is filled into the passed feature object. If the feature should not be visible according to security,
null is returned (the original (passed) feature should be discarded in that case). The attributes are filtered
according to security settings. The editable and deletable states for the feature are also set.
@param layer layer which contains the feature
@param feature feature for the result
@param featureBean plain object for feature
@return feature with filled attributes or null when feature not visible
@throws LayerException problem converting attributes |
public void setAttributeEditable(Attribute attribute, boolean editable) {
attribute.setEditable(editable);
if (!(attribute instanceof LazyAttribute)) { // should not instantiate lazy attributes!
if (attribute instanceof ManyToOneAttribute) {
setAttributeEditable(((ManyToOneAttribute) attribute).getValue(), editable);
} else if (attribute instanceof OneToManyAttribute) {
List<AssociationValue> values = ((OneToManyAttribute) attribute).getValue();
for (AssociationValue value : values) {
setAttributeEditable(value, editable);
}
}
}
} | Set editable state on an attribute. This needs to also set the state on the associated attributes.
@param attribute attribute for which the editable state needs to be set
@param editable new editable state |
/**
 * This method is called from within the constructor to initialize the form:
 * a label plus a read-only, borderless text field showing the column name,
 * laid out with GridBagLayout.
 * WARNING: Do NOT modify this code. The content of this method is always
 * regenerated by the Form Editor.
 */
private void initComponents()//GEN-BEGIN:initComponents
{
java.awt.GridBagConstraints gridBagConstraints;
lblColumnName = new javax.swing.JLabel();
tfColumnName = new javax.swing.JTextField();
setLayout(new java.awt.GridBagLayout());
lblColumnName.setText("Column Name:");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
add(lblColumnName, gridBagConstraints);
tfColumnName.setEditable(false);
tfColumnName.setText("jTextField1");
tfColumnName.setBorder(null);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
add(tfColumnName, gridBagConstraints);
} | This method is called from within the constructor to
initialize the form.
WARNING: Do NOT modify this code. The content of this method is
always regenerated by the Form Editor. |
public void setEditorTarget (PropertyEditorTarget target)
{
if (target instanceof DBMetaColumnNode)
{
super.setEditorTarget(target);
this.tfColumnName.setText((String)target.getAttribute(DBMetaColumnNode.ATT_COLUMN_NAME));
}
else
{
throw new UnsupportedOperationException("This editor can only edit DBMetaColumnNode objects");
}
} | GEN-END:initComponents |
private void increaseBeliefCount(String bName) {
Object belief = this.getBelief(bName);
int count = 0;
if (belief!=null) {
count = (Integer) belief;
}
this.setBelief(bName, count + 1);
} | If the belief its a count of some sort his counting its increased by one.
@param bName
- the name of the belief count. |
private void setBelief(String bName, Object value) {
introspector.setBeliefValue(this.getLocalName(), bName, value, null);
} | Modifies the belief referenced by bName parameter.
@param bName
- the name of the belief to update.
@param value
- the new value for the belief |
public static Organization createOrganization(final String name){
final Organization organization = new Organization();
organization.setName(name);
return organization;
} | Generates an organization regarding the parameters.
@param name String
@return Organization |
public static Module createModule(final String name,final String version){
final Module module = new Module();
module.setName(name);
module.setVersion(version);
module.setPromoted(false);
return module;
} | Generates a module regarding the parameters.
@param name String
@param version String
@return Module |
public static Artifact createArtifact(final String groupId, final String artifactId, final String version, final String classifier, final String type, final String extension, final String origin){
final Artifact artifact = new Artifact();
artifact.setGroupId(groupId);
artifact.setArtifactId(artifactId);
artifact.setVersion(version);
if(classifier != null){
artifact.setClassifier(classifier);
}
if(type != null){
artifact.setType(type);
}
if(extension != null){
artifact.setExtension(extension);
}
artifact.setOrigin(origin == null ? "maven" : origin);
return artifact;
} | Generates an artifact regarding the parameters.
<P> <b>WARNING:</b> The parameters grId/arId/version should be filled!!! Only classifier and type are not mandatory.
@param groupId String
@param artifactId String
@param version String
@param classifier String
@param type String
@param extension String
@return Artifact |
public static License createLicense(final String name, final String longName, final String comments, final String regexp, final String url){
final License license = new License();
license.setName(name);
license.setLongName(longName);
license.setComments(comments);
license.setRegexp(regexp);
license.setUrl(url);
return license;
} | Generates a License regarding the parameters.
@param name String
@param longName String
@param comments String
@param regexp String
@param url String
@return License |
/**
 * Builds a new {@link Dependency} pointing at the given artifact.
 *
 * @param artifact the dependency target
 * @param scope the dependency scope
 * @return the freshly created dependency
 */
public static Dependency createDependency(final Artifact artifact, final Scope scope){
    final Dependency result = new Dependency();
    result.setScope(scope);
    result.setTarget(artifact);
    return result;
} | Generates a dependency regarding the parameters.
@param artifact Artifact
@param scope Scope
@return Dependency |
/**
 * Builds a new {@link Dependency}, resolving the scope from its string form.
 * The lookup is case-insensitive (the string is upper-cased before the
 * {@link Scope#valueOf} call).
 *
 * NOTE(review): a null scope would raise a NullPointerException here, which
 * is not caught; only IllegalArgumentException is translated. The original
 * cause is logged but not chained into the thrown exception.
 *
 * @param artifact the dependency target
 * @param scope textual scope name, case-insensitive
 * @return the freshly created dependency
 * @throws UnsupportedScopeException when the string matches no {@link Scope}
 */
public static Dependency createDependency(final Artifact artifact, final String scope) throws UnsupportedScopeException{
    try{
        final Scope depScope = Scope.valueOf(scope.toUpperCase());
        return createDependency(artifact, depScope);
    }
    catch(IllegalArgumentException e){
        LOG.log(Level.SEVERE, String.format("Cannot identify scope for string %s. Details: %s", scope, e.getMessage()), e);
        throw new UnsupportedScopeException();
    }
} | Generates a dependency regarding the parameters.
@param artifact Artifact
@param scope String
@return Dependency
@throws UnsupportedScopeException |
/**
 * Builds a new {@link PromotionDetails} from the supplied flags and lists.
 *
 * NOTE(review): the setters are not expected to throw; the broad catch is
 * purely defensive and re-wraps anything unexpected as an IOException to
 * honour the declared contract.
 *
 * @param canBePromoted whether the module can be promoted
 * @param isSnapshot whether the module is a snapshot
 * @param unPromotedDependencies names of dependencies blocking promotion
 * @param doNotUseArtifacts artifacts flagged as not-to-be-used
 * @return the freshly created promotion details
 * @throws IOException if the details cannot be assembled
 */
public static PromotionDetails createPromotionDetails(final Boolean canBePromoted, final Boolean isSnapshot, final List<String> unPromotedDependencies, final List<Artifact> doNotUseArtifacts) throws IOException{
    try{
        final PromotionDetails promotionDetails = new PromotionDetails();
        promotionDetails.setPromotable(canBePromoted);
        promotionDetails.setSnapshot(isSnapshot);
        promotionDetails.setUnPromotedDependencies(unPromotedDependencies);
        promotionDetails.setDoNotUseArtifacts(doNotUseArtifacts);
        return promotionDetails;
    }
    catch(Exception e){
        throw new IOException(e);
    }
} | Generates a PromotionDetails regarding the parameters.
@param canBePromoted Boolean
@param isSnapshot Boolean
@param unPromotedDependencies List<String>
@param doNotUseArtifacts List<Artifact>
@return PromotionDetails
@throws IOException |
/**
 * Builds a new {@link Delivery} carrying the supplied commercial information.
 *
 * @param commercialName commercial name of the delivery
 * @param commercialVersion commercial version of the delivery
 * @param releaseDate release date (free-form string)
 * @param dependencies dependency identifiers included in the delivery
 * @return the freshly created delivery
 */
public static Delivery createDelivery(final String commercialName, final String commercialVersion, final String releaseDate, final List<String> dependencies) {
    final Delivery delivery = new Delivery();
    delivery.setCommercialName(commercialName);
    delivery.setCommercialVersion(commercialVersion);
    delivery.setReleaseDate(releaseDate);
    delivery.setDependencies(dependencies);
    return delivery;
} | Generates a Delivery regarding the parameters.
@param commercialName String
@param commercialVersion String
@param releaseDate String
@param dependencies List<String>
@return Delivery |
/**
 * Builds a new {@link Comment} entity describing a user action on an entity.
 *
 * @param entityId id of the commented entity
 * @param entityType type of the commented entity
 * @param action the action performed by the user
 * @param commentedText the comment body
 * @param user author of the comment
 * @param date creation timestamp of the comment
 * @return the freshly created comment
 */
public static Comment createComment(final String entityId,
                                    final String entityType,
                                    final String action,
                                    final String commentedText,
                                    final String user,
                                    final Date date) {
    final Comment result = new Comment();
    result.setEntityId(entityId);
    result.setEntityType(entityType);
    result.setAction(action);
    result.setCommentText(commentedText);
    result.setCommentedBy(user);
    result.setCreatedDateTime(date);
    return result;
} | Generates a comment regarding the parameters.
@param entityId - id of the commented entity
@param entityType - type of the entity
@param action - the action performed by the user
@param commentedText - comment text
@param user - comment left by
@param date - date comment was created
@return - comment entity |
/**
 * Prints a prompt to stdout, then reads one line from stdin.
 *
 * NOTE(review): the reader is deliberately not closed — closing it would
 * close System.in for the whole process. readLine() may return null at EOF,
 * and that null is passed through to the caller. Any exception is swallowed
 * and mapped to the empty string.
 *
 * @param message prompt shown before reading
 * @return the line read, null on EOF, or "" when reading fails
 */
protected String readLineWithMessage(String message)
{
    System.out.print(message + " ");
    try
    {
        BufferedReader rin = new BufferedReader(new InputStreamReader(System.in));
        return rin.readLine();
    }
    catch (Exception e)
    {
        return "";
    }
} | read a single line from stdin and return as String
/**
 * Interactive use case: prompts the user for the attributes of a new
 * Product, then stores it via the broker inside a PB transaction. On any
 * PersistenceBrokerException the transaction is rolled back and the error
 * is printed.
 *
 * NOTE(review): Double.parseDouble / Integer.parseInt will throw an
 * uncaught NumberFormatException on malformed input — confirm whether that
 * is acceptable for this demo use case.
 */
public void apply()
{
    // this will be our new object
    Product newProduct = new Product();
    // thma: attention, no sequence numbers yet for ojb/prevalyer
    newProduct.setId((int)System.currentTimeMillis());
    // now read in all relevant information and fill the new object:
    System.out.println("please enter a new product");
    String in = readLineWithMessage("enter name:");
    newProduct.setName(in);
    in = readLineWithMessage("enter price:");
    newProduct.setPrice(Double.parseDouble(in));
    in = readLineWithMessage("enter available stock:");
    newProduct.setStock(Integer.parseInt(in));
    // now perform persistence operations
    try
    {
        // 1. open transaction
        broker.beginTransaction();
        // 2. make the new object persistent
        broker.store(newProduct);
        broker.commitTransaction();
    }
    catch (PersistenceBrokerException ex)
    {
        // if something went wrong: rollback
        broker.abortTransaction();
        System.out.println(ex.getMessage());
        ex.printStackTrace();
    }
} | perform this use case
public void setObjectForStatement(PreparedStatement ps, int index, Object value, int sqlType) throws SQLException
{
switch (sqlType)
{
case Types.BIT :
ps.setObject(index, value);
break;
case Types.BLOB :
case Types.LONGVARBINARY :
case Types.VARBINARY :
if (value instanceof byte[])
{
byte buf[] = (byte[]) value;
ByteArrayInputStream inputStream = new ByteArrayInputStream(buf);
ps.setBinaryStream(index, inputStream, buf.length);
break;
}
case Types.CLOB :
Reader reader = null;
int length = 0;
if (value instanceof String)
{
reader = new StringReader((String) value);
length = (((String) value)).length();
}
else if (value instanceof char[])
{
String string = new String((char[])value);
reader = new StringReader(string);
length = string.length();
}
else if (value instanceof byte[])
{
byte buf[] = (byte[]) value;
ByteArrayInputStream inputStream = new ByteArrayInputStream(buf);
reader = new InputStreamReader(inputStream);
} | /*
@see Platform#setObjectForStatement(PreparedStatement, int, Object, int) |
/**
 * Writes all registered {@link ObjectEnvelope} instances to the database
 * within the PB transaction that backs the current odmg transaction.
 * The steps below are order-dependent: envelopes are checked, cascaded,
 * locked, reordered for referential integrity, written, and the JDBC batch
 * is flushed before the envelopes are reset for reuse.
 *
 * @param reuse when true the registered objects are prepared for re-use
 *        after writing; false skips that step for better performance
 * @throws TransactionAbortedException when the underlying PB is not in tx
 * @throws LockNotGrantedException on an optimistic locking conflict
 */
public void writeObjects(boolean reuse) throws TransactionAbortedException, LockNotGrantedException
{
    PersistenceBroker broker = transaction.getBroker();
    ConnectionManagerIF connMan = broker.serviceConnectionManager();
    // remember the connection's batch mode so it can be restored in finally
    boolean saveBatchMode = connMan.isBatchMode();
    try
    {
        if(log.isDebugEnabled())
        {
            log.debug(
                    "PB is in internal tx: "
                            + broker.isInTransaction()
                            + " broker was: "
                            + broker);
        }
        // all neccessary db operations are executed within a PersistenceBroker transaction:
        if(!broker.isInTransaction())
        {
            log.error("PB associated with current odmg-tx is not in tx");
            throw new TransactionAbortedException("Underlying PB is not in tx, was begin call done before commit?");
        }
        // Committing has to be done in two phases. First implicitly upgrade to lock on all related
        // objects of objects in this transaction. Then the list of locked objects has to be
        // reordered to solve referential integrity dependencies, then the objects are
        // written into the database.
        // 0. turn on the batch mode
        connMan.setBatchMode(true);
        // 1. mark objects no longer available in collection
        // for delete and add new found objects
        checkAllEnvelopes(broker);
        // 2. mark all dependend objects for cascading insert/delete
        cascadingDependents();
        // 3. upgrade implicit locks.
        //upgradeImplicitLocksAndCheckIfCommitIsNeeded();
        upgradeLockIfNeeded();
        // 4. Reorder objects
        reorder();
        // System.out.println("## ordering: ");
        // for(int i = 0; i < mvOrderOfIds.size(); i++)
        // {
        // System.out.println("" + mvOrderOfIds.get(i));
        // }
        // System.out.println("## ordering end");
        // 5. write objects.
        writeAllEnvelopes(reuse);
        // 6. execute batch
        connMan.executeBatch();
        // 7. Update all Envelopes to new CleanState
        prepareForReuse(reuse);
        // 8. commit cleanup
        afterWriteCleanup();
    }
    catch(Exception e)
    {
        connMan.clearBatch();
        /*
        arminw:
        log only a warn message, because in top-level methods
        a error log will be done ditto
        */
        if(e instanceof OptimisticLockException)
        {
            // make error log to show the full stack trace one time
            log.error("Optimistic lock exception while write objects", e);
            // PB OptimisticLockException should be clearly signalled to the user
            Object sourceObject = ((OptimisticLockException) e).getSourceObject();
            throw new LockNotGrantedException("Optimistic lock exception occur, source object was (" + sourceObject + ")," +
                    " message was (" + e.getMessage() + ")");
        }
        else if(!(e instanceof RuntimeException))
        {
            // NOTE(review): the original cause 'e' is not chained into the
            // new exception here — only its message survives.
            log.warn("Error while write objects for tx " + transaction, e);
            throw new ODMGRuntimeException("Unexpected error while write objects: " + e.getMessage());
        }
        else
        {
            log.warn("Error while write objects for tx " + transaction, e);
            throw (RuntimeException) e;
        }
    }
    finally
    {
        needsCommit = false;
        connMan.setBatchMode(saveBatchMode);
    }
} | Perform write to DB on all registered object wrapper ({@link ObjectEnvelope})
@param reuse When all registered objects be re-used after writing to
DB set <em>true</em>, else set <em>false</em> to improve performance. |
/**
 * Commits all registered envelopes against the current broker, in the
 * order established by {@link #reorder}: first removes m:n indirection
 * entries scheduled for unlink, then commits each envelope's modification
 * state, and finally adds the pending m:n link entries.
 *
 * @param reuse when true, freshly inserted objects are write-locked again
 *        so they can be reused after the commit
 */
private void writeAllEnvelopes(boolean reuse)
{
    // perform remove of m:n indirection table entries first
    performM2NUnlinkEntries();
    Iterator iter;
    // using clone to avoid ConcurentModificationException
    iter = ((List) mvOrderOfIds.clone()).iterator();
    while(iter.hasNext())
    {
        ObjectEnvelope mod = (ObjectEnvelope) mhtObjectEnvelopes.get(iter.next());
        boolean insert = false;
        if(needsCommit)
        {
            insert = mod.needsInsert();
            mod.getModificationState().commit(mod);
            if(reuse && insert)
            {
                getTransaction().doSingleLock(mod.getClassDescriptor(), mod.getObject(), mod.getIdentity(), Transaction.WRITE);
            }
        }
        /*
        arminw: important to call this cleanup method for each registered
        ObjectEnvelope, because this method will e.g. remove proxy listener
        objects for registered objects.
        */
        mod.cleanup(reuse, insert);
    }
    // add m:n indirection table entries
    performM2NLinkEntries();
} | commit all envelopes against the current broker
/**
 * Marks objects no longer present in their collections for delete and
 * registers newly found objects for insert, by asking every non-transient
 * envelope to mark its reference elements.
 *
 * @param broker the PB used to persist all objects
 */
private void checkAllEnvelopes(PersistenceBroker broker)
{
    // iterate over a snapshot to avoid ConcurrentModificationException
    for(Iterator iter = ((List) mvOrderOfIds.clone()).iterator(); iter.hasNext();)
    {
        final ObjectEnvelope envelope = (ObjectEnvelope) mhtObjectEnvelopes.get(iter.next());
        // transient objects take no part in the write cycle
        if(!envelope.getModificationState().isTransient())
        {
            envelope.markReferenceElements(broker);
        }
    }
} | Mark objects no longer available in collection for delete and new objects for insert.
@param broker the PB to persist all objects |
/**
 * Resets the registered envelopes to a clean state so they can be reused
 * after a commit/flush/checkpoint. Envelopes that are already old-clean or
 * transient — or the whole batch when no commit happened — are left alone.
 *
 * @param reuse no-op when false
 */
private void prepareForReuse(boolean reuse)
{
    if(!reuse)
    {
        return;
    }
    // iterate over a snapshot to avoid ConcurrentModificationException
    for(Iterator iter = ((List) mvOrderOfIds.clone()).iterator(); iter.hasNext();)
    {
        final ObjectEnvelope envelope = (ObjectEnvelope) mhtObjectEnvelopes.get(iter.next());
        final boolean untouched = !needsCommit
                || envelope.getModificationState() == StateOldClean.getInstance()
                || envelope.getModificationState().isTransient();
        if(!untouched)
        {
            envelope.setModificationState(envelope.getModificationState().markClean());
        }
    }
} | This method have to be called to reuse all registered {@link ObjectEnvelope}
objects after transaction commit/flush/checkpoint call. |
/**
 * Checks the status of all modified, non-transient objects and upgrades
 * their locks where needed. Sets {@code needsCommit} as soon as any
 * envelope requires a DB operation: inserts always need a commit; existing
 * objects only when they are deleted, updated or detected as changed, in
 * which case they are marked dirty and write-locked if not already.
 */
private void upgradeLockIfNeeded()
{
    // using clone to avoid ConcurentModificationException
    Iterator iter = ((List) mvOrderOfIds.clone()).iterator();
    TransactionImpl tx = getTransaction();
    ObjectEnvelope mod;
    while(iter.hasNext())
    {
        mod = (ObjectEnvelope) mhtObjectEnvelopes.get(iter.next());
        // ignore transient objects
        if(!mod.getModificationState().isTransient())
        {
            /*
            now we check if all modified objects has a write lock. On insert of new
            objects we don't need a write lock.
            */
            if(!mod.needsInsert())
            {
                if((mod.needsDelete() || mod.needsUpdate()
                        || mod.hasChanged(tx.getBroker())))
                {
                    needsCommit = true;
                    // mark object dirty
                    mod.setModificationState(mod.getModificationState().markDirty());
                    ClassDescriptor cld = mod.getClassDescriptor();
                    // if the object isn't already locked, we will do it now
                    if(!mod.isWriteLocked())
                    {
                        tx.doSingleLock(cld, mod.getObject(), mod.getIdentity(), Transaction.WRITE);
                    }
                }
            }
            else
            {
                // new objects always force a commit, no lock upgrade needed
                needsCommit = true;
            }
        }
    }
} | Checks the status of all modified objects and
upgrade the lock if needed, cleanup the {@link ObjectEnvelope}
objects. |
/**
 * Performs a rollback on the modification state of every registered
 * envelope, marking objects that the transaction changed as dirty first so
 * their state rollback restores the original values.
 *
 * NOTE(review): afterWriteCleanup() sits outside the try/finally — if the
 * rollback loop throws, cleanup is skipped although needsCommit was reset.
 * Confirm whether that is intended.
 */
public void rollback()
{
    try
    {
        Iterator iter = mvOrderOfIds.iterator();
        while(iter.hasNext())
        {
            ObjectEnvelope mod = (ObjectEnvelope) mhtObjectEnvelopes.get(iter.next());
            if(log.isDebugEnabled())
                log.debug("rollback: " + mod);
            // if the Object has been modified by transaction, mark object as dirty
            if(mod.hasChanged(transaction.getBroker()))
            {
                mod.setModificationState(mod.getModificationState().markDirty());
            }
            mod.getModificationState().rollback(mod);
        }
    }
    finally
    {
        needsCommit = false;
    }
    afterWriteCleanup();
} | perform rollback on all tx-states
/**
 * Removes an object's entry from the object registry. Accepts either an
 * {@link Identity} directly or an arbitrary persistent object, whose
 * identity is then built via the broker's identity service.
 *
 * @param pKey an Identity or a persistent object
 */
public void remove(Object pKey)
{
    final Identity id = (pKey instanceof Identity)
            ? (Identity) pKey
            : transaction.getBroker().serviceIdentity().buildIdentity(pKey);
    mhtObjectEnvelopes.remove(id);
    mvOrderOfIds.remove(id);
} | remove an objects entry from the object registry
/**
 * Looks up (or registers) the {@link ObjectEnvelope} for the given object.
 * The object's Identity is derived via the broker's identity service and
 * the call is delegated to {@link #get(Identity, Object, boolean)}.
 *
 * @param pKey the persistent object
 * @param isNew whether the object is newly created
 * @return the resulting ObjectEnvelope
 */
public ObjectEnvelope get(Object pKey, boolean isNew)
{
    final Identity oid = transaction.getBroker().serviceIdentity().buildIdentity(pKey);
    return get(oid, pKey, isNew);
} | retrieve an objects ObjectEnvelope state from the hashtable.
If no ObjectEnvelope is found, a new one is created and returned.
@return the resulting ObjectEnvelope |
/**
 * Looks up the {@link ObjectEnvelope} registered under the given identity;
 * when none exists yet, a new envelope is created, registered (both in the
 * identity map and the ordering list) and returned.
 *
 * @param oid identity of the object
 * @param pKey the persistent object
 * @param isNew whether the object is newly created
 * @return the resulting ObjectEnvelope
 */
public ObjectEnvelope get(Identity oid, Object pKey, boolean isNew)
{
    ObjectEnvelope envelope = getByIdentity(oid);
    if(envelope != null)
    {
        return envelope;
    }
    envelope = new ObjectEnvelope(this, oid, pKey, isNew);
    mhtObjectEnvelopes.put(oid, envelope);
    mvOrderOfIds.add(oid);
    if(log.isDebugEnabled())
    {
        log.debug("register: " + envelope);
    }
    return envelope;
} | retrieve an objects ObjectEnvelope state from the hashtable.
If no ObjectEnvelope is found, a new one is created and returned.
@return the resulting ObjectEnvelope |
/**
 * Reorders the registered identities to resolve referential integrity
 * dependencies before writing. Skipped when ordering is disabled, nothing
 * needs committing, or fewer than two envelopes are registered.
 */
private void reorder()
{
    if(!getTransaction().isOrdering() || !needsCommit || mhtObjectEnvelopes.size() <= 1)
    {
        return;
    }
    final ObjectEnvelopeOrdering ordering = new ObjectEnvelopeOrdering(mvOrderOfIds, mhtObjectEnvelopes);
    ordering.reorder();
    final Identity[] newSequence = ordering.getOrdering();
    // replace the old sequence wholesale with the resolved one
    mvOrderOfIds.clear();
    for(int i = 0; i < newSequence.length; i++)
    {
        mvOrderOfIds.add(newSequence[i]);
    }
} | Reorder the objects in the table to resolve referential integrity dependencies.
/**
 * Starts the recursive insert cascade over the object graph of every
 * envelope that was marked for insert, then clears the mark list.
 */
private void cascadeMarkedForInsert()
{
    // guards cascadeInsertFor against endless recursion on circular references
    final List alreadyPrepared = new ArrayList();
    for(int i = 0; i < markedForInsertList.size(); i++)
    {
        final ObjectEnvelope envelope = (ObjectEnvelope) markedForInsertList.get(i);
        // only new objects cascade to their dependent objects
        if(envelope.needsInsert())
        {
            cascadeInsertFor(envelope, alreadyPrepared);
            alreadyPrepared.clear();
        }
    }
    markedForInsertList.clear();
} | Starts recursive insert on all insert objects object graph
private void cascadeInsertFor(ObjectEnvelope mod, List alreadyPrepared)
{
// avoid endless recursion, so use List for registration
if(alreadyPrepared.contains(mod.getIdentity())) return;
alreadyPrepared.add(mod.getIdentity());
ClassDescriptor cld = getTransaction().getBroker().getClassDescriptor(mod.getObject().getClass());
List refs = cld.getObjectReferenceDescriptors(true);
cascadeInsertSingleReferences(mod, refs, alreadyPrepared);
List colls = cld.getCollectionDescriptors(true);
cascadeInsertCollectionReferences(mod, colls, alreadyPrepared);
} | Walk through the object graph of the specified insert object. Was used for
recursive object graph walk. |
/**
 * Starts the recursive delete cascade over the object graph of every
 * envelope that was marked for deletion, then clears the mark list.
 * Objects that were re-associated with another object are skipped.
 */
private void cascadeMarkedForDeletion()
{
    final List alreadyPrepared = new ArrayList();
    for(int i = 0; i < markedForDeletionList.size(); i++)
    {
        final ObjectEnvelope envelope = (ObjectEnvelope) markedForDeletionList.get(i);
        // only cascade when the object wasn't associated with another object
        if(!isNewAssociatedObject(envelope.getIdentity()))
        {
            cascadeDeleteFor(envelope, alreadyPrepared);
            alreadyPrepared.clear();
        }
    }
    markedForDeletionList.clear();
} | Starts recursive delete on all delete objects object graph
/**
 * Walks the object graph of one delete candidate, cascading the delete to
 * its single references and collection references.
 *
 * @param mod the envelope to cascade from
 * @param alreadyPrepared identities already visited (recursion guard)
 */
private void cascadeDeleteFor(ObjectEnvelope mod, List alreadyPrepared)
{
    final Identity oid = mod.getIdentity();
    // avoid endless recursion
    if(alreadyPrepared.contains(oid))
    {
        return;
    }
    alreadyPrepared.add(oid);
    final ClassDescriptor cld = getTransaction().getBroker().getClassDescriptor(mod.getObject().getClass());
    cascadeDeleteSingleReferences(mod, cld.getObjectReferenceDescriptors(true), alreadyPrepared);
    cascadeDeleteCollectionReferences(mod, cld.getCollectionDescriptors(true), alreadyPrepared);
} | Walk through the object graph of the specified delete object. Was used for
recursive object graph walk. |
/**
 * Replaces the {@link org.apache.ojb.broker.Identity} under which an
 * {@link ObjectEnvelope} is registered, keeping its position in the commit
 * ordering list intact.
 *
 * @param newOid the identity to register the envelope under
 * @param oldOid the identity currently registered
 * @return <em>true</em> if successful, <em>false</em> when no envelope was
 *         registered under {@code oldOid}
 */
boolean replaceRegisteredIdentity(Identity newOid, Identity oldOid)
{
    /*
    TODO: Find a better solution
    */
    boolean result = false;
    final Object oe = mhtObjectEnvelopes.remove(oldOid);
    if(oe != null)
    {
        mhtObjectEnvelopes.put(newOid, oe);
        // swap the identity in place instead of remove(index)+add(index,..),
        // which avoids shifting the list twice for the same slot
        final int index = mvOrderOfIds.indexOf(oldOid);
        mvOrderOfIds.set(index, newOid);
        result = true;
        if(log.isDebugEnabled()) log.debug("Replace identity: " + oldOid + " --replaced-by--> " + newOid);
    }
    else
    {
        log.warn("Can't replace unregistered object identity (" + oldOid + ") with new identity (" + newOid + ")");
    }
    return result;
} | Replace the {@link org.apache.ojb.broker.Identity}
of a registered {@link ObjectEnvelope} object.
@param newOid
@param oldOid
@return Returns <em>true</em> if successful. |
/**
 * Initializes the agent: wires file logging and the introspector, reads
 * the {@link MockConfiguration} expected as the first start-up argument,
 * prepares the mailbox, registers the agent with the DF (bounded retries)
 * and installs the message-listening behaviour.
 *
 * @see jade.core.Agent#setup()
 * @throws IllegalArgumentException when no MockConfiguration was supplied
 */
public void setup() {
    LogActivator.logToFile(logger, this.getName(), Level.ALL);
    // Add the agent to the introspector and obtain the introspector
    // instance.
    introspector = JadeAgentIntrospector.getMyInstance(this);
    logger.fine("Agent " + this.getLocalName()
            + " added to the Intrsopector");
    try {
        this.myMockConfiguration = (MockConfiguration) this.getArguments()[0];
    } catch (Exception e) {
        logger.warning("There was an error reading the BridgeMockAgent configuration.");
        // FIX: e.getStackTrace() returns an array, so concatenating it only
        // logged its reference ("[Ljava.lang.StackTraceElement;@..."); pass
        // the throwable to the logger so the full trace is rendered.
        logger.log(Level.FINE, "The stack trace:", e);
        // TODO think about implement this exception or not.
        // throw new BadConfigurationException();
        throw new IllegalArgumentException(
                "The mock agent didn't receive a configuration"
                        + " object. The first argument have to be a MockConfiguration object");
    }
    // Initialize the believes counts and the mailbox list.
    // introspector.storeBeliefValue(this, Definitions.RECEIVED_MESSAGE_COUNT, 0);
    // introspector.storeBeliefValue(this, Definitions.SENDED_MESSAGE_COUNT, 0);
    this.mailbox = new ArrayList<ACLMessage>();
    // Attemps to register the aggent.
    boolean register = false;
    for (int i = 0; !register; i++) {
        try {
            AgentRegistration.registerAgent(this,
                    myMockConfiguration.getDFservice(),
                    Definitions.BRIDGE_SERVICE_TYPE);
            // TODO Service_Type could be a custom field of
            // MockCondiguration.
            register = true;
        } catch (FIPAException e) {
            logger.warning("Exception while registering the RespositoryMockAgent");
        }
        // give up after the configured number of attempts
        if (i >= Definitions.REG_ATTEMPTS) {
            break;
            // TODO check if is necessary to implement this exception.
            // throw new
            // UnableToRegisterException(e.getStackTrace().toString());
        }
    }
    // Adds the behavior that listen for incoming messages.
    addBehaviour(new Listen(this));
} | Initializes the Agent.
@see jade.core.Agent#setup() |
/**
 * Retrieves a collection of {@code itemClass} objects matching the query.
 * Candidates are fetched through an OJBIterator with lifecycle events
 * disabled; proxies may schedule prefetch tasks, and after-lookup events
 * are fired only once all retrieval tasks ran, so objects are fully
 * materialized. The materialization cache is enabled for the duration of
 * the fetch and cleared on failure; DB resources are always released.
 *
 * @param collectionClass type of the collection to be returned
 * @param itemClass class of the items in the collection
 * @param query the query to execute
 * @return the populated collection
 * @throws ClassNotPersistenceCapableException when itemClass has no descriptor
 * @throws PersistenceBrokerException on any retrieval failure
 */
private ManageableCollection getCollectionByQuery(Class collectionClass, Class itemClass, Query query)
        throws ClassNotPersistenceCapableException, PersistenceBrokerException
{
    if (log.isDebugEnabled()) log.debug("getCollectionByQuery (" + collectionClass + ", " + itemClass + ", " + query + ")");
    ClassDescriptor cld = pb.getClassDescriptor(itemClass);
    ManageableCollection result = null;
    OJBIterator iter = null;
    int fullSize = -1;
    int size = 0;
    // true when this call owns the retrieval-task map (outermost call)
    final boolean isRetrievalTasksCreated = batchRetrieval && m_retrievalTasks == null;
    if (isRetrievalTasksCreated)
    {
        // Maps ReferenceDescriptors to HashSets of owners
        m_retrievalTasks = new HashMap();
    }
    // ==> enable materialization cache
    pb.getInternalCache().enableMaterializationCache();
    try
    {
        result = (ManageableCollection) collectionClass.newInstance();
        // now iterate over all elements and add them to the new collection
        // lifecycle events are disabled
        iter = pb.getIteratorFromQuery(query, cld);
        iter.disableLifeCycleEvents();
        // BRJ : get fullSizefor Query
        // to be removed when Query.fullSize is removed
        if (iter instanceof PagingIterator)
        {
            fullSize = iter.fullSize();
        }
        while (iter.hasNext())
        {
            Object candidate = iter.next();
            /**
             * MBAIRD
             * candidate CAN be null in the case of materializing from an iterator based
             * on a query for a class that is mapped to a table that has other classes
             * mapped to that table as well, but aren't extents.
             */
            if (candidate != null)
            {
                IndirectionHandler handler = ProxyHelper.getIndirectionHandler(candidate);
                // proxies are accepted regardless of their concrete type
                if ((handler != null) || itemClass.isAssignableFrom(candidate.getClass()))
                {
                    result.ojbAdd(candidate);
                    // BRJ: count added objects
                    // to be removed when Query.fullSize is removed
                    size++;
                }
                else
                {
                    //warn the user
                    log.warn("Candidate object ["+candidate
                            +"] class ["+candidate.getClass().getName()
                            +"] is not a subtype of ["+itemClass.getName()
                            +"] or any type of proxy. NOT INCLUDED in result collection");
                }
                if (prefetchProxies && (handler != null)
                        && (cld.getProxyPrefetchingLimit() > 0)
                        && addRetrievalTask(candidate, this))
                {
                    new PBMaterializationListener(candidate, m_retrievalTasks,
                            this, cld.getProxyPrefetchingLimit());
                }
            }
        }
        if (isRetrievalTasksCreated)
        {
            // turn off auto prefetching for related proxies
            final Class saveClassToPrefetch = classToPrefetch;
            classToPrefetch = null;
            try
            {
                performRetrievalTasks();
            }
            finally
            {
                classToPrefetch = saveClassToPrefetch;
            }
        }
        // BRJ: fire LifeCycleEvents after execution of RetrievalTasks
        // to ensure objects are fully materialized
        Iterator resultIter = result.ojbIterator();
        while (resultIter.hasNext())
        {
            Object obj = resultIter.next();
            afterLookupEvent.setTarget(obj);
            pb.fireBrokerEvent(afterLookupEvent);
            afterLookupEvent.setTarget(null);
        }
        // ==> disable materialization cache
        pb.getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        // ==> clear materialization cache
        pb.getInternalCache().doLocalClear();
        throw e;
    }
    catch (Exception ex)
    {
        // ==> clear materialization cache
        pb.getInternalCache().doLocalClear();
        log.error(ex);
        throw new PersistenceBrokerException(ex);
    }
    finally
    {
        if (iter != null)
        {
            iter.releaseDbResources();
        }
        if (isRetrievalTasksCreated)
        {
            m_retrievalTasks = null;
        }
    }
    // BRJ: store fullSize in Query to re-enable deprecated functionality
    // to be removed when Query.fullSize is removed
    if (fullSize < 0)
    {
        fullSize = size; // use size of result
    }
    query.fullSize(fullSize);
    return result;
} | retrieve a collection of itemClass Objects matching the Query query
@param collectionClass type the collection to be returned
@param itemClass Class of item in collection
@param query the query |
/**
 * Retrieves a collection of type {@code collectionClass} matching the
 * query. A null query yields an empty collection; when {@code lazy} is
 * true a CollectionProxy is returned instead of a materialized collection.
 *
 * @param collectionClass type of the collection to return
 * @param query the query, may be null
 * @param lazy whether to return a collection proxy
 * @return ManageableCollection
 * @throws PersistenceBrokerException on any retrieval failure
 */
public ManageableCollection getCollectionByQuery(Class collectionClass, Query query, boolean lazy) throws PersistenceBrokerException
{
    try
    {
        // BRJ: return empty Collection for null query
        if (query == null)
        {
            return (ManageableCollection) collectionClass.newInstance();
        }
        if (lazy)
        {
            return pb.getProxyFactory().createCollectionProxy(pb.getPBKey(), query, collectionClass);
        }
        return getCollectionByQuery(collectionClass, query.getSearchClass(), query);
    }
    catch (PersistenceBrokerException e)
    {
        // pass broker exceptions through unchanged
        throw e;
    }
    catch (Exception e)
    {
        throw new PersistenceBrokerException(e);
    }
} | retrieve a collection of type collectionClass matching the Query query
if lazy = true return a CollectionProxy
@param collectionClass
@param query
@param lazy
@return ManageableCollection
@throws PersistenceBrokerException |
/**
 * Retrieves a collection of objects matching the query, defaulting the
 * collection type to {@link RemovalAwareCollection}.
 *
 * @param query the query to execute
 * @param lazy whether to return a collection proxy
 * @return the resulting collection (a proxy when lazy is true)
 * @throws PersistenceBrokerException on any retrieval failure
 */
public Collection getCollectionByQuery(Query query, boolean lazy) throws PersistenceBrokerException
{
    // thma: the following cast is safe because:
    // 1. ManageableVector implements Collection (will be returned if lazy == false)
    // 2. CollectionProxy implements Collection (will be returned if lazy == true)
    return (Collection) getCollectionByQuery(RemovalAwareCollection.class, query, lazy);
} | retrieve a collection of itemClass Objects matching the Query query
/**
 * Drains and executes the stored retrieval (prefetch) tasks. The task map
 * is swapped against a fresh one before each pass because prefetching may
 * register new tasks; the outer loop repeats until no tasks remain.
 * Lazy references and references with a proxy class are skipped.
 *
 * BRJ: made it public to access it from BasePrefetcher
 * TODO: this is a quick fix !
 */
public void performRetrievalTasks()
{
    if (m_retrievalTasks == null)
    {
        return;
    }
    while (m_retrievalTasks.size() > 0)
    {
        HashMap tmp = m_retrievalTasks;
        m_retrievalTasks = new HashMap();
        // during execution of these tasks new tasks may be added
        for (Iterator it = tmp.entrySet().iterator(); it.hasNext(); )
        {
            Map.Entry entry = (Map.Entry) it.next();
            Object key = entry.getKey();
            // only reference-descriptor keys are handled here
            if (!(key instanceof ObjectReferenceDescriptor))
            {
                continue;
            }
            ObjectReferenceDescriptor ord = (ObjectReferenceDescriptor) key;
            RelationshipPrefetcher prefetcher;
            ArrayList owners = (ArrayList) entry.getValue();
            // if (ord instanceof SuperReferenceDescriptor || ord.isLazy() || (ord.getItemProxyClass() != null))
            if (ord.isLazy() || (ord.getItemProxyClass() != null))
            {
                continue;
            }
            prefetcher = pb.getRelationshipPrefetcherFactory().createRelationshipPrefetcher(ord);
            prefetcher.prefetchRelationship(owners);
            it.remove();
        }
    }
} | Perform the stored retrieval tasks
BRJ: made it public to access it from BasePrefetcher
TODO: this is a quick fix ! |
/**
 * Retrieves a single reference attribute of {@code obj} from the backend
 * when <b>cascade-retrieve</b> is configured or {@code forced} is true.
 * Resolution order: null FK -> null; cache hit -> materialize (walking
 * super-references if needed); batch mode -> defer via a retrieval task;
 * otherwise load (possibly as proxy) via getReferencedObject.
 *
 * @param obj object whose field will be set with the referenced object
 * @param cld the ClassDescriptor describing obj
 * @param rds the ObjectReferenceDescriptor of the reference to load
 * @param forced if true, the reference is loaded regardless of cascade setting
 */
public void retrieveReference(Object obj, ClassDescriptor cld, ObjectReferenceDescriptor rds, boolean forced)
{
    PersistentField refField;
    Object refObj = null;
    if (forced || rds.getCascadeRetrieve())
    {
        pb.getInternalCache().enableMaterializationCache();
        try
        {
            Identity id = getReferencedObjectIdentity(obj, rds, cld);
            // false when resolution is deferred to a batched retrieval task
            boolean isRefObjDefined = true;
            if (id == null)
            {
                refObj = null;
            } //JMM : why not see if the object has already been loaded
            else if ( pb.serviceObjectCache().lookup(id) != null )
            {
                refObj = pb.doGetObjectByIdentity(id);
                if (rds.isSuperReferenceDescriptor())
                {
                    // walk the super-references
                    ClassDescriptor superCld = cld.getRepository().getDescriptorFor(rds.getItemClass());
                    retrieveReferences(refObj, superCld, false);
                    retrieveCollections(refObj, superCld, false);
                }
            }
            else if ((m_retrievalTasks != null)
                    && !rds.isLazy()
                    && (rds.getItemProxyClass() == null))
            {
                // batch mode: defer the lookup, leave the field untouched
                addRetrievalTask(obj, rds);
                isRefObjDefined = false;
            }
            else
            {
                refObj = getReferencedObject(id, rds);
            }
            if (isRefObjDefined)
            {
                refField = rds.getPersistentField();
                refField.set(obj, refObj);
                if ((refObj != null) && prefetchProxies
                        && (m_retrievalTasks != null)
                        && (rds.getProxyPrefetchingLimit() > 0))
                {
                    IndirectionHandler handler = ProxyHelper.getIndirectionHandler(refObj);
                    if ((handler != null)
                            && addRetrievalTask(obj, rds))
                    {
                        new PBMaterializationListener(obj, m_retrievalTasks,
                                rds, rds.getProxyPrefetchingLimit());
                    }
                }
            }
            pb.getInternalCache().disableMaterializationCache();
        }
        catch(RuntimeException e)
        {
            pb.getInternalCache().doLocalClear();
            throw e;
        }
    }
} | Retrieve a single Reference.
This implementation retrieves a referenced object from the data backend
if <b>cascade-retrieve</b> is true or if <b>forced</b> is true.
@param obj - object that will have it's field set with a referenced object.
@param cld - the ClassDescriptor describring obj
@param rds - the ObjectReferenceDescriptor of the reference attribute to be loaded
@param forced - if set to true, the reference is loaded even if the rds differs. |
/**
 * Sets a single reference attribute of {@code obj} to a proxy for the
 * referenced identity (or null when all FK values are null), optionally
 * scheduling proxy prefetching.
 *
 * NOTE(review): unlike retrieveReference, the {@code forced} parameter is
 * not consulted here — the proxy is always placed. Confirm that is intended.
 *
 * @param obj object whose field will be set with the proxy
 * @param cld the ClassDescriptor describing obj
 * @param rds the ObjectReferenceDescriptor of the reference to load
 * @param forced declared for symmetry with retrieveReference; unused
 */
public void retrieveProxyReference(Object obj, ClassDescriptor cld, ObjectReferenceDescriptor rds, boolean forced)
{
    PersistentField refField;
    Object refObj = null;
    pb.getInternalCache().enableMaterializationCache();
    try
    {
        Identity id = getReferencedObjectIdentity(obj, rds, cld);
        if (id != null){
            refObj = pb.createProxy(rds.getItemClass(), id);
        }
        refField = rds.getPersistentField();
        refField.set(obj, refObj);
        if ((refObj != null) && prefetchProxies
                && (m_retrievalTasks != null)
                && (rds.getProxyPrefetchingLimit() > 0))
        {
            IndirectionHandler handler = ProxyHelper.getIndirectionHandler(refObj);
            if ((handler != null)
                    && addRetrievalTask(obj, rds))
            {
                new PBMaterializationListener(obj, m_retrievalTasks,
                        rds, rds.getProxyPrefetchingLimit());
            }
        }
        pb.getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        pb.getInternalCache().doLocalClear();
        throw e;
    }
} | Retrieve a single Reference.
This implementation retrieves a referenced object from the data backend
if <b>cascade-retrieve</b> is true or if <b>forced</b> is true.
@param obj - object that will have it's field set with a referenced object.
@param cld - the ClassDescriptor describring obj
@param rds - the ObjectReferenceDescriptor of the reference attribute to be loaded
@param forced - if set to true, the reference is loaded even if the rds differs. |
/**
 * Retrieves all single references of {@code newObj}, temporarily disabling
 * auto prefetching of related proxies and enabling the materialization
 * cache for the duration of the walk.
 *
 * @param newObj the instance to be loaded or refreshed
 * @param cld the ClassDescriptor of the instance
 * @param forced if true, loading is forced even if cascade-retrieve is off
 */
public void retrieveReferences(Object newObj, ClassDescriptor cld, boolean forced) throws PersistenceBrokerException
{
    final Iterator descriptors = cld.getObjectReferenceDescriptors().iterator();
    // turn off auto prefetching for related proxies
    final Class saveClassToPrefetch = classToPrefetch;
    classToPrefetch = null;
    pb.getInternalCache().enableMaterializationCache();
    try
    {
        while (descriptors.hasNext())
        {
            retrieveReference(newObj, cld, (ObjectReferenceDescriptor) descriptors.next(), forced);
        }
        pb.getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        // a failed walk invalidates the materialization cache
        pb.getInternalCache().doLocalClear();
        throw e;
    }
    finally
    {
        classToPrefetch = saveClassToPrefetch;
    }
} | Retrieve all References
@param newObj the instance to be loaded or refreshed
@param cld the ClassDescriptor of the instance
@param forced if set to true loading is forced even if cld differs. |
/**
 * Builds the Identity of the object referenced by {@code rds} from the FK
 * values stored in {@code obj}. Returns null when all FK values are null,
 * unless the reference uses anonymous keys and the referenced object is
 * still reachable through the field — then its identity is rebuilt from
 * the object itself (AK info is lost after serialization).
 *
 * @param obj the owner of the reference
 * @param rds descriptor of the reference attribute
 * @param cld ClassDescriptor of obj
 * @return the referenced object's Identity, or null when unresolvable
 */
private Identity getReferencedObjectIdentity(Object obj, ObjectReferenceDescriptor rds, ClassDescriptor cld)
{
    Object[] fkValues = rds.getForeignKeyValues(obj, cld);
    FieldDescriptor[] fkFieldDescriptors = rds.getForeignKeyFieldDescriptors(cld);
    boolean hasNullifiedFKValue = hasNullifiedFK(fkFieldDescriptors, fkValues);
    /*
    BRJ: if all fk values are null there's no referenced object
    arminw: Supposed the given object has nullified FK values but the referenced
    object still exists. This could happend after serialization of the main object. In
    this case all anonymous field (AK) information is lost, because AnonymousPersistentField class
    use the object identity to cache the AK values. But we can build Identity anyway from the reference
    */
    if (hasNullifiedFKValue)
    {
        if(BrokerHelper.hasAnonymousKeyReference(cld, rds))
        {
            Object referencedObject = rds.getPersistentField().get(obj);
            if(referencedObject != null)
            {
                return pb.serviceIdentity().buildIdentity(referencedObject);
            }
        }
    }
    else
    {
        // ensure that top-level extents are used for Identities
        return pb.serviceIdentity().buildIdentity(rds.getItemClass(), pb.getTopLevelClass(rds.getItemClass()), fkValues);
    }
    return null;
} | retrieves an Object reference's Identity.
<br>
Null is returned if all foreign keys are null |
/**
 * Reports whether every foreign-key value represents null, which is used
 * to avoid creating proxies that could never be materialized.
 *
 * @param fkFieldDescriptors descriptors of the FK fields
 * @param fkValues the FK values, positionally matching the descriptors
 * @return true only when all values represent null
 */
private boolean hasNullifiedFK(FieldDescriptor[] fkFieldDescriptors, Object[] fkValues)
{
    for (int i = 0; i < fkValues.length; i++)
    {
        // a single non-null value means the FK is not nullified
        if (!pb.serviceBrokerHelper().representsNull(fkFieldDescriptors[i], fkValues[i]))
        {
            return false;
        }
    }
    return true;
} | to avoid creation of unmaterializable proxies
/**
 * Retrieves an object reference by its Identity. A proxy is returned when
 * the descriptor is lazy (dynamic proxy of the item class) or declares an
 * explicit proxy class; otherwise a plain getObjectByIdentity lookup runs.
 *
 * @param id identity of the referenced object
 * @param rds descriptor of the reference attribute
 * @return the referenced object or a proxy for it
 * @throws PersistenceBrokerException when proxy instantiation fails
 */
private Object getReferencedObject(Identity id, ObjectReferenceDescriptor rds)
{
    Class baseClassForProxy;
    if (rds.isLazy())
    {
        /*
        arminw:
        use real reference class instead of the top-level class,
        because we want to use a proxy representing the real class
        not only the top-level class - right?
        */
        // referencedProxy = getClassDescriptor(referencedClass).getDynamicProxyClass();
        //referencedProxy = rds.getItemClass();
        /*
         * andrew.clute:
         * With proxy generation now handled by the ProxyFactory implementations, the class of the Item
         * is now the nessecary parameter to generate a proxy.
         */
        baseClassForProxy = rds.getItemClass();
    }
    else
    {
        /*
         * andrew.clute:
         * If the descriptor does not mark it as lazy, then the class for the proxy must be of type VirtualProxy
         */
        baseClassForProxy = rds.getItemProxyClass();
    }
    if (baseClassForProxy != null)
    {
        try
        {
            return pb.createProxy(baseClassForProxy, id);
        }
        catch (Exception e)
        {
            log.error("Error while instantiate object " + id + ", msg: "+ e.getMessage(), e);
            if(e instanceof PersistenceBrokerException)
            {
                throw (PersistenceBrokerException) e;
            }
            else
            {
                throw new PersistenceBrokerException(e);
            }
        }
    }
    else
    {
        // no proxy configured: materialize the object right away
        return pb.doGetObjectByIdentity(id);
    }
} | retrieves an Object reference by its Identity.
<br>
If there is a Proxy-class is defined in the ReferenceDescriptor or
if the ReferenceDescriptor is lazy, a Proxy-object is returned.
<br>
If no Proxy-class is defined, a getObjectByIdentity(...) lookup is performed. |
/**
 * Retrieves a single collection attribute of {@code obj}, honouring the
 * descriptor's own laziness setting.
 *
 * @param obj the object to be updated
 * @param cld the ClassDescriptor describing obj
 * @param cds the CollectionDescriptor of the collection attribute to load
 * @param forced if true, loading is forced even if cds differs
 */
public void retrieveCollection(Object obj, ClassDescriptor cld, CollectionDescriptor cds, boolean forced)
{
    // laziness is taken from the descriptor itself
    doRetrieveCollection(obj, cld, cds, forced, cds.isLazy());
} | Retrieve a single Collection on behalf of <b>obj</b>.
} | Retrieve a single Collection on behalf of <b>obj</b>.
The Collection is retrieved only if <b>cascade.retrieve is true</b>
or if <b>forced</b> is set to true. *
@param obj - the object to be updated
@param cld - the ClassDescriptor describing obj
@param cds - the CollectionDescriptor describing the collection attribute to be loaded
@param forced - if set to true loading is forced, even if cds differs. |
/**
 * Retrieve a single collection attribute on behalf of {@code obj},
 * always as a proxied collection (lazy flag forced to true).
 *
 * @param obj the object whose collection attribute is loaded
 * @param cld the ClassDescriptor describing obj
 * @param cds the CollectionDescriptor of the collection attribute
 * @param forced if true, a proxy is placed regardless of cascade settings
 */
public void retrieveProxyCollection(Object obj, ClassDescriptor cld, CollectionDescriptor cds, boolean forced)
{
    doRetrieveCollection(obj, cld, cds, forced, true);
} | Retrieve a single Proxied Collection on behalf of <b>obj</b>.
The Collection is retrieved only if <b>cascade.retrieve is true</b>
or if <b>forced</b> is set to true.
@param obj - the object to be updated
@param cld - the ClassDescriptor describing obj
@param cds - the CollectionDescriptor describing the collection attribute to be loaded
@param forced - if set to true a proxy will be placed, even if cds differs. |
/**
 * Answers the foreign-key query that loads the collection described by
 * {@code cds}: an indirection-table query for m:n relations, a plain FK
 * query for 1:n. Ordering from the descriptor is applied, and a configured
 * query customizer may adapt or replace the query.
 */
private Query getFKQuery(Object obj, ClassDescriptor cld, CollectionDescriptor cds)
{
    // m:n relations need the indirection-table query, 1:n a plain FK query
    QueryByCriteria fkQueryCrit = cds.isMtoNRelation()
            ? getFKQueryMtoN(obj, cld, cds)
            : getFKQuery1toN(obj, cld, cds);
    // apply the ordering defined in the collection descriptor, if any
    for (Iterator iter = cds.getOrderBy().iterator(); iter.hasNext();)
    {
        fkQueryCrit.addOrderBy((FieldHelper) iter.next());
    }
    // BRJ: let a configured customizer adapt or replace the query
    if (cds.getQueryCustomizer() != null)
    {
        return cds.getQueryCustomizer().customizeQuery(obj, pb, cds, fkQueryCrit);
    }
    return fkQueryCrit;
} | Answer the foreign key query to retrieve the collection
defined by CollectionDescriptor |
/**
 * Builds the m:n foreign-key query for the collection described by
 * {@code cod}: the indirection-table columns pointing to this class are
 * matched against the owner's key values, and the columns pointing to the
 * item class are joined against the item's primary-key attributes.
 * Supports unidirectional m:n via an indirection-table query.
 */
private QueryByCriteria getFKQueryMtoN(Object obj, ClassDescriptor cld, CollectionDescriptor cod)
{
    ValueContainer[] ownerKeys = pb.serviceBrokerHelper().getKeyValues(cld, obj);
    Object[] fksToThis = cod.getFksToThisClass();
    Object[] fksToItem = cod.getFksToItemClass();
    ClassDescriptor itemCld = pb.getClassDescriptor(cod.getItemClass());
    String indTable = cod.getIndirectionTable();
    Criteria crit = new Criteria();
    // owner side: indirection-table FK columns equal the owner's key values
    for (int i = 0; i < fksToThis.length; i++)
    {
        crit.addEqualTo(indTable + "." + fksToThis[i], ownerKeys[i].getValue());
    }
    // item side: indirection-table FK columns join the item's PK attributes
    for (int i = 0; i < fksToItem.length; i++)
    {
        crit.addEqualToField(indTable + "." + fksToItem[i],
                itemCld.getPkFields()[i].getAttributeName());
    }
    return QueryFactory.newQuery(itemCld.getClassOfObject(), indTable, crit);
} | Get Foreign key query for m:n <br>
supports UNIDIRECTIONAL m:n using QueryByMtoNCriteria
@return org.apache.ojb.broker.query.QueryByCriteria
@param obj the owner of the relationship
@param cld the ClassDescriptor for the owner
@param cod the CollectionDescriptor |
/**
 * Builds the 1:n foreign-key query for the collection described by
 * {@code cod}: each FK field of the item class must equal the
 * corresponding key value of the owner {@code obj}.
 */
private QueryByCriteria getFKQuery1toN(Object obj, ClassDescriptor cld, CollectionDescriptor cod)
{
    ValueContainer[] ownerKeys = pb.serviceBrokerHelper().getKeyValues(cld, obj);
    ClassDescriptor itemCld = pb.getClassDescriptor(cod.getItemClass());
    FieldDescriptor[] fkFields = cod.getForeignKeyFieldDescriptors(itemCld);
    Criteria crit = new Criteria();
    for (int i = 0; i < fkFields.length; i++)
    {
        crit.addEqualTo(fkFields[i].getAttributeName(), ownerKeys[i].getValue());
    }
    return QueryFactory.newQuery(itemCld.getClassOfObject(), crit);
} | Get Foreign key query for 1:n
@return org.apache.ojb.broker.query.QueryByCriteria
@param obj
@param cld
@param cod |
/**
 * Builds the primary-key query for the object identified by {@code oid}:
 * one equality criterion per PK field of the object's top-level class.
 *
 * @param oid the Identity of the object to retrieve
 * @return the resulting query
 */
public Query getPKQuery(Identity oid)
{
    Object[] pkValues = oid.getPrimaryKeyValues();
    ClassDescriptor cld = pb.getClassDescriptor(oid.getObjectsTopLevelClass());
    FieldDescriptor[] pkFields = cld.getPkFields();
    Criteria crit = new Criteria();
    for (int i = 0; i < pkFields.length; i++)
    {
        crit.addEqualTo(pkFields[i].getAttributeName(), pkValues[i]);
    }
    return QueryFactory.newQuery(cld.getClassOfObject(), crit);
} | Answer the primary key query to retrieve an Object
@param oid the Identity of the Object to retrieve
@return The resulting query |
/**
 * Retrieve all collection attributes of a given instance (non-proxied).
 *
 * @param newObj the instance to be loaded or refreshed
 * @param cld the ClassDescriptor of the instance
 * @param forced if true, loading is forced even if cascade settings differ
 */
public void retrieveCollections(Object newObj, ClassDescriptor cld, boolean forced) throws PersistenceBrokerException
{
    doRetrieveCollections(newObj, cld, forced, false);
} | Retrieve all Collection attributes of a given instance
@param newObj the instance to be loaded or refreshed
@param cld the ClassDescriptor of the instance
@param forced if set to true, loading is forced even if cld differs |
/**
 * Retrieve all collection attributes of a given instance, forcing all of
 * them to be proxy collections.
 *
 * @param newObj the instance to be loaded or refreshed
 * @param cld the ClassDescriptor of the instance
 * @param forced if true, proxies are placed even if cascade settings differ
 */
public void retrieveProxyCollections(Object newObj, ClassDescriptor cld, boolean forced) throws PersistenceBrokerException
{
    doRetrieveCollections(newObj, cld, forced, true);
} | Retrieve all Collection attributes of a given instance, and make all of the Proxy Collections
@param newObj the instance to be loaded or refreshed
@param cld the ClassDescriptor of the instance
@param forced if set to true, loading is forced even if cld differs |
/**
 * Detach and forget all registered prefetching listeners.
 * A no-op when no listener list exists.
 */
public void removePrefetchingListeners()
{
    if (prefetchingListeners == null)
    {
        return;
    }
    // detach each listener from its broker before clearing the list
    Iterator it = prefetchingListeners.iterator();
    while (it.hasNext())
    {
        ((PBPrefetchingListener) it.next()).removeThisListener();
    }
    prefetchingListeners.clear();
} | remove all prefetching listeners
/**
 * Binds the next query parameter ($i) to the given value.
 * A BetweenCriteria consumes two consecutive bind calls; while it is not
 * yet fully bound the bind iterator is stepped back so the next call hits
 * the same criteria again.
 *
 * @param parameter the value to substitute for the next query parameter
 * @throws org.odmg.QueryParameterCountInvalidException if more parameters
 *         are bound than the query declares (any failure during binding is
 *         reported as this exception type)
 */
public void bind(Object parameter)
    throws org.odmg.QueryParameterCountInvalidException, org.odmg.QueryParameterTypeInvalidException
{
    try
    {
        SelectionCriteria crit = (SelectionCriteria) getBindIterator().next();
        crit.bind(parameter);
        // BRJ: bind is called twice for between
        if (crit instanceof BetweenCriteria && !crit.isBound())
        {
            getBindIterator().previous();
        }
    }
    catch (Exception e)
    {
        // NOTE(review): the original cause is dropped here (only the message
        // survives) because the ODMG exception takes no cause argument
        throw new org.odmg.QueryParameterCountInvalidException(e.getMessage());
    }
} | Bind a parameter to the query.
A parameter is denoted in the query string passed to <code>create</code> by <i>$i</i>,
where <i>i</i> is the rank of the parameter, beginning with 1.
The parameters are set consecutively by calling this method <code>bind</code>.
The <i>ith</i> variable is set by the <i>ith</i> call to the <code>bind</code> method.
If any of the <i>$i</i> are not set by a call to <code>bind</code> at the point
<code>execute</code> is called, <code>QueryParameterCountInvalidException</code> is thrown.
The parameters must be objects, and the result is an <code>Object</code>.
Objects must be used instead of primitive types (<code>Integer</code> instead
of <code>int</code>) for passing the parameters.
<p>
If the parameter is of the wrong type,
<code>QueryParameterTypeInvalidException</code> is thrown.
After executing a query, the parameter list is reset.
@param parameter A value to be substituted for a query parameter.
@exception org.odmg.QueryParameterCountInvalidException The number of calls to
<code>bind</code> has exceeded the number of parameters in the query.
@exception org.odmg.QueryParameterTypeInvalidException The type of the parameter does
not correspond with the type of the parameter in the query. |
/**
 * Create an OQL query from the given query string, selecting the full
 * result range (no start/end index limits).
 *
 * @param queryString an OQL query
 * @throws org.odmg.QueryInvalidException if the query cannot be compiled
 */
public void create(String queryString) throws org.odmg.QueryInvalidException
{
    create(queryString, Query.NO_START_AT_INDEX, Query.NO_END_AT_INDEX);
} | Create an OQL query from the string parameter.
In order to execute a query, an <code>OQLQuery</code> object must be created
by calling <code>Implementation.newOQLQuery</code>, then calling the
<code>create</code> method with the query string.
The <code>create</code> method might throw <code>QueryInvalidException</code>
if the query could not be compiled properly. Some implementations may not want
to compile the query before <code>execute</code> is called. In this case
<code>QueryInvalidException</code> is thrown when <code>execute</code> is called.
@param queryString An OQL query.
@exception QueryInvalidException The query syntax is invalid. |
/**
 * Executes the query inside the current ODMG transaction and returns the
 * result collection. Report queries are collected manually: single-column
 * rows contribute the value itself, multi-column rows the Object[] tuple,
 * and all-null tuples are skipped. Afterwards the bind iterator is rewound
 * so the query instance can be reused.
 */
public Object execute() throws org.odmg.QueryException
{
    if (log.isDebugEnabled()) log.debug("Start execute query");
    //obtain current ODMG transaction
    TransactionImpl tx = odmg.getTxManager().getTransaction();
    // create PBCapsule
    PBCapsule capsule = null;
    ManageableCollection result = null;
    try
    {
        capsule = new PBCapsule(odmg.getCurrentPBKey(), tx);
        PersistenceBroker broker = capsule.getBroker();
        // ask the broker to perform the query.
        // the concrete result type is configurable
        if (!(query instanceof ReportQuery))
        {
            result = broker.getCollectionByQuery(this.getCollectionClass(), query);
            performLockingIfRequired(tx, broker, result);
        }
        else
        {
            // report queries yield tuples; collect them into a list manually
            Iterator iter = null;
            result = new ManageableArrayList();
            iter = broker.getReportQueryIteratorByQuery(query);
            try
            {
                while (iter.hasNext())
                {
                    Object[] res = (Object[]) iter.next();
                    if (res.length == 1)
                    {
                        if (res[0] != null) // skip null values
                        {
                            result.ojbAdd(res[0]);
                        }
                    }
                    else
                    {
                        // skip null tuples
                        for (int i = 0; i < res.length; i++)
                        {
                            if (res[i] != null)
                            {
                                result.ojbAdd(res);
                                break;
                            }
                        }
                    }
                }
            }
            finally
            {
                // always release the iterator's db resources
                if (iter instanceof OJBIterator)
                {
                    ((OJBIterator) iter).releaseDbResources();
                }
            }
        }
        // reset iterator to start of list so we can reuse this query
        ListIterator it = getBindIterator();
        while (it.hasPrevious())
        {
            it.previous();
        }
    }
    finally
    {
        // hand the broker instance back in any case
        if (capsule != null) capsule.destroy();
    }
    return result;
} | Execute the query.
After executing a query, the parameter list is reset.
Some implementations may throw additional exceptions that are also derived
from <code>ODMGException</code>.
@return The object that represents the result of the query.
The returned data, whatever its OQL type, is encapsulated into an object.
For instance, when OQL returns an integer, the result is put into an
<code>Integer</code> object. When OQL returns a collection (literal or object),
the result is always a Java collection object of the same kind
(for instance, a <code>DList</code>).
@exception org.odmg.QueryException An exception has occurred while executing the query. |
/**
 * Finish initializing the service: build the Infinispan cache manager
 * (from the XML configuration file when given, otherwise programmatically),
 * register the cache listener, then construct the cache services for the
 * default configuration and for each layer-specific configuration.
 *
 * @throws IOException when the cache configuration file cannot be read
 */
@PostConstruct
protected void init() throws IOException {
    // base configuration from XML file
    if (null != configurationFile) {
        log.debug("Get base configuration from {}", configurationFile);
        manager = new DefaultCacheManager(configurationFile);
    } else {
        GlobalConfigurationBuilder builder = new GlobalConfigurationBuilder();
        // allow several cache managers with the same JMX domain in one JVM
        builder.globalJmxStatistics().allowDuplicateDomains(true);
        manager = new DefaultCacheManager(builder.build());
    }
    if (listener == null) {
        listener = new InfinispanCacheListener();
    }
    manager.addListener(listener);
    // cache for caching the cache configurations (hmmm, sounds a bit strange)
    Map<String, Map<CacheCategory, CacheService>> cacheCache =
            new HashMap<String, Map<CacheCategory, CacheService>>();
    // build default configuration
    if (null != defaultConfiguration) {
        setCaches(cacheCache, null, defaultConfiguration);
    }
    // build layer specific configurations
    for (Layer layer : layerMap.values()) {
        CacheInfo ci = configurationService.getLayerExtraInfo(layer.getLayerInfo(), CacheInfo.class);
        if (null != ci) {
            setCaches(cacheCache, layer, ci);
        }
    }
} | Finish initializing service.
@throws IOException when the cache configuration file cannot be read
/**
 * Registers the layer-specific cache services in the {@code caches} map.
 * Caches for a given configuration id are built only once and reused
 * across layers thanks to the {@code cacheCache} lookup.
 *
 * @param cacheCache cache of already-built cache service maps, keyed by config id
 * @param layer layer (or null for the default configuration)
 * @param cacheInfo cache configuration info
 */
private void setCaches(Map<String, Map<CacheCategory, CacheService>> cacheCache, Layer layer,
        CacheInfo cacheInfo) {
    // build the caches for this configuration only once, then reuse
    Map<CacheCategory, CacheService> ciCaches = cacheCache.get(cacheInfo.getId());
    if (ciCaches == null) {
        ciCaches = createCaches(cacheInfo);
        cacheCache.put(cacheInfo.getId(), ciCaches);
    }
    // register each category-specific cache under its (category, layer) selector
    for (Map.Entry<CacheCategory, CacheService> entry : ciCaches.entrySet()) {
        caches.put(new CacheSelector(entry.getKey(), layer), entry.getValue());
    }
} | Add layer specific cache configuration in the caches variable. This assures each cache is only built once and
possibly reused (thanks to cacheCache).
@param cacheCache cache for built caches
@param layer layer (or null)
@param cacheInfo cache configuration info |
/**
 * Replaces the current filter with one matching ElementName against the
 * given list of values. Any operator other than NOT_IN is treated as IN.
 * The old search filter is discarded.
 */
public void matchList(String ElementName, Vector values, int oper)
{
    // drop the old filter
    m_filter = null;
    // normalize: anything but NOT_IN means IN (ints are passed by value)
    if (oper != NOT_IN)
    {
        oper = IN;
    }
    // copy the vector of match values into a plain string array
    String[] matchValues = new String[values.size()];
    values.copyInto(matchValues);
    // store a leaf node for this list as the new filter
    m_filter = new SearchBaseLeaf(ElementName, oper, matchValues);
} | Change the search filter to one that specifies an element to
match or not match one of a list of values.
The old search filter is deleted.
@param ElementName is the name of the element to be matched
@param values is a vector of possible matches
@param oper is the IN or NOT_IN operator to indicate how to match
/**
 * Replaces the current filter with one matching ElementName against the
 * given array of values. Any operator other than NOT_IN is treated as IN.
 * The old search filter is discarded.
 */
public void matchList(String ElementName, String[] values, int oper)
{
    // drop the old filter
    m_filter = null;
    // normalize the operator and store a leaf node as the new filter
    m_filter = new SearchBaseLeaf(ElementName, oper == NOT_IN ? NOT_IN : IN, values);
} | Change the search filter to one that specifies an element to not
match one of a list of values.
The old search filter is deleted.
@param ElementName is the name of the element to be matched
@param values is an array of possible matches
@param oper is the IN or NOT_IN operator to indicate how to match
/**
 * Replaces the current filter with one matching ElementName against a
 * single value. Any operator other than NOT_IN is treated as IN.
 * The old search filter is discarded.
 */
public void matchValue(String ElementName, String value, int oper)
{
    // drop the old filter
    m_filter = null;
    // normalize: anything but NOT_IN means IN (ints are passed by value)
    if (oper != NOT_IN)
    {
        oper = IN;
    }
    // wrap the single value in the array form the leaf node expects
    m_filter = new SearchBaseLeaf(ElementName, oper, new String[] { value });
} | Change the search filter to one that specifies an element to not
match one single value.
The old search filter is deleted.
@param ElementName is the name of the element to be matched
@param value is the value to not be matched
@param oper is the IN or NOT_IN operator to indicate how to match
/**
 * Replaces the current filter with one matching ElementName against a
 * single int value. Any operator other than NOT_IN is treated as IN.
 * The old search filter is discarded.
 */
public void matchValue(String ElementName, int value, int oper)
{
    // drop the old filter
    m_filter = null;
    // normalize: anything but NOT_IN means IN (ints are passed by value)
    if (oper != NOT_IN)
    {
        oper = IN;
    }
    // int values use the specialized int leaf node
    m_filter = new SearchBaseLeafInt(ElementName, oper, new int[] { value });
} | -----------------------------------------------------------
@param ElementName
@param value
@param oper |
/**
 * Replaces the current filter with a comparison of ElementName against
 * value using the given binary operator. The old filter is discarded.
 *
 * @throws DBException if oper is not a binary comparison operator
 */
public void compareFilter(String ElementName, String value, int oper) throws DBException
{
    // drop the old filter first
    m_filter = null;
    // only binary comparison operators are legal here
    if ((oper & BINARY_OPER_MASK) == 0)
    {
        throw new DBException();
    }
    // store a comparison node as the new filter
    m_filter = new SearchBaseLeafComparison(ElementName, oper, value);
} | Change the search filter to one that compares an element name to a value.
The old search filter is deleted.
@param ElementName is the name of the element to be tested
@param value is the value to be compared against
@param oper is the binary comparison operator to be used
@exception DBException |
/**
 * Replaces the current filter with a set of comparisons, one per key/value
 * pair in {@code elements}, combined by the given logical operator.
 * The old filter is discarded.
 *
 * @throws DBException if combine_op is not a logical operator or
 *         compare_op is not a binary comparison operator
 */
public void matchSet(Hashtable elements, int combine_op, int compare_op) throws DBException
{
    // drop the old filter
    m_filter = null;
    // validate both operators before building anything
    if ((combine_op & LOGICAL_OPER_MASK) == 0)
    {
        throw new DBException();
    }
    if ((compare_op & BINARY_OPER_MASK) == 0)
    {
        throw new DBException();
    }
    // build one comparison node per key/value pair
    Vector comparisons = new Vector();
    for (Enumeration e = elements.keys(); e.hasMoreElements();)
    {
        String name = (String) e.nextElement();
        String value = (String) elements.get(name);
        comparisons.addElement(new SearchBaseLeafComparison(name, compare_op, value));
    }
    // combine all comparisons under one logical node
    m_filter = new SearchBaseNode(combine_op, comparisons);
} | Change the search filter to one that specifies a set of elements and their values
that must match, and the operator to use to combine the elements.
Each key is compared for an equal match to the value, and all
comparisons are combined by the specified logical operator (OR or AND).
The old search filter is deleted.
@param elements is a hashtable holding key-value pairs
@param combine_op is the logical operator to be used to combine the comparisons
@param compare_op is the binary operator to be used for the comparisons
@exception DBException |
/**
 * Replaces the current filter with a set of equality (IN) matches, one per
 * name/value pair, combined by the given logical operator.
 * The old filter is discarded.
 *
 * @throws DBException if op is not a logical operator
 */
public void matchSet(String[] ElementNames, String[] ElementValues, int op) throws DBException
{
    // drop the old filter
    m_filter = null;
    // only logical operators may combine the matches
    if ((op & LOGICAL_OPER_MASK) == 0)
    {
        throw new DBException();
    }
    // build one IN leaf per name/value pair
    Vector leaves = new Vector();
    for (int i = 0; i < ElementNames.length; i++)
    {
        leaves.addElement(new SearchBaseLeaf(ElementNames[i], IN, ElementValues[i]));
    }
    // combine all leaves under one logical node
    m_filter = new SearchBaseNode(op, leaves);
} | Change the search filter to one that specifies a set of elements and their values
that must match, and the operator to use to combine the elements.
Each element name is compared for an equal match to the value, and all
comparisons are combined by the specified logical operator (OR or AND).
The old search filter is deleted.
@param ElementNames is an array of names of elements to be tested
@param ElementValues is an array of values for the corresponding element
@param op is the logical operator to be used to combine the comparisons
@exception DBException |
/**
 * Combines the given search filters with this one using the logical
 * operator {@code op}.
 *
 * @param new_filters vector of SearchFilter instances to combine
 * @param op logical operator used to combine the filters
 * @throws DBException if op is not a logical operator
 */
public void combine(Vector new_filters, int op) throws DBException
{
    // only logical operators may combine filters
    if ((op & LOGICAL_OPER_MASK) == 0)
    {
        throw new DBException();
    }
    // collect the base filters wrapped inside the SearchFilter objects
    Vector baseFilters = new Vector();
    for (Enumeration e = new_filters.elements(); e.hasMoreElements();)
    {
        baseFilters.addElement(((SearchFilter) e.nextElement()).getFilter());
    }
    // combine the current filter with the new ones under a single node
    m_filter = new SearchBaseNode(op, m_filter, baseFilters);
} | Combine other search filters with this one, using the specific operator.
@param new_filters is a vector of SearchFilter classes to be combined
@param op is the logical operator to be used to combine the filters
@exception DBException |
/**
 * Combines one other search filter with this one using the logical
 * operator {@code op}.
 *
 * @param new_filter the SearchFilter to combine with this one
 * @param op logical operator used to combine the filters
 * @throws DBException if op is not a logical operator
 */
public void combine(SearchFilter new_filter, int op) throws DBException
{
    // only logical operators may combine filters
    if ((op & LOGICAL_OPER_MASK) == 0)
    {
        throw new DBException();
    }
    // wrap the single filter in a vector and combine as usual
    Vector baseFilters = new Vector();
    baseFilters.addElement(new_filter.getFilter());
    m_filter = new SearchBaseNode(op, m_filter, baseFilters);
} | Combine one other search filters with this one, using the specific operator.
@param new_filter is the SearchFilter class to be combined
@param op is the logical operator to be used to combine the filters
@exception DBException |
/**
 * Converts a binary comparison operator constant into its SQL token.
 * Unknown operators fall back to "=" (same as EQUAL), matching the
 * original default-case behavior.
 *
 * @param oper the binary comparison operator to convert
 * @return the SQL operator string
 */
protected static String ConvertBinaryOperator(int oper)
{
    switch (oper)
    {
        case LIKE:
            return "LIKE";
        case NOT_EQUAL:
            return "!=";
        case LESS_THAN:
            return "<";
        case GREATER_THAN:
            return ">";
        case GREATER_EQUAL:
            return ">=";
        case LESS_EQUAL:
            return "<=";
        case EQUAL:
        default:
            return "=";
    }
} | Static method to convert a binary operator into a string.
@param oper is the binary comparison operator to be converted |
/**
 * Performs this use case: lists all Product instances on stdout.
 */
public void apply()
{
    System.out.println("The list of available products:");
    // The set of all instances of a persistent class is its "Extent";
    // a QueryByCriteria without criteria selects exactly that extent.
    Query query = new QueryByCriteria(Product.class, null);
    try
    {
        // ask the broker for the extent collection and print each product
        Collection allProducts = broker.getCollectionByQuery(query);
        for (java.util.Iterator iter = allProducts.iterator(); iter.hasNext();)
        {
            System.out.println(iter.next());
        }
    }
    catch (Throwable t)
    {
        t.printStackTrace();
    }
} | perform this use case
public void writeObject(Object o, GraphicsDocument document, boolean asChild) throws RenderException {
document.writeElement("vml:shape", asChild);
Point p = (Point) o;
String adj = document.getFormatter().format(p.getX()) + ","
+ document.getFormatter().format(p.getY());
document.writeAttribute("adj", adj);
} | Writes the object to the specified document, optionally creating a child
element. The object in this case should be a point.
@param o the object (of type Point).
@param document the document to write to.
@param asChild create child element if true.
@throws RenderException |
/**
 * Sets the criteria value to newValue. Used by the ODMG OQLQuery.bind()
 * operation. BRJ: bind gets called twice, so the first call fills value
 * and the second fills value2 and marks the criteria as bound.
 */
public void bind(Object newValue)
{
    if (getValue() != null)
    {
        // second call: fill the second slot and mark fully bound
        setValue2(newValue);
        setBound(true);
    }
    else
    {
        // first call: fill the primary value
        setValue(newValue);
    }
} | sets the value of the criteria to newValue.
Used by the ODMG OQLQuery.bind() operation
BRJ: bind get's called twice so we need to know which value to set |
Subsets and Splits