code
stringlengths 67
466k
| docstring
stringlengths 1
13.2k
|
---|---|
/**
 * Returns the {@link JdbcConnectionDescriptor} registered for the given
 * {@link PBKey}, or <code>null</code> if none can be found or derived.
 * If no exact match (alias + user + password) is registered, a deep copy of
 * the first descriptor with the same alias is created, re-keyed with the
 * supplied user/password, marked as non-default and registered.
 *
 * @param pbKey the key to look up
 * @return the matching descriptor or <code>null</code>
 */
public JdbcConnectionDescriptor getDescriptor(PBKey pbKey)
{
// fast path: exact key already registered
// NOTE(review): this read is unsynchronized while addDescriptor synchronizes
// on jcdMap - confirm the map implementation is safe for concurrent reads
JdbcConnectionDescriptor result = (JdbcConnectionDescriptor) jcdMap.get(pbKey);
if (result == null)
{
// no exact match - derive a new descriptor from one sharing the alias
result = deepCopyOfFirstFound(pbKey.getAlias());
if (result != null)
{
result.setUserName(pbKey.getUser());
result.setPassWord(pbKey.getPassword());
// this build connection descriptor could not be the default connection
result.setDefaultConnection(false);
log.info("Automatic create of new jdbc-connection-descriptor for PBKey " + pbKey);
addDescriptor(result);
}
else
{
log.info("Could not find " + JdbcConnectionDescriptor.class.getName() + " for PBKey " + pbKey);
}
}
return result;
} | Returns the matching {@link JdbcConnectionDescriptor}
or <code>null</code> if no descriptor could be found. The user name
and password will be set to match the supplied <code>PBKey</code>
object. If the original user name and password are desired, the PBKey
should be obtained with {@link #getStandardPBKeyForJcdAlias(String)}. |
/**
 * Returns a deep (serialization-based) copy of the first registered
 * connection descriptor whose alias equals <code>jcdAlias</code>,
 * or <code>null</code> when no descriptor carries that alias.
 *
 * @param jcdAlias the alias to search for
 * @return a detached clone of the matching descriptor, or <code>null</code>
 */
private JdbcConnectionDescriptor deepCopyOfFirstFound(String jcdAlias)
{
    for (Iterator it = jcdMap.values().iterator(); it.hasNext();)
    {
        JdbcConnectionDescriptor candidate = (JdbcConnectionDescriptor) it.next();
        if (jcdAlias.equals(candidate.getJcdAlias()))
        {
            // clone so that callers can modify the copy without side effects
            return (JdbcConnectionDescriptor) SerializationUtils.clone(candidate);
        }
    }
    return null;
} | Returns a deep copy of the first found connection descriptor
with the given <code>jcdAlias</code> name or <code>null</code>
if none found. |
/**
 * Registers the given {@link JdbcConnectionDescriptor} under its PBKey.
 * The alias-to-PBKey mapping is only created for the first descriptor with
 * a given alias, so automatically generated descriptors never displace the
 * original mapping.
 *
 * @param jcd the descriptor to add
 * @throws MetadataException if a descriptor with the same PBKey already exists
 */
public void addDescriptor(JdbcConnectionDescriptor jcd)
{
synchronized (jcdMap)
{
// reject duplicates - callers must remove the old descriptor first
if (jcdMap.containsKey(jcd.getPBKey()))
{
throw new MetadataException("Found duplicate connection descriptor using PBKey " +
jcd.getPBKey() + ", remove the old descriptor first, before add the new one. " + jcd);
}
jcdMap.put(jcd.getPBKey(), jcd);
// only if the jcdAlias was not found, put the new PBKey,
// because we don't want to replace the original PBKey with
// automatic generated descriptors PBKey's - see method getDescriptor(PBKey key)
if (!jcdAliasToPBKeyMap.containsKey(jcd.getJcdAlias()))
{
jcdAliasToPBKeyMap.put(jcd.getJcdAlias(), jcd.getPBKey());
}
if (log.isDebugEnabled()) log.debug("New descriptor was added: " + jcd);
}
} | Add a new {@link JdbcConnectionDescriptor}. |
/**
 * Creates, configures and registers a new connection descriptor for the given
 * JDBC connection url. The platform ("dbms") is guessed from the sub-protocol
 * and driver and should be verified by the caller via getDbms(); the jdbc
 * level is hard-wired to 2.0.
 *
 * @param jcdAlias alias for the new connection; "default" marks it as the default connection
 * @param jdbcDriver fully qualified jdbc driver class name
 * @param jdbcConnectionUrl the jdbc connection url to parse
 * @param username user name, may be <code>null</code>
 * @param password password, only set when username is non-null
 * @return the registered descriptor
 */
public JdbcConnectionDescriptor addDescriptor(String jcdAlias, String jdbcDriver, String jdbcConnectionUrl, String username, String password)
{
JdbcConnectionDescriptor jcd = new JdbcConnectionDescriptor();
// split the url into protocol / sub-protocol / database alias parts
HashMap props = utils.parseConnectionUrl(jdbcConnectionUrl);
jcd.setJcdAlias(jcdAlias);
jcd.setProtocol((String)props.get(JdbcMetadataUtils.PROPERTY_PROTOCOL));
jcd.setSubProtocol((String)props.get(JdbcMetadataUtils.PROPERTY_SUBPROTOCOL));
jcd.setDbAlias((String)props.get(JdbcMetadataUtils.PROPERTY_DBALIAS));
// best-effort platform guess - callers should double-check getDbms()
String platform = utils.findPlatformFor(jcd.getSubProtocol(), jdbcDriver);
jcd.setDbms(platform);
jcd.setJdbcLevel(2.0);
jcd.setDriver(jdbcDriver);
if (username != null)
{
jcd.setUserName(username);
jcd.setPassWord(password);
}
if ("default".equals(jcdAlias))
{
jcd.setDefaultConnection(true);
// arminw: MM will search for the default key
// MetadataManager.getInstance().setDefaultPBKey(jcd.getPBKey());
}
addDescriptor(jcd);
return jcd;
} | Creates and adds a new connection descriptor for the given JDBC connection url.
This method tries to guess the platform to be used, but it should be checked
afterwards nonetheless using the {@link JdbcConnectionDescriptor#getDbms()} method.
For properties that are not part of the url, the following standard values are
explicitly set:
<ul>
<li>jdbc level = 2.0</li>
</ul>
@param jcdAlias The connection alias for the created connection; if 'default' is used,
then the new descriptor will become the default connection descriptor
@param jdbcDriver The fully qualified jdbc driver name
@param jdbcConnectionUrl The connection url of the form '[protocol]:[sub protocol]:[database-specific path]'
where protocol is usually 'jdbc'
@param username The user name (can be <code>null</code>)
@param password The password (can be <code>null</code>)
@return The created connection descriptor
@see JdbcConnectionDescriptor#getDbms() |
/**
 * Creates, configures and registers a new connection descriptor backed by the
 * given DataSource. No driver class is set because it cannot be derived from
 * a DataSource.
 *
 * @param jcdAlias alias for the new connection; "default" marks it as the default connection
 * @param dataSource the data source backing the connection
 * @param username user name, may be <code>null</code>
 * @param password password, only set when username is non-null
 * @return the registered descriptor
 */
public JdbcConnectionDescriptor addDescriptor(String jcdAlias, DataSource dataSource, String username, String password)
{
JdbcConnectionDescriptor jcd = new JdbcConnectionDescriptor();
jcd.setJcdAlias(jcdAlias);
jcd.setDataSource(dataSource);
if (username != null)
{
jcd.setUserName(username);
jcd.setPassWord(password);
}
// derive the remaining settings (platform etc.) from the data source
utils.fillJCDFromDataSource(jcd, dataSource, username, password);
if ("default".equals(jcdAlias))
{
jcd.setDefaultConnection(true);
// arminw: MM will search for the default key
// MetadataManager.getInstance().setDefaultPBKey(jcd.getPBKey());
}
addDescriptor(jcd);
return jcd;
} | Creates and adds a new connection descriptor for the given JDBC data source.
This method tries to guess the platform to be used, but it should be checked
afterwards nonetheless using the {@link JdbcConnectionDescriptor#getDbms()} method.
Note that the descriptor won't have a value for the driver because it is not possible
to retrieve the driver classname from the data source.
@param jcdAlias The connection alias for the created connection; if 'default' is used,
then the new descriptor will become the default connection descriptor
@param dataSource The data source
@param username The user name (can be <code>null</code>)
@param password The password (can be <code>null</code>)
@return The created connection descriptor
@see JdbcConnectionDescriptor#getDbms() |
/**
 * Removes a descriptor from the registry.
 *
 * @param validKey either the {@link JdbcConnectionDescriptor} itself
 *        or its associated {@link PBKey}
 * @throws MetadataException if the argument is neither of the accepted types
 */
public void removeDescriptor(Object validKey)
{
PBKey pbKey;
// accept either the descriptor or its key
if (validKey instanceof PBKey)
{
pbKey = (PBKey) validKey;
}
else if (validKey instanceof JdbcConnectionDescriptor)
{
pbKey = ((JdbcConnectionDescriptor) validKey).getPBKey();
}
else
{
throw new MetadataException("Could not remove descriptor, given object was no vaild key: " +
validKey);
}
Object removed = null;
synchronized (jcdMap)
{
removed = jcdMap.remove(pbKey);
// NOTE(review): the alias->PBKey entry is removed unconditionally, even when
// removing an auto-generated descriptor whose alias entry belongs to the
// original descriptor (see addDescriptor) - confirm this is intended
jcdAliasToPBKeyMap.remove(pbKey.getAlias());
}
log.info("Remove descriptor: " + removed);
} | Remove a descriptor.
@param validKey This could be the {@link JdbcConnectionDescriptor}
itself, or the associated {@link JdbcConnectionDescriptor#getPBKey PBKey}. |
/**
 * Creates an {@link org.apache.ojb.broker.Identity} for the given object or
 * proxy: transient when at least one PK field is 'null' (a unique transient
 * value is generated for that field), persistent when all PK fields are
 * populated. For a transient object a previously created transient Identity
 * is reused via objectToIdentityMap so repeated calls return the same instance.
 *
 * @param cld the class descriptor, may be <code>null</code> (looked up if needed)
 * @param objOrProxy the object or proxy to identify, must not be <code>null</code>
 * @return the created or cached Identity
 * @throws OJBRuntimeException if objOrProxy is <code>null</code>
 */
protected Identity createTransientOrRealIdentity(ClassDescriptor cld, Object objOrProxy)
{
if(objOrProxy == null) throw new OJBRuntimeException("Can't create Identity for 'null'-object");
Identity result = null;
Class topLevelClass = null;
Class realClass = null;
Object[] pks = null;
try
{
final IndirectionHandler handler = ProxyHelper.getIndirectionHandler(objOrProxy);
// NOTE(review): synchronizes on the user-supplied object - confirm callers
// cannot hold conflicting locks on it
synchronized(objOrProxy)
{
if(handler != null)
{
// proxy: the indirection handler already knows the identity
result = handler.getIdentity();
}
else
{
// now we are sure that the specified object is not a proxy
realClass = objOrProxy.getClass();
topLevelClass = broker.getTopLevelClass(objOrProxy.getClass());
if(cld == null)
{
cld = broker.getClassDescriptor(objOrProxy.getClass());
}
BrokerHelper helper = broker.serviceBrokerHelper();
FieldDescriptor[] fields = cld.getPkFields();
pks = new Object[fields.length];
FieldDescriptor fld;
for(int i = 0; i < fields.length; i++)
{
fld = fields[i];
/*
we check all PK fields for 'null'-values
*/
Object value = fld.getPersistentField().get(objOrProxy);
if(helper.representsNull(fld, value))
{
// transient object: reuse a previously handed-out transient Identity
result = (Identity) objectToIdentityMap.get(objOrProxy);
if(result == null)
{
// generate a unique transient value for the first null PK field
pks[i] = transientSequenceManager.getUniqueValue(fld);
result = new Identity(realClass, topLevelClass, pks, true);
//if(activeTx) objectToIdentityMap.put(objOrProxy, result);
objectToIdentityMap.put(objOrProxy, result);
}
break;
}
else
{
pks[i] = value;
}
}
if(result == null)
{
// all PK fields populated -> persistent identity
result = new Identity(realClass, topLevelClass, pks, false);
}
}
}
}
catch(ClassNotPersistenceCapableException e)
{
throw e;
}
catch(Exception e)
{
throw createException(e, "Can not init Identity for given object.", objOrProxy, topLevelClass, realClass, pks);
}
return result;
} | This methods creates a new transient (if at least one PK field is 'null') or persistent
(if the PK fields are populated) {@link org.apache.ojb.broker.Identity} instance. If the specified object
is transient and former call for the same object returns already a transient Identity, the same transient
Identity object will be returned. |
/**
 * Reorders the given field values so that they line up with the declared
 * {@link org.apache.ojb.broker.metadata.FieldDescriptor} order.
 *
 * @param flds the field descriptors defining the target order
 * @param fieldNames the names matching the unordered values
 * @param fieldValues the unordered values
 * @return the values rearranged into descriptor order
 */
private Object[] reorderFieldValues(FieldDescriptor[] flds, String[] fieldNames, Object[] fieldValues)
{
    Object[] orderedValues = new Object[flds.length];
    for (int pos = 0; pos < flds.length; pos++)
    {
        // locate the supplied value belonging to this descriptor's field name
        String wantedName = flds[pos].getPersistentField().getName();
        orderedValues[pos] = fieldValues[findIndexForName(fieldNames, wantedName)];
    }
    return orderedValues;
} | This method orders the specified field values based on the
specified {@link org.apache.ojb.broker.metadata.FieldDescriptor}.
@param flds The {@link org.apache.ojb.broker.metadata.FieldDescriptor} array.
@param fieldNames The field names.
@param fieldValues The field values.
@return The ordered field values. |
/**
 * Returns the position of <code>searchName</code> within the given name array.
 *
 * @param fieldNames the array to scan
 * @param searchName the name to locate
 * @return the index of the first match
 * @throws PersistenceBrokerException if the name does not occur in the array
 */
private int findIndexForName(String[] fieldNames, String searchName)
{
    int index = 0;
    while (index < fieldNames.length)
    {
        if (searchName.equals(fieldNames[index]))
        {
            return index;
        }
        index++;
    }
    throw new PersistenceBrokerException("Can't find field name '" + searchName +
            "' in given array of field names");
} | Find the index of the specified name in field name array. |
/**
 * Checks that the supplied PK field names match the PK fields declared in
 * metadata, both in count and in order.
 * <p>
 * Fix: the original guard only rejected a <code>null</code> name array when
 * more than one PK field was declared, then unconditionally dereferenced
 * <code>pkFieldNames.length</code> - an NPE whenever exactly one (or zero)
 * PK field was declared and <code>null</code> names were passed. The count
 * comparison now treats a <code>null</code> array as zero names, matching
 * the intent of the original error message.
 *
 * @param flds the declared PK field descriptors
 * @param pkFieldNames the field names supplied by the caller, may be <code>null</code>
 * @return <code>true</code> if every supplied name matches the declared field at the same position
 * @throws PersistenceBrokerException if the counts differ
 */
private boolean isOrdered(FieldDescriptor[] flds, String[] pkFieldNames)
{
    // a null array counts as zero supplied names
    final int givenCount = (pkFieldNames != null) ? pkFieldNames.length : 0;
    if (flds.length != givenCount)
    {
        throw new PersistenceBrokerException("pkFieldName length does not match number of defined PK fields." +
                " Expected number of PK fields is " + flds.length + ", given number was " + givenCount);
    }
    // compare declared PK field names position by position
    for (int i = 0; i < flds.length; i++)
    {
        if (!flds[i].getPersistentField().getName().equals(pkFieldNames[i]))
        {
            return false;
        }
    }
    return true;
} | Checks length and compare order of field names with declared PK fields in metadata. |
/**
 * Builds a {@link PersistenceBrokerException} with a detailed multi-line
 * message assembled from whatever context is available; every parameter
 * may be <code>null</code>.
 *
 * @param ex the causing exception or <code>null</code>
 * @param message the error message or <code>null</code>
 * @param objectToIdentify the object being processed or <code>null</code>
 * @param topLevelClass the object's top-level class or <code>null</code>
 * @param realClass the object's real class or <code>null</code>
 * @param pks the PK values collected so far or <code>null</code>
 * @return the generated exception (cause attached when ex is non-null)
 */
private PersistenceBrokerException createException(final Exception ex, String message, final Object objectToIdentify, Class topLevelClass, Class realClass, Object[] pks)
{
final String eol = SystemUtils.LINE_SEPARATOR;
StringBuffer msg = new StringBuffer();
if(message == null)
{
msg.append("Unexpected error: ");
}
else
{
msg.append(message).append(" :");
}
// append each piece of context only when available
if(topLevelClass != null) msg.append(eol).append("objectTopLevelClass=").append(topLevelClass.getName());
if(realClass != null) msg.append(eol).append("objectRealClass=").append(realClass.getName());
if(pks != null) msg.append(eol).append("pkValues=").append(ArrayUtils.toString(pks));
if(objectToIdentify != null) msg.append(eol).append("object to identify: ").append(objectToIdentify);
if(ex != null)
{
// add causing stack trace
Throwable rootCause = ExceptionUtils.getRootCause(ex);
if(rootCause != null)
{
msg.append(eol).append("The root stack trace is --> ");
String rootStack = ExceptionUtils.getStackTrace(rootCause);
msg.append(eol).append(rootStack);
}
return new PersistenceBrokerException(msg.toString(), ex);
}
else
{
return new PersistenceBrokerException(msg.toString());
}
} | Helper method which supports creation of proper error messages.
@param ex An exception to include or <em>null</em>.
@param message The error message or <em>null</em>.
@param objectToIdentify The current used object or <em>null</em>.
@param topLevelClass The object top-level class or <em>null</em>.
@param realClass The object real class or <em>null</em>.
@param pks The associated PK values of the object or <em>null</em>.
@return The generated exception. |
/**
 * Reorders the object envelopes so that referential constraints are respected.
 * Modified envelopes (insert/update/delete) become vertices of a weighted
 * dependency graph; unmodified envelopes keep their original relative position
 * at the front of the new order. The graph is then consumed by repeatedly
 * emitting all vertices with the currently minimal incoming edge weight - a
 * heuristic topological sort that also terminates on cyclic graphs (the
 * minimum is not required to be zero). The result is stored in newOrder.
 */
public void reorder()
{
int newOrderIndex = 0;
long t1 = 0, t2 = 0, t3;
if (log.isDebugEnabled())
{
t1 = System.currentTimeMillis();
}
newOrder = new Identity[originalOrder.size()];
if(log.isDebugEnabled()) log.debug("Orginal order: " + originalOrder);
// set up the vertex array in the order the envelopes were added
List vertexList = new ArrayList(originalOrder.size());
// int vertexIndex = 0;
for (Iterator it = originalOrder.iterator(); it.hasNext();)
{
ObjectEnvelope envelope = (ObjectEnvelope) envelopes.get(it.next());
if (envelope.needsUpdate() || envelope.needsInsert() || envelope.needsDelete())
{
Vertex vertex = new Vertex(envelope);
vertexList.add(vertex);
if (log.isDebugEnabled())
{
log.debug("Add new Vertex object "+envelope.getIdentity()+" to VertexList");
}
}
else
{
// envelope is clean - just add identity to new order
newOrder[newOrderIndex++] = envelope.getIdentity();
if (log.isDebugEnabled())
{
log.debug("Add unmodified object "+envelope.getIdentity()+" to new OrderList");
}
}
}
vertices = (Vertex[]) vertexList.toArray(new Vertex[vertexList.size()]);
// set up the edges
edgeList = new ArrayList(2 * vertices.length);
for (int i = 0; i < vertices.length; i++)
{
addEdgesForVertex(vertices[i]);
}
if (log.isDebugEnabled())
{
t2 = System.currentTimeMillis();
log.debug("Building object envelope graph took " + (t2 - t1) + " ms");
log.debug("Object envelope graph contains " + vertices.length + " vertices" + " and " + edgeList.size()
+ " edges");
}
// consume the graph: each iteration emits all vertices whose incoming
// edge weight equals the current minimum
int remainingVertices = vertices.length;
int iterationCount = 0;
while (remainingVertices > 0)
{
// update iteration count
iterationCount++;
// update incoming edge counts
for (Iterator it = edgeList.iterator(); it.hasNext();)
{
Edge edge = (Edge) it.next();
if (!edge.isProcessed())
{
if(log.isDebugEnabled())
{
final String msg = "Add weight '"+edge.getWeight()+"' for terminal vertex " + edge.getTerminalVertex() + " of edge " + edge;
log.debug(msg);
}
edge.getTerminalVertex().incrementIncomingEdgeWeight(edge.getWeight());
}
}
// find minimum weight of incoming edges of a vertex
int minIncomingEdgeWeight = Integer.MAX_VALUE;
for (int i = 0; i < vertices.length; i++)
{
Vertex vertex = vertices[i];
if (!vertex.isProcessed() && minIncomingEdgeWeight > vertex.getIncomingEdgeWeight())
{
minIncomingEdgeWeight = vertex.getIncomingEdgeWeight();
if (minIncomingEdgeWeight == 0)
{
// we won't get any lower
break;
}
}
}
// process vertices having minimum incoming edge weight
int processCount = 0;
for (int i = 0; i < vertices.length; i++)
{
Vertex vertex = vertices[i];
if (!vertex.isProcessed() && vertex.getIncomingEdgeWeight() == minIncomingEdgeWeight)
{
newOrder[newOrderIndex++] = vertex.getEnvelope().getIdentity();
vertex.markProcessed();
processCount++;
if (log.isDebugEnabled())
{
log.debug("add minimum edge weight - "+minIncomingEdgeWeight
+ ", newOrderList: " + ArrayUtils.toString(newOrder));
}
}
// weights are recomputed from scratch at the top of the next iteration
vertex.resetIncomingEdgeWeight();
}
if (log.isDebugEnabled())
{
log.debug("Processed " + processCount + " of " + remainingVertices
+ " remaining vertices in iteration #" + iterationCount);
}
remainingVertices -= processCount;
}
if (log.isDebugEnabled())
{
t3 = System.currentTimeMillis();
log.debug("New ordering: " + ArrayUtils.toString(newOrder));
log.debug("Processing object envelope graph took " + (t3 - t2) + " ms");
}
} | Reorders the object envelopes. The new order is available from the
<code>ordering</code> property.
@see #getOrdering() |
/**
 * Collects all edges originating from the given object envelope vertex and
 * adds them to edgeList: one pass over the 1:1/n:1 reference descriptors,
 * one pass over the collection descriptors (1:n and m:n).
 *
 * @param vertex the vertex to find edges for
 */
private void addEdgesForVertex(Vertex vertex)
{
    ClassDescriptor cld = vertex.getEnvelope().getClassDescriptor();
    // single-object references (1:1 / n:1)
    for (Iterator it = cld.getObjectReferenceDescriptors(true).iterator(); it.hasNext();)
    {
        addObjectReferenceEdges(vertex, (ObjectReferenceDescriptor) it.next());
    }
    // collection references (1:n / m:n)
    for (Iterator it = cld.getCollectionDescriptors(true).iterator(); it.hasNext();)
    {
        addCollectionEdges(vertex, (CollectionDescriptor) it.next());
    }
} | Adds all edges for a given object envelope vertex. All edges are
added to the edgeList map.
@param vertex the Vertex object to find edges for |
/**
 * Scans all vertices for objects (potentially) referenced by the given vertex
 * through the supplied 1:1/n:1 reference descriptor and adds the resulting
 * edges to edgeList.
 *
 * @param vertex the object envelope vertex holding the object reference
 * @param rds the object reference descriptor
 */
private void addObjectReferenceEdges(Vertex vertex, ObjectReferenceDescriptor rds)
{
Object refObject = rds.getPersistentField().get(vertex.getEnvelope().getRealObject());
Class refClass = rds.getItemClass();
for (int i = 0; i < vertices.length; i++)
{
Edge edge = null;
// ObjectEnvelope envelope = vertex.getEnvelope();
Vertex refVertex = vertices[i];
ObjectEnvelope refEnvelope = refVertex.getEnvelope();
// identity comparison: the currently referenced object yields a concrete edge
if (refObject == refEnvelope.getRealObject())
{
edge = buildConcrete11Edge(vertex, refVertex, rds.hasConstraint());
}
// any other type-compatible object might have been referenced before -> potential edge
else if (refClass.isInstance(refVertex.getEnvelope().getRealObject()))
{
edge = buildPotential11Edge(vertex, refVertex, rds.hasConstraint());
}
if (edge != null)
{
if (!edgeList.contains(edge))
{
edgeList.add(edge);
}
else
{
// NOTE(review): this increases the weight of the NEW edge object, which
// is then discarded - the equal edge already stored in edgeList is left
// untouched. Looks like the stored edge was meant to be updated; confirm.
edge.increaseWeightTo(edge.getWeight());
}
}
}
} | Finds edges based to a specific object reference descriptor and
adds them to the edge map.
@param vertex the object envelope vertex holding the object reference
@param rds the object reference descriptor |
/**
 * Scans all vertices for objects (potentially) contained in the given vertex's
 * collection (1:n or m:n) and adds the resulting edges to edgeList.
 *
 * @param vertex the object envelope vertex holding the collection
 * @param cds the collection descriptor
 */
private void addCollectionEdges(Vertex vertex, CollectionDescriptor cds)
{
ObjectEnvelope envelope = vertex.getEnvelope();
Object col = cds.getPersistentField().get(envelope.getRealObject());
Object[] refObjects;
// an unloaded collection proxy is treated as empty so it is not materialized here
if (col == null || (ProxyHelper.isCollectionProxy(col) && !ProxyHelper.getCollectionProxy(col).isLoaded()))
{
refObjects = EMPTY_OBJECT_ARRAY;
}
else
{
refObjects = BrokerHelper.getCollectionArray(col);
}
Class refClass = cds.getItemClass();
for (int i = 0; i < vertices.length; i++)
{
Edge edge = null;
Vertex refVertex = vertices[i];
ObjectEnvelope refEnvelope = refVertex.getEnvelope();
if (refClass.isInstance(refEnvelope.getRealObject()))
{
// currently contained -> concrete edge; merely type-compatible -> potential edge
if (containsObject(refEnvelope.getRealObject(), refObjects))
{
if (cds.isMtoNRelation())
{
edge = buildConcreteMNEdge(vertex, refVertex);
}
else
{
edge = buildConcrete1NEdge(vertex, refVertex);
}
}
else
{
if (cds.isMtoNRelation())
{
edge = buildPotentialMNEdge(vertex, refVertex);
}
else
{
edge = buildPotential1NEdge(vertex, refVertex);
}
}
}
if (edge != null)
{
if (!edgeList.contains(edge))
{
edgeList.add(edge);
}
else
{
// NOTE(review): same pattern as addObjectReferenceEdges - the weight of
// the NEW (discarded) edge is increased, not the stored one; confirm.
edge.increaseWeightTo(edge.getWeight());
}
}
}
} | Finds edges base on a specific collection descriptor (1:n and m:n)
and adds them to the edge map.
@param vertex the object envelope vertex holding the collection
@param cds the collection descriptor |
/**
 * Tests whether the given array contains the object, using reference
 * equality (==), not equals().
 *
 * @param searchFor the object to look for
 * @param searchIn the array to scan
 * @return true if the identical object is present, otherwise false
 */
private static boolean containsObject(Object searchFor, Object[] searchIn)
{
    int i = searchIn.length;
    while (i-- > 0)
    {
        if (searchIn[i] == searchFor)
        {
            return true;
        }
    }
    return false;
} | Helper method that searches an object array for the occurence of a
specific object based on reference equality
@param searchFor the object to search for
@param searchIn the array to search in
@return true if the object is found, otherwise false |
/**
 * Builds a directed edge for two envelopes related via a current 1:1 (or n:1)
 * reference when their database operations must be ordered; returns null when
 * any order is fine. Insert of the referenced object must precede the holder
 * taking its reference; delete of the holder must precede delete of the
 * referenced object.
 *
 * @param vertex1 vertex of the object holding the reference
 * @param vertex2 vertex of the referenced object
 * @param fkToRef whether a FK constraint exists towards the referenced object
 * @return the edge, or null if no ordering is required
 */
protected Edge buildConcrete11Edge(Vertex vertex1, Vertex vertex2, boolean fkToRef)
{
    ModificationState holderState = vertex1.getEnvelope().getModificationState();
    ModificationState refState = vertex2.getEnvelope().getModificationState();
    int weight = fkToRef ? CONCRETE_EDGE_WEIGHT_WITH_FK : CONCRETE_EDGE_WEIGHT;
    if (holderState.needsUpdate() || holderState.needsInsert())
    {
        if (refState.needsInsert())
        {
            // the referenced row must exist before the holder can point to it
            return new Edge(vertex2, vertex1, weight);
        }
    }
    else if (holderState.needsDelete() && refState.needsDelete())
    {
        // the holder points to the referenced row and must be deleted first
        return new Edge(vertex1, vertex2, weight);
    }
    return null;
} | Checks if the database operations associated with two object envelopes
that are related via an 1:1 (or n:1) reference needs to be performed
in a particular order and if so builds and returns a corresponding
directed edge weighted with <code>CONCRETE_EDGE_WEIGHT</code>.
The following cases are considered (* means object needs update, + means
object needs insert, - means object needs to be deleted):
<table>
<tr><td>(1)* -(1:1)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)* -(1:1)-> (2)+</td><td>(2)->(1) edge</td></tr>
<tr><td>(1)* -(1:1)-> (2)-</td><td>no edge (cannot occur)</td></tr>
<tr><td>(1)+ -(1:1)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)+ -(1:1)-> (2)+</td><td>(2)->(1) edge</td></tr>
<tr><td>(1)+ -(1:1)-> (2)-</td><td>no edge (cannot occur)</td></tr>
<tr><td>(1)- -(1:1)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)- -(1:1)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)- -(1:1)-> (2)-</td><td>(1)->(2) edge</td></tr>
<table>
@param vertex1 object envelope vertex of the object holding the reference
@param vertex2 object envelope vertex of the referenced object
@return an Edge object or null if the two database operations can
be performed in any order |
protected Edge buildPotential11Edge(Vertex vertex1, Vertex vertex2, boolean fkToRef)
{
ModificationState state1 = vertex1.getEnvelope().getModificationState();
ModificationState state2 = vertex2.getEnvelope().getModificationState();
if (state1.needsUpdate() || state1.needsDelete())
{
if (state2.needsDelete())
{
// old version of (1) might point to (2)
return new Edge(vertex1, vertex2, fkToRef ? POTENTIAL_EDGE_WEIGHT_WITH_FK : POTENTIAL_EDGE_WEIGHT);
}
}
return null;
} | Checks if the database operations associated with two object envelopes
that might have been related via an 1:1 (or n:1) reference before
the current transaction needs to be performed
in a particular order and if so builds and returns a corresponding
directed edge weighted with <code>POTENTIAL_EDGE_WEIGHT</code>.
The following cases are considered (* means object needs update, + means
object needs insert, - means object needs to be deleted):
<table>
<tr><td>(1)* -(1:1)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)* -(1:1)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)* -(1:1)-> (2)-</td><td>(1)->(2) edge</td></tr>
<tr><td>(1)+ -(1:1)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)+ -(1:1)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)+ -(1:1)-> (2)-</td><td>no edge</td></tr>
<tr><td>(1)- -(1:1)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)- -(1:1)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)- -(1:1)-> (2)-</td><td>(1)->(2) edge</td></tr>
<table>
@param vertex1 object envelope vertex of the object that might have
hold the reference
@param vertex2 object envelope vertex of the potentially referenced
object
@return an Edge object or null if the two database operations can
be performed in any order |
/**
 * Builds a directed edge for two envelopes currently related via an m:n
 * collection when their database operations must be ordered; returns
 * <code>null</code> when any order is fine.
 *
 * @param vertex1 vertex of the object holding the collection
 * @param vertex2 vertex of the object contained in the collection
 * @return the edge, or <code>null</code> if no ordering is required
 */
protected Edge buildConcreteMNEdge(Vertex vertex1, Vertex vertex2)
{
ModificationState state1 = vertex1.getEnvelope().getModificationState();
ModificationState state2 = vertex2.getEnvelope().getModificationState();
if (state1.needsUpdate() || state1.needsInsert())
{
if (state2.needsInsert())
{
// (2) must be inserted before we can create a link to it
return new Edge(vertex2, vertex1, CONCRETE_EDGE_WEIGHT);
}
}
else if (state1.needsDelete())
{
if (state2.needsDelete())
{
// there is a link from (1) to (2) which must be deleted first,
// which will happen when deleting (1) - thus:
// NOTE(review): POTENTIAL_EDGE_WEIGHT is used although this is the
// "concrete" builder (the javadoc promises CONCRETE_EDGE_WEIGHT) -
// confirm whether this is intentional
return new Edge(vertex1, vertex2, POTENTIAL_EDGE_WEIGHT);
}
}
return null;
} | Checks if the database operations associated with two object envelopes
} | Checks if the database operations associated with two object envelopes
that are related via an m:n collection reference needs to be performed
in a particular order and if so builds and returns a corresponding
directed edge weighted with <code>CONCRETE_EDGE_WEIGHT</code>.
The following cases are considered (* means object needs update, + means
object needs insert, - means object needs to be deleted):
<table>
<tr><td>(1)* -(m:n)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)* -(m:n)-> (2)+</td><td>(2)->(1) edge</td></tr>
<tr><td>(1)* -(m:n)-> (2)-</td><td>no edge (cannot occur)</td></tr>
<tr><td>(1)+ -(m:n)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)+ -(m:n)-> (2)+</td><td>(2)->(1) edge</td></tr>
<tr><td>(1)+ -(m:n)-> (2)-</td><td>no edge (cannot occur)</td></tr>
<tr><td>(1)- -(m:n)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)- -(m:n)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)- -(m:n)-> (2)-</td><td>(1)->(2) edge</td></tr>
<table>
@param vertex1 object envelope vertex of the object holding the
collection
@param vertex2 object envelope vertex of the object contained in the
collection
@return an Edge object or null if the two database operations can
be performed in any order |
protected Edge buildPotentialMNEdge(Vertex vertex1, Vertex vertex2)
{
ModificationState state1 = vertex1.getEnvelope().getModificationState();
ModificationState state2 = vertex2.getEnvelope().getModificationState();
if (state1.needsUpdate() || state1.needsDelete())
{
if (state2.needsDelete())
{
// old version of (1) might comprise a link to (2)
return new Edge(vertex1, vertex2, POTENTIAL_EDGE_WEIGHT);
}
}
return null;
} | Checks if the database operations associated with two object envelopes
that might have been related via an m:n collection reference before
the current transaction needs to be performed
in a particular order and if so builds and returns a corresponding
directed edge weighted with <code>POTENTIAL_EDGE_WEIGHT</code>.
The following cases are considered (* means object needs update, + means
object needs insert, - means object needs to be deleted):
<table>
<tr><td>(1)* -(m:n)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)* -(m:n)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)* -(m:n)-> (2)-</td><td>(1)->(2) edge</td></tr>
<tr><td>(1)+ -(m:n)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)+ -(m:n)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)+ -(m:n)-> (2)-</td><td>no edge</td></tr>
<tr><td>(1)- -(m:n)-> (2)*</td><td>no edge</td></tr>
<tr><td>(1)- -(m:n)-> (2)+</td><td>no edge</td></tr>
<tr><td>(1)- -(m:n)-> (2)-</td><td>(1)->(2) edge</td></tr>
<table>
@param vertex1 object envelope vertex of the object holding the
collection
@param vertex2 object envelope vertex of the object that might have
been contained in the collection
@return an Edge object or null if the two database operations can
be performed in any order |
/**
 * Flattens the multi-valued response headers into a single-valued map:
 * each header's values are joined into one comma-separated string.
 * <p>
 * Improvements: the third-party Guava {@code Joiner} is replaced with the
 * equivalent {@link String#join(CharSequence, Iterable)} from the JDK, and
 * the redundant intermediate {@code HashMap} copy of the headers is removed
 * (the entries are only read once).
 *
 * @param response the response whose headers are flattened
 * @return map from header name to comma-joined header values
 */
protected static Map<String, String> getHeadersAsMap(ResponseEntity response) {
    Map<String, String> map = new HashMap<>();
    for (Map.Entry<String, List<String>> header : response.getHeaders().entrySet()) {
        map.put(header.getKey(), String.join(",", header.getValue()));
    }
    return map;
} | Flattens the map of string lists into a map of strings holding the original values, separated by commas
/**
 * Establishes the initial configuration state: fresh (empty) property sets,
 * no fetch-size limit, and conservative pool defaults - connections are
 * validated when borrowed, all other validation and abandoned-connection
 * handling is switched off.
 */
private void init()
{
    jdbcProperties = new Properties();
    dbcpProperties = new Properties();
    setFetchSize(0);
    // validate connections on borrow only
    setTestOnBorrow(true);
    setTestOnReturn(false);
    setTestWhileIdle(false);
    // abandoned-connection tracking disabled by default
    setLogAbandoned(false);
    setRemoveAbandoned(false);
} | Set some initial values. |
/**
 * Sets a custom configuration attribute. Names carrying the JDBC prefix are
 * stored (prefix stripped) in jdbcProperties, names carrying the DBCP prefix
 * in dbcpProperties; everything else - including a null name - is delegated
 * to the superclass.
 *
 * @param attributeName the attribute name, possibly prefixed
 * @param attributeValue the attribute value
 */
public void addAttribute(String attributeName, String attributeValue)
{
    if (attributeName == null)
    {
        super.addAttribute(attributeName, attributeValue);
    }
    else if (attributeName.startsWith(JDBC_PROPERTY_NAME_PREFIX))
    {
        // strip the prefix; used when creating connections via DriverManager
        jdbcProperties.setProperty(attributeName.substring(JDBC_PROPERTY_NAME_LENGTH), attributeValue);
    }
    else if (attributeName.startsWith(DBCP_PROPERTY_NAME_PREFIX))
    {
        // strip the prefix; handed to Commons DBCP
        dbcpProperties.setProperty(attributeName.substring(DBCP_PROPERTY_NAME_LENGTH), attributeValue);
    }
    else
    {
        super.addAttribute(attributeName, attributeValue);
    }
} | Sets a custom configuration attribute.
@param attributeName the attribute name. Names starting with
{@link #JDBC_PROPERTY_NAME_PREFIX} will be used (without the prefix) by the
ConnectionFactory when creating connections from DriverManager
(not used for external DataSource connections). Names starting with
{@link #DBCP_PROPERTY_NAME_PREFIX} to Commons DBCP (if used, also without prefix).
@param attributeValue the attribute value |
/**
 * Fills the children of this node: creates one OjbMetaClassDescriptorNode per
 * class descriptor in the repository, sorts them, replaces alChildren and
 * notifies the tree model that the structure changed.
 *
 * @return always <code>true</code>
 */
protected boolean _load ()
{
java.util.Iterator it =
this.getOjbMetaTreeModel ().getRepository().iterator();
java.util.ArrayList newChildren = new java.util.ArrayList();
/* @todo make this work */
// newChildren.add(new OjbMetaJdbcConnectionDescriptorNode(
// this.getOjbMetaTreeModel ().getRepository(),
// this.getOjbMetaTreeModel (),
// this,
// this.getOjbMetaTreeModel ().getRepository().getDefaultJdbcConnection()));
while (it.hasNext())
{
org.apache.ojb.broker.metadata.ClassDescriptor cld = (org.apache.ojb.broker.metadata.ClassDescriptor)it.next();
OjbMetaClassDescriptorNode cldNode =
new OjbMetaClassDescriptorNode(this.getOjbMetaTreeModel ().getRepository(),
this.getOjbMetaTreeModel (),
this, cld);
// remember the descriptor -> node mapping for later lookups
cldToNodes.put(cld, cldNode);
newChildren.add(cldNode);
}
java.util.Collections.sort(newChildren);
this.alChildren = newChildren;
// tell the tree model the whole substructure of this node changed
this.getOjbMetaTreeModel ().nodeStructureChanged(this);
return true;
} | Purpose of this method is to fill the children of the node. It should
replace all children in alChildren (the arraylist containing the children)
of this node and notify the TreeModel that a change has occurred. |
/**
 * Binds this editor to the given target. Only DBMetaTableNode targets are
 * supported; the table-name text field is refreshed from the target's
 * table-name attribute.
 *
 * @param target the node to edit
 * @throws UnsupportedOperationException if target is not a DBMetaTableNode
 */
public void setEditorTarget (PropertyEditorTarget target)
{
    if (!(target instanceof DBMetaTableNode))
    {
        throw new UnsupportedOperationException("This editor can only edit DBMetaTableNode objects");
    }
    super.setEditorTarget(target);
    // mirror the target's table name into the text field
    this.tfTableName.setText((String) target.getAttribute(DBMetaTableNode.ATT_TABLE_NAME));
} | GEN-END:initComponents |
/**
 * Replaces the token and the authentications of this security context and
 * recalculates the derived user info. Authorizations implementing
 * AuthorizationNeedsWiring are wired with the application context first.
 * <p/>
 * This method can be overwritten to handle custom policies.
 *
 * @param token current token
 * @param authentications authentications for token, may be <code>null</code>
 */
@Api
public void setAuthentications(String token, List<Authentication> authentications) {
this.token = token;
this.authentications.clear();
if (null != authentications) {
for (Authentication auth : authentications) {
// give authorizations that need it access to the application context
for (BaseAuthorization ba : auth.getAuthorizations()) {
if (ba instanceof AuthorizationNeedsWiring) {
((AuthorizationNeedsWiring) ba).wire(applicationContext);
}
}
this.authentications.add(auth);
}
}
// refresh the derived user fields (id, name, locale, ...) and the context id
userInfoInit();
} | Set the token and authentications for this security context.
<p/>
This method can be overwritten to handle custom policies.
@param token current token
@param authentications authentications for token |
/**
 * Recalculates the derived user info (id, name, locale, organization,
 * division) from the current authentications and rebuilds the context "id".
 * The id is made independent of the order in which authentications and
 * authorizations were added by sorting before concatenation.
 */
private void userInfoInit() {
boolean first = true;
// reset all derived fields before recombining
userId = null;
userLocale = null;
userName = null;
userOrganization = null;
userDivision = null;
if (null != authentications) {
for (Authentication auth : authentications) {
userId = combine(userId, auth.getUserId());
userName = combine(userName, auth.getUserName());
if (first) {
userLocale = auth.getUserLocale();
first = false;
} else {
// locales must agree across all authentications, otherwise none is kept
if (null != auth.getUserLocale() &&
(null == userLocale || !userLocale.equals(auth.getUserLocale()))) {
userLocale = null;
}
}
userOrganization = combine(userOrganization, auth.getUserOrganization());
userDivision = combine(userDivision, auth.getUserDivision());
}
}
// now calculate the "id" for this context, this should be independent of the data order, so sort
Map<String, List<String>> idParts = new HashMap<String, List<String>>();
if (null != authentications) {
for (Authentication auth : authentications) {
List<String> auths = new ArrayList<String>();
for (BaseAuthorization ba : auth.getAuthorizations()) {
auths.add(ba.getId());
}
Collections.sort(auths);
idParts.put(auth.getSecurityServiceId(), auths);
}
}
// format: auth1|auth2@serviceA|auth3@serviceB (keys and auths both sorted)
StringBuilder sb = new StringBuilder();
List<String> sortedKeys = new ArrayList<String>(idParts.keySet());
Collections.sort(sortedKeys);
for (String key : sortedKeys) {
if (sb.length() > 0) {
sb.append('|');
}
List<String> auths = idParts.get(key);
first = true;
for (String ak : auths) {
if (first) {
first = false;
} else {
sb.append('|');
}
sb.append(ak);
}
sb.append('@');
sb.append(key);
}
id = sb.toString();
} | Calculate UserInfo strings.
/**
 * Combines two user-info strings: the addition is appended (comma-separated)
 * unless it is null or already present in the base string as the whole value,
 * a leading element or a trailing element.
 *
 * @param org base string to append to (may be null)
 * @param add string to add (may be null)
 * @return the combined string
 */
private String combine(String org, String add) {
    if (null == org) {
        return add;
    }
    // skip the addition when it is absent or already contained
    boolean alreadyPresent = null == add
            || org.equals(add)
            || org.startsWith(add + ", ")
            || org.endsWith(", " + add);
    return alreadyPresent ? org : org + ", " + add;
} | Combine user information strings.
<p/>
Extra information is appended (separated by a comma) if not yet present in the string.
@param org
base string to append to (avoiding duplication).
@param add
string to add
@return org + ", " + add |
/**
 * Restores the security context from persisted state: rebuilds an
 * {@link Authentication} per saved authentication and installs them with a
 * <code>null</code> token.
 * <p/>
 * Fix: the trailing extra <code>userInfoInit()</code> call was removed -
 * <code>setAuthentications</code> already recalculates the user info
 * unconditionally, so the second call was redundant.
 *
 * @param savedAuthorization saved authorizations, may be <code>null</code>
 */
@Api
public void restoreSecurityContext(SavedAuthorization savedAuthorization) {
    List<Authentication> auths = new ArrayList<Authentication>();
    if (null != savedAuthorization) {
        for (SavedAuthentication sa : savedAuthorization.getAuthentications()) {
            Authentication auth = new Authentication();
            auth.setSecurityServiceId(sa.getSecurityServiceId());
            auth.setAuthorizations(sa.getAuthorizations());
            auths.add(auth);
        }
    }
    // installs the authentications and recalculates the derived user info
    setAuthentications(null, auths);
} | Restore authentications from persisted state.
@param savedAuthorization saved authorizations |
public static String nameClass(String tableName)
{
StringBuffer sb = new StringBuffer();
char[] chars = new char[tableName.length()];
chars = tableName.toCharArray();
char c;
boolean nextup = false;
for (int i = 0; i < chars.length; i++) {
if (i==0) c = Character.toUpperCase(chars[i]);
else if (chars[i]=='_') {
nextup = true;
continue;
}
else if (nextup) {
nextup = false;
c = Character.toUpperCase(chars[i]);
}
else c = Character.toLowerCase(chars[i]);
sb.append(c);
}
return sb.toString();
} | /* (non-Javadoc)
@see org.apache.ojb.tools.mapping.reversedb.Namer#nameClass(java.lang.String, java.lang.String) |
public void work(RepositoryHandler repoHandler, DbProduct product) {
if (!product.getDeliveries().isEmpty()) {
product.getDeliveries().forEach(delivery -> {
final Set<Artifact> artifacts = new HashSet<>();
final DataFetchingUtils utils = new DataFetchingUtils();
final DependencyHandler depHandler = new DependencyHandler(repoHandler);
final Set<String> deliveryDependencies = utils.getDeliveryDependencies(repoHandler, depHandler, delivery);
final Set<String> fullGAVCSet = deliveryDependencies.stream().filter(DataUtils::isFullGAVC).collect(Collectors.toSet());
final Set<String> shortIdentiferSet = deliveryDependencies.stream().filter(entry -> !DataUtils.isFullGAVC(entry)).collect(Collectors.toSet());
processDependencySet(repoHandler,
shortIdentiferSet,
batch -> String.format(BATCH_TEMPLATE_REGEX, StringUtils.join(batch, '|')),
1,
artifacts::add
);
processDependencySet(repoHandler,
fullGAVCSet,
batch -> QueryUtils.quoteIds(batch, BATCH_TEMPLATE),
10,
artifacts::add
);
if (!artifacts.isEmpty()) {
delivery.setAllArtifactDependencies(new ArrayList<>(artifacts));
}
});
repoHandler.store(product);
}
} | refresh all deliveries dependencies for a particular product |
protected void appendColumn(TableAlias anAlias, FieldDescriptor field, StringBuffer buf)
{
buf.append(anAlias.alias);
buf.append(".");
buf.append(field.getColumnName());
} | Append a Column with alias: A0 name -> A0.name
@param anAlias the TableAlias
@param field the field whose column name is appended
@param buf |
protected List appendListOfColumnsForSelect(StringBuffer buf)
{
FieldDescriptor[] fieldDescriptors = getFieldsForSelect();
ArrayList columnList = new ArrayList();
TableAlias searchAlias = getSearchTable();
for (int i = 0; i < fieldDescriptors.length; i++)
{
FieldDescriptor field = fieldDescriptors[i];
TableAlias alias = getTableAliasForClassDescriptor(field.getClassDescriptor());
if (alias == null)
{
alias = searchAlias;
}
if (i > 0)
{
buf.append(",");
}
appendColumn(alias, field, buf);
columnList.add(field.getAttributeName());
}
appendClazzColumnForSelect(buf);
return columnList;
} | Appends to the statement a comma separated list of column names.
DO NOT use this if order of columns is important. The row readers build reflectively and look up
column names to find values, so this is safe. In the case of update, you CANNOT use this as the
order of columns is important.
@return list of column names for the set of all unique columns for multiple classes mapped to the
same table. |
private ClassDescriptor[] getMultiJoinedClassDescriptors(ClassDescriptor cld)
{
DescriptorRepository repository = cld.getRepository();
Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true);
ClassDescriptor[] result = new ClassDescriptor[multiJoinedClasses.length];
for (int i = 0 ; i < multiJoinedClasses.length; i++)
{
result[i] = repository.getDescriptorFor(multiJoinedClasses[i]);
}
return result;
} | Get MultiJoined ClassDescriptors
@param cld |
private void appendClazzColumnForSelect(StringBuffer buf)
{
ClassDescriptor cld = getSearchClassDescriptor();
ClassDescriptor[] clds = getMultiJoinedClassDescriptors(cld);
if (clds.length == 0)
{
return;
}
buf.append(",CASE");
for (int i = clds.length; i > 0; i--)
{
buf.append(" WHEN ");
ClassDescriptor subCld = clds[i - 1];
FieldDescriptor[] fieldDescriptors = subCld.getPkFields();
TableAlias alias = getTableAliasForClassDescriptor(subCld);
for (int j = 0; j < fieldDescriptors.length; j++)
{
FieldDescriptor field = fieldDescriptors[j];
if (j > 0)
{
buf.append(" AND ");
}
appendColumn(alias, field, buf);
buf.append(" IS NOT NULL");
}
buf.append(" THEN '").append(subCld.getClassNameOfObject()).append("'");
}
buf.append(" ELSE '").append(cld.getClassNameOfObject()).append("'");
buf.append(" END AS " + SqlHelper.OJB_CLASS_COLUMN);
} | Create the OJB_CLAZZ pseudo column based on CASE WHEN.
This column defines the Class to be instantiated.
@param buf |
protected FieldDescriptor[] getFieldsForSelect()
{
if (fieldsForSelect == null || fieldsForSelect.get() == null)
{
fieldsForSelect = new WeakReference(buildFieldsForSelect(getSearchClassDescriptor()));
}
return (FieldDescriptor[]) fieldsForSelect.get();
} | Return the Fields to be selected.
@return the Fields to be selected |
protected FieldDescriptor[] buildFieldsForSelect(ClassDescriptor cld)
{
DescriptorRepository repository = cld.getRepository();
Set fields = new ListOrderedSet(); // keep the order of the fields
// add Standard Fields
// MBAIRD: if the object being queried on has multiple classes mapped to the table,
// then we will get all the fields that are a unique set across all those classes so if we need to
// we can materialize an extent
FieldDescriptor fds[] = repository.getFieldDescriptorsForMultiMappedTable(cld);
for (int i = 0; i < fds.length; i++)
{
fields.add(fds[i]);
}
// add inherited Fields. This is important when querying for a class having a super-reference
fds = cld.getFieldDescriptor(true);
for (int i = 0; i < fds.length; i++)
{
fields.add(fds[i]);
}
// add Fields of joined subclasses
Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true);
for (int c = 0; c < multiJoinedClasses.length; c++)
{
ClassDescriptor subCld = repository.getDescriptorFor(multiJoinedClasses[c]);
fds = subCld.getFieldDescriptions();
for (int i = 0; i < fds.length; i++)
{
fields.add(fds[i]);
}
}
FieldDescriptor[] result = new FieldDescriptor[fields.size()];
fields.toArray(result);
return result;
} | Return the Fields to be selected.
@param cld the ClassDescriptor
@return the Fields to be selected |
protected List appendListOfColumns(String[] columns, StringBuffer buf)
{
ArrayList columnList = new ArrayList();
for (int i = 0; i < columns.length; i++)
{
if (i > 0)
{
buf.append(",");
}
appendColName(columns[i], false, null, buf);
columnList.add(columns[i]);
}
return columnList;
} | Appends to the statement a comma separated list of column names.
@param columns defines the columns to be selected (for reports)
@return list of column names |
/**
 * Appends the join between the given class and its super class: the super
 * table is added to the FROM part, and the WHERE part links the auto
 * increment PK of the super table to the super-reference field of the sub
 * class.
 *
 * @param cld the class descriptor of the sub class
 * @param cldSuper the class descriptor of the super class
 * @param stmt buffer holding the FROM part
 * @param where buffer holding the WHERE part, may be null (then no join condition is added)
 */
private void appendSuperClassJoin(ClassDescriptor cld, ClassDescriptor cldSuper, StringBuffer stmt, StringBuffer where)
{
    stmt.append(",");
    appendTable(cldSuper, stmt);
    if (where != null)
    {
        if (where.length() > 0)
        {
            where.append(" AND ");
        }
        // get reference field in super class
        // TODO: do not use the superclassfield anymore, just assume that the id is the same in both tables - @see PBroker.storeToDb
        int superFieldRef = cld.getSuperClassFieldRef();
        FieldDescriptor refField = cld.getFieldDescriptorByIndex(superFieldRef);
        // emits: SUPER_TABLE.PK = SUB_TABLE.REF_FIELD
        appendTable(cldSuper, where);
        where.append(".");
        appendField(cldSuper.getAutoIncrementFields()[0], where);
        where.append(" = ");
        appendTable(cld, where);
        where.append(".");
        appendField(refField, where);
    }
}
arminw:
TODO: this feature doesn't work, so remove this in future |
public Object javaToSql(Object source) throws ConversionException
{
if (source == null)
{
return NULLVALUE;
}
try
{
List stringList = (List) source;
if (stringList.isEmpty())
{
return NULLVALUE;
}
StringBuffer result = new StringBuffer();
for (int i = 0; i < stringList.size(); i++)
{
String newSt = (String) stringList.get(i);
// introduced in JDK 1.4, replace with commons-lang
// newSt = newSt.replaceAll("#", "##");
newSt = StringUtils.replace(newSt, "#", "##");
if (i > 0)
{
result.append("#");
}
result.append(newSt);
}
return result.toString();
}
catch (ClassCastException e)
{
throw new ConversionException("Object is not a List of String it is a"
+ source.getClass().getName());
}
} | /* (non-Javadoc)
@see org.apache.ojb.broker.accesslayer.conversions.FieldConversion#javaToSql(java.lang.Object) |
public Object sqlToJava(Object source) throws ConversionException
{
if (source == null)
{
return null;
}
if (source.toString().equals(NULLVALUE))
{
return null;
}
if (source.toString().equals(EMPTYCOLLEC))
{
return new ArrayList();
}
List v = new ArrayList();
StringBuffer input = new StringBuffer();
StringBuffer newString = new StringBuffer();
int pos = 0;
int length;
input.append(source.toString());
length = input.length();
while (pos < length)
{
if (input.charAt(pos) != '#')
{
newString.append(input.charAt(pos));
}
else
{
if (input.charAt(pos + 1) != '#')
{
v.add(newString.toString());
newString = new StringBuffer();
}
else
{
newString.append('#');
++pos;
}
}
++pos;
}
v.add(newString.toString());
return v;
} | /* (non-Javadoc)
@see org.apache.ojb.broker.accesslayer.conversions.FieldConversion#sqlToJava(java.lang.Object) |
public void execute(Task task, Database dbModel, DescriptorRepository objModel) throws BuildException
{
try
{
DdlUtilsDataHandling handling = new DdlUtilsDataHandling();
handling.setModel(dbModel, objModel);
handling.setPlatform(getPlatform());
if (_singleDataFile != null)
{
readSingleDataFile(task, handling, _singleDataFile);
}
else
{
for (Iterator it = _fileSets.iterator(); it.hasNext();)
{
FileSet fileSet = (FileSet)it.next();
File fileSetDir = fileSet.getDir(task.getProject());
DirectoryScanner scanner = fileSet.getDirectoryScanner(task.getProject());
String[] files = scanner.getIncludedFiles();
for (int idx = 0; (files != null) && (idx < files.length); idx++)
{
readSingleDataFile(task, handling, new File(fileSetDir, files[idx]));
}
}
}
}
catch (Exception ex)
{
if (ex instanceof BuildException)
{
throw (BuildException)ex;
}
else
{
throw new BuildException(ex);
}
}
} | {@inheritDoc} |
private void readSingleDataFile(Task task, DdlUtilsDataHandling handling, File dataFile)
{
if (!dataFile.exists())
{
task.log("Could not find data file "+dataFile.getAbsolutePath(), Project.MSG_ERR);
}
else if (!dataFile.isFile())
{
task.log("Path "+dataFile.getAbsolutePath()+" does not denote a data file", Project.MSG_ERR);
}
else if (!dataFile.canRead())
{
task.log("Could not read data file "+dataFile.getAbsolutePath(), Project.MSG_ERR);
}
else
{
int batchSize = 1;
if ((_useBatchMode != null) && _useBatchMode.booleanValue())
{
if (_batchSize != null)
{
batchSize = _batchSize.intValue();
}
}
try
{
handling.insertData(new FileReader(dataFile), batchSize);
task.log("Read data file "+dataFile.getAbsolutePath(), Project.MSG_INFO);
}
catch (Exception ex)
{
if (isFailOnError())
{
throw new BuildException("Could not read data file "+dataFile.getAbsolutePath(), ex);
}
else
{
task.log("Could not read data file "+dataFile.getAbsolutePath(), Project.MSG_ERR);
}
}
}
} | Reads a single data file.
@param task The parent task
@param handling The data handling object used to insert the data
@param dataFile The data file to read
public void performDeletion()
{
if(deletionMap == null)
return;
else
{
PersistenceBroker broker = tx.getBroker();
Iterator it = deletionMap.values().iterator();
while(it.hasNext())
{
NamedEntry namedEntry = (NamedEntry) it.next();
broker.delete(namedEntry);
}
}
} | Have to be performed after the "normal" objects be written
to DB and before method {@link #performInsert()}. |
public void performInsert()
{
if(insertMap == null)
return;
else
{
PersistenceBroker broker = tx.getBroker();
Iterator it = insertMap.values().iterator();
while(it.hasNext())
{
NamedEntry namedEntry = (NamedEntry) it.next();
namedEntry.prepareForStore(broker);
broker.store(namedEntry, ObjectModification.INSERT);
}
}
} | Have to be performed after the "normal" objects be written
to DB and after method {@link #performDeletion()}. |
/**
 * Returns the named object associated with the specified key. The entry is
 * first looked up locally, then materialized from the database. When the
 * bound object is a persistence capable object (stored as its Identity), it
 * is materialized and registered with a READ lock on the transaction.
 *
 * @param key the binding name
 * @return the bound object, never null
 * @throws ObjectNameNotFoundException if no object is bound under the key
 */
Object lookup(String key) throws ObjectNameNotFoundException
{
    Object result = null;
    NamedEntry entry = localLookup(key);
    // can't find local bound object, try to materialize it from the database
    if(entry == null)
    {
        try
        {
            PersistenceBroker broker = tx.getBroker();
            // build Identity to lookup entry
            Identity oid = broker.serviceIdentity().buildIdentity(NamedEntry.class, key);
            entry = (NamedEntry) broker.getObjectByIdentity(oid);
        }
        catch(Exception e)
        {
            // lookup failure is reported below via the not-found exception
            log.error("Can't materialize bound object for key '" + key + "'", e);
        }
    }
    if(entry == null)
    {
        log.info("No object found for key '" + key + "'");
    }
    else
    {
        Object obj = entry.getObject();
        // found a persistence capable object associated with that key
        if(obj instanceof Identity)
        {
            Identity objectIdentity = (Identity) obj;
            result = tx.getBroker().getObjectByIdentity(objectIdentity);
            // lock the persistence capable object
            RuntimeObject rt = new RuntimeObject(result, objectIdentity, tx, false);
            tx.lockAndRegister(rt, Transaction.READ, tx.getRegistrationList());
        }
        else
        {
            // nothing else to do
            result = obj;
        }
    }
    if(result == null) throw new ObjectNameNotFoundException("Can't find named object for name '" + key + "'");
    return result;
}
void unbind(String key)
{
NamedEntry entry = new NamedEntry(key, null, false);
localUnbind(key);
addForDeletion(entry);
} | Remove a named object |
public DescriptorRepository getRepository()
{
DescriptorRepository repository;
if (enablePerThreadChanges)
{
repository = (DescriptorRepository) threadedRepository.get();
if (repository == null)
{
repository = getGlobalRepository();
log.info(MSG_STR);
}
// arminw:
// TODO: Be more strict in per thread mode and throw a exception when not find descriptor for calling thread?
// if (repository == null)
// {
// throw new MetadataException("Can't find a DescriptorRepository for current thread, don't forget" +
// " to set a DescriptorRepository if enable per thread changes before perform other action");
// }
return repository;
}
else
{
return globalRepository;
}
} | Returns the current valid {@link org.apache.ojb.broker.metadata.DescriptorRepository} for
the caller. This is the provided way to obtain the
{@link org.apache.ojb.broker.metadata.DescriptorRepository}.
<br>
When {@link #isEnablePerThreadChanges per thread descriptor handling} is enabled
it search for a specific {@link org.apache.ojb.broker.metadata.DescriptorRepository}
for the calling thread, if none can be found the global descriptor was returned.
@see MetadataManager#getGlobalRepository
@see MetadataManager#copyOfGlobalRepository |
public void mergeConnectionRepository(
ConnectionRepository targetRepository, ConnectionRepository sourceRepository, boolean deep)
{
List list = sourceRepository.getAllDescriptor();
for (Iterator iterator = list.iterator(); iterator.hasNext();)
{
JdbcConnectionDescriptor jcd = (JdbcConnectionDescriptor) iterator.next();
if (deep)
{
//TODO: adopt copy/clone methods for metadata classes?
jcd = (JdbcConnectionDescriptor) SerializationUtils.clone(jcd);
}
targetRepository.addDescriptor(jcd);
}
} | Merge the given source {@link ConnectionRepository} with the
existing target. If parameter
<tt>deep</tt> is set <code>true</code> deep copies of source objects were made.
<br/>
Note: Using <tt>deep copy mode</tt> all descriptors will be serialized
by using the default class loader to resolve classes. This can be problematic
when classes are loaded by a context class loader.
<p>
Note: All classes within the repository structure have to implement
<code>java.io.Serializable</code> to be able to create a cloned copy. |
public void mergeDescriptorRepository(
DescriptorRepository targetRepository, DescriptorRepository sourceRepository, boolean deep)
{
Iterator it = sourceRepository.iterator();
while (it.hasNext())
{
ClassDescriptor cld = (ClassDescriptor) it.next();
if (deep)
{
//TODO: adopt copy/clone methods for metadata classes?
cld = (ClassDescriptor) SerializationUtils.clone(cld);
}
targetRepository.put(cld.getClassOfObject(), cld);
cld.setRepository(targetRepository);
}
} | Merge the given {@link org.apache.ojb.broker.metadata.DescriptorRepository}
files, the source objects will be pushed to the target repository. If parameter
<tt>deep</tt> is set <code>true</code> deep copies of source objects were made.
<br/>
Note: Using <tt>deep copy mode</tt> all descriptors will be serialized
by using the default class loader to resolve classes. This can be problematic
when classes are loaded by a context class loader.
<p>
Note: All classes within the repository structure have to implement
<code>java.io.Serializable</code> to be able to create a cloned copy.
@see #isEnablePerThreadChanges
@see #setEnablePerThreadChanges |
public DescriptorRepository readDescriptorRepository(String fileName)
{
try
{
RepositoryPersistor persistor = new RepositoryPersistor();
return persistor.readDescriptorRepository(fileName);
}
catch (Exception e)
{
throw new MetadataException("Can not read repository " + fileName, e);
}
} | Read ClassDescriptors from the given repository file.
@see #mergeDescriptorRepository |
public DescriptorRepository readDescriptorRepository(InputStream inst)
{
try
{
RepositoryPersistor persistor = new RepositoryPersistor();
return persistor.readDescriptorRepository(inst);
}
catch (Exception e)
{
throw new MetadataException("Can not read repository " + inst, e);
}
} | Read ClassDescriptors from the given InputStream.
@see #mergeDescriptorRepository |
public ConnectionRepository readConnectionRepository(String fileName)
{
try
{
RepositoryPersistor persistor = new RepositoryPersistor();
return persistor.readConnectionRepository(fileName);
}
catch (Exception e)
{
throw new MetadataException("Can not read repository " + fileName, e);
}
} | Read JdbcConnectionDescriptors from the given repository file.
@see #mergeConnectionRepository |
public ConnectionRepository readConnectionRepository(InputStream inst)
{
try
{
RepositoryPersistor persistor = new RepositoryPersistor();
return persistor.readConnectionRepository(inst);
}
catch (Exception e)
{
throw new MetadataException("Can not read repository from " + inst, e);
}
} | Read JdbcConnectionDescriptors from this InputStream.
@see #mergeConnectionRepository |
public void setDescriptor(DescriptorRepository repository, boolean global)
{
if (global)
{
if (log.isDebugEnabled()) log.debug("Set new global repository: " + repository);
globalRepository = repository;
}
else
{
if (log.isDebugEnabled()) log.debug("Set new threaded repository: " + repository);
threadedRepository.set(repository);
}
} | Set the {@link org.apache.ojb.broker.metadata.DescriptorRepository} - if <i>global</i> was true, the
given descriptor aquire global availability (<i>use with care!</i>),
else the given descriptor was associated with the calling thread.
@see #isEnablePerThreadChanges
@see #setEnablePerThreadChanges |
public void addProfile(Object key, DescriptorRepository repository)
{
if (metadataProfiles.contains(key))
{
throw new MetadataException("Duplicate profile key. Key '" + key + "' already exists.");
}
metadataProfiles.put(key, repository);
} | Add a metadata profile.
@see #loadProfile |
public void loadProfile(Object key)
{
if (!isEnablePerThreadChanges())
{
throw new MetadataException("Can not load profile with disabled per thread mode");
}
DescriptorRepository rep = (DescriptorRepository) metadataProfiles.get(key);
if (rep == null)
{
throw new MetadataException("Can not find profile for key '" + key + "'");
}
currentProfileKey.set(key);
setDescriptor(rep);
} | Load the given metadata profile for the current thread. |
public void setDefaultPBKey(PBKey defaultPBKey)
{
if(this.defaultPBKey != null)
{
log.warn("The used default PBKey change. Current key is " + this.defaultPBKey + ", new key will be " + defaultPBKey);
}
this.defaultPBKey = defaultPBKey;
log.info("Set default PBKey for convenience broker creation: " + defaultPBKey);
} | Set the {@link PBKey} used in convinience method
{@link org.apache.ojb.broker.PersistenceBrokerFactory#defaultPersistenceBroker}.
<br/>
It's only allowed to use one {@link JdbcConnectionDescriptor} with enabled
{@link JdbcConnectionDescriptor#isDefaultConnection() default connection}. In this case
OJB will automatically set the default key.
<br/>
Note: It's recommended to set this key only once and not to change at runtime
of OJB to avoid side-effects.
If set more then one time a warning will be logged.
@throws MetadataException if key was set more than one time |
private PBKey buildDefaultKey()
{
List descriptors = connectionRepository().getAllDescriptor();
JdbcConnectionDescriptor descriptor;
PBKey result = null;
for (Iterator iterator = descriptors.iterator(); iterator.hasNext();)
{
descriptor = (JdbcConnectionDescriptor) iterator.next();
if (descriptor.isDefaultConnection())
{
if(result != null)
{
log.error("Found additional connection descriptor with enabled 'default-connection' "
+ descriptor.getPBKey() + ". This is NOT allowed. Will use the first found descriptor " + result
+ " as default connection");
}
else
{
result = descriptor.getPBKey();
}
}
}
if(result == null)
{
log.info("No 'default-connection' attribute set in jdbc-connection-descriptors," +
" thus it's currently not possible to use 'defaultPersistenceBroker()' " +
" convenience method to lookup PersistenceBroker instances. But it's possible"+
" to enable this at runtime using 'setDefaultKey' method.");
}
return result;
} | Try to build an default PBKey for convenience PB create method.
@return PBKey or <code>null</code> if default key was not declared in
metadata |
/**
 * Writes this object to the given output stream: the default fields, the
 * current table capacity and all entries as alternating key/value objects,
 * terminated by a null marker.
 *
 * @param out the output stream to write to
 * @throws java.io.IOException if the stream raises it
 */
private void writeObject(ObjectOutputStream out) throws IOException
{
    out.defaultWriteObject();
    // record the current table capacity (presumably restored on read — confirm against readObject)
    out.writeInt(table.length);
    // Have to use null-terminated list because size might shrink
    // during iteration
    for (Iterator iter = entrySet().iterator(); iter.hasNext();)
    {
        Map.Entry entry = (Map.Entry) iter.next();
        out.writeObject(entry.getKey());
        out.writeObject(entry.getValue());
    }
    out.writeObject(null);
}
@param out the output stream to write to
@throws java.io.IOException if the stream raises it |
private Object toReference(int type, Object referent, int hash)
{
switch (type)
{
case HARD:
return referent;
case SOFT:
return new SoftRef(hash, referent, queue);
case WEAK:
return new WeakRef(hash, referent, queue);
default:
throw new Error();
}
} | Constructs a reference of the given type to the given
referent. The reference is registered with the queue
for later purging.
@param type HARD, SOFT or WEAK
@param referent the object to refer to
@param hash the hash code of the <I>key</I> of the mapping;
this number might be different from referent.hashCode() if
the referent represents a value and not a key |
private Entry getEntry(Object key)
{
if (key == null) return null;
int hash = hashCode(key);
int index = indexFor(hash);
for (Entry entry = table[index]; entry != null; entry = entry.next)
{
if ((entry.hash == hash) && equals(key, entry.getKey()))
{
return entry;
}
}
return null;
} | Returns the entry associated with the given key.
@param key the key of the entry to look up
@return the entry associated with that key, or null
if the key is not in this map |
/**
 * Converts the given hash code into an index into the hash table. The
 * bit-spreading steps compensate for poorly distributed hash codes; the
 * final mask requires the table length to be a power of two.
 *
 * @param hash the raw hash code
 * @return an index in the range [0, table.length)
 */
private int indexFor(int hash)
{
    // mix the bits to avoid bucket collisions...
    hash += ~(hash << 15);
    hash ^= (hash >>> 10);
    hash += (hash << 3);
    hash ^= (hash >>> 6);
    hash += ~(hash << 11);
    hash ^= (hash >>> 16);
    return hash & (table.length - 1);
}
hash table. |
public boolean containsKey(Object key)
{
purge();
Entry entry = getEntry(key);
if (entry == null) return false;
return entry.getValue() != null;
} | Returns <Code>true</Code> if this map contains the given key.
@return true if the given key is in this map |
public Object get(Object key)
{
purge();
Entry entry = getEntry(key);
if (entry == null) return null;
return entry.getValue();
} | Returns the value associated with the given key, if any.
@return the value associated with the given key, or <Code>null</Code>
if the key maps to no value |
/**
 * Associates the given key with the given value. Neither the key nor the
 * value may be null. New keys and values are wrapped into reference holders
 * according to the configured key/value reference strengths before insertion.
 *
 * @param key the key of the mapping
 * @param value the value of the mapping
 * @return the last value associated with that key, or null if no value was
 *         associated with the key
 * @throws java.lang.NullPointerException if either the key or value is null
 */
public Object put(Object key, Object value)
{
    if (key == null) throw new NullPointerException("null keys not allowed");
    if (value == null) throw new NullPointerException("null values not allowed");
    purge();
    if (size + 1 > threshold) resize();
    int hash = hashCode(key);
    int index = indexFor(hash);
    Entry entry = table[index];
    while (entry != null)
    {
        if ((hash == entry.hash) && equals(key, entry.getKey()))
        {
            // key already present: replace the value, keep the entry
            Object result = entry.getValue();
            entry.setValue(value);
            return result;
        }
        entry = entry.next;
    }
    this.size++;
    modCount++;
    // wrap key and value according to the configured reference strength
    key = toReference(keyType, key, hash);
    value = toReference(valueType, value, hash);
    // prepend the new entry to the bucket chain
    table[index] = new Entry(key, hash, value, table[index]);
    return null;
}
Neither the key nor the value may be null.
@param key the key of the mapping
@param value the value of the mapping
@return the last value associated with that key, or
null if no value was associated with the key
@throws java.lang.NullPointerException if either the key or value
is null |
public Object remove(Object key)
{
if (key == null) return null;
purge();
int hash = hashCode(key);
int index = indexFor(hash);
Entry previous = null;
Entry entry = table[index];
while (entry != null)
{
if ((hash == entry.hash) && equals(key, entry.getKey()))
{
if (previous == null)
table[index] = entry.next;
else
previous.next = entry.next;
this.size--;
modCount++;
return entry.getValue();
}
previous = entry;
entry = entry.next;
}
return null;
} | Removes the key and its associated value from this map.
@param key the key to remove
@return the value associated with that key, or null if
the key was not in the map |
public Collection values()
{
if (values != null) return values;
values = new AbstractCollection()
{
public int size()
{
return size;
}
public void clear()
{
ReferenceMap.this.clear();
}
public Iterator iterator()
{
return new ValueIterator();
}
};
return values;
} | Returns a collection view of this map's values.
@return a collection view of this map's values. |
protected String getColName(TableAlias aTableAlias, PathInfo aPathInfo, boolean translate)
{
FieldDescriptor fld = null;
String result;
if (translate)
{
fld = getFieldDescriptor(aTableAlias, aPathInfo);
}
if (fld != null)
{
// BRJ : No alias for delete
result = fld.getColumnName();
}
else
{
result = aPathInfo.column;
}
return result;
} | /* (non-Javadoc)
@see org.apache.ojb.broker.accesslayer.sql.SqlQueryStatement#getColName(org.apache.ojb.broker.accesslayer.sql.SqlQueryStatement.TableAlias, org.apache.ojb.broker.util.SqlHelper.PathInfo, boolean) |
public Object toInternal(Attribute<?> attribute) throws GeomajasException {
if (attribute instanceof PrimitiveAttribute<?>) {
return toPrimitiveObject((PrimitiveAttribute<?>) attribute);
} else if (attribute instanceof AssociationAttribute<?>) {
return toAssociationObject((AssociationAttribute<?>) attribute);
} else {
throw new GeomajasException(ExceptionCode.CONVERSION_PROBLEM, attribute);
}
} | Converts a DTO attribute into a generic attribute object.
@param attribute
The DTO attribute.
@return The server side attribute representation. As we don't know at this point what kind of object the
attribute is (that's a problem for the <code>FeatureModel</code>), we return an <code>Object</code>. |
private Attribute<?> toAssociationDto(Object value, AssociationAttributeInfo associationAttributeInfo)
throws GeomajasException {
if (associationAttributeInfo.getType() == AssociationType.MANY_TO_ONE) {
return new ManyToOneAttribute(createAssociationValue(value, associationAttributeInfo));
} else if (associationAttributeInfo.getType() == AssociationType.ONE_TO_MANY) {
// Value should be an array of objects...
List<AssociationValue> associationValues = new ArrayList<AssociationValue>();
if (value != null && value instanceof Object[]) {
Object[] array = (Object[]) value;
for (Object bean : array) {
associationValues.add(createAssociationValue(bean, associationAttributeInfo));
}
}
return new OneToManyAttribute(associationValues);
}
return null;
} | ------------------------------------------------------------------------- |
/**
 * Converts the server side feature into a DTO feature that can be sent to the
 * client, copying only the aspects requested through the include flags.
 *
 * @param feature the server-side feature representation, may be null
 * @param featureIncludes bitmask indicating which aspects of the feature
 *        should be included, see {@link VectorLayerService}
 * @return the DTO feature, or null when feature is null
 * @throws GeomajasException when the geometry cannot be converted
 */
public Feature toDto(InternalFeature feature, int featureIncludes) throws GeomajasException {
	if (feature == null) {
		return null;
	}
	Feature dto = new Feature(feature.getId());
	if ((featureIncludes & VectorLayerService.FEATURE_INCLUDE_ATTRIBUTES) != 0 && null != feature.getAttributes()) {
		// need to assure lazy attributes are converted to non-lazy attributes
		Map<String, Attribute> attributes = new HashMap<String, Attribute>();
		for (Map.Entry<String, Attribute> entry : feature.getAttributes().entrySet()) {
			Attribute value = entry.getValue();
			if (value instanceof LazyAttribute) {
				value = ((LazyAttribute) value).instantiate();
			}
			attributes.put(entry.getKey(), value);
		}
		dto.setAttributes(attributes);
	}
	if ((featureIncludes & VectorLayerService.FEATURE_INCLUDE_LABEL) != 0) {
		dto.setLabel(feature.getLabel());
	}
	if ((featureIncludes & VectorLayerService.FEATURE_INCLUDE_GEOMETRY) != 0) {
		dto.setGeometry(toDto(feature.getGeometry()));
	}
	if ((featureIncludes & VectorLayerService.FEATURE_INCLUDE_STYLE) != 0 && null != feature.getStyleInfo()) {
		dto.setStyleId(feature.getStyleInfo().getStyleId());
	}
	// clipped/updatable/deletable flags are always copied, regardless of the include mask
	InternalFeatureImpl vFeature = (InternalFeatureImpl) feature;
	dto.setClipped(vFeature.isClipped());
	dto.setUpdatable(feature.isEditable());
	dto.setDeletable(feature.isDeletable());
	return dto;
}
@param feature
The server-side feature representation.
@param featureIncludes
Indicates which aspects of the feature should be included, see {@link VectorLayerService}
@return Returns the DTO feature. |
@Override
public Geometry toDto(com.vividsolutions.jts.geom.Geometry geometry) throws GeomajasException {
if (geometry == null) {
return null;
}
try {
return GeometryConverterService.fromJts(geometry);
} catch (JtsConversionException jce) {
throw new GeomajasException(jce, ExceptionCode.CANNOT_CONVERT_GEOMETRY, geometry.getClass().getName());
}
} | ------------------------------------------------------------------------- |
/**
 * Convert a server-side tile into a DTO tile.
 * <p>
 * Note: the crs and featureIncludes parameters are currently ignored; the
 * conversion simply delegates to {@link #toDto(InternalTile)}.
 *
 * @param tile the server-side representation of a tile
 * @param crs crs to include in features (if any) — currently unused
 * @param featureIncludes which aspects should be included — currently unused
 * @return the DTO version that can be sent to the client
 */
public VectorTile toDto(InternalTile tile, String crs, int featureIncludes) throws GeomajasException {
	return toDto(tile);
}
@param tile
The server-side representation of a tile.
@param crs
crs to include in features (if any)
@param featureIncludes
Indicates which aspects of the tile should be included, see {@link org.geomajas.layer.VectorLayerService}
@return Returns the DTO version that can be sent to the client. |
public VectorTile toDto(InternalTile tile) throws GeomajasException {
if (null != tile) {
VectorTile dto = new VectorTile();
dto.setClipped(tile.isClipped());
dto.setCode(tile.getCode());
dto.setCodes(tile.getCodes());
dto.setScreenHeight(tile.getScreenHeight());
dto.setScreenWidth(tile.getScreenWidth());
dto.setFeatureContent(tile.getFeatureContent());
dto.setLabelContent(tile.getLabelContent());
dto.setContentType(tile.getContentType());
return dto;
}
return null;
} | Convert a server-side tile representations into a DTO tile.
@param tile
The server-side representation of a tile.
@return Returns the DTO version that can be sent to the client. |
public Envelope toInternal(Bbox bbox) {
return new Envelope(bbox.getX(), bbox.getMaxX(), bbox.getY(), bbox.getMaxY());
} | Convert a {@link Bbox} to a JTS envelope.
@param bbox bounding box
@return JTS envelope |
public Bbox toDto(Envelope envelope) {
return new Bbox(envelope.getMinX(), envelope.getMinY(), envelope.getWidth(), envelope.getHeight());
} | Convert JTS envelope into a {@link Bbox}.
@param envelope
JTS envelope
@return Geomajas {@link Bbox} |
public Class<? extends com.vividsolutions.jts.geom.Geometry> toInternal(LayerType layerType) {
switch (layerType) {
case GEOMETRY:
return com.vividsolutions.jts.geom.Geometry.class;
case LINESTRING:
return LineString.class;
case MULTILINESTRING:
return MultiLineString.class;
case POINT:
return Point.class;
case MULTIPOINT:
return MultiPoint.class;
case POLYGON:
return Polygon.class;
case MULTIPOLYGON:
return MultiPolygon.class;
case RASTER:
return null;
default:
throw new IllegalStateException("Don't know how to handle layer type " + layerType);
}
} | Convert a layer type to a geometry class.
@param layerType
layer type
@return JTS class |
public LayerType toDto(Class<? extends com.vividsolutions.jts.geom.Geometry> geometryClass) {
if (geometryClass == LineString.class) {
return LayerType.LINESTRING;
} else if (geometryClass == MultiLineString.class) {
return LayerType.MULTILINESTRING;
} else if (geometryClass == Point.class) {
return LayerType.POINT;
} else if (geometryClass == MultiPoint.class) {
return LayerType.MULTIPOINT;
} else if (geometryClass == Polygon.class) {
return LayerType.POLYGON;
} else if (geometryClass == MultiPolygon.class) {
return LayerType.MULTIPOLYGON;
} else {
return LayerType.GEOMETRY;
}
} | Convert a geometry class to a layer type.
@param geometryClass
JTS geometry class
@return Geomajas layer type |
public Object sqlToJava(Object source)
{
if (source instanceof java.sql.Timestamp)
{
return new java.util.Date( ((java.sql.Timestamp) source).getTime());
}
else
{
return source;
}
} | /*
@see FieldConversion#sqlToJava(Object) |
/**
 * Creates a suitable {@link DataStore} for the specified parameters.
 *
 * @param parameters list of GeoTools parameters.
 * @return data store, never null
 * @throws IOException could not create data store
 */
public static DataStore create(Map<String, Object> parameters) throws IOException {
	Object url = parameters.get(ShapefileDataStoreFactory.URLP.key);
	Logger log = LoggerFactory.getLogger(DataStoreFactory.class);
	// normalize a string URL (possibly a classpath or relative resource) to its external form
	if (url instanceof String) {
		parameters.put(ShapefileDataStoreFactory.URLP.key, ResourceUtils.getURL((String) url).toExternalForm());
	}
	// reuse a previously created data store for the exact same parameter map
	if (DATASTORE_CACHE.containsKey(parameters)) {
		return DATASTORE_CACHE.get(parameters);
	}
	DataStore store = DataStoreFinder.getDataStore(parameters);
	// warn when non-typed FIDs are requested: only logged, the setting is not applied here
	Object typed = parameters.get(USE_TYPED_FIDS);
	if (typed instanceof String) {
		Boolean t = Boolean.valueOf((String) typed);
		if (!t) {
			if (store != null) {
				log.warn("Non-typed FIDs are only supported by first-generation JDBC datastores, "
						+ "using default fid format for datastore class " + store.getClass().getName());
			}
		}
	}
	// no factory could handle the parameters: build an error message listing which
	// factories are available and which are missing their supporting libraries
	if (null == store) {
		StringBuilder availableStr = new StringBuilder();
		StringBuilder missingStr = new StringBuilder();
		Iterator<DataStoreFactorySpi> all = DataStoreFinder.getAllDataStores();
		while (all.hasNext()) {
			DataStoreFactorySpi factory = all.next();
			if (!factory.isAvailable()) {
				log.warn("Datastore factory " + factory.getDisplayName() + "(" + factory.getDescription()
						+ ") is not available");
				if (missingStr.length() != 0) {
					missingStr.append(",");
				}
				missingStr.append(factory.getDisplayName());
			} else {
				if (availableStr.length() != 0) {
					availableStr.append(",");
				}
				availableStr.append(factory.getDisplayName());
			}
		}
		throw new IOException(
				"No datastore found. Possible causes are missing factory or missing library for your datastore"
						+ " (e.g. database driver).\nCheck the isAvailable() method of your"
						+ " DataStoreFactory class to find out which libraries are needed.\n"
						+ "Unavailable factories : " + missingStr + "\n" + "Available factories : " + availableStr
						+ "\n");
	}
	DATASTORE_CACHE.put(parameters, store);
	return store;
}
public void apply()
{
String in = readLineWithMessage("Delete Product with id:");
int id = Integer.parseInt(in);
// We don't have a reference to the selected Product.
// So first we have to lookup the object,
// we do this by a query by example (QBE):
// 1. build an example object with matching primary key values:
Product example = new Product();
example.setId(id);
// 2. build a QueryByIdentity from this sample instance:
Query query = new QueryByIdentity(example);
try
{
// start broker transaction
broker.beginTransaction();
// lookup the product specified by the QBE
Product toBeDeleted = (Product) broker.getObjectByQuery(query);
// now ask broker to delete the object
broker.delete(toBeDeleted);
// commit transaction
broker.commitTransaction();
}
catch (Throwable t)
{
// rollback in case of errors
broker.abortTransaction();
t.printStackTrace();
}
} | perform this use case |
/**
 * Put a spatial object in the cache and index it.
 *
 * @param key key for object
 * @param object object itself
 * @param envelope envelope for object
 */
public void put(String key, Object object, Envelope envelope) {
	// register the spatial extent in the index, then store the object itself
	index.put(key, envelope);
	cache.put(key, object);
}
/**
 * Get the spatial object from the cache.
 *
 * @param key key to get object for
 * @param type type of object which should be returned
 * @return object for key or null if object does not exist or is of a different type
 */
public <TYPE> TYPE get(String key, Class<TYPE> type) {
	return cache.get(key, type);
}
public void invalidate(Envelope envelope) {
List<String> keys = index.getOverlappingKeys(envelope);
if (CacheIndexService.ALL_KEYS == keys) {
log.debug("clear all keys from cache");
clear();
} else {
log.debug("invalidate keys {}", keys);
for (String key : keys) {
remove(key);
}
}
} | Invalidate all entries which (may) overlap with the given geometry.
@param envelope envelope to test |
public void put(Object key, Object value)
{
keyTable.put(key, value);
valueTable.put(value, key);
} | put a (key, value) pair into the table.
@param key the key object.
@param value the value object. |
public void removeByKey(Object key)
{
Object value = keyTable.remove(key);
if (value != null)
{
valueTable.remove(value);
}
} | remove a (key, value)-entry by its key
@param key the key object |
public void removeByValue(Object value)
{
Object key = valueTable.remove(value);
if (key != null)
{
keyTable.remove(key);
}
} | remove a (key, value)-entry by its value
@param value the value object |
public void addFkToThisClass(String column)
{
if (fksToThisClass == null)
{
fksToThisClass = new Vector();
}
fksToThisClass.add(column);
fksToThisClassAry = null;
} | add a FK column pointing to This Class |
public void addFkToItemClass(String column)
{
if (fksToItemClass == null)
{
fksToItemClass = new Vector();
}
fksToItemClass.add(column);
fksToItemClassAry = null;
} | add a FK column pointing to the item Class |
public String toXML()
{
RepositoryTags tags = RepositoryTags.getInstance();
String eol = SystemUtils.LINE_SEPARATOR;
// write opening tag
String result = " " + tags.getOpeningTagNonClosingById(COLLECTION_DESCRIPTOR) + eol;
// write attributes
// name
result += " " + tags.getAttribute(FIELD_NAME,this.getAttributeName()) + eol;
// collection class is optional
if (getCollectionClassName() != null)
{
result += " " + tags.getAttribute(COLLECTION_CLASS,this.getCollectionClassName()) + eol;
}
// element-class-ref
result += " " + tags.getAttribute(ITEMS_CLASS,this.getItemClassName()) + eol;
// indirection-table is optional
if (isMtoNRelation())
{
result += " " + tags.getAttribute(INDIRECTION_TABLE,getIndirectionTable()) + eol;
}
// proxyReference is optional, disabled by default
if (isLazy())
{
result += " " + tags.getAttribute(PROXY_REFERENCE,"true") + eol;
result += " " + tags.getAttribute(PROXY_PREFETCHING_LIMIT, "" + this.getProxyPrefetchingLimit()) + eol;
}
//reference refresh is optional, disabled by default
if (isRefresh())
{
result += " " + tags.getAttribute(REFRESH,"true") + eol;
}
//auto retrieve
result += " " + tags.getAttribute(AUTO_RETRIEVE, "" + getCascadeRetrieve()) + eol;
//auto update
result += " " + tags.getAttribute(AUTO_UPDATE, getCascadeAsString(getCascadingStore())) + eol;
//auto delete
result += " " + tags.getAttribute(AUTO_DELETE, getCascadeAsString(getCascadingDelete())) + eol;
//otm-dependent is optional, disabled by default
if (getOtmDependent())
{
result += " " + tags.getAttribute(OTM_DEPENDENT, "true") + eol;
}
// close opening tag
result += " >" + eol;
// write elements
// inverse fk elements
for (int i=0;i<getForeignKeyFields().size();i++)
{
Object obj = getForeignKeyFields().get(i);
if (obj instanceof Integer)
{
String fkId = obj.toString();
result += " " + tags.getOpeningTagNonClosingById(INVERSE_FK) + " ";
result += tags.getAttribute(FIELD_ID_REF, fkId) + "/>" + eol;
}
else
{
String fk = (String) obj;
result += " " + tags.getOpeningTagNonClosingById(INVERSE_FK) + " ";
result += tags.getAttribute(FIELD_REF, fk) + "/>" + eol;
}
}
// write optional M:N elements
// m:n relationship settings, optional
if (isMtoNRelation())
{
// foreign keys to this class
for (int i=0;i<getFksToThisClass().length;i++)
{
String fkId = getFksToThisClass()[i];
result += " " + tags.getOpeningTagNonClosingById(FK_POINTING_TO_THIS_CLASS) + " ";
result += tags.getAttribute(COLUMN_NAME, fkId) + "/>" + eol;
}
// foreign keys to item class
for (int i=0;i<getFksToItemClass().length;i++)
{
String fkId = getFksToItemClass()[i];
result += " " + tags.getOpeningTagNonClosingById(FK_POINTING_TO_ITEMS_CLASS) + " ";
result += tags.getAttribute(COLUMN_NAME, fkId) + "/>" + eol;
}
}
// closing tag
result += " " + tags.getClosingTagById(COLLECTION_DESCRIPTOR) + eol;
return result;
} | /*
@see XmlCapable#toXML() |
protected String sp_createSequenceQuery(String sequenceName, long maxKey)
{
return "insert into " + SEQ_TABLE_NAME + " ("
+ SEQ_NAME_STRING + "," + SEQ_ID_STRING +
") values ('" + sequenceName + "'," + maxKey + ")";
} | Insert syntax for our special table
@param sequenceName
@param maxKey
@return sequence insert statement |
/**
 * Gets the next unique value for the given field. If the backing sequence
 * entry does not yet exist, it is created (seeded with the current max key
 * of the field's extent) and the lookup is retried once.
 *
 * @param field the field a unique value is needed for
 * @return the next unique long value
 * @throws SequenceManagerException if no value could be obtained and the
 *         sequence entry could not be created either
 */
protected long getUniqueLong(FieldDescriptor field) throws SequenceManagerException
{
    boolean needsCommit = false;
    long result = 0;
    /*
    arminw:
    use the associated broker instance, check if broker was in tx or
    we need to commit used connection.
    */
    PersistenceBroker targetBroker = getBrokerForClass();
    if(!targetBroker.isInTransaction())
    {
        targetBroker.beginTransaction();
        needsCommit = true;
    }
    try
    {
        // lookup sequence name
        String sequenceName = calculateSequenceName(field);
        try
        {
            result = buildNextSequence(targetBroker, field.getClassDescriptor(), sequenceName);
            /*
            if 0 was returned we assume that the stored procedure
            did not work properly.
            */
            if (result == 0)
            {
                throw new SequenceManagerException("No incremented value retrieved");
            }
        }
        catch (Exception e)
        {
            // maybe the sequence was not created
            log.info("Could not grab next key, message was " + e.getMessage() +
                    " - try to write a new sequence entry to database");
            try
            {
                // on create, make sure to get the max key for the table first
                long maxKey = SequenceManagerHelper.getMaxForExtent(targetBroker, field);
                createSequence(targetBroker, field, sequenceName, maxKey);
            }
            catch (Exception e1)
            {
                String eol = SystemUtils.LINE_SEPARATOR;
                throw new SequenceManagerException(eol + "Could not grab next id, failed with " + eol +
                        e.getMessage() + eol + "Creation of new sequence failed with " +
                        eol + e1.getMessage() + eol, e1);
            }
            // retry once now that the sequence row should exist
            try
            {
                result = buildNextSequence(targetBroker, field.getClassDescriptor(), sequenceName);
            }
            catch (Exception e1)
            {
                throw new SequenceManagerException("Could not grab next id although a sequence seems to exist", e);
            }
        }
    }
    finally
    {
        // only commit when this method started the transaction itself
        if(targetBroker != null && needsCommit)
        {
            targetBroker.commitTransaction();
        }
    }
    return result;
}
/**
 * Calls the stored procedure that increments and returns the next value of
 * the named sequence; the call fails with an exception if the procedure
 * does not exist.
 *
 * @param broker broker used to obtain the connection
 * @param cld class descriptor (not used by the call itself)
 * @param sequenceName name of the sequence to increment
 * @return the next sequence value
 * @throws LookupException if the connection could not be obtained
 * @throws SQLException if the procedure call fails
 * @throws PlatformException if the statement could not be prepared
 */
protected long buildNextSequence(PersistenceBroker broker, ClassDescriptor cld, String sequenceName)
        throws LookupException, SQLException, PlatformException
{
    CallableStatement cs = null;
    try
    {
        Connection con = broker.serviceConnectionManager().getConnection();
        cs = getPlatform().prepareNextValProcedureStatement(con, PROCEDURE_NAME, sequenceName);
        cs.executeUpdate();
        // the procedure returns the new value through its first parameter
        return cs.getLong(1);
    }
    finally
    {
        // always close the statement; a failure on close is deliberately ignored
        try
        {
            if (cs != null)
                cs.close();
        }
        catch (SQLException ignore)
        {
            // ignore it
        }
    }
}
/**
 * Creates a new row for the named sequence in the sequence table.
 *
 * @param broker broker used to create the statement
 * @param field field the sequence belongs to (used for statement creation and error reporting)
 * @param sequenceName name of the new sequence entry
 * @param maxKey initial key value to seed the sequence with
 * @throws Exception if the insert fails (wrapped as a SequenceManagerException)
 */
protected void createSequence(PersistenceBroker broker, FieldDescriptor field,
                              String sequenceName, long maxKey) throws Exception
{
    Statement stmt = null;
    try
    {
        stmt = broker.serviceStatementManager().getGenericStatement(field.getClassDescriptor(), Query.NOT_SCROLLABLE);
        stmt.execute(sp_createSequenceQuery(sequenceName, maxKey));
    }
    catch (Exception e)
    {
        log.error(e);
        throw new SequenceManagerException("Could not create new row in "+SEQ_TABLE_NAME+" table - TABLENAME=" +
                sequenceName + " field=" + field.getColumnName(), e);
    }
    finally
    {
        // always close the statement; a failure on close is logged and ignored
        try
        {
            if (stmt != null) stmt.close();
        }
        catch (SQLException sqle)
        {
            if(log.isDebugEnabled())
                log.debug("Threw SQLException while in createSequence and closing stmt", sqle);
            // ignore it
        }
    }
}
/**
 * Initialize the Foundation Logging library: locate the log4j configuration
 * resource, install the Foundation repository selector, configure log4j,
 * start watching the property file for changes, and set up marker support,
 * sniffing loggers and JUL bridging.
 *
 * @throws FoundationIOException if no configuration resource can be found
 */
static void init() {// NOPMD
	determineIfNTEventLogIsSupported();
	URL resource = null;
	// configuration lookup order: system property (URL, then classpath), then default file
	final String configurationOptionStr = OptionConverter.getSystemProperty(DEFAULT_CONFIGURATION_KEY, null);
	if (configurationOptionStr != null) {
		try {
			resource = new URL(configurationOptionStr);
		} catch (MalformedURLException ex) {
			// so, resource is not a URL:
			// attempt to get the resource from the class path
			resource = Loader.getResource(configurationOptionStr);
		}
	}
	if (resource == null) {
		resource = Loader.getResource(DEFAULT_CONFIGURATION_FILE); // NOPMD
	}
	if (resource == null) {
		System.err.println("[FoundationLogger] Can not find resource: " + DEFAULT_CONFIGURATION_FILE); // NOPMD
		throw new FoundationIOException("Can not find resource: " + DEFAULT_CONFIGURATION_FILE); // NOPMD
	}
	// update the log manager to use the Foundation repository.
	final RepositorySelector foundationRepositorySelector = new FoundationRepositorySelector(FoundationLogFactory.foundationLogHierarchy);
	LogManager.setRepositorySelector(foundationRepositorySelector, null);
	// set logger to info so we always want to see these logs even if root
	// is set to ERROR.
	final Logger logger = getLogger(FoundationLogger.class);
	final String logPropFile = resource.getPath();
	log4jConfigProps = getLogProperties(resource);
	// select and configure again so the loggers are created with the right
	// level after the repository selector was updated.
	OptionConverter.selectAndConfigure(resource, null, FoundationLogFactory.foundationLogHierarchy);
	// start watching for property changes
	setUpPropFileReloading(logger, logPropFile, log4jConfigProps);
	// add syslog appender or windows event viewer appender
	// setupOSSystemLog(logger, log4jConfigProps);
	// parseMarkerPatterns(log4jConfigProps);
	// parseMarkerPurePattern(log4jConfigProps);
	// udpateMarkerStructuredLogOverrideMap(logger);
	AbstractFoundationLoggingMarker.init();
	updateSniffingLoggersLevel(logger);
	setupJULSupport(resource);
}
private static void updateSniffingLoggersLevel(Logger logger) {
InputStream settingIS = FoundationLogger.class
.getResourceAsStream("/sniffingLogger.xml");
if (settingIS == null) {
logger.debug("file sniffingLogger.xml not found in classpath");
} else {
try {
SAXBuilder builder = new SAXBuilder();
Document document = builder.build(settingIS);
settingIS.close();
Element rootElement = document.getRootElement();
List<Element> sniffingloggers = rootElement
.getChildren("sniffingLogger");
for (Element sniffinglogger : sniffingloggers) {
String loggerName = sniffinglogger.getAttributeValue("id");
Logger.getLogger(loggerName).setLevel(Level.TRACE);
}
} catch (Exception e) {
logger.error(
"cannot load the sniffing logger configuration file. error is: "
+ e, e);
throw new IllegalArgumentException(
"Problem parsing sniffingLogger.xml", e);
}
}
} | The sniffing Loggers are some special Loggers, whose level will be set to TRACE forcedly.
@param logger |
private static void setUpPropFileReloading(final Logger logger, final String logPropFile, final Properties properties) {
int fileReloadDelay = FILE_RELOAD_DELAY;
if (properties.containsKey(Foundation_FILE_RELOAD_DELAY)) {
final String fileReloadDelayStr = properties.getProperty(Foundation_FILE_RELOAD_DELAY);
try {
fileReloadDelay = Integer.parseInt(fileReloadDelayStr);
} catch (NumberFormatException e) {
logger.error("Can not format to integer the property: " + Foundation_FILE_RELOAD_DELAY + ". using default of: " + FILE_RELOAD_DELAY);
}
}
PropertyConfigurator.configureAndWatch(logPropFile, fileReloadDelay);
} | } |
/**
 * This method creates a new logging event and logs the event without
 * further checks.
 *
 * @param marker marker attached to the event
 * @param fqcn fully qualified class name of the caller, used by log4j to compute the log location
 * @param level priority of the event
 * @param message message object
 * @param t optional throwable, may be null
 */
protected void forcedLog(Marker marker, String fqcn, Priority level, Object message, Throwable t) {
	callAppenders(new FoundationLof4jLoggingEvent(marker, fqcn, this, level, message, t));
}
public void cache(Identity oid, Object obj)
{
try
{
jcsCache.put(oid.toString(), obj);
}
catch (CacheException e)
{
throw new RuntimeCacheException(e);
}
} | makes object obj persistent to the Objectcache under the key oid. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.