public SqlStatement getPreparedDeleteStatement(ClassDescriptor cld) { SqlForClass sfc = getSqlForClass(cld); SqlStatement sql = sfc.getDeleteSql(); if(sql == null) { ProcedureDescriptor pd = cld.getDeleteProcedure(); if(pd == null) { sql = new SqlDeleteByPkStatement(cld, logger); } else { sql = new SqlProcedureStatement(pd, logger); } // set the sql string sfc.setDeleteSql(sql); if(logger.isDebugEnabled()) { logger.debug("SQL:" + sql.getStatement()); } } return sql; }
generate a prepared DELETE-Statement for the Class described by cld. @param cld the ClassDescriptor
public SqlStatement getPreparedInsertStatement(ClassDescriptor cld) { SqlStatement sql; SqlForClass sfc = getSqlForClass(cld); sql = sfc.getInsertSql(); if(sql == null) { ProcedureDescriptor pd = cld.getInsertProcedure(); if(pd == null) { sql = new SqlInsertStatement(cld, logger); } else { sql = new SqlProcedureStatement(pd, logger); } // set the sql string sfc.setInsertSql(sql); if(logger.isDebugEnabled()) { logger.debug("SQL:" + sql.getStatement()); } } return sql; }
generate a prepared INSERT-Statement for the Class described by cld. @param cld the ClassDescriptor
public SelectStatement getPreparedSelectByPkStatement(ClassDescriptor cld) { SelectStatement sql; SqlForClass sfc = getSqlForClass(cld); sql = sfc.getSelectByPKSql(); if(sql == null) { sql = new SqlSelectByPkStatement(m_platform, cld, logger); // set the sql string sfc.setSelectByPKSql(sql); if(logger.isDebugEnabled()) { logger.debug("SQL:" + sql.getStatement()); } } return sql; }
generate a prepared SELECT-Statement for the Class described by cld @param cld the ClassDescriptor
public SelectStatement getPreparedSelectStatement(Query query, ClassDescriptor cld) { SelectStatement sql = new SqlSelectStatement(m_platform, cld, query, logger); if (logger.isDebugEnabled()) { logger.debug("SQL:" + sql.getStatement()); } return sql; }
generate a select-Statement according to query @param query the Query @param cld the ClassDescriptor
public SqlStatement getPreparedUpdateStatement(ClassDescriptor cld) { SqlForClass sfc = getSqlForClass(cld); SqlStatement result = sfc.getUpdateSql(); if(result == null) { ProcedureDescriptor pd = cld.getUpdateProcedure(); if(pd == null) { result = new SqlUpdateStatement(cld, logger); } else { result = new SqlProcedureStatement(pd, logger); } // set the sql string sfc.setUpdateSql(result); if(logger.isDebugEnabled()) { logger.debug("SQL:" + result.getStatement()); } } return result; }
generate a prepared UPDATE-Statement for the Class described by cld @param cld the ClassDescriptor
public String getInsertMNStatement(String table, String[] pkColumns1, String[] pkColumns2) { SqlStatement sql; String result; String[] cols = new String[pkColumns1.length + pkColumns2.length]; System.arraycopy(pkColumns1, 0, cols, 0, pkColumns1.length); System.arraycopy(pkColumns2, 0, cols, pkColumns1.length, pkColumns2.length); sql = new SqlInsertMNStatement(table, cols, logger); result = sql.getStatement(); if (logger.isDebugEnabled()) { logger.debug("SQL:" + result); } return result; }
generate an INSERT-Statement for M:N indirection table @param table @param pkColumns1 @param pkColumns2
public String getSelectMNStatement(String table, String[] selectColumns, String[] columns) { SqlStatement sql; String result; sql = new SqlSelectMNStatement(table, selectColumns, columns, logger); result = sql.getStatement(); if (logger.isDebugEnabled()) { logger.debug("SQL:" + result); } return result; }
generate a SELECT-Statement for M:N indirection table @param table the indirection table @param selectColumns selected columns @param columns for where
public String getDeleteMNStatement(String table, String[] pkColumns1, String[] pkColumns2) { SqlStatement sql; String result; String[] cols; if (pkColumns2 == null) { cols = pkColumns1; } else { cols = new String[pkColumns1.length + pkColumns2.length]; System.arraycopy(pkColumns1, 0, cols, 0, pkColumns1.length); System.arraycopy(pkColumns2, 0, cols, pkColumns1.length, pkColumns2.length); } sql = new SqlDeleteMNStatement(table, cols, logger); result = sql.getStatement(); if (logger.isDebugEnabled()) { logger.debug("SQL:" + result); } return result; }
generate a DELETE-Statement for M:N indirection table @param table @param pkColumns1 @param pkColumns2
public String asSQLStatement(Criteria crit, ClassDescriptor cld) { Enumeration e = crit.getElements(); StringBuffer statement = new StringBuffer(); while (e.hasMoreElements()) { Object o = e.nextElement(); if (o instanceof Criteria) { String addAtStart; String addAtEnd; Criteria pc = (Criteria) o; // need to add parenthesises? if (pc.isEmbraced()) { addAtStart = " ("; addAtEnd = ") "; } else { addAtStart = ""; addAtEnd = ""; } switch (pc.getType()) { case (Criteria.OR) : { statement.append(" OR ").append(addAtStart); statement.append(asSQLStatement(pc, cld)); statement.append(addAtEnd); break; } case (Criteria.AND) : { statement.insert(0, "( "); statement.append(") "); statement.append(" AND ").append(addAtStart); statement.append(asSQLStatement(pc, cld)); statement.append(addAtEnd); break; } } } else { SelectionCriteria c = (SelectionCriteria) o; if (statement.length() == 0) { statement.append(asSQLClause(c, cld)); } else { statement.insert(0, "("); statement.append(") "); statement.append(" AND "); statement.append(asSQLClause(c, cld)); } } } // while if (statement.length() == 0) { return null; } return statement.toString(); }
Answer the SQL statement for the given selection criteria. @param crit Selection criteria @param cld the ClassDescriptor (26/06/99: changed statement to a StringBuffer for efficiency)
protected String asSQLClause(SelectionCriteria c, ClassDescriptor cld) { if (c instanceof FieldCriteria) return toSQLClause((FieldCriteria) c, cld); if (c instanceof NullCriteria) return toSQLClause((NullCriteria) c); if (c instanceof BetweenCriteria) return toSQLClause((BetweenCriteria) c, cld); if (c instanceof InCriteria) return toSQLClause((InCriteria) c); if (c instanceof SqlCriteria) return toSQLClause((SqlCriteria) c); if (c instanceof ExistsCriteria) return toSQLClause((ExistsCriteria) c, cld); return toSQLClause(c, cld); }
Answer the SQL-Clause for a SelectionCriteria @param c SelectionCriteria @param cld ClassDescriptor
private String toSQLClause(NullCriteria c) { String colName = (String)c.getAttribute(); return colName + c.getClause(); }
Answer the SQL-Clause for a NullCriteria @param c NullCriteria
private String toSQLClause(FieldCriteria c, ClassDescriptor cld) { String colName = toSqlClause(c.getAttribute(), cld); return colName + c.getClause() + c.getValue(); }
Answer the SQL-Clause for a FieldCriteria @param c FieldCriteria @param cld ClassDescriptor
private String toSQLClause(InCriteria c) { StringBuffer buf = new StringBuffer(); Collection values = (Collection) c.getValue(); int size = values.size(); buf.append(c.getAttribute()); buf.append(c.getClause()); buf.append("("); for (int i = 0; i < size - 1; i++) { buf.append("?,"); } buf.append("?)"); return buf.toString(); }
Answer the SQL-Clause for an InCriteria @param c InCriteria
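A minimal, self-contained sketch of the placeholder generation used above (a hypothetical helper, not part of OJB; like the original it assumes a non-empty value collection):

import java.util.Arrays;
import java.util.Collection;

public class InClauseSketch {
    // Builds "col IN (?,?,...,?)" with one placeholder per value,
    // mirroring the loop in toSQLClause(InCriteria).
    static String inClause(String column, Collection<?> values) {
        StringBuilder buf = new StringBuilder(column).append(" IN (");
        for (int i = 0; i < values.size() - 1; i++) {
            buf.append("?,");
        }
        buf.append("?)");
        return buf.toString();
    }

    public static void main(String[] args) {
        System.out.println(inClause("id", Arrays.asList(1, 2, 3))); // prints: id IN (?,?,?)
    }
}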
private String toSQLClause(SelectionCriteria c, ClassDescriptor cld) { String colName = toSqlClause(c.getAttribute(), cld); return colName + c.getClause() + " ? "; }
Answer the SQL-Clause for a SelectionCriteria @param c SelectionCriteria @param cld ClassDescriptor
private String toSQLClause(ExistsCriteria c, ClassDescriptor cld) { StringBuffer buf = new StringBuffer(); Query subQuery = (Query) c.getValue(); buf.append(c.getClause()); buf.append(" ("); // If it's a proper call if (cld != null) { buf.append( getPreparedSelectStatement( subQuery, cld.getRepository().getDescriptorFor(subQuery.getSearchClass()))); // Otherwise it's most likely a call to toString() } else { buf.append(subQuery); } buf.append(")"); return buf.toString(); }
Answer the SQL-Clause for an ExistsCriteria @param c ExistsCriteria @param cld ClassDescriptor
public SqlStatement getPreparedDeleteStatement(Query query, ClassDescriptor cld) { return new SqlDeleteByQuery(m_platform, cld, query, logger); }
generate a prepared DELETE-Statement according to query @param query the Query @param cld the ClassDescriptor
protected SqlForClass getSqlForClass(ClassDescriptor cld) { SqlForClass result = (SqlForClass) sqlForClass.get(cld); if(result == null) { result = newInstanceSqlForClass(); sqlForClass.put(cld, result); } return result; }
Returns the {@link SqlForClass} instance for the given class descriptor. @param cld The class descriptor. @return The {@link SqlForClass}.
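The prepared-statement factory methods above all go through this lazy per-class cache. A minimal sketch of the same lookup-or-create pattern using plain JDK collections (modern Java and illustrative names, not OJB API):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

public class LazyCacheSketch {
    private static final Map<String, String> CACHE = new HashMap<>();

    // Returns the cached value for the key, creating and storing it on first access,
    // just as getSqlForClass() does for SqlForClass instances.
    static String getOrCreate(String key, Supplier<String> factory) {
        String value = CACHE.get(key);
        if (value == null) {
            value = factory.get();
            CACHE.put(key, value);
        }
        return value;
    }

    public static void main(String[] args) {
        System.out.println(getOrCreate("com.acme.Dog", () -> "DELETE FROM DOG WHERE ID = ?")); // built once
        System.out.println(getOrCreate("com.acme.Dog", () -> "never used"));                   // served from the cache
    }
}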
private void lockReachableObjects(Identity oid, Object userObject, Object cacheObject, int lock, Stack stack, boolean buildingObject) throws LockingException { ContextEntry entry; boolean onlyDependants = !_tx.getKit().isImplicitLockingUsed(); ClassDescriptor mif = _pb.getClassDescriptor(userObject.getClass()); // N:1 relations Iterator iter = mif.getObjectReferenceDescriptors().iterator(); ObjectReferenceDescriptor rds = null; PersistentField f; Object relUserObj; Identity relOid; boolean isDependent; while (iter.hasNext()) { rds = (ObjectReferenceDescriptor) iter.next(); isDependent = rds.getOtmDependent(); if (onlyDependants && !isDependent) { continue; } f = rds.getPersistentField(); relUserObj = f.get(userObject); if (relUserObj != null) { relOid = new Identity(relUserObj, _pb); entry = (ContextEntry) _objects.get(relOid); if ((entry == null) || (entry.userObject != relUserObj)) { entry = insertInternal(relOid, relUserObj, lock, isDependent, oid, stack); if (buildingObject && (entry != null)) { f.set(userObject, entry.userObject); f.set(cacheObject, entry.cacheObject); } } } } // 1:N relations Iterator collections = mif.getCollectionDescriptors().iterator(); CollectionDescriptor cds; Object userCol; Iterator userColIterator; Class type; ArrayList newUserCol = null; ArrayList newCacheCol = null; while (collections.hasNext()) { cds = (CollectionDescriptor) collections.next(); f = cds.getPersistentField(); type = f.getType(); isDependent = cds.getOtmDependent(); if (onlyDependants && !isDependent) { continue; } userCol = f.get(userObject); if (userCol != null) { if ((userCol instanceof CollectionProxyDefaultImpl) && !((CollectionProxyDefaultImpl) userCol).isLoaded()) { continue; } if (buildingObject) { newUserCol = new ArrayList(); newCacheCol = new ArrayList(); } if (Collection.class.isAssignableFrom(type)) { userColIterator = ((Collection) userCol).iterator(); } else if (type.isArray()) { userColIterator = new ArrayIterator(userCol); } else { throw new OJBRuntimeException( userCol.getClass() + " can not be managed by OJB OTM, use Array or Collection instead !"); } while (userColIterator.hasNext()) { relUserObj = userColIterator.next(); relOid = new Identity(relUserObj, _pb); entry = (ContextEntry) _objects.get(relOid); if ((entry == null) || (entry.userObject != relUserObj)) { entry = insertInternal(relOid, relUserObj, lock, isDependent, null, stack); } if (buildingObject && (entry != null)) { newUserCol.add(entry.userObject); newCacheCol.add(entry.cacheObject); } } if (buildingObject) { setCollectionField(userObject, f, newUserCol); setCollectionField(cacheObject, f, newCacheCol); } } } }
Lock all objects reachable via 1:N and N:1 relations. @param lock The lock type to use
private boolean isEqual(Object fld1, Object fld2) { if (fld1 == null || fld2 == null) { return (fld1 == fld2); } else if ((fld1 instanceof BigDecimal) && (fld2 instanceof BigDecimal)) { return (((BigDecimal) fld1).compareTo((BigDecimal) fld2) == 0); } else if ((fld1 instanceof Date) && (fld2 instanceof Date)) { return (((Date) fld1).getTime() == ((Date) fld2).getTime()); } else { return fld1.equals(fld2); } }
This method compares simple field values; the "tricks" are that BigDecimals are compared via compareTo() (so values differing only in scale compare equal) and Dates are compared via getTime().
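A small self-contained illustration of both special cases (the java.sql.Timestamp asymmetry is a likely reason Dates are compared via getTime()):

import java.math.BigDecimal;
import java.util.Date;

public class IsEqualDemo {
    public static void main(String[] args) {
        BigDecimal a = new BigDecimal("1.0");
        BigDecimal b = new BigDecimal("1.00");
        System.out.println(a.equals(b));          // false: equals() also compares scale
        System.out.println(a.compareTo(b) == 0);  // true: numerically equal, what isEqual() checks

        long now = System.currentTimeMillis();
        Date d1 = new Date(now);
        java.sql.Timestamp d2 = new java.sql.Timestamp(now);
        System.out.println(d2.equals(d1));                 // false: Timestamp.equals() rejects plain Dates
        System.out.println(d1.getTime() == d2.getTime());  // true: what isEqual() checks
    }
}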
private boolean hasBidirectionalAssociation(Class clazz) { ClassDescriptor cdesc; Collection refs; boolean hasBidirAssc; if (_withoutBidirAssc.contains(clazz)) { return false; } if (_withBidirAssc.contains(clazz)) { return true; } // first time we meet this class, let's look at metadata cdesc = _pb.getClassDescriptor(clazz); refs = cdesc.getObjectReferenceDescriptors(); hasBidirAssc = false; REFS_CYCLE: for (Iterator it = refs.iterator(); it.hasNext(); ) { ObjectReferenceDescriptor ord; ClassDescriptor relCDesc; Collection relRefs; ord = (ObjectReferenceDescriptor) it.next(); relCDesc = _pb.getClassDescriptor(ord.getItemClass()); relRefs = relCDesc.getObjectReferenceDescriptors(); for (Iterator relIt = relRefs.iterator(); relIt.hasNext(); ) { ObjectReferenceDescriptor relOrd; relOrd = (ObjectReferenceDescriptor) relIt.next(); if (relOrd.getItemClass().equals(clazz)) { hasBidirAssc = true; break REFS_CYCLE; } } } if (hasBidirAssc) { _withBidirAssc.add(clazz); } else { _withoutBidirAssc.add(clazz); } return hasBidirAssc; }
Does the given class have a bidirectional association with some other class?
private ArrayList handleDependentReferences(Identity oid, Object userObject, Object[] origFields, Object[] newFields, Object[] newRefs) throws LockingException { ClassDescriptor mif = _pb.getClassDescriptor(userObject.getClass()); FieldDescriptor[] fieldDescs = mif.getFieldDescriptions(); Collection refDescs = mif.getObjectReferenceDescriptors(); int count = 1 + fieldDescs.length; ArrayList newObjects = new ArrayList(); int countRefs = 0; for (Iterator it = refDescs.iterator(); it.hasNext(); count++, countRefs++) { ObjectReferenceDescriptor rds = (ObjectReferenceDescriptor) it.next(); Identity origOid = (origFields == null ? null : (Identity) origFields[count]); Identity newOid = (Identity) newFields[count]; if (rds.getOtmDependent()) { if ((origOid == null) && (newOid != null)) { ContextEntry entry = (ContextEntry) _objects.get(newOid); if (entry == null) { Object relObj = newRefs[countRefs]; insertInternal(newOid, relObj, LockType.WRITE_LOCK, true, oid, new Stack()); newObjects.add(newOid); } } else if ((origOid != null) && ((newOid == null) || !newOid.equals(origOid))) { markDelete(origOid, oid, false); } } } return newObjects; }
Mark for creation all newly introduced dependent references. Mark for deletion all nullified dependent references. @return the list of created objects
private ArrayList handleDependentCollections(Identity oid, Object obj, Object[] origCollections, Object[] newCollections, Object[] newCollectionsOfObjects) throws LockingException { ClassDescriptor mif = _pb.getClassDescriptor(obj.getClass()); Collection colDescs = mif.getCollectionDescriptors(); ArrayList newObjects = new ArrayList(); int count = 0; for (Iterator it = colDescs.iterator(); it.hasNext(); count++) { CollectionDescriptor cds = (CollectionDescriptor) it.next(); if (cds.getOtmDependent()) { ArrayList origList = (origCollections == null ? null : (ArrayList) origCollections[count]); ArrayList newList = (ArrayList) newCollections[count]; if (origList != null) { for (Iterator it2 = origList.iterator(); it2.hasNext(); ) { Identity origOid = (Identity) it2.next(); if ((newList == null) || !newList.contains(origOid)) { markDelete(origOid, oid, true); } } } if (newList != null) { int countElem = 0; for (Iterator it2 = newList.iterator(); it2.hasNext(); countElem++) { Identity newOid = (Identity) it2.next(); if ((origList == null) || !origList.contains(newOid)) { ContextEntry entry = (ContextEntry) _objects.get(newOid); if (entry == null) { ArrayList relCol = (ArrayList) newCollectionsOfObjects[count]; Object relObj = relCol.get(countElem); insertInternal(newOid, relObj, LockType.WRITE_LOCK, true, null, new Stack()); newObjects.add(newOid); } } } } } } return newObjects; }
Mark for creation all objects that were included into dependent collections. Mark for deletion all objects that were excluded from dependent collections.
private int doCascadeDelete(Identity oid, Object obj) { ClassDescriptor mif = _pb.getClassDescriptor(ProxyHelper.getRealClass(obj)); Collection refDescs = mif.getObjectReferenceDescriptors(); Collection colDescs = mif.getCollectionDescriptors(); int countCascadeDeleted = 0; for (Iterator it = refDescs.iterator(); it.hasNext(); ) { ObjectReferenceDescriptor rds = (ObjectReferenceDescriptor) it.next(); if (rds.getOtmDependent()) { PersistentField f = rds.getPersistentField(); Object relObj = f.get(obj); if (relObj != null) { countCascadeDeleted += markDelete(new Identity(relObj, _pb), oid, false); } } } for (Iterator it = colDescs.iterator(); it.hasNext(); ) { CollectionDescriptor cds = (CollectionDescriptor) it.next(); if (cds.getOtmDependent()) { PersistentField f = cds.getPersistentField(); Class type = f.getType(); Object col = f.get(obj); if (col != null) { Iterator colIterator; if (Collection.class.isAssignableFrom(type)) { colIterator = ((Collection) col).iterator(); } else if (type.isArray()) { colIterator = new ArrayIterator(col); } else { continue; } while (colIterator.hasNext()) { countCascadeDeleted += markDelete(new Identity(colIterator.next(), _pb), oid, true); } } } } return countCascadeDeleted; }
Mark for deletion all dependent objects (via references and collections). @return the number of deleted objects
public void body() { MockConfiguration configuration = (MockConfiguration) ((IMessageEvent) getReason()) .getParameter(SFipa.CONTENT).getValue(); getBeliefbase().getBelief("agent_behaviour").setFact( configuration.getBehaviour()); // CREATING NEW DF_SERVICE NAME IServiceProvider iserviceprovider = getScope().getServiceContainer(); DirectoryFacilitatorService idfservice = new DirectoryFacilitatorService( iserviceprovider); DFServiceDescription dfaux = (DFServiceDescription) idfservice .createDFServiceDescription(configuration.getDFservice(), configuration.getDFservice(), "Universidad Politécnica de Madrid"); IDFComponentDescription df_description = idfservice .createDFComponentDescription(null, dfaux); waitFor(WAIT_FOR_TIME); IGoal keep = createGoal("dfcap.df_keep_registered"); keep.getParameter("description").setValue(df_description); keep.getParameter("leasetime").setValue(new Long(LEASE_TIME)); dispatchSubgoalAndWait(keep); Logger logger = Logger.getLogger(this.getClass().toString()); logger.info(configuration.getDFservice() + " registered"); }
The plan body.
public void refresh(String[] configLocations) throws GeomajasException { try { setConfigLocations(configLocations); refresh(); } catch (Exception e) { throw new GeomajasException(e, ExceptionCode.REFRESH_CONFIGURATION_FAILED); } }
Refresh this context with the specified configuration locations. @param configLocations list of configuration resources (see implementation for specifics) @throws GeomajasException indicates a problem with the new location files (see cause)
public void rollback() throws GeomajasException { try { setConfigLocations(previousConfigLocations); refresh(); } catch (Exception e) { throw new GeomajasException(e, ExceptionCode.REFRESH_CONFIGURATION_FAILED); } }
Roll back to the previous configuration. @throws GeomajasException indicates an unlikely problem with the rollback (see cause)
public void configure(Configuration config) throws ConfigurationException { LoggingConfiguration lc = (LoggingConfiguration) config; String levelName = lc.getLogLevel(name); setLevel(levelName); }
@see org.apache.ojb.broker.util.configuration.Configurable#configure(Configuration)
public String getUrl(){ final StringBuilder sb = new StringBuilder(); sb.append("http://"); sb.append(getHttpConfiguration().getBindHost().get()); sb.append(":"); sb.append(getHttpConfiguration().getPort()); return sb.toString(); }
Returns the complete Grapes root URL @return String
public void addRow(final String... cells){ final Row row = new Row((Object[]) cells); if(!rows.contains(row)){ rows.add(row); } }
Add a row to the table if it does not already exist @param cells String...
public static String buildSequenceName(PersistenceBroker brokerForClass, FieldDescriptor field, boolean autoNaming) throws SequenceManagerException { String seqName = field.getSequenceName(); /* if we found a sequence name bound to the field descriptor via 'sequence-name' attribute we use that name */ if (seqName != null && seqName.trim().length() != 0) { return seqName; } else if (!autoNaming) { /* arminw: we don't find a sequence name and we should not automatic build one, thus we throw an exception */ throw new SequenceManagerException("Could not find sequence-name for field '" + field + "' of class '" + field.getClassDescriptor().getClassNameOfObject() + "', property 'autoNaming' in sequence-manager element in repository was '" + autoNaming + "'. Set autoNaming true in sequence-descriptor or define a " + " sequence-name in field-descriptor."); } ClassDescriptor cldTargetClass = field.getClassDescriptor(); /* check for inheritance on multiple table */ cldTargetClass = findInheritanceRoot(cldTargetClass); Class topLevel = brokerForClass.getTopLevelClass(cldTargetClass.getClassOfObject()); ClassDescriptor cldTopLevel = brokerForClass.getClassDescriptor(topLevel); /** * * MBAIRD * Should not use classname for the sequenceName as we will end up * re-using sequence numbers for classes mapped to the same table. * Instead, make the FullTableName the discriminator since it will * always be unique for that table, and hence that class. * * arminw: * If the found top-level class has extents, we take the first * found extent class table name as sequence name. Else we take * the table name of the 'targetClass'. * */ if (cldTopLevel.isExtent()) { /* arminw: this is a little critical, because we do not know if the extent classes will change by and by and the first found extent class may change, thus the returned table name could change! But I don't know a way to resolve this problem. I put a comment to the sequence manager docs TODO: find better solution */ // seqName = brokerForClass.getClassDescriptor(((Class) cldTopLevel.getExtentClasses(). // get(0))).getFullTableName(); seqName = firstFoundTableName(brokerForClass, cldTopLevel); } else { seqName = cldTargetClass.getFullTableName(); } // log.info("* targetClass: "+targetClass +", toplevel: "+topLevel+ " seqName: "+seqName); if (seqName == null) { seqName = SEQ_UNASSIGNED; log.warn("Too complex structure, can not assign automatic sequence name for field '" + field.getAttributeName() + "' in class '" + field.getClassDescriptor().getClassNameOfObject() + "'. Use a default sequence name instead: " + (SEQ_PREFIX + seqName)); } // System.out.println("* targetClass: " + cldTargetClass.getClassNameOfObject() + ", toplevel: " + topLevel + " seqName: " + seqName); seqName = SEQ_PREFIX + seqName; if (log.isDebugEnabled()) log.debug("Set automatic generated sequence-name for field '" + field.getAttributeName() + "' in class '" + field.getClassDescriptor().getClassNameOfObject() + "'."); field.setSequenceName(seqName); return seqName; }
Returns a unique sequence name (unique across all extents). <br/> If we find a non-null value for the 'sequence-name' attribute in the field descriptor, we use that value as the sequence name. <br/> Else, if the top-level class of the target class has extents, we take the table name of the first found extent class as the sequence name. <br/> Else we take the table name of the target class. <p> If the method argument 'autoNaming' is true, the generated sequence name will be set in the given field descriptor using {@link org.apache.ojb.broker.metadata.FieldDescriptor#setSequenceName} to speed up sequence name lookup in future calls. </p> @param brokerForClass currently used PB instance @param field target field @param autoNaming if 'false', no sequence name is auto-generated and an exception is thrown if none is defined in the field descriptor.
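A minimal sketch of the resulting naming convention, assuming the SEQ_PREFIX constant is "SEQ_" (an assumption; the real method works on OJB field and class descriptors and resolves the table name through extents):

public class SequenceNameSketch {
    private static final String SEQ_PREFIX = "SEQ_"; // assumed value of the OJB prefix constant

    // Mirrors the fallback logic: an explicit sequence-name wins,
    // otherwise the (extent or target) table name is prefixed.
    static String buildSequenceName(String explicitSequenceName, String tableName) {
        if (explicitSequenceName != null && explicitSequenceName.trim().length() != 0) {
            return explicitSequenceName;
        }
        return SEQ_PREFIX + tableName;
    }

    public static void main(String[] args) {
        System.out.println(buildSequenceName(null, "PERSON"));     // SEQ_PERSON
        System.out.println(buildSequenceName("MY_SEQ", "PERSON")); // MY_SEQ
    }
}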
private static ClassDescriptor findInheritanceRoot(ClassDescriptor cld) { ClassDescriptor result = cld; if(cld.getSuperClassDescriptor() != null) { result = findInheritanceRoot(cld.getSuperClassDescriptor()); } return result; }
Returns the root {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the inheritance hierarchy of the given descriptor, or the descriptor itself if no inheritance on multiple tables is used.
private static String firstFoundTableName(PersistenceBroker brokerForClass, ClassDescriptor cld) { String name = null; if (!cld.isInterface() && cld.getFullTableName() != null) { return cld.getFullTableName(); } if (cld.isExtent()) { Collection extentClasses = cld.getExtentClasses(); for (Iterator iterator = extentClasses.iterator(); iterator.hasNext();) { name = firstFoundTableName(brokerForClass, brokerForClass.getClassDescriptor((Class) iterator.next())); // System.out.println("## " + cld.getClassNameOfObject()+" - name: "+name); if (name != null) break; } } return name; }
Try to find the first non-null table name for the given class descriptor. If cld has extent classes, all of these descriptors are searched for the first non-null table name.
public static long getMaxForExtent(PersistenceBroker brokerForClass, FieldDescriptor field) throws PersistenceBrokerException { if (field == null) { log.error("Given FieldDescriptor was null, could not detect max value across all extents"); return 0; // throw new PersistenceBrokerException("Given FieldDescriptor was null"); } // first lookup top-level class Class topLevel = brokerForClass.getTopLevelClass(field.getClassDescriptor().getClassOfObject()); return getMaxId(brokerForClass, topLevel, field); }
Lookup all tables associated with the given class (search all extent classes) to find the current maximum value for the given field. <br><b>Note:</b> Only works for <code>long</code> autoincrement fields. @param brokerForClass persistence broker instance matching the database of the given field/class @param field the target field
public static long getMaxId(PersistenceBroker brokerForClass, Class topLevel, FieldDescriptor original) throws PersistenceBrokerException { long max = 0; long tmp; ClassDescriptor cld = brokerForClass.getClassDescriptor(topLevel); // if class is not an interface / not abstract we have to search its directly mapped table if (!cld.isInterface() && !cld.isAbstract()) { tmp = getMaxIdForClass(brokerForClass, cld, original); if (tmp > max) { max = tmp; } } // if class is an extent we have to search through its subclasses if (cld.isExtent()) { Vector extentClasses = cld.getExtentClasses(); for (int i = 0; i < extentClasses.size(); i++) { Class extentClass = (Class) extentClasses.get(i); if (cld.getClassOfObject().equals(extentClass)) { throw new PersistenceBrokerException("Circular extent in " + extentClass + ", please check the repository"); } else { // fix by Mark Rowell // Call recursive tmp = getMaxId(brokerForClass, extentClass, original); } if (tmp > max) { max = tmp; } } } return max; }
Search down all extent classes and return max of all found PK values.
public static long getMaxIdForClass( PersistenceBroker brokerForClass, ClassDescriptor cldForOriginalOrExtent, FieldDescriptor original) throws PersistenceBrokerException { FieldDescriptor field = null; if (!original.getClassDescriptor().equals(cldForOriginalOrExtent)) { // check if extent match not the same table if (!original.getClassDescriptor().getFullTableName().equals( cldForOriginalOrExtent.getFullTableName())) { // we have to look for id's in extent class table field = cldForOriginalOrExtent.getFieldDescriptorByName(original.getAttributeName()); } } else { field = original; } if (field == null) { // if null skip this call return 0; } String column = field.getColumnName(); long result = 0; ResultSet rs = null; Statement stmt = null; StatementManagerIF sm = brokerForClass.serviceStatementManager(); String table = cldForOriginalOrExtent.getFullTableName(); // String column = cld.getFieldDescriptorByName(fieldName).getColumnName(); String sql = SM_SELECT_MAX + column + SM_FROM + table; try { //lookup max id for the current class stmt = sm.getGenericStatement(cldForOriginalOrExtent, Query.NOT_SCROLLABLE); rs = stmt.executeQuery(sql); rs.next(); result = rs.getLong(1); } catch (Exception e) { log.warn("Cannot lookup max value from table " + table + " for column " + column + ", PB was " + brokerForClass + ", using jdbc-descriptor " + brokerForClass.serviceConnectionManager().getConnectionDescriptor(), e); } finally { try { sm.closeResources(stmt, rs); } catch (Exception ignore) { // ignore it } } return result; }
Look up the current maximum value for a single field in the table the given class descriptor is associated with.
public static Long getSeqStart(Properties prop) { String result = prop.getProperty(PROP_SEQ_START, null); if(result == null) { result = prop.getProperty(PROP_SEQ_START_OLD, null); } if(result != null) { return new Long(Long.parseLong(result)); } else { return null; } }
Database sequence properties helper method. Return sequence <em>start value</em> or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
public static Long getSeqIncrementBy(Properties prop) { String result = prop.getProperty(PROP_SEQ_INCREMENT_BY, null); if(result != null) { return new Long(Long.parseLong(result)); } else { return null; } }
Database sequence properties helper method. Return sequence <em>increment by value</em> or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
public static Long getSeqMaxValue(Properties prop) { String result = prop.getProperty(PROP_SEQ_MAX_VALUE, null); if(result != null) { return new Long(Long.parseLong(result)); } else { return null; } }
Database sequence properties helper method. Return sequence <em>max value</em> or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
public static Long getSeqMinValue(Properties prop) { String result = prop.getProperty(PROP_SEQ_MIN_VALUE, null); if(result != null) { return new Long(Long.parseLong(result)); } else { return null; } }
Database sequence properties helper method. Return sequence <em>min value</em> or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
public static Long getSeqCacheValue(Properties prop) { String result = prop.getProperty(PROP_SEQ_CACHE, null); if(result != null) { return new Long(Long.parseLong(result)); } else { return null; } }
Database sequence properties helper method. Return sequence <em>cache value</em> or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
public static Boolean getSeqCycleValue(Properties prop) { String result = prop.getProperty(PROP_SEQ_CYCLE, null); if(result != null) { return Boolean.valueOf(result); } else { return null; } }
Database sequence properties helper method. Return sequence <em>cycle</em> Boolean or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
public static Boolean getSeqOrderValue(Properties prop) { String result = prop.getProperty(PROP_SEQ_ORDER, null); if(result != null) { return Boolean.valueOf(result); } else { return null; } }
Database sequence properties helper method. Return sequence <em>order</em> Boolean or <em>null</em> if not set. @param prop The {@link java.util.Properties} instance to use. @return The found expression or <em>null</em>.
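All of these helpers follow the same pattern: read an optional value from a Properties instance and return null when it is absent. A minimal sketch with a hypothetical property key (the actual PROP_SEQ_* constant values are defined elsewhere in OJB):

import java.util.Properties;

public class SequencePropertiesSketch {
    // Hypothetical key name, used here only for illustration.
    private static final String PROP_SEQ_START = "seq.start";

    static Long getSeqStart(Properties prop) {
        String result = prop.getProperty(PROP_SEQ_START, null);
        return (result != null) ? Long.valueOf(result) : null;
    }

    public static void main(String[] args) {
        Properties prop = new Properties();
        System.out.println(getSeqStart(prop));   // null: property not set
        prop.setProperty(PROP_SEQ_START, "1000");
        System.out.println(getSeqStart(prop));   // 1000
    }
}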
public static Identity fromByteArray(final byte[] anArray) throws PersistenceBrokerException { // reverse of the serialize() algorithm: // read from byte[] with a ByteArrayInputStream, decompress with // a GZIPInputStream and then deserialize by reading from the ObjectInputStream try { final ByteArrayInputStream bais = new ByteArrayInputStream(anArray); final GZIPInputStream gis = new GZIPInputStream(bais); final ObjectInputStream ois = new ObjectInputStream(gis); final Identity result = (Identity) ois.readObject(); ois.close(); gis.close(); bais.close(); return result; } catch (Exception ex) { throw new PersistenceBrokerException(ex); } }
Factory method that returns an Identity object created from a serialized representation. @param anArray The serialized representation @return The identity @see #serialize @deprecated
public byte[] serialize() throws PersistenceBrokerException { // Identity is serialized and written to an ObjectOutputStream // This ObjectOutputstream is compressed by a GZIPOutputStream // and finally written to a ByteArrayOutputStream. // the resulting byte[] is returned try { final ByteArrayOutputStream bao = new ByteArrayOutputStream(); final GZIPOutputStream gos = new GZIPOutputStream(bao); final ObjectOutputStream oos = new ObjectOutputStream(gos); oos.writeObject(this); oos.close(); gos.close(); bao.close(); return bao.toByteArray(); } catch (Exception ignored) { throw new PersistenceBrokerException(ignored); } }
Return the serialized form of this Identity. @return The serialized representation @see #fromByteArray @deprecated
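A self-contained sketch of the same GZIP-plus-object-stream round trip applied to an arbitrary Serializable value (the real methods operate on Identity instances):

import java.io.*;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class GzipSerializationSketch {
    static byte[] serialize(Serializable obj) throws IOException {
        ByteArrayOutputStream bao = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(new GZIPOutputStream(bao));
        oos.writeObject(obj);
        oos.close(); // closing also finishes the GZIP stream
        return bao.toByteArray();
    }

    static Object deserialize(byte[] bytes) throws IOException, ClassNotFoundException {
        ObjectInputStream ois = new ObjectInputStream(
                new GZIPInputStream(new ByteArrayInputStream(bytes)));
        try {
            return ois.readObject();
        } finally {
            ois.close();
        }
    }

    public static void main(String[] args) throws Exception {
        byte[] bytes = serialize("round-trip me");
        System.out.println(deserialize(bytes)); // prints: round-trip me
    }
}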
protected void checkForPrimaryKeys(final Object realObject) throws ClassNotPersistenceCapableException { // if no PKs are specified OJB can't handle this class ! if (m_pkValues == null || m_pkValues.length == 0) { throw createException("OJB needs at least one primary key attribute for class: ", realObject, null); } // arminw: should never happen // if(m_pkValues[0] instanceof ValueContainer) // throw new OJBRuntimeException("Can't handle pk values of type "+ValueContainer.class.getName()); }
OJB can handle only classes that declare at least one primary key attribute, this method checks this condition. @param realObject The real object to check @throws ClassNotPersistenceCapableException thrown if no primary key is specified for the objects class
public Object createNewInstance(Class[] types, Object[] args) { try { Object result; // create an instance of the target class if (types != null) { result = ClassHelper.newInstance(getClassToServe(), types, args, true); } else { result = ClassHelper.newInstance(getClassToServe(), true); } // if defined in OJB.properties all instances are wrapped by an interceptor result = InterceptorFactory.getInstance().createInterceptorFor(result); return result; } catch (InstantiationException e) { getLogger().error("ConfigurableFactory can't instantiate class " + getClassToServe() + buildArgumentString(types, args), e); throw new PersistenceBrokerException(e); } catch (IllegalAccessException e) { getLogger().error("ConfigurableFactory can't access constructor for class " + getClassToServe() + buildArgumentString(types, args), e); throw new PersistenceBrokerException(e); } catch (Exception e) { getLogger().error("ConfigurableFactory instantiation failed for class " + getClassToServe() + buildArgumentString(types, args), e); throw new PersistenceBrokerException(e); } }
Factory method for creating new instances; the Class to be instantiated is defined by getClassToServe(). @return Object the created instance
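ClassHelper.newInstance and InterceptorFactory are OJB-specific; as a rough sketch, the same reflective construction can be done with plain java.lang.reflect (illustrative, not the OJB implementation):

import java.lang.reflect.Constructor;

public class ReflectiveFactorySketch {
    // Creates an instance of the given class, using the matching constructor
    // when argument types are supplied, otherwise the no-arg constructor.
    static Object newInstance(Class<?> clazz, Class<?>[] types, Object[] args) throws Exception {
        if (types != null) {
            Constructor<?> ctor = clazz.getDeclaredConstructor(types);
            ctor.setAccessible(true);
            return ctor.newInstance(args);
        }
        return clazz.getDeclaredConstructor().newInstance();
    }

    public static void main(String[] args) throws Exception {
        Object sb = newInstance(StringBuilder.class,
                new Class<?>[]{String.class}, new Object[]{"hello"});
        System.out.println(sb); // hello
    }
}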
public Object createNewInstance(Class type, Object arg) { if (type != null) return createNewInstance(new Class[]{type}, new Object[]{arg}); else return createNewInstance((Class[]) null, (Object[]) null); }
Factory method for creating new instances; the Class to be instantiated is defined by getClassToServe(). @return Object the created instance
public DbOrganization getDbOrganization(final Organization organization) { final DbOrganization dbOrganization = new DbOrganization(); dbOrganization.setName(organization.getName()); dbOrganization.getCorporateGroupIdPrefixes().addAll(organization.getCorporateGroupIdPrefixes()); return dbOrganization; }
Transform an organization from client/server model to database model @param organization Organization @return DbOrganization
public Organization getOrganization(final DbOrganization dbOrganization) { final Organization organization = DataModelFactory.createOrganization(dbOrganization.getName()); organization.getCorporateGroupIdPrefixes().addAll(dbOrganization.getCorporateGroupIdPrefixes()); return organization; }
Transform an organization from database model to client/server model @param dbOrganization DbOrganization @return Organization
public DbLicense getDbLicense(final License license) { final DbLicense dbLicense = new DbLicense(); dbLicense.setName(license.getName()); dbLicense.setLongName(license.getLongName()); dbLicense.setComments(license.getComments()); dbLicense.setRegexp(license.getRegexp()); dbLicense.setUrl(license.getUrl()); return dbLicense; }
Transform a license from client/server model to database model @param license the license to transform @return DbLicense return a license in database model
public License getLicense(final DbLicense dbLicense) { final License license = DataModelFactory.createLicense(dbLicense.getName(), dbLicense.getLongName(), dbLicense.getComments(), dbLicense.getRegexp(), dbLicense.getUrl()); if (dbLicense.isApproved() != null) { license.setApproved(dbLicense.isApproved()); }else{ license.setPending(true); } if (license.getLongName().isEmpty() && license.getComments().isEmpty() && license.getUrl().isEmpty() && license.getRegexp().isEmpty()) { license.setUnknown(true); } return license; }
Transform a license from database model to client/server model @param dbLicense DbLicense the license to transform @return License the license in client/server model
public DbArtifact getDbArtifact(final Artifact artifact) { final DbArtifact dbArtifact = new DbArtifact(); dbArtifact.setGroupId(artifact.getGroupId()); dbArtifact.setArtifactId(artifact.getArtifactId()); dbArtifact.setVersion(artifact.getVersion()); dbArtifact.setClassifier(artifact.getClassifier()); dbArtifact.setType(artifact.getType()); dbArtifact.setExtension(artifact.getExtension()); dbArtifact.setOrigin(artifact.getOrigin()); dbArtifact.setPromoted(artifact.isPromoted()); dbArtifact.setLicenses(artifact.getLicenses()); dbArtifact.setSize(artifact.getSize()); dbArtifact.setSha256(artifact.getSha256()); dbArtifact.setDescription(artifact.getDescription()); dbArtifact.setDownloadUrl(artifact.getDownloadUrl()); dbArtifact.setProvider(artifact.getProvider()); dbArtifact.setCreatedDateTime(artifact.getCreatedDateTime()); dbArtifact.setUpdatedDateTime(artifact.getUpdatedDateTime()); return dbArtifact; }
Transform an artifact from client/server model to database model WARNING: This transformation does not take licenses into account!!! @param artifact the artifact to transform @return DbArtifact
public Artifact getArtifact(final DbArtifact dbArtifact) { final Artifact artifact = DataModelFactory.createArtifact(dbArtifact.getGroupId(), dbArtifact.getArtifactId(), dbArtifact.getVersion(), dbArtifact.getClassifier(), dbArtifact.getType(), dbArtifact.getExtension()); artifact.setSha256(dbArtifact.getSha256()); artifact.setDescription(dbArtifact.getDescription()); artifact.setPromoted(dbArtifact.isPromoted()); artifact.setSize(dbArtifact.getSize()); artifact.setDownloadUrl(dbArtifact.getDownloadUrl()); artifact.setProvider(dbArtifact.getProvider()); artifact.setOrigin(dbArtifact.getOrigin()); artifact.setCreatedDateTime(dbArtifact.getCreatedDateTime()); artifact.setUpdatedDateTime(dbArtifact.getUpdatedDateTime()); for (final String licenseId : dbArtifact.getLicenses()) { artifact.addLicense(licenseId); } return artifact; }
Transform an artifact from database model to client/server model @param dbArtifact the artifact to transform @return Artifact the artifact in client/server model
public DbModule getDbModule(final Module module) { final DbModule dbModule = new DbModule(); dbModule.setName(module.getName()); dbModule.setVersion(module.getVersion()); dbModule.setPromoted(module.isPromoted()); dbModule.setSubmodule(module.isSubmodule()); dbModule.setCreatedDateTime(module.getCreatedDateTime()); dbModule.setUpdatedDateTime(module.getUpdatedDateTime()); // Artifact for (final Artifact artifact : module.getArtifacts()) { final DbArtifact dbArtifact = getDbArtifact(artifact); dbModule.addArtifact(dbArtifact); } // Dependencies for (final Dependency dependency : module.getDependencies()) { dbModule.addDependency(dependency.getTarget().getGavc(), dependency.getScope()); } // SubModules final StringBuilder sb = new StringBuilder(); for (final Module submodule : module.getSubmodules()) { final DbModule dbSubmodule = getDbModule(submodule); dbModule.addSubmodule(dbSubmodule); sb.setLength(0); } return dbModule; }
Transform a module from client/server model to database model @param module the module to transform @return DbModule
public Module getModule(final DbModule dbModule) { final Module module = DataModelFactory.createModule(dbModule.getName(), dbModule.getVersion()); module.setPromoted(dbModule.isPromoted()); module.setSubmodule(dbModule.isSubmodule()); module.setCreatedDateTime(dbModule.getCreatedDateTime()); module.setUpdatedDateTime(dbModule.getUpdatedDateTime()); // Artifacts for (final String gavc : dbModule.getArtifacts()) { // Artifacts final DbArtifact dbArtifact = repositoryHandler.getArtifact(gavc); if (null != dbArtifact) { final Artifact artifact = getArtifact(dbArtifact); module.addArtifact(artifact); } } // Dependencies for (final DbDependency dbDependency : dbModule.getDependencies()) { // Dependencies final Dependency dependency = getDependency(dbDependency, module.getName(), module.getVersion()); dependency.setSourceName(module.getName()); dependency.setSourceVersion(module.getVersion()); module.addDependency(dependency); } // Submodules for (final DbModule dbSubmodule : dbModule.getSubmodules()) { module.addSubmodule(getModule(dbSubmodule)); } return module; }
Transform a module from database model to client/server model @param dbModule DbModule @return Module
public Dependency getDependency(final DbDependency dbDependency, final String sourceName, final String sourceVersion) { final DbArtifact dbArtifact = repositoryHandler.getArtifact(dbDependency.getTarget()); final Artifact artifact; if (dbArtifact == null) { artifact = DataUtils.createArtifact(dbDependency.getTarget()); } else { artifact = getArtifact(dbArtifact); } final Dependency dependency = DataModelFactory.createDependency(artifact, dbDependency.getScope()); dependency.setSourceName(sourceName); dependency.setSourceVersion(sourceVersion); return dependency; }
Transform a dependency from database model to client/server model @param dbDependency DbDependency @param sourceName the name of the source module @param sourceVersion the version of the source module @return Dependency
public Comment getComment(final DbComment dbComment) { return DataModelFactory.createComment(dbComment.getEntityId(), dbComment.getEntityType(), dbComment.getAction(), dbComment.getDbCommentText(), dbComment.getDbCommentedBy(), dbComment.getDbCreatedDateTime()); }
Transform comment from database model to client/server model @param dbComment - database model to transform @return - the client/server model
public DbComment getDbComment(final Comment comment) { final DbComment dbComment = new DbComment(); dbComment.setEntityId(comment.getEntityId()); dbComment.setEntityType(comment.getEntityType()); dbComment.setAction(comment.getAction()); dbComment.setDbCommentText(comment.getCommentText()); dbComment.setDbCommentedBy(comment.getCommentedBy()); dbComment.setDbCreatedDateTime(comment.getCreatedDateTime()); return dbComment; }
Transform client/server model to a database model @param comment - client/server model to transform @return the database model
private static byte[] encode3to4(byte[] source, int srcOffset, int numSigBytes, byte[] destination, int destOffset) { // 1 2 3 // 01234567890123456789012345678901 Bit position // --------000000001111111122222222 Array position from threeBytes // --------| || || || | Six bit groups to index ALPHABET // >>18 >>12 >> 6 >> 0 Right shift necessary // 0x3f 0x3f 0x3f Additional AND // Create buffer with zero-padding if there are only one or two // significant bytes passed in the array. // We have to shift left 24 in order to flush out the 1's that appear // when Java treats a value as negative that is cast from a byte to an int. int inBuff = (numSigBytes > 0 ? ((source[srcOffset] << SHIFT_3B) >>> SHIFT_1B) : 0) | (numSigBytes > 1 ? ((source[srcOffset + 1] << SHIFT_3B) >>> SHIFT_2B) : 0) | (numSigBytes > 2 ? ((source[srcOffset + 2] << SHIFT_3B) >>> SHIFT_3B) : 0); switch (numSigBytes) { case 3: // NOSONAR destination[destOffset] = ALPHABET[(inBuff >>> SHIFT_3S)]; destination[destOffset + 1] = ALPHABET[(inBuff >>> SHIFT_2S) & BITS_6]; destination[destOffset + 2] = ALPHABET[(inBuff >>> SHIFT_1S) & BITS_6]; destination[destOffset + 3] = ALPHABET[(inBuff) & BITS_6]; // NOSONAR return destination; case 2: destination[destOffset] = ALPHABET[(inBuff >>> SHIFT_3S)]; destination[destOffset + 1] = ALPHABET[(inBuff >>> SHIFT_2S) & BITS_6]; destination[destOffset + 2] = ALPHABET[(inBuff >>> SHIFT_1S) & BITS_6]; destination[destOffset + 3] = EQUALS_SIGN; // NOSONAR return destination; case 1: destination[destOffset] = ALPHABET[(inBuff >>> SHIFT_3S)]; destination[destOffset + 1] = ALPHABET[(inBuff >>> SHIFT_2S) & BITS_6]; destination[destOffset + 2] = EQUALS_SIGN; destination[destOffset + 3] = EQUALS_SIGN; // NOSONAR return destination; default: return destination; } }
Encodes up to three bytes of the array <var>source</var> and writes the resulting four Base64 bytes to <var>destination</var>. The source and destination arrays can be manipulated anywhere along their length by specifying <var>srcOffset</var> and <var>destOffset</var>. This method does not check to make sure your arrays are large enough to accommodate <var>srcOffset</var> + 3 for the <var>source</var> array or <var>destOffset</var> + 4 for the <var>destination</var> array. The actual number of significant bytes in your array is given by <var>numSigBytes</var>. @param source the array to convert @param srcOffset the index where conversion begins @param numSigBytes the number of significant bytes in your array @param destination the array to hold the conversion @param destOffset the index where output will be put @return the <var>destination</var> array
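A worked example of the 3-bytes-to-4-characters packing, cross-checked against the JDK encoder (java.util.Base64 is used only for verification; the method above does the bit manipulation by hand):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64Demo {
    public static void main(String[] args) {
        byte[] man = "Man".getBytes(StandardCharsets.US_ASCII); // 0x4D 0x61 0x6E
        // Pack three bytes into one 24-bit group: 010011 010110 000101 101110
        int inBuff = ((man[0] & 0xFF) << 16) | ((man[1] & 0xFF) << 8) | (man[2] & 0xFF);
        String alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
        char[] out = {
            alphabet.charAt((inBuff >>> 18) & 0x3F), // 19 -> 'T'
            alphabet.charAt((inBuff >>> 12) & 0x3F), // 22 -> 'W'
            alphabet.charAt((inBuff >>> 6) & 0x3F),  //  5 -> 'F'
            alphabet.charAt(inBuff & 0x3F)           // 46 -> 'u'
        };
        System.out.println(new String(out));                         // TWFu
        System.out.println(Base64.getEncoder().encodeToString(man)); // TWFu
    }
}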
@Override public void body() { CallQueue queue = (CallQueue) getBeliefbase().getBelief("queue") .getFact(); Call call = queue.getPendingCall(); if (call.getCustormer().getLanguage().equalsIgnoreCase("English")) { String target_service = "report-service"; String msgContent = "NewRecordedCall"; // Looking for reporter agent IDF df = (IDF) SServiceProvider.getService(getServiceContainer(), IDF.class, RequiredServiceInfo.SCOPE_PLATFORM).get(this); IDFServiceDescription sd = df.createDFServiceDescription( target_service, null, null); IDFComponentDescription dfadesc = df.createDFComponentDescription( null, sd); ISearchConstraints constraints = df.createSearchConstraints(-1, 0); // Use a subgoal to search IGoal ft = createGoal("dfcap.df_search"); ft.getParameter("description").setValue(dfadesc); ft.getParameter("constraints").setValue(constraints); dispatchSubgoalAndWait(ft); IDFComponentDescription[] agents = (IDFComponentDescription[]) ft .getParameterSet("result").getValues(); if (agents.length > 0) { IMessageEvent msg = createMessageEvent("send_inform"); msg.getParameter(SFipa.CONTENT).setValue(msgContent); msg.getParameterSet(SFipa.RECEIVERS).addValue( agents[0].getName()); sendMessage(msg); logger.info("Message sent to: " + agents[0].getName().getLocalName()); } else { logger.severe("Service " + target_service + " not found."); } } else { System.out.println("Language: " + call.getCustormer().getLanguage()); String target_service = "helpdesk-service"; String msgContent = "UnknownLanguageCall"; // Looking for reporter agent IDF df = (IDF) SServiceProvider.getService(getServiceContainer(), IDF.class, RequiredServiceInfo.SCOPE_PLATFORM).get(this); IDFServiceDescription sd = df.createDFServiceDescription( target_service, null, null); IDFComponentDescription dfadesc = df.createDFComponentDescription( null, sd); ISearchConstraints constraints = df.createSearchConstraints(-1, 0); // Use a subgoal to search IGoal ft = createGoal("dfcap.df_search"); ft.getParameter("description").setValue(dfadesc); ft.getParameter("constraints").setValue(constraints); dispatchSubgoalAndWait(ft); IDFComponentDescription[] agents = (IDFComponentDescription[]) ft .getParameterSet("result").getValues(); IMessageEvent msg = createMessageEvent("helpdesk-request"); msg.getParameter(SFipa.CONTENT).setValue(msgContent); if (agents.length > 0) { msg.getParameterSet(SFipa.RECEIVERS).addValue( agents[0].getName()); sendMessage(msg); System.out.println("Enviado"); } else { System.out.println("Service " + target_service + " not found."); logger.severe("Service " + target_service + " not found."); } logger.info("Message sent to: " + agents[0].getName()); } }
(non-Javadoc) @see jadex.bdi.runtime.Plan#body()
public static ReportQueryByCriteria newReportQuery(Class classToSearchFrom, String[] columns, Criteria criteria, boolean distinct) { criteria = addCriteriaForOjbConcreteClasses(getRepository().getDescriptorFor(classToSearchFrom), criteria); return new ReportQueryByCriteria(classToSearchFrom, columns, criteria, distinct); }
create a new ReportQueryByCriteria @param classToSearchFrom @param criteria @param distinct @return ReportQueryByCriteria
public static ReportQueryByCriteria newReportQuery(Class classToSearchFrom, Criteria criteria, boolean distinct) { criteria = addCriteriaForOjbConcreteClasses(getRepository().getDescriptorFor(classToSearchFrom), criteria); return newReportQuery(classToSearchFrom, null, criteria, distinct); }
create a new ReportQueryByCriteria @param classToSearchFrom @param criteria @param distinct @return ReportQueryByCriteria
public static ReportQueryByCriteria newReportQuery(Class classToSearchFrom, Criteria criteria) { return newReportQuery(classToSearchFrom, criteria, false); }
create a new ReportQueryByCriteria @param classToSearchFrom @param criteria @return ReportQueryByCriteria
public static QueryByCriteria newQuery(Class classToSearchFrom, Criteria criteria, boolean distinct) { criteria = addCriteriaForOjbConcreteClasses(getRepository().getDescriptorFor(classToSearchFrom), criteria); return new QueryByCriteria(classToSearchFrom, criteria, distinct); }
Create a new QueryByCriteria @param classToSearchFrom @param criteria @param distinct @return QueryByCriteria
public static QueryByCriteria newQuery(Class classToSearchFrom, Criteria criteria) { return newQuery(classToSearchFrom, criteria, false); }
Create a new QueryByCriteria @param classToSearchFrom @param criteria @return QueryByCriteria
private static Collection getExtentClasses(ClassDescriptor cld) { /** * 1. check if this class has a ojbConcreteClass attribute */ FieldDescriptor fd = cld.getFieldDescriptorByName(ClassDescriptor.OJB_CONCRETE_CLASS); Collection classes = new HashSet(); // use same class only once if (fd != null) { classes.add(cld.getClassOfObject().getName()); } /** * 2. if this class has extents/is an extent search for all extents */ if (cld.isExtent()) { Vector extentClasses = cld.getExtentClasses(); /** * 3. get all extents for this class */ for (int i = 0; i < extentClasses.size(); i++) { Class ec = (Class) extentClasses.get(i); ClassDescriptor extCld = cld.getRepository().getDescriptorFor(ec); classes.addAll(getExtentClasses(extCld)); } } return classes; }
Searches the class descriptor for the ojbConcreteClass attribute; if the concrete-class attribute is found, collects this class and all of its extents so that a where clause can later restrict loading to these types. @param cld @return the extent classes
private static Criteria addCriteriaForOjbConcreteClasses(ClassDescriptor cld, Criteria crit) { /** * 1. check if this class has a ojbConcreteClass attribute */ Criteria concreteClassDiscriminator = null; Collection classes = getExtentClasses(cld); /** * 1. create a new Criteria for objConcreteClass */ if (!classes.isEmpty()) { concreteClassDiscriminator = new Criteria(); if (classes.size() > 1) { concreteClassDiscriminator = new Criteria(); concreteClassDiscriminator.addIn(ClassDescriptor.OJB_CONCRETE_CLASS, classes); } else { concreteClassDiscriminator.addEqualTo(ClassDescriptor.OJB_CONCRETE_CLASS, classes.toArray()[0]); } } /** * 2. only add the AND (objConcreteClass = "some.class" OR....) if we've actually found concrete * classes. */ if (concreteClassDiscriminator != null) { /** * it's possible there is no criteria attached to the query, and in this * case we still have to add the IN/EqualTo criteria for the concrete class type * so check if the crit is null and then create a blank one if needed. */ if (crit == null) { crit = new Criteria(); } crit.addAndCriteria(concreteClassDiscriminator); } /** * will just return the passed in criteria if no OJB concrete class is attribute is found. */ return crit; }
Searches the class descriptor for the ojbConcreteClass attribute; if the concrete-class attribute is found, appends a where clause which specifies that we can load all classes that are this type or extents of this type. @param cld @param crit @return the passed-in Criteria object, optionally AND'ed with OR'd class-type discriminators.
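A sketch of the discriminator that gets AND'ed onto the caller's criteria, using only the Criteria calls visible in the method above (the class names are illustrative and OJB must be on the classpath for this to compile):

import java.util.Arrays;
import java.util.Collection;
import org.apache.ojb.broker.metadata.ClassDescriptor;
import org.apache.ojb.broker.query.Criteria;

public class ConcreteClassCriteriaSketch {
    public static void main(String[] args) {
        // Hypothetical extent class names; the real method obtains them via getExtentClasses(cld).
        Collection classes = Arrays.asList("com.acme.Dog", "com.acme.Cat");

        Criteria discriminator = new Criteria();
        if (classes.size() > 1) {
            // renders roughly as: ojbConcreteClass IN (?, ?)
            discriminator.addIn(ClassDescriptor.OJB_CONCRETE_CLASS, classes);
        } else {
            // renders roughly as: ojbConcreteClass = ?
            discriminator.addEqualTo(ClassDescriptor.OJB_CONCRETE_CLASS, classes.toArray()[0]);
        }

        Criteria crit = new Criteria();      // the caller's (possibly empty) criteria
        crit.addAndCriteria(discriminator);  // appended with AND, exactly as in the method above
    }
}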
public StatementsForClassIF getStatementsForClass(JdbcConnectionDescriptor cds, ClassDescriptor cld) { return (StatementsForClassIF) this.createNewInstance(new Class[]{JdbcConnectionDescriptor.class, ClassDescriptor.class}, new Object[]{cds, cld}); }
Get an instance of {@link org.apache.ojb.broker.accesslayer.StatementsForClassIF} @param cds our connection descriptor @param cld the class descriptor of the persistent object @return an instance of {@link org.apache.ojb.broker.accesslayer.StatementsForClassIF}
public TileMap getCapabilities(TmsLayer layer) throws TmsLayerException { try { // Create a JaxB unmarshaller: JAXBContext context = JAXBContext.newInstance(TileMap.class); Unmarshaller um = context.createUnmarshaller(); // Find out where to retrieve the capabilities and unmarshall: if (layer.getBaseTmsUrl().startsWith(CLASSPATH)) { String location = layer.getBaseTmsUrl().substring(CLASSPATH.length()); if (location.length() > 0 && location.charAt(0) == '/') { // classpath resources should not start with a slash, but they often do location = location.substring(1); } ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (null == cl) { cl = getClass().getClassLoader(); // NOSONAR fallback from proper behaviour for some environments } InputStream is = cl.getResourceAsStream(location); if (null != is) { try { return (TileMap) um.unmarshal(is); } finally { try { is.close(); } catch (IOException ioe) { // ignore, just closing the stream } } } throw new TmsLayerException(TmsLayerException.COULD_NOT_FIND_FILE, layer.getBaseTmsUrl()); } // Normal case, find the URL and unmarshal: return (TileMap) um.unmarshal(httpService.getStream(layer.getBaseTmsUrl(), layer)); } catch (JAXBException e) { throw new TmsLayerException(e, TmsLayerException.COULD_NOT_READ_FILE, layer.getBaseTmsUrl()); } catch (IOException e) { throw new TmsLayerException(e, TmsLayerException.COULD_NOT_READ_FILE, layer.getBaseTmsUrl()); } }
Get the configuration for a TMS layer by retrieving and parsing its XML description file. The parsing is done using JaxB. @param layer the tms layer to get capabilities for. @return Returns the description as a Java configuration object. @throws TmsLayerException In case something went wrong trying to find or parse the XML description file.
public RasterLayerInfo asLayerInfo(TileMap tileMap) { RasterLayerInfo layerInfo = new RasterLayerInfo(); layerInfo.setCrs(tileMap.getSrs()); layerInfo.setDataSourceName(tileMap.getTitle()); layerInfo.setLayerType(LayerType.RASTER); layerInfo.setMaxExtent(asBbox(tileMap.getBoundingBox())); layerInfo.setTileHeight(tileMap.getTileFormat().getHeight()); layerInfo.setTileWidth(tileMap.getTileFormat().getWidth()); List<ScaleInfo> zoomLevels = new ArrayList<ScaleInfo>(tileMap.getTileSets().getTileSets().size()); for (TileSet tileSet : tileMap.getTileSets().getTileSets()) { zoomLevels.add(asScaleInfo(tileSet)); } layerInfo.setZoomLevels(zoomLevels); return layerInfo; }
Transform a TMS layer description object into a raster layer info object. @param tileMap The TMS layer description object. @return The raster layer info object as used by Geomajas.
public Bbox asBbox(BoundingBox boundingBox) { double width = boundingBox.getMaxX() - boundingBox.getMinX(); double height = boundingBox.getMaxY() - boundingBox.getMinY(); return new Bbox(boundingBox.getMinX(), boundingBox.getMinY(), width, height); }
Transforms a TMS bounding box information object into a Geomajas {@link Bbox} object. @param boundingBox The TMS bounding box object. @return The default Geomajas bounding box.
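The conversion is plain min/max-to-origin-plus-size arithmetic; for example, with plain doubles (no TMS or Geomajas objects needed):

public class BboxConversionDemo {
    public static void main(String[] args) {
        double minX = 100000, minY = 200000, maxX = 300000, maxY = 500000;
        double width = maxX - minX;   // 200000
        double height = maxY - minY;  // 300000
        // Geomajas Bbox takes (x, y, width, height), so the result would be (100000, 200000, 200000, 300000).
        System.out.println("Bbox(" + minX + ", " + minY + ", " + width + ", " + height + ")");
    }
}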
public ScaleInfo asScaleInfo(TileSet tileSet) { ScaleInfo scaleInfo = new ScaleInfo(); scaleInfo.setPixelPerUnit(1 / tileSet.getUnitsPerPixel()); return scaleInfo; }
Transforms a TMS tile-set description object into a Geomajas {@link ScaleInfo} object. @param tileSet The tile set description. @return The default Geomajas scale object.
private void init_jdbcTypes() throws SQLException { ReportQuery q = (ReportQuery) getQueryObject().getQuery(); m_jdbcTypes = new int[m_attributeCount]; // try to get jdbcTypes from Query if (q.getJdbcTypes() != null) { m_jdbcTypes = q.getJdbcTypes(); } else { ResultSetMetaData rsMetaData = getRsAndStmt().m_rs.getMetaData(); for (int i = 0; i < m_attributeCount; i++) { m_jdbcTypes[i] = rsMetaData.getColumnType(i + 1); } } }
Get the jdbcTypes from the Query, or from the ResultSet metadata if they are not available from the Query. @throws SQLException
protected Object getObjectFromResultSet() throws PersistenceBrokerException { Object[] result = new Object[m_attributeCount]; ReportQuery q =(ReportQuery) getQueryObject().getQuery(); for (int i = 0; i < m_attributeCount; i++) { try { int jdbcType = m_jdbcTypes[i]; String attr = q.getAttributes()[i]; FieldDescriptor fld = (FieldDescriptor) q.getAttributeFieldDescriptors().get(attr); Object val =JdbcTypesHelper.getObjectFromColumn(getRsAndStmt().m_rs, new Integer(jdbcType), i + 1); if (fld != null && fld.getFieldConversion() != null) { val = fld.getFieldConversion().sqlToJava(val); } result[i] = val; } catch (SQLException e) { throw new PersistenceBrokerException(e); } } return result; }
Returns an Object[] representing the columns of the current ResultSet row. No OJB object materialization, proxy generation etc. is involved, in order to maximize performance.
private int getLiteralId(String literal) throws PersistenceBrokerException { ////logger.debug("lookup: " + literal); try { return tags.getIdByTag(literal); } catch (NullPointerException t) { throw new MetadataException("unknown literal: '" + literal + "'",t); } }
Returns the XmlCapable id associated with the literal. OJB maintains a RepositoryTags table that provides a mapping from xml-tags to XmlCapable ids. @param literal the literal to look up @return the int value representing the XmlCapable @throws MetadataException if the literal is not found in the tags mapping
private int getIsoLevel(String isoLevel) { if (isoLevel.equalsIgnoreCase(LITERAL_IL_READ_UNCOMMITTED)) { return IL_READ_UNCOMMITTED; } else if (isoLevel.equalsIgnoreCase(LITERAL_IL_READ_COMMITTED)) { return IL_READ_COMMITTED; } else if (isoLevel.equalsIgnoreCase(LITERAL_IL_REPEATABLE_READ)) { return IL_REPEATABLE_READ; } else if (isoLevel.equalsIgnoreCase(LITERAL_IL_SERIALIZABLE)) { return IL_SERIALIZABLE; } else if (isoLevel.equalsIgnoreCase(LITERAL_IL_OPTIMISTIC)) { return IL_OPTIMISTIC; } //logger.warn("unknown isolation-level: " + isoLevel + " using RW_UNCOMMITTED as default"); return defIsoLevel; }
Maps isolation-level literals to the corresponding id, falling back to the default isolation level for unknown literals. @param isoLevel the isolation-level literal @return the id
public final boolean hasReturnValues() { if (this.hasReturnValue()) { return true; } else { // TODO: We may be able to 'pre-calculate' the results // of this loop by just checking arguments as they are added // The only problem is that the 'isReturnedbyProcedure' property // can be modified once the argument is added to this procedure. // If that occurs, then 'pre-calculated' results will be inacccurate. Iterator iter = this.getArguments().iterator(); while (iter.hasNext()) { ArgumentDescriptor arg = (ArgumentDescriptor) iter.next(); if (arg.getIsReturnedByProcedure()) { return true; } } } return false; }
Does this procedure return any values to the caller? @return <code>true</code> if the procedure returns at least one value to the caller.
protected void addArguments(FieldDescriptor field[]) { for (int i = 0; i < field.length; i++) { ArgumentDescriptor arg = new ArgumentDescriptor(this); arg.setValue(field[i].getAttributeName(), false); this.addArgument(arg); } }
Set up arguments for each FieldDescriptor in an array.
@Api public void setRoles(Map<String, List<AuthorizationInfo>> roles) { Map<String, List<NamedRoleInfo>> namedRoles = new HashMap<String, List<NamedRoleInfo>>(); for (String ldapRole : roles.keySet()) { DN dn; List<AuthorizationInfo> auth = roles.get(ldapRole); NamedRoleInfo role = new NamedRoleInfo(); role.setAuthorizations(auth); try { dn = new DN(ldapRole); role.setName(dn.getRDN().getAttributeValues()[0]); } catch (LDAPException e) { role.setName(ldapRole); } namedRoles.put(ldapRole, Collections.singletonList(role)); } setNamedRoles(namedRoles); }
Set the authorizations for the roles which may be defined. If the keys are DN values, the application role names are taken from the leftmost RDN value. Use {@link LdapAuthenticationService#setNamedRoles(Map)} instead of this method to explicitly define application role names. @param roles map with roles, keys are the values for {@link #rolesAttribute}, probably DN values @since 1.10.0 (actually already from 1.9.0, but the annotation was missing)
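A hedged usage sketch of setRoles; the DN key, the editorAuthorization instance and the ldapAuthenticationService variable are hypothetical, and only illustrate that the application role name is taken from the leftmost RDN value of a DN key:
Map<String, List<AuthorizationInfo>> roles = new HashMap<String, List<AuthorizationInfo>>();
// the key "cn=editors,dc=example,dc=com" yields an application role named "editors"
roles.put("cn=editors,dc=example,dc=com", Collections.singletonList(editorAuthorization));
ldapAuthenticationService.setRoles(roles);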
@Api public void setNamedRoles(Map<String, List<NamedRoleInfo>> namedRoles) { this.namedRoles = namedRoles; ldapRoleMapping = new HashMap<String, Set<String>>(); for (String roleName : namedRoles.keySet()) { if (!ldapRoleMapping.containsKey(roleName)) { ldapRoleMapping.put(roleName, new HashSet<String>()); } for (NamedRoleInfo role : namedRoles.get(roleName)) { ldapRoleMapping.get(roleName).add(role.getName()); } } }
Set the named roles which may be defined. @param namedRoles map with roles, keys are the values for {@link #rolesAttribute}, probably DN values @since 1.10.0
@Action( semantics = SemanticsOf.IDEMPOTENT ) public ExcelModuleDemoToDoItemBulkUpdateManager changeFileName(final String fileName) { setFileName(fileName); return toDoItemExportImportService.newBulkUpdateManager(this); }
region > changeFileName (action)
@Action public ExcelModuleDemoToDoItemBulkUpdateManager select( final Category category, @Nullable final Subcategory subcategory, @ParameterLayout(named="Completed?") final boolean completed) { setCategory(category); setSubcategory(subcategory); setComplete(completed); return toDoItemExportImportService.newBulkUpdateManager(this); }
region > select (action)
@SuppressWarnings("unchecked") @Collection public List<ExcelModuleDemoToDoItem> getToDoItems() { return container.allMatches(ExcelModuleDemoToDoItem.class, Predicates.and( ExcelModuleDemoToDoItem.Predicates.thoseOwnedBy(currentUserName()), ExcelModuleDemoToDoItem.Predicates.thoseCompleted(isComplete()), ExcelModuleDemoToDoItem.Predicates.thoseCategorised(getCategory(), getSubcategory()))); }
region > toDoItems (derived collection)
@Action( semantics = SemanticsOf.SAFE ) public Blob export() { final String fileName = withExtension(getFileName(), ".xlsx"); final List<ExcelModuleDemoToDoItem> items = getToDoItems(); return toExcel(fileName, items); }
region > export (action)
@Action @ActionLayout( named = "Import" ) @MemberOrder(name="toDoItems", sequence="2") public List<ExcelModuleDemoToDoItemBulkUpdateLineItem> importBlob( @Parameter(fileAccept = ".xlsx") @ParameterLayout(named="Excel spreadsheet") final Blob spreadsheet) { final List<ExcelModuleDemoToDoItemBulkUpdateLineItem> lineItems = excelService.fromExcel(spreadsheet, WORKSHEET_SPEC); container.informUser(lineItems.size() + " items imported"); return lineItems; }
region > import (action)
public void setPixelPerUnit(double pixelPerUnit) { if (pixelPerUnit < MINIMUM_PIXEL_PER_UNIT) { pixelPerUnit = MINIMUM_PIXEL_PER_UNIT; } if (pixelPerUnit > MAXIMUM_PIXEL_PER_UNIT) { pixelPerUnit = MAXIMUM_PIXEL_PER_UNIT; } this.pixelPerUnit = pixelPerUnit; setPixelPerUnitBased(true); postConstruct(); }
Sets the scale value in pixels per map unit. Values outside the range [MINIMUM_PIXEL_PER_UNIT, MAXIMUM_PIXEL_PER_UNIT] are clamped to the nearest bound. @param pixelPerUnit the scale value (pix/map unit)
@PostConstruct protected void postConstruct() { if (pixelPerUnitBased) { // Calculate numerator and denominator if (pixelPerUnit > PIXEL_PER_METER) { this.numerator = pixelPerUnit / conversionFactor; this.denominator = 1; } else { this.numerator = 1; this.denominator = PIXEL_PER_METER / pixelPerUnit; } setPixelPerUnitBased(false); } else { // Calculate PPU this.pixelPerUnit = numerator / denominator * conversionFactor; setPixelPerUnitBased(true); } }
Finish configuration.
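A hedged summary of the two pixel-per-unit branches above; the values of PIXEL_PER_METER and conversionFactor are not part of this excerpt, so the sketch stays symbolic:
// pixelPerUnit >  PIXEL_PER_METER  ->  numerator = pixelPerUnit / conversionFactor, denominator = 1
// pixelPerUnit <= PIXEL_PER_METER  ->  numerator = 1, denominator = PIXEL_PER_METER / pixelPerUnit
// when pixelPerUnitBased is false, the inverse is computed instead:
//   pixelPerUnit = numerator / denominator * conversionFactor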
void nextExecuted(String sql) throws SQLException { count++; if (_order.contains(sql)) { return; } String sqlCmd = sql.substring(0, 7); String rest = sql.substring(sqlCmd.equals("UPDATE ") ? 7 // "UPDATE " : 12); // "INSERT INTO " or "DELETE FROM " String tableName = rest.substring(0, rest.indexOf(' ')); HashSet fkTables = (HashSet) _fkInfo.get(tableName); // we should not change order of INSERT/DELETE/UPDATE // statements for the same table if (_touched.contains(tableName)) { executeBatch(); } if (sqlCmd.equals("INSERT ")) { if (_dontInsert != null && _dontInsert.contains(tableName)) { // one of the previous INSERTs contained a table // that references this table. // Let's execute that previous INSERT right now so that // in the future INSERTs into this table will go first // in the _order array. executeBatch(); } } else //if (sqlCmd.equals("DELETE ") || sqlCmd.equals("UPDATE ")) { // We process UPDATEs in the same way as DELETEs // because setting FK to NULL in UPDATE is equivalent // to DELETE from the referential integrity point of view. if (_deleted != null && fkTables != null) { HashSet intersection = (HashSet) _deleted.clone(); intersection.retainAll(fkTables); if (!intersection.isEmpty()) { // one of the previous DELETEs contained a table // that is referenced from this table. // Let's execute that previous DELETE right now so that // in the future DELETEs into this table will go first // in the _order array. executeBatch(); } } } _order.add(sql); _touched.add(tableName); if (sqlCmd.equals("INSERT ")) { if (fkTables != null) { if (_dontInsert == null) { _dontInsert = new HashSet(); } _dontInsert.addAll(fkTables); } } else if (sqlCmd.equals("DELETE ")) { if (_deleted == null) { _deleted = new HashSet(); } _deleted.add(tableName); } }
Remember the order of execution, flushing the current batch whenever appending the new statement could reorder statements for the same table or violate a foreign-key dependency.
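A hedged illustration of the reordering rule above, with hypothetical tables ADDRESS and PERSON where ADDRESS holds a foreign key to PERSON (so _fkInfo.get("ADDRESS") contains "PERSON"):
// nextExecuted("INSERT INTO ADDRESS ...");  // remembered; PERSON is added to _dontInsert
// nextExecuted("INSERT INTO PERSON ...");   // PERSON is in _dontInsert -> executeBatch() runs first,
//                                           // so later INSERTs into PERSON precede INSERTs into ADDRESS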
private PreparedStatement prepareBatchStatement(String sql) { String sqlCmd = sql.substring(0, 7); if (sqlCmd.equals("UPDATE ") || sqlCmd.equals("DELETE ") || (_useBatchInserts && sqlCmd.equals("INSERT "))) { PreparedStatement stmt = (PreparedStatement) _statements.get(sql); if (stmt == null) { // [olegnitz] for JDK 1.2 we need to list both PreparedStatement and Statement // interfaces, otherwise proxy.jar works incorrectly stmt = (PreparedStatement) Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{ PreparedStatement.class, Statement.class, BatchPreparedStatement.class}, new PreparedStatementInvocationHandler(this, sql, m_jcd)); _statements.put(sql, stmt); } return stmt; } else { return null; } }
For UPDATE and DELETE statements (and INSERT statements when batch inserts are enabled), return a proxied BatchPreparedStatement; otherwise return null.
protected void load() { Logger bootLogger = LoggerFactory.getBootLogger(); // first we check whether the system property // org.apache.ojb.broker.util.logging.Logger // is set (or its alias LoggerClass which is deprecated) ClassLoader contextLoader = ClassHelper.getClassLoader(); String loggerClassName; _loggerClass = null; properties = new Properties(); loggerClassName = getLoggerClass(System.getProperties()); _loggerConfigFile = getLoggerConfigFile(System.getProperties()); InputStream ojbLogPropFile; if (loggerClassName == null) { // now we're trying to load the OJB-logging.properties file String ojbLogPropFilePath = System.getProperty(OJB_LOGGING_PROPERTIES_FILE, OJB_LOGGING_PROPERTIES_FILE); try { URL ojbLoggingURL = ClassHelper.getResource(ojbLogPropFilePath); if (ojbLoggingURL == null) { ojbLoggingURL = (new File(ojbLogPropFilePath)).toURL(); } ojbLogPropFile = ojbLoggingURL.openStream(); try { bootLogger.info("Found logging properties file: " + ojbLogPropFilePath); properties.load(ojbLogPropFile); _loggerConfigFile = getLoggerConfigFile(properties); loggerClassName = getLoggerClass(properties); } finally { ojbLogPropFile.close(); } } catch (Exception ex) { if(loggerClassName == null) { bootLogger.warn("Can't read logging properties file using path '" + ojbLogPropFilePath + "', message is: " + SystemUtils.LINE_SEPARATOR + ex.getMessage() + SystemUtils.LINE_SEPARATOR + "Will try to load logging properties from OJB.properties file"); } else { bootLogger.info("Problems while closing resources for path '" + ojbLogPropFilePath + "', message is: " + SystemUtils.LINE_SEPARATOR + ex.getMessage(), ex); } } } if (loggerClassName == null) { // deprecated: load the OJB.properties file // this is not good because we have all OJB properties in this config String ojbPropFile = System.getProperty("OJB.properties", "OJB.properties"); try { ojbLogPropFile = contextLoader.getResourceAsStream(ojbPropFile); if (ojbLogPropFile != null) { try { properties.load(ojbLogPropFile); loggerClassName = getLoggerClass(properties); _loggerConfigFile = getLoggerConfigFile(properties); if (loggerClassName != null) { // deprecation warning for after 1.0 bootLogger.warn("Please use a separate '"+OJB_LOGGING_PROPERTIES_FILE+"' file to specify your logging settings"); } } finally { ojbLogPropFile.close(); } } } catch (Exception ex) {} } if (loggerClassName != null) { try { _loggerClass = ClassHelper.getClass(loggerClassName); bootLogger.info("Logging: Found logger class '" + loggerClassName); } catch (ClassNotFoundException ex) { _loggerClass = PoorMansLoggerImpl.class; bootLogger.warn("Could not load logger class "+loggerClassName+", defaulting to "+_loggerClass.getName(), ex); } } else { // still no logger configured - lets check whether commons-logging is configured if ((System.getProperty(PROPERTY_COMMONS_LOGGING_LOG) != null) || (System.getProperty(PROPERTY_COMMONS_LOGGING_LOGFACTORY) != null)) { // yep, so use commons-logging _loggerClass = CommonsLoggerImpl.class; bootLogger.info("Logging: Found commons logging properties, use " + _loggerClass); } else { // but perhaps there is a log4j.properties file ? try { ojbLogPropFile = contextLoader.getResourceAsStream("log4j.properties"); if (ojbLogPropFile != null) { // yep, so use log4j _loggerClass = Log4jLoggerImpl.class; _loggerConfigFile = "log4j.properties"; bootLogger.info("Logging: Found 'log4j.properties' file, use " + _loggerClass); ojbLogPropFile.close(); } } catch (Exception ex) {} if (_loggerClass == null) { // or a commons-logging.properties file ? 
try { ojbLogPropFile = contextLoader.getResourceAsStream("commons-logging.properties"); if (ojbLogPropFile != null) { // yep, so use commons-logging _loggerClass = CommonsLoggerImpl.class; _loggerConfigFile = "commons-logging.properties"; bootLogger.info("Logging: Found 'commons-logging.properties' file, use " + _loggerClass); ojbLogPropFile.close(); } } catch (Exception ex) {} if (_loggerClass == null) { // no, so default to poor man's logging bootLogger.info("** Can't find logging configuration file, use default logger **"); _loggerClass = PoorMansLoggerImpl.class; } } } } }
/* (non-Javadoc) @see org.apache.ojb.broker.util.configuration.impl.ConfigurationAbstractImpl#load()
public MaterializationCache createObjectCache(PersistenceBroker broker) { CacheDistributor cache = null; try { log.info("Start creating new ObjectCache instance"); /* 1. if default cache was not found, create an new instance of the default cache specified in the configuration. 2. Then instantiate AllocatorObjectCache to handle per connection/ per class caching instances. 3. To support intern operations we wrap ObjectCache with an InternalObjectCache implementation */ cache = new CacheDistributor(broker); log.info("Instantiate new " + cache.getClass().getName() + " for PB instance " + broker); } catch(Exception e) { log.error("Error while initiation, please check your configuration" + " files and the used implementation class", e); } log.info("New ObjectCache instance was created"); return new MaterializationCache(cache); }
Creates a new {@link ObjectCacheInternal} instance. Each <tt>ObjectCache</tt> implementation is wrapped by a {@link CacheDistributor}, and the distributor is wrapped by a {@link MaterializationCache}. @param broker The PB instance to associate with the cache instance
public static void generateJavaFiles(String requirementsFolder, String platformName, String src_test_dir, String tests_package, String casemanager_package, String loggingPropFile) throws Exception { File reqFolder = new File(requirementsFolder); if (reqFolder.isDirectory()) { for (File f : reqFolder.listFiles()) { if (f.getName().endsWith(".story")) { try { SystemReader.generateJavaFilesForOneStory( f.getCanonicalPath(), platformName, src_test_dir, tests_package, casemanager_package, loggingPropFile); } catch (IOException e) { String message = "ERROR: " + e.getMessage(); logger.severe(message); throw new BeastException(message, e); } } } for (File f : reqFolder.listFiles()) { if (f.isDirectory()) { SystemReader.generateJavaFiles(requirementsFolder + File.separator + f.getName(), platformName, src_test_dir, tests_package + "." + f.getName(), casemanager_package, loggingPropFile); } } } else if (reqFolder.getName().endsWith(".story")) { SystemReader.generateJavaFilesForOneStory(requirementsFolder, platformName, src_test_dir, tests_package, casemanager_package, loggingPropFile); } else { String message = "No story file found in " + requirementsFolder; logger.severe(message); throw new BeastException(message); } }
Main method of the class, which handles the process of creating the tests. @param requirementsFolder the folder where the plain text given by the client is stored @param platformName the MAS platform to use (JADE, JADEX, etc.) @param src_test_dir the folder where the generated classes are created @param tests_package the name of the package where the stories are created @param casemanager_package the package where the case manager must be created @param loggingPropFile the logging properties file @throws Exception if any error is found in the configuration
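A hedged usage sketch; every path and package name below is a hypothetical example, only the method signature comes from the code above:
SystemReader.generateJavaFiles(
        "src/test/resources/requirements",  // folder with .story files
        "JADE",                             // MAS platform name
        "src/test/java",                    // where generated classes are written
        "com.example.stories",              // package for the generated test suites
        "com.example.casemanager",          // package for the case manager
        "logging.properties");              // logging properties file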
public static void generateJavaFilesForOneStory(String storyFilePath, String platformName, String src_test_dir, String tests_package, String casemanager_package, String loggingPropFile) throws Exception { /* * This map has the following structure: {Scenario1ID => * [GivenDescription1, WhenDescription1, ThenDescription1], Scenario2ID * => [GivenDescription2, WhenDescription2, ThenDescription2], ...} */ HashMap<String, String[]> scenarios = new HashMap<String, String[]>(); String storyName = null; String story_user = null; String user_feature = null; String user_benefit = null; BufferedReader fileReader = createFileReader(storyFilePath); if (fileReader == null) { logger.severe("ERROR Reading the file " + storyFilePath); } else { // Starting with the CaseManager // Shall I perish, may $Deity have mercy on my soul, and on those // who should finish this File caseManager = CreateSystemCaseManager.startSystemCaseManager( casemanager_package, src_test_dir); try { String nextLine = null; // TYPES: // As a -> 1 // I want to -> 2 // So that -> 3 int lineType = 0; while ((nextLine = fileReader.readLine()) != null) { // Again, why am I using class variables to store the data // I only fucking use in this method? if (nextLine.startsWith("Story")) { String aux = nextLine.replaceFirst("Story", "").trim(); if (aux.startsWith(":") || aux.startsWith("-")) { aux = aux.substring(1).trim(); } storyName = aux; } else if (nextLine.startsWith("As a")) { story_user = nextLine.replaceFirst("As a", "").trim(); lineType = 1; } else if (nextLine.startsWith("I want to")) { user_feature = nextLine.replaceFirst("I want to", "") .trim(); lineType = 2; } else if (nextLine.startsWith("So that")) { user_benefit = nextLine.replaceFirst("So that", "") .trim(); lineType = 3; } else if (nextLine.startsWith("And")) { switch (lineType) { case 1: story_user= story_user + " and " + nextLine.replaceFirst("And", "").trim(); break; case 2: user_feature= user_feature + " and " + nextLine.replaceFirst("And", "").trim(); break; case 3: user_benefit= user_benefit + " and " + nextLine.replaceFirst("And", "").trim(); break; default: break; } } else if (nextLine.startsWith("Scenario")) { // I am assuming that the file is properly formated // TODO: Check that it actually is properly formated. 
String aux = nextLine.replaceFirst("Scenario", "").trim(); if (aux.startsWith(":") || aux.startsWith("-")) { aux = aux.substring(1).trim(); } aux.toLowerCase(); String scenarioID = createClassName(aux); while (!fileReader.ready()) { Thread.yield(); } nextLine = fileReader.readLine(); String givenDescription = nextLine.replaceFirst( "Given", "").trim(); while (!fileReader.ready()) { Thread.yield(); } nextLine = fileReader.readLine(); while (nextLine.startsWith("And")) { givenDescription = givenDescription + " and " + nextLine.replaceFirst("And", "").trim(); while (!fileReader.ready()) { Thread.yield(); } nextLine = fileReader.readLine(); } String whenDescription = nextLine.replaceFirst("When", "").trim(); while (!fileReader.ready()) { Thread.yield(); } nextLine = fileReader.readLine(); while (nextLine.startsWith("And")) { whenDescription = whenDescription + " and " + nextLine.replaceFirst("And", "").trim(); while (!fileReader.ready()) { Thread.yield(); } nextLine = fileReader.readLine(); } String thenDescription = nextLine.replaceFirst("Then", "").trim(); nextLine = fileReader.readLine(); while (nextLine!=null && nextLine.startsWith("And")) { thenDescription = thenDescription + " and " + nextLine.replaceFirst("And", "").trim(); nextLine = fileReader.readLine(); } String[] scenarioData = new String[3]; scenarioData[0] = givenDescription; scenarioData[1] = whenDescription; scenarioData[2] = thenDescription; scenarios.put(scenarioID, scenarioData); } else if (!nextLine.trim().isEmpty()) { // Is not an empty line, but has not been recognized. logger.severe("ERROR: The test writen in the plain text can not be handed"); logger.severe("Try again whit the following key-words: {Story -," + " As a, I want to, So that, Scenario:, Given, When, Then}"); } // The only possibility here is to get an empty line, // so I don't have to do anything. } fileReader.close(); // Now, I should have all the variables set. if (storyName != null) { // // I have a story, so... if (fileDoesNotExist(createClassName(storyName) + ".java", tests_package, src_test_dir)) { CreateSystemTestSuite.createSystemTestSuite(storyName, platformName, tests_package, src_test_dir, loggingPropFile, story_user, user_feature, user_benefit, scenarios); } CreateSystemCaseManager.addStory(caseManager, storyName, tests_package, story_user, user_feature, user_benefit); } else { // This should not happen, since this class should only be // used // to create System tests, (i.e. "story" should never be // null) logger.severe("ERROR: No Story found in :" + storyFilePath); } CreateSystemCaseManager.closeSystemCaseManager(caseManager); } catch (Exception e) { logger.severe("ERROR: " + e.getMessage()); throw e; } } }
Handles the whole process of creating the tests for a single story file. @param storyFilePath the path of the story file that contains the plain text given by the client @param platformName the MAS platform to use (JADE, JADEX, etc.) @param src_test_dir the folder where the generated classes are created @param tests_package the name of the package where the stories are created @param casemanager_package the package where the case manager must be created @param loggingPropFile the logging properties file @throws Exception if any error is found in the configuration
public String getDescriptiveStatus() { if (response!=null && response.getStatus()!=null) { if (response.getStatus()>=500 && response.getStatus()<600) { return "Failed"; } else if (response.getStatus()>=400 && response.getStatus()<500) { return "Completed"; } else if (response.getStatus()>=200 && response.getStatus()<300) { if (orchestrations!=null) { for (Orchestration orch : orchestrations) { if (orch.getResponse()!=null && orch.getResponse().getStatus()!=null) { if (orch.getResponse().getStatus()>=400 && orch.getResponse().getStatus()<600) { return "Completed with error(s)"; } } } } } } return "Successful"; }
Get a descriptive status string based on the status code: 'Successful', 'Completed', 'Completed with error(s)' or 'Failed'
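A hedged summary of the mapping implemented above:
// response status 5xx                                           -> "Failed"
// response status 4xx                                           -> "Completed"
// response status 2xx with any orchestration returning 4xx/5xx  -> "Completed with error(s)"
// everything else (including a plain 2xx)                       -> "Successful"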
public void setPromoted(final boolean promoted) { this.promoted = promoted; for (final Artifact artifact : artifacts) { artifact.setPromoted(promoted); } for (final Module suModule : submodules) { suModule.setPromoted(promoted); } }
Sets the promotion state. <P>INFO: This method automatically updates all the contained artifacts and submodules. @param promoted boolean
public void addDependency(final Dependency dependency) { if(dependency != null && !dependencies.contains(dependency)){ this.dependencies.add(dependency); } }
Add a dependency to the module. @param dependency Dependency
public void addSubmodule(final Module submodule) { if (!submodules.contains(submodule)) { submodule.setSubmodule(true); if (promoted) { submodule.setPromoted(promoted); } submodules.add(submodule); } }
Adds a submodule to the module. <P> INFO: If the module is promoted, all added submodules will be promoted. @param submodule Module
public void addArtifact(final Artifact artifact) { if (!artifacts.contains(artifact)) { if (promoted) { artifact.setPromoted(promoted); } artifacts.add(artifact); } }
Adds an artifact to the module. <P> INFO: If the module is promoted, all added artifacts will be promoted. @param artifact Artifact
@Override public void format(final LoggingEvent event, final StringBuffer toAppendTo) { String renderedMessage = event.getRenderedMessage(); if (renderedMessage!= null && renderedMessage.contains("\n")) { toAppendTo.append("\n"); String[] messageParts = renderedMessage.split("\n"); String spacing = getSpacing(); for (int i = 0; i < messageParts.length; i++) { String messagePart = messageParts[i]; toAppendTo.append(spacing); toAppendTo.append(messagePart); if (i != (messageParts.length - 1)) { toAppendTo.append("\n"); } } } else { toAppendTo.append(renderedMessage); } }
{@inheritDoc}
public static String get(MessageKey key) { return data.getProperty(key.toString(), key.toString()); }
Retrieves the configured message by property key @param key The key in the file @return The associated value in case the key is found in the message bundle file. If no such key is defined, the returned value would be the key itself.
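A hedged usage sketch; the enclosing class name and the enum constant are hypothetical:
// Falls back to the key's own name ("WELCOME_TITLE") when the property is not defined in the bundle.
String label = Messages.get(MessageKey.WELCOME_TITLE);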