@PostConstruct @SuppressWarnings("unused") protected void postConstruct() throws GeomajasException { crs = geoService.getCrs2(getLayerInfo().getCrs()); srid = geoService.getSridFromCrs(crs); filterService.registerFeatureModel(featureModel); if (null == featureModel) { HibernateFeatureModel hibernateFeatureModel = applicationContext.getBean(HibernateFeatureModel.class); hibernateFeatureModel.setSessionFactory(getSessionFactory()); hibernateFeatureModel.setLayerInfo(getLayerInfo()); featureModel = hibernateFeatureModel; } }
Finish initializing the layer. @throws GeomajasException initialization failed
@Api public void setFeatureModel(FeatureModel featureModel) throws LayerException { this.featureModel = featureModel; if (null != getLayerInfo()) { featureModel.setLayerInfo(getLayerInfo()); } filterService.registerFeatureModel(featureModel); }
Set the featureModel. @param featureModel feature model @throws LayerException problem setting the feature model @since 1.8.0
public Iterator<?> getElements(Filter filter, int offset, int maxResultSize) throws LayerException { try { Session session = getSessionFactory().getCurrentSession(); Criteria criteria = session.createCriteria(getFeatureInfo().getDataSourceName()); if (filter != null) { if (filter != Filter.INCLUDE) { CriteriaVisitor visitor = new CriteriaVisitor((HibernateFeatureModel) featureModel, dateFormat); Criterion c = (Criterion) filter.accept(visitor, criteria); if (c != null) { criteria.add(c); } } } // Sorting of elements. if (getFeatureInfo().getSortAttributeName() != null) { if (SortType.ASC.equals(getFeatureInfo().getSortType())) { criteria.addOrder(Order.asc(getFeatureInfo().getSortAttributeName())); } else { criteria.addOrder(Order.desc(getFeatureInfo().getSortAttributeName())); } } criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); if (isScrollableResultSet()) { return (Iterator<?>) new ScrollIterator(criteria.scroll()); } else { List<?> list = criteria.list(); return list.iterator(); } } catch (HibernateException he) { throw new HibernateLayerException(he, ExceptionCode.HIBERNATE_LOAD_FILTER_FAIL, getFeatureInfo() .getDataSourceName(), filter.toString()); } }
This implementation does not support the 'offset' parameter. The maxResultSize parameter is not used (limiting the result needs to be done after security, see {@link org.geomajas.internal.layer.vector.GetFeaturesEachStep}). If you expect large results to be returned, enable scrollableResultSet to retrieve only as many records as needed.
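A minimal usage sketch for getElements, assuming an already configured layer instance (the variable name layer is hypothetical; only the signature shown above is used):

// "layer" is a hypothetical, fully configured Hibernate layer instance.
Iterator<?> elements = layer.getElements(Filter.INCLUDE, 0, 0); // offset and maxResultSize are not honoured here
while (elements.hasNext()) {
    Object feature = elements.next();
    // read attributes through the layer's FeatureModel; limit/secure the result afterwards
}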
public void update(Object feature) throws LayerException { Session session = getSessionFactory().getCurrentSession(); session.update(feature); }
Update a feature object in the Hibernate session. @param feature feature object @throws LayerException problem updating the feature
private void enforceSrid(Object feature) throws LayerException { Geometry geom = getFeatureModel().getGeometry(feature); if (null != geom) { geom.setSRID(srid); getFeatureModel().setGeometry(feature, geom); } }
Enforces the correct srid on incoming features. @param feature object to enforce srid on @throws LayerException problem getting or setting srid
private Envelope getBoundsLocal(Filter filter) throws LayerException { try { Session session = getSessionFactory().getCurrentSession(); Criteria criteria = session.createCriteria(getFeatureInfo().getDataSourceName()); CriteriaVisitor visitor = new CriteriaVisitor((HibernateFeatureModel) getFeatureModel(), dateFormat); Criterion c = (Criterion) filter.accept(visitor, criteria); if (c != null) { criteria.add(c); } criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY); List<?> features = criteria.list(); Envelope bounds = new Envelope(); for (Object f : features) { Envelope geomBounds = getFeatureModel().getGeometry(f).getEnvelopeInternal(); if (!geomBounds.isNull()) { bounds.expandToInclude(geomBounds); } } return bounds; } catch (HibernateException he) { throw new HibernateLayerException(he, ExceptionCode.HIBERNATE_LOAD_FILTER_FAIL, getFeatureInfo() .getDataSourceName(), filter.toString()); } }
Bounds are calculated locally, can use any filter, but slower than native. @param filter filter which needs to be applied @return the bounds of the specified features @throws LayerException problem calculating the bounds
public Object getBean(String name) { Bean bean = beans.get(name); if (null == bean) { return null; } return bean.object; }
Get a bean value from the context. @param name bean name @return bean value or null
public void setBean(String name, Object object) { Bean bean = beans.get(name); if (null == bean) { bean = new Bean(); beans.put(name, bean); } bean.object = object; }
Set a bean in the context. @param name bean name @param object bean value
public Object remove(String name) { Bean bean = beans.get(name); if (null != bean) { beans.remove(name); bean.destructionCallback.run(); return bean.object; } return null; }
Remove a bean from the context, calling the destruction callback if any. @param name bean name @return previous value
public void registerDestructionCallback(String name, Runnable callback) { Bean bean = beans.get(name); if (null == bean) { bean = new Bean(); beans.put(name, bean); } bean.destructionCallback = callback; }
Register the given callback as to be executed after request completion. @param name The name of the bean. @param callback The callback of the bean to be executed for destruction.
public void clear() { for (Bean bean : beans.values()) { if (null != bean.destructionCallback) { bean.destructionCallback.run(); } } beans.clear(); }
Clear all beans and call the destruction callback.
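The methods above form a small name-to-bean registry with per-entry destruction callbacks. A hedged usage sketch (the context variable and the bean name are hypothetical):

context.setBean("userData", userData);                          // store a value under a name
context.registerDestructionCallback("userData", new Runnable() {
    public void run() { System.out.println("userData released"); } // runs on remove() or clear()
});
Object value = context.getBean("userData");                     // read it back, null if absent
context.clear();                                                // runs all registered callbacks and empties the registry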
private String parseLayerId(HttpServletRequest request) { StringTokenizer tokenizer = new StringTokenizer(request.getRequestURI(), "/"); String token = ""; while (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); } return token; }
Get the layer ID out of the request URL. @param request servlet request @return layer id
private WmsLayer getLayer(String layerId) { RasterLayer layer = configurationService.getRasterLayer(layerId); if (layer instanceof WmsLayer) { return (WmsLayer) layer; } return null; }
Given a layer ID, search for the WMS layer. @param layerId layer id @return WMS layer or null if layer is not a WMS layer
private byte[] createErrorImage(int width, int height, Exception e) throws IOException { String error = e.getMessage(); if (null == error) { Writer result = new StringWriter(); PrintWriter printWriter = new PrintWriter(result); e.printStackTrace(printWriter); error = result.toString(); } BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_4BYTE_ABGR); Graphics2D g = (Graphics2D) image.getGraphics(); g.setColor(Color.RED); g.drawString(error, ERROR_MESSAGE_X, height / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); ImageIO.write(image, "PNG", out); out.flush(); byte[] result = out.toByteArray(); out.close(); return result; }
Create an error image should an error occur while fetching a WMS map. @param width image width @param height image height @param e exception @return error image @throws java.io.IOException problem writing the image
public DBag difference(DBag otherBag) { DBagImpl result = new DBagImpl(getPBKey()); Iterator iter = this.iterator(); while (iter.hasNext()) { Object candidate = iter.next(); if (!otherBag.contains(candidate)) { result.add(candidate); } } return result; }
A new <code>DBag</code> instance is created that contains the difference of this object and the <code>DBag</code> instance referenced by <code>otherBag</code>. This method is similar to the <code>removeAll</code> method in <code>Collection</code>, except that this method creates a new collection and <code>removeAll</code> modifies the object to contain the result. @param otherBag The other bag to use in creating the difference. @return A <code>DBag</code> instance that contains the elements of this object minus the elements in <code>otherBag</code>.
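A short sketch contrasting difference() with Collection.removeAll(), as described above (bagA and bagB are hypothetical DBag instances):

DBag remaining = bagA.difference(bagB); // new bag with the elements of bagA that bagB does not contain
// bagA itself is left untouched, whereas
bagA.removeAll(bagB);                   // removeAll mutates bagA in place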
public int occurrences(Object obj) { int count = 0; for (int i = 0; i < this.size(); i++) { if ((obj == null) ? this.get(i) == null : this.get(i).equals(obj)) { count++; } } return count; }
This method returns the number of occurrences of the object <code>obj</code> in the <code>DBag</code> collection. @param obj The value that may have elements in the collection. @return The number of occurrences of <code>obj</code> in this collection.
public static void trace(final Logger logger, final String format, final Throwable throwable, final AbstractLoggingHelperConverter converter, final Object... params) { if (logger.isTraceEnabled()) { Object[] formatParams = params; if (converter != null) { formatParams = converter.convert(params); } final String message = String.format(format, formatParams); logger.trace(message, throwable); } }
Log a message using String.format internally, only if trace level is enabled. @param logger the logger that will be used to log the message @param format the format string (the template string) @param throwable a throwable object that holds the throwable information @param converter the converter used to convert the param arguments in case the trace level is enabled @param params the parameters to be formatted into the format string
public static void trace(final Logger logger, final String format, final Throwable throwable, final Object... params) { trace(logger, format, throwable, null, params); }
Log a message using String.format internally, only if trace level is enabled. @param logger the logger that will be used to log the message @param format the format string (the template string) @param throwable a throwable object that holds the throwable information @param params the parameters to be formatted into the format string
public static void debug(final Logger logger, final String format, final Throwable throwable, final AbstractLoggingHelperConverter converter, final Object... params) { if (logger.isDebugEnabled()) { Object[] formatParams = params; if (converter != null) { formatParams = converter.convert(params); } final String message = String.format(format, formatParams); logger.debug(message, throwable); } }
Log a message using String.format internally, only if debug level is enabled. @param logger the logger that will be used to log the message @param format the format string (the template string) @param throwable a throwable object that holds the throwable information @param converter the converter used to convert the param arguments in case the debug level is enabled @param params the parameters to be formatted into the format string
public static void info(final Logger logger, final String format, final Object... params) { info(logger, format, null, params); }
Log a message using String.format internally, only if info level is enabled. @param logger the logger that will be used to log the message @param format the format string (the template string) @param params the parameters to be formatted into the format string
public static void info(final Logger logger, final String format, final Throwable throwable, final Object... params) { if (logger.isInfoEnabled()) { final String message = String.format(format, params); logger.info(message, throwable); } }
Log a message using String.format internally, only if info level is enabled. @param logger the logger that will be used to log the message @param format the format string (the template string) @param throwable a throwable object that holds the throwable information @param params the parameters to be formatted into the format string
public static void warn(final Logger logger, final String format, final Object... params) { warn(logger, format, null, params); }
Log a message using the String.format API. @param logger the logger that will be used to log the message @param format the format string (the template string) @param params the parameters to be formatted into the format string
public static void warn(final Logger logger, final String format, final Throwable throwable, final Object... params) { final String message = String.format(format, params); logger.warn(message, throwable); }
Log a message using the String.format API. @param logger the logger that will be used to log the message @param format the format string (the template string) @param throwable a throwable object that holds the throwable information @param params the parameters to be formatted into the format string
public static void error(final Logger logger, final String format, final Object... params) { error(logger, format, null, params); }
Log a message using the String.format API. @param logger the logger that will be used to log the message @param format the format string (the template string) @param params the parameters to be formatted into the format string
public static void fatal(final Logger logger, final String format, final Object... params) { fatal(logger, format, null, params); }
Log a message using the String.format API. @param logger the logger that will be used to log the message @param format the format string (the template string) @param params the parameters to be formatted into the format string
public static void log(final Logger logger, final String callerFQCN, final Level level, final Throwable throwable, final String format, final Object... params) { if (level.isGreaterOrEqual(logger.getEffectiveLevel())) { logger.log(callerFQCN, level, String.format(format, params), throwable); } }
Log a message using the String.format API. @param logger a logger object that will be used for the actual log. @param callerFQCN The wrapper class' fully qualified class name. @param level the level of the requested log @param throwable a throwable object if applicable @param format the format string (the template string) @param params the parameters to be formatted into the format string
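A usage sketch for the String.format based helpers above. The enclosing class name (written here as LoggingHelper) and the argument variables are assumptions; only signatures shown above are used:

LoggingHelper.info(log, "loaded %d records from %s", count, fileName);   // formatted and logged only if INFO is enabled
LoggingHelper.trace(log, "failed to parse %s", exception, fileName);     // throwable variant, only if TRACE is enabled
LoggingHelper.warn(log, "retrying %s, attempt %d", url, attempt);        // warn/error/fatal format and log unconditionally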
public static String formatCommunicationMessage(final String protocol, final String source, final String destination, final String message) { return COMM_MESSAGE_FORMAT.format(new Object[] { protocol, source, destination, message }); }
Helper method for formatting transmission and reception messages. @param protocol The protocol used @param source Message source @param destination Message destination @param message The message @return A formatted message in the format: "protocol[&lt;protocol&gt;] source[&lt;source&gt;] destination[&lt;destination&gt;] &lt;message&gt;" <br/> e.g. protocol[OpenCAS] source[234.234.234.234:4321] destination[123.123.123.123:4567] 0x0a0b0c0d0e0f
public static String formatCommunicationMessage(final String protocol, final String source, final String destination, final String message, final IN_OUT_MODE inOutMODE) { return COMM_MESSAGE_FORMAT_IN_OUT.format(new Object[] { inOutMODE, protocol, source, destination, message }); }
Helper method for formatting transmission and reception messages. @param protocol The protocol used @param source Message source @param destination Message destination @param message The message @param inOutMODE Enum that designates whether this communication is incoming (received) or outgoing (transmitted) @return A formatted message in the format: "Rx: / Tx: protocol[&lt;protocol&gt;] source[&lt;source&gt;] destination[&lt;destination&gt;] &lt;message&gt;" <br/> e.g. Rx: protocol[OpenCAS] source[234.234.234.234:4321] destination[123.123.123.123:4567] 0x0a0b0c0d0e0f
public static String formatConnectionEstablishmentMessage(final String connectionName, final String host, final String connectionReason) { return CON_ESTABLISHMENT_FORMAT.format(new Object[] { connectionName, host, connectionReason }); }
Helper method for formatting connection establishment messages. @param connectionName The name of the connection @param host The remote host @param connectionReason The reason for establishing the connection @return A formatted message in the format: "[&lt;connectionName&gt;] remote host[&lt;host&gt;] &lt;connectionReason&gt;" <br/> e.g. [con1] remote host[123.123.123.123] connection to ECMG.
public static String formatConnectionTerminationMessage(final String connectionName, final String host, final String connectionReason, final String terminationReason) { return CON_TERMINATION_FORMAT.format(new Object[] { connectionName, host, connectionReason, terminationReason }); }
Helper method for formatting connection termination messages. @param connectionName The name of the connection @param host The remote host @param connectionReason The reason for establishing the connection @param terminationReason The reason for terminating the connection @return A formatted message in the format: "[&lt;connectionName&gt;] remote host[&lt;host&gt;] &lt;connectionReason&gt; - &lt;terminationReason&gt;" <br/> e.g. [con1] remote host[123.123.123.123] connection to ECMG - terminated by remote host.
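A sketch of the formatting helpers in use; the class name LoggingHelper is an assumption, and the expected output follows the docstrings above:

String comm = LoggingHelper.formatCommunicationMessage("OpenCAS", "234.234.234.234:4321", "123.123.123.123:4567", "0x0a0b0c0d0e0f");
// -> protocol[OpenCAS] source[234.234.234.234:4321] destination[123.123.123.123:4567] 0x0a0b0c0d0e0f
String est = LoggingHelper.formatConnectionEstablishmentMessage("con1", "123.123.123.123", "connection to ECMG.");
// -> [con1] remote host[123.123.123.123] connection to ECMG.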
public void fillJCDFromDataSource(JdbcConnectionDescriptor jcd, DataSource dataSource, String username, String password) throws MetadataException { String realUsername = (jcd.getUserName() != null ? jcd.getUserName() : username); String realPassword = (jcd.getPassWord() != null ? jcd.getPassWord() : password); Connection connection = null; DatabaseMetaData metadata = null; try { // we have to open a connection to be able to retrieve metadata if (realUsername != null) { connection = dataSource.getConnection(realUsername, realPassword); } else { connection = dataSource.getConnection(); } metadata = connection.getMetaData(); } catch (Throwable t) { if (connection != null) { try { connection.close(); } catch (SQLException ex) {} } throw new MetadataException("Could not get the metadata from the given datasource", t); } try { HashMap urlComponents = parseConnectionUrl(metadata.getURL()); if (urlComponents.containsKey(PROPERTY_DBALIAS)) { jcd.setProtocol((String)urlComponents.get(PROPERTY_PROTOCOL)); jcd.setSubProtocol((String)urlComponents.get(PROPERTY_SUBPROTOCOL)); jcd.setDbAlias((String)urlComponents.get(PROPERTY_DBALIAS)); if (jdbcSubProtocolToPlatform.containsKey(jcd.getSubProtocol())) { // TODO: We might be able to use this: metadata.getDatabaseProductName(); jcd.setDbms((String)jdbcSubProtocolToPlatform.get(jcd.getSubProtocol())); } } } catch (Throwable t) { try { connection.close(); } catch (SQLException ex) {} throw new MetadataException("Could not get the metadata from the given datasource", t); } try { // this will only work with JDK >= 1.4 and only with some jdbc drivers Integer majorVersion = (Integer)PropertyUtils.getProperty(metadata, "JDBCMajorVersion"); Integer minorVersion = (Integer)PropertyUtils.getProperty(metadata, "JDBCMinorVersion"); jcd.setJdbcLevel(Double.parseDouble(majorVersion.toString()+"."+minorVersion.toString())); } catch (Throwable t) { // otherwise we're assuming JDBC 2.0 compliance jcd.setJdbcLevel(2.0); } try { connection.close(); } catch (SQLException ex) {} }
Fills parameters of the given {@link JdbcConnectionDescriptor} with metadata extracted from the given datasource. @param jcd The jdbc connection descriptor to fill @param dataSource The data source @param username The username required to establish a connection via the data source. Can be empty if the data source does not require it or if one is specified in the jdbc connection descriptor @param password The password required to establish a connection via the data source. Can be empty if the data source or username does not require it or if one is specified in the jdbc connection descriptor
public HashMap parseConnectionUrl(String jdbcConnectionUrl) { HashMap result = new HashMap(); if (jdbcConnectionUrl == null) { return result; } int pos = jdbcConnectionUrl.indexOf(':'); int lastPos; result.put(PROPERTY_PROTOCOL, jdbcConnectionUrl.substring(0, pos)); lastPos = pos; pos = jdbcConnectionUrl.indexOf(':', lastPos + 1); String subProtocol = jdbcConnectionUrl.substring(lastPos + 1, pos); // there are a few jdbc drivers that have a subprotocol containing one or more ':' if ("inetpool".equals(subProtocol)) { // Possible forms are: // inetpool:<subprotocol> // inetpool:jdbc:<subprotocol> (where we'll remove the 'jdbc' part) int tmpPos = jdbcConnectionUrl.indexOf(':', pos + 1); if ("inetpool:jdbc".equals(jdbcConnectionUrl.substring(lastPos + 1, tmpPos))) { pos = tmpPos; tmpPos = jdbcConnectionUrl.indexOf(':', pos + 1); } subProtocol += ":" + jdbcConnectionUrl.substring(pos + 1, tmpPos); } else if ("jtds".equals(subProtocol) || "microsoft".equals(subProtocol) || "sybase".equals(subProtocol)) { pos = jdbcConnectionUrl.indexOf(':', pos + 1); subProtocol = ":" + jdbcConnectionUrl.substring(lastPos + 1, pos); } result.put(PROPERTY_SUBPROTOCOL, subProtocol); result.put(PROPERTY_DBALIAS, jdbcConnectionUrl.substring(pos + 1)); return result; }
Splits the given jdbc connection url into its components and puts them into a hash map using the <code>PROPERTY_</code> constants. @param jdbcConnectionUrl The connection url @return The properties
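A worked example for parseConnectionUrl (the utility variable is hypothetical; the map keys are the PROPERTY_ constants mentioned above):

HashMap parts = utility.parseConnectionUrl("jdbc:mysql://localhost/testdb");
// parts.get(PROPERTY_PROTOCOL)    -> "jdbc"
// parts.get(PROPERTY_SUBPROTOCOL) -> "mysql"
// parts.get(PROPERTY_DBALIAS)     -> "//localhost/testdb"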
public String findPlatformFor(String jdbcSubProtocol, String jdbcDriver) { String platform = (String)jdbcSubProtocolToPlatform.get(jdbcSubProtocol); if (platform == null) { platform = (String)jdbcDriverToPlatform.get(jdbcDriver); } return platform; }
Derives the OJB platform to use for a database that is connected via a url using the specified subprotocol, and where the specified jdbc driver is used. @param jdbcSubProtocol The JDBC subprotocol used to connect to the database @param jdbcDriver The JDBC driver used to connect to the database @return The platform identifier or <code>null</code> if no platform could be found
public static void validate(final Artifact artifact) { if((artifact.getOrigin()== null || "maven".equals(artifact.getOrigin())) && (artifact.getGroupId() == null || artifact.getGroupId().isEmpty())){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Artifact groupId should not be null or empty") .build()); } if(artifact.getArtifactId() == null || artifact.getArtifactId().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Artifact artifactId should not be null or empty") .build()); } if(artifact.getVersion() == null || artifact.getVersion().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Artifact version should not be null or empty") .build()); } }
Checks if the provided artifact is valid and could be stored into the database @param artifact the artifact to test @throws WebApplicationException if the data is corrupted
public static void validatePostArtifact(final Artifact artifact) { validate(artifact); if(artifact.getExtension() == null || artifact.getExtension().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Artifact extension should not be null or empty") .build()); } if(artifact.getSha256() == null || artifact.getSha256().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Artifact SHA256 checksum should not be null or empty") .build()); } if(artifact.getSha256().length() != 64){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Artifact SHA256 checksum length should be 64") .build()); } }
Checks if the provided artifact is valid and could be stored into the database @param artifact the artifact to test @throws WebApplicationException if the data is corrupted
public static void validate(final License license) { // A license should have a name if(license.getName() == null || license.getName().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("License name should not be empty!") .build()); } // A license should have a long name if(license.getLongName() == null || license.getLongName().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("License long name should not be empty!") .build()); } // If there is a regexp, it should compile if(license.getRegexp() != null && !license.getRegexp().isEmpty()){ try{ Pattern.compile(license.getRegexp()); } catch (PatternSyntaxException e){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("License regexp does not compile!").build()); } Pattern regex = Pattern.compile("[&%//]"); if(regex.matcher(license.getRegexp()).find()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("License regexp does not compile!").build()); } } }
Checks if the provided license is valid and could be stored into the database @param license the license to test @throws WebApplicationException if the data is corrupted
public static void validate(final Module module) { if (null == module) { throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Module cannot be null!") .build()); } if(module.getName() == null || module.getName().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Module name cannot be null or empty!") .build()); } if(module.getVersion()== null || module.getVersion().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Module version cannot be null or empty!") .build()); } // Check artifacts for(final Artifact artifact: DataUtils.getAllArtifacts(module)){ validate(artifact); } // Check dependencies for(final Dependency dependency: DataUtils.getAllDependencies(module)){ validate(dependency.getTarget()); } }
Checks if the provided module is valid and could be stored into the database @param module the module to test @throws WebApplicationException if the data is corrupted
public static void validate(final Organization organization) { if(organization.getName() == null || organization.getName().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Organization name cannot be null or empty!") .build()); } }
Checks if the provided organization is valid and could be stored into the database @param organization Organization @throws WebApplicationException if the data is corrupted
public static void validate(final ArtifactQuery artifactQuery) { final Pattern invalidChars = Pattern.compile("[^A-Fa-f0-9]"); if(artifactQuery.getUser() == null || artifactQuery.getUser().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Mandatory field [user] missing") .build()); } if( artifactQuery.getStage() != 0 && artifactQuery.getStage() !=1 ){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Invalid [stage] value (supported 0 | 1)") .build()); } if(artifactQuery.getName() == null || artifactQuery.getName().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Mandatory field [name] missing, it should be the file name") .build()); } if(artifactQuery.getSha256() == null || artifactQuery.getSha256().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Mandatory field [sha256] missing") .build()); } if(artifactQuery.getSha256().length() < 64 || invalidChars.matcher(artifactQuery.getSha256()).find()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Invalid file checksum value") .build()); } if(artifactQuery.getType() == null || artifactQuery.getType().isEmpty()){ throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("Mandatory field [type] missing") .build()); } }
Checks if the provided artifactQuery is valid @param artifactQuery ArtifactQuery @throws WebApplicationException if the data is corrupted
private static String getDefaultPersistentFieldClassName() { try { PersistenceBrokerConfiguration config = (PersistenceBrokerConfiguration) OjbConfigurator.getInstance().getConfigurationFor( null); Class clazz = config.getPersistentFieldClass(); return clazz.getName(); } catch (ConfigurationException e) { log.error("Cannot look-up PersistentField class, use default implementation instead", e); return DEFAULT_PERSISTENT_FIELD_IMPL.getName(); } }
Looks up the fully qualified name of the PersistentField implementation class from the OJB configuration; if the configuration cannot be read, the default implementation's class name is returned instead.
protected long getUniqueLong(FieldDescriptor field) throws SequenceManagerException { long result; // lookup sequence name String sequenceName = calculateSequenceName(field); try { result = buildNextSequence(field.getClassDescriptor(), sequenceName); } catch (Throwable e) { // maybe the sequence was not created try { log.info("Create DB sequence key '"+sequenceName+"'"); createSequence(field.getClassDescriptor(), sequenceName); } catch (Exception e1) { throw new SequenceManagerException( SystemUtils.LINE_SEPARATOR + "Could not grab next id, failed with " + SystemUtils.LINE_SEPARATOR + e.getMessage() + SystemUtils.LINE_SEPARATOR + "Creation of new sequence failed with " + SystemUtils.LINE_SEPARATOR + e1.getMessage() + SystemUtils.LINE_SEPARATOR , e1); } try { result = buildNextSequence(field.getClassDescriptor(), sequenceName); } catch (Throwable e1) { throw new SequenceManagerException("Could not grab next id, sequence seems to exist", e); } } return result; }
Returns a unique long value for class clazz and field fieldName. The returned number is unique across all tables in the extent of clazz.
public Object getUniqueValue(FieldDescriptor field) throws SequenceManagerException { Object result = field.getJdbcType().sequenceKeyConversion(new Long(getUniqueLong(field))); // perform a sql to java conversion here, so that clients do // not see any db specific values result = field.getFieldConversion().sqlToJava(result); return result; }
Returns a unique object for the given field attribute. The returned value takes into account the jdbc-type and the FieldConversion.sql2java() conversion defined for <code>field</code>. The returned object is unique across all tables in the extent of the class the field belongs to.
protected Collection provideStateManagers(Collection pojos) { PersistenceCapable pc; int [] fieldNums; Iterator iter = pojos.iterator(); Collection result = new ArrayList(); while (iter.hasNext()) { // obtain a StateManager pc = (PersistenceCapable) iter.next(); Identity oid = new Identity(pc, broker); StateManagerInternal smi = pmi.getStateManager(oid, pc.getClass()); // fetch attributes into StateManager JDOClass jdoClass = Helper.getJDOClass(pc.getClass()); fieldNums = jdoClass.getManagedFieldNumbers(); FieldManager fm = new OjbFieldManager(pc, broker); smi.replaceFields(fieldNums, fm); smi.retrieve(); // get JDO PersistencecCapable instance from SM and add it to result collection Object instance = smi.getObject(); result.add(instance); } return result; }
This method enhances the objects loaded by a broker query with a JDO StateManager and brings them under JDO control. @param pojos the OJB pojos as obtained by the broker @return the collection of JDO PersistenceCapable instances
public static void debug(boolean alsoStdout, Class clazz, String posInfo, Object msg) { if (alsoStdout) { System.out.println(msg.toString()); } String name = clazz.getName(); if (posInfo != null) { name += "." + posInfo; } Log log = LogFactory.getLog(name); if (log.isDebugEnabled()) { log.debug(msg); } }
Logs the given debug message to stdout (if verbose is on) and to the log for the given class (if the log level has been set to debug or higher). @param alsoStdout Whether to also put the message to stdout @param clazz The clazz @param posInfo The position info, e.g. method name @param msg The message
public final Object copy(final Object toCopy, PersistenceBroker broker) { return clone(toCopy, IdentityMapFactory.getIdentityMap(), new HashMap()); }
Makes a deep clone of the object, using reflection. @param toCopy the object you want to copy @param broker the persistence broker @return a deep copy of the given object
private static void setFields(final Object from, final Object to, final Field[] fields, final boolean accessible, final Map objMap, final Map metadataMap) { for (int f = 0, fieldsLength = fields.length; f < fieldsLength; ++f) { final Field field = fields[f]; final int modifiers = field.getModifiers(); if ((Modifier.STATIC & modifiers) != 0) continue; if ((Modifier.FINAL & modifiers) != 0) throw new ObjectCopyException("cannot set final field [" + field.getName() + "] of class [" + from.getClass().getName() + "]"); if (!accessible && ((Modifier.PUBLIC & modifiers) == 0)) { try { field.setAccessible(true); } catch (SecurityException e) { throw new ObjectCopyException("cannot access field [" + field.getName() + "] of class [" + from.getClass().getName() + "]: " + e.toString(), e); } } try { cloneAndSetFieldValue(field, from, to, objMap, metadataMap); } catch (Exception e) { throw new ObjectCopyException("cannot set field [" + field.getName() + "] of class [" + from.getClass().getName() + "]: " + e.toString(), e); } } }
copy all fields from the "from" object to the "to" object. @param from source object @param to from's clone @param fields fields to be populated @param accessible 'true' if all 'fields' have been made accessible during traversal
public void registerComponent(java.awt.Component c) { unregisterComponent(c); if (recognizerAbstractClass == null) { hmDragGestureRecognizers.put(c, dragSource.createDefaultDragGestureRecognizer(c, dragWorker.getAcceptableActions(c), dgListener) ); } else { hmDragGestureRecognizers.put(c, dragSource.createDragGestureRecognizer (recognizerAbstractClass, c, dragWorker.getAcceptableActions(c), dgListener) ); } }
add a Component to this Worker. After the call dragging is enabled for this Component. @param c the Component to register
public void unregisterComponent(java.awt.Component c) { java.awt.dnd.DragGestureRecognizer recognizer = (java.awt.dnd.DragGestureRecognizer)this.hmDragGestureRecognizers.remove(c); if (recognizer != null) recognizer.setComponent(null); }
remove drag support from the given Component. @param c the Component to remove
protected Object doInvoke(Object proxy, Method methodToBeInvoked, Object[] args) throws Throwable { Method m = getRealSubject().getClass().getMethod( methodToBeInvoked.getName(), methodToBeInvoked.getParameterTypes()); return m.invoke(getRealSubject(), args); }
Invokes the given method on the real subject of the proxy; the method is looked up by name and parameter types via reflection.
public static File startSystemCaseManager(String package_path, String dest_dir) throws BeastException { Logger logger = Logger .getLogger("CreateSystemCaseManager.startSystemCaseManager"); File folder = SystemReader.createFolder(package_path, dest_dir); File caseManager = new File(folder, "UserStoriesManager.java"); FileWriter caseManagerWriter; try { if (!caseManager.exists()) { caseManagerWriter = new FileWriter(caseManager); caseManagerWriter.write("package " + package_path + ";\n"); caseManagerWriter.write("\n"); caseManagerWriter.write("import org.junit.Assert;\n"); caseManagerWriter.write("import org.junit.Test;\n"); caseManagerWriter.write("import org.junit.runner.JUnitCore;\n"); caseManagerWriter.write("import org.junit.runner.Result;\n"); caseManagerWriter.write("\n"); caseManagerWriter.write("/**\n"); caseManagerWriter .write(" * Main class to launch all tests in a single run\n"); caseManagerWriter.write(" *\n"); caseManagerWriter.write(" * @author es.upm.dit.gsi.beast\n"); caseManagerWriter.write(" */\n"); caseManagerWriter.write("public class UserStoriesManager {\n"); caseManagerWriter.write("\n"); caseManagerWriter.flush(); caseManagerWriter.close(); // logger.info("CaseManager has been created in "+dest_dir+Reader.createFolderPath(package_path)); } else { List<String> lines = new ArrayList<String>(); // read the file into lines BufferedReader r = new BufferedReader(new FileReader( caseManager)); String in; while ((in = r.readLine()) != null) { lines.add(in); } r.close(); lines.remove(lines.size() - 1); // write it back PrintWriter w = new PrintWriter(new FileWriter(caseManager)); for (String line : lines) { w.println(line); } w.close(); } } catch (IOException e) { logger.severe("ERROR writing Case Manager file"); throw new BeastException("ERROR writing Case Manager file", e); } return caseManager; }
This method creates the CaseManager file and writes to it: the package, the imports, its comments and the class name. @param package_path as es.upm.dit... @param dest_dir as src/main/java @return the File with its first part written @throws BeastException if the file cannot be written
public void addIterator(OJBIterator iterator) { /** * only add iterators that are not null and non-empty. */ if (iterator != null) { if (iterator.hasNext()) { setNextIterator(); m_rsIterators.add(iterator); } } }
use this method to construct the ChainingIterator iterator by iterator.
public int size() throws PersistenceBrokerException { if (m_fullSize == -1) { int size = 0; Iterator it = m_rsIterators.iterator(); while (it.hasNext()) { size += ((OJBIterator) it.next()).size(); } m_fullSize = size; } return m_fullSize; }
Calculates the size of all the iterators. Caches it for fast lookups in the future. iterators shouldn't change size after the queries have been executed so caching is safe (assumption, should check). @return the combined size of all the iterators for all extents.
public boolean absolute(int row) throws PersistenceBrokerException { // 1. handle the special cases first. if (row == 0) { return true; } if (row == 1) { m_activeIteratorIndex = 0; m_activeIterator = (OJBIterator) m_rsIterators.get(m_activeIteratorIndex); m_activeIterator.absolute(1); return true; } if (row == -1) { m_activeIteratorIndex = m_rsIterators.size(); m_activeIterator = (OJBIterator) m_rsIterators.get(m_activeIteratorIndex); m_activeIterator.absolute(-1); return true; } // now do the real work. boolean movedToAbsolute = false; boolean retval = false; setNextIterator(); // row is positive, so index from beginning. if (row > 0) { int sizeCount = 0; Iterator it = m_rsIterators.iterator(); OJBIterator temp = null; while (it.hasNext() && !movedToAbsolute) { temp = (OJBIterator) it.next(); if (temp.size() < row) { sizeCount += temp.size(); } else { // move to the offset - sizecount m_currentCursorPosition = row - sizeCount; retval = temp.absolute(m_currentCursorPosition); movedToAbsolute = true; } } } // row is negative, so index from end else if (row < 0) { int sizeCount = 0; OJBIterator temp = null; for (int i = m_rsIterators.size(); ((i >= 0) && !movedToAbsolute); i--) { temp = (OJBIterator) m_rsIterators.get(i); if (temp.size() < row) { sizeCount += temp.size(); } else { // move to the offset - sizecount m_currentCursorPosition = row + sizeCount; retval = temp.absolute(m_currentCursorPosition); movedToAbsolute = true; } } } return retval; }
The absolute and relative calls are the trickiest parts, as we potentially have to move across cursor boundaries. A positive row value indexes from the beginning of the result set; a negative row value indexes from the end of the result set. Calling absolute(1) is the same as calling first(). Calling absolute(-1) is the same as calling last().
public boolean relative(int row) throws PersistenceBrokerException { if (row == 0) { return true; } boolean movedToRelative = false; boolean retval = false; setNextIterator(); if (row > 0) { // special case checking for the iterator we're currently in // (since it isn't positioned on the boundary potentially) if (row > (m_activeIterator.size() - m_currentCursorPosition)) { // the relative position lies over the border of the // current iterator. // starting position counter should be set to whatever we have left in // active iterator. int positionCounter = m_activeIterator.size() - m_currentCursorPosition; for (int i = m_activeIteratorIndex + 1; ((i < m_rsIterators.size()) && !movedToRelative); i++) { m_activeIteratorIndex = i; m_currentCursorPosition = 0; m_activeIterator = (OJBIterator) m_rsIterators.get(m_activeIteratorIndex); if (!((row - positionCounter) > m_activeIterator.size())) { // the relative position requested is within this iterator. m_currentCursorPosition = row - positionCounter; retval = m_activeIterator.relative(m_currentCursorPosition); movedToRelative = true; } } } else { // the relative position lays within the current iterator. retval = m_activeIterator.relative(row); movedToRelative = true; } } return retval; }
Moves the cursor a relative number of rows. Movement can go forward (positive) or in reverse (negative). Calling relative does not "wrap": if you move before the first or after the last row you get positioned at the first or last row. Calling relative(0) does not change the cursor position. Note: Calling the method relative(1) is different from calling the method next() because it makes sense to call next() when there is no current row, for example, when the cursor is positioned before the first row or after the last row of the result set.
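An illustrative cursor walk over a chaining iterator wrapping two result sets of sizes 3 and 4 (seven rows in total); the chain variable is hypothetical and the positions follow the rules documented above:

chain.absolute(1);   // same as first(): row 1 of the first contained iterator
chain.absolute(5);   // crosses the iterator boundary: row 2 of the second iterator
chain.relative(0);   // no-op, the cursor position is unchanged
chain.absolute(-1);  // same as last(): the final row of the last iterator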
public void releaseDbResources() { Iterator it = m_rsIterators.iterator(); while (it.hasNext()) { ((OJBIterator) it.next()).releaseDbResources(); } }
delegate to each contained OJBIterator and release its resources.
private boolean setNextIterator() { boolean retval = false; // first, check if the activeIterator is null, and set it. if (m_activeIterator == null) { if (m_rsIterators.size() > 0) { m_activeIteratorIndex = 0; m_currentCursorPosition = 0; m_activeIterator = (OJBIterator) m_rsIterators.get(m_activeIteratorIndex); } } else if (!m_activeIterator.hasNext()) { if (m_rsIterators.size() > (m_activeIteratorIndex + 1)) { // we still have iterators in the collection, move to the // next one, increment the counter, and set the active // iterator. m_activeIteratorIndex++; m_currentCursorPosition = 0; m_activeIterator = (OJBIterator) m_rsIterators.get(m_activeIteratorIndex); retval = true; } } return retval; }
Convenience routine to move to the next iterator if needed. @return true if the iterator is changed, false if no changes.
public boolean containsIteratorForTable(String aTable) { boolean result = false; if (m_rsIterators != null) { for (int i = 0; i < m_rsIterators.size(); i++) { OJBIterator it = (OJBIterator) m_rsIterators.get(i); if (it instanceof RsIterator) { if (((RsIterator) it).getClassDescriptor().getFullTableName().equals(aTable)) { result = true; break; } } else if (it instanceof ChainingIterator) { result = ((ChainingIterator) it).containsIteratorForTable(aTable); } } } return result; }
Answer true if an Iterator for a Table is already available. @param aTable the table to look for @return true if one of the contained iterators works on the given table
public void init(Envelope maxExtent, double scale) { double[] layerSize = TileUtil.getTileLayerSize(code, maxExtent, scale); tileWidth = layerSize[0]; tileHeight = layerSize[1]; int[] screenSize = TileUtil.getTileScreenSize(layerSize, scale); screenWidth = screenSize[0]; screenHeight = screenSize[1]; bounds = TileUtil.getTileBounds(code, maxExtent, scale); }
Initialize the tile: compute the tile's width and height in layer (world) coordinates, its size in screen pixels and its bounds, for the given maximum extent and scale. @param maxExtent the maximum extent of the layer @param scale the scale to render at
public Class getSearchClass() { Object obj = getExampleObject(); if (obj instanceof Identity) { return ((Identity) obj).getObjectsTopLevelClass(); } else { return obj.getClass(); } }
Answer the search class. This is the class of the example object or the class represented by Identity. @return Class
private <T> T getBeanOrNull(String name, Class<T> requiredType) { if (name == null || !applicationContext.containsBean(name)) { return null; } else { try { return applicationContext.getBean(name, requiredType); } catch (BeansException be) { log.error("Error during getBeanOrNull, not rethrown, " + be.getMessage(), be); return null; } } }
Get a bean from the application context. Returns null if the bean does not exist. @param name name of bean @param requiredType type of bean @return the bean or null
public void restoreSecurityContext(CacheContext context) { SavedAuthorization cached = context.get(CacheContext.SECURITY_CONTEXT_KEY, SavedAuthorization.class); if (cached != null) { log.debug("Restoring security context {}", cached); securityManager.restoreSecurityContext(cached); } else { securityManager.clearSecurityContext(); } }
Puts the cached security context in the thread local. @param context the cache context
public void addMoreContext(CacheContext context) { Object cached = context.get(CacheContext.SECURITY_CONTEXT_KEY); if (cached == null) { SavedAuthorization sa = securityContext.getSavedAuthorization(); log.debug("Storing SavedAuthorization {}", sa); context.put(CacheContext.SECURITY_CONTEXT_KEY, sa); } }
Puts the thread local security in the cache (only when nothing is there just yet). @param context cache context
private void sortFileList() { if (this.size() > 1) { Collections.sort(this.fileList, new Comparator() { public final int compare(final Object o1, final Object o2) { final File f1 = (File) o1; final File f2 = (File) o2; final Object[] f1TimeAndCount = backupSuffixHelper .backupTimeAndCount(f1.getName(), baseFile); final Object[] f2TimeAndCount = backupSuffixHelper .backupTimeAndCount(f2.getName(), baseFile); final long f1TimeSuffix = ((Long) f1TimeAndCount[0]).longValue(); final long f2TimeSuffix = ((Long) f2TimeAndCount[0]).longValue(); if ((0L == f1TimeSuffix) && (0L == f2TimeSuffix)) { final long f1Time = f1.lastModified(); final long f2Time = f2.lastModified(); if (f1Time < f2Time) { return -1; } if (f1Time > f2Time) { return 1; } return 0; } if (f1TimeSuffix < f2TimeSuffix) { return -1; } if (f1TimeSuffix > f2TimeSuffix) { return 1; } final int f1Count = ((Integer) f1TimeAndCount[1]).intValue(); final int f2Count = ((Integer) f2TimeAndCount[1]).intValue(); if (f1Count < f2Count) { return -1; } if (f1Count > f2Count) { return 1; } if (f1Count == f2Count) { if (fileHelper.isCompressed(f1)) { return -1; } if (fileHelper.isCompressed(f2)) { return 1; } } return 0; } }); } }
Sort by time bucket, then backup count, and by compression state.
public void rollbackOtherBeanUsing_2(ArticleVO article, List persons) { log.info("rollbackOtherBeanUsing_2 method was called"); ArticleManagerPBLocal am = getArticleManager(); PersonManagerPBLocal pm = getPersonManager(); pm.storePersons(persons); am.failureStore(article); }
First store a list of persons, then store the article using a failure store method in ArticleManager. @ejb:interface-method
public void rollbackClientWrongInput(List articles, List persons) { log.info("rollbackClientWrongInput method was called"); ArticleManagerPBLocal am = getArticleManager(); PersonManagerPBLocal pm = getPersonManager(); am.storeArticles(articles); pm.storePersons(persons); }
This test method expects an invalid object in the person list, so that OJB causes an internal error. @ejb:interface-method
public void rollbackSetRollbackAndThrowException(List objects) { log.info("rollbackSetRollbackAndThrowException method was called"); storeObjects(objects); getSessionContext().setRollbackOnly(); // to notify the client about the failure we throw an exception // if we don't throw such an exception the client don't get notified // about the failure throw new EJBException("## Testing of rollback behaviour - rollbackSetRollbackAndThrowException ##"); }
We call ctx.setRollbackOnly() and do the odmg-tx.abort() call. @ejb:interface-method
public void rollbackBreakIteration(List objectsToStore) { // now we mix up different api's and use PB-api too log.info("rollbackBreakIteration"); /* store list of objects, then get these objects with Iterator, start iteration, then break */ storeObjects(objectsToStore); Class searchClass = objectsToStore.get(0).getClass(); PersistenceBroker broker = getBroker(); try { Query q = new QueryByCriteria(searchClass); // we get the iterator and step into the first found object Iterator it = broker.getIteratorByQuery(q); it.next(); } /* Now we want to break iteration or something wrong. In this case we have to cleanup the used PB instance by a close call */ finally { if(broker != null) broker.close(); } // to notify the client about the failure we throw an exception // if we don't throw such an exception the client don't get notified // about the failure throw new EJBException("## Testing of rollback behaviour - rollbackBreakIteration ##"); }
We use several OJB services, start to iterate a query result and do an odmg-tx.abort call. @ejb:interface-method
public static PBKey extractAllTokens(String name) { if(name == null) { throw new PersistenceBrokerException("Could not extract PBKey, given argument is 'null'"); } String user = null; String passwd = null; StringTokenizer tok = new StringTokenizer(name, REPOSITORY_NAME_SEPARATOR); String dbName = tok.nextToken(); if(tok.hasMoreTokens()) { user = tok.nextToken(); if(user != null && user.trim().equals("")) { user = null; } } if(tok.hasMoreTokens()) { if(user != null) passwd = tok.nextToken(); } if(user != null && passwd == null) { passwd = ""; } return new PBKey(dbName, user, passwd); }
Splits up the name string, extracts the db alias, user name and password, and builds a new PBKey instance - the token '#' is used to separate the substrings. @throws PersistenceBrokerException if given name was <code>null</code>
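Worked examples of the '#'-separated format accepted by extractAllTokens (the enclosing helper class, written here as BrokerHelper, is an assumption):

PBKey k1 = BrokerHelper.extractAllTokens("myRepository#joe#secret"); // alias "myRepository", user "joe", password "secret"
PBKey k2 = BrokerHelper.extractAllTokens("myRepository#joe");        // user given, password defaults to ""
PBKey k3 = BrokerHelper.extractAllTokens("myRepository");            // user and password remain null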
public static PBKey crossCheckPBKey(PBKey key) { if(key.getUser() == null) { PBKey defKey = MetadataManager.getInstance().connectionRepository().getStandardPBKeyForJcdAlias(key.getAlias()); if(defKey != null) { return defKey; } } return key; }
Check if the user of the given PBKey is <code>null</code>; if so, try to get user/password from the jdbc-connection-descriptor matching the given PBKey.getAlias().
private ClassDescriptor getRealClassDescriptor(ClassDescriptor aCld, Object anObj) { ClassDescriptor result; if(aCld.getClassOfObject() == ProxyHelper.getRealClass(anObj)) { result = aCld; } else { result = aCld.getRepository().getDescriptorFor(anObj.getClass()); } return result; }
Answer the real ClassDescriptor for anObj, i.e. aCld may be an Interface of anObj, so the cld for anObj is returned.
public ValueContainer[] getKeyValues(ClassDescriptor cld, Object objectOrProxy, boolean convertToSql) throws PersistenceBrokerException { IndirectionHandler handler = ProxyHelper.getIndirectionHandler(objectOrProxy); if(handler != null) { return getKeyValues(cld, handler.getIdentity(), convertToSql); //BRJ: convert Identity } else { ClassDescriptor realCld = getRealClassDescriptor(cld, objectOrProxy); return getValuesForObject(realCld.getPkFields(), objectOrProxy, convertToSql); } }
Returns an Array with an Object's PK values; if convertToSql is true, any associated java-to-sql conversions are applied. If the Object is a Proxy or a VirtualProxy NO conversion is necessary. @param objectOrProxy @param convertToSql @return Object[] @throws PersistenceBrokerException
public ValueContainer[] getKeyValues(ClassDescriptor cld, Identity oid) throws PersistenceBrokerException { return getKeyValues(cld, oid, true); }
Return primary key values of given Identity object. @param cld @param oid @return Object[] @throws PersistenceBrokerException
public ValueContainer[] getKeyValues(ClassDescriptor cld, Identity oid, boolean convertToSql) throws PersistenceBrokerException { FieldDescriptor[] pkFields = cld.getPkFields(); ValueContainer[] result = new ValueContainer[pkFields.length]; Object[] pkValues = oid.getPrimaryKeyValues(); try { for(int i = 0; i < result.length; i++) { FieldDescriptor fd = pkFields[i]; Object cv = pkValues[i]; if(convertToSql) { // BRJ : apply type and value mapping cv = fd.getFieldConversion().javaToSql(cv); } result[i] = new ValueContainer(cv, fd.getJdbcType()); } } catch(Exception e) { throw new PersistenceBrokerException("Can't generate primary key values for given Identity " + oid, e); } return result; }
Return key Values of an Identity @param cld @param oid @param convertToSql @return Object[] @throws PersistenceBrokerException
public ValueContainer[] getKeyValues(ClassDescriptor cld, Object objectOrProxy) throws PersistenceBrokerException { return getKeyValues(cld, objectOrProxy, true); }
Returns an Array with an Object's PK values, with any java-to-sql FieldConversion applied. If the Object is a Proxy or a VirtualProxy NO conversion is necessary. @param objectOrProxy @return Object[] @throws PersistenceBrokerException
public boolean representsNull(FieldDescriptor fld, Object aValue) { if(aValue == null) return true; boolean result = false; if(((aValue instanceof Number) && (((Number) aValue).longValue() == 0))) { Class type = fld.getPersistentField().getType(); /* AnonymousPersistentFields will *always* have a null type according to the javadoc comments in AnonymousPersistentField.getType() and never represents a primitve java field with value 0, thus we return always 'false' in this case. (If the value object is null, the first check above return true) */ if(type != null) { result = type.isPrimitive(); } } // TODO: Do we need this check?? String could be nullified, why should we assume // it's 'null' on empty string? else if((aValue instanceof String) && (((String) aValue).length() == 0)) { result = fld.isPrimaryKey(); } return result; }
Decide if the given object value represents 'null'.<br/> - If given value is 'null' itself, true will be returned<br/> - If given value is instance of Number with value 0 and the field-descriptor represents a primitive field, true will be returned<br/> - If given value is instance of String with length 0 and the field-descriptor is a primary key, true will be returned<br/>
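Worked cases for the rules above (helper and the FieldDescriptor variables are hypothetical):

helper.representsNull(anyField, null);                          // true: the value itself is null
helper.representsNull(primitiveIntField, Integer.valueOf(0));   // true: 0 on a primitive field
helper.representsNull(integerWrapperField, Integer.valueOf(0)); // false: a wrapper field may legitimately hold 0
helper.representsNull(stringPkField, "");                       // true: empty string on a primary key field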
public boolean hasNullPKField(ClassDescriptor cld, Object obj) { FieldDescriptor[] fields = cld.getPkFields(); boolean hasNull = false; // an unmaterialized proxy object can never have nullified PK's IndirectionHandler handler = ProxyHelper.getIndirectionHandler(obj); if(handler == null || handler.alreadyMaterialized()) { if(handler != null) obj = handler.getRealSubject(); FieldDescriptor fld; for(int i = 0; i < fields.length; i++) { fld = fields[i]; hasNull = representsNull(fld, fld.getPersistentField().get(obj)); if(hasNull) break; } } return hasNull; }
Detect if the given object has a PK field that represents a 'null' value.
private Object setAutoIncrementValue(FieldDescriptor fd, Object obj) { PersistentField f = fd.getPersistentField(); try { // lookup SeqMan for a value matching db column an Object result = m_broker.serviceSequenceManager().getUniqueValue(fd); // reflect autoincrement value back into object f.set(obj, result); return result; } catch(MetadataException e) { throw new PersistenceBrokerException( "Error while trying to autoincrement field " + f.getDeclaringClass() + "#" + f.getName(), e); } catch(SequenceManagerException e) { throw new PersistenceBrokerException("Could not get key value", e); } }
Set an autoincremented value in the given object field that has already had a field conversion run on it; if a value for the given field is already set, it will be overridden - no further checks are done. <p> The data type of the value that is returned by this method is compatible with the java-world. The return value has <b>NOT</b> been run through a field conversion and converted to a corresponding sql-type. @return the autoincremented value set on given object @throws PersistenceBrokerException if there is an error accessing obj field values
public ValueContainer[] getValuesForObject(FieldDescriptor[] fields, Object obj, boolean convertToSql, boolean assignAutoincrement) throws PersistenceBrokerException { ValueContainer[] result = new ValueContainer[fields.length]; for(int i = 0; i < fields.length; i++) { FieldDescriptor fd = fields[i]; Object cv = fd.getPersistentField().get(obj); /* handle autoincrement attributes if - is a autoincrement field - field represents a 'null' value, is nullified and generate a new value */ if(assignAutoincrement && fd.isAutoIncrement() && representsNull(fd, cv)) { /* setAutoIncrementValue returns a value that is properly typed for the java-world. This value needs to be converted to it's corresponding sql type so that the entire result array contains objects that are properly typed for sql. */ cv = setAutoIncrementValue(fd, obj); } if(convertToSql) { // apply type and value conversion cv = fd.getFieldConversion().javaToSql(cv); } // create ValueContainer result[i] = new ValueContainer(cv, fd.getJdbcType()); } return result; }
Get the values of the fields for an obj. Autoincrement values are automatically set. @param fields @param obj @throws PersistenceBrokerException
public ValueContainer[] getNonKeyRwValues(ClassDescriptor cld, Object obj) throws PersistenceBrokerException { return getValuesForObject(cld.getNonPkRwFields(), obj, true); }
Returns an array containing values for all non PK field READ/WRITE attributes of the object based on the specified {@link org.apache.ojb.broker.metadata.ClassDescriptor}. <br/> NOTE: This method doesn't do any checks on the specified {@link org.apache.ojb.broker.metadata.ClassDescriptor}; the caller is responsible to pass a valid descriptor. @param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} to extract the RW-fields @param obj The object with target fields to extract. @throws MetadataException if there is an error accessing obj field values
public ValueContainer[] getAllRwValues(ClassDescriptor cld, Object obj) throws PersistenceBrokerException { return getValuesForObject(cld.getAllRwFields(), obj, true); }
Returns an array containing values for all READ/WRITE attributes of the object based on the specified {@link org.apache.ojb.broker.metadata.ClassDescriptor}. <br/> NOTE: This method doesn't do any checks on the specified {@link org.apache.ojb.broker.metadata.ClassDescriptor}; the caller is responsible to pass a valid descriptor. @param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} to extract the RW-fields @param obj The object with target fields to extract. @throws MetadataException if there is an error accessing obj field values
public Object[] extractValueArray(ValueContainer[] containers) { Object[] result = new Object[containers.length]; for(int i = 0; i < containers.length; i++) { result[i] = containers[i].getValue(); } return result; }
Extract a value array from the given {@link ValueContainer} array.

@param containers the source {@link ValueContainer} instances
@return an Object array holding the value of each container
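A short follow-up sketch, reusing the hypothetical broker and Project instance from above, for callers that only need raw values (for logging or manual statement binding):

// turn the non-PK RW values of 'project' into a plain Object[]
ClassDescriptor cld = broker.getClassDescriptor(Project.class);
BrokerHelper helper = broker.serviceBrokerHelper();
ValueContainer[] containers = helper.getNonKeyRwValues(cld, project);
Object[] rawValues = helper.extractValueArray(containers);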
public boolean assertValidPksForStore(FieldDescriptor[] fieldDescriptors, Object[] pkValues) { int fieldDescriptorSize = fieldDescriptors.length; for(int i = 0; i < fieldDescriptorSize; i++) { FieldDescriptor fld = fieldDescriptors[i]; /** * a pk field is valid if it is either managed by OJB * (autoincrement or locking) or if it does contain a * valid non-null value. */ if(!(fld.isAutoIncrement() || fld.isLocking() || !representsNull(fld, pkValues[i]))) { return false; } } return true; }
Returns <em>true</em> if the primary key fields are valid for store, else <em>false</em>. PK fields are valid if each of them is either an OJB managed attribute (autoincrement or locking) or contains a valid, non-null value.

@param fieldDescriptors the array of PK field descriptors
@param pkValues the array of PK values
@return boolean
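A sketch of a pre-store sanity check, again using the hypothetical Project instance; the PK values are read via the persistent fields, mirroring what the helper itself does internally:

ClassDescriptor cld = broker.getClassDescriptor(Project.class);
FieldDescriptor[] pkFields = cld.getPkFields();
Object[] pkValues = new Object[pkFields.length];
for (int i = 0; i < pkFields.length; i++)
{
    // read the raw (java-typed) PK value of each field
    pkValues[i] = pkFields[i].getPersistentField().get(project);
}
if (!broker.serviceBrokerHelper().assertValidPksForStore(pkFields, pkValues))
{
    throw new PersistenceBrokerException("Primary key of Project is neither OJB managed nor set");
}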
public boolean assertValidPkForDelete(ClassDescriptor cld, Object obj) { if(!ProxyHelper.isProxy(obj)) { FieldDescriptor fieldDescriptors[] = cld.getPkFields(); int fieldDescriptorSize = fieldDescriptors.length; for(int i = 0; i < fieldDescriptorSize; i++) { FieldDescriptor fd = fieldDescriptors[i]; Object pkValue = fd.getPersistentField().get(obj); if (representsNull(fd, pkValue)) { return false; } } } return true; }
Returns <em>true</em> if the primary key fields are valid for delete, else <em>false</em>. PK fields are valid if each of them contains a valid, non-null value.

@param cld the ClassDescriptor
@param obj the object
@return boolean
public Query getCountQuery(Query aQuery) { if(aQuery instanceof QueryBySQL) { return getQueryBySqlCount((QueryBySQL) aQuery); } else if(aQuery instanceof ReportQueryByCriteria) { return getReportQueryByCriteriaCount((ReportQueryByCriteria) aQuery); } else { return getQueryByCriteriaCount((QueryByCriteria) aQuery); } }
Build a Count-Query based on the given query.

@param aQuery the query to derive the count query from
@return The count query
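A usage sketch, assuming the hypothetical Project class has a 'title' attribute; the derived count query is executed as a report query and the single count column is read back:

Criteria crit = new Criteria();
crit.addLike("title", "OJB%");
Query query = QueryFactory.newQuery(Project.class, crit);

Query countQuery = broker.serviceBrokerHelper().getCountQuery(query);
Iterator rows = broker.getReportQueryIteratorByQuery(countQuery);
int count = 0;
if (rows.hasNext())
{
    // the count query selects a single column, e.g. count(*)
    Object[] row = (Object[]) rows.next();
    count = ((Number) row[0]).intValue();
}

In most application code broker.getCount(query) is the simpler entry point; it builds and runs the count query along these lines.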
private Query getQueryBySqlCount(QueryBySQL aQuery) { String countSql = aQuery.getSql(); int fromPos = countSql.toUpperCase().indexOf(" FROM "); if(fromPos >= 0) { countSql = "select count(*)" + countSql.substring(fromPos); } int orderPos = countSql.toUpperCase().indexOf(" ORDER BY "); if(orderPos >= 0) { countSql = countSql.substring(0, orderPos); } return new QueryBySQL(aQuery.getSearchClass(), countSql); }
Create a Count-Query for a QueryBySQL.

@param aQuery the SQL based query to derive the count query from
@return The count query
private Query getQueryByCriteriaCount(QueryByCriteria aQuery) { Class searchClass = aQuery.getSearchClass(); ReportQueryByCriteria countQuery = null; Criteria countCrit = null; String[] columns = new String[1]; // BRJ: copied Criteria without groupby, orderby, and prefetched relationships if (aQuery.getCriteria() != null) { countCrit = aQuery.getCriteria().copy(false, false, false); } if (aQuery.isDistinct()) { // BRJ: Count distinct is dbms dependent // hsql/sapdb: select count (distinct(person_id || project_id)) from person_project // mysql: select count (distinct person_id,project_id) from person_project // [tomdz] // Some databases have no support for multi-column count distinct (e.g. Derby) // Here we use a SELECT count(*) FROM (SELECT DISTINCT ...) instead // // concatenation of pk-columns is a simple way to obtain a single column // but concatenation is also dbms dependent: // // SELECT count(distinct concat(row1, row2, row3)) mysql // SELECT count(distinct (row1 || row2 || row3)) ansi // SELECT count(distinct (row1 + row2 + row3)) ms sql-server FieldDescriptor[] pkFields = m_broker.getClassDescriptor(searchClass).getPkFields(); String[] keyColumns = new String[pkFields.length]; if (pkFields.length > 1) { // TODO: Use ColumnName. This is a temporary solution because // we cannot yet resolve multiple columns in the same attribute. for (int idx = 0; idx < pkFields.length; idx++) { keyColumns[idx] = pkFields[idx].getColumnName(); } } else { for (int idx = 0; idx < pkFields.length; idx++) { keyColumns[idx] = pkFields[idx].getAttributeName(); } } // [tomdz] // TODO: Add support for databases that do not support COUNT DISTINCT over multiple columns // if (getPlatform().supportsMultiColumnCountDistinct()) // { // columns[0] = "count(distinct " + getPlatform().concatenate(keyColumns) + ")"; // } // else // { // columns = keyColumns; // } columns[0] = "count(distinct " + getPlatform().concatenate(keyColumns) + ")"; } else { columns[0] = "count(*)"; } // BRJ: we have to preserve indirection table ! if (aQuery instanceof MtoNQuery) { MtoNQuery mnQuery = (MtoNQuery)aQuery; ReportQueryByMtoNCriteria mnReportQuery = new ReportQueryByMtoNCriteria(searchClass, columns, countCrit); mnReportQuery.setIndirectionTable(mnQuery.getIndirectionTable()); countQuery = mnReportQuery; } else { countQuery = new ReportQueryByCriteria(searchClass, columns, countCrit); } // BRJ: we have to preserve outer-join-settings (by André Markwalder) for (Iterator outerJoinPath = aQuery.getOuterJoinPaths().iterator(); outerJoinPath.hasNext();) { String path = (String) outerJoinPath.next(); if (aQuery.isPathOuterJoin(path)) { countQuery.setPathOuterJoin(path); } } //BRJ: add orderBy Columns asJoinAttributes List orderBy = aQuery.getOrderBy(); if ((orderBy != null) && !orderBy.isEmpty()) { String[] joinAttributes = new String[orderBy.size()]; for (int idx = 0; idx < orderBy.size(); idx++) { joinAttributes[idx] = ((FieldHelper)orderBy.get(idx)).name; } countQuery.setJoinAttributes(joinAttributes); } // [tomdz] // TODO: // For those databases that do not support COUNT DISTINCT over multiple columns // we wrap the normal SELECT DISTINCT that we just created, into a SELECT count(*) // For this however we need a report query that gets its data from a sub query instead // of a table (target class) // if (aQuery.isDistinct() && !getPlatform().supportsMultiColumnCountDistinct()) // { // } return countQuery; }
Create a Count-Query for a QueryByCriteria.

@param aQuery the criteria based query to derive the count query from
@return The count query
private Query getReportQueryByCriteriaCount(ReportQueryByCriteria aQuery) { ReportQueryByCriteria countQuery = (ReportQueryByCriteria) getQueryByCriteriaCount(aQuery); // BRJ: keep the original columns to build the Join countQuery.setJoinAttributes(aQuery.getAttributes()); // BRJ: we have to preserve groupby information Iterator iter = aQuery.getGroupBy().iterator(); while(iter.hasNext()) { countQuery.addGroupBy((FieldHelper) iter.next()); } return countQuery; }
Create a Count-Query for a ReportQueryByCriteria.

@param aQuery the report query to derive the count query from
@return The count query
public boolean doesExist(ClassDescriptor cld, Identity oid, Object obj) { boolean result = false; String sql = (String) sqlSelectMap.get(cld); if(sql == null) { sql = new SqlExistStatement(cld, LoggerFactory.getDefaultLogger()).getStatement(); sqlSelectMap.put(cld, sql); } ValueContainer[] pkValues; if(oid == null) { pkValues = getKeyValues(cld, obj, true); } else { pkValues = getKeyValues(cld, oid); } StatementManagerIF sm = m_broker.serviceStatementManager(); PreparedStatement stmt = null; ResultSet rs = null; try { stmt = sm.getPreparedStatement(cld, sql, false, 1, false); sm.bindValues(stmt, pkValues, 1); rs = stmt.executeQuery(); result = rs.next(); } catch(SQLException e) { throw ExceptionHelper.generateException("[BrokerHelper#doesExist] Can't check if specified" + " object is already persisted", e, sql, cld, pkValues, null, obj); } finally { sm.closeResources(stmt, rs); } return result; }
TODO: This method should be moved to {@link org.apache.ojb.broker.accesslayer.JdbcAccess} before the 1.1 release.

This method checks if the requested object can be found in the database (without materializing the object).

@param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the object/{@link org.apache.ojb.broker.Identity} to check.
@param oid The associated {@link org.apache.ojb.broker.Identity} of the object; if <em>null</em>, the PK values are read from <em>obj</em>.
@param obj The <em>object</em> to check.
@return <em>true</em> if the object is already persisted, <em>false</em> if the object is transient.
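A sketch of an insert-or-update decision, reusing the hypothetical Project instance; here the Identity is built via the broker's IdentityFactory, though passing <em>null</em> for oid and letting the helper read the PK values from the object works as well:

ClassDescriptor cld = broker.getClassDescriptor(Project.class);
Identity oid = broker.serviceIdentity().buildIdentity(project);
if (!broker.serviceBrokerHelper().doesExist(cld, oid, project))
{
    // no matching row found - persist the object
    broker.store(project);
}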
public void link(Object obj, ObjectReferenceDescriptor ord, boolean insert) { linkOrUnlink(true, obj, ord, insert); }
This method links the main object and the specified reference object (a 1:1 reference expects a single referenced object; 1:n and m:n references expect a collection of referenced objects) by hand. It is needed when the auto-xxx settings are disabled in the reference metadata definitions. For more info see the OJB docs.

@param obj Object with reference
@param ord the ObjectReferenceDescriptor of the reference
@param insert flag signals insert operation
public boolean link(Object obj, String attributeName, boolean insert) { return linkOrUnlink(true, obj, attributeName, insert); }
This method links the main object and the specified reference object (a 1:1 reference expects a single referenced object; 1:n and m:n references expect a collection of referenced objects) by hand. It is needed when the auto-xxx settings are disabled in the reference metadata definitions. For more info see the OJB docs.

@param obj Object with reference
@param attributeName field name of the reference
@param insert flag signals insert operation
@return true if the specified reference was found and linking was successful
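A sketch of manual linking, assuming hypothetical Project and Person classes whose 1:1 'manager' reference is mapped with the auto-xxx settings disabled; the accessor names are illustrative only:

Project project = new Project();
Person manager = new Person();
broker.store(manager);                 // referenced object needs its PK first
project.setManager(manager);           // hypothetical reference field 'manager'
// copy the PK of 'manager' into the FK field(s) of 'project'
broker.serviceBrokerHelper().link(project, "manager", true);
broker.store(project);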
public boolean unlink(Object source, String attributeName, Object target) { return linkOrUnlink(false, source, attributeName, false); }
Unlink the specified reference object. For more info see the OJB docs.

@param source The source object with the specified reference field.
@param attributeName The field name of the reference to unlink.
@param target The referenced object to unlink.
@return true if the specified reference was found and unlinking was successful
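The inverse operation, again for the hypothetical 'manager' reference; the FK field(s) of the source object are nullified before the store:

Person oldManager = project.getManager();
broker.serviceBrokerHelper().unlink(project, "manager", oldManager);
project.setManager(null);
broker.store(project);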
public void unlink(Object obj, ObjectReferenceDescriptor ord, boolean insert) { linkOrUnlink(false, obj, ord, insert); }
Unlink the specified reference from this object. For more info see the OJB docs.

@param obj Object with reference
@param ord the ObjectReferenceDescriptor of the reference
@param insert flag signals insert operation
public void unlink(Object source, CollectionDescriptor cds, List referencesToUnlink) { for(int i = 0; i < referencesToUnlink.size(); i++) { unlink(source, cds, referencesToUnlink.get(i)); } }
Unlink a bunch of 1:n or m:n objects. @param source The source object with reference. @param cds The {@link org.apache.ojb.broker.metadata.CollectionDescriptor} of the relation. @param referencesToUnlink List of referenced objects to unlink.
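A sketch for detaching m:n entries by hand, assuming the hypothetical Project class owns a 'developers' m:n collection mapped without auto-delete; the accessor and descriptor names are illustrative only:

ClassDescriptor cld = broker.getClassDescriptor(Project.class);
CollectionDescriptor cds = cld.getCollectionDescriptorByName("developers");
List removed = new ArrayList(project.getDevelopers());   // referenced objects to detach
// removes the indirection-table rows (m:n) or nullifies the FKs (1:n)
broker.serviceBrokerHelper().unlink(project, cds, removed);
project.getDevelopers().clear();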
public void unlink(Object source, CollectionDescriptor cds, Object referenceToUnlink) { if(cds.isMtoNRelation()) { m_broker.deleteMtoNImplementor(new MtoNImplementor(cds, source, referenceToUnlink)); } else { ClassDescriptor cld = m_broker.getClassDescriptor(referenceToUnlink.getClass()); m_broker.unlinkFK(referenceToUnlink, cld, cds); } }
Unlink a single 1:n or m:n object.

@param source The source object with reference.
@param cds The {@link org.apache.ojb.broker.metadata.CollectionDescriptor} of the relation.
@param referenceToUnlink The referenced object to unlink.
public void link(Object source, CollectionDescriptor cds, List referencesToLink) { for(int i = 0; i < referencesToLink.size(); i++) { link(source, cds, referencesToLink.get(i)); } }
Link a bunch of 1:n or m:n objects. @param source The source object with reference. @param cds The {@link org.apache.ojb.broker.metadata.CollectionDescriptor} of the relation. @param referencesToLink List of referenced objects to link.
public void link(Object source, CollectionDescriptor cds, Object referenceToLink) { if(cds.isMtoNRelation()) { m_broker.addMtoNImplementor(new MtoNImplementor(cds, source, referenceToLink)); } else { ClassDescriptor cld = m_broker.getClassDescriptor(referenceToLink.getClass()); m_broker.link(referenceToLink, cld, cds, source, false); } }
Link a single 1:n or m:n object. @param source The source object with the declared reference. @param cds The {@link org.apache.ojb.broker.metadata.CollectionDescriptor} of the relation declared in source object. @param referenceToLink The referenced object to link.
public static Iterator getCollectionIterator(Object collectionOrArray) { Iterator colIterator; if (collectionOrArray instanceof ManageableCollection) { colIterator = ((ManageableCollection) collectionOrArray).ojbIterator(); } else if (collectionOrArray instanceof Collection) { colIterator = ((Collection) collectionOrArray).iterator(); } else if (collectionOrArray.getClass().isArray()) { colIterator = new ArrayIterator(collectionOrArray); } else { throw new OJBRuntimeException( "Given object collection of type '" + (collectionOrArray != null ? collectionOrArray.getClass().toString() : "null") + "' can not be managed by OJB. Use Array, Collection or ManageableCollection instead!"); } return colIterator; }
Returns an Iterator instance for {@link java.util.Collection}, object Array or {@link org.apache.ojb.broker.ManageableCollection} instances.

@param collectionOrArray a non-<em>null</em> object of type {@link java.util.Collection}, Array or {@link org.apache.ojb.broker.ManageableCollection}.
@return Iterator able to handle the given collection object
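A minimal sketch that traverses a reference field regardless of whether it is declared as a Collection, an array or a ManageableCollection; the 'developers' accessor is again hypothetical, and the static helper is assumed to be reachable as BrokerHelper.getCollectionIterator:

Object developers = project.getDevelopers();
Iterator it = BrokerHelper.getCollectionIterator(developers);
while (it.hasNext())
{
    Person p = (Person) it.next();
    System.out.println(p);
}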
public static Object[] getCollectionArray(Object collectionOrArray) { Object[] result; if (collectionOrArray instanceof Collection) { result = ((Collection) collectionOrArray).toArray(); } else if (collectionOrArray instanceof ManageableCollection) { Collection newCol = new ArrayList(); CollectionUtils.addAll(newCol, ((ManageableCollection) collectionOrArray).ojbIterator()); result = newCol.toArray(); } else if (collectionOrArray.getClass().isArray()) { result = (Object[]) collectionOrArray; } else { throw new OJBRuntimeException( "Given object collection of type '" + (collectionOrArray != null ? collectionOrArray.getClass().toString() : "null") + "' can not be managed by OJB. Use Array, Collection or ManageableCollection instead!"); } return result; }
Returns an object array for {@link java.util.Collection}, array or {@link org.apache.ojb.broker.ManageableCollection} instances.

@param collectionOrArray a non-<em>null</em> object of type {@link java.util.Collection}, Array or {@link org.apache.ojb.broker.ManageableCollection}.
@return an Object array containing the elements of the given collection or array
public static boolean hasAnonymousKeyReference(ClassDescriptor cld, ObjectReferenceDescriptor rds) { boolean result = false; FieldDescriptor[] fkFields = rds.getForeignKeyFieldDescriptors(cld); for(int i = 0; i < fkFields.length; i++) { FieldDescriptor fkField = fkFields[i]; if(fkField.isAnonymous()) { result = true; break; } } return result; }
Returns <em>true</em> if one or more anonymous FK fields are used. @param cld The {@link org.apache.ojb.broker.metadata.ClassDescriptor} of the main object. @param rds The {@link org.apache.ojb.broker.metadata.ObjectReferenceDescriptor} of the referenced object. @return <em>true</em> if one or more anonymous FK fields are used for specified reference.
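A sketch that inspects the metadata of the hypothetical 'manager' reference; the descriptor lookup by name and the static helper location are assumptions:

ClassDescriptor cld = broker.getClassDescriptor(Project.class);
ObjectReferenceDescriptor ord = cld.getObjectReferenceDescriptorByName("manager");
if (BrokerHelper.hasAnonymousKeyReference(cld, ord))
{
    // the FK columns exist only in the table, not as attributes of the Project class
    System.out.println("'manager' uses anonymous FK fields");
}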
protected boolean _load () { java.sql.ResultSet rs = null; try { // This synchronization is necessary for Oracle JDBC drivers 8.1.7, 9.0.1, 9.2.0.1 // The documentation says synchronization is done within the driver, but they // must have overlooked something. Without the lock we'd get mysterious error // messages. synchronized(getDbMeta()) { getDbMetaTreeModel().setStatusBarMessage("Reading schemas for catalog " + this.getAttribute(ATT_CATALOG_NAME)); rs = getDbMeta().getSchemas(); final java.util.ArrayList alNew = new java.util.ArrayList(); int count = 0; while (rs.next()) { getDbMetaTreeModel().setStatusBarMessage("Creating schema " + getCatalogName() + "." + rs.getString("TABLE_SCHEM")); alNew.add(new DBMetaSchemaNode(getDbMeta(), getDbMetaTreeModel(), DBMetaCatalogNode.this, rs.getString("TABLE_SCHEM"))); count++; } if (count == 0) alNew.add(new DBMetaSchemaNode(getDbMeta(), getDbMetaTreeModel(), DBMetaCatalogNode.this, null)); alChildren = alNew; javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { getDbMetaTreeModel().nodeStructureChanged(DBMetaCatalogNode.this); } }); rs.close(); } } catch (java.sql.SQLException sqlEx) { getDbMetaTreeModel().reportSqlError("Error retrieving schemas", sqlEx); try { if (rs != null) rs.close (); } catch (java.sql.SQLException sqlEx2) { this.getDbMetaTreeModel().reportSqlError("Error retrieving schemas", sqlEx2); } return false; } return true; }
Loads the schemas associated with this catalog.

@return <em>true</em> if the schemas were loaded successfully, else <em>false</em>