repo
string | commit
string | message
string | diff
string |
---|---|---|---|
nekop/java-examples
|
09260119246cde92bd0882d4edc0b834ad181963
|
Add create and destroy methods
|
diff --git a/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java
index ee56185..47c018a 100644
--- a/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java
+++ b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java
@@ -1,10 +1,16 @@
package jp.programmers.jboss.hello;
public class Hello implements HelloMBean {
+ public void create() throws Exception {
+ System.out.println("Hello.create()");
+ }
public void start() throws Exception {
System.out.println("Hello.start()");
}
public void stop() throws Exception {
System.out.println("Hello.stop()");
}
+ public void destroy() throws Exception {
+ System.out.println("Hello.destroy()");
+ }
}
diff --git a/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java
index 370b25e..e4ac037 100644
--- a/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java
+++ b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java
@@ -1,6 +1,8 @@
package jp.programmers.jboss.hello;
public interface HelloMBean {
+ void create() throws Exception;
void start() throws Exception;
void stop() throws Exception;
+ void destroy() throws Exception;
}
|
nekop/java-examples
|
37a756ee3608d7487b4582fed6e51459c22435ce
|
Add maven pom examples
|
diff --git a/maven/pom-ee6-web.xml b/maven/pom-ee6-web.xml
new file mode 100644
index 0000000..4b339a5
--- /dev/null
+++ b/maven/pom-ee6-web.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.github.nekop</groupId>
+ <artifactId>ee6-web</artifactId>
+ <packaging>war</packaging>
+ <name>ee6-web</name>
+ <version>1.0</version>
+
+ <properties>
+ <version.org.jboss.spec.jboss-javaee-web-6.0>3.0.2.Final</version.org.jboss.spec.jboss-javaee-web-6.0>
+ <version.maven-war-plugin>2.4</version.maven-war-plugin>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+ <dependencies>
+ <!-- prune API jar, do not use for real projects/testing
+ <dependency>
+ <groupId>javax</groupId>
+ <artifactId>javaee-api</artifactId>
+ <version>6.0</version>
+ <scope>provided</scope>
+ </dependency>
+ -->
+ <!-- It's actually a bom, but we can declare it directly for simple use cases -->
+ <dependency>
+ <groupId>org.jboss.spec</groupId>
+ <artifactId>jboss-javaee-web-6.0</artifactId>
+ <version>${version.org.jboss.spec.jboss-javaee-web-6.0}</version>
+ <scope>provided</scope>
+ <type>pom</type>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.7</source>
+ <target>1.7</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-war-plugin</artifactId>
+ <version>${version.maven-war-plugin}</version>
+ <configuration>
+ <failOnMissingWebXml>false</failOnMissingWebXml>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
|
nekop/java-examples
|
60efc0389eaf99cfcc6db03b0dd0b86d9beab31b
|
Add rhgss
|
diff --git a/rhgss/README.md b/rhgss/README.md
new file mode 100644
index 0000000..610c480
--- /dev/null
+++ b/rhgss/README.md
@@ -0,0 +1,5 @@
+# Red Hat GSS works
+
+This is place for code I wrote at work and can be shared under open source software license.
+
+GSS is Global Support Service btw.
diff --git a/rhgss/httpsessionsize/README.md b/rhgss/httpsessionsize/README.md
new file mode 100644
index 0000000..c31267a
--- /dev/null
+++ b/rhgss/httpsessionsize/README.md
@@ -0,0 +1,13 @@
+# HttpSessionSizeFilter
+
+This servlet filter serializes the HttpSession content and logs HttpSession size.
+
+With INFO level it logs only whole HttpSession size (1 log per 1 req). With FINE/DEBUG it logs each attribute size too.
+
+By default it's compiled with Java 6 and targeted to Java EE 6 / Servlet 3.0, but also it can be easily modified to use Java EE 5 / Servlet 2.5 / Java 5.
+
+See (very small) source code for details.
+
+## Install
+
+Add the target/httpsessionsize.jar to WEB-INF/lib directory in your war file.
diff --git a/rhgss/httpsessionsize/example-web.xml b/rhgss/httpsessionsize/example-web.xml
new file mode 100644
index 0000000..24d9de7
--- /dev/null
+++ b/rhgss/httpsessionsize/example-web.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:web="http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
+ version="2.5">
+
+ <!-- For Servlet 2.5, by default it uses Servlet 3.0 annotation so you don't need this file.
+ Copy this block if your web application is Servlet 2.5 -->
+ <filter>
+ <filter-name>HttpSessionSizeFilter</filter-name>
+ <filter-class>com.redhat.gss.example.HttpSessionSizeFilter</filter-class>
+ </filter>
+ <filter-mapping>
+ <filter-name>HttpSessionSizeFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+</web-app>
diff --git a/rhgss/httpsessionsize/pom.xml b/rhgss/httpsessionsize/pom.xml
new file mode 100644
index 0000000..1628c39
--- /dev/null
+++ b/rhgss/httpsessionsize/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.redhat.gss.example</groupId>
+ <artifactId>httpsessionsize</artifactId>
+ <packaging>jar</packaging>
+ <name>httpsessionsize</name>
+ <version>1.0</version>
+
+ <dependencies>
+<!--
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <version>2.5</version>
+ <scope>provided</scope>
+ </dependency>
+-->
+ <dependency>
+ <groupId>javax</groupId>
+ <artifactId>javaee-api</artifactId>
+ <version>6.0</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/rhgss/httpsessionsize/src/main/java/com/redhat/gss/example/HttpSessionSizeFilter.java b/rhgss/httpsessionsize/src/main/java/com/redhat/gss/example/HttpSessionSizeFilter.java
new file mode 100644
index 0000000..79143db
--- /dev/null
+++ b/rhgss/httpsessionsize/src/main/java/com/redhat/gss/example/HttpSessionSizeFilter.java
@@ -0,0 +1,89 @@
+/*
+ * To the extent possible under law, Red Hat, Inc. has dedicated all
+ * copyright to this software to the public domain worldwide, pursuant
+ * to the CC0 Public Domain Dedication. This software is distributed
+ * without any warranty.
+ *
+ * See <http://creativecommons.org/publicdomain/zero/1.0/>.
+ */
+package com.redhat.gss.example;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.NotSerializableException;
+import java.io.ObjectOutputStream;
+import java.util.Enumeration;
+import java.util.logging.Level;
+import java.util.logging.LogManager;
+import java.util.logging.LogRecord;
+import java.util.logging.Logger;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpSession;
+
+// For Java EE 6 / Servlet 3.0, remove them if needed
+import javax.servlet.annotation.WebFilter;
+@WebFilter("/*")
+
+public class HttpSessionSizeFilter implements Filter {
+
+ private Logger log =
+ LogManager.getLogManager().getLogger(HttpSessionSizeFilter.class.getName());
+
+ public void init(FilterConfig filterConfig) throws ServletException { }
+
+ public void doFilter(ServletRequest request,
+ ServletResponse response,
+ FilterChain chain)
+ throws IOException, ServletException {
+ chain.doFilter(request, response);
+ HttpSession session = ((HttpServletRequest)request).getSession(false);
+ if (session == null) {
+ return;
+ }
+ String id = session.getId();
+ int size = 0;
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
+ try {
+ int previous = 0; // for debug log
+ for (Enumeration e = session.getAttributeNames();
+ e.hasMoreElements(); ) {
+ String key = (String)e.nextElement();
+ try {
+ oos.writeObject(session.getAttribute(key));
+ // If debug enabled, also print each attribute
+ if (log.isLoggable(Level.FINE)) {
+ oos.flush();
+ int current = baos.toByteArray().length;
+ log.log(Level.FINE,
+ "HttpSession attribute {0} = {1} bytes",
+ new Object[] {key, current - previous});
+ previous = current;
+ }
+ } catch (NotSerializableException nse) {
+ LogRecord lr = new LogRecord(
+ Level.WARNING, "Failed to serialize HttpSession attribute {0}");
+ lr.setParameters(new Object[] {key});
+ lr.setThrown(nse);
+ log.log(lr);
+ }
+ }
+ oos.flush();
+ size = baos.toByteArray().length;
+ } finally {
+ try {
+ oos.close();
+ } catch (Exception ignore) { }
+ }
+ log.log(Level.INFO, "HttpSession {0} = {1} bytes", new Object[] {id, size});
+ }
+
+ public void destroy() { }
+
+}
|
nekop/java-examples
|
0e421ddd64d238b424587f7020205de05b02bf09
|
Add ee6-ejb-interfaces
|
diff --git a/ee6-ejb-interfaces/deploy.sh b/ee6-ejb-interfaces/deploy.sh
new file mode 100644
index 0000000..afb3ca8
--- /dev/null
+++ b/ee6-ejb-interfaces/deploy.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JBOSS_HOME=~/eap6
+
+mvn clean package
+cd target
+mkdir -p empty
+cd empty
+jar cf ee6-ejb-interfaces-web.war .
+mv ee6-ejb-interfaces-web.war ..
+cd ..
+rmdir empty
+jar cf ee6-ejb-interfaces.ear ee6-ejb-interfaces-web.war ee6-ejb-interfaces.jar
+rm ee6-ejb-interfaces-web.war
+cd ..
+
+cp target/ee6-ejb-interfaces.ear $JBOSS_HOME/standalone/deployments/
diff --git a/ee6-ejb-interfaces/ee6-ejb-interfaces-client.rb b/ee6-ejb-interfaces/ee6-ejb-interfaces-client.rb
new file mode 100644
index 0000000..d68fa42
--- /dev/null
+++ b/ee6-ejb-interfaces/ee6-ejb-interfaces-client.rb
@@ -0,0 +1,90 @@
+require 'java'
+
+JBOSS_HOME="/home/nekop/eap6"
+
+require "./target/ee6-ejb-interfaces.jar"
+# native dep
+require "#{JBOSS_HOME}/bin/client/jboss-client.jar"
+# rs dep
+require "#{JBOSS_HOME}/modules/system/layers/base/org/jboss/resteasy/resteasy-jaxrs/main/resteasy-jaxrs-2.3.7.Final-redhat-2.jar"
+require "#{JBOSS_HOME}/modules/system/layers/base/javax/ws/rs/api/main/jboss-jaxrs-api_1.1_spec-1.0.1.Final-redhat-2.jar"
+require "#{JBOSS_HOME}/modules/system/layers/base/org/apache/httpcomponents/main/httpclient-4.2.1-redhat-1.jar"
+require "#{JBOSS_HOME}/modules/system/layers/base/org/apache/httpcomponents/main/httpcore-4.2.1-redhat-1.jar"
+require "#{JBOSS_HOME}/modules/system/layers/base/org/slf4j/jcl-over-slf4j/main/jcl-over-slf4j-1.7.2.redhat-2.jar"
+require "#{JBOSS_HOME}/modules/system/layers/base/org/slf4j/main/slf4j-api-1.7.2.redhat-2.jar"
+
+java_import "java.util.Properties"
+java_import "javax.naming.Context"
+java_import "javax.naming.InitialContext"
+
+def initial_context
+ p = Properties.new()
+ p.put("remote.connections", "default")
+ p.put("remote.connection.default.port", "4447")
+ p.put("remote.connection.default.host", "localhost")
+ p.put("remote.connectionprovider.create.options.org.xnio.Options.SSL_ENABLED", "false")
+ p.put(Context.URL_PKG_PREFIXES, "org.jboss.ejb.client.naming")
+ p.put("org.jboss.ejb.client.scoped.context", true)
+ InitialContext.new(p)
+end
+
+def corba_initial_context
+ p = Properties.new()
+ p.put(Context.INITIAL_CONTEXT_FACTORY,
+ "com.sun.jndi.cosnaming.CNCtxFactory")
+ p.put(Context.PROVIDER_URL, "corbaloc:iiop:localhost:3528/JBoss/Naming/root")
+ InitialContext.new(p)
+end
+
+def hello_slsb(ejb_context)
+ ear_name = "ee6-ejb-interfaces"
+ ejbjar_name = "ee6-ejb-interfaces"
+ ejb_name = "Hello"
+ interface_name = "com.github.nekop.examples.HelloRemote"
+ ejb_context.lookup("#{ear_name}/#{ejbjar_name}/#{ejb_name}!#{interface_name}")
+end
+
+type = ARGV.shift
+case type
+when "native"
+ ejb_context = initial_context.lookup("ejb:")
+ begin
+ bean = hello_slsb(ejb_context)
+ bean.hello("world")
+ ensure
+ begin
+ ejb_context.close
+ rescue
+ # no-op
+ end
+ end
+when "iiop"
+ java_import "com.github.nekop.examples.HelloEJB2Home"
+ java.lang.System::setProperty("com.sun.CORBA.ORBUseDynamicStub", "true")
+ o = corba_initial_context.lookup("Hello")
+ home = javax.rmi.PortableRemoteObject.narrow(o, HelloEJB2Home.java_class)
+ bean = home.create()
+ bean.hello("world")
+when "ws"
+ java_import "java.net.URL"
+ java_import "javax.xml.namespace.QName"
+ java_import "javax.xml.ws.Service"
+ java_import "com.github.nekop.examples.HelloLocal"
+ ejbjar_name = "ee6-ejb-interfaces"
+ ejb_name = "Hello"
+ wsdlLocation = URL.new("http://127.0.0.1:8080/#{ejbjar_name}/#{ejb_name}?wsdl")
+ serviceName = QName.new("http://examples.nekop.github.com/", "#{ejb_name}Service")
+ portName = QName.new("http://examples.nekop.github.com/", "#{ejb_name}Port")
+ service = Service.create(wsdlLocation, serviceName)
+ bean = service.getPort(portName, HelloLocal.java_class)
+ bean.hello("world")
+when "rs"
+ java_import "org.jboss.resteasy.client.ClientRequest"
+ war_name = "ee6-ejb-interfaces-web"
+ url = "http://localhost:8080/#{war_name}/rest/hello/world"
+ request = ClientRequest.new(url)
+ request.get(java.lang.String.java_class);
+else
+ puts "unko"
+end
+
diff --git a/ee6-ejb-interfaces/pom.xml b/ee6-ejb-interfaces/pom.xml
new file mode 100644
index 0000000..3214973
--- /dev/null
+++ b/ee6-ejb-interfaces/pom.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.github.nekop.examples</groupId>
+ <artifactId>ee6-ejb-interfaces</artifactId>
+ <packaging>jar</packaging>
+ <name>ee6-ejb-interfaces</name>
+ <version>1.0</version>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>javax</groupId>
+ <artifactId>javaee-api</artifactId>
+ <version>6.0</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ </build>
+
+</project>
diff --git a/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/Hello.java b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/Hello.java
new file mode 100644
index 0000000..ad41730
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/Hello.java
@@ -0,0 +1,21 @@
+package com.github.nekop.examples;
+
+import javax.ejb.Stateless;
+import javax.ejb.RemoteHome;
+import javax.ejb.Stateless;
+import javax.jws.WebService;
+
+@Stateless
+@RemoteHome(HelloEJB2Home.class)
+@WebService
+public class Hello implements HelloRemote, HelloLocal {
+
+ String simpleName = getClass().getSimpleName();
+
+ @Override
+ public String hello(String name) {
+ System.out.println(simpleName + "#hello(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+}
diff --git a/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloApplication.java b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloApplication.java
new file mode 100644
index 0000000..15d64da
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloApplication.java
@@ -0,0 +1,7 @@
+package com.github.nekop.examples;
+
+import javax.ws.rs.ApplicationPath;
+import javax.ws.rs.core.Application;
+
+@ApplicationPath("/rest")
+public class HelloApplication extends Application { }
diff --git a/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloEJB2Home.java b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloEJB2Home.java
new file mode 100644
index 0000000..c95361c
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloEJB2Home.java
@@ -0,0 +1,9 @@
+package com.github.nekop.examples;
+
+import java.rmi.RemoteException;
+import javax.ejb.CreateException;
+import javax.ejb.EJBHome;
+
+public interface HelloEJB2Home extends EJBHome {
+ public HelloEJB2Remote create() throws CreateException, RemoteException;
+}
diff --git a/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloEJB2Remote.java b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloEJB2Remote.java
new file mode 100644
index 0000000..3e78533
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloEJB2Remote.java
@@ -0,0 +1,8 @@
+package com.github.nekop.examples;
+
+import java.rmi.RemoteException;
+import javax.ejb.EJBObject;
+
+public interface HelloEJB2Remote extends EJBObject {
+ public String hello(String name) throws RemoteException;
+}
diff --git a/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloLocal.java b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloLocal.java
new file mode 100644
index 0000000..6f2119e
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloLocal.java
@@ -0,0 +1,16 @@
+package com.github.nekop.examples;
+
+import javax.ejb.Local;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.jws.WebService;
+
+@Local
+@Path("/hello")
+@WebService
+public interface HelloLocal {
+ @GET
+ @Path("{name}")
+ String hello(@PathParam("name") String name);
+}
diff --git a/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloRemote.java b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloRemote.java
new file mode 100644
index 0000000..5a7aabe
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/java/com/github/nekop/examples/HelloRemote.java
@@ -0,0 +1,8 @@
+package com.github.nekop.examples;
+
+import javax.ejb.Remote;
+
+@Remote
+public interface HelloRemote {
+ String hello(String name);
+}
diff --git a/ee6-ejb-interfaces/src/main/resources/META-INF/jboss-ejb3.xml b/ee6-ejb-interfaces/src/main/resources/META-INF/jboss-ejb3.xml
new file mode 100644
index 0000000..4d357ed
--- /dev/null
+++ b/ee6-ejb-interfaces/src/main/resources/META-INF/jboss-ejb3.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<jboss:ejb-jar
+ xmlns:jboss="http://www.jboss.com/xml/ns/javaee"
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:iiop="urn:iiop"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.jboss.com/xml/ns/javaee http://www.jboss.org/j2ee/schema/jboss-ejb3-2_0.xsd
+ http://java.sun.com/xml/ns/javaee http://www.jboss.org/j2ee/schema/jboss-ejb3-spec-2_0.xsd
+ urn:iiop jboss-ejb-iiop_1_0.xsd"
+ version="3.1"
+ impl-version="2.0">
+ <assembly-descriptor>
+ <iiop:iiop>
+ <ejb-name>Hello</ejb-name>
+ </iiop:iiop>
+ </assembly-descriptor>
+</jboss:ejb-jar>
|
nekop/java-examples
|
bf63baee56e2c565c5edc010fba72870ef08906d
|
Cosmetic changes
|
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
index 9d14322..243f83b 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
@@ -1,52 +1,58 @@
package jp.programmers.examples.ejb3.slsb;
import javax.ejb.Local;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import javax.ejb.Timeout;
import javax.ejb.Timer;
import javax.ejb.SessionContext;
import javax.annotation.Resource;
@Remote
@Stateless
public class HelloSLSB implements Hello {
@Resource
SessionContext ctx;
String simpleName = getClass().getSimpleName();
+ @Override
public String hello() {
System.out.println(simpleName + "#hello()");
return this.hello("world");
}
+ @Override
public String hello(String name) {
System.out.println(simpleName + "#hello(String)");
System.out.println("name=" + name);
return "Hello " + name;
}
@Timeout
+ @Override
public void ejbTimeout(Timer timer) {
System.out.println(simpleName + "#ejbTimeout(Timer)");
System.out.println("timer=" + timer);
}
+ @Override
public void initTimer() {
ctx.getTimerService().createTimer(0, 20 * 1000, null);
}
+ @Override
public void exception() {
throw new RuntimeException();
}
+ @Override
public void sleep(long msec) {
System.out.println(simpleName + "#sleep()");
try {
Thread.sleep(msec);
} catch (InterruptedException ignore) { }
}
}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java
index 3e5a570..3d3df6c 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java
@@ -1,36 +1,39 @@
package jp.programmers.examples.ejb3.slsb.iiop;
import javax.ejb.RemoteHome;
import javax.ejb.Stateless;
import javax.ejb.SessionContext;
import javax.annotation.Resource;
@Stateless
@RemoteHome(HelloHome.class)
public class HelloIIOP {
@Resource
SessionContext ctx;
+ String simpleName = getClass().getSimpleName();
+
public String hello() {
- System.out.println("HelloSLSB#hello()");
+ System.out.println(simpleName + "#hello()");
return this.hello("world");
}
public String hello(String name) {
- System.out.println("HelloSLSB#hello(String)");
+ System.out.println(simpleName + "#hello(String)");
System.out.println("name=" + name);
return "Hello " + name;
}
public void exception() {
throw new RuntimeException();
}
public void sleep(long msec) {
+ System.out.println(simpleName + "#sleep()");
try {
Thread.sleep(msec);
} catch (InterruptedException ignore) { }
}
}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java
index b44fb56..c06b283 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java
@@ -1,22 +1,23 @@
package jp.programmers.examples.ejb3.slsb.ws;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import javax.jws.WebService;
import javax.jws.WebMethod;
import javax.jws.soap.SOAPBinding;
import jp.programmers.examples.ejb3.slsb.HelloSLSB;
@Remote
@Stateless
@WebService
@SOAPBinding
public class HelloWS extends HelloSLSB implements HelloEndpoint {
@WebMethod
- public String hello() {
- return super.hello();
+ @Override
+ public String hello(String name) {
+ return super.hello(name);
}
}
|
nekop/java-examples
|
9deac564ea71d7b2973743be0187143d02525088
|
Added mockxa
|
diff --git a/mockxa/pom.xml b/mockxa/pom.xml
new file mode 100644
index 0000000..137878c
--- /dev/null
+++ b/mockxa/pom.xml
@@ -0,0 +1,92 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.redhat.gss</groupId>
+ <artifactId>mockxa</artifactId>
+ <name>mockxa</name>
+ <version>1.0</version>
+ <packaging>war</packaging>
+
+ <properties>
+ <version.org.jboss.as>7.2.1.Final-redhat-10</version.org.jboss.as>
+ <version.org.jboss.jboss-transaction-spi>7.0.0.Final-redhat-2</version.org.jboss.jboss-transaction-spi>
+ <version.org.jboss.msc.jboss-msc>1.0.4.GA-redhat-1</version.org.jboss.msc.jboss-msc>
+ <version.org.jboss.spec.javax.servlet.jboss-servlet-api_3.0_spec>1.0.2.Final-redhat-1</version.org.jboss.spec.javax.servlet.jboss-servlet-api_3.0_spec>
+ <version.org.jboss.jboss-transaction-spi>7.0.0.Final-redhat-2</version.org.jboss.jboss-transaction-spi>
+ <version.org.jboss.jbossts>4.17.7.Final-redhat-4</version.org.jboss.jbossts>
+ <version.org.jboss.logging.jboss-logging>3.1.2.GA-redhat-1</version.org.jboss.logging.jboss-logging>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.as</groupId>
+ <artifactId>jboss-as-server</artifactId>
+ <version>${version.org.jboss.as}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.as</groupId>
+ <artifactId>jboss-as-transactions</artifactId>
+ <version>${version.org.jboss.as}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jboss-transaction-spi</artifactId>
+ <version>${version.org.jboss.jboss-transaction-spi}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.spec.javax.servlet</groupId>
+ <artifactId>jboss-servlet-api_3.0_spec</artifactId>
+ <version>${version.org.jboss.spec.javax.servlet.jboss-servlet-api_3.0_spec}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.msc</groupId>
+ <artifactId>jboss-msc</artifactId>
+ <version>${version.org.jboss.msc.jboss-msc}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.logging</groupId>
+ <artifactId>jboss-logging</artifactId>
+ <version>${version.org.jboss.logging.jboss-logging}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jboss-transaction-spi</artifactId>
+ <version>${version.org.jboss.jboss-transaction-spi}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.jbossts.jts</groupId>
+ <artifactId>jbossjts-jacorb</artifactId>
+ <version>${version.org.jboss.jbossts}</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <artifactId>maven-war-plugin</artifactId>
+ <version>2.1.1</version>
+ <configuration>
+ <failOnMissingWebXml>false</failOnMissingWebXml>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResource.java b/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResource.java
new file mode 100644
index 0000000..15878af
--- /dev/null
+++ b/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResource.java
@@ -0,0 +1,202 @@
+package com.redhat.gss.mockxa;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.ObjectInputStream;
+import java.io.BufferedInputStream;
+import java.io.FileOutputStream;
+import java.io.ObjectOutputStream;
+import java.io.BufferedOutputStream;
+import java.util.Arrays;
+
+import javax.transaction.xa.Xid;
+import javax.transaction.xa.XAResource;
+import javax.transaction.xa.XAException;
+import org.jboss.logging.Logger;
+
+public class MockXAResource implements XAResource {
+
+ private static Logger log = Logger.getLogger(MockXAResource.class);
+
+ public int resourceId = 0;
+ public boolean crashInPrepare = false;
+ public boolean crashInRollback = false;
+ public boolean crashInCommit = false;
+ public boolean exceptionInPrepare = false;
+ public boolean exceptionInEnd = false;
+ public boolean exceptionInRollback = false;
+ public boolean exceptionInCommit = false;
+ public boolean exceptionInRecover = false;
+ public boolean hangInPrepare = false;
+ public boolean hangInCommit = false;
+ public boolean logException = true;
+ public int exceptionErrorCode = -1;
+ private int transactionTimeout = 30000;
+
+ public void start(Xid xid, int flags) throws XAException {
+ log.info("start('" + xid + "', " + flags + ')');
+ }
+
+ public void end(Xid xid, int flags) throws XAException {
+ log.info("end('" + xid + "', " + flags + ')');
+ if (exceptionInEnd) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ }
+
+ public int prepare(Xid xid) throws XAException {
+ log.info("prepare('" + xid + "')");
+ if (crashInPrepare) {
+ Runtime.getRuntime().halt(0);
+ }
+ if (exceptionInPrepare) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ if (hangInPrepare) {
+ try {
+ Thread.sleep(Long.MAX_VALUE);
+ } catch (InterruptedException ignore) { }
+ }
+ // Save the xid before return XA_OK
+ saveRecoverXid(xid);
+ return XAResource.XA_OK;
+ }
+
+ public void commit(Xid xid, boolean onePhase) throws XAException {
+ log.info("commit('" + xid + "', " + onePhase + ')');
+ if (crashInCommit) {
+ Runtime.getRuntime().halt(0);
+ }
+ if (exceptionInCommit) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ if (hangInCommit) {
+ try {
+ Thread.sleep(Long.MAX_VALUE);
+ } catch (InterruptedException ignore) { }
+ }
+ // Commit suceeds, delete xid from
+ deleteRecoverXid(xid);
+ }
+
+ public void rollback(Xid xid) throws XAException {
+ log.info("rollback('" + xid + "')");
+ if (crashInRollback) {
+ Runtime.getRuntime().halt(0);
+ }
+ if (exceptionInRollback) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ deleteRecoverXid(xid);
+ }
+
+ public void forget(Xid xid) throws XAException {
+ log.trace("forget('" + xid + "')");
+ }
+
+ public Xid[] recover(int flags) throws XAException {
+ log.info("recover('" + flags + "')");
+ if (exceptionInRecover) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ Xid[] result = loadRecoverXids();
+ log.info("recover() returns: " + Arrays.asList(result));
+ return result;
+ }
+
+ public boolean isSameRM(XAResource xaResource) throws XAException {
+ boolean result =
+ this.resourceId == ((MockXAResource)xaResource).resourceId;
+ log.info("isSameRM() returns: " + result);
+ return result;
+ }
+
+ public int getTransactionTimeout() throws XAException {
+ return transactionTimeout;
+ }
+
+ public boolean setTransactionTimeout(int transactionTimeout) throws XAException {
+ this.transactionTimeout = transactionTimeout;
+ return true;
+ }
+
+
+ private Xid[] loadRecoverXids() {
+ File dataDir = new File(System.getProperty("jboss.server.data.dir"));
+ File debugDir = new File(dataDir, "debug");
+ if (!debugDir.exists()) {
+ return new Xid[0];
+ }
+ File[] files = debugDir.listFiles();
+ Xid[] xids = new Xid[files.length];
+ try {
+ for (int i = 0; i < files.length; i++) {
+ File f = files[i];
+ ObjectInputStream ois =
+ new ObjectInputStream(
+ new BufferedInputStream(
+ new FileInputStream(f)));
+ Object o = ois.readObject();
+ ois.close();
+ xids[i] = (Xid)o;
+ }
+ } catch (Exception ex) {
+ throw new RuntimeException("Error when load recoverXid", ex);
+ }
+ return xids;
+ }
+
+ private void saveRecoverXid(Xid xid) {
+ log.info("saveRecoverXid(): " + xid);
+ File dataDir = new File(System.getProperty("jboss.server.data.dir"));
+ File debugDir = new File(dataDir, "debug");
+ if (!debugDir.exists()) {
+ debugDir.mkdir();
+ }
+ File file = new File(debugDir, xid.toString());
+ try {
+ ObjectOutputStream oos =
+ new ObjectOutputStream(
+ new BufferedOutputStream(
+ new FileOutputStream(file)));
+ oos.writeObject(xid);
+ oos.flush();
+ oos.close();
+ } catch (Exception ex) {
+ throw new RuntimeException("Error when save recoverXid", ex);
+ }
+ }
+
+ private void deleteRecoverXid(Xid xid) {
+ log.info("deleteRecoverXid(): " + xid);
+ File dataDir = new File(System.getProperty("jboss.server.data.dir"));
+ File debugDir = new File(dataDir, "debug");
+ File file = new File(debugDir, xid.toString());
+ if (file.exists()) {
+ boolean success = file.delete();
+ if (!success) {
+ log.warn("Failed to delete recoverXid: " + file);
+ }
+ }
+ }
+}
diff --git a/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResourceRecovery.java b/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResourceRecovery.java
new file mode 100644
index 0000000..663159a
--- /dev/null
+++ b/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResourceRecovery.java
@@ -0,0 +1,20 @@
+package com.redhat.gss.mockxa;
+
+import org.jboss.tm.XAResourceRecovery;
+import javax.transaction.xa.XAResource;
+import org.jboss.logging.Logger;
+
+public class MockXAResourceRecovery implements XAResourceRecovery {
+
+ private static Logger log = Logger.getLogger(MockXAResourceRecovery.class);
+
+ public MockXAResourceRecovery() {
+ log.info("MockXAResourceRecovery");
+ }
+
+ public XAResource[] getXAResources() {
+ log.info("getXAResources()");
+ return new XAResource[] { new MockXAResource() };
+ }
+
+}
diff --git a/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResourceRecoveryRegistrationListener.java b/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResourceRecoveryRegistrationListener.java
new file mode 100644
index 0000000..25b27fe
--- /dev/null
+++ b/mockxa/src/main/java/com/redhat/gss/mockxa/MockXAResourceRecoveryRegistrationListener.java
@@ -0,0 +1,45 @@
+package com.redhat.gss.mockxa;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import org.jboss.as.server.CurrentServiceContainer;
+import org.jboss.as.txn.service.TxnServices;
+import org.jboss.logging.Logger;
+import org.jboss.msc.service.ServiceContainer;
+import org.jboss.msc.service.ServiceController;
+import org.jboss.tm.XAResourceRecoveryRegistry;
+
+public class MockXAResourceRecoveryRegistrationListener
+ implements ServletContextListener {
+
+ private static Logger log = Logger.getLogger(MockXAResourceRecoveryRegistrationListener.class);
+ private MockXAResourceRecovery mockXAResourceRecovery = new MockXAResourceRecovery();
+
+ public void contextInitialized(ServletContextEvent sce) {
+ try {
+ ServiceContainer container = CurrentServiceContainer.getServiceContainer();
+ ServiceController<XAResourceRecoveryRegistry> service =
+ (ServiceController<XAResourceRecoveryRegistry>)
+ container.getService(TxnServices.JBOSS_TXN_ARJUNA_RECOVERY_MANAGER);
+ XAResourceRecoveryRegistry registry = service.getValue();
+ registry.addXAResourceRecovery(mockXAResourceRecovery);
+ log.info("Registered MockXAResourceRecovery");
+ } catch (Exception ex) {
+ throw new RuntimeException("", ex);
+ }
+ }
+
+ public void contextDestroyed(ServletContextEvent sce) {
+ try {
+ ServiceContainer container = CurrentServiceContainer.getServiceContainer();
+ ServiceController<XAResourceRecoveryRegistry> service =
+ (ServiceController<XAResourceRecoveryRegistry>)
+ container.getService(TxnServices.JBOSS_TXN_ARJUNA_RECOVERY_MANAGER);
+ XAResourceRecoveryRegistry registry = service.getValue();
+ registry.removeXAResourceRecovery(mockXAResourceRecovery);
+ } catch (Exception ex) {
+ throw new RuntimeException("", ex);
+ }
+ }
+
+}
diff --git a/mockxa/src/main/java/com/redhat/gss/mockxa/SerializableMockXAResource.java b/mockxa/src/main/java/com/redhat/gss/mockxa/SerializableMockXAResource.java
new file mode 100644
index 0000000..365a349
--- /dev/null
+++ b/mockxa/src/main/java/com/redhat/gss/mockxa/SerializableMockXAResource.java
@@ -0,0 +1,7 @@
+package com.redhat.gss.mockxa;
+
+import java.io.Serializable;
+
+public class SerializableMockXAResource extends MockXAResource implements Serializable {
+
+}
diff --git a/mockxa/src/main/webapp/WEB-INF/jboss-deployment-structure.xml b/mockxa/src/main/webapp/WEB-INF/jboss-deployment-structure.xml
new file mode 100644
index 0000000..5670925
--- /dev/null
+++ b/mockxa/src/main/webapp/WEB-INF/jboss-deployment-structure.xml
@@ -0,0 +1,11 @@
+<jboss-deployment-structure>
+ <deployment>
+ <dependencies>
+ <module name="org.jboss.as.server" />
+ <module name="org.jboss.msc" />
+ <module name="org.jboss.jboss-transaction-spi" />
+ <module name="org.jboss.jts" />
+ <module name="org.jboss.as.transactions" />
+ </dependencies>
+ </deployment>
+</jboss-deployment-structure>
diff --git a/mockxa/src/main/webapp/WEB-INF/web.xml b/mockxa/src/main/webapp/WEB-INF/web.xml
new file mode 100644
index 0000000..5701985
--- /dev/null
+++ b/mockxa/src/main/webapp/WEB-INF/web.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app
+ xmlns="http://java.sun.com/xml/ns/j2ee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
+ http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
+ version="2.5">
+ <listener>
+ <listener-class>com.redhat.gss.mockxa.MockXAResourceRecoveryRegistrationListener</listener-class>
+ </listener>
+</web-app>
diff --git a/mockxa/src/main/webapp/txtest.jsp b/mockxa/src/main/webapp/txtest.jsp
new file mode 100644
index 0000000..93e7934
--- /dev/null
+++ b/mockxa/src/main/webapp/txtest.jsp
@@ -0,0 +1,31 @@
+<%@page import="javax.naming.InitialContext" %>
+<%@page import="javax.transaction.Transaction" %>
+<%@page import="javax.transaction.TransactionManager" %>
+<%@page import="javax.transaction.xa.XAException" %>
+<%@page import="com.redhat.gss.mockxa.MockXAResource" %>
+
+<%
+ try {
+ TransactionManager tm = InitialContext.doLookup("java:/TransactionManager");
+ // Clear existing tx
+ try { tm.rollback(); } catch (Exception ignore) { }
+
+ tm.begin();
+ Transaction t = tm.getTransaction();
+ MockXAResource res1 = new MockXAResource();
+ res1.resourceId = 1;
+ MockXAResource res2 = new MockXAResource();
+ res2.resourceId = 2;
+
+ //res2.crashInCommit = true;
+
+ res2.exceptionInCommit = true;
+ res2.exceptionErrorCode = XAException.XAER_RMERR;
+ t.enlistResource(res1);
+ t.enlistResource(res2);
+ tm.commit();
+ } catch (Exception ex) {
+ throw new RuntimeException("txtest", ex);
+ }
+
+%>
|
nekop/java-examples
|
1e93fb08e425cb8c1fa227f99c65761e723c9215
|
Add ws and iiop examples
|
diff --git a/as7-ejb/pom.xml b/as7-ejb/pom.xml
index 0f731d9..df9c44d 100644
--- a/as7-ejb/pom.xml
+++ b/as7-ejb/pom.xml
@@ -1,196 +1,260 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers.examples</groupId>
<artifactId>example-as7-ejb</artifactId>
<packaging>jar</packaging>
<name>example-as7-ejb</name>
<version>1.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<version.org.jboss.as.plugins.maven.plugin>7.3.Final</version.org.jboss.as.plugins.maven.plugin>
<version.org.jboss.bom>1.0.0.Final</version.org.jboss.bom>
<version.org.jboss.as>7.1.1.Final</version.org.jboss.as>
<version.compiler.plugin>2.3.1</version.compiler.plugin>
<version.exec.plugin>1.2.1</version.exec.plugin>
<maven.compiler.target>1.6</maven.compiler.target>
<maven.compiler.source>1.6</maven.compiler.source>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.jboss.bom</groupId>
<artifactId>jboss-javaee-6.0-with-tools</artifactId>
<version>${version.org.jboss.bom}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.jboss.as</groupId>
<artifactId>jboss-as-dist</artifactId>
<version>${version.org.jboss.as}</version>
<scope>import</scope>
<type>pom</type>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.jboss.spec.javax.ejb</groupId>
<artifactId>jboss-ejb-api_3.1_spec</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.jboss.ejb3</groupId>
<artifactId>jboss-ejb3-ext-api</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${version.compiler.plugin}</version>
<configuration>
<source>${maven.compiler.source}</source>
<target>${maven.compiler.target}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${version.exec.plugin}</version>
<executions>
<execution>
<goals>
<goal>java</goal>
</goals>
</execution>
</executions>
<configuration>
<mainClass>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</mainClass>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>client-deps</id>
<dependencies>
<dependency>
<groupId>org.jboss.spec.javax.ejb</groupId>
<artifactId>jboss-ejb-api_3.1_spec</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.spec.javax.transaction</groupId>
<artifactId>jboss-transaction-api_1.1_spec</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss</groupId>
<artifactId>jboss-ejb-client</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.xnio</groupId>
<artifactId>xnio-api</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.xnio</groupId>
<artifactId>xnio-nio</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.remoting3</groupId>
<artifactId>jboss-remoting</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.sasl</groupId>
<artifactId>jboss-sasl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.marshalling</groupId>
<artifactId>jboss-marshalling-river</artifactId>
<scope>runtime</scope>
</dependency>
</dependencies>
</profile>
<profile>
<id>client</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${version.exec.plugin}</version>
<executions>
<execution>
<goals>
<goal>exec</goal>
</goals>
</execution>
</executions>
<configuration>
<executable>java</executable>
<arguments>
<argument>-Djboss.ejb.client.properties.file.path=target/classes/nonclustered-jboss-ejb-client.properties</argument>
<argument>-Djava.util.logging.config.file=target/classes/finest-logging.properties</argument>
<argument>-classpath</argument>
<classpath />
<argument>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</argument>
<argument>HelloSLSB</argument>
</arguments>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>client-clustered</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${version.exec.plugin}</version>
<executions>
<execution>
<goals>
<goal>exec</goal>
</goals>
</execution>
</executions>
<configuration>
<executable>java</executable>
<arguments>
<argument>-Djboss.ejb.client.properties.file.path=target/classes/clustered-jboss-ejb-client.properties</argument>
<argument>-Djava.util.logging.config.file=target/classes/finest-logging.properties</argument>
<argument>-classpath</argument>
<classpath />
<argument>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</argument>
<argument>ClusteredHelloSLSB</argument>
</arguments>
</configuration>
</plugin>
</plugins>
</build>
</profile>
+ <profile>
+ <id>client-ws</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>${version.exec.plugin}</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <executable>java</executable>
+ <arguments>
+ <argument>-classpath</argument>
+ <classpath />
+ <argument>jp.programmers.examples.ejb3.slsb.ws.HelloWSClient</argument>
+ </arguments>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ <profile>
+ <id>client-iiop-deps</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.spec.javax.ejb</groupId>
+ <artifactId>jboss-ejb-api_3.1_spec</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>client-iiop</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>${version.exec.plugin}</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <executable>java</executable>
+ <arguments>
+ <argument>-classpath</argument>
+ <classpath />
+ <argument>jp.programmers.examples.ejb3.slsb.iiop.HelloIIOPClient</argument>
+ </arguments>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
</profiles>
</project>
diff --git a/as7-ejb/run-client-iiop.sh b/as7-ejb/run-client-iiop.sh
new file mode 100644
index 0000000..4228550
--- /dev/null
+++ b/as7-ejb/run-client-iiop.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient-iiop -Pclient-iiop-deps exec:exec
diff --git a/as7-ejb/run-client-ws.sh b/as7-ejb/run-client-ws.sh
new file mode 100644
index 0000000..c4f4d45
--- /dev/null
+++ b/as7-ejb/run-client-ws.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient-ws -Pclient-deps exec:exec
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/Hello.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/Hello.java
new file mode 100644
index 0000000..c5ff4d3
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/Hello.java
@@ -0,0 +1,9 @@
+package jp.programmers.examples.ejb3.sfsb;
+
+public interface Hello {
+ public String hello();
+ public String hello(String name);
+ public String getLastMessage();
+ public void exception();
+ public void destroy();
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSB.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSB.java
new file mode 100644
index 0000000..8af4fb8
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSB.java
@@ -0,0 +1,36 @@
+package jp.programmers.examples.ejb3.sfsb;
+
+import javax.ejb.Stateful;
+import javax.ejb.Remote;
+import javax.ejb.Remove;
+
+@Remote
+@Stateful
+public class HelloSFSB implements Hello {
+
+ private String lastMessage;
+
+ public String hello() {
+ System.out.println("HelloSFSB#hello()");
+ return this.hello("world");
+ }
+
+ public String hello(String name) {
+ System.out.println("HelloSFSB#hello(String)");
+ System.out.println("name=" + name);
+ lastMessage = "Hello " + name;
+ return lastMessage;
+ }
+
+ public String getLastMessage() {
+ return lastMessage;
+ }
+
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+ @Remove
+ public void destroy() {
+ }
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSBClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSBClient.java
new file mode 100644
index 0000000..bc95281
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSBClient.java
@@ -0,0 +1,30 @@
+package jp.programmers.examples.ejb3.sfsb;
+
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+
+public class HelloSFSBClient {
+
+ public static final String DEFAULT_PROVIDER_URL = "localhost:1099";
+
+ public static void main(String... args) throws Exception {
+ String providerUrl = DEFAULT_PROVIDER_URL;
+ if (args.length != 0) {
+ providerUrl = args[0];
+ }
+
+ String jndiName = "HelloSFSB/remote";
+ Properties props = new Properties();
+ props.put(Context.INITIAL_CONTEXT_FACTORY,
+ "org.jnp.interfaces.NamingContextFactory");
+ props.put(Context.URL_PKG_PREFIXES,
+ "org.jboss.naming:org.jnp.interfaces");
+ props.put(Context.PROVIDER_URL, providerUrl);
+ InitialContext context = new InitialContext(props);
+ Hello hello = (Hello)context.lookup(jndiName);
+ hello.hello();
+ hello.destroy();
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloHome.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloHome.java
new file mode 100644
index 0000000..08e1b93
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloHome.java
@@ -0,0 +1,9 @@
+package jp.programmers.examples.ejb3.slsb.iiop;
+
+import java.rmi.RemoteException;
+import javax.ejb.CreateException;
+import javax.ejb.EJBHome;
+
+public interface HelloHome extends EJBHome {
+ public HelloRemote create() throws CreateException, RemoteException;
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java
new file mode 100644
index 0000000..3e5a570
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOP.java
@@ -0,0 +1,36 @@
+package jp.programmers.examples.ejb3.slsb.iiop;
+
+import javax.ejb.RemoteHome;
+import javax.ejb.Stateless;
+import javax.ejb.SessionContext;
+import javax.annotation.Resource;
+
+@Stateless
+@RemoteHome(HelloHome.class)
+public class HelloIIOP {
+
+ @Resource
+ SessionContext ctx;
+
+ public String hello() {
+ System.out.println("HelloSLSB#hello()");
+ return this.hello("world");
+ }
+
+ public String hello(String name) {
+ System.out.println("HelloSLSB#hello(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+ public void sleep(long msec) {
+ try {
+ Thread.sleep(msec);
+ } catch (InterruptedException ignore) { }
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOPClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOPClient.java
new file mode 100644
index 0000000..8e5dbfb
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloIIOPClient.java
@@ -0,0 +1,40 @@
+package jp.programmers.examples.ejb3.slsb.iiop;
+
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.rmi.PortableRemoteObject;
+
+public class HelloIIOPClient {
+
+ public static final String DEFAULT_PROVIDER_URL = "corbaloc:iiop:localhost:3528/JBoss/Naming/root";
+
+ public static void main(String... args) throws Exception {
+ String providerUrl = DEFAULT_PROVIDER_URL;
+ if (args.length != 0) {
+ // We can use IOR for naming service like:
+ //providerUrl = "IOR:000000000000002B49444C3A6F6D672E6F72672F436F734E616D696E672F4E616D696E67436F6E746578744578743A312E3000000000000100000000000000B0000102000000000A3132372E302E302E31000DC8000000114A426F73732F4E616D696E672F726F6F74000000000000040000000000000008000000004A4143000000000100000020000000000501000100000001000100010001010900000002050100010001010000000014000000080000001A00000DC90000002100000030000000000000000100000000000000220000000000000000000000000000000000000000000000000000000000000000";
+ providerUrl = args[0];
+ }
+
+ System.setProperty("com.sun.CORBA.ORBUseDynamicStub", "true");
+ String jndiName = "HelloIIOP";
+ Properties props = new Properties();
+ props.put(Context.INITIAL_CONTEXT_FACTORY,
+ "com.sun.jndi.cosnaming.CNCtxFactory");
+ props.put(Context.PROVIDER_URL, providerUrl);
+ InitialContext context = new InitialContext(props);
+ Object o = context.lookup(jndiName);
+
+ // Or
+ //InitialContext context = new InitialContext();
+ //Object o = context.lookup("corbaname:iiop:localhost:3528#HelloIIOP");
+
+ System.out.println(o);
+ HelloHome helloHome =
+ (HelloHome)PortableRemoteObject.narrow(o, HelloHome.class);
+ HelloRemote hello = helloHome.create();
+ System.out.println(hello.hello());
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloRemote.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloRemote.java
new file mode 100644
index 0000000..fba1fcf
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/iiop/HelloRemote.java
@@ -0,0 +1,12 @@
+package jp.programmers.examples.ejb3.slsb.iiop;
+
+import java.rmi.RemoteException;
+import javax.ejb.EJBObject;
+
+// Cannot extend Hello because we have to throw RemoteException in EJB 2.x :(
+public interface HelloRemote extends EJBObject {
+ public String hello() throws RemoteException;
+ public String hello(String name) throws RemoteException;
+ public void exception() throws RemoteException;
+ public void sleep(long msec) throws RemoteException;
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java
index ebf0ae7..8f65a5e 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java
@@ -1,18 +1,18 @@
package jp.programmers.examples.ejb3.slsb.ws;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.xml.ws.Service;
public class HelloWSClient {
public static void main(String... args) throws Exception {
- URL wsdlLocation = new URL("http://127.0.0.1:8080/example-ejb3slsb/HelloWS?wsdl");
+ URL wsdlLocation = new URL("http://127.0.0.1:8080/example-as7-ejb/HelloWS?wsdl");
QName serviceName = new QName("http://ws.slsb.ejb3.examples.programmers.jp/", "HelloWSService");
Service service = Service.create(wsdlLocation, serviceName);
QName portName = new QName("http://ws.slsb.ejb3.examples.programmers.jp/", "HelloWSPort");
HelloEndpoint hello = service.getPort(portName, HelloEndpoint.class);
hello.hello();
}
}
diff --git a/as7-ejb/src/main/resources/META-INF/jboss-ejb3.xml b/as7-ejb/src/main/resources/META-INF/jboss-ejb3.xml
new file mode 100644
index 0000000..15b012a
--- /dev/null
+++ b/as7-ejb/src/main/resources/META-INF/jboss-ejb3.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<jboss:ejb-jar
+ xmlns:jboss="http://www.jboss.com/xml/ns/javaee"
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:iiop="urn:iiop"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.jboss.com/xml/ns/javaee http://www.jboss.org/j2ee/schema/jboss-ejb3-2_0.xsd
+ http://java.sun.com/xml/ns/javaee http://www.jboss.org/j2ee/schema/jboss-ejb3-spec-2_0.xsd
+ urn:iiop jboss-ejb-iiop_1_0.xsd"
+ version="3.1"
+ impl-version="2.0">
+ <assembly-descriptor>
+ <iiop:iiop>
+ <ejb-name>*</ejb-name>
+ </iiop:iiop>
+ </assembly-descriptor>
+</jboss:ejb-jar>
|
nekop/java-examples
|
19d210987c9ac172eaf658f0c6b5810603508240
|
Added DebugSessionListener
|
diff --git a/servlet/src/main/java/jp/programmers/examples/DebugSessionListener.java b/servlet/src/main/java/jp/programmers/examples/DebugSessionListener.java
new file mode 100644
index 0000000..d0109e2
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/DebugSessionListener.java
@@ -0,0 +1,55 @@
+package jp.programmers.examples;
+
+import javax.servlet.http.HttpSessionBindingListener;
+import javax.servlet.http.HttpSessionListener;
+import javax.servlet.http.HttpSessionActivationListener;
+import javax.servlet.http.HttpSessionAttributeListener;
+import javax.servlet.http.HttpSessionBindingEvent;
+import javax.servlet.http.HttpSessionEvent;
+import java.io.Serializable;
+
+public class DebugSessionListener
+ implements HttpSessionBindingListener, HttpSessionActivationListener,
+ HttpSessionListener, HttpSessionAttributeListener,
+ Serializable {
+
+ // HttpSessionBindingListener methods, need to be set by HttpSession.setAttribute()
+
+ public void valueBound(HttpSessionBindingEvent event) {
+ System.out.println("DebugSessionListener.valueBound");
+ }
+ public void valueUnbound(HttpSessionBindingEvent event) {
+ System.out.println("DebugSessionListener.valueUnbound");
+ }
+
+ // HttpSessionActivationListener methods, need to be set by HttpSession.setAttribute()
+
+ public void sessionDidActivate(HttpSessionEvent se) {
+ System.out.println("DebugSessionListener.sessionDidActivate");
+ }
+ public void sessionWillPassivate(HttpSessionEvent se) {
+ System.out.println("DebugSessionListener.sessionWillPassivate");
+ }
+
+ // HttpSessionListener methods, need to be registered by ServletContext.addListener()
+
+ public void sessionCreated(HttpSessionEvent se) {
+ System.out.println("DebugSessionListener.sessionCreated");
+ }
+ public void sessionDestroyed(HttpSessionEvent se) {
+ System.out.println("DebugSessionListener.sessionDestroyed");
+ }
+
+ // HttpSessionAttributeListener methods, need to be registered by ServletContext.addListener()
+
+ public void attributeAdded(HttpSessionBindingEvent event) {
+ System.out.println("DebugSessionListener.attributeAdded");
+ }
+ public void attributeRemoved(HttpSessionBindingEvent event) {
+ System.out.println("DebugSessionListener.attributeRemoved");
+ }
+ public void attributeReplaced(HttpSessionBindingEvent event) {
+ System.out.println("DebugSessionListener.attributeReplaced");
+ }
+
+}
diff --git a/servlet/src/main/webapp/WEB-INF/jboss-web.xml b/servlet/src/main/webapp/WEB-INF/jboss-web.xml
new file mode 100644
index 0000000..e86ae25
--- /dev/null
+++ b/servlet/src/main/webapp/WEB-INF/jboss-web.xml
@@ -0,0 +1,6 @@
+<jboss-web>
+ <replication-config>
+ <replication-trigger>SET_AND_NON_PRIMITIVE_GET</replication-trigger>
+ <replication-granularity>ATTRIBUTE</replication-granularity>
+ </replication-config>
+</jboss-web>
diff --git a/servlet/src/main/webapp/WEB-INF/web.xml b/servlet/src/main/webapp/WEB-INF/web.xml
index 070c2db..e09beec 100644
--- a/servlet/src/main/webapp/WEB-INF/web.xml
+++ b/servlet/src/main/webapp/WEB-INF/web.xml
@@ -1,27 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app
xmlns="http://java.sun.com/xml/ns/javaee"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:web="http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
version="2.5">
+ <distributable />
+
+ <listener>
+ <listener-class>jp.programmers.examples.DebugSessionListener</listener-class>
+ </listener>
+
+
<servlet>
<servlet-name>Hello Servlet</servlet-name>
<servlet-class>jp.programmers.examples.HelloServlet</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>Hello Servlet</servlet-name>
<url-pattern>/hello</url-pattern>
</servlet-mapping>
<filter>
<filter-name>ContentLengthFilter</filter-name>
<filter-class>jp.programmers.examples.ContentLengthFilter</filter-class>
</filter>
<filter-mapping>
<filter-name>ContentLengthFilter</filter-name>
<url-pattern>/length/*</url-pattern>
</filter-mapping>
+ <session-config>
+ <session-timeout>1</session-timeout>
+ </session-config>
+
</web-app>
diff --git a/servlet/src/main/webapp/invalidate.jsp b/servlet/src/main/webapp/invalidate.jsp
new file mode 100644
index 0000000..a8e0bc5
--- /dev/null
+++ b/servlet/src/main/webapp/invalidate.jsp
@@ -0,0 +1,4 @@
+<%
+session.invalidate();
+%>
+ok
diff --git a/servlet/src/main/webapp/listener.jsp b/servlet/src/main/webapp/listener.jsp
new file mode 100644
index 0000000..935e15c
--- /dev/null
+++ b/servlet/src/main/webapp/listener.jsp
@@ -0,0 +1,6 @@
+<%
+jp.programmers.examples.DebugSessionListener listener =
+ new jp.programmers.examples.DebugSessionListener();
+session.setAttribute("DebugSessionListener", listener);
+%>
+ok
|
nekop/java-examples
|
de30b4c632273bf78d093b47c189a22bb2a37203
|
Add debug print in sleep() method
|
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
index 1225ffd..9d14322 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
@@ -1,51 +1,52 @@
package jp.programmers.examples.ejb3.slsb;
import javax.ejb.Local;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import javax.ejb.Timeout;
import javax.ejb.Timer;
import javax.ejb.SessionContext;
import javax.annotation.Resource;
@Remote
@Stateless
public class HelloSLSB implements Hello {
@Resource
SessionContext ctx;
String simpleName = getClass().getSimpleName();
public String hello() {
System.out.println(simpleName + "#hello()");
return this.hello("world");
}
public String hello(String name) {
System.out.println(simpleName + "#hello(String)");
System.out.println("name=" + name);
return "Hello " + name;
}
@Timeout
public void ejbTimeout(Timer timer) {
System.out.println(simpleName + "#ejbTimeout(Timer)");
System.out.println("timer=" + timer);
}
public void initTimer() {
ctx.getTimerService().createTimer(0, 20 * 1000, null);
}
public void exception() {
throw new RuntimeException();
}
public void sleep(long msec) {
+ System.out.println(simpleName + "#sleep()");
try {
Thread.sleep(msec);
} catch (InterruptedException ignore) { }
}
}
|
nekop/java-examples
|
1925eaa766015689f386d8573d03bcc67d74576c
|
Remove debug lines
|
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
index dfd2b73..98b397e 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
@@ -1,29 +1,26 @@
package jp.programmers.examples.ejb3.slsb;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
import jp.programmers.examples.ejb3.slsb.Hello;
public class HelloSLSBClient {
public static final String EJBJAR_NAME = "example-as7-ejb";
public static final String EJB_NAME = "HelloSLSB";
public static void main(String... args) throws Exception {
String ejbName = EJB_NAME;
if (args.length != 0) {
ejbName = args[0];
}
String jndiName = "ejb:/" + EJBJAR_NAME + "/" + ejbName + "!" + Hello.class.getName();
Properties props = new Properties();
props.put(Context.URL_PKG_PREFIXES, "org.jboss.ejb.client.naming");
InitialContext context = new InitialContext(props);
- java.util.logging.Logger.getLogger(HelloSLSBClient.class.getName()).fine("info");
- java.util.logging.Logger.getLogger(HelloSLSBClient.class.getName()).fine("fine");
- java.util.logging.Logger.getLogger(HelloSLSBClient.class.getName()).severe("severe");
Hello hello = (Hello)context.lookup(jndiName);
hello.hello();
}
}
|
nekop/java-examples
|
aaa9497305364a39d8040403bc50b1aa058b98d5
|
Fix clustered client setup
|
diff --git a/as7-ejb/pom.xml b/as7-ejb/pom.xml
index 678e28c..0f731d9 100644
--- a/as7-ejb/pom.xml
+++ b/as7-ejb/pom.xml
@@ -1,160 +1,196 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers.examples</groupId>
<artifactId>example-as7-ejb</artifactId>
<packaging>jar</packaging>
<name>example-as7-ejb</name>
<version>1.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<version.org.jboss.as.plugins.maven.plugin>7.3.Final</version.org.jboss.as.plugins.maven.plugin>
<version.org.jboss.bom>1.0.0.Final</version.org.jboss.bom>
<version.org.jboss.as>7.1.1.Final</version.org.jboss.as>
<version.compiler.plugin>2.3.1</version.compiler.plugin>
<version.exec.plugin>1.2.1</version.exec.plugin>
<maven.compiler.target>1.6</maven.compiler.target>
<maven.compiler.source>1.6</maven.compiler.source>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.jboss.bom</groupId>
<artifactId>jboss-javaee-6.0-with-tools</artifactId>
<version>${version.org.jboss.bom}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.jboss.as</groupId>
<artifactId>jboss-as-dist</artifactId>
<version>${version.org.jboss.as}</version>
<scope>import</scope>
<type>pom</type>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.jboss.spec.javax.ejb</groupId>
<artifactId>jboss-ejb-api_3.1_spec</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.jboss.ejb3</groupId>
<artifactId>jboss-ejb3-ext-api</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${version.compiler.plugin}</version>
<configuration>
<source>${maven.compiler.source}</source>
<target>${maven.compiler.target}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${version.exec.plugin}</version>
<executions>
<execution>
<goals>
<goal>java</goal>
</goals>
</execution>
</executions>
<configuration>
<mainClass>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</mainClass>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
- <id>client-as7</id>
+ <id>client-deps</id>
<dependencies>
<dependency>
<groupId>org.jboss.spec.javax.ejb</groupId>
<artifactId>jboss-ejb-api_3.1_spec</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.spec.javax.transaction</groupId>
<artifactId>jboss-transaction-api_1.1_spec</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss</groupId>
<artifactId>jboss-ejb-client</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.xnio</groupId>
<artifactId>xnio-api</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.xnio</groupId>
<artifactId>xnio-nio</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.remoting3</groupId>
<artifactId>jboss-remoting</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.sasl</groupId>
<artifactId>jboss-sasl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.jboss.marshalling</groupId>
<artifactId>jboss-marshalling-river</artifactId>
<scope>runtime</scope>
</dependency>
</dependencies>
+ </profile>
+ <profile>
+ <id>client</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>${version.exec.plugin}</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <executable>java</executable>
+ <arguments>
+ <argument>-Djboss.ejb.client.properties.file.path=target/classes/nonclustered-jboss-ejb-client.properties</argument>
+ <argument>-Djava.util.logging.config.file=target/classes/finest-logging.properties</argument>
+ <argument>-classpath</argument>
+ <classpath />
+ <argument>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</argument>
+ <argument>HelloSLSB</argument>
+ </arguments>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ <profile>
+ <id>client-clustered</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${version.exec.plugin}</version>
<executions>
<execution>
<goals>
- <goal>java</goal>
+ <goal>exec</goal>
</goals>
</execution>
</executions>
<configuration>
- <mainClass>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</mainClass>
-<!--
+ <executable>java</executable>
<arguments>
+ <argument>-Djboss.ejb.client.properties.file.path=target/classes/clustered-jboss-ejb-client.properties</argument>
+ <argument>-Djava.util.logging.config.file=target/classes/finest-logging.properties</argument>
+ <argument>-classpath</argument>
+ <classpath />
+ <argument>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</argument>
<argument>ClusteredHelloSLSB</argument>
</arguments>
--->
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
diff --git a/as7-ejb/run-client-clustered.sh b/as7-ejb/run-client-clustered.sh
new file mode 100644
index 0000000..2cdf80f
--- /dev/null
+++ b/as7-ejb/run-client-clustered.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient-clustered -Pclient-deps exec:exec
diff --git a/as7-ejb/run-client.sh b/as7-ejb/run-client.sh
new file mode 100644
index 0000000..c6a38ce
--- /dev/null
+++ b/as7-ejb/run-client.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient -Pclient-deps exec:exec
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
index 98b397e..dfd2b73 100644
--- a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
@@ -1,26 +1,29 @@
package jp.programmers.examples.ejb3.slsb;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
import jp.programmers.examples.ejb3.slsb.Hello;
public class HelloSLSBClient {
public static final String EJBJAR_NAME = "example-as7-ejb";
public static final String EJB_NAME = "HelloSLSB";
public static void main(String... args) throws Exception {
String ejbName = EJB_NAME;
if (args.length != 0) {
ejbName = args[0];
}
String jndiName = "ejb:/" + EJBJAR_NAME + "/" + ejbName + "!" + Hello.class.getName();
Properties props = new Properties();
props.put(Context.URL_PKG_PREFIXES, "org.jboss.ejb.client.naming");
InitialContext context = new InitialContext(props);
+ java.util.logging.Logger.getLogger(HelloSLSBClient.class.getName()).fine("info");
+ java.util.logging.Logger.getLogger(HelloSLSBClient.class.getName()).fine("fine");
+ java.util.logging.Logger.getLogger(HelloSLSBClient.class.getName()).severe("severe");
Hello hello = (Hello)context.lookup(jndiName);
hello.hello();
}
}
diff --git a/as7-ejb/src/main/resources/META-INF/ejb-jar.xml b/as7-ejb/src/main/resources/META-INF/ejb-jar.xml
deleted file mode 100644
index 72ceff8..0000000
--- a/as7-ejb/src/main/resources/META-INF/ejb-jar.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-
-<ejb-jar
- version="3.0"
- xmlns="http://java.sun.com/xml/ns/javaee"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
- http://java.sun.com/xml/ns/javaee/ejb-jar_3_0.xsd">
- <!--
- <enterprise-beans>
- <session>
- <ejb-name>HelloSLSB</ejb-name>
- <ejb-class>jp.programmers.examples.ejb3.slsb.HelloSLSB</ejb-class>
- </session>
- </enterprise-beans>
- -->
-</ejb-jar>
diff --git a/as7-ejb/src/main/resources/META-INF/jboss.xml b/as7-ejb/src/main/resources/META-INF/jboss.xml
deleted file mode 100644
index edbf363..0000000
--- a/as7-ejb/src/main/resources/META-INF/jboss.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-
-<jboss version="5.0"
- xmlns="http://www.jboss.com/xml/ns/javaee"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://www.jboss.com/xml/ns/javaee
- http://www.jboss.org/j2ee/schema/jboss_5_0.xsd">
- <!--
- <enterprise-beans>
- <session>
- <ejb-name>HelloSLSB</ejb-name>
- </session>
- </enterprise-beans>
- -->
-</jboss>
diff --git a/as7-ejb/src/main/resources/client-log4j.xml b/as7-ejb/src/main/resources/client-log4j.xml
deleted file mode 100644
index efcfd67..0000000
--- a/as7-ejb/src/main/resources/client-log4j.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
-
-<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
-
- <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
- <param name="File" value="client.log"/>
- <param name="Append" value="false"/>
- <param name="DatePattern" value="'.'yyyy-MM-dd"/>
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
- </layout>
- </appender>
-
- <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
- <param name="Target" value="System.out"/>
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
- </layout>
- </appender>
-
- <root>
- <level value="ALL"/>
- <appender-ref ref="CONSOLE"/>
-<!--
- <appender-ref ref="FILE"/>
--->
- </root>
-
-</log4j:configuration>
diff --git a/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties b/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties
index 2a5e74a..204840f 100644
--- a/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties
+++ b/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties
@@ -1,22 +1,22 @@
remote.connectionprovider.create.options.org.xnio.Options.SSL_ENABLED=false
-remote.connections=n1,n2
+remote.connections=node1,node2
remote.connection.localhost.host=localhost
remote.connection.localhost.port=4447
remote.connection.localhost.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
remote.connection.localhost.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
remote.connection.node1.host=node1
remote.connection.node1.port=4447
remote.connection.node1.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
remote.connection.node1.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
remote.connection.node2.host=node2
remote.connection.node2.port=4447
remote.connection.node2.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
remote.connection.node2.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
remote.clusters=ejb
remote.cluster.ejb.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
remote.cluster.ejb.connect.options.org.xnio.Options.SSL_ENABLED=false
diff --git a/as7-ejb/src/main/resources/finest-logging.properties b/as7-ejb/src/main/resources/finest-logging.properties
new file mode 100644
index 0000000..f1823d4
--- /dev/null
+++ b/as7-ejb/src/main/resources/finest-logging.properties
@@ -0,0 +1,4 @@
+handlers=java.util.logging.ConsoleHandler
+.level=FINEST
+java.util.logging.ConsoleHandler.level=FINEST
+java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter
diff --git a/as7-ejb/src/main/resources/jboss-ejb-client.properties b/as7-ejb/src/main/resources/nonclustered-jboss-ejb-client.properties
similarity index 100%
rename from as7-ejb/src/main/resources/jboss-ejb-client.properties
rename to as7-ejb/src/main/resources/nonclustered-jboss-ejb-client.properties
|
nekop/java-examples
|
4749624374ccb666e109d7ba8176e19f1f572df5
|
Add as7-ejb
|
diff --git a/as7-ejb/pom.xml b/as7-ejb/pom.xml
new file mode 100644
index 0000000..678e28c
--- /dev/null
+++ b/as7-ejb/pom.xml
@@ -0,0 +1,160 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-as7-ejb</artifactId>
+ <packaging>jar</packaging>
+ <name>example-as7-ejb</name>
+ <version>1.0</version>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ <version.org.jboss.as.plugins.maven.plugin>7.3.Final</version.org.jboss.as.plugins.maven.plugin>
+ <version.org.jboss.bom>1.0.0.Final</version.org.jboss.bom>
+ <version.org.jboss.as>7.1.1.Final</version.org.jboss.as>
+ <version.compiler.plugin>2.3.1</version.compiler.plugin>
+ <version.exec.plugin>1.2.1</version.exec.plugin>
+ <maven.compiler.target>1.6</maven.compiler.target>
+ <maven.compiler.source>1.6</maven.compiler.source>
+ </properties>
+
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.bom</groupId>
+ <artifactId>jboss-javaee-6.0-with-tools</artifactId>
+ <version>${version.org.jboss.bom}</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.as</groupId>
+ <artifactId>jboss-as-dist</artifactId>
+ <version>${version.org.jboss.as}</version>
+ <scope>import</scope>
+ <type>pom</type>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.spec.javax.ejb</groupId>
+ <artifactId>jboss-ejb-api_3.1_spec</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.ejb3</groupId>
+ <artifactId>jboss-ejb3-ext-api</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>${version.compiler.plugin}</version>
+ <configuration>
+ <source>${maven.compiler.source}</source>
+ <target>${maven.compiler.target}</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>${version.exec.plugin}</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>java</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <mainClass>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</mainClass>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>client-as7</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.spec.javax.ejb</groupId>
+ <artifactId>jboss-ejb-api_3.1_spec</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.spec.javax.transaction</groupId>
+ <artifactId>jboss-transaction-api_1.1_spec</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jboss-ejb-client</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.xnio</groupId>
+ <artifactId>xnio-api</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.xnio</groupId>
+ <artifactId>xnio-nio</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.remoting3</groupId>
+ <artifactId>jboss-remoting</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.sasl</groupId>
+ <artifactId>jboss-sasl</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.marshalling</groupId>
+ <artifactId>jboss-marshalling-river</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>${version.exec.plugin}</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>java</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <mainClass>jp.programmers.examples.ejb3.slsb.HelloSLSBClient</mainClass>
+<!--
+ <arguments>
+ <argument>ClusteredHelloSLSB</argument>
+ </arguments>
+-->
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+
+</project>
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/Hello.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/Hello.java
new file mode 100644
index 0000000..9008490
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/Hello.java
@@ -0,0 +1,12 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import javax.ejb.Timer;
+
+public interface Hello {
+ public String hello();
+ public String hello(String name);
+ public void ejbTimeout(Timer timer);
+ public void initTimer();
+ public void exception();
+ public void sleep(long msec);
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
new file mode 100644
index 0000000..1225ffd
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
@@ -0,0 +1,51 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import javax.ejb.Local;
+import javax.ejb.Remote;
+import javax.ejb.Stateless;
+import javax.ejb.Timeout;
+import javax.ejb.Timer;
+import javax.ejb.SessionContext;
+import javax.annotation.Resource;
+
+@Remote
+@Stateless
+public class HelloSLSB implements Hello {
+
+ @Resource
+ SessionContext ctx;
+
+ String simpleName = getClass().getSimpleName();
+
+ public String hello() {
+ System.out.println(simpleName + "#hello()");
+ return this.hello("world");
+ }
+
+ public String hello(String name) {
+ System.out.println(simpleName + "#hello(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+
+ @Timeout
+ public void ejbTimeout(Timer timer) {
+ System.out.println(simpleName + "#ejbTimeout(Timer)");
+ System.out.println("timer=" + timer);
+ }
+
+ public void initTimer() {
+ ctx.getTimerService().createTimer(0, 20 * 1000, null);
+ }
+
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+ public void sleep(long msec) {
+ try {
+ Thread.sleep(msec);
+ } catch (InterruptedException ignore) { }
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
new file mode 100644
index 0000000..98b397e
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
@@ -0,0 +1,26 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import jp.programmers.examples.ejb3.slsb.Hello;
+
+public class HelloSLSBClient {
+
+ public static final String EJBJAR_NAME = "example-as7-ejb";
+ public static final String EJB_NAME = "HelloSLSB";
+
+ public static void main(String... args) throws Exception {
+ String ejbName = EJB_NAME;
+ if (args.length != 0) {
+ ejbName = args[0];
+ }
+ String jndiName = "ejb:/" + EJBJAR_NAME + "/" + ejbName + "!" + Hello.class.getName();
+ Properties props = new Properties();
+ props.put(Context.URL_PKG_PREFIXES, "org.jboss.ejb.client.naming");
+ InitialContext context = new InitialContext(props);
+ Hello hello = (Hello)context.lookup(jndiName);
+ hello.hello();
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
new file mode 100644
index 0000000..d469934
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
@@ -0,0 +1,29 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+public class HelloSLSBLoadClient {
+
+ public static void main(final String... args) throws Exception {
+ // Confirm single request works before load testing
+ HelloSLSBClient.main(args);
+
+ ExecutorService ex = Executors.newCachedThreadPool();
+ Runnable r = new Runnable() {
+ public void run() {
+ try {
+ HelloSLSBClient.main(args);
+ } catch (Exception ignore) {
+ }
+ }
+ };
+ for (int i = 0; i < 2000; i++) {
+ ex.execute(r);
+ }
+ ex.awaitTermination(60, TimeUnit.SECONDS);
+ ex.shutdown();
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/clustered/ClusteredHelloSLSB.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/clustered/ClusteredHelloSLSB.java
new file mode 100644
index 0000000..9f0f180
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/clustered/ClusteredHelloSLSB.java
@@ -0,0 +1,16 @@
+package jp.programmers.examples.ejb3.slsb.clustered;
+
+import javax.ejb.Remote;
+import javax.ejb.Stateless;
+import jp.programmers.examples.ejb3.slsb.Hello;
+import jp.programmers.examples.ejb3.slsb.HelloSLSB;
+import org.jboss.ejb3.annotation.Clustered;
+
+@Remote
+@Stateless
[email protected]
+//@org.jboss.ejb3.annotation.Clustered(loadBalancePolicy="FirstAvailableIdenticalAllProxies")
+//@org.jboss.ejb3.annotation.Clustered(loadBalancePolicy="RandomRobin")
+public class ClusteredHelloSLSB extends HelloSLSB implements Hello {
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloEndpoint.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloEndpoint.java
new file mode 100644
index 0000000..c1b1876
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloEndpoint.java
@@ -0,0 +1,8 @@
+package jp.programmers.examples.ejb3.slsb.ws;
+
+import javax.jws.WebService;
+
+@WebService
+public interface HelloEndpoint {
+ public String hello();
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java
new file mode 100644
index 0000000..b44fb56
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWS.java
@@ -0,0 +1,22 @@
+package jp.programmers.examples.ejb3.slsb.ws;
+
+import javax.ejb.Remote;
+import javax.ejb.Stateless;
+import javax.jws.WebService;
+import javax.jws.WebMethod;
+import javax.jws.soap.SOAPBinding;
+
+import jp.programmers.examples.ejb3.slsb.HelloSLSB;
+
+@Remote
+@Stateless
+@WebService
+@SOAPBinding
+public class HelloWS extends HelloSLSB implements HelloEndpoint {
+
+ @WebMethod
+ public String hello() {
+ return super.hello();
+ }
+
+}
diff --git a/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java
new file mode 100644
index 0000000..ebf0ae7
--- /dev/null
+++ b/as7-ejb/src/main/java/jp/programmers/examples/ejb3/slsb/ws/HelloWSClient.java
@@ -0,0 +1,18 @@
+package jp.programmers.examples.ejb3.slsb.ws;
+
+import java.net.URL;
+import javax.xml.namespace.QName;
+import javax.xml.ws.Service;
+
+public class HelloWSClient {
+
+ public static void main(String... args) throws Exception {
+ URL wsdlLocation = new URL("http://127.0.0.1:8080/example-ejb3slsb/HelloWS?wsdl");
+ QName serviceName = new QName("http://ws.slsb.ejb3.examples.programmers.jp/", "HelloWSService");
+ Service service = Service.create(wsdlLocation, serviceName);
+ QName portName = new QName("http://ws.slsb.ejb3.examples.programmers.jp/", "HelloWSPort");
+ HelloEndpoint hello = service.getPort(portName, HelloEndpoint.class);
+ hello.hello();
+ }
+
+}
diff --git a/as7-ejb/src/main/resources/META-INF/ejb-jar.xml b/as7-ejb/src/main/resources/META-INF/ejb-jar.xml
new file mode 100644
index 0000000..72ceff8
--- /dev/null
+++ b/as7-ejb/src/main/resources/META-INF/ejb-jar.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<ejb-jar
+ version="3.0"
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
+ http://java.sun.com/xml/ns/javaee/ejb-jar_3_0.xsd">
+ <!--
+ <enterprise-beans>
+ <session>
+ <ejb-name>HelloSLSB</ejb-name>
+ <ejb-class>jp.programmers.examples.ejb3.slsb.HelloSLSB</ejb-class>
+ </session>
+ </enterprise-beans>
+ -->
+</ejb-jar>
diff --git a/as7-ejb/src/main/resources/META-INF/jboss.xml b/as7-ejb/src/main/resources/META-INF/jboss.xml
new file mode 100644
index 0000000..edbf363
--- /dev/null
+++ b/as7-ejb/src/main/resources/META-INF/jboss.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<jboss version="5.0"
+ xmlns="http://www.jboss.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.jboss.com/xml/ns/javaee
+ http://www.jboss.org/j2ee/schema/jboss_5_0.xsd">
+ <!--
+ <enterprise-beans>
+ <session>
+ <ejb-name>HelloSLSB</ejb-name>
+ </session>
+ </enterprise-beans>
+ -->
+</jboss>
diff --git a/as7-ejb/src/main/resources/client-log4j.xml b/as7-ejb/src/main/resources/client-log4j.xml
new file mode 100644
index 0000000..efcfd67
--- /dev/null
+++ b/as7-ejb/src/main/resources/client-log4j.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
+
+ <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
+ <param name="File" value="client.log"/>
+ <param name="Append" value="false"/>
+ <param name="DatePattern" value="'.'yyyy-MM-dd"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
+ </layout>
+ </appender>
+
+ <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
+ </layout>
+ </appender>
+
+ <root>
+ <level value="ALL"/>
+ <appender-ref ref="CONSOLE"/>
+<!--
+ <appender-ref ref="FILE"/>
+-->
+ </root>
+
+</log4j:configuration>
diff --git a/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties b/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties
new file mode 100644
index 0000000..2a5e74a
--- /dev/null
+++ b/as7-ejb/src/main/resources/clustered-jboss-ejb-client.properties
@@ -0,0 +1,22 @@
+remote.connectionprovider.create.options.org.xnio.Options.SSL_ENABLED=false
+
+remote.connections=n1,n2
+
+remote.connection.localhost.host=localhost
+remote.connection.localhost.port=4447
+remote.connection.localhost.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
+remote.connection.localhost.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
+
+remote.connection.node1.host=node1
+remote.connection.node1.port=4447
+remote.connection.node1.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
+remote.connection.node1.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
+
+remote.connection.node2.host=node2
+remote.connection.node2.port=4447
+remote.connection.node2.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
+remote.connection.node2.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
+
+remote.clusters=ejb
+remote.cluster.ejb.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
+remote.cluster.ejb.connect.options.org.xnio.Options.SSL_ENABLED=false
diff --git a/as7-ejb/src/main/resources/jboss-ejb-client.properties b/as7-ejb/src/main/resources/jboss-ejb-client.properties
new file mode 100644
index 0000000..80089eb
--- /dev/null
+++ b/as7-ejb/src/main/resources/jboss-ejb-client.properties
@@ -0,0 +1,8 @@
+remote.connectionprovider.create.options.org.xnio.Options.SSL_ENABLED=false
+
+remote.connections=localhost
+
+remote.connection.localhost.host=localhost
+remote.connection.localhost.port=4447
+remote.connection.localhost.connect.options.org.xnio.Options.SASL_POLICY_NOANONYMOUS=false
+remote.connection.localhost.connect.options.org.xnio.Options.SASL_POLICY_NOPLAINTEXT=false
|
nekop/java-examples
|
a5dc61811e07b410d133c1e39c82f525afbd4806
|
Cleanup
|
diff --git a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
index bd6fa28..968e73e 100644
--- a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
+++ b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
@@ -1,83 +1,88 @@
package jp.programmers.xnio3.examples;
import org.xnio.Xnio;
import org.xnio.XnioWorker;
import org.xnio.OptionMap;
import org.xnio.Options;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.channels.ConnectedStreamChannel;
import org.xnio.channels.AcceptingChannel;
import java.net.InetAddress;
import java.net.Inet4Address;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
public class EchoServer {
static String SERVER_HOST = "127.0.0.1";
static int SERVER_PORT = 54565;
- public static void main(String[] args) throws Exception {
- ChannelListener<ConnectedStreamChannel> serverListener = new ChannelListener<ConnectedStreamChannel>() {
- String lastRead = "";
+ static class EchoServerListener implements ChannelListener<ConnectedStreamChannel> {
+ String lastRead = "";
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ System.out.println("opened channel: " + channel);
+ channel.getReadSetter().set(new ReadListener());
+ channel.getWriteSetter().set(new WriteListener());
+ channel.getCloseSetter().set(new CloseListener());
+ channel.resumeReads();
+ }
+
+ class ReadListener implements ChannelListener<ConnectedStreamChannel> {
+ ByteBuffer buffer = ByteBuffer.allocate(1024);
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ try {
+ channel.read(buffer);
+ buffer.flip();
+ byte[] bytes = new byte[buffer.limit()];
+ buffer.get(bytes);
+ buffer.clear();
+ lastRead = new String(bytes, "UTF-8");
+ System.out.println("Read: " + lastRead);
+ channel.resumeWrites();
+ } catch (Throwable t) {
+ throw new RuntimeException("read error", t);
+ }
+ }
+ }
+ class WriteListener implements ChannelListener<ConnectedStreamChannel> {
public void handleEvent(final ConnectedStreamChannel channel) {
- System.out.println("opened channel: " + channel);
- channel.getCloseSetter().set(new ChannelListener<ConnectedStreamChannel>() {
- public void handleEvent(final ConnectedStreamChannel channel) {
- System.out.println("closed channel: " + channel);
- }
- });
- channel.getReadSetter().set(new ChannelListener<ConnectedStreamChannel>() {
- public void handleEvent(final ConnectedStreamChannel channel) {
- try {
- ByteBuffer buffer = ByteBuffer.allocate(1024);
- channel.read(buffer);
- buffer.clear();
- byte[] bytes = new byte[buffer.capacity()];
- buffer.get(bytes);
- lastRead = new String(bytes, "UTF-8");
- System.out.println("Read: " + lastRead);
- channel.resumeWrites();
- } catch (Throwable t) {
- throw new RuntimeException("read error", t);
- }
- }
- });
- channel.getWriteSetter().set(new ChannelListener<ConnectedStreamChannel>() {
- public void handleEvent(final ConnectedStreamChannel channel) {
- try {
- System.out.println("Write: " + lastRead);
- ByteBuffer buffer = ByteBuffer.wrap(lastRead.getBytes("UTF-8"));
- channel.write(buffer);
- channel.close();
- } catch (Throwable t) {
- throw new RuntimeException("write error", t);
- }
- }
- });
- channel.resumeReads();
+ try {
+ System.out.println("Write: " + lastRead);
+ ByteBuffer buffer = ByteBuffer.wrap(lastRead.getBytes("UTF-8"));
+ channel.write(buffer);
+ channel.close();
+ } catch (Throwable t) {
+ throw new RuntimeException("write error", t);
+ }
}
- };
+ }
+ class CloseListener implements ChannelListener<ConnectedStreamChannel> {
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ System.out.println("closed channel: " + channel);
+ }
+ }
+ }
+ public static void main(String[] args) throws Exception {
Xnio xnio =
Xnio.getInstance();
XnioWorker worker =
xnio.createWorker(OptionMap.create(Options.WORKER_WRITE_THREADS, 2, Options.WORKER_READ_THREADS, 2));
InetSocketAddress address =
new InetSocketAddress(Inet4Address.getByName(SERVER_HOST), SERVER_PORT);
ChannelListener<? super AcceptingChannel<ConnectedStreamChannel>> acceptListener =
- ChannelListeners.<ConnectedStreamChannel>openListenerAdapter(serverListener);
+ ChannelListeners.<ConnectedStreamChannel>openListenerAdapter(new EchoServerListener());
OptionMap optionMap =
OptionMap.create(Options.REUSE_ADDRESSES, Boolean.TRUE);
AcceptingChannel<? extends ConnectedStreamChannel> server =
worker.createStreamServer(address,
acceptListener,
optionMap);
server.resumeAccepts();
//server.close();
}
}
|
nekop/java-examples
|
e784a7568fc0b82046f88476283142bde6025e7e
|
Removed duplicated option
|
diff --git a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
index 9367262..bd6fa28 100644
--- a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
+++ b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
@@ -1,83 +1,83 @@
package jp.programmers.xnio3.examples;
import org.xnio.Xnio;
import org.xnio.XnioWorker;
import org.xnio.OptionMap;
import org.xnio.Options;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.channels.ConnectedStreamChannel;
import org.xnio.channels.AcceptingChannel;
import java.net.InetAddress;
import java.net.Inet4Address;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
public class EchoServer {
static String SERVER_HOST = "127.0.0.1";
static int SERVER_PORT = 54565;
public static void main(String[] args) throws Exception {
ChannelListener<ConnectedStreamChannel> serverListener = new ChannelListener<ConnectedStreamChannel>() {
String lastRead = "";
public void handleEvent(final ConnectedStreamChannel channel) {
System.out.println("opened channel: " + channel);
channel.getCloseSetter().set(new ChannelListener<ConnectedStreamChannel>() {
public void handleEvent(final ConnectedStreamChannel channel) {
System.out.println("closed channel: " + channel);
}
});
channel.getReadSetter().set(new ChannelListener<ConnectedStreamChannel>() {
public void handleEvent(final ConnectedStreamChannel channel) {
try {
ByteBuffer buffer = ByteBuffer.allocate(1024);
channel.read(buffer);
buffer.clear();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
lastRead = new String(bytes, "UTF-8");
System.out.println("Read: " + lastRead);
channel.resumeWrites();
} catch (Throwable t) {
throw new RuntimeException("read error", t);
}
}
});
channel.getWriteSetter().set(new ChannelListener<ConnectedStreamChannel>() {
public void handleEvent(final ConnectedStreamChannel channel) {
try {
System.out.println("Write: " + lastRead);
ByteBuffer buffer = ByteBuffer.wrap(lastRead.getBytes("UTF-8"));
channel.write(buffer);
channel.close();
} catch (Throwable t) {
throw new RuntimeException("write error", t);
}
}
});
channel.resumeReads();
}
};
Xnio xnio =
Xnio.getInstance();
XnioWorker worker =
xnio.createWorker(OptionMap.create(Options.WORKER_WRITE_THREADS, 2, Options.WORKER_READ_THREADS, 2));
InetSocketAddress address =
new InetSocketAddress(Inet4Address.getByName(SERVER_HOST), SERVER_PORT);
ChannelListener<? super AcceptingChannel<ConnectedStreamChannel>> acceptListener =
ChannelListeners.<ConnectedStreamChannel>openListenerAdapter(serverListener);
OptionMap optionMap =
OptionMap.create(Options.REUSE_ADDRESSES, Boolean.TRUE);
AcceptingChannel<? extends ConnectedStreamChannel> server =
worker.createStreamServer(address,
acceptListener,
- OptionMap.create(Options.REUSE_ADDRESSES, Boolean.TRUE));
+ optionMap);
server.resumeAccepts();
//server.close();
}
}
|
nekop/java-examples
|
ce87678d322b2d7d65973a9f5ad49e1febe8ab2b
|
Implement EchoServer
|
diff --git a/xnio3-example/pom.xml b/xnio3-example/pom.xml
index e1d1637..8712c2e 100644
--- a/xnio3-example/pom.xml
+++ b/xnio3-example/pom.xml
@@ -1,45 +1,50 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers</groupId>
<artifactId>xnio3-example</artifactId>
<name>xnio3-example</name>
<version>1.0</version>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.jboss.xnio</groupId>
<artifactId>xnio-api</artifactId>
<version>3.0.4.GA</version>
</dependency>
<dependency>
<groupId>org.jboss.xnio</groupId>
<artifactId>xnio-nio</artifactId>
<version>3.0.4.GA</version>
</dependency>
+ <dependency>
+ <groupId>org.jboss.logging</groupId>
+ <artifactId>jboss-logging</artifactId>
+ <version>3.1.1.GA</version>
+ </dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
diff --git a/xnio3-example/run.sh b/xnio3-example/run.sh
new file mode 100644
index 0000000..db9b1b0
--- /dev/null
+++ b/xnio3-example/run.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -e exec:java -Dexec.mainClass=jp.programmers.xnio3.examples.EchoServer
diff --git a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
index 3924311..9367262 100644
--- a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
+++ b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
@@ -1,9 +1,83 @@
package jp.programmers.xnio3.examples;
import org.xnio.Xnio;
+import org.xnio.XnioWorker;
+import org.xnio.OptionMap;
+import org.xnio.Options;
+import org.xnio.ChannelListener;
+import org.xnio.ChannelListeners;
+import org.xnio.channels.ConnectedStreamChannel;
+import org.xnio.channels.AcceptingChannel;
+import java.net.InetAddress;
+import java.net.Inet4Address;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
public class EchoServer {
+
+ static String SERVER_HOST = "127.0.0.1";
+ static int SERVER_PORT = 54565;
+
public static void main(String[] args) throws Exception {
- Xnio xnio = Xnio.getInstance();
+ ChannelListener<ConnectedStreamChannel> serverListener = new ChannelListener<ConnectedStreamChannel>() {
+ String lastRead = "";
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ System.out.println("opened channel: " + channel);
+ channel.getCloseSetter().set(new ChannelListener<ConnectedStreamChannel>() {
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ System.out.println("closed channel: " + channel);
+ }
+ });
+ channel.getReadSetter().set(new ChannelListener<ConnectedStreamChannel>() {
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ try {
+ ByteBuffer buffer = ByteBuffer.allocate(1024);
+ channel.read(buffer);
+ buffer.clear();
+ byte[] bytes = new byte[buffer.capacity()];
+ buffer.get(bytes);
+ lastRead = new String(bytes, "UTF-8");
+ System.out.println("Read: " + lastRead);
+ channel.resumeWrites();
+ } catch (Throwable t) {
+ throw new RuntimeException("read error", t);
+ }
+ }
+ });
+ channel.getWriteSetter().set(new ChannelListener<ConnectedStreamChannel>() {
+ public void handleEvent(final ConnectedStreamChannel channel) {
+ try {
+ System.out.println("Write: " + lastRead);
+ ByteBuffer buffer = ByteBuffer.wrap(lastRead.getBytes("UTF-8"));
+ channel.write(buffer);
+ channel.close();
+ } catch (Throwable t) {
+ throw new RuntimeException("write error", t);
+ }
+ }
+ });
+ channel.resumeReads();
+ }
+ };
+
+ Xnio xnio =
+ Xnio.getInstance();
+ XnioWorker worker =
+ xnio.createWorker(OptionMap.create(Options.WORKER_WRITE_THREADS, 2, Options.WORKER_READ_THREADS, 2));
+ InetSocketAddress address =
+ new InetSocketAddress(Inet4Address.getByName(SERVER_HOST), SERVER_PORT);
+ ChannelListener<? super AcceptingChannel<ConnectedStreamChannel>> acceptListener =
+ ChannelListeners.<ConnectedStreamChannel>openListenerAdapter(serverListener);
+ OptionMap optionMap =
+ OptionMap.create(Options.REUSE_ADDRESSES, Boolean.TRUE);
+
+ AcceptingChannel<? extends ConnectedStreamChannel> server =
+ worker.createStreamServer(address,
+ acceptListener,
+ OptionMap.create(Options.REUSE_ADDRESSES, Boolean.TRUE));
+
+ server.resumeAccepts();
+
+ //server.close();
}
}
|
nekop/java-examples
|
c3d1a05e3e6698b2e092255f1c697cafa65eb75b
|
Add xnio3-example
|
diff --git a/xnio3-example/pom.xml b/xnio3-example/pom.xml
new file mode 100644
index 0000000..e1d1637
--- /dev/null
+++ b/xnio3-example/pom.xml
@@ -0,0 +1,45 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>xnio3-example</artifactId>
+ <name>xnio3-example</name>
+ <version>1.0</version>
+ <packaging>jar</packaging>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.xnio</groupId>
+ <artifactId>xnio-api</artifactId>
+ <version>3.0.4.GA</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.xnio</groupId>
+ <artifactId>xnio-nio</artifactId>
+ <version>3.0.4.GA</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
+
diff --git a/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
new file mode 100644
index 0000000..3924311
--- /dev/null
+++ b/xnio3-example/src/main/java/jp/programmers/xnio3/examples/EchoServer.java
@@ -0,0 +1,9 @@
+package jp.programmers.xnio3.examples;
+
+import org.xnio.Xnio;
+
+public class EchoServer {
+ public static void main(String[] args) throws Exception {
+ Xnio xnio = Xnio.getInstance();
+ }
+}
|
nekop/java-examples
|
677d81cd5ebba498ef61b8a64c0b742529bfce21
|
Add appclient
|
diff --git a/as7-ear/appclient/pom.xml b/as7-ear/appclient/pom.xml
new file mode 100644
index 0000000..3eebd6f
--- /dev/null
+++ b/as7-ear/appclient/pom.xml
@@ -0,0 +1,46 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-appclient</artifactId>
+ <name>as7-ear-appclient</name>
+ <version>1.0</version>
+ <packaging>app-client</packaging>
+
+ <parent>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear</artifactId>
+ <version>1.0</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <dependencies>
+ <dependency>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-ejb</artifactId>
+ <version>1.0</version>
+ <type>ejb</type>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-acr-plugin</artifactId>
+ <version>1.0</version>
+ <extensions>true</extensions>
+ <configuration>
+ <archive>
+ <manifest>
+ <mainClass>jp.programmers.as7.examples.HelloClient</mainClass>
+ </manifest>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+
diff --git a/as7-ear/appclient/src/main/java/jp/programmers/as7/examples/HelloClient.java b/as7-ear/appclient/src/main/java/jp/programmers/as7/examples/HelloClient.java
new file mode 100644
index 0000000..0ac7a2d
--- /dev/null
+++ b/as7-ear/appclient/src/main/java/jp/programmers/as7/examples/HelloClient.java
@@ -0,0 +1,14 @@
+package jp.programmers.as7.examples;
+
+import javax.ejb.EJB;
+
+public class HelloClient {
+
+ @EJB
+ static Hello hello;
+
+ public static void main(String[] args) throws Exception {
+ hello.hello();
+ }
+
+}
diff --git a/as7-ear/ear/pom.xml b/as7-ear/ear/pom.xml
index b8da34d..e2f1360 100644
--- a/as7-ear/ear/pom.xml
+++ b/as7-ear/ear/pom.xml
@@ -1,76 +1,86 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear-ear</artifactId>
<name>as7-ear-ear</name>
<version>1.0</version>
<packaging>ear</packaging>
<parent>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear</artifactId>
<version>1.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear-ejb</artifactId>
<version>1.0</version>
<type>ejb</type>
</dependency>
<dependency>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear-web</artifactId>
<version>1.0</version>
<type>war</type>
</dependency>
+ <dependency>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-appclient</artifactId>
+ <version>1.0</version>
+ <type>app-client</type>
+ </dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-ear-plugin</artifactId>
<version>2.7</version>
<configuration>
<finalName>as7-ear</finalName>
<version>6</version>
<modules>
<webModule>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear-web</artifactId>
</webModule>
<ejbModule>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear-ejb</artifactId>
</ejbModule>
+ <appClientModule>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-appclient</artifactId>
+ </appClientModule>
</modules>
</configuration>
</plugin>
<plugin>
<groupId>org.jboss.as.plugins</groupId>
<artifactId>jboss-as-maven-plugin</artifactId>
<version>7.1.1.Final</version>
<configuration>
<filename>as7-ear</filename>
<skip>false</skip>
</configuration>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.1</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
diff --git a/as7-ear/pom.xml b/as7-ear/pom.xml
index 5954871..5e9802d 100644
--- a/as7-ear/pom.xml
+++ b/as7-ear/pom.xml
@@ -1,56 +1,56 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear</artifactId>
<name>as7-ear</name>
<version>1.0</version>
<packaging>pom</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.jboss.spec</groupId>
<artifactId>jboss-javaee-6.0</artifactId>
<version>3.0.1.Final</version>
<type>pom</type>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
- <finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.jboss.as.plugins</groupId>
<artifactId>jboss-as-maven-plugin</artifactId>
<version>7.1.1.Final</version>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>ejb</module>
+ <module>appclient</module>
<module>web</module>
<module>ear</module>
</modules>
</project>
|
nekop/java-examples
|
752b4effa2cf38df2ec441df7b17535e894a438c
|
Fix indent
|
diff --git a/as7-ear/ejb/pom.xml b/as7-ear/ejb/pom.xml
index 8e27637..085dabf 100644
--- a/as7-ear/ejb/pom.xml
+++ b/as7-ear/ejb/pom.xml
@@ -1,33 +1,33 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear-ejb</artifactId>
<name>as7-ear-ejb</name>
<version>1.0</version>
<packaging>ejb</packaging>
<parent>
<groupId>jp.programmers</groupId>
<artifactId>as7-ear</artifactId>
<version>1.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
-
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-ejb-plugin</artifactId>
<version>2.3</version>
<configuration>
<ejbVersion>3.0</ejbVersion>
</configuration>
</plugin>
</plugins>
</build>
+
</project>
|
nekop/java-examples
|
bbc465fe12fce457ea239e49bf6eacee7860bdd5
|
Fix SFSB remove method not being called
|
diff --git a/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java b/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java
index 09d8281..73f89e8 100644
--- a/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java
+++ b/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java
@@ -1,21 +1,22 @@
package jp.programmers.jboss.ejb;
import javax.ejb.Startup;
import javax.ejb.Singleton;
import javax.ejb.EJB;
import javax.annotation.PostConstruct;
@Startup
@Singleton
public class StartupBean {
@EJB
private StatefulBean sfsb;
@PostConstruct
public void init() {
System.out.println("StatefulBean.init()");
sfsb.test();
+ sfsb.remove();
}
}
|
nekop/java-examples
|
ac3ae9084b6a9d3cdaf7f730ec92f8451a5541a5
|
Add as7 ear example
|
diff --git a/as7-ear/ear/pom.xml b/as7-ear/ear/pom.xml
new file mode 100644
index 0000000..b8da34d
--- /dev/null
+++ b/as7-ear/ear/pom.xml
@@ -0,0 +1,76 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-ear</artifactId>
+ <name>as7-ear-ear</name>
+ <version>1.0</version>
+ <packaging>ear</packaging>
+
+ <parent>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear</artifactId>
+ <version>1.0</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <dependencies>
+ <dependency>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-ejb</artifactId>
+ <version>1.0</version>
+ <type>ejb</type>
+ </dependency>
+ <dependency>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-web</artifactId>
+ <version>1.0</version>
+ <type>war</type>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-ear-plugin</artifactId>
+ <version>2.7</version>
+ <configuration>
+ <finalName>as7-ear</finalName>
+ <version>6</version>
+ <modules>
+ <webModule>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-web</artifactId>
+ </webModule>
+ <ejbModule>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-ejb</artifactId>
+ </ejbModule>
+ </modules>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jboss.as.plugins</groupId>
+ <artifactId>jboss-as-maven-plugin</artifactId>
+ <version>7.1.1.Final</version>
+ <configuration>
+ <filename>as7-ear</filename>
+ <skip>false</skip>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.1</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
+
diff --git a/as7-ear/ejb/pom.xml b/as7-ear/ejb/pom.xml
new file mode 100644
index 0000000..8e27637
--- /dev/null
+++ b/as7-ear/ejb/pom.xml
@@ -0,0 +1,33 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-ejb</artifactId>
+ <name>as7-ear-ejb</name>
+ <version>1.0</version>
+ <packaging>ejb</packaging>
+
+ <parent>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear</artifactId>
+ <version>1.0</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-ejb-plugin</artifactId>
+ <version>2.3</version>
+ <configuration>
+ <ejbVersion>3.0</ejbVersion>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+
diff --git a/as7-ear/ejb/src/main/java/jp/programmers/as7/examples/Hello.java b/as7-ear/ejb/src/main/java/jp/programmers/as7/examples/Hello.java
new file mode 100644
index 0000000..b20d40a
--- /dev/null
+++ b/as7-ear/ejb/src/main/java/jp/programmers/as7/examples/Hello.java
@@ -0,0 +1,12 @@
+package jp.programmers.as7.examples;
+
+import javax.ejb.Timer;
+
+public interface Hello {
+ public String hello();
+ public String hello(String name);
+ public void ejbTimeout(Timer timer);
+ public void initTimer();
+ public void exception();
+ public void sleep(long msec);
+}
diff --git a/as7-ear/ejb/src/main/java/jp/programmers/as7/examples/HelloSLSB.java b/as7-ear/ejb/src/main/java/jp/programmers/as7/examples/HelloSLSB.java
new file mode 100644
index 0000000..973b48e
--- /dev/null
+++ b/as7-ear/ejb/src/main/java/jp/programmers/as7/examples/HelloSLSB.java
@@ -0,0 +1,49 @@
+package jp.programmers.as7.examples;
+
+import javax.ejb.Local;
+import javax.ejb.Remote;
+import javax.ejb.Stateless;
+import javax.ejb.Timeout;
+import javax.ejb.Timer;
+import javax.ejb.SessionContext;
+import javax.annotation.Resource;
+
+@Remote
+@Stateless
+public class HelloSLSB implements Hello {
+
+ @Resource
+ SessionContext ctx;
+
+ public String hello() {
+ System.out.println("HelloSLSB#hello()");
+ return this.hello("world");
+ }
+
+ public String hello(String name) {
+ System.out.println("HelloSLSB#hello(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+
+ @Timeout
+ public void ejbTimeout(Timer timer) {
+ System.out.println("HelloSLSB#ejbTimeout(Timer)");
+ System.out.println("timer=" + timer);
+ }
+
+ public void initTimer() {
+ ctx.getTimerService().createTimer(0, 20 * 1000, null);
+ }
+
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+ public void sleep(long msec) {
+ try {
+ Thread.sleep(msec);
+ } catch (InterruptedException ignore) { }
+ }
+
+}
diff --git a/as7-ear/pom.xml b/as7-ear/pom.xml
new file mode 100644
index 0000000..5954871
--- /dev/null
+++ b/as7-ear/pom.xml
@@ -0,0 +1,56 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear</artifactId>
+ <name>as7-ear</name>
+ <version>1.0</version>
+ <packaging>pom</packaging>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.spec</groupId>
+ <artifactId>jboss-javaee-6.0</artifactId>
+ <version>3.0.1.Final</version>
+ <type>pom</type>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jboss.as.plugins</groupId>
+ <artifactId>jboss-as-maven-plugin</artifactId>
+ <version>7.1.1.Final</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <modules>
+ <module>ejb</module>
+ <module>web</module>
+ <module>ear</module>
+ </modules>
+
+</project>
+
diff --git a/as7-ear/web/pom.xml b/as7-ear/web/pom.xml
new file mode 100644
index 0000000..6517b35
--- /dev/null
+++ b/as7-ear/web/pom.xml
@@ -0,0 +1,32 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear-web</artifactId>
+ <name>as7-ear-web</name>
+ <version>1.0</version>
+ <packaging>war</packaging>
+
+ <parent>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ear</artifactId>
+ <version>1.0</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-war-plugin</artifactId>
+ <version>2.1.1</version>
+ <configuration>
+ <failOnMissingWebXml>false</failOnMissingWebXml>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
+
|
nekop/java-examples
|
645a7bea006907c43ed781085e0981db3617bc8b
|
Add as7-ejb-startup
|
diff --git a/as7-ejb-startup/pom.xml b/as7-ejb-startup/pom.xml
new file mode 100644
index 0000000..b68b4df
--- /dev/null
+++ b/as7-ejb-startup/pom.xml
@@ -0,0 +1,43 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-ejb-startup</artifactId>
+ <name>as7-ejb-startup</name>
+ <version>1.0</version>
+ <!-- packaging is *not* ejb, since we don't need ejb deployment descriptors anymore -->
+ <packaging>jar</packaging>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.spec</groupId>
+ <artifactId>jboss-javaee-6.0</artifactId>
+ <version>3.0.1.Final</version>
+ <type>pom</type>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
+
diff --git a/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java b/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java
new file mode 100644
index 0000000..09d8281
--- /dev/null
+++ b/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StartupBean.java
@@ -0,0 +1,21 @@
+package jp.programmers.jboss.ejb;
+
+import javax.ejb.Startup;
+import javax.ejb.Singleton;
+import javax.ejb.EJB;
+import javax.annotation.PostConstruct;
+
+@Startup
+@Singleton
+public class StartupBean {
+
+ @EJB
+ private StatefulBean sfsb;
+
+ @PostConstruct
+ public void init() {
+ System.out.println("StatefulBean.init()");
+ sfsb.test();
+ }
+
+}
diff --git a/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StatefulBean.java b/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StatefulBean.java
new file mode 100644
index 0000000..1a11af3
--- /dev/null
+++ b/as7-ejb-startup/src/main/java/jp/programmers/jboss/ejb/StatefulBean.java
@@ -0,0 +1,29 @@
+package jp.programmers.jboss.ejb;
+
+import javax.ejb.Stateful;
+import javax.ejb.Remove;
+import javax.ejb.SessionSynchronization;
+import javax.ejb.EJBException;
+
+@Stateful
+public class StatefulBean implements SessionSynchronization {
+
+ public void test() {
+ System.out.println("StatefulBean.test()");
+ }
+
+ @Remove
+ public void remove() {
+ System.out.println("StatefulBean.remove()");
+ }
+
+ public void afterBegin() throws EJBException, java.rmi.RemoteException {
+ System.out.println("StatefulBean.afterBegin()");
+ }
+ public void beforeCompletion() throws EJBException, java.rmi.RemoteException {
+ System.out.println("StatefulBean.beforeCompletion()");
+ }
+ public void afterCompletion(boolean committed) throws EJBException, java.rmi.RemoteException {
+ System.out.println("StatefulBean.afterCompletion()");
+ }
+}
|
nekop/java-examples
|
1a8e690433b3a37cba5f82803dafab89f92615b1
|
Add as7-hello-service
|
diff --git a/as7-hello-service/pom.xml b/as7-hello-service/pom.xml
new file mode 100644
index 0000000..e0d8201
--- /dev/null
+++ b/as7-hello-service/pom.xml
@@ -0,0 +1,31 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>as7-hello-service</artifactId>
+ <name>as7-hello-service</name>
+ <version>1.0</version>
+ <packaging>jar</packaging>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java
new file mode 100644
index 0000000..ee56185
--- /dev/null
+++ b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/Hello.java
@@ -0,0 +1,10 @@
+package jp.programmers.jboss.hello;
+
+public class Hello implements HelloMBean {
+ public void start() throws Exception {
+ System.out.println("Hello.start()");
+ }
+ public void stop() throws Exception {
+ System.out.println("Hello.stop()");
+ }
+}
diff --git a/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java
new file mode 100644
index 0000000..370b25e
--- /dev/null
+++ b/as7-hello-service/src/main/java/jp/programmers/jboss/hello/HelloMBean.java
@@ -0,0 +1,6 @@
+package jp.programmers.jboss.hello;
+
+public interface HelloMBean {
+ void start() throws Exception;
+ void stop() throws Exception;
+}
diff --git a/as7-hello-service/src/main/resources/META-INF/jboss-beans.xml.example b/as7-hello-service/src/main/resources/META-INF/jboss-beans.xml.example
new file mode 100644
index 0000000..a421e28
--- /dev/null
+++ b/as7-hello-service/src/main/resources/META-INF/jboss-beans.xml.example
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<deployment xmlns="urn:jboss:bean-deployer:2.0">
+ <bean name="Hello"
+ class="jp.programmers.jboss.hello.Hello">
+ </bean>
+</deployment>
diff --git a/as7-hello-service/src/main/resources/META-INF/jboss-service.xml b/as7-hello-service/src/main/resources/META-INF/jboss-service.xml
new file mode 100644
index 0000000..9a29244
--- /dev/null
+++ b/as7-hello-service/src/main/resources/META-INF/jboss-service.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<server>
+ <mbean code="jp.programmers.jboss.hello.Hello" name="example:service=Hello" />
+</server>
|
nekop/java-examples
|
137570ac01a0445a9b41a44acc9bf7c5645d7596
|
Add rest example
|
diff --git a/rest/pom.xml b/rest/pom.xml
new file mode 100644
index 0000000..0cf5557
--- /dev/null
+++ b/rest/pom.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-rest</artifactId>
+ <packaging>war</packaging>
+ <name>example-rest</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>javax</groupId>
+ <artifactId>javaee-web-api</artifactId>
+ <version>6.0</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-war-plugin</artifactId>
+ <configuration>
+ <webResources>
+ <resource>
+ <directory>${basedir}/src/main/webapp/WEB-INF</directory>
+ <targetPath>WEB-INF</targetPath>
+ <filtering>true</filtering>
+ </resource>
+ </webResources>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/rest/src/main/java/jp/programmers/examples/HelloRest.java b/rest/src/main/java/jp/programmers/examples/HelloRest.java
new file mode 100644
index 0000000..398c43a
--- /dev/null
+++ b/rest/src/main/java/jp/programmers/examples/HelloRest.java
@@ -0,0 +1,20 @@
+package jp.programmers.examples;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+
+// http://localhost:8080/example-rest/rest/hello
+
+@Path("/hello")
+public class HelloRest {
+
+ @GET
+ @Produces(MediaType.APPLICATION_XML + "; charset=UTF-8")
+ public Object hello() {
+ System.out.println("HelloSLSB#hello()");
+ return "<xml>ã¯ãã¼</xml>";
+ }
+
+}
diff --git a/rest/src/main/webapp/WEB-INF/web.xml b/rest/src/main/webapp/WEB-INF/web.xml
new file mode 100644
index 0000000..0a62efe
--- /dev/null
+++ b/rest/src/main/webapp/WEB-INF/web.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app
+ version="3.0"
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
+ http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd">
+
+ <servlet-mapping>
+ <servlet-name>javax.ws.rs.core.Application</servlet-name>
+ <url-pattern>/rest/*</url-pattern>
+ </servlet-mapping>
+<!--
+ <servlet-mapping>
+ <servlet-name>javax.ws.rs.core.Application</servlet-name>
+ <url-pattern>/hello/*</url-pattern>
+ </servlet-mapping>
+-->
+
+</web-app>
|
nekop/java-examples
|
b89854e2f38f8cddf3dcf8f6743256e1631d6416
|
Remove unused imports
|
diff --git a/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
index d1d5305..d469934 100644
--- a/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
+++ b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
@@ -1,32 +1,29 @@
package jp.programmers.examples.ejb3.slsb;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
-import java.util.Properties;
-import javax.naming.Context;
-import javax.naming.InitialContext;
public class HelloSLSBLoadClient {
public static void main(final String... args) throws Exception {
// Confirm single request works before load testing
HelloSLSBClient.main(args);
ExecutorService ex = Executors.newCachedThreadPool();
Runnable r = new Runnable() {
public void run() {
try {
HelloSLSBClient.main(args);
} catch (Exception ignore) {
}
}
};
for (int i = 0; i < 2000; i++) {
ex.execute(r);
}
ex.awaitTermination(60, TimeUnit.SECONDS);
ex.shutdown();
}
}
|
nekop/java-examples
|
0a0aff8a76563553d7c78da14ad301eb5d929f1d
|
Add jpa-managed example
|
diff --git a/jpa-managed/pom.xml b/jpa-managed/pom.xml
new file mode 100644
index 0000000..571c97e
--- /dev/null
+++ b/jpa-managed/pom.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-jpa</artifactId>
+ <packaging>jar</packaging>
+ <name>example-jpa</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>javaee</groupId>
+ <artifactId>javaee-api</artifactId>
+ <version>5</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/jpa-managed/src/main/java/jp/programmers/examples/jpa/Cat.java b/jpa-managed/src/main/java/jp/programmers/examples/jpa/Cat.java
new file mode 100644
index 0000000..8a9b9d1
--- /dev/null
+++ b/jpa-managed/src/main/java/jp/programmers/examples/jpa/Cat.java
@@ -0,0 +1,31 @@
+package jp.programmers.examples.jpa;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+@Entity
+public class Cat {
+
+ @Id @GeneratedValue
+ private Integer id;
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+
+}
diff --git a/jpa-managed/src/main/resources/META-INF/persistence.xml b/jpa-managed/src/main/resources/META-INF/persistence.xml
new file mode 100644
index 0000000..7b3a950
--- /dev/null
+++ b/jpa-managed/src/main/resources/META-INF/persistence.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<persistence xmlns="http://java.sun.com/xml/ns/persistence"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_1_0.xsd"
+ version="1.0">
+ <persistence-unit name="test" transaction-type="JTA">
+ <provider>org.hibernate.ejb.HibernatePersistence</provider>
+ <jta-data-source>java:/DefaultDS</jta-data-source>
+ <properties>
+ <property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
+ <property name="hibernate.max_fetch_depth" value="3"/>
+ <property name="hibernate.hbm2ddl.auto" value="create-drop"/>
+ <property name="hibernate.show_sql" value="true"/>
+ </properties>
+ </persistence-unit>
+</persistence>
|
nekop/java-examples
|
b65a6a28243603b7d5abdaa1b8f240cec1891f1a
|
Small updates
|
diff --git a/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java b/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java
index 96c7fd9..9a4265a 100644
--- a/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java
+++ b/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java
@@ -1,70 +1,74 @@
package jp.programmers.examples.ejb2.cmp;
import java.rmi.RemoteException;
import javax.ejb.CreateException;
import javax.ejb.EJBException;
import javax.ejb.EntityBean;
import javax.ejb.EntityContext;
import javax.ejb.RemoveException;
/**
* @ejb.bean
* name="Student"
* type="CMP"
* cmp-version="2.x"
* view-type="local"
* reentrant="false"
* local-jndi-name="Student"
* @ejb.util generate="physical"
* @ejb.pk generate="true"
* @ejb.persistence table-name="STUDENT"
* @jboss.persistence
* create-table="true"
* remove-table="false"
*/
public abstract class StudentEntityBean implements EntityBean {
private EntityContext ctx;
/**
* @ejb.pk-field
* @ejb.persistent-field
* @ejb.interface-method
* @ejb.persistence column-name="ID"
*/
public abstract Integer getId();
public abstract void setId(Integer id);
/**
* @ejb.persistent-field
* @ejb.interface-method
* @ejb.persistence column-name="NAME"
*/
public abstract String getName();
+
+ /**
+ * @ejb.interface-method
+ */
public abstract void setName(String name);
// EntityBean implementation ------------------------------------
/**
* @ejb.create-method
*/
public StudentPK ejbCreate(StudentPK pk) throws CreateException {
setId(pk.getId());
return null;
}
public void ejbPostCreate(StudentPK pk) {}
public void ejbActivate() throws EJBException, RemoteException {}
public void ejbLoad() throws EJBException, RemoteException {}
public void ejbPassivate() throws EJBException, RemoteException {}
public void ejbRemove() throws RemoveException, EJBException, RemoteException {}
public void ejbStore() throws EJBException, RemoteException {}
public void setEntityContext(EntityContext ctx) throws EJBException, RemoteException {
this.ctx = ctx;
}
public void unsetEntityContext() throws EJBException, RemoteException {
this.ctx = null;
}
}
|
nekop/java-examples
|
571d5720517d96598e5cd7224f6fb694d38a6cac
|
Small updates
|
diff --git a/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java
index 47a68e3..15b6677 100644
--- a/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java
+++ b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java
@@ -1,30 +1,30 @@
package jp.programmers.examples.ejb2.slsb;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
public class HelloSLSBClient {
public static final String DEFAULT_PROVIDER_URL = "localhost:1099";
- public static void main(String[] args) throws Exception {
+ public static void main(String... args) throws Exception {
String providerUrl = DEFAULT_PROVIDER_URL;
if (args.length != 0) {
providerUrl = args[0];
}
String jndiName = "HelloSLSB";
Properties props = new Properties();
props.put(Context.INITIAL_CONTEXT_FACTORY,
"org.jnp.interfaces.NamingContextFactory");
props.put(Context.URL_PKG_PREFIXES,
"org.jboss.naming:org.jnp.interfaces");
props.put(Context.PROVIDER_URL, providerUrl);
InitialContext context = new InitialContext(props);
HelloSLSBHome helloHome = (HelloSLSBHome)context.lookup(jndiName);
HelloSLSB hello = helloHome.create();
hello.hello();
}
}
diff --git a/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBLoadClient.java b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBLoadClient.java
new file mode 100644
index 0000000..beb5882
--- /dev/null
+++ b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBLoadClient.java
@@ -0,0 +1,29 @@
+package jp.programmers.examples.ejb2.slsb;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+public class HelloSLSBLoadClient {
+
+ public static void main(final String... args) throws Exception {
+ // Confirm single request works before load testing
+ HelloSLSBClient.main(args);
+
+ ExecutorService ex = Executors.newCachedThreadPool();
+ Runnable r = new Runnable() {
+ public void run() {
+ try {
+ HelloSLSBClient.main(args);
+ } catch (Exception ignore) {
+ }
+ }
+ };
+ for (int i = 0; i < 4000; i++) {
+ ex.execute(r);
+ }
+ ex.awaitTermination(60, TimeUnit.SECONDS);
+ ex.shutdown();
+ }
+
+}
|
nekop/java-examples
|
a1a435a8b2dc9366c7c23df30580e3329686f8b7
|
Add ability to set maxWaiters
|
diff --git a/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java b/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java
index 1b219a3..6346ff3 100644
--- a/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java
+++ b/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java
@@ -1,100 +1,110 @@
package jp.programmers.examples;
import java.io.IOException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class SemaphoreFilter implements Filter {
public static final String CONCURRENCY = "CONCURRENCY";
public static final String FAIR = "FAIR";
public static final String BLOCK_TIMEOUT_MILLIS = "BLOCK_TIMEOUT_MILLIS";
+ public static final String MAX_WAITERS = "MAX_WAITERS";
protected FilterConfig config;
protected Semaphore semaphore;
protected int concurrency = 10;
+ protected int maxWaiters = 10;
protected boolean fair;
protected long blockTimeoutMillis = 0;
public void init(FilterConfig config) throws ServletException {
this.config = config;
if (config.getInitParameter(CONCURRENCY) != null) {
try {
concurrency = Integer.parseInt(config.getInitParameter(CONCURRENCY));
} catch (NumberFormatException ex) {
- // ignore, todo logging
+ config.getServletContext().log(this.getClass().getSimpleName() + " Invalid CONCURRENCY value", ex);
}
}
if (config.getInitParameter(FAIR) != null) {
fair = "true".equalsIgnoreCase(config.getInitParameter(FAIR));
}
if (config.getInitParameter(BLOCK_TIMEOUT_MILLIS) != null) {
try {
blockTimeoutMillis = Long.parseLong(config.getInitParameter(BLOCK_TIMEOUT_MILLIS));
} catch (NumberFormatException ex) {
- // ignore, todo logging
+ config.getServletContext().log(this.getClass().getSimpleName() + " Invalid BLOCK_TIMEOUT_MILLIS value", ex);
+ }
+ }
+ if (config.getInitParameter(MAX_WAITERS) != null) {
+ try {
+ maxWaiters = Integer.parseInt(config.getInitParameter(MAX_WAITERS));
+ } catch (NumberFormatException ex) {
+ config.getServletContext().log(this.getClass().getSimpleName() + " Invalid MAX_WAITERS value", ex);
}
}
this.semaphore = new Semaphore(concurrency, fair);
}
public void destroy() { }
public void doFilter(ServletRequest request,
ServletResponse response,
FilterChain chain)
throws IOException, ServletException {
if (controlConcurrency(request, response)) {
boolean shouldRelease = true;
try {
- if (!semaphore.tryAcquire(blockTimeoutMillis, TimeUnit.MILLISECONDS)) {
+ if (semaphore.getQueueLength() >= maxWaiters ||
+ !semaphore.tryAcquire(blockTimeoutMillis, TimeUnit.MILLISECONDS)) {
shouldRelease = false;
permitDenied(request, response);
return;
}
chain.doFilter(request, response);
} catch (InterruptedException e) {
shouldRelease = false;
permitDenied(request, response);
return;
} finally {
if (shouldRelease) {
semaphore.release();
}
}
} else {
chain.doFilter(request, response);
}
}
/**
* Subclass friendly method to add conditions.
*/
protected boolean controlConcurrency(ServletRequest request,
ServletResponse response)
throws IOException, ServletException {
return true;
}
/**
* Subclass friendly method to add error handling when a permit isn't granted.
*/
protected void permitDenied(ServletRequest request,
ServletResponse response)
throws IOException, ServletException {
// We are busy, send 503 Service Temporary Unavailable
if (response instanceof HttpServletResponse) {
((HttpServletResponse)response).sendError(503);
} else {
- // todo, what should we do?
+ throw new ServletException("Server is busy");
}
}
}
|
nekop/java-examples
|
d60cf3ace0eaeb4fdd18a5e6fcf2a96c4fb178a3
|
Add SemaphoreFilter
|
diff --git a/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java b/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java
new file mode 100644
index 0000000..1b219a3
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/SemaphoreFilter.java
@@ -0,0 +1,100 @@
+package jp.programmers.examples;
+
+import java.io.IOException;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+
+public class SemaphoreFilter implements Filter {
+
+ public static final String CONCURRENCY = "CONCURRENCY";
+ public static final String FAIR = "FAIR";
+ public static final String BLOCK_TIMEOUT_MILLIS = "BLOCK_TIMEOUT_MILLIS";
+
+ protected FilterConfig config;
+ protected Semaphore semaphore;
+ protected int concurrency = 10;
+ protected boolean fair;
+ protected long blockTimeoutMillis = 0;
+
+ public void init(FilterConfig config) throws ServletException {
+ this.config = config;
+ if (config.getInitParameter(CONCURRENCY) != null) {
+ try {
+ concurrency = Integer.parseInt(config.getInitParameter(CONCURRENCY));
+ } catch (NumberFormatException ex) {
+ // ignore, todo logging
+ }
+ }
+ if (config.getInitParameter(FAIR) != null) {
+ fair = "true".equalsIgnoreCase(config.getInitParameter(FAIR));
+ }
+ if (config.getInitParameter(BLOCK_TIMEOUT_MILLIS) != null) {
+ try {
+ blockTimeoutMillis = Long.parseLong(config.getInitParameter(BLOCK_TIMEOUT_MILLIS));
+ } catch (NumberFormatException ex) {
+ // ignore, todo logging
+ }
+ }
+ this.semaphore = new Semaphore(concurrency, fair);
+ }
+
+ public void destroy() { }
+
+ public void doFilter(ServletRequest request,
+ ServletResponse response,
+ FilterChain chain)
+ throws IOException, ServletException {
+ if (controlConcurrency(request, response)) {
+ boolean shouldRelease = true;
+ try {
+ if (!semaphore.tryAcquire(blockTimeoutMillis, TimeUnit.MILLISECONDS)) {
+ shouldRelease = false;
+ permitDenied(request, response);
+ return;
+ }
+ chain.doFilter(request, response);
+ } catch (InterruptedException e) {
+ shouldRelease = false;
+ permitDenied(request, response);
+ return;
+ } finally {
+ if (shouldRelease) {
+ semaphore.release();
+ }
+ }
+ } else {
+ chain.doFilter(request, response);
+ }
+ }
+
+ /**
+ * Subclass friendly method to add conditions.
+ */
+ protected boolean controlConcurrency(ServletRequest request,
+ ServletResponse response)
+ throws IOException, ServletException {
+ return true;
+ }
+
+ /**
+ * Subclass friendly method to add error handling when a permit isn't granted.
+ */
+ protected void permitDenied(ServletRequest request,
+ ServletResponse response)
+ throws IOException, ServletException {
+ // We are busy, send 503 Service Temporary Unavailable
+ if (response instanceof HttpServletResponse) {
+ ((HttpServletResponse)response).sendError(503);
+ } else {
+ // todo, what should we do?
+ }
+ }
+
+}
|
nekop/java-examples
|
28ace78843eee7719cddfd5094ba6bce55fa5cb7
|
Remove confusing comment and System.out.println
|
diff --git a/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java b/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java
index 695a801..7ad9d4e 100644
--- a/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java
+++ b/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java
@@ -1,92 +1,84 @@
package jp.programmers.examples;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
-/**
- * A deferred response wrapper.
- *
- * Do not call getWriter() before setContentLength(). Otherwise
- * the subsequent setContentLength() call will simply be ignored.
- */
public class ContentLengthResponseWrapper extends HttpServletResponseWrapper {
private HttpServletResponse response;
private boolean isOutputStream = false;
private ServletOutputStream sout;
private ByteArrayOutputStream bout;
private boolean isWriter = false;
private StringWriter sw;
private PrintWriter pw;
public ContentLengthResponseWrapper(HttpServletResponse response) {
super(response);
this.response = response;
bout = new ByteArrayOutputStream();
sout = new ServletOutputStream() {
public void write(int b) throws IOException {
bout.write(b);
}
};
sw = new StringWriter();
pw = new PrintWriter(sw);
}
public ServletOutputStream getOutputStream() throws IOException {
if (isWriter) {
// Invalid call sequence, propagate it to throw exception
response.getWriter();
response.getOutputStream();
}
isOutputStream = true;
return sout;
}
public PrintWriter getWriter() throws IOException {
if (isOutputStream) {
// Invalid call sequence, propagate it to throw exception
response.getOutputStream();
response.getWriter();
}
isWriter = true;
return pw;
}
public void flushResponse() throws IOException {
if (isOutputStream) {
try {
sout.flush();
} catch (IOException ignore) { }
byte[] result = bout.toByteArray();
int length = result.length;
- System.out.println("length=" + length);
response.setContentLength(length);
response.getOutputStream().write(result);
} else if (isWriter) {
pw.flush();
String s = sw.toString();
String charset = response.getCharacterEncoding();
if (charset == null) {
charset = "ISO-8859-1";
}
int length = s.getBytes(charset).length;
response.setContentLength(length);
- System.out.println("length=" + length);
response.getWriter().write(s);
}
}
}
|
nekop/java-examples
|
608bb2dbe5af8dbf2e3ed982b6ef53779b6fcc24
|
Make numLatch an instance variable
|
diff --git a/jab/src/main/java/jp/programmers/jab/NettyJAB.java b/jab/src/main/java/jp/programmers/jab/NettyJAB.java
index d5fa96e..0998515 100644
--- a/jab/src/main/java/jp/programmers/jab/NettyJAB.java
+++ b/jab/src/main/java/jp/programmers/jab/NettyJAB.java
@@ -1,168 +1,164 @@
package jp.programmers.jab;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
import org.jboss.netty.handler.codec.http.DefaultHttpRequest;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.codec.http.HttpClientCodec;
import org.jboss.netty.handler.codec.http.HttpContentDecompressor;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.jboss.netty.handler.logging.LoggingHandler;
import org.jboss.netty.logging.InternalLogLevel;
public abstract class NettyJAB extends BaseJAB {
ExecutorService executor;
ClientBootstrap bootstrap;
HttpClientPipelineFactory pipelineFactory;
InetSocketAddress address;
URI uri;
String scheme;
String host;
int port;
+ CountDownLatch numLatch;
AtomicInteger numCount = new AtomicInteger();
public void init(JABOptions options) throws Exception {
super.init(options);
if (options.getThreads() > 0) {
executor = Executorz.newFixedThreadPoolAndPrefill(options.getThreads());
} else {
executor = Executorz.newFixedThreadPoolAndPrefill(options.getConcurrency());
}
uri = new URI(options.getArguments().get(0));
scheme = uri.getScheme() == null ? "http" : uri.getScheme();
host = uri.getHost() == null ? "localhost" : uri.getHost();
port = uri.getPort();
if (port == -1) {
if (scheme.equalsIgnoreCase("http")) {
port = 80;
}
}
address = new InetSocketAddress(host, port);
bootstrap = new ClientBootstrap(createChannelFactory(executor));
pipelineFactory = new HttpClientPipelineFactory();
bootstrap.setPipelineFactory(pipelineFactory);
}
protected abstract ChannelFactory createChannelFactory(ExecutorService executor);
protected void fire(int num) throws Exception {
numCount.set(0);
- CountDownLatch numLatch = new CountDownLatch(num);
- pipelineFactory.latch = numLatch;
+ numLatch = new CountDownLatch(num);
// Note this loop should be concurrency, not num
int concurrency = options.getConcurrency();
for (int i = 0; i < concurrency; i++) {
submit();
}
numLatch.await(300, TimeUnit.SECONDS);
}
private void submit() {
if (numCount.incrementAndGet() <= options.getNum()) {
ChannelFuture future = bootstrap.connect(address);
}
}
protected void end() throws Exception {
bootstrap.releaseExternalResources();
}
class HttpClientPipelineFactory implements ChannelPipelineFactory {
- CountDownLatch latch;
@Override
public ChannelPipeline getPipeline() throws Exception {
ChannelPipeline pipeline = Channels.pipeline();
//pipeline.addLast("log", new LoggingHandler(InternalLogLevel.INFO));
pipeline.addLast("codec", new HttpClientCodec());
pipeline.addLast("inflater", new HttpContentDecompressor());
- JABHandler jabHandler = new JABHandler();
- jabHandler.latch = this.latch;
- pipeline.addLast("handler", jabHandler);
+ pipeline.addLast("handler", new JABHandler());
return pipeline;
}
}
class JABHandler extends SimpleChannelHandler {
- CountDownLatch latch;
long start = 0;
boolean chunked = false;
boolean finish = false;
boolean error = false;
public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
start = System.currentTimeMillis();
}
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
Channel channel = e.getChannel();
HttpRequest request =
new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, uri.toASCIIString());
request.setHeader(HttpHeaders.Names.HOST, host);
request.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE);
//request.setHeader(HttpHeaders.Names.ACCEPT_ENCODING, HttpHeaders.Values.GZIP);
channel.write(request);
}
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
if (chunked) {
HttpChunk chunk = (HttpChunk) e.getMessage();
if (chunk.isLast()) {
finish = chunk.isLast();
}
return;
}
HttpResponse msg = (HttpResponse)e.getMessage();
int statusCode = msg.getStatus().getCode();
if (statusCode < 200 || 299 < statusCode) {
error = true;
Recorder.instance.error();
}
chunked = msg.isChunked();
if (!chunked) {
finish = true;
}
}
public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
long end = System.currentTimeMillis();
if (finish && !error) {
Recorder.instance.success(end - start);
}
- if (latch != null) {
- latch.countDown();
+ if (numLatch != null) {
+ numLatch.countDown();
}
submit();
}
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
error = true;
e.getCause().printStackTrace();
Recorder.instance.error();
}
}
}
diff --git a/jab/src/main/java/jp/programmers/jab/StandardJAB.java b/jab/src/main/java/jp/programmers/jab/StandardJAB.java
index cdbe61c..a8e73e6 100644
--- a/jab/src/main/java/jp/programmers/jab/StandardJAB.java
+++ b/jab/src/main/java/jp/programmers/jab/StandardJAB.java
@@ -1,88 +1,84 @@
package jp.programmers.jab;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.CountDownLatch;
import java.util.List;
import java.util.Iterator;
import java.net.URL;
import java.net.URLConnection;
import java.net.HttpURLConnection;
import java.io.BufferedInputStream;
public class StandardJAB extends BaseJAB {
ExecutorService executor;
SimpleURLConnectionTask task;
+ CountDownLatch numLatch;
public void init(JABOptions options) throws Exception {
super.init(options);
executor = Executorz.newFixedThreadPoolAndPrefill(options.getConcurrency());
URL url = new URL(options.getArguments().get(0));
task = new SimpleURLConnectionTask(url);
}
protected void fire(int num) throws Exception {
// There is no API on ExecutorService to know all tasks are finished, so we use Latch
- CountDownLatch numLatch = new CountDownLatch(num);
- task.setLatch(numLatch);
+ numLatch = new CountDownLatch(num);
for (int i = 0; i < num; i++) {
executor.submit(task);
}
numLatch.await(300, TimeUnit.SECONDS);
}
protected void end() throws Exception {
executor.shutdown();
executor.awaitTermination(30, TimeUnit.SECONDS);
}
class SimpleURLConnectionTask implements Runnable {
URL url;
- CountDownLatch latch;
public SimpleURLConnectionTask(URL url) {
this.url = url;
}
- public void setLatch(CountDownLatch latch) {
- this.latch = latch;
- }
public void run() {
long start = System.currentTimeMillis();
byte[] buff = new byte[8192];
try {
URLConnection conn = url.openConnection();
BufferedInputStream in =
new BufferedInputStream(conn.getInputStream());
try {
if (conn instanceof HttpURLConnection) {
int statusCode = ((HttpURLConnection)conn).getResponseCode();
if (statusCode < 200 || 299 < statusCode) {
Recorder.instance.error();
return;
}
}
while (true) {
int r = in.read(buff);
if (r < 1 && in.read() == -1) {
// EOF
break;
}
}
} finally {
in.close();
}
} catch (Throwable t) {
t.printStackTrace();
Recorder.instance.error();
return;
} finally {
long end = System.currentTimeMillis();
Recorder.instance.success(end - start);
- if (latch != null) {
- latch.countDown();
+ if (numLatch != null) {
+ numLatch.countDown();
}
}
}
}
}
|
nekop/java-examples
|
a3a63d6cc90094e6b83d5571b34ca4841be32dc0
|
Add Netty implementation
|
diff --git a/jab/pom.xml b/jab/pom.xml
index 573c7be..59d9752 100644
--- a/jab/pom.xml
+++ b/jab/pom.xml
@@ -1,50 +1,80 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>jp.programmers</groupId>
<artifactId>jab</artifactId>
<name>jab</name>
<version>1.0</version>
<packaging>jar</packaging>
-
-
<repositories>
<repository>
<id>jenkins-ci.org</id>
<name>Jenkins CI</name>
<url>http://maven.jenkins-ci.org/content/repositories/releases/</url>
</repository>
</repositories>
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
<dependencies>
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
<version>2.0.17</version>
</dependency>
+ <dependency>
+ <groupId>org.jboss.netty</groupId>
+ <artifactId>netty</artifactId>
+ <version>3.2.7.Final</version>
+ </dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
<configuration>
<source>1.5</source>
<target>1.5</target>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>1.2.1</version>
+ <configuration>
+ <mainClass>jp.programmers.jab.JABMain</mainClass>
+ <jvmArgs>
+ <jvmArg>-Xmn128m</jvmArg>
+ <jvmArg>-Xms1024m</jvmArg>
+ <jvmArg>-Xmx1024m</jvmArg>
+ <jvmArg>-Xss512k</jvmArg>
+ <jvmArg>-XX:PermSize=64m</jvmArg>
+ <jvmArg>-Xverify:none</jvmArg>
+ <jvmArg>-XX:+UseConcMarkSweepGC</jvmArg>
+ <jvmArg>-XX:+TieredCompilation</jvmArg>
+ <jvmArg>-XX:+UseCompressedOops</jvmArg>
+ <jvmArg>-verbose:gc</jvmArg>
+ <jvmArg>-XX:+PrintGCDetails</jvmArg>
+ <jvmArg>-XX:+PrintGCTimeStamps</jvmArg>
+ </jvmArgs>
+ </configuration>
+ </plugin>
</plugins>
</build>
</project>
diff --git a/jab/run-all.sh b/jab/run-all.sh
new file mode 100644
index 0000000..637c90e
--- /dev/null
+++ b/jab/run-all.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+TARGET_URL="http://localhost/100kb.dat"
+CONCURRENCY=800
+NUM_REQUESTS=10000
+NIO_THREADS=20
+
+echo "ab"
+ab -c $CONCURRENCY -n $NUM_REQUESTS $TARGET_URL
+echo "standard"
+sh run.sh -c $CONCURRENCY -n $NUM_REQUESTS -w $NUM_REQUESTS $TARGET_URL
+echo "netty-oio"
+sh run.sh -j netty-oio -c $CONCURRENCY -n $NUM_REQUESTS -w $NUM_REQUESTS $TARGET_URL
+echo "netty-nio"
+sh run.sh -j netty-nio -t $NIO_THREADS -c $CONCURRENCY -n $NUM_REQUESTS -w $NUM_REQUESTS $TARGET_URL
diff --git a/jab/run.sh b/jab/run.sh
index b483119..40e429f 100644
--- a/jab/run.sh
+++ b/jab/run.sh
@@ -1,7 +1,5 @@
#!/bin/sh
-ulimit -u 8192
+# $ mvn package && sh run.sh -c 400 -n 100000 http://localhost:8080/
-# $ mvn package && sh run.sh -w -c 400 -n 100000 http://localhost:8080/
-
-mvn exec:java -Dexec.mainClass=jp.programmers.jab.JAB -Dexec.args="$*"
+mvn exec:java -Dexec.args="$*"
diff --git a/jab/src/main/java/jp/programmers/jab/BaseJAB.java b/jab/src/main/java/jp/programmers/jab/BaseJAB.java
new file mode 100644
index 0000000..b1aa0f2
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/BaseJAB.java
@@ -0,0 +1,41 @@
+package jp.programmers.jab;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.CountDownLatch;
+import java.util.List;
+import java.util.Iterator;
+import java.net.URL;
+import java.net.URLConnection;
+import java.net.HttpURLConnection;
+import java.io.BufferedInputStream;
+
+public abstract class BaseJAB implements JAB {
+
+ protected JABOptions options;
+
+ public void init(JABOptions options) throws Exception {
+ this.options = options;
+ }
+
+ public void execute() throws Exception {
+ if (options.getWarmup() > 0) {
+ System.out.println("Warming up");
+ fire(options.getWarmup());
+ Recorder.instance.reset();
+ System.out.println("Done warming up");
+ }
+ System.out.println("Testing");
+ long start = System.currentTimeMillis();
+ fire(options.getNum());
+ long end = System.currentTimeMillis();
+ System.out.println("Done testing, time=" + (end - start) + "ms");
+ Recorder.instance.report();
+ end();
+ }
+
+ protected void end() throws Exception { }
+
+ protected abstract void fire(int num) throws Exception;
+}
diff --git a/jab/src/main/java/jp/programmers/jab/Executorz.java b/jab/src/main/java/jp/programmers/jab/Executorz.java
new file mode 100644
index 0000000..7df15af
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/Executorz.java
@@ -0,0 +1,25 @@
+package jp.programmers.jab;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.CountDownLatch;
+
+public class Executorz {
+ static ExecutorService newFixedThreadPoolAndPrefill(int num) throws Exception {
+ ExecutorService executor =
+ Executors.newFixedThreadPool(num);
+ final CountDownLatch latch = new CountDownLatch(1);
+ Runnable initTask = new Runnable() {
+ public void run() {
+ try {
+ latch.await();
+ } catch (Exception ignore) { }
+ }
+ };
+ for (int i = 0; i < num; i++) {
+ executor.submit(initTask);
+ }
+ latch.countDown();
+ return executor;
+ }
+}
diff --git a/jab/src/main/java/jp/programmers/jab/JAB.java b/jab/src/main/java/jp/programmers/jab/JAB.java
index 5b12eb2..150bb7d 100644
--- a/jab/src/main/java/jp/programmers/jab/JAB.java
+++ b/jab/src/main/java/jp/programmers/jab/JAB.java
@@ -1,155 +1,9 @@
package jp.programmers.jab;
-import static org.kohsuke.args4j.ExampleMode.ALL;
-import org.kohsuke.args4j.CmdLineParser;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.CountDownLatch;
-import java.util.List;
-import java.util.Iterator;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.HttpURLConnection;
-import java.io.BufferedInputStream;
+public interface JAB {
-public class JAB {
+ void init(JABOptions options) throws Exception;
- public static void main(String[] args) throws Exception {
- JABOptions options = new JABOptions();
- CmdLineParser parser = new CmdLineParser(options);
- parser.parseArgument(args);
- URL url = null;
- if (options.getArguments().isEmpty()) {
- System.err.println("Example: JAB" + parser.printExample(ALL));
- System.exit(-1);
- } else {
- url = new URL(options.getArguments().get(0));
- }
- ExecutorService executor =
- createExecutor(options.getConcurrency());
- int num = options.getNum();
- CountDownLatch numLatch = new CountDownLatch(num);
- SimpleURLConnectionTask task =
- new SimpleURLConnectionTask(url);
- task.setLatch(numLatch);
- if (options.isWarmup()) {
- System.out.println("Warming up");
- for (int i = 0; i < num; i++) {
- executor.submit(task);
- }
- numLatch.await(30, TimeUnit.SECONDS);
- reset();
- System.out.println("Done warming up");
- }
- task.setLatch(null);
- System.out.println("Testing");
- long start = System.currentTimeMillis();
- for (int i = 0; i < num; i++) {
- executor.submit(task);
- }
- executor.shutdown();
- executor.awaitTermination(30, TimeUnit.SECONDS);
- long end = System.currentTimeMillis();
- System.out.println("Done testing, time=" + (end - start) + "ms");
- report();
- }
+ void execute() throws Exception;
- public static AtomicInteger successCount = new AtomicInteger(0);
- public static AtomicInteger errorCount = new AtomicInteger(0);
- public static List<Long> times = new CopyOnWriteArrayList<Long>();
- //public static List<Long> times =
- // Collections.synchronizedList(new ArrayList<Long>());
-
- public static void success(long time) {
- successCount.incrementAndGet();
- times.add(time);
- }
-
- public static void error() {
- errorCount.incrementAndGet();
- }
-
- public static void reset() {
- successCount.set(0);
- errorCount.set(0);
- times.clear();
- }
-
- public static void report() {
- long total = 0;
- for (Iterator<Long> it = times.iterator(); it.hasNext(); ) {
- total += it.next();
- }
- double average = total / (double)successCount.get();
- System.out.println("successCount=" + successCount);
- System.out.println("errorCount=" + errorCount);
- System.out.println("average=" + average);
- }
-
- private static ExecutorService createExecutor(int num) throws Exception {
- ExecutorService executor =
- Executors.newFixedThreadPool(num);
- final CountDownLatch latch = new CountDownLatch(1);
- Runnable initTask = new Runnable() {
- public void run() {
- try {
- latch.await();
- } catch (Exception ignore) { }
- }
- };
- for (int i = 0; i < num; i++) {
- executor.submit(initTask);
- }
- latch.countDown();
- return executor;
- }
-
- public static class SimpleURLConnectionTask implements Runnable {
- URL url;
- CountDownLatch latch;
- public SimpleURLConnectionTask(URL url) {
- this.url = url;
- }
- public void setLatch(CountDownLatch latch) {
- this.latch = latch;
- }
- public void run() {
- long start = System.currentTimeMillis();
- try {
- URLConnection conn = url.openConnection();
- BufferedInputStream in =
- new BufferedInputStream(conn.getInputStream());
- try {
- if (conn instanceof HttpURLConnection) {
- int responseCode = ((HttpURLConnection)conn).getResponseCode();
- if (responseCode < 200 || 299 < responseCode) {
- error();
- return;
- }
- }
- while (true) {
- int r = in.read();
- if (r == -1) {
- // EOF
- break;
- }
- }
- } finally {
- in.close();
- }
- } catch (Throwable t) {
- error();
- return;
- } finally {
- if (latch != null) {
- latch.countDown();
- }
- }
- long end = System.currentTimeMillis();
- success(end - start);
- }
- }
}
diff --git a/jab/src/main/java/jp/programmers/jab/JABFactory.java b/jab/src/main/java/jp/programmers/jab/JABFactory.java
new file mode 100644
index 0000000..09fec81
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/JABFactory.java
@@ -0,0 +1,17 @@
+package jp.programmers.jab;
+
+public class JABFactory {
+
+ public static JAB create(String type) {
+ if (type == null || "".equals(type)) {
+ return new StandardJAB();
+ } else if ("netty-oio".equals(type)) {
+ return new NettyOioJAB();
+ } else if ("netty-nio".equals(type)) {
+ return new NettyNioJAB();
+ } else {
+ System.out.println("Unknown type, will use StandardJAB: " + type);
+ return new StandardJAB();
+ }
+ }
+}
diff --git a/jab/src/main/java/jp/programmers/jab/JABMain.java b/jab/src/main/java/jp/programmers/jab/JABMain.java
new file mode 100644
index 0000000..c681029
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/JABMain.java
@@ -0,0 +1,20 @@
+package jp.programmers.jab;
+
+import static org.kohsuke.args4j.ExampleMode.ALL;
+import org.kohsuke.args4j.CmdLineParser;
+
+public class JABMain {
+
+ public static void main(String[] args) throws Exception {
+ JABOptions options = new JABOptions();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+ if (options.getArguments().isEmpty()) {
+ System.err.println("Example: JAB" + parser.printExample(ALL));
+ System.exit(-1);
+ }
+ JAB jab = JABFactory.create(options.getType());
+ jab.init(options);
+ jab.execute();
+ }
+}
diff --git a/jab/src/main/java/jp/programmers/jab/JABOptions.java b/jab/src/main/java/jp/programmers/jab/JABOptions.java
index 66a6209..1fbc5c0 100644
--- a/jab/src/main/java/jp/programmers/jab/JABOptions.java
+++ b/jab/src/main/java/jp/programmers/jab/JABOptions.java
@@ -1,53 +1,75 @@
package jp.programmers.jab;
import org.kohsuke.args4j.Option;
import org.kohsuke.args4j.Argument;
import java.util.List;
import java.util.ArrayList;
public class JABOptions {
@Option(name="-n",usage="Number of requests")
private int num = 1;
- @Option(name="-c",usage="Concurrency, number of threads")
+ @Option(name="-w",usage="Number of requests in warm up phase")
+ private int warmup = 0;
+
+ @Option(name="-c",usage="Concurrency")
private int concurrency = 1;
- @Option(name="-w",usage="Run twice, warm up run and actual benchmark run")
- private boolean warmup = false;
+ @Option(name="-t",usage="Number of threads, used only in NIO JAB")
+ private int threads = 1;
+
+ @Option(name="-j",usage="JAB Type")
+ private String type = "";
@Argument
private List<String> arguments = new ArrayList<String>();
public int getNum() {
return num;
}
public void setNum(int num) {
this.num = num;
}
+ public int getWarmup() {
+ return warmup;
+ }
+
+ public void setWarmup(int warmup) {
+ this.warmup = warmup;
+ }
+
public int getConcurrency() {
return concurrency;
}
public void setConcurrency(int concurrency) {
this.concurrency = concurrency;
}
- public boolean isWarmup() {
- return warmup;
+ public int getThreads() {
+ return threads;
}
- public void setWarmup(boolean warmup) {
- this.warmup = warmup;
+ public void setThreads(int threads) {
+ this.threads = threads;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
}
public List<String> getArguments() {
return arguments;
}
public void setArguments(List<String> arguments) {
this.arguments = arguments;
}
}
diff --git a/jab/src/main/java/jp/programmers/jab/NettyJAB.java b/jab/src/main/java/jp/programmers/jab/NettyJAB.java
new file mode 100644
index 0000000..d5fa96e
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/NettyJAB.java
@@ -0,0 +1,168 @@
+package jp.programmers.jab;
+
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import org.jboss.netty.bootstrap.ClientBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelFuture;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelHandler;
+import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
+import org.jboss.netty.handler.codec.http.DefaultHttpRequest;
+import org.jboss.netty.handler.codec.http.HttpChunk;
+import org.jboss.netty.handler.codec.http.HttpClientCodec;
+import org.jboss.netty.handler.codec.http.HttpContentDecompressor;
+import org.jboss.netty.handler.codec.http.HttpHeaders;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+import org.jboss.netty.handler.codec.http.HttpRequest;
+import org.jboss.netty.handler.codec.http.HttpResponse;
+import org.jboss.netty.handler.codec.http.HttpVersion;
+import org.jboss.netty.handler.logging.LoggingHandler;
+import org.jboss.netty.logging.InternalLogLevel;
+
+public abstract class NettyJAB extends BaseJAB {
+
+ ExecutorService executor;
+ ClientBootstrap bootstrap;
+ HttpClientPipelineFactory pipelineFactory;
+ InetSocketAddress address;
+ URI uri;
+ String scheme;
+ String host;
+ int port;
+ AtomicInteger numCount = new AtomicInteger();
+
+ public void init(JABOptions options) throws Exception {
+ super.init(options);
+ if (options.getThreads() > 0) {
+ executor = Executorz.newFixedThreadPoolAndPrefill(options.getThreads());
+ } else {
+ executor = Executorz.newFixedThreadPoolAndPrefill(options.getConcurrency());
+ }
+ uri = new URI(options.getArguments().get(0));
+ scheme = uri.getScheme() == null ? "http" : uri.getScheme();
+ host = uri.getHost() == null ? "localhost" : uri.getHost();
+ port = uri.getPort();
+ if (port == -1) {
+ if (scheme.equalsIgnoreCase("http")) {
+ port = 80;
+ }
+ }
+ address = new InetSocketAddress(host, port);
+ bootstrap = new ClientBootstrap(createChannelFactory(executor));
+ pipelineFactory = new HttpClientPipelineFactory();
+ bootstrap.setPipelineFactory(pipelineFactory);
+
+ }
+
+ protected abstract ChannelFactory createChannelFactory(ExecutorService executor);
+
+ protected void fire(int num) throws Exception {
+ numCount.set(0);
+ CountDownLatch numLatch = new CountDownLatch(num);
+ pipelineFactory.latch = numLatch;
+ // Note this loop should be concurrency, not num
+ int concurrency = options.getConcurrency();
+ for (int i = 0; i < concurrency; i++) {
+ submit();
+ }
+ numLatch.await(300, TimeUnit.SECONDS);
+ }
+
+ private void submit() {
+ if (numCount.incrementAndGet() <= options.getNum()) {
+ ChannelFuture future = bootstrap.connect(address);
+ }
+ }
+
+ protected void end() throws Exception {
+ bootstrap.releaseExternalResources();
+ }
+
+ class HttpClientPipelineFactory implements ChannelPipelineFactory {
+ CountDownLatch latch;
+ @Override
+ public ChannelPipeline getPipeline() throws Exception {
+ ChannelPipeline pipeline = Channels.pipeline();
+ //pipeline.addLast("log", new LoggingHandler(InternalLogLevel.INFO));
+ pipeline.addLast("codec", new HttpClientCodec());
+ pipeline.addLast("inflater", new HttpContentDecompressor());
+ JABHandler jabHandler = new JABHandler();
+ jabHandler.latch = this.latch;
+ pipeline.addLast("handler", jabHandler);
+ return pipeline;
+ }
+ }
+
+ class JABHandler extends SimpleChannelHandler {
+ CountDownLatch latch;
+ long start = 0;
+ boolean chunked = false;
+ boolean finish = false;
+ boolean error = false;
+
+ public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
+ start = System.currentTimeMillis();
+ }
+
+ public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
+ Channel channel = e.getChannel();
+ HttpRequest request =
+ new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, uri.toASCIIString());
+ request.setHeader(HttpHeaders.Names.HOST, host);
+ request.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE);
+ //request.setHeader(HttpHeaders.Names.ACCEPT_ENCODING, HttpHeaders.Values.GZIP);
+ channel.write(request);
+ }
+
+ public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
+ if (chunked) {
+ HttpChunk chunk = (HttpChunk) e.getMessage();
+ if (chunk.isLast()) {
+ finish = chunk.isLast();
+ }
+ return;
+ }
+ HttpResponse msg = (HttpResponse)e.getMessage();
+ int statusCode = msg.getStatus().getCode();
+ if (statusCode < 200 || 299 < statusCode) {
+ error = true;
+ Recorder.instance.error();
+ }
+ chunked = msg.isChunked();
+ if (!chunked) {
+ finish = true;
+ }
+ }
+
+ public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
+ long end = System.currentTimeMillis();
+ if (finish && !error) {
+ Recorder.instance.success(end - start);
+ }
+ if (latch != null) {
+ latch.countDown();
+ }
+ submit();
+ }
+
+ public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
+ error = true;
+ e.getCause().printStackTrace();
+ Recorder.instance.error();
+ }
+ }
+
+}
diff --git a/jab/src/main/java/jp/programmers/jab/NettyNioJAB.java b/jab/src/main/java/jp/programmers/jab/NettyNioJAB.java
new file mode 100644
index 0000000..3179563
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/NettyNioJAB.java
@@ -0,0 +1,13 @@
+package jp.programmers.jab;
+
+import java.util.concurrent.ExecutorService;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+
+public class NettyNioJAB extends NettyJAB {
+
+ protected ChannelFactory createChannelFactory(ExecutorService executor) {
+ return new NioClientSocketChannelFactory(executor, executor);
+ }
+
+}
diff --git a/jab/src/main/java/jp/programmers/jab/NettyOioJAB.java b/jab/src/main/java/jp/programmers/jab/NettyOioJAB.java
new file mode 100644
index 0000000..8aff245
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/NettyOioJAB.java
@@ -0,0 +1,13 @@
+package jp.programmers.jab;
+
+import java.util.concurrent.ExecutorService;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
+
+public class NettyOioJAB extends NettyJAB {
+
+ protected ChannelFactory createChannelFactory(ExecutorService executor) {
+ return new OioClientSocketChannelFactory(executor);
+ }
+
+}
diff --git a/jab/src/main/java/jp/programmers/jab/Recorder.java b/jab/src/main/java/jp/programmers/jab/Recorder.java
new file mode 100644
index 0000000..08aa97d
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/Recorder.java
@@ -0,0 +1,42 @@
+package jp.programmers.jab;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class Recorder {
+
+ public static Recorder instance = new Recorder();
+
+ AtomicInteger successCount = new AtomicInteger(0);
+ AtomicInteger errorCount = new AtomicInteger(0);
+ List<Long> times = new CopyOnWriteArrayList<Long>();
+
+ void success(long time) {
+ successCount.incrementAndGet();
+ times.add(time);
+ }
+
+ void error() {
+ errorCount.incrementAndGet();
+ }
+
+ void reset() {
+ successCount.set(0);
+ errorCount.set(0);
+ times.clear();
+ }
+
+ void report() {
+ long total = 0;
+ for (Iterator<Long> it = times.iterator(); it.hasNext(); ) {
+ total += it.next();
+ }
+ double average = total / (double)successCount.get();
+ System.out.println("successCount=" + successCount);
+ System.out.println("errorCount=" + errorCount);
+ System.out.println("average=" + average);
+ }
+
+}
diff --git a/jab/src/main/java/jp/programmers/jab/StandardJAB.java b/jab/src/main/java/jp/programmers/jab/StandardJAB.java
new file mode 100644
index 0000000..cdbe61c
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/StandardJAB.java
@@ -0,0 +1,88 @@
+package jp.programmers.jab;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.CountDownLatch;
+import java.util.List;
+import java.util.Iterator;
+import java.net.URL;
+import java.net.URLConnection;
+import java.net.HttpURLConnection;
+import java.io.BufferedInputStream;
+
+public class StandardJAB extends BaseJAB {
+
+ ExecutorService executor;
+ SimpleURLConnectionTask task;
+
+ public void init(JABOptions options) throws Exception {
+ super.init(options);
+ executor = Executorz.newFixedThreadPoolAndPrefill(options.getConcurrency());
+ URL url = new URL(options.getArguments().get(0));
+ task = new SimpleURLConnectionTask(url);
+ }
+
+ protected void fire(int num) throws Exception {
+ // There is no API on ExecutorService to know all tasks are finished, so we use Latch
+ CountDownLatch numLatch = new CountDownLatch(num);
+ task.setLatch(numLatch);
+ for (int i = 0; i < num; i++) {
+ executor.submit(task);
+ }
+ numLatch.await(300, TimeUnit.SECONDS);
+ }
+
+ protected void end() throws Exception {
+ executor.shutdown();
+ executor.awaitTermination(30, TimeUnit.SECONDS);
+ }
+
+ class SimpleURLConnectionTask implements Runnable {
+ URL url;
+ CountDownLatch latch;
+ public SimpleURLConnectionTask(URL url) {
+ this.url = url;
+ }
+ public void setLatch(CountDownLatch latch) {
+ this.latch = latch;
+ }
+ public void run() {
+ long start = System.currentTimeMillis();
+ byte[] buff = new byte[8192];
+ try {
+ URLConnection conn = url.openConnection();
+ BufferedInputStream in =
+ new BufferedInputStream(conn.getInputStream());
+ try {
+ if (conn instanceof HttpURLConnection) {
+ int statusCode = ((HttpURLConnection)conn).getResponseCode();
+ if (statusCode < 200 || 299 < statusCode) {
+ Recorder.instance.error();
+ return;
+ }
+ }
+ while (true) {
+ int r = in.read(buff);
+ if (r < 1 && in.read() == -1) {
+ // EOF
+ break;
+ }
+ }
+ } finally {
+ in.close();
+ }
+ } catch (Throwable t) {
+ t.printStackTrace();
+ Recorder.instance.error();
+ return;
+ } finally {
+ long end = System.currentTimeMillis();
+ Recorder.instance.success(end - start);
+ if (latch != null) {
+ latch.countDown();
+ }
+ }
+ }
+ }
+}
|
nekop/java-examples
|
de74549e50be717c979806e53ad500944937f076
|
Cleanup
|
diff --git a/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java b/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java
index 5d9b892..122e825 100644
--- a/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java
+++ b/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java
@@ -1,138 +1,153 @@
package jp.programmers.examples;
import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Random;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
* This filter issues additional cookie on login, and check it until
* logout. In short, it's login time cookie. This gives us more robust
* than single JSESSIONID cookie auth provided by a container.
*
* In clustered environment, you should have unique SALT value accross
* the cluster.
*/
public class SessionFixationProtectionFilter implements Filter {
public static final String SALT = "SALT";
public static final String COOKIE_NAME = "COOKIE_NAME";
public static final String COOKIE_PATH = "COOKIE_PATH";
public static final String COOKIE_DOMAIN = "COOKIE_DOMAIN";
public static final String DEFAULT_COOKIE_NAME = "SessionFixationProtection";
public static final String DEFAULT_SALT = String.valueOf(new Random().nextInt());
private String salt = null;
private String cookieName = null;
private String cookiePath = null;
private String cookieDomain = null;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
salt = filterConfig.getInitParameter(SALT);
if (salt == null) {
salt = DEFAULT_SALT;
}
cookieName = filterConfig.getInitParameter(COOKIE_NAME);
if (cookieName == null) {
cookieName = DEFAULT_COOKIE_NAME;
}
cookiePath = filterConfig.getInitParameter(COOKIE_PATH);
cookieDomain = filterConfig.getInitParameter(COOKIE_DOMAIN);
}
@SuppressWarnings("unchecked")
@Override
public void doFilter(ServletRequest request,
ServletResponse response,
FilterChain chain)
throws IOException, ServletException {
HttpServletRequest req = (HttpServletRequest)request;
HttpServletResponse res = (HttpServletResponse)response;
+
HttpSession session = req.getSession(false);
+ if (session == null) {
+ // no session, do nothing
+ chain.doFilter(request, response);
+ return;
+ }
String user = req.getRemoteUser();
+ if (user == null) {
+ // already logout, just clean up
+ session.removeAttribute(cookieName);
+ chain.doFilter(request, response);
+ return;
+ }
+ String sessUser = (String)session.getAttribute(cookieName);
+ if (!user.equals(sessUser)) {
+ // switched user? remove previous info and go through
+ session.removeAttribute(cookieName);
+ sessUser = null;
+ }
- if (user != null && session.getAttribute(cookieName) == null) {
+ if (sessUser == null) {
// just logged in!
// going to set login cookie
String value = md5(salt + session.getId());
Cookie cookie = new Cookie(cookieName, value);
configureLoginCookie(cookie);
res.addCookie(cookie);
// mark session as this user should have a login cookie
- session.setAttribute(cookieName, "true");
- } else if (user != null && session.getAttribute(cookieName) != null) {
- // this user is logging in
+ session.setAttribute(cookieName, user);
+ } else {
+ // during login
// going to check login cookie
String expectedValue = md5(salt + session.getId());
boolean found = false;
for (Cookie c : req.getCookies()) {
if (c.getName().equals(cookieName)) {
if (expectedValue.equals(c.getValue())) {
found = true;
break;
}
}
}
if (!found) {
// possible session fixiation
handleCookieNotFound(req, res, chain);
return;
}
- } else {
- // this user is not logged in
- // do nothing
}
chain.doFilter(request, response);
}
/**
* Handles possible session fixiation. Calls HttpSession#invalidate() by default.
*/
protected void handleCookieNotFound(HttpServletRequest req,
HttpServletResponse res,
FilterChain chain)
throws IOException, ServletException {
// force invalidate
req.getSession().invalidate();
chain.doFilter(req, res);
}
protected void configureLoginCookie(Cookie cookie) {
cookie.setMaxAge(-1);
if (cookiePath != null) {
cookie.setPath(cookiePath);
}
if (cookieDomain != null) {
cookie.setDomain(cookieDomain);
}
}
@Override
public void destroy() { }
public static String md5(String s) {
try {
MessageDigest m = MessageDigest.getInstance("MD5");
byte[] data = s.getBytes();
m.update(data, 0, data.length);
BigInteger i = new BigInteger(1, m.digest());
return String.format("%1$032X", i);
} catch (NoSuchAlgorithmException ex) {
throw new RuntimeException("no MD5", ex);
}
}
}
|
nekop/java-examples
|
3b570c4da2ae0bb3ffe4f77a4f42ef37b4ad2a17
|
Kill tabs
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b83cbc5
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+*~
+.#*
+*#
+target/
diff --git a/README b/README
new file mode 100644
index 0000000..41266a1
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+Various examples written in Java.
diff --git a/ant/build.xml b/ant/build.xml
new file mode 100644
index 0000000..18b0ed5
--- /dev/null
+++ b/ant/build.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0"?>
+
+<project name="antexample" default="jar">
+
+ <property file="./build.properties" />
+
+ <property name="src.dir" value="${basedir}/src" />
+ <property name="main.dir" value="${src.dir}" />
+ <property name="java.dir" value="${main.dir}/java" />
+ <property name="resources.dir" value="${main.dir}/resources" />
+ <property name="webapp.dir" value="${main.dir}/webapp" />
+ <property name="target.dir" value="${basedir}/target" />
+ <property name="classes.dir" value="${target.dir}/classes" />
+ <property name="main.class.name" value="jp.programmers.examples.MainClass" />
+
+ <path id="build.classpath">
+ <pathelement path="${classes.dir}" />
+ </path>
+
+ <target name="clean">
+ <delete dir="${target.dir}" />
+ </target>
+
+ <target name="compile">
+ <mkdir dir="${classes.dir}" />
+<!--
+ <copy todir="${classes.dir}">
+ <fileset dir="${resources.dir}" />
+ </copy>
+-->
+ <javac srcdir="${src.dir}"
+ destdir="${classes.dir}"
+ encoding="UTF-8"
+ source="1.5"
+ target="1.5"
+ debug="on"
+ optimize="off">
+ <classpath refid="build.classpath" />
+ </javac>
+ </target>
+
+ <target name="jar" depends="compile">
+ <mkdir dir="${target.dir}" />
+ <jar jarfile="${target.dir}/${ant.project.name}.jar">
+ <fileset dir="${classes.dir}" />
+<!--
+ <manifest><attribute name="Main-Class" value="${main.class.name}" /></manifest>
+-->
+ </jar>
+ </target>
+
+<!--
+ <target name="war" depends="compile">
+ <mkdir dir="${target.dir}/${ant.project.name}" />
+ <mkdir dir="${target.dir}/${ant.project.name}/WEB-INF/classes" />
+ <copy todir="${target.dir}/${ant.project.name}/WEB-INF/classes">
+ <fileset dir="${target.dir}/classes" />
+ </copy>
+ <jar jarfile="${target.dir}/${ant.project.name}.war">
+ <fileset dir="${target.dir}/${ant.project.name}" />
+ <fileset dir="${webapp.dir}" />
+ </jar>
+ </target>
+
+ <target name="run-main">
+ <java classname="${main.class.name}" classpathref="build.classpath" />
+ </target>
+
+ <target name="native2ascii">
+ <native2ascii dest="${classes.dir}" src="${resource.dir}" />
+ </target>
+-->
+
+</project>
diff --git a/clustered-webapp/test.war/WEB-INF/web.xml b/clustered-webapp/test.war/WEB-INF/web.xml
new file mode 100644
index 0000000..961f93a
--- /dev/null
+++ b/clustered-webapp/test.war/WEB-INF/web.xml
@@ -0,0 +1,8 @@
+<web-app>
+ <distributable />
+ <!--
+ <session-config>
+ <session-timeout>1</session-timeout>
+ </session-config>
+ -->
+</web-app>
diff --git a/clustered-webapp/test.war/index.jsp b/clustered-webapp/test.war/index.jsp
new file mode 100644
index 0000000..206267e
--- /dev/null
+++ b/clustered-webapp/test.war/index.jsp
@@ -0,0 +1 @@
+<%= session.getId() %>
diff --git a/clustered-webapp/test.war/sesstimeout.jsp b/clustered-webapp/test.war/sesstimeout.jsp
new file mode 100644
index 0000000..76f1044
--- /dev/null
+++ b/clustered-webapp/test.war/sesstimeout.jsp
@@ -0,0 +1,5 @@
+<p><%= session.getId() %></p>
+<p><%= session.getMaxInactiveInterval() %></p>
+<p><%= session.getCreationTime() %></p>
+<p><%= session.getLastAccessedTime() %></p>
+
diff --git a/customvalve/README.txt b/customvalve/README.txt
new file mode 100644
index 0000000..1dbed13
--- /dev/null
+++ b/customvalve/README.txt
@@ -0,0 +1,13 @@
+* What is this project
+
+Includes useful JBoss Web custom valve components and works with JBoss AS 4.2 series. Currently we only have ConfigureSessionCookieValve which allows you to modify attirbutes on JSESSIONID cookies.
+
+* Building
+
+Edit the build.properties and run ant command.
+
+* How to use
+
+1. Copy the target/customvalve.jar into $JBOSS_HOME/server/$JBOSS_SERVER_CONFIG/lib/ or $JBOSS_HOME/server/$JBOSS_SERVER_CONFIG/deploy/jboss-web.deployer/ directory. You can use "ant deploy" command which copys the jar file into lib dir.
+
+2. Write context.xml file and put it as WEB-INF/context.xml in your WAR archive. This project includes example context.xml file.
diff --git a/customvalve/build.properties b/customvalve/build.properties
new file mode 100644
index 0000000..78d991a
--- /dev/null
+++ b/customvalve/build.properties
@@ -0,0 +1,2 @@
+jboss.home=
+jboss.server.config=default
diff --git a/customvalve/build.xml b/customvalve/build.xml
new file mode 100644
index 0000000..85b11d9
--- /dev/null
+++ b/customvalve/build.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0"?>
+
+<project name="customvalve" default="jar" basedir=".">
+
+ <property file="${basedir}/build.properties" />
+
+ <property name="src.dir" value="${basedir}/src" />
+ <property name="main.dir" value="${src.dir}/main" />
+ <property name="java.dir" value="${main.dir}/java" />
+ <property name="target.dir" value="./target" />
+ <property name="classes.dir" value="${target.dir}/classes" />
+
+
+ <path id="build.classpath">
+ <pathelement path="${classes.dir}" />
+ <pathelement path="${jboss.home}/server/${jboss.server.config}/deploy/jboss-web.deployer/jbossweb.jar" />
+ <pathelement path="${jboss.home}/server/${jboss.server.config}/lib/servlet-api.jar" />
+ </path>
+
+ <target name="compile">
+ <mkdir dir="${classes.dir}" />
+ <javac srcdir="${src.dir}"
+ destdir="${classes.dir}"
+ encoding="UTF-8"
+ source="1.5"
+ target="1.5"
+ debug="on"
+ optimize="off">
+ <classpath refid="build.classpath" />
+ </javac>
+ </target>
+
+ <target name="jar" depends="compile">
+ <jar jarfile="${target.dir}/${ant.project.name}.jar">
+ <fileset dir="${classes.dir}" />
+ </jar>
+ </target>
+
+ <target name="deploy" depends="jar">
+ <copy file="${target.dir}/${ant.project.name}.jar" todir="${jboss.home}/server/${jboss.server.config}/lib" />
+ </target>
+
+ <target name="clean">
+ <delete dir="${target.dir}" />
+ </target>
+
+</project>
diff --git a/customvalve/context.xml b/customvalve/context.xml
new file mode 100644
index 0000000..ab9644e
--- /dev/null
+++ b/customvalve/context.xml
@@ -0,0 +1,12 @@
+<Context>
+
+ <!--
+ This valve force overwrite session cookie path, domain and secure attributes.
+ The attribute names are cookiePath, cookieDomain and cookieSecure.
+
+ <Valve className="com.redhat.jboss.support.ConfigureSessionCookieValve" cookiePath="/path" cookieDomain="some.domain.here" cookieSecure="true/false" />
+ -->
+ <Valve className="com.redhat.jboss.support.ConfigureSessionCookieValve" />
+
+</Context>
+
diff --git a/customvalve/src/main/java/com/redhat/jboss/support/ConfigureSessionCookieResponseWrapper.java b/customvalve/src/main/java/com/redhat/jboss/support/ConfigureSessionCookieResponseWrapper.java
new file mode 100644
index 0000000..da421a9
--- /dev/null
+++ b/customvalve/src/main/java/com/redhat/jboss/support/ConfigureSessionCookieResponseWrapper.java
@@ -0,0 +1,49 @@
+package com.redhat.jboss.support;
+
+import javax.servlet.http.Cookie;
+import org.apache.catalina.Globals;
+import org.apache.catalina.connector.Response;
+
+public class ConfigureSessionCookieResponseWrapper extends ResponseWrapper {
+
+ protected String cookiePath;
+ protected String cookieDomain;
+ protected String cookieSecure;
+
+ public ConfigureSessionCookieResponseWrapper(Response res, String cookiePath, String cookieDomain, String cookieSecure) {
+ super(res);
+ this.cookiePath = cookiePath;
+ this.cookieDomain = cookieDomain;
+ this.cookieSecure = cookieSecure;
+ }
+
+ // Called from addCookie() and addCookieInternal() methods.
+ protected void configureSessionCookie(Cookie cookie) {
+ if (Globals.SESSION_COOKIE_NAME.equals(cookie.getName())) {
+ if (cookiePath != null) {
+ cookie.setPath(cookiePath);
+ }
+ if (cookieDomain != null) {
+ cookie.setDomain(cookieDomain);
+ }
+ if (cookieSecure != null) {
+ if (cookieSecure.equalsIgnoreCase("true")) {
+ cookie.setSecure(true);
+ } else if (cookieSecure.equalsIgnoreCase("false")) {
+ cookie.setSecure(false);
+ }
+ }
+ }
+ }
+
+ public void addCookie(Cookie cookie) {
+ configureSessionCookie(cookie);
+ res.addCookie(cookie);
+ }
+
+ public void addCookieInternal(Cookie cookie) {
+ configureSessionCookie(cookie);
+ res.addCookieInternal(cookie);
+ }
+
+}
diff --git a/customvalve/src/main/java/com/redhat/jboss/support/ConfigureSessionCookieValve.java b/customvalve/src/main/java/com/redhat/jboss/support/ConfigureSessionCookieValve.java
new file mode 100644
index 0000000..0443265
--- /dev/null
+++ b/customvalve/src/main/java/com/redhat/jboss/support/ConfigureSessionCookieValve.java
@@ -0,0 +1,56 @@
+package com.redhat.jboss.support;
+
+import java.io.IOException;
+import javax.servlet.ServletException;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+import org.apache.catalina.valves.ValveBase;
+
+/**
+ * This class allows you to configure session cookie.
+ *
+ * Why does every property start with the verbose, annoying "cookie"
+ * prefix? Why not use "domain" instead of "cookieDomain"? Well,
+ * that's because the ValveBase class already has a getDomain() method!
+ */
+public class ConfigureSessionCookieValve extends ValveBase {
+
+ protected String cookiePath = null;
+ protected String cookieDomain = null;
+ protected String cookieSecure = null;
+
+ public void invoke(Request request, Response response)
+ throws IOException, ServletException {
+ Response wrapperResponse = new ConfigureSessionCookieResponseWrapper(response, cookiePath, cookieDomain, cookieSecure);
+ try {
+ request.setResponse(wrapperResponse);
+ getNext().invoke(request, wrapperResponse);
+ } finally {
+ request.setResponse(response);
+ }
+ }
+
+ public String getCookiePath() {
+ return cookiePath;
+ }
+
+ public void setCookiePath(String cookiePath) {
+ this.cookiePath = cookiePath;
+ }
+
+ public String getCookieDomain() {
+ return cookieDomain;
+ }
+
+ public void setCookieDomain(String cookieDomain) {
+ this.cookieDomain = cookieDomain;
+ }
+
+ public String getCookieSecure() {
+ return cookieSecure;
+ }
+
+ public void setCookieSecure(String cookieSecure) {
+ this.cookieSecure = cookieSecure;
+ }
+}
diff --git a/customvalve/src/main/java/com/redhat/jboss/support/RequestCountValve.java b/customvalve/src/main/java/com/redhat/jboss/support/RequestCountValve.java
new file mode 100644
index 0000000..9953241
--- /dev/null
+++ b/customvalve/src/main/java/com/redhat/jboss/support/RequestCountValve.java
@@ -0,0 +1,31 @@
+package com.redhat.jboss.support;
+
+import java.io.IOException;
+import javax.servlet.ServletException;
+import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+import org.apache.catalina.valves.ValveBase;
+
+/**
+ * A valve which has current request count.
+ */
+public class RequestCountValve extends ValveBase {
+
+ private AtomicInteger count = new AtomicInteger(0);
+
+ public void invoke(Request request, Response response)
+ throws IOException, ServletException {
+ try {
+ count.incrementAndGet();
+ getNext().invoke(request, response);
+ } finally {
+ count.decrementAndGet();
+ }
+ }
+
+ public AtomicInteger getCount() {
+ return count;
+ }
+
+}
diff --git a/customvalve/src/main/java/com/redhat/jboss/support/ResponseWrapper.java b/customvalve/src/main/java/com/redhat/jboss/support/ResponseWrapper.java
new file mode 100644
index 0000000..5e74e3f
--- /dev/null
+++ b/customvalve/src/main/java/com/redhat/jboss/support/ResponseWrapper.java
@@ -0,0 +1,323 @@
+package com.redhat.jboss.support;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.Locale;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.catalina.Context;
+import org.apache.catalina.connector.Connector;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+
+/**
+ * Simple Wrapper class for connector Response.
+ *
+ * Note that you need to override the getResponse() method and return
+ * new ResponseFacade(this) if you want to intercept application
+ * method calls on HttpServletResponse. Otherwise the method calls go
+ * to the original connector Response directly.
+ */
+public class ResponseWrapper extends Response {
+
+ protected Response res;
+
+ public ResponseWrapper(Response res) {
+ this.res = res;
+ }
+
+ // Code for delegation of java.lang.Object methods to res
+
+ public int hashCode() {
+ return res.hashCode();
+ }
+
+ public boolean equals(Object object) {
+ return res.equals(object);
+ }
+
+ public String toString() {
+ return res.toString();
+ }
+
+ // Code for delegation of org.apache.catalina.connector.Response methods to res
+
+ public String getMessage() {
+ return res.getMessage();
+ }
+
+ public Context getContext() {
+ return res.getContext();
+ }
+
+ public void reset() {
+ res.reset();
+ }
+
+ public void reset(int n, String string) {
+ res.reset(n, string);
+ }
+
+ public boolean isError() {
+ return res.isError();
+ }
+
+ public void flushBuffer() throws IOException {
+ res.flushBuffer();
+ }
+
+ public void setError() {
+ res.setError();
+ }
+
+ public boolean isClosed() {
+ return res.isClosed();
+ }
+
+ public int getContentLength() {
+ return res.getContentLength();
+ }
+
+ public String getInfo() {
+ return res.getInfo();
+ }
+
+ public String getContentType() {
+ return res.getContentType();
+ }
+
+ public void setContentLength(int n) {
+ res.setContentLength(n);
+ }
+
+ public void setContentType(String string) {
+ res.setContentType(string);
+ }
+
+ public ServletOutputStream getOutputStream() throws IOException {
+ return res.getOutputStream();
+ }
+
+ public void recycle() {
+ res.recycle();
+ }
+
+ public void clearEncoders() {
+ res.clearEncoders();
+ }
+
+ public Connector getConnector() {
+ return res.getConnector();
+ }
+
+ public void setConnector(Connector connector) {
+ res.setConnector(connector);
+ }
+
+ public void setContext(Context context) {
+ res.setContext(context);
+ }
+
+ public Request getRequest() {
+ return res.getRequest();
+ }
+
+ public HttpServletResponse getResponse() {
+ return res.getResponse();
+ }
+
+ public OutputStream getStream() {
+ return res.getStream();
+ }
+
+ public void setStream(OutputStream outputStream) {
+ res.setStream(outputStream);
+ }
+
+ public String getCharacterEncoding() {
+ return res.getCharacterEncoding();
+ }
+
+ public Locale getLocale() {
+ return res.getLocale();
+ }
+
+ public void setCharacterEncoding(String string) {
+ res.setCharacterEncoding(string);
+ }
+
+ public void addCookie(Cookie cookie) {
+ res.addCookie(cookie);
+ }
+
+ public void addHeader(String string, String string1) {
+ res.addHeader(string, string1);
+ }
+
+ public Cookie[] getCookies() {
+ return res.getCookies();
+ }
+
+ public String getHeader(String string) {
+ return res.getHeader(string);
+ }
+
+ public String[] getHeaderNames() {
+ return res.getHeaderNames();
+ }
+
+ public void setRequest(Request request) {
+ res.setRequest(request);
+ }
+
+ public boolean isCommitted() {
+ return res.isCommitted();
+ }
+
+ public void addCookieInternal(Cookie cookie) {
+ res.addCookieInternal(cookie);
+ }
+
+ public void setLocale(Locale locale) {
+ res.setLocale(locale);
+ }
+
+ public int getBufferSize() {
+ return res.getBufferSize();
+ }
+
+ public void setBufferSize(int n) {
+ res.setBufferSize(n);
+ }
+
+ public void setCoyoteResponse(org.apache.coyote.Response response) {
+ res.setCoyoteResponse(response);
+ }
+
+ public org.apache.coyote.Response getCoyoteResponse() {
+ return res.getCoyoteResponse();
+ }
+
+ public int getContentCount() {
+ return res.getContentCount();
+ }
+
+ public void setAppCommitted(boolean flag) {
+ res.setAppCommitted(flag);
+ }
+
+ public boolean isAppCommitted() {
+ return res.isAppCommitted();
+ }
+
+ public boolean getIncluded() {
+ return res.getIncluded();
+ }
+
+ public void setIncluded(boolean flag) {
+ res.setIncluded(flag);
+ }
+
+ public void setSuspended(boolean flag) {
+ res.setSuspended(flag);
+ }
+
+ public boolean isSuspended() {
+ return res.isSuspended();
+ }
+
+ public ServletOutputStream createOutputStream() throws IOException {
+ return res.createOutputStream();
+ }
+
+ public void finishResponse() throws IOException {
+ res.finishResponse();
+ }
+
+ public PrintWriter getReporter() throws IOException {
+ return res.getReporter();
+ }
+
+ public PrintWriter getWriter() throws IOException {
+ return res.getWriter();
+ }
+
+ public void resetBuffer() {
+ res.resetBuffer();
+ }
+
+ public String[] getHeaderValues(String string) {
+ return res.getHeaderValues(string);
+ }
+
+ public int getStatus() {
+ return res.getStatus();
+ }
+
+ public void addDateHeader(String string, long l) {
+ res.addDateHeader(string, l);
+ }
+
+ public void addIntHeader(String string, int n) {
+ res.addIntHeader(string, n);
+ }
+
+ public boolean containsHeader(String string) {
+ return res.containsHeader(string);
+ }
+
+ public String encodeRedirectURL(String string) {
+ return res.encodeRedirectURL(string);
+ }
+
+ public String encodeRedirectUrl(String string) {
+ return res.encodeRedirectUrl(string);
+ }
+
+ public String encodeURL(String string) {
+ return res.encodeURL(string);
+ }
+
+ public String encodeUrl(String string) {
+ return res.encodeUrl(string);
+ }
+
+ public void sendAcknowledgement() throws IOException {
+ res.sendAcknowledgement();
+ }
+
+ public void sendError(int n) throws IOException {
+ res.sendError(n);
+ }
+
+ public void sendError(int n, String string) throws IOException {
+ res.sendError(n, string);
+ }
+
+ public void sendRedirect(String string) throws IOException {
+ res.sendRedirect(string);
+ }
+
+ public void setDateHeader(String string, long l) {
+ res.setDateHeader(string, l);
+ }
+
+ public void setHeader(String string, String string1) {
+ res.setHeader(string, string1);
+ }
+
+ public void setIntHeader(String string, int index) {
+ res.setIntHeader(string, index);
+ }
+
+ public void setStatus(int n) {
+ res.setStatus(n);
+ }
+
+ public void setStatus(int n, String string) {
+ res.setStatus(n, string);
+ }
+
+}
diff --git a/ejb2cmp/pom.xml b/ejb2cmp/pom.xml
new file mode 100644
index 0000000..fb10abf
--- /dev/null
+++ b/ejb2cmp/pom.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb2cmp</artifactId>
+ <packaging>ejb</packaging>
+ <name>example-ejb2cmp</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>xdoclet-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>xdoclet</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <ejbdoclet destDir="${project.build.directory}/generated-sources/xdoclet"
+ ejbSpec="2.1">
+ <fileset dir="${project.build.sourceDirectory}">
+ <include name="**/*Bean.java" />
+ <include name="**/*MDB.java" />
+ </fileset>
+ <homeinterface />
+ <remoteinterface />
+ <localinterface />
+ <localhomeinterface />
+ <entitypk />
+ <utilobject />
+ <deploymentdescriptor destDir="${project.build.outputDirectory}/META-INF" />
+ <jboss version="4.0" destDir="${project.build.outputDirectory}/META-INF" mergeDir="src/main/xdoclet" />
+ </ejbdoclet>
+ </tasks>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java b/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java
new file mode 100644
index 0000000..96c7fd9
--- /dev/null
+++ b/ejb2cmp/src/main/java/jp/programmers/examples/ejb2/cmp/StudentEntityBean.java
@@ -0,0 +1,70 @@
+package jp.programmers.examples.ejb2.cmp;
+
+import java.rmi.RemoteException;
+import javax.ejb.CreateException;
+import javax.ejb.EJBException;
+import javax.ejb.EntityBean;
+import javax.ejb.EntityContext;
+import javax.ejb.RemoveException;
+
+/**
+ * @ejb.bean
+ * name="Student"
+ * type="CMP"
+ * cmp-version="2.x"
+ * view-type="local"
+ * reentrant="false"
+ * local-jndi-name="Student"
+ * @ejb.util generate="physical"
+ * @ejb.pk generate="true"
+ * @ejb.persistence table-name="STUDENT"
+ * @jboss.persistence
+ * create-table="true"
+ * remove-table="false"
+ */
+public abstract class StudentEntityBean implements EntityBean {
+
+ private EntityContext ctx;
+
+ /**
+ * @ejb.pk-field
+ * @ejb.persistent-field
+ * @ejb.interface-method
+ * @ejb.persistence column-name="ID"
+ */
+ public abstract Integer getId();
+ public abstract void setId(Integer id);
+
+ /**
+ * @ejb.persistent-field
+ * @ejb.interface-method
+ * @ejb.persistence column-name="NAME"
+ */
+ public abstract String getName();
+ public abstract void setName(String name);
+
+ // EntityBean implementation ------------------------------------
+ /**
+ * @ejb.create-method
+ */
+ public StudentPK ejbCreate(StudentPK pk) throws CreateException {
+ setId(pk.getId());
+ return null;
+ }
+
+ public void ejbPostCreate(StudentPK pk) {}
+ public void ejbActivate() throws EJBException, RemoteException {}
+ public void ejbLoad() throws EJBException, RemoteException {}
+ public void ejbPassivate() throws EJBException, RemoteException {}
+ public void ejbRemove() throws RemoveException, EJBException, RemoteException {}
+ public void ejbStore() throws EJBException, RemoteException {}
+
+ public void setEntityContext(EntityContext ctx) throws EJBException, RemoteException {
+ this.ctx = ctx;
+ }
+
+ public void unsetEntityContext() throws EJBException, RemoteException {
+ this.ctx = null;
+ }
+}
+
diff --git a/ejb2mdb/QueueSend.bsh b/ejb2mdb/QueueSend.bsh
new file mode 100644
index 0000000..14da89c
--- /dev/null
+++ b/ejb2mdb/QueueSend.bsh
@@ -0,0 +1,23 @@
+connectionFactoryJNDIName = "ConnectionFactory";
+queueName = "queue/tmpQueue";
+message = "hello";
+
+start() {
+ context = new javax.naming.InitialContext();
+ cf = context.lookup(connectionFactoryJNDIName);
+ queue = context.lookup(queueName);
+ conn = cf.createQueueConnection();
+ try {
+ session = conn.createQueueSession(false, javax.jms.Session.AUTO_ACKNOWLEDGE);
+ sender = session.createSender(queue);
+ sender.send(session.createTextMessage(message));
+ sender.close();
+ session.close();
+ } finally {
+ if (conn != null) {
+ try {
+ conn.close();
+ } catch (Exception ignore) { }
+ }
+ }
+}
diff --git a/ejb2mdb/pom.xml b/ejb2mdb/pom.xml
new file mode 100644
index 0000000..5c33806
--- /dev/null
+++ b/ejb2mdb/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb2</artifactId>
+ <packaging>ejb</packaging>
+ <name>example-ejb2</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>xdoclet-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>xdoclet</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <ejbdoclet destDir="${project.build.directory}/generated-sources/xdoclet"
+ ejbSpec="2.1">
+ <fileset dir="${project.build.sourceDirectory}">
+ <include name="**/*Bean.java" />
+ <include name="**/*MDB.java" />
+ </fileset>
+ <homeinterface />
+ <remoteinterface />
+ <localinterface />
+ <localhomeinterface />
+ <deploymentdescriptor destDir="${project.build.outputDirectory}/META-INF" />
+ <jboss version="4.0" destDir="${project.build.outputDirectory}/META-INF" mergeDir="src/main/xdoclet" />
+ </ejbdoclet>
+ </tasks>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb2mdb/src/main/java/jp/programmers/examples/ejb2/mdb/HelloMDB.java b/ejb2mdb/src/main/java/jp/programmers/examples/ejb2/mdb/HelloMDB.java
new file mode 100644
index 0000000..416d0fb
--- /dev/null
+++ b/ejb2mdb/src/main/java/jp/programmers/examples/ejb2/mdb/HelloMDB.java
@@ -0,0 +1,62 @@
+package jp.programmers.examples.ejb2.mdb;
+
+import javax.ejb.MessageDrivenBean;
+import javax.ejb.MessageDrivenContext;
+import javax.jms.JMSException;
+import javax.jms.Message;
+import javax.jms.MessageListener;
+import javax.jms.TextMessage;
+
+/**
+ * @ejb.bean name="HelloMDB"
+ * @jboss.destination-jndi-name name="queue/tmpQueue"
+ */
+public class HelloMDB implements MessageDrivenBean, MessageListener {
+
+ private MessageDrivenContext context = null;
+
+ public void onMessage(Message message) {
+ System.out.println("HelloMDB#onMessage(Message)");
+ if (message == null) {
+ System.out.println("message is null");
+ return;
+ }
+ String s = message.toString();
+
+ // JBM TextMessage toString() doesn't print its contents, so tweak it
+ String text = null;
+ if (message instanceof TextMessage) {
+ try {
+ text = ((TextMessage)message).getText();
+ s += ": text=" + text;
+ } catch (JMSException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ System.out.println(s);
+
+ // sleep if long value is passed
+ try {
+ long sleep = Long.parseLong(text);
+ Thread.sleep(sleep);
+ } catch (Exception ignore) { }
+
+ // raise exception if requested
+ if (text.equalsIgnoreCase("exception")) {
+ throw new RuntimeException("Exception requested.");
+ }
+
+ // call setRollbackOnly()
+ if (text.equalsIgnoreCase("rollback")) {
+ context.setRollbackOnly();
+ }
+
+ }
+
+ public void ejbCreate() { }
+ public void ejbRemove() { }
+ public void setMessageDrivenContext(MessageDrivenContext context) {
+ this.context = context;
+ }
+}
diff --git a/ejb2mdb/tmpQueue-service.xml b/ejb2mdb/tmpQueue-service.xml
new file mode 100644
index 0000000..602d137
--- /dev/null
+++ b/ejb2mdb/tmpQueue-service.xml
@@ -0,0 +1,9 @@
+<server>
+ <mbean code="org.jboss.jms.server.destination.QueueService"
+ name="jboss.messaging.destination:service=Queue,name=tmpQueue"
+ xmbean-dd="xmdesc/Queue-xmbean.xml">
+ <depends optional-attribute-name="ServerPeer">jboss.messaging:service=ServerPeer</depends>
+ <depends>jboss.messaging:service=PostOffice</depends>
+ </mbean>
+</server>
+
diff --git a/ejb2slsb/pom.xml b/ejb2slsb/pom.xml
new file mode 100644
index 0000000..6c17185
--- /dev/null
+++ b/ejb2slsb/pom.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb2</artifactId>
+ <packaging>ejb</packaging>
+ <name>example-ejb2</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>xdoclet-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>xdoclet</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <ejbdoclet destDir="${project.build.directory}/generated-sources/xdoclet"
+ ejbSpec="2.1">
+ <fileset dir="${project.build.sourceDirectory}">
+ <include name="**/*Bean.java" />
+ <include name="**/*MDB.java" />
+ </fileset>
+ <homeinterface />
+ <remoteinterface />
+ <localinterface />
+ <localhomeinterface />
+ <deploymentdescriptor destDir="${project.build.outputDirectory}/META-INF" />
+ <jboss version="4.0" destDir="${project.build.outputDirectory}/META-INF" mergeDir="src/main/xdoclet" />
+ </ejbdoclet>
+ </tasks>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>client-eap510</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jbossall-client</artifactId>
+ <version>5.1.0.EAP</version>
+ <scope>system</scope>
+ <systemPath>${jboss510.home}/client/jbossall-client.jar</systemPath>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb2slsb/run-client.sh b/ejb2slsb/run-client.sh
new file mode 100644
index 0000000..9a95b4d
--- /dev/null
+++ b/ejb2slsb/run-client.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient-eap510 exec:java -Dexec.mainClass=jp.programmers.examples.ejb2.slsb.HelloSLSBClient -Dexec.classpathScope=compile
diff --git a/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBBean.java b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBBean.java
new file mode 100644
index 0000000..4769be4
--- /dev/null
+++ b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBBean.java
@@ -0,0 +1,106 @@
+package jp.programmers.examples.ejb2.slsb;
+
+import java.rmi.RemoteException;
+import javax.ejb.EJBException;
+import javax.ejb.SessionBean;
+import javax.ejb.SessionContext;
+import javax.ejb.TimedObject;
+import javax.ejb.Timer;
+
+/**
+ * @ejb.bean name="HelloSLSB" type="Stateless"
+ */
+public class HelloSLSBBean implements SessionBean, TimedObject {
+
+ private SessionContext ctx;
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public String hello() {
+ System.out.println("HelloSLSB#hello()");
+ return this.hello("world");
+ }
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public String hello(String name) {
+ System.out.println("HelloSLSB#hello(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+
+ /**
+ * @ejb.interface-method view-type="local"
+ */
+ public String helloLocal() {
+ System.out.println("HelloSLSB#helloLocal()");
+ return this.hello("world");
+ }
+
+ /**
+ * @ejb.interface-method view-type="local"
+ */
+ public String helloLocal(String name) {
+ System.out.println("HelloSLSB#helloLocal(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public Object echo(Object o) {
+ return o;
+ }
+
+ /**
+ * @ejb.interface-method view-type="local"
+ */
+ public Object echoLocal(Object o) {
+ return o;
+ }
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public void sleep(int msec) {
+ System.out.println("HelloSLSB#sleep(int)");
+ System.out.println("msec=" + msec);
+ try {
+ Thread.sleep(msec);
+ } catch (InterruptedException ignore) { }
+ }
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public void initTimer() {
+ ctx.getTimerService().createTimer(0, 20 * 1000, null);
+ }
+
+ /**
+ * @ejb.interface-method view-type="remote"
+ */
+ public void ejbTimeout(Timer timer) {
+ System.out.println("HelloSLSB#ejbTimeout(Timer)");
+ System.out.println("timer=" + timer);
+ }
+
+ public void ejbCreate() { }
+ public void ejbActivate() throws EJBException, RemoteException { }
+ public void ejbPassivate() throws EJBException, RemoteException { }
+ public void ejbRemove() throws EJBException, RemoteException { }
+ public void setSessionContext(SessionContext context)
+ throws EJBException, RemoteException {
+ this.ctx = context;
+ }
+}
diff --git a/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java
new file mode 100644
index 0000000..47a68e3
--- /dev/null
+++ b/ejb2slsb/src/main/java/jp/programmers/examples/ejb2/slsb/HelloSLSBClient.java
@@ -0,0 +1,30 @@
+package jp.programmers.examples.ejb2.slsb;
+
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+
+public class HelloSLSBClient {
+
+ public static final String DEFAULT_PROVIDER_URL = "localhost:1099";
+
+ public static void main(String[] args) throws Exception {
+ String providerUrl = DEFAULT_PROVIDER_URL;
+ if (args.length != 0) {
+ providerUrl = args[0];
+ }
+
+ String jndiName = "HelloSLSB";
+ Properties props = new Properties();
+ props.put(Context.INITIAL_CONTEXT_FACTORY,
+ "org.jnp.interfaces.NamingContextFactory");
+ props.put(Context.URL_PKG_PREFIXES,
+ "org.jboss.naming:org.jnp.interfaces");
+ props.put(Context.PROVIDER_URL, providerUrl);
+ InitialContext context = new InitialContext(props);
+ HelloSLSBHome helloHome = (HelloSLSBHome)context.lookup(jndiName);
+ HelloSLSB hello = helloHome.create();
+ hello.hello();
+ }
+
+}
diff --git a/ejb3mailmdb/pom.xml b/ejb3mailmdb/pom.xml
new file mode 100644
index 0000000..a3e819a
--- /dev/null
+++ b/ejb3mailmdb/pom.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb3mailmdb</artifactId>
+ <packaging>jar</packaging>
+ <name>example-ejb3mailmdb</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>javax.mail</groupId>
+ <artifactId>mail</artifactId>
+ <version>1.4.2</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>eap510</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>mail-ra</artifactId>
+ <version>5.1.0.EAP</version>
+ <scope>system</scope>
+ <systemPath>${jboss510.home}/server/${jboss510.profile}/deploy/mail-ra.rar/mail-ra.jar</systemPath>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jboss-ejb3-ext-api</artifactId>
+ <version>5.1.0.EAP</version>
+ <scope>system</scope>
+ <systemPath>${jboss510.home}/common/lib/jboss-ejb3-ext-api.jar</systemPath>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb3mailmdb/src/main/java/jp/programmers/examples/ejb3/mdb/MailMDB.java b/ejb3mailmdb/src/main/java/jp/programmers/examples/ejb3/mdb/MailMDB.java
new file mode 100644
index 0000000..84fc240
--- /dev/null
+++ b/ejb3mailmdb/src/main/java/jp/programmers/examples/ejb3/mdb/MailMDB.java
@@ -0,0 +1,26 @@
+package jp.programmers.examples.ejb3.mdb;
+
+import javax.ejb.ActivationConfigProperty;
+import javax.ejb.MessageDriven;
+import javax.mail.Message;
+
+import org.jboss.ejb3.annotation.ResourceAdapter;
+import org.jboss.resource.adapter.mail.inflow.MailListener;
+
+@MessageDriven(activationConfig={
+ @ActivationConfigProperty(propertyName="mailServer", propertyValue="server"),
+ @ActivationConfigProperty(propertyName="mailFolder", propertyValue="INBOX"),
+ @ActivationConfigProperty(propertyName="storeProtocol", propertyValue="imap"),
+ @ActivationConfigProperty(propertyName="userName", propertyValue="foo"),
+ @ActivationConfigProperty(propertyName="password", propertyValue="foo")
+})
+@ResourceAdapter("mail-ra.rar")
+public class MailMDB implements MailListener {
+ public void onMessage(Message mail) {
+ try {
+ System.out.println("New email: " + mail.getSubject());
+ } catch (javax.mail.MessagingException ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/ejb3mdb/QueueSend.bsh b/ejb3mdb/QueueSend.bsh
new file mode 100644
index 0000000..14da89c
--- /dev/null
+++ b/ejb3mdb/QueueSend.bsh
@@ -0,0 +1,23 @@
+connectionFactoryJNDIName = "ConnectionFactory";
+queueName = "queue/tmpQueue";
+message = "hello";
+
+start() {
+ context = new javax.naming.InitialContext();
+ cf = context.lookup(connectionFactoryJNDIName);
+ queue = context.lookup(queueName);
+ conn = cf.createQueueConnection();
+ try {
+ session = conn.createQueueSession(false, javax.jms.Session.AUTO_ACKNOWLEDGE);
+ sender = session.createSender(queue);
+ sender.send(session.createTextMessage(message));
+ sender.close();
+ session.close();
+ } finally {
+ if (conn != null) {
+ try {
+ conn.close();
+ } catch (Exception ignore) { }
+ }
+ }
+}
diff --git a/ejb3mdb/pom.xml b/ejb3mdb/pom.xml
new file mode 100644
index 0000000..1b6d8ff
--- /dev/null
+++ b/ejb3mdb/pom.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb3mdb</artifactId>
+ <packaging>jar</packaging>
+ <name>example-ejb3mdb</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb3mdb/src/main/java/jp/programmers/examples/ejb3/mdb/HelloMDB.java b/ejb3mdb/src/main/java/jp/programmers/examples/ejb3/mdb/HelloMDB.java
new file mode 100644
index 0000000..f4230ae
--- /dev/null
+++ b/ejb3mdb/src/main/java/jp/programmers/examples/ejb3/mdb/HelloMDB.java
@@ -0,0 +1,72 @@
+package jp.programmers.examples.ejb3.mdb;
+
+import javax.ejb.ActivationConfigProperty;
+import javax.ejb.MessageDriven;
+import javax.jms.Message;
+import javax.jms.MessageListener;
+import javax.ejb.MessageDrivenContext;
+import javax.jms.TextMessage;
+import javax.jms.JMSException;
+import javax.annotation.Resource;
+
+/* Singleton configuration:
+@MessageDriven(
+ activationConfig={
+ @ActivationConfigProperty(propertyName="destinationType", propertyValue="javax.jms.Queue"),
+ @ActivationConfigProperty(propertyName="destination", propertyValue="queue/tmpQueue"),
+ @ActivationConfigProperty(propertyName="maxSession", propertyValue="1"),
+ @ActivationConfigProperty(propertyName="useDLQ", propertyValue="false")
+ })
+// TODO: This annotation package is JBoss 4, need to upgrade to JBoss 5 and declare dep in pom.xml
[email protected](value=org.jboss.ejb3.StrictMaxPool.class, maxSize=1)
+*/
+/* @javax.ejb.TransactionAttribute(javax.ejb.TransactionAttributeType.NOT_SUPPORTED) */
+
+// JBM has own DLQ processing feature, so make sure to disable DLQ processing on JCA side (useDLQ=false)
+@MessageDriven(
+    activationConfig={
+        @ActivationConfigProperty(propertyName="destinationType", propertyValue="javax.jms.Queue"),
+        @ActivationConfigProperty(propertyName="destination", propertyValue="queue/tmpQueue"),
+        @ActivationConfigProperty(propertyName="useDLQ", propertyValue="false")
+    })
+public class HelloMDB implements MessageListener {
+    @Resource MessageDrivenContext context;
+    public void onMessage(Message message) {
+        System.out.println("HelloMDB#onMessage(Message)");
+        if (message == null) {
+            System.out.println("message is null");
+            return;
+        }
+        String s = message.toString();
+
+        // JBM TextMessage toString() doesn't print its contents, so tweak it
+        String text = null;
+        if (message instanceof TextMessage) {
+            try {
+                text = ((TextMessage)message).getText();
+                s += ": text=" + text;
+            } catch (JMSException ex) {
+                ex.printStackTrace();
+            }
+        }
+
+        System.out.println(s);
+
+        // sleep if long value is passed
+        try {
+            long sleep = Long.parseLong(text);
+            Thread.sleep(sleep);
+        } catch (Exception ignore) { }
+
+        // raise exception if requested (constant-first: text is null for non-text messages)
+        if ("exception".equalsIgnoreCase(text)) {
+            throw new RuntimeException("Exception requested.");
+        }
+
+        // call setRollbackOnly() (constant-first avoids NPE when text is null)
+        if ("rollback".equalsIgnoreCase(text)) {
+            context.setRollbackOnly();
+        }
+
+    }
+}
diff --git a/ejb3mdb/tmpQueue-service.xml b/ejb3mdb/tmpQueue-service.xml
new file mode 100644
index 0000000..602d137
--- /dev/null
+++ b/ejb3mdb/tmpQueue-service.xml
@@ -0,0 +1,9 @@
+<server>
+ <mbean code="org.jboss.jms.server.destination.QueueService"
+ name="jboss.messaging.destination:service=Queue,name=tmpQueue"
+ xmbean-dd="xmdesc/Queue-xmbean.xml">
+ <depends optional-attribute-name="ServerPeer">jboss.messaging:service=ServerPeer</depends>
+ <depends>jboss.messaging:service=PostOffice</depends>
+ </mbean>
+</server>
+
diff --git a/ejb3sfsb/pom.xml b/ejb3sfsb/pom.xml
new file mode 100644
index 0000000..a1e44f7
--- /dev/null
+++ b/ejb3sfsb/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb3sfsb</artifactId>
+ <packaging>jar</packaging>
+ <name>example-ejb3sfsb</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>client-eap510</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jbossall-client</artifactId>
+ <version>5.1.0.EAP</version>
+ <scope>system</scope>
+ <systemPath>${jboss510.home}/client/jbossall-client.jar</systemPath>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb3sfsb/run-client.sh b/ejb3sfsb/run-client.sh
new file mode 100644
index 0000000..567bfe5
--- /dev/null
+++ b/ejb3sfsb/run-client.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient-eap510 exec:java -Dexec.mainClass=jp.programmers.examples.ejb3.sfsb.HelloSFSBClient -Dexec.classpathScope=compile
diff --git a/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/Hello.java b/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/Hello.java
new file mode 100644
index 0000000..c5ff4d3
--- /dev/null
+++ b/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/Hello.java
@@ -0,0 +1,9 @@
+package jp.programmers.examples.ejb3.sfsb;
+
+public interface Hello {
+ public String hello();
+ public String hello(String name);
+ public String getLastMessage();
+ public void exception();
+ public void destroy();
+}
diff --git a/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSB.java b/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSB.java
new file mode 100644
index 0000000..8af4fb8
--- /dev/null
+++ b/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSB.java
@@ -0,0 +1,36 @@
+package jp.programmers.examples.ejb3.sfsb;
+
+import javax.ejb.Stateful;
+import javax.ejb.Remote;
+import javax.ejb.Remove;
+
+@Remote
+@Stateful
+public class HelloSFSB implements Hello {
+
+ private String lastMessage;
+
+ public String hello() {
+ System.out.println("HelloSFSB#hello()");
+ return this.hello("world");
+ }
+
+ public String hello(String name) {
+ System.out.println("HelloSFSB#hello(String)");
+ System.out.println("name=" + name);
+ lastMessage = "Hello " + name;
+ return lastMessage;
+ }
+
+ public String getLastMessage() {
+ return lastMessage;
+ }
+
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+ @Remove
+ public void destroy() {
+ }
+}
diff --git a/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSBClient.java b/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSBClient.java
new file mode 100644
index 0000000..bc95281
--- /dev/null
+++ b/ejb3sfsb/src/main/java/jp/programmers/examples/ejb3/sfsb/HelloSFSBClient.java
@@ -0,0 +1,30 @@
+package jp.programmers.examples.ejb3.sfsb;
+
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+
+public class HelloSFSBClient {
+
+ public static final String DEFAULT_PROVIDER_URL = "localhost:1099";
+
+ public static void main(String... args) throws Exception {
+ String providerUrl = DEFAULT_PROVIDER_URL;
+ if (args.length != 0) {
+ providerUrl = args[0];
+ }
+
+ String jndiName = "HelloSFSB/remote";
+ Properties props = new Properties();
+ props.put(Context.INITIAL_CONTEXT_FACTORY,
+ "org.jnp.interfaces.NamingContextFactory");
+ props.put(Context.URL_PKG_PREFIXES,
+ "org.jboss.naming:org.jnp.interfaces");
+ props.put(Context.PROVIDER_URL, providerUrl);
+ InitialContext context = new InitialContext(props);
+ Hello hello = (Hello)context.lookup(jndiName);
+ hello.hello();
+ hello.destroy();
+ }
+
+}
diff --git a/ejb3sfsb/src/main/resources/META-INF/ejb-jar.xml b/ejb3sfsb/src/main/resources/META-INF/ejb-jar.xml
new file mode 100644
index 0000000..92d099a
--- /dev/null
+++ b/ejb3sfsb/src/main/resources/META-INF/ejb-jar.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<ejb-jar
+ version="3.0"
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
+ http://java.sun.com/xml/ns/javaee/ejb-jar_3_0.xsd">
+ <!--
+ <enterprise-beans>
+ <session>
+ <ejb-name>HelloSFSB</ejb-name>
+ <ejb-class>jp.programmers.examples.ejb3.sfsb.HelloSFSB</ejb-class>
+ </session>
+ </enterprise-beans>
+ -->
+</ejb-jar>
diff --git a/ejb3sfsb/src/main/resources/META-INF/jboss.xml b/ejb3sfsb/src/main/resources/META-INF/jboss.xml
new file mode 100644
index 0000000..2cacaf4
--- /dev/null
+++ b/ejb3sfsb/src/main/resources/META-INF/jboss.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<jboss version="5.0"
+ xmlns="http://www.jboss.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.jboss.com/xml/ns/javaee
+ http://www.jboss.org/j2ee/schema/jboss_5_0.xsd">
+ <!--
+ <enterprise-beans>
+ <session>
+ <ejb-name>HelloSFSB</ejb-name>
+ </session>
+ </enterprise-beans>
+ -->
+</jboss>
diff --git a/ejb3sfsb/src/main/resources/client-log4j.xml b/ejb3sfsb/src/main/resources/client-log4j.xml
new file mode 100644
index 0000000..efcfd67
--- /dev/null
+++ b/ejb3sfsb/src/main/resources/client-log4j.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
+
+ <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
+ <param name="File" value="client.log"/>
+ <param name="Append" value="false"/>
+ <param name="DatePattern" value="'.'yyyy-MM-dd"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
+ </layout>
+ </appender>
+
+ <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
+ </layout>
+ </appender>
+
+ <root>
+ <level value="ALL"/>
+ <appender-ref ref="CONSOLE"/>
+<!--
+ <appender-ref ref="FILE"/>
+-->
+ </root>
+
+</log4j:configuration>
diff --git a/ejb3slsb/pom.xml b/ejb3slsb/pom.xml
new file mode 100644
index 0000000..135eae2
--- /dev/null
+++ b/ejb3slsb/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-ejb3slsb</artifactId>
+ <packaging>jar</packaging>
+ <name>example-ejb3slsb</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss.javaee</groupId>
+ <artifactId>jboss-javaee</artifactId>
+ <version>5.0.0.GA</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>client-eap510</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.jboss</groupId>
+ <artifactId>jbossall-client</artifactId>
+ <version>5.1.0.EAP</version>
+ <scope>system</scope>
+ <systemPath>${jboss510.home}/client/jbossall-client.jar</systemPath>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/ejb3slsb/run-client.sh b/ejb3slsb/run-client.sh
new file mode 100644
index 0000000..84c539a
--- /dev/null
+++ b/ejb3slsb/run-client.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -Pclient-eap510 exec:java -Dexec.mainClass=jp.programmers.examples.ejb3.slsb.HelloSLSBClient -Dexec.classpathScope=compile
diff --git a/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/Hello.java b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/Hello.java
new file mode 100644
index 0000000..f337c7e
--- /dev/null
+++ b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/Hello.java
@@ -0,0 +1,11 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import javax.ejb.Timer;
+
+public interface Hello {
+ public String hello();
+ public String hello(String name);
+ public void ejbTimeout(Timer timer);
+ public void initTimer();
+ public void exception();
+}
diff --git a/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
new file mode 100644
index 0000000..d77d1d0
--- /dev/null
+++ b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSB.java
@@ -0,0 +1,43 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import javax.ejb.Local;
+import javax.ejb.Remote;
+import javax.ejb.Stateless;
+import javax.ejb.Timeout;
+import javax.ejb.Timer;
+import javax.ejb.SessionContext;
+import javax.annotation.Resource;
+
+@Remote
+@Stateless
+public class HelloSLSB implements Hello {
+
+ @Resource
+ SessionContext ctx;
+
+ public String hello() {
+ System.out.println("HelloSLSB#hello()");
+ return this.hello("world");
+ }
+
+ public String hello(String name) {
+ System.out.println("HelloSLSB#hello(String)");
+ System.out.println("name=" + name);
+ return "Hello " + name;
+ }
+
+ @Timeout
+ public void ejbTimeout(Timer timer) {
+ System.out.println("HelloSLSB#ejbTimeout(Timer)");
+ System.out.println("timer=" + timer);
+ }
+
+ public void initTimer() {
+ ctx.getTimerService().createTimer(0, 20 * 1000, null);
+ }
+
+ public void exception() {
+ throw new RuntimeException();
+ }
+
+}
diff --git a/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
new file mode 100644
index 0000000..2373b86
--- /dev/null
+++ b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBClient.java
@@ -0,0 +1,29 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+
+public class HelloSLSBClient {
+
+ public static final String DEFAULT_PROVIDER_URL = "localhost:1099";
+
+ public static void main(String... args) throws Exception {
+ String providerUrl = DEFAULT_PROVIDER_URL;
+ if (args.length != 0) {
+ providerUrl = args[0];
+ }
+
+ String jndiName = "HelloSLSB/remote";
+ Properties props = new Properties();
+ props.put(Context.INITIAL_CONTEXT_FACTORY,
+ "org.jnp.interfaces.NamingContextFactory");
+ props.put(Context.URL_PKG_PREFIXES,
+ "org.jboss.naming:org.jnp.interfaces");
+ props.put(Context.PROVIDER_URL, providerUrl);
+ InitialContext context = new InitialContext(props);
+ Hello hello = (Hello)context.lookup(jndiName);
+ hello.hello();
+ }
+
+}
diff --git a/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
new file mode 100644
index 0000000..f905571
--- /dev/null
+++ b/ejb3slsb/src/main/java/jp/programmers/examples/ejb3/slsb/HelloSLSBLoadClient.java
@@ -0,0 +1,35 @@
+package jp.programmers.examples.ejb3.slsb;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ExecutorService;
+import java.util.Properties;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+
+public class HelloSLSBLoadClient {
+
+ public static final String DEFAULT_PROVIDER_URL = "localhost:1099";
+
+ public static void main(String... args) throws Exception {
+ String providerUrl = DEFAULT_PROVIDER_URL;
+ if (args.length != 0) {
+ providerUrl = args[0];
+ }
+
+ final String param = providerUrl;
+ ExecutorService ex = Executors.newCachedThreadPool();
+ Runnable r = new Runnable() {
+ public void run() {
+ try {
+ HelloSLSBClient.main(param);
+ } catch (Exception ignore) {
+ }
+ }
+ };
+ for (int i = 0; i < 10; i++) {
+ ex.execute(r);
+ }
+ ex.shutdown();
+ }
+
+}
diff --git a/ejb3slsb/src/main/resources/META-INF/ejb-jar.xml b/ejb3slsb/src/main/resources/META-INF/ejb-jar.xml
new file mode 100644
index 0000000..72ceff8
--- /dev/null
+++ b/ejb3slsb/src/main/resources/META-INF/ejb-jar.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<ejb-jar
+ version="3.0"
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
+ http://java.sun.com/xml/ns/javaee/ejb-jar_3_0.xsd">
+ <!--
+ <enterprise-beans>
+ <session>
+ <ejb-name>HelloSLSB</ejb-name>
+ <ejb-class>jp.programmers.examples.ejb3.slsb.HelloSLSB</ejb-class>
+ </session>
+ </enterprise-beans>
+ -->
+</ejb-jar>
diff --git a/ejb3slsb/src/main/resources/META-INF/jboss.xml b/ejb3slsb/src/main/resources/META-INF/jboss.xml
new file mode 100644
index 0000000..edbf363
--- /dev/null
+++ b/ejb3slsb/src/main/resources/META-INF/jboss.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<jboss version="5.0"
+ xmlns="http://www.jboss.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.jboss.com/xml/ns/javaee
+ http://www.jboss.org/j2ee/schema/jboss_5_0.xsd">
+ <!--
+ <enterprise-beans>
+ <session>
+ <ejb-name>HelloSLSB</ejb-name>
+ </session>
+ </enterprise-beans>
+ -->
+</jboss>
diff --git a/ejb3slsb/src/main/resources/client-log4j.xml b/ejb3slsb/src/main/resources/client-log4j.xml
new file mode 100644
index 0000000..efcfd67
--- /dev/null
+++ b/ejb3slsb/src/main/resources/client-log4j.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
+
+ <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
+ <param name="File" value="client.log"/>
+ <param name="Append" value="false"/>
+ <param name="DatePattern" value="'.'yyyy-MM-dd"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
+ </layout>
+ </appender>
+
+ <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d %-5p [%c] (%t) %m%n"/>
+ </layout>
+ </appender>
+
+ <root>
+ <level value="ALL"/>
+ <appender-ref ref="CONSOLE"/>
+<!--
+ <appender-ref ref="FILE"/>
+-->
+ </root>
+
+</log4j:configuration>
diff --git a/elaopmetrics/README b/elaopmetrics/README
new file mode 100644
index 0000000..657baad
--- /dev/null
+++ b/elaopmetrics/README
@@ -0,0 +1,3 @@
+Edit build.properties and run:
+
+$ ant deploy
diff --git a/elaopmetrics/build.properties b/elaopmetrics/build.properties
new file mode 100644
index 0000000..78d991a
--- /dev/null
+++ b/elaopmetrics/build.properties
@@ -0,0 +1,2 @@
+jboss.home=
+jboss.server.config=default
diff --git a/elaopmetrics/build.xml b/elaopmetrics/build.xml
new file mode 100644
index 0000000..1020a9d
--- /dev/null
+++ b/elaopmetrics/build.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+
+<project name="elaopmetrics" default="jar" basedir=".">
+
+ <property file="${basedir}/build.properties" />
+
+ <property name="src.dir" value="${basedir}/src" />
+ <property name="main.dir" value="${src.dir}/main" />
+ <property name="java.dir" value="${main.dir}/java" />
+ <property name="resources.dir" value="${main.dir}/resources" />
+ <property name="target.dir" value="./target" />
+ <property name="classes.dir" value="${target.dir}/classes" />
+
+
+ <path id="build.classpath">
+ <pathelement path="${classes.dir}" />
+ <pathelement path="${jboss.home}/server/${jboss.server.config}/deploy/jboss-aop-jdk50.deployer/jboss-aop-jdk50.jar" />
+ </path>
+
+ <target name="compile">
+ <mkdir dir="${classes.dir}" />
+ <copy todir="${classes.dir}">
+ <fileset dir="${resources.dir}" />
+ </copy>
+ <javac srcdir="${src.dir}"
+ destdir="${classes.dir}"
+ encoding="UTF-8"
+ debug="on"
+ optimize="off">
+ <classpath refid="build.classpath" />
+ </javac>
+ </target>
+
+ <target name="jar" depends="compile">
+ <jar jarfile="${target.dir}/${ant.project.name}.aop">
+ <fileset dir="${classes.dir}" />
+ </jar>
+ </target>
+
+ <target name="deploy" depends="jar">
+ <copy todir="${jboss.home}/server/${jboss.server.config}/deploy/"
+ file="${target.dir}/${ant.project.name}.aop" />
+ </target>
+
+ <target name="clean">
+ <delete dir="${target.dir}" />
+ </target>
+
+</project>
diff --git a/elaopmetrics/src/main/java/org/jboss/injbossaop/ValueExpressionMetrics.java b/elaopmetrics/src/main/java/org/jboss/injbossaop/ValueExpressionMetrics.java
new file mode 100644
index 0000000..c79b948
--- /dev/null
+++ b/elaopmetrics/src/main/java/org/jboss/injbossaop/ValueExpressionMetrics.java
@@ -0,0 +1,26 @@
+package org.jboss.injbossaop;
+
+import org.jboss.aop.advice.Interceptor;
+import org.jboss.aop.joinpoint.Invocation;
+import org.jboss.aop.joinpoint.MethodInvocation;
+
+public class ValueExpressionMetrics implements Interceptor {
+
+ public String getName() {
+ return "ValueExpressionMetrics";
+ }
+
+ public Object invoke(Invocation invocation) throws Throwable {
+ System.out.println("*****ExpressionMetrics:invoke");
+ long startTime = System.currentTimeMillis();
+ try {
+ return invocation.invokeNext();
+ } finally {
+ long endTime = System.currentTimeMillis() - startTime;
+ java.lang.reflect.Method m = ((MethodInvocation) invocation).getMethod();
+ System.out.println("method " + m.toString() + " time: " + endTime + "ms");
+ }
+
+ }
+
+}
diff --git a/elaopmetrics/src/main/resources/META-INF/jboss-aop.xml b/elaopmetrics/src/main/resources/META-INF/jboss-aop.xml
new file mode 100644
index 0000000..6d25195
--- /dev/null
+++ b/elaopmetrics/src/main/resources/META-INF/jboss-aop.xml
@@ -0,0 +1,5 @@
+<aop>
+ <bind pointcut="execution(public java.lang.Object org.jboss.el.ValueExpressionImpl->getValue(..))">
+ <interceptor class="org.jboss.injbossaop.ValueExpressionMetrics"/>
+ </bind>
+</aop>
diff --git a/jab/pom.xml b/jab/pom.xml
new file mode 100644
index 0000000..573c7be
--- /dev/null
+++ b/jab/pom.xml
@@ -0,0 +1,50 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers</groupId>
+ <artifactId>jab</artifactId>
+ <name>jab</name>
+ <version>1.0</version>
+ <packaging>jar</packaging>
+
+
+
+ <repositories>
+ <repository>
+ <id>jenkins-ci.org</id>
+ <name>Jenkins CI</name>
+ <url>http://maven.jenkins-ci.org/content/repositories/releases/</url>
+ </repository>
+ </repositories>
+
+ <dependencies>
+ <dependency>
+ <groupId>args4j</groupId>
+ <artifactId>args4j</artifactId>
+ <version>2.0.17</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.5</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/jab/run.sh b/jab/run.sh
new file mode 100644
index 0000000..b483119
--- /dev/null
+++ b/jab/run.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+ulimit -u 8192
+
+# $ mvn package && sh run.sh -w -c 400 -n 100000 http://localhost:8080/
+
+mvn exec:java -Dexec.mainClass=jp.programmers.jab.JAB -Dexec.args="$*"
diff --git a/jab/src/main/java/jp/programmers/jab/JAB.java b/jab/src/main/java/jp/programmers/jab/JAB.java
new file mode 100644
index 0000000..5b12eb2
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/JAB.java
@@ -0,0 +1,155 @@
+package jp.programmers.jab;
+
+import static org.kohsuke.args4j.ExampleMode.ALL;
+import org.kohsuke.args4j.CmdLineParser;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.CountDownLatch;
+import java.util.List;
+import java.util.Iterator;
+import java.net.URL;
+import java.net.URLConnection;
+import java.net.HttpURLConnection;
+import java.io.BufferedInputStream;
+
+public class JAB {
+
+ public static void main(String[] args) throws Exception {
+ JABOptions options = new JABOptions();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+ URL url = null;
+ if (options.getArguments().isEmpty()) {
+ System.err.println("Example: JAB" + parser.printExample(ALL));
+ System.exit(-1);
+ } else {
+ url = new URL(options.getArguments().get(0));
+ }
+ ExecutorService executor =
+ createExecutor(options.getConcurrency());
+ int num = options.getNum();
+ CountDownLatch numLatch = new CountDownLatch(num);
+ SimpleURLConnectionTask task =
+ new SimpleURLConnectionTask(url);
+ task.setLatch(numLatch);
+ if (options.isWarmup()) {
+ System.out.println("Warming up");
+ for (int i = 0; i < num; i++) {
+ executor.submit(task);
+ }
+ numLatch.await(30, TimeUnit.SECONDS);
+ reset();
+ System.out.println("Done warming up");
+ }
+ task.setLatch(null);
+ System.out.println("Testing");
+ long start = System.currentTimeMillis();
+ for (int i = 0; i < num; i++) {
+ executor.submit(task);
+ }
+ executor.shutdown();
+ executor.awaitTermination(30, TimeUnit.SECONDS);
+ long end = System.currentTimeMillis();
+ System.out.println("Done testing, time=" + (end - start) + "ms");
+ report();
+ }
+
+ public static AtomicInteger successCount = new AtomicInteger(0);
+ public static AtomicInteger errorCount = new AtomicInteger(0);
+ public static List<Long> times = new CopyOnWriteArrayList<Long>();
+ //public static List<Long> times =
+ // Collections.synchronizedList(new ArrayList<Long>());
+
+ public static void success(long time) {
+ successCount.incrementAndGet();
+ times.add(time);
+ }
+
+ public static void error() {
+ errorCount.incrementAndGet();
+ }
+
+ public static void reset() {
+ successCount.set(0);
+ errorCount.set(0);
+ times.clear();
+ }
+
+ public static void report() {
+ long total = 0;
+ for (Iterator<Long> it = times.iterator(); it.hasNext(); ) {
+ total += it.next();
+ }
+ double average = total / (double)successCount.get();
+ System.out.println("successCount=" + successCount);
+ System.out.println("errorCount=" + errorCount);
+ System.out.println("average=" + average);
+ }
+
+ private static ExecutorService createExecutor(int num) throws Exception {
+ ExecutorService executor =
+ Executors.newFixedThreadPool(num);
+ final CountDownLatch latch = new CountDownLatch(1);
+ Runnable initTask = new Runnable() {
+ public void run() {
+ try {
+ latch.await();
+ } catch (Exception ignore) { }
+ }
+ };
+ for (int i = 0; i < num; i++) {
+ executor.submit(initTask);
+ }
+ latch.countDown();
+ return executor;
+ }
+
+ public static class SimpleURLConnectionTask implements Runnable {
+ URL url;
+ CountDownLatch latch;
+ public SimpleURLConnectionTask(URL url) {
+ this.url = url;
+ }
+ public void setLatch(CountDownLatch latch) {
+ this.latch = latch;
+ }
+ public void run() {
+ long start = System.currentTimeMillis();
+ try {
+ URLConnection conn = url.openConnection();
+ BufferedInputStream in =
+ new BufferedInputStream(conn.getInputStream());
+ try {
+ if (conn instanceof HttpURLConnection) {
+ int responseCode = ((HttpURLConnection)conn).getResponseCode();
+ if (responseCode < 200 || 299 < responseCode) {
+ error();
+ return;
+ }
+ }
+ while (true) {
+ int r = in.read();
+ if (r == -1) {
+ // EOF
+ break;
+ }
+ }
+ } finally {
+ in.close();
+ }
+ } catch (Throwable t) {
+ error();
+ return;
+ } finally {
+ if (latch != null) {
+ latch.countDown();
+ }
+ }
+ long end = System.currentTimeMillis();
+ success(end - start);
+ }
+ }
+}
diff --git a/jab/src/main/java/jp/programmers/jab/JABOptions.java b/jab/src/main/java/jp/programmers/jab/JABOptions.java
new file mode 100644
index 0000000..66a6209
--- /dev/null
+++ b/jab/src/main/java/jp/programmers/jab/JABOptions.java
@@ -0,0 +1,53 @@
+package jp.programmers.jab;
+
+import org.kohsuke.args4j.Option;
+import org.kohsuke.args4j.Argument;
+import java.util.List;
+import java.util.ArrayList;
+
+public class JABOptions {
+
+ @Option(name="-n",usage="Number of requests")
+ private int num = 1;
+
+ @Option(name="-c",usage="Concurrency, number of threads")
+ private int concurrency = 1;
+
+ @Option(name="-w",usage="Run twice, warm up run and actual benchmark run")
+ private boolean warmup = false;
+
+ @Argument
+ private List<String> arguments = new ArrayList<String>();
+
+ public int getNum() {
+ return num;
+ }
+
+ public void setNum(int num) {
+ this.num = num;
+ }
+
+ public int getConcurrency() {
+ return concurrency;
+ }
+
+ public void setConcurrency(int concurrency) {
+ this.concurrency = concurrency;
+ }
+
+ public boolean isWarmup() {
+ return warmup;
+ }
+
+ public void setWarmup(boolean warmup) {
+ this.warmup = warmup;
+ }
+
+ public List<String> getArguments() {
+ return arguments;
+ }
+
+ public void setArguments(List<String> arguments) {
+ this.arguments = arguments;
+ }
+}
diff --git a/jboss-debug/README.txt b/jboss-debug/README.txt
new file mode 100644
index 0000000..d2cb9f7
--- /dev/null
+++ b/jboss-debug/README.txt
@@ -0,0 +1,31 @@
+* What is this project
+
+This project aims to be a collection of classes which are useful for debugging JBoss Application Server.
+
+- aop/MethodCallLoggingInterceptor
+
+This is a JBoss AOP Interceptor implementation which generates start/end log on every call.
+
+- tx/*
+
+Mock XAResource implementation which works with JBoss Transactions. It stores prepared transaction information into files under the data dir, so you can use it for crash recovery tests.
+
+- log4j/DumpStackTraceFilter
+
+Dump a stacktrace when log4j received specified log message.
+
+* Building
+
+Edit the build.properties and run ant command.
+
+* How to use
+
+Copy the target/jboss-debug.jar into the $JBOSS_HOME/server/$JBOSS_SERVER_CONFIG/lib/ directory. You can use the "ant deploy" command, which copies the jar file into the lib dir.
+
+- aop/MethodCallLoggingInterceptor
+
+For EJB3, you just modify deploy/ejb3-interceptors-aop.xml to enable this interceptor. For other classes, you need to enable JBoss AOP load-time instrumentation and deploy an -aop.xml file. An example AOP definition file is available at etc/example-aop.xml.
+
+- log4j/DumpStackTraceFilter
+
+Example is available at etc/example-log4j.xml. Copy it into your log4j appender definition.
diff --git a/jboss-debug/build.properties b/jboss-debug/build.properties
new file mode 100644
index 0000000..86fd815
--- /dev/null
+++ b/jboss-debug/build.properties
@@ -0,0 +1 @@
+jboss.home=
diff --git a/jboss-debug/build.xml b/jboss-debug/build.xml
new file mode 100644
index 0000000..5b90a31
--- /dev/null
+++ b/jboss-debug/build.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0"?>
+
+<project name="jboss-debug" default="jar" basedir=".">
+
+ <property file="${basedir}/build.properties" />
+
+ <property name="src.dir" value="${basedir}/src" />
+ <property name="main.dir" value="${src.dir}/main" />
+ <property name="java.dir" value="${main.dir}/java" />
+ <property name="target.dir" value="./target" />
+ <property name="classes.dir" value="${target.dir}/classes" />
+
+
+ <path id="build.classpath">
+ <pathelement path="${classes.dir}" />
+ <pathelement path="${jboss.home}/lib/jboss-logging-spi.jar" />
+ <pathelement path="${jboss.home}/lib/jboss-aop.jar" />
+ <pathelement path="${jboss.home}/common/lib/jbossjts.jar" />
+ <pathelement path="${jboss.home}/common/lib/log4j.jar" />
+ </path>
+
+ <target name="compile">
+ <mkdir dir="${classes.dir}" />
+ <javac srcdir="${src.dir}"
+ destdir="${classes.dir}"
+ encoding="UTF-8"
+ debug="on"
+ optimize="off">
+ <classpath refid="build.classpath" />
+ </javac>
+ </target>
+
+ <target name="jar" depends="compile">
+ <jar jarfile="${target.dir}/${ant.project.name}.jar">
+ <fileset dir="${classes.dir}" />
+ </jar>
+ </target>
+
+ <target name="deploy" depends="jar">
+ <copy file="${target.dir}/${ant.project.name}.jar" todir="${jboss.home}/server/${jboss.server.config}/lib" />
+ </target>
+
+ <target name="clean">
+ <delete dir="${target.dir}" />
+ </target>
+
+</project>
diff --git a/jboss-debug/etc/example-aop.xml b/jboss-debug/etc/example-aop.xml
new file mode 100644
index 0000000..a48449a
--- /dev/null
+++ b/jboss-debug/etc/example-aop.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE aop PUBLIC
+ "-//JBoss//DTD JBOSS AOP 1.0//EN"
+ "http://labs.jboss.com/portal/jbossaop/dtd/jboss-aop_1_0.dtd">
+
+<!--
+ You can apply interceptors included in this project:
+ 1. to your application (deploy with -aop.xml)
+ 2. to ejb3 (edit ejb3-interceptors-aop.xml)
+ -->
+
+<aop>
+ <typedef name="seam" expr="class(org.jboss.seam.*)"/>
+ <bind pointcut="execution(* $typedef{seam}->*(..))">
+ <interceptor class="com.redhat.jboss.support.debug.aop.MethodCallLoggingInterceptor"/>
+ </bind>
+</aop>
diff --git a/jboss-debug/etc/example-log4j.xml b/jboss-debug/etc/example-log4j.xml
new file mode 100644
index 0000000..63c2ddf
--- /dev/null
+++ b/jboss-debug/etc/example-log4j.xml
@@ -0,0 +1,3 @@
+<filter class="com.redhat.jboss.support.debug.log4j.DumpStackTraceFilter">
+ <param name="StringToMatch" value="Log message to match" />
+</filter>
diff --git a/jboss-debug/etc/example-txrecovery.xml b/jboss-debug/etc/example-txrecovery.xml
new file mode 100644
index 0000000..15b0998
--- /dev/null
+++ b/jboss-debug/etc/example-txrecovery.xml
@@ -0,0 +1,2 @@
+ <property name="com.arjuna.ats.jta.recovery.XAResourceRecovery.MOCK1"
+ value="com.redhat.jboss.support.debug.tx.MockXAResourceRecovery"/>
diff --git a/jboss-debug/etc/txtest.bsh b/jboss-debug/etc/txtest.bsh
new file mode 100644
index 0000000..7d8d3b6
--- /dev/null
+++ b/jboss-debug/etc/txtest.bsh
@@ -0,0 +1,22 @@
+start() {
+ try {
+ tm = new javax.naming.InitialContext().lookup("java:TransactionManager");
+ try {
+ // Clear existing tx
+ tm.rollback();
+ } catch (ignore) { }
+ tm.begin();
+ t = tm.getTransaction();
+ res1 = new com.redhat.jboss.support.debug.tx.MockXAResource();
+ res1.resourceId = 1;
+ res2 = new com.redhat.jboss.support.debug.tx.MockXAResource();
+ res2.resourceId = 2;
+ res2.exceptionInCommit = true;
+ res2.exceptionErrorCode = javax.transaction.xa.XAException.XAER_RMERR;
+ t.enlistResource(res1);
+ t.enlistResource(res2);
+ tm.commit();
+ } catch (ex) {
+ ex.printStackTrace();
+ }
+}
diff --git a/jboss-debug/src/main/java/com/redhat/jboss/support/debug/aop/MethodCallLoggingInterceptor.java b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/aop/MethodCallLoggingInterceptor.java
new file mode 100644
index 0000000..d8e81a2
--- /dev/null
+++ b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/aop/MethodCallLoggingInterceptor.java
@@ -0,0 +1,35 @@
+package com.redhat.jboss.support.debug.aop;
+
+import org.jboss.logging.Logger;
+import org.jboss.aop.advice.Interceptor;
+import org.jboss.aop.joinpoint.Invocation;
+import org.jboss.aop.joinpoint.MethodInvocation;
+
+public class MethodCallLoggingInterceptor implements Interceptor {
+
+ private static Logger log =
+ Logger.getLogger(MethodCallLoggingInterceptor.class);
+
+ public String getName() {
+ return "MethodCallLoggingInterceptor";
+ }
+
+ public Object invoke(Invocation invocation) throws Throwable {
+ if (invocation instanceof MethodInvocation &&
+ log.isInfoEnabled()) {
+ MethodInvocation mi = (MethodInvocation)invocation;
+ String cname = mi.getMethod().getDeclaringClass().getName();
+ String mname = mi.getMethod().getName();
+ String s = cname + "#" + mname + "()";
+ log.info("Start: " + s);
+ try {
+ return invocation.invokeNext();
+ } finally {
+ log.info("End: " + s);
+ }
+ } else {
+ return invocation.invokeNext();
+ }
+ }
+
+}
diff --git a/jboss-debug/src/main/java/com/redhat/jboss/support/debug/log4j/DumpStackTraceFilter.java b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/log4j/DumpStackTraceFilter.java
new file mode 100644
index 0000000..58124d6
--- /dev/null
+++ b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/log4j/DumpStackTraceFilter.java
@@ -0,0 +1,30 @@
+package com.redhat.jboss.support.debug.log4j;
+
+import org.apache.log4j.Logger;
+import org.apache.log4j.spi.Filter;
+import org.apache.log4j.spi.LoggingEvent;
+
+public class DumpStackTraceFilter extends Filter {
+ private static Logger log = Logger.getLogger(DumpStackTraceFilter.class);
+ private String stringToMatch = null;
+ public void setStringToMatch(String stringToMatch) {
+ this.stringToMatch = stringToMatch;
+ }
+ public String getStringToMatch() {
+ return stringToMatch;
+ }
+ public int decide(LoggingEvent event) {
+ String msg = event.getRenderedMessage();
+ if (msg == null || stringToMatch == null) {
+ return Filter.NEUTRAL;
+ }
+ // Avoid cyclic calls
+ if (event.getLoggerName().equals(DumpStackTraceFilter.class.getName())) {
+ return Filter.NEUTRAL;
+ }
+ if (msg.indexOf(stringToMatch) != -1) {
+ log.info("Found message: " + msg, new Exception());
+ }
+ return Filter.NEUTRAL;
+ }
+}
diff --git a/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/MockXAResource.java b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/MockXAResource.java
new file mode 100644
index 0000000..f4c265e
--- /dev/null
+++ b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/MockXAResource.java
@@ -0,0 +1,202 @@
+package com.redhat.jboss.support.debug.tx;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.ObjectInputStream;
+import java.io.BufferedInputStream;
+import java.io.FileOutputStream;
+import java.io.ObjectOutputStream;
+import java.io.BufferedOutputStream;
+import java.util.Arrays;
+
+import javax.transaction.xa.Xid;
+import javax.transaction.xa.XAResource;
+import javax.transaction.xa.XAException;
+import org.jboss.logging.Logger;
+
+public class MockXAResource implements XAResource {
+
+ private static Logger log = Logger.getLogger(MockXAResource.class);
+
+ public int resourceId = 0;
+ public boolean crashInPrepare = false;
+ public boolean crashInRollback = false;
+ public boolean crashInCommit = false;
+ public boolean exceptionInPrepare = false;
+ public boolean exceptionInEnd = false;
+ public boolean exceptionInRollback = false;
+ public boolean exceptionInCommit = false;
+ public boolean exceptionInRecover = false;
+ public boolean hangInPrepare = false;
+ public boolean hangInCommit = false;
+ public boolean logException = true;
+ public int exceptionErrorCode = -1;
+ private int transactionTimeout = 30000;
+
+ public void start(Xid xid, int flags) throws XAException {
+ log.info("start('" + xid + "', " + flags + ')');
+ }
+
+ public void end(Xid xid, int flags) throws XAException {
+ log.info("end('" + xid + "', " + flags + ')');
+ if (exceptionInEnd) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ }
+
+ public int prepare(Xid xid) throws XAException {
+ log.info("prepare('" + xid + "')");
+ if (crashInPrepare) {
+ Runtime.getRuntime().halt(0);
+ }
+ if (exceptionInPrepare) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ if (hangInPrepare) {
+ try {
+ Thread.sleep(Integer.MAX_VALUE);
+ } catch (InterruptedException ignore) { }
+ }
+ // Save the xid before return XA_OK
+ saveRecoverXid(xid);
+ return XAResource.XA_OK;
+ }
+
+ public void commit(Xid xid, boolean onePhase) throws XAException {
+ log.info("commit('" + xid + "', " + onePhase + ')');
+ if (crashInCommit) {
+ Runtime.getRuntime().halt(0);
+ }
+ if (exceptionInCommit) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ if (hangInCommit) {
+ try {
+ Thread.sleep(Integer.MAX_VALUE);
+ } catch (InterruptedException ignore) { }
+ }
+        // Commit succeeds, so delete the xid from the data dir
+ deleteRecoverXid(xid);
+ }
+
+ public void rollback(Xid xid) throws XAException {
+ log.info("rollback('" + xid + "')");
+ if (crashInRollback) {
+ Runtime.getRuntime().halt(0);
+ }
+ if (exceptionInRollback) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ deleteRecoverXid(xid);
+ }
+
+ public void forget(Xid xid) throws XAException {
+ log.trace("forget('" + xid + "')");
+ }
+
+ public Xid[] recover(int flags) throws XAException {
+ log.info("recover('" + flags + "')");
+ if (exceptionInRecover) {
+ XAException ex = new XAException(exceptionErrorCode);
+ if (logException) {
+ log.info("logException", ex);
+ }
+ throw ex;
+ }
+ Xid[] result = loadRecoverXids();
+ log.info("recover() returns: " + Arrays.asList(result));
+ return result;
+ }
+
+ public boolean isSameRM(XAResource xaResource) throws XAException {
+ boolean result =
+ this.resourceId == ((MockXAResource)xaResource).resourceId;
+ log.info("isSameRM() returns: " + result);
+ return result;
+ }
+
+ public int getTransactionTimeout() throws XAException {
+ return transactionTimeout;
+ }
+
+ public boolean setTransactionTimeout(int transactionTimeout) throws XAException {
+ this.transactionTimeout = transactionTimeout;
+ return true;
+ }
+
+
+ private Xid[] loadRecoverXids() {
+ File dataDir = new File(System.getProperty("jboss.server.data.dir"));
+ File debugDir = new File(dataDir, "debug");
+ if (!debugDir.exists()) {
+ return new Xid[0];
+ }
+ File[] files = debugDir.listFiles();
+ Xid[] xids = new Xid[files.length];
+ try {
+ for (int i = 0; i < files.length; i++) {
+ File f = files[i];
+ ObjectInputStream ois =
+ new ObjectInputStream(
+ new BufferedInputStream(
+ new FileInputStream(f)));
+ Object o = ois.readObject();
+ ois.close();
+ xids[i] = (Xid)o;
+ }
+ } catch (Exception ex) {
+ throw new RuntimeException("Error when load recoverXid", ex);
+ }
+ return xids;
+ }
+
+ private void saveRecoverXid(Xid xid) {
+ log.info("saveRecoverXid(): " + xid);
+ File dataDir = new File(System.getProperty("jboss.server.data.dir"));
+ File debugDir = new File(dataDir, "debug");
+ if (!debugDir.exists()) {
+ debugDir.mkdir();
+ }
+ File file = new File(debugDir, xid.toString());
+ try {
+ ObjectOutputStream oos =
+ new ObjectOutputStream(
+ new BufferedOutputStream(
+ new FileOutputStream(file)));
+ oos.writeObject(xid);
+ oos.flush();
+ oos.close();
+ } catch (Exception ex) {
+ throw new RuntimeException("Error when save recoverXid", ex);
+ }
+ }
+
+ private void deleteRecoverXid(Xid xid) {
+ log.info("deleteRecoverXid(): " + xid);
+ File dataDir = new File(System.getProperty("jboss.server.data.dir"));
+ File debugDir = new File(dataDir, "debug");
+ File file = new File(debugDir, xid.toString());
+ if (file.exists()) {
+ boolean success = file.delete();
+ if (!success) {
+ log.warn("Failed to delete recoverXid: " + file);
+ }
+ }
+ }
+}
diff --git a/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/MockXAResourceRecovery.java b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/MockXAResourceRecovery.java
new file mode 100644
index 0000000..44cc8ad
--- /dev/null
+++ b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/MockXAResourceRecovery.java
@@ -0,0 +1,31 @@
+package com.redhat.jboss.support.debug.tx;
+
+import com.arjuna.ats.jta.recovery.XAResourceRecovery;
+import javax.transaction.xa.XAResource;
+import org.jboss.logging.Logger;
+
+public class MockXAResourceRecovery implements XAResourceRecovery {
+
+ private static Logger log = Logger.getLogger(MockXAResourceRecovery.class);
+ private boolean hasMoreResources = false;
+
+ public MockXAResourceRecovery() {
+ log.info("MockXAResourceRecovery");
+ }
+
+ public XAResource getXAResource() {
+ log.info("getXAResource()");
+ return new MockXAResource();
+ }
+
+ public boolean initialise(String p) {
+ log.info("initialise(" + p + ")");
+ return true;
+ }
+
+ public boolean hasMoreResources() {
+ hasMoreResources = !hasMoreResources;
+ return hasMoreResources;
+ }
+
+}
diff --git a/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/SerializableMockXAResource.java b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/SerializableMockXAResource.java
new file mode 100644
index 0000000..cd5b393
--- /dev/null
+++ b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/SerializableMockXAResource.java
@@ -0,0 +1,7 @@
+package com.redhat.jboss.support.debug.tx;
+
+import java.io.Serializable;
+
+public class SerializableMockXAResource extends MockXAResource implements Serializable {
+
+}
diff --git a/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/XAResourceWrapper.java b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/XAResourceWrapper.java
new file mode 100644
index 0000000..3e87d3c
--- /dev/null
+++ b/jboss-debug/src/main/java/com/redhat/jboss/support/debug/tx/XAResourceWrapper.java
@@ -0,0 +1,71 @@
+package com.redhat.jboss.support.debug.tx;
+
+import java.io.Serializable;
+import javax.transaction.xa.Xid;
+import javax.transaction.xa.XAResource;
+import javax.transaction.xa.XAException;
+import org.jboss.logging.Logger;
+
+/*
+ * NOTE: This XAResource wrapper class is currently not used from other classes.
+ */
+public class XAResourceWrapper implements XAResource {
+
+ private static Logger log = Logger.getLogger(XAResourceWrapper.class);
+ private XAResource res;
+
+ public XAResourceWrapper(XAResource res) {
+ this.res = res;
+ }
+
+ public void start(Xid xid, int flags) throws XAException {
+ log.info("start('" + xid + "', " + flags + ')');
+ res.start(xid, flags);
+ }
+
+ public void end(Xid xid, int flags) throws XAException {
+ log.info("end('" + xid + "', " + flags + ')');
+ res.end(xid, flags);
+ }
+
+ public int prepare(Xid xid) throws XAException {
+ log.info("prepare('" + xid + "')");
+ return res.prepare(xid);
+ }
+
+ public void commit(Xid xid, boolean onePhase) throws XAException {
+ log.info("commit('" + xid + "', " + onePhase + ')');
+ res.commit(xid, onePhase);
+ }
+
+ public void rollback(Xid xid) throws XAException {
+ log.info("rollback('" + xid + "')");
+ res.rollback(xid);
+ }
+
+ public void forget(Xid xid) throws XAException {
+ log.info("forget('" + xid + "')");
+ res.forget(xid);
+ }
+
+ public Xid[] recover(int flags) throws XAException {
+ log.info("recover()");
+ return res.recover(flags);
+ }
+
+ public boolean isSameRM(XAResource xaResource) throws XAException {
+ if (xaResource instanceof XAResourceWrapper) {
+ xaResource = ((XAResourceWrapper)xaResource).res;
+ }
+ return res.isSameRM(xaResource);
+ }
+
+ public int getTransactionTimeout() throws XAException {
+ return res.getTransactionTimeout();
+ }
+
+ public boolean setTransactionTimeout(int transactionTimeout) throws XAException {
+ return res.setTransactionTimeout(transactionTimeout);
+ }
+
+}
diff --git a/jpa-standalone/pom.xml b/jpa-standalone/pom.xml
new file mode 100644
index 0000000..8d9fcef
--- /dev/null
+++ b/jpa-standalone/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-jpa</artifactId>
+ <packaging>jar</packaging>
+ <name>example-jpa</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-core</artifactId>
+ <version>3.5.6-Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-annotations</artifactId>
+ <version>3.5.6-Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-entitymanager</artifactId>
+ <version>3.5.6-Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>1.6.1</version>
+ </dependency>
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.8.0.10</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope></scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/jpa-standalone/src/main/java/jp/programmers/examples/jpa/Cat.java b/jpa-standalone/src/main/java/jp/programmers/examples/jpa/Cat.java
new file mode 100644
index 0000000..8a9b9d1
--- /dev/null
+++ b/jpa-standalone/src/main/java/jp/programmers/examples/jpa/Cat.java
@@ -0,0 +1,31 @@
+package jp.programmers.examples.jpa;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+@Entity
+public class Cat {
+
+ @Id @GeneratedValue
+ private Integer id;
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+
+}
diff --git a/jpa-standalone/src/main/java/jp/programmers/examples/jpa/MMCat.java b/jpa-standalone/src/main/java/jp/programmers/examples/jpa/MMCat.java
new file mode 100644
index 0000000..4b796a4
--- /dev/null
+++ b/jpa-standalone/src/main/java/jp/programmers/examples/jpa/MMCat.java
@@ -0,0 +1,36 @@
+package jp.programmers.examples.jpa;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.ManyToMany;
+
+@Entity
+public class MMCat {
+
+ @Id @GeneratedValue
+ private Integer id;
+ private String name;
+ @ManyToMany
+ private Set<MMCat> friends = new HashSet<MMCat>();
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+
+}
diff --git a/jpa-standalone/src/main/resources/META-INF/persistence.xml b/jpa-standalone/src/main/resources/META-INF/persistence.xml
new file mode 100644
index 0000000..76301f4
--- /dev/null
+++ b/jpa-standalone/src/main/resources/META-INF/persistence.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<persistence xmlns="http://java.sun.com/xml/ns/persistence"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_1_0.xsd"
+ version="1.0">
+ <persistence-unit name="test" transaction-type="RESOURCE_LOCAL">
+ <properties>
+ <property name="javax.persistence.jdbc.driver" value="org.hsqldb.jdbcDriver"/>
+ <property name="javax.persistence.jdbc.user" value="sa"/>
+ <property name="javax.persistence.jdbc.password" value=""/>
+ <property name="javax.persistence.jdbc.url" value="jdbc:hsqldb:."/>
+ <property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
+ <property name="hibernate.max_fetch_depth" value="3"/>
+ <property name="hibernate.hbm2ddl.auto" value="create-drop"/>
+ <property name="hibernate.show_sql" value="true"/>
+ </properties>
+ </persistence-unit>
+</persistence>
diff --git a/jpa-standalone/src/test/java/jp/programmers/examples/jpa/CatTest.java b/jpa-standalone/src/test/java/jp/programmers/examples/jpa/CatTest.java
new file mode 100644
index 0000000..1b4fcb0
--- /dev/null
+++ b/jpa-standalone/src/test/java/jp/programmers/examples/jpa/CatTest.java
@@ -0,0 +1,32 @@
+package jp.programmers.examples.jpa;
+
+import javax.persistence.Persistence;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Query;
+import junit.framework.TestCase;
+
+public class CatTest extends TestCase {
+
+ public void testPersist() throws Exception {
+ EntityManagerFactory emf = Persistence.createEntityManagerFactory("test");
+ EntityManager em = emf.createEntityManager();
+ em.getTransaction().begin();
+ em.createNativeQuery("SET WRITE_DELAY FALSE").executeUpdate();
+ em.getTransaction().commit();
+
+ em.getTransaction().begin();
+ Cat cat = new Cat();
+ em.persist(cat);
+ em.flush();
+ em.getTransaction().commit();
+ em.close();
+ em = emf.createEntityManager();
+ System.out.println(em.createQuery("from Cat").getSingleResult());
+ Query q = em.createQuery("select m from MMCat AS m, IN (m.friends) f where f.id = :id");
+ q.setParameter("id", 1);
+ System.out.println(q.getResultList());
+ em.close();
+ }
+
+}
diff --git a/servlet/pom.xml b/servlet/pom.xml
new file mode 100644
index 0000000..56ed2bd
--- /dev/null
+++ b/servlet/pom.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-servlet</artifactId>
+ <packaging>war</packaging>
+ <name>example-servlet</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <version>2.5</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-war-plugin</artifactId>
+ <configuration>
+ <webResources>
+ <resource>
+ <directory>${basedir}/src/main/webapp/WEB-INF</directory>
+ <targetPath>WEB-INF</targetPath>
+ <filtering>true</filtering>
+ </resource>
+ </webResources>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/servlet/src/main/java/jp/programmers/examples/ContentLengthFilter.java b/servlet/src/main/java/jp/programmers/examples/ContentLengthFilter.java
new file mode 100644
index 0000000..bbae424
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/ContentLengthFilter.java
@@ -0,0 +1,28 @@
+package jp.programmers.examples;
+
+import java.io.IOException;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+
+public class ContentLengthFilter implements Filter {
+
+ public void init(FilterConfig filterConfig) throws ServletException { }
+
+ public void doFilter(ServletRequest request,
+ ServletResponse response,
+ FilterChain chain)
+ throws IOException, ServletException {
+ ContentLengthResponseWrapper responseWrapper =
+ new ContentLengthResponseWrapper((HttpServletResponse)response);
+ chain.doFilter(request, responseWrapper);
+ responseWrapper.flushResponse();
+ }
+
+ public void destroy() { }
+}
+
diff --git a/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java b/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java
new file mode 100644
index 0000000..695a801
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/ContentLengthResponseWrapper.java
@@ -0,0 +1,92 @@
+package jp.programmers.examples;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.ServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+
+/**
+ * A deferred response wrapper.
+ *
+ * Do not call getWriter() before setContentLength(). Otherwise
+ * the subsequent setContentLength() call will simply be ignored.
+ */
+public class ContentLengthResponseWrapper extends HttpServletResponseWrapper {
+
+ private HttpServletResponse response;
+
+ private boolean isOutputStream = false;
+ private ServletOutputStream sout;
+ private ByteArrayOutputStream bout;
+
+ private boolean isWriter = false;
+ private StringWriter sw;
+ private PrintWriter pw;
+
+ public ContentLengthResponseWrapper(HttpServletResponse response) {
+ super(response);
+ this.response = response;
+ bout = new ByteArrayOutputStream();
+ sout = new ServletOutputStream() {
+ public void write(int b) throws IOException {
+ bout.write(b);
+ }
+ };
+
+ sw = new StringWriter();
+ pw = new PrintWriter(sw);
+ }
+
+ public ServletOutputStream getOutputStream() throws IOException {
+ if (isWriter) {
+ // Invalid call sequence, propagate it to throw exception
+ response.getWriter();
+ response.getOutputStream();
+ }
+ isOutputStream = true;
+ return sout;
+ }
+
+ public PrintWriter getWriter() throws IOException {
+ if (isOutputStream) {
+ // Invalid call sequence, propagate it to throw exception
+ response.getOutputStream();
+ response.getWriter();
+ }
+ isWriter = true;
+ return pw;
+ }
+
+ public void flushResponse() throws IOException {
+ if (isOutputStream) {
+ try {
+ sout.flush();
+ } catch (IOException ignore) { }
+ byte[] result = bout.toByteArray();
+ int length = result.length;
+ System.out.println("length=" + length);
+ response.setContentLength(length);
+ response.getOutputStream().write(result);
+ } else if (isWriter) {
+ pw.flush();
+ String s = sw.toString();
+ String charset = response.getCharacterEncoding();
+ if (charset == null) {
+ charset = "ISO-8859-1";
+ }
+ int length = s.getBytes(charset).length;
+ response.setContentLength(length);
+ System.out.println("length=" + length);
+ response.getWriter().write(s);
+ }
+ }
+}
+
diff --git a/servlet/src/main/java/jp/programmers/examples/HelloServlet.java b/servlet/src/main/java/jp/programmers/examples/HelloServlet.java
new file mode 100644
index 0000000..a65d344
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/HelloServlet.java
@@ -0,0 +1,15 @@
+package jp.programmers.examples;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.ServletException;
+import java.io.IOException;
+
+public class HelloServlet extends HttpServlet {
+ public void doGet(HttpServletRequest req, HttpServletResponse res)
+ throws ServletException, IOException {
+ res.setContentType("text/plain");
+ res.getWriter().println("Hello world!");
+ }
+}
diff --git a/servlet/src/main/java/jp/programmers/examples/Redirect.java b/servlet/src/main/java/jp/programmers/examples/Redirect.java
new file mode 100644
index 0000000..509e0fa
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/Redirect.java
@@ -0,0 +1,40 @@
+package jp.programmers.examples;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.ServletException;
+import java.io.IOException;
+
+public class Redirect {
+
+ /**
+ * Build an absolute URL for redirect
+ */
+ public static String toRedirectURL(HttpServletRequest request, HttpServletResponse response, String url) {
+ StringBuilder sb = new StringBuilder();
+ String scheme = request.getScheme();
+ String serverName = request.getServerName();
+ int serverPort = request.getServerPort();
+
+ sb.append(scheme);
+ sb.append("://");
+ sb.append(serverName);
+ if ((scheme.equals("http") && serverPort != 80) ||
+ (scheme.equals("https") && serverPort != 443)) {
+ sb.append(":");
+ sb.append(serverPort);
+ }
+ if (url.startsWith("/")) {
+ sb.append(url);
+ } else {
+ String requestURI = request.getRequestURI();
+ int lastSlash = requestURI.lastIndexOf("/");
+ if (lastSlash != -1) {
+ sb.append(requestURI.substring(0, lastSlash));
+ sb.append("/");
+ sb.append(response.encodeRedirectURL(url));
+ }
+ }
+ return sb.toString();
+ }
+}
diff --git a/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java b/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java
new file mode 100644
index 0000000..5d9b892
--- /dev/null
+++ b/servlet/src/main/java/jp/programmers/examples/SessionFixationProtectionFilter.java
@@ -0,0 +1,138 @@
+package jp.programmers.examples;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Random;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+/**
+ * This filter issues additional cookie on login, and check it until
+ * logout. In short, it's login time cookie. This gives us more robust
+ * than single JSESSIONID cookie auth provided by a container.
+ *
+ * In clustered environment, you should have unique SALT value accross
+ * the cluster.
+ */
+public class SessionFixationProtectionFilter implements Filter {
+
+ public static final String SALT = "SALT";
+ public static final String COOKIE_NAME = "COOKIE_NAME";
+ public static final String COOKIE_PATH = "COOKIE_PATH";
+ public static final String COOKIE_DOMAIN = "COOKIE_DOMAIN";
+
+ public static final String DEFAULT_COOKIE_NAME = "SessionFixationProtection";
+ public static final String DEFAULT_SALT = String.valueOf(new Random().nextInt());
+
+ private String salt = null;
+ private String cookieName = null;
+ private String cookiePath = null;
+ private String cookieDomain = null;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ salt = filterConfig.getInitParameter(SALT);
+ if (salt == null) {
+ salt = DEFAULT_SALT;
+ }
+ cookieName = filterConfig.getInitParameter(COOKIE_NAME);
+ if (cookieName == null) {
+ cookieName = DEFAULT_COOKIE_NAME;
+ }
+ cookiePath = filterConfig.getInitParameter(COOKIE_PATH);
+ cookieDomain = filterConfig.getInitParameter(COOKIE_DOMAIN);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public void doFilter(ServletRequest request,
+ ServletResponse response,
+ FilterChain chain)
+ throws IOException, ServletException {
+ HttpServletRequest req = (HttpServletRequest)request;
+ HttpServletResponse res = (HttpServletResponse)response;
+ HttpSession session = req.getSession(false);
+ String user = req.getRemoteUser();
+
+ if (user != null && session.getAttribute(cookieName) == null) {
+ // just logged in!
+ // going to set login cookie
+ String value = md5(salt + session.getId());
+ Cookie cookie = new Cookie(cookieName, value);
+ configureLoginCookie(cookie);
+ res.addCookie(cookie);
+ // mark session as this user should have a login cookie
+ session.setAttribute(cookieName, "true");
+ } else if (user != null && session.getAttribute(cookieName) != null) {
+ // this user is logging in
+ // going to check login cookie
+ String expectedValue = md5(salt + session.getId());
+ boolean found = false;
+ for (Cookie c : req.getCookies()) {
+ if (c.getName().equals(cookieName)) {
+ if (expectedValue.equals(c.getValue())) {
+ found = true;
+ break;
+ }
+ }
+ }
+ if (!found) {
+ // possible session fixiation
+ handleCookieNotFound(req, res, chain);
+ return;
+ }
+ } else {
+ // this user is not logged in
+ // do nothing
+ }
+ chain.doFilter(request, response);
+ }
+
+ /**
+ * Handles possible session fixiation. Calls HttpSession#invalidate() by default.
+ */
+ protected void handleCookieNotFound(HttpServletRequest req,
+ HttpServletResponse res,
+ FilterChain chain)
+ throws IOException, ServletException {
+ // force invalidate
+ req.getSession().invalidate();
+ chain.doFilter(req, res);
+ }
+
+ protected void configureLoginCookie(Cookie cookie) {
+ cookie.setMaxAge(-1);
+ if (cookiePath != null) {
+ cookie.setPath(cookiePath);
+ }
+ if (cookieDomain != null) {
+ cookie.setDomain(cookieDomain);
+ }
+ }
+
+ @Override
+ public void destroy() { }
+
+ public static String md5(String s) {
+ try {
+ MessageDigest m = MessageDigest.getInstance("MD5");
+ byte[] data = s.getBytes();
+ m.update(data, 0, data.length);
+ BigInteger i = new BigInteger(1, m.digest());
+ return String.format("%1$032X", i);
+ } catch (NoSuchAlgorithmException ex) {
+ throw new RuntimeException("no MD5", ex);
+ }
+ }
+}
+
diff --git a/servlet/src/main/webapp/WEB-INF/web.xml b/servlet/src/main/webapp/WEB-INF/web.xml
new file mode 100644
index 0000000..070c2db
--- /dev/null
+++ b/servlet/src/main/webapp/WEB-INF/web.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:web="http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
+ version="2.5">
+
+ <servlet>
+ <servlet-name>Hello Servlet</servlet-name>
+ <servlet-class>jp.programmers.examples.HelloServlet</servlet-class>
+ </servlet>
+ <servlet-mapping>
+ <servlet-name>Hello Servlet</servlet-name>
+ <url-pattern>/hello</url-pattern>
+ </servlet-mapping>
+
+ <filter>
+ <filter-name>ContentLengthFilter</filter-name>
+ <filter-class>jp.programmers.examples.ContentLengthFilter</filter-class>
+ </filter>
+ <filter-mapping>
+ <filter-name>ContentLengthFilter</filter-name>
+ <url-pattern>/length/*</url-pattern>
+ </filter-mapping>
+
+</web-app>
diff --git a/servlet/src/main/webapp/index.html b/servlet/src/main/webapp/index.html
new file mode 100644
index 0000000..cfe52a6
--- /dev/null
+++ b/servlet/src/main/webapp/index.html
@@ -0,0 +1,5 @@
+<html>
+<head>
+ <meta http-equiv="Refresh" content="0; URL=hello">
+</head>
+</html>
diff --git a/servlet/src/main/webapp/length/index.jsp b/servlet/src/main/webapp/length/index.jsp
new file mode 100644
index 0000000..799bdd5
--- /dev/null
+++ b/servlet/src/main/webapp/length/index.jsp
@@ -0,0 +1,8 @@
+<% String s1k = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; %>
+
+<%
+ // print 8k
+ for (int i = 0; i < 8; i++) {
+ out.println(s1k);
+ }
+%>
diff --git a/twitter/pom.xml b/twitter/pom.xml
new file mode 100644
index 0000000..7e88846
--- /dev/null
+++ b/twitter/pom.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>jp.programmers.examples</groupId>
+ <artifactId>example-twitter</artifactId>
+ <packaging>jar</packaging>
+ <name>example-twitter</name>
+ <version>1.0</version>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.twitter4j</groupId>
+ <artifactId>twitter4j-core</artifactId>
+ <version>[2.1,)</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}</finalName>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ </properties>
+
+</project>
diff --git a/twitter/run.sh b/twitter/run.sh
new file mode 100644
index 0000000..31caa14
--- /dev/null
+++ b/twitter/run.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+mvn -e exec:java -Dexec.mainClass=jp.programmers.examples.twitter.HelloTwitter
diff --git a/twitter/src/main/java/jp/programmers/examples/twitter/HelloTwitter.java b/twitter/src/main/java/jp/programmers/examples/twitter/HelloTwitter.java
new file mode 100644
index 0000000..2adf8bf
--- /dev/null
+++ b/twitter/src/main/java/jp/programmers/examples/twitter/HelloTwitter.java
@@ -0,0 +1,13 @@
+package jp.programmers.examples.twitter;
+
+import twitter4j.Twitter;
+import twitter4j.TwitterFactory;
+import twitter4j.User;
+
+public class HelloTwitter {
+ public static void main(String... args) throws Exception {
+ Twitter twitter = new TwitterFactory().getInstance();
+ User user = twitter.showUser("twitter");
+ System.out.println(twitter.getUserTimeline(user.getId()));
+ }
+}
|
bhchen/wcecompat
|
a9199b8c27eec31a71b061a32103b5458b3796a1
|
only the x86 sdks do not provide these
|
diff --git a/include/stdlib.h b/include/stdlib.h
index 53964bf..bf87f6e 100644
--- a/include/stdlib.h
+++ b/include/stdlib.h
@@ -1,138 +1,140 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__STDLIB_H__
#define __wcecompat__STDLIB_H__
#include "stddef.h"
#include "malloc.h"
#include "memory.h"
#include "float.h"
#ifdef __cplusplus
extern "C" {
#endif
extern float ceilf(float); /* inside default stdlib.h */
extern float fabsf(float);
extern float floorf(float);
extern float sqrtf(float);
+#ifdef _X86_
extern float fmodf(float);
+#endif
extern int _fmode; /* default file translation mode */
void abort(void);
char * __cdecl getenv(const char *name);
int __cdecl _putenv(const char *);
int __cdecl _wputenv(const wchar_t *);
extern char** _environ; /* pointer to environment table */
extern wchar_t** _wenviron; /* pointer to wide environment table */
#define putenv _putenv
#define environ _environ
#ifdef UNICODE
#define _tputenv _wputenv
#define _tenviron _wenviron
#else
#define _tputenv _putenv
#define _tenviron _environ
#endif
void * __cdecl bsearch(const void *key, const void *base, size_t nmemb, size_t size,
int (__cdecl * compar)(const void *, const void *));
extern int __argc; /* count of cmd line args */
extern char ** __argv; /* pointer to table of cmd line args */
extern wchar_t ** __wargv; /* pointer to table of wide cmd line args */
// min and max macros
#define __max(a,b) (((a) > (b)) ? (a) : (b))
#define __min(a,b) (((a) < (b)) ? (a) : (b))
// Conversion function prototypes
_CRTIMP double __cdecl strtod(const char *, char **);
_CRTIMP long __cdecl strtol(const char *, char **, int);
_CRTIMP unsigned long __cdecl strtoul(const char *, char **, int);
_CRTIMP __int64 __cdecl _atoi64(const char *);
_CRTIMP int __cdecl atoi(const char *);
_CRTIMP double __cdecl atof(const char *);
_CRTIMP long __cdecl atol(const char *);
char * __cdecl _itoa(int, char *, int);
char * __cdecl _ltoa(long, char *, int);
char * __cdecl _ultoa(unsigned long, char *, int);
char * __cdecl _ecvt(double, int, int *, int *);
char * __cdecl _fcvt(double, int, int *, int *);
char * __cdecl _gcvt(double, int, char *);
_CRTIMP int __cdecl tolower(int);
_CRTIMP int __cdecl toupper(int);
_CRTIMP double __cdecl wcstod(const wchar_t *, wchar_t **);
_CRTIMP long __cdecl wcstol(const wchar_t *, wchar_t **, int);
wchar_t * __cdecl _ultow (unsigned long, wchar_t *, int);
wchar_t * __cdecl _itow (int, wchar_t *, int);
wchar_t * __cdecl _ltow (long, wchar_t *, int);
_CRTIMP long __cdecl _wtol(const wchar_t *);
_CRTIMP __int64 __cdecl _wtoll(const wchar_t *);
unsigned long __cdecl wcstoul(const wchar_t *, wchar_t **, int);
#define _wtoi _wtol
#define _wtoi64 _wtoll
// Extended logical Ops functions
unsigned long __cdecl _lrotl(unsigned long, int);
unsigned long __cdecl _lrotr(unsigned long, int);
unsigned int __cdecl _rotl(unsigned int, int);
unsigned int __cdecl _rotr(unsigned int, int);
// Other misc STDLIB functions
_CRTIMP void __cdecl _swab(char *, char *, int);
_CRTIMP int __cdecl rand(void);
_CRTIMP void __cdecl srand(unsigned int);
void __cdecl qsort(void *, size_t, size_t, int (__cdecl *)(const void *, const void *));
double __cdecl difftime(time_t, time_t);
int __cdecl atexit(void (__cdecl *)(void));
// Maximum value that can be returned by the rand function.
#define RAND_MAX 0x7fff
// onexit() defns
#define EXIT_SUCCESS 0
#define EXIT_FAILURE 1
typedef int (__cdecl * _onexit_t)(void);
#define onexit_t _onexit_t
// Data structure & function definitions for div and ldiv runtimes
typedef struct _div_t {
int quot;
int rem;
} div_t;
typedef struct _ldiv_t {
long quot;
long rem;
} ldiv_t;
div_t __cdecl div(int, int);
ldiv_t __cdecl ldiv(long, long);
#ifdef __cplusplus
}
#endif
#endif /* __wcecompat__STDLIB_H__ */
|
bhchen/wcecompat
|
ffe7dd780d237ec1309e494d2faa187fe9db4f10
|
Build fixes for usage in other projects than OpenSSL
|
diff --git a/include/assert.h b/include/assert.h
index 336cc3c..24339b9 100644
--- a/include/assert.h
+++ b/include/assert.h
@@ -1,44 +1,50 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__ASSERT_H__
#define __wcecompat__ASSERT_H__
+#include <ceconfig.h>
+#include <stdio.h>
#ifdef __cplusplus
extern "C" {
#endif
#ifdef NDEBUG
#define assert(exp) ((void)0)
#else
+#ifndef COREDLL_CORESIOA
void _assert(void*, void*, unsigned);
+#else
+#define _assert(exp,file,line) fprintf(stderr, "Assertion failed: %s, file %s, line %d\n", (char*)exp, file, line)
+#endif
#define assert(exp) (void)( (exp) || (_assert(#exp, __FILE__, __LINE__), 0) )
#endif
#ifdef __cplusplus
}
#endif
#endif // __wcecompat__ASSERT_H__
diff --git a/include/errno.h b/include/errno.h
index b02eaec..19b66a7 100644
--- a/include/errno.h
+++ b/include/errno.h
@@ -1,45 +1,57 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__ERRNO_H__
#define __wcecompat__ERRNO_H__
#ifdef __cplusplus
extern "C" {
#endif
extern int errno;
-
+#ifndef ENOENT
#define ENOENT (2)
+#endif
+
+#ifndef EBADF
#define EBADF (9)
+#endif
+
+#ifndef EAGAIN
#define EAGAIN (11)
+#endif
+
+#ifndef ENOMEM
#define ENOMEM (12)
-#define EINVAL (22)
+#endif
+#ifndef EINVAL
+#define EINVAL (22)
+#endif
#ifdef __cplusplus
}
#endif
#endif // __wcecompat__ERRNO_H__
diff --git a/include/stddef.h b/include/stddef.h
index 01bd8ba..1b24a3a 100644
--- a/include/stddef.h
+++ b/include/stddef.h
@@ -1,86 +1,88 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__STDDEF_H__
#define __wcecompat__STDDEF_H__
#ifdef __cplusplus
extern "C" {
#endif
-
+#if 0
/* Define _CRTAPI1 (for compatibility with the NT SDK) */
#ifndef _CRTAPI1
#if _MSC_VER >= 800 && _M_IX86 >= 300
#define _CRTAPI1 __cdecl
#else
#define _CRTAPI1
#endif
#endif
/* Define _CRTAPI2 (for compatibility with the NT SDK) */
#ifndef _CRTAPI2
#if _MSC_VER >= 800 && _M_IX86 >= 300
#define _CRTAPI2 __cdecl
#else
#define _CRTAPI2
#endif
#endif
-
+#endif
#ifndef _CRTIMP
#define _CRTIMP
#endif
#ifndef _SIZE_T_DEFINED
typedef unsigned int size_t;
#define _SIZE_T_DEFINED
#endif
#ifndef _WCHAR_T_DEFINED
typedef unsigned short wchar_t;
#define _WCHAR_T_DEFINED
#endif
#ifndef _WCTYPE_T_DEFINED
typedef wchar_t wint_t;
typedef wchar_t wctype_t;
#define _WCTYPE_T_DEFINED
#endif
#ifndef _TIME_T_DEFINED
typedef unsigned long time_t;
#define _TIME_T_DEFINED /* avoid multiple def's of time_t */
#endif
#ifndef NULL
#ifdef __cplusplus
#define NULL 0
#else
#define NULL ((void *)0)
#endif
#endif
+#ifndef offsetof
#define offsetof(s,m) ((size_t)&(((s*)0)->m))
+#endif
#ifdef __cplusplus
}
#endif
#endif /* __wcecompat__STDDEF_H__ */
diff --git a/include/stdio.h b/include/stdio.h
index b20b85e..9eafe44 100644
--- a/include/stdio.h
+++ b/include/stdio.h
@@ -1,169 +1,175 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__STDIO_H__
#define __wcecompat__STDIO_H__
#include "stddef.h"
//#include "sys/types.h" /* size_t */
#include "stdarg.h"
#ifdef __cplusplus
extern "C" {
#endif
// STDIO constant defns
#define _MAX_PATH MAX_PATH
// EOF constants
#define EOF (-1)
#define WEOF (wint_t)(0xFFFF)
// Seek method constants
#define SEEK_CUR 1
#define SEEK_END 2
#define SEEK_SET 0
// mode constants for _setmode()
#define _O_TEXT 0x4000 /* file mode is text (translated) */
#define _O_BINARY 0x8000 /* file mode is binary (untranslated) */
// FILE is an opaque handle in Win CE. Users have no access to the internals
#ifndef _FILE_DEFINED
typedef void FILE;
#define _FILE_DEFINED
#endif
#if _INTEGRAL_MAX_BITS >= 64
typedef __int64 fpos_t;
#else
typedef long fpos_t;
#endif
// Std handle defns
#define stdin _getstdfilex(0)
#define stdout _getstdfilex(1)
#define stderr _getstdfilex(2)
// ANSI String formatting functions
_CRTIMP int __cdecl sscanf(const char *, const char *, ...);
_CRTIMP int __cdecl sprintf(char *, const char *, ...);
_CRTIMP int __cdecl vsprintf(char *, const char *, va_list);
_CRTIMP int __cdecl _snprintf(char *, size_t, const char *, ...);
_CRTIMP int __cdecl _vsnprintf(char *, size_t, const char *, va_list);
// Always present since wsprintfW (used by kernel) redirects to these
_CRTIMP int __cdecl swprintf(wchar_t *, const wchar_t *, ...);
_CRTIMP int __cdecl vswprintf(wchar_t *, const wchar_t *, va_list);
_CRTIMP int __cdecl _snwprintf(wchar_t *, size_t, const wchar_t *, ...);
_CRTIMP int __cdecl _vsnwprintf(wchar_t *, size_t, const wchar_t *, va_list);
// WIDE String formatting functions
_CRTIMP int __cdecl swscanf(const wchar_t *, const wchar_t *, ...);
// ANSI Stdin/Out functions & macros
_CRTIMP int __cdecl scanf(const char *, ...);
_CRTIMP int __cdecl printf(const char *, ...);
_CRTIMP int __cdecl vprintf(const char *, va_list);
_CRTIMP int __cdecl getchar(void);
_CRTIMP char * __cdecl gets(char *);
_CRTIMP int __cdecl putchar(int);
_CRTIMP int __cdecl puts(const char *);
// ANSI functions for Stdin/out and/or general buffered file handling
_CRTIMP int __cdecl fgetc(FILE *);
_CRTIMP char * __cdecl fgets(char *, int, FILE *);
_CRTIMP int __cdecl fputc(int, FILE *);
_CRTIMP int __cdecl fputs(const char *, FILE *);
_CRTIMP int __cdecl ungetc(int, FILE *);
// ANSI functions for general buffered file handling
_CRTIMP FILE * __cdecl fopen(const char *, const char *);
_CRTIMP int __cdecl fscanf(FILE *, const char *, ...);
_CRTIMP int __cdecl fprintf(FILE *, const char *, ...);
_CRTIMP int __cdecl vfprintf(FILE *, const char *, va_list);
#define getc(_stream) fgetc(_stream)
#define putc(_i, _stream) fputc(_i, _stream)
// functions for general buffered file handling in either ANSI or Wide
_CRTIMP FILE* __cdecl _getstdfilex(int);
_CRTIMP int __cdecl fclose(FILE *);
_CRTIMP int __cdecl _fcloseall(void);
_CRTIMP size_t __cdecl fread(void *, size_t, size_t, FILE *);
_CRTIMP size_t __cdecl fwrite(const void *, size_t, size_t, FILE *);
_CRTIMP int __cdecl fflush(FILE *);
_CRTIMP int __cdecl _flushall(void);
_CRTIMP int __cdecl feof(FILE *);
_CRTIMP int __cdecl ferror(FILE *);
_CRTIMP void __cdecl clearerr(FILE *);
_CRTIMP int __cdecl fgetpos(FILE *, fpos_t *);
_CRTIMP int __cdecl fsetpos(FILE *, const fpos_t *);
_CRTIMP int __cdecl fseek(FILE *, long, int);
_CRTIMP long __cdecl ftell(FILE *);
_CRTIMP int __cdecl _fileno(FILE *);
_CRTIMP int __cdecl _setmode(int fd, int mode);
_CRTIMP FILE* __cdecl _wfdopen(void*, const wchar_t*);
_CRTIMP FILE* __cdecl _wfreopen(const wchar_t *path, const wchar_t *mode, FILE *stream);
// old names
#define fcloseall _fcloseall
#define fileno _fileno
#define flushall _flushall
// WIDE Stdin/Out functions & macros
_CRTIMP int __cdecl wscanf(const wchar_t *, ...);
_CRTIMP int __cdecl wprintf(const wchar_t *, ...);
_CRTIMP int __cdecl vwprintf(const wchar_t *, va_list);
_CRTIMP wint_t __cdecl getwchar(void);
_CRTIMP wint_t __cdecl putwchar(wint_t);
_CRTIMP wchar_t * __cdecl _getws(wchar_t *);
_CRTIMP int __cdecl _putws(const wchar_t *);
// WIDE functions for Stdin/out and/or general buffered file handling
_CRTIMP wint_t __cdecl fgetwc(FILE *);
_CRTIMP wint_t __cdecl fputwc(wint_t, FILE *);
_CRTIMP wint_t __cdecl ungetwc(wint_t, FILE *);
_CRTIMP wchar_t * __cdecl fgetws(wchar_t *, int, FILE *);
_CRTIMP int __cdecl fputws(const wchar_t *, FILE *);
#define getwc(_stm) fgetwc(_stm)
#define putwc(_c,_stm) fputwc(_c,_stm)
// WIDE functions for general buffered file handling
_CRTIMP FILE * __cdecl _wfopen(const wchar_t *, const wchar_t *);
_CRTIMP int __cdecl fwscanf(FILE *, const wchar_t *, ...);
_CRTIMP int __cdecl fwprintf(FILE *, const wchar_t *, ...);
_CRTIMP int __cdecl vfwprintf(FILE *, const wchar_t *, va_list);
#define BUFSIZ (512)
void perror(const char *prefix);
+#ifndef _IOFBF
#define _IOFBF 0
+#endif
+#ifndef _IOLBF
#define _IOLBF 1
+#endif
+#ifndef _IONBF
#define _IONBF 2
+#endif
int setvbuf(FILE* stream, char* buffer, int mode, size_t size);
#ifdef __cplusplus
}
#endif
#endif /* __wcecompat__STDIO_H__ */
diff --git a/include/stdlib.h b/include/stdlib.h
index ee0bb9f..53964bf 100644
--- a/include/stdlib.h
+++ b/include/stdlib.h
@@ -1,131 +1,138 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__STDLIB_H__
#define __wcecompat__STDLIB_H__
#include "stddef.h"
#include "malloc.h"
#include "memory.h"
+#include "float.h"
#ifdef __cplusplus
extern "C" {
#endif
+extern float ceilf(float); /* inside default stdlib.h */
+extern float fabsf(float);
+extern float floorf(float);
+extern float sqrtf(float);
+extern float fmodf(float);
+
extern int _fmode; /* default file translation mode */
void abort(void);
char * __cdecl getenv(const char *name);
int __cdecl _putenv(const char *);
int __cdecl _wputenv(const wchar_t *);
extern char** _environ; /* pointer to environment table */
extern wchar_t** _wenviron; /* pointer to wide environment table */
#define putenv _putenv
#define environ _environ
#ifdef UNICODE
#define _tputenv _wputenv
#define _tenviron _wenviron
#else
#define _tputenv _putenv
#define _tenviron _environ
#endif
void * __cdecl bsearch(const void *key, const void *base, size_t nmemb, size_t size,
int (__cdecl * compar)(const void *, const void *));
extern int __argc; /* count of cmd line args */
extern char ** __argv; /* pointer to table of cmd line args */
extern wchar_t ** __wargv; /* pointer to table of wide cmd line args */
// min and max macros
#define __max(a,b) (((a) > (b)) ? (a) : (b))
#define __min(a,b) (((a) < (b)) ? (a) : (b))
// Conversion function prototypes
_CRTIMP double __cdecl strtod(const char *, char **);
_CRTIMP long __cdecl strtol(const char *, char **, int);
_CRTIMP unsigned long __cdecl strtoul(const char *, char **, int);
_CRTIMP __int64 __cdecl _atoi64(const char *);
_CRTIMP int __cdecl atoi(const char *);
_CRTIMP double __cdecl atof(const char *);
_CRTIMP long __cdecl atol(const char *);
char * __cdecl _itoa(int, char *, int);
char * __cdecl _ltoa(long, char *, int);
char * __cdecl _ultoa(unsigned long, char *, int);
char * __cdecl _ecvt(double, int, int *, int *);
char * __cdecl _fcvt(double, int, int *, int *);
char * __cdecl _gcvt(double, int, char *);
_CRTIMP int __cdecl tolower(int);
_CRTIMP int __cdecl toupper(int);
_CRTIMP double __cdecl wcstod(const wchar_t *, wchar_t **);
_CRTIMP long __cdecl wcstol(const wchar_t *, wchar_t **, int);
wchar_t * __cdecl _ultow (unsigned long, wchar_t *, int);
wchar_t * __cdecl _itow (int, wchar_t *, int);
wchar_t * __cdecl _ltow (long, wchar_t *, int);
_CRTIMP long __cdecl _wtol(const wchar_t *);
_CRTIMP __int64 __cdecl _wtoll(const wchar_t *);
unsigned long __cdecl wcstoul(const wchar_t *, wchar_t **, int);
#define _wtoi _wtol
#define _wtoi64 _wtoll
// Extended logical Ops functions
unsigned long __cdecl _lrotl(unsigned long, int);
unsigned long __cdecl _lrotr(unsigned long, int);
unsigned int __cdecl _rotl(unsigned int, int);
unsigned int __cdecl _rotr(unsigned int, int);
// Other misc STDLIB functions
_CRTIMP void __cdecl _swab(char *, char *, int);
_CRTIMP int __cdecl rand(void);
_CRTIMP void __cdecl srand(unsigned int);
void __cdecl qsort(void *, size_t, size_t, int (__cdecl *)(const void *, const void *));
double __cdecl difftime(time_t, time_t);
int __cdecl atexit(void (__cdecl *)(void));
// Maximum value that can be returned by the rand function.
#define RAND_MAX 0x7fff
// onexit() defns
#define EXIT_SUCCESS 0
#define EXIT_FAILURE 1
typedef int (__cdecl * _onexit_t)(void);
#define onexit_t _onexit_t
// Data structure & function definitions for div and ldiv runtimes
typedef struct _div_t {
int quot;
int rem;
} div_t;
typedef struct _ldiv_t {
long quot;
long rem;
} ldiv_t;
div_t __cdecl div(int, int);
ldiv_t __cdecl ldiv(long, long);
#ifdef __cplusplus
}
#endif
#endif /* __wcecompat__STDLIB_H__ */
diff --git a/include/string.h b/include/string.h
index 09b8d9e..3aef0b0 100644
--- a/include/string.h
+++ b/include/string.h
@@ -1,101 +1,102 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__STRING_H__
#define __wcecompat__STRING_H__
#include "stddef.h"
+#include "memory.h"
#ifdef __cplusplus
extern "C" {
#endif
char * __cdecl strerror(int errnum);
// Widechar string functions
_CRTIMP wchar_t * __cdecl wcscat(wchar_t *, const wchar_t *);
_CRTIMP wchar_t * __cdecl wcschr(const wchar_t *, wchar_t);
_CRTIMP int __cdecl wcscmp(const wchar_t *, const wchar_t *);
_CRTIMP wchar_t * __cdecl wcscpy(wchar_t *, const wchar_t *);
_CRTIMP size_t __cdecl wcscspn(const wchar_t *, const wchar_t *);
_CRTIMP size_t __cdecl wcslen(const wchar_t *);
_CRTIMP size_t __cdecl mbstowcs(wchar_t *wcstr, const char *mbstr, size_t count);
_CRTIMP size_t __cdecl wcstombs(char *mbstr, const wchar_t *wcstr, size_t count);
_CRTIMP wchar_t * __cdecl wcsncat(wchar_t *, const wchar_t *, size_t);
_CRTIMP int __cdecl wcsncmp(const wchar_t *, const wchar_t *, size_t);
_CRTIMP wchar_t * __cdecl wcsncpy(wchar_t *, const wchar_t *, size_t);
_CRTIMP wchar_t * __cdecl wcspbrk(const wchar_t *, const wchar_t *);
_CRTIMP wchar_t * __cdecl wcsrchr(const wchar_t *, wchar_t);
_CRTIMP size_t __cdecl wcsspn(const wchar_t *, const wchar_t *);
_CRTIMP wchar_t * __cdecl wcsstr(const wchar_t *, const wchar_t *);
_CRTIMP wchar_t * __cdecl wcstok(wchar_t *, const wchar_t *);
_CRTIMP wchar_t * __cdecl _wcsdup(const wchar_t *);
_CRTIMP int __cdecl _wcsicmp(const wchar_t *, const wchar_t *);
_CRTIMP int __cdecl _wcsnicmp(const wchar_t *, const wchar_t *, size_t);
_CRTIMP wchar_t * __cdecl _wcsnset(wchar_t *, wchar_t, size_t);
_CRTIMP wchar_t * __cdecl _wcsrev(wchar_t *);
_CRTIMP wchar_t * __cdecl _wcsset(wchar_t *, wchar_t);
_CRTIMP wchar_t * __cdecl _wcslwr(wchar_t *);
_CRTIMP wchar_t * __cdecl _wcsupr(wchar_t *);
void * __cdecl memcpy(void *, const void *, size_t);
// Non STDC old names for above
#define wcsdup _wcsdup
#define wcsicmp _wcsicmp
#define wcsnicmp _wcsnicmp
#define wcsnset _wcsnset
#define wcsrev _wcsrev
#define wcsset _wcsset
#define wcslwr _wcslwr
#define wcsupr _wcsupr
// ANSI string functions
size_t __cdecl strlen(const char *);
int __cdecl strcmp(const char *, const char *);
char * __cdecl strcat(char *, const char *);
char * __cdecl strcpy(char *, const char *);
_CRTIMP char * __cdecl strchr(const char *, int);
_CRTIMP size_t __cdecl strcspn(const char *, const char *);
_CRTIMP char * __cdecl strncat(char *, const char *, size_t);
_CRTIMP int __cdecl strncmp(const char *, const char *, size_t);
_CRTIMP char * __cdecl strncpy(char *, const char *, size_t);
_CRTIMP char * __cdecl strstr(const char *, const char *);
_CRTIMP char * __cdecl strtok(char *, const char *);
_CRTIMP int __cdecl _stricmp(const char *, const char *);
_CRTIMP int __cdecl _strnicmp(const char *, const char *, size_t);
_CRTIMP char * __cdecl strpbrk(const char *, const char *);
_CRTIMP char * __cdecl strrchr(const char *, int);
_CRTIMP size_t __cdecl strspn(const char *, const char *);
_CRTIMP char * __cdecl _strdup(const char *);
_CRTIMP char * __cdecl _strnset(char *, int, size_t);
_CRTIMP char * __cdecl _strrev(char *);
char * __cdecl _strset(char *, int);
_CRTIMP char * __cdecl _strlwr(char *);
_CRTIMP char * __cdecl _strupr(char *);
#ifdef __cplusplus
}
#endif
#endif /* __wcecompat__STRING_H__ */
diff --git a/include/time.h b/include/time.h
index 83edf8b..cc63a87 100644
--- a/include/time.h
+++ b/include/time.h
@@ -1,62 +1,69 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__TIME_H__
#define __wcecompat__TIME_H__
+#include <stdlib.h> // time_t defined there
+
+extern size_t wcsftime(wchar_t *, size_t, const char *,const struct tm *);
#ifdef __cplusplus
extern "C" {
#endif
-#include <stdlib.h> // time_t defined there
//typedef int time_t;
+#ifndef _CLOCK_T_DEFINED
typedef unsigned long clock_t;
+#endif
+#ifndef CLOCKS_PER_SEC
#define CLOCKS_PER_SEC (1000)
+#endif
time_t time(time_t* t);
clock_t __cdecl clock(void);
+#ifndef _TM_DEFINED
+#define _TM_DEFINED
struct tm
{
int tm_sec;
int tm_min;
int tm_hour;
int tm_mday;
int tm_mon;
int tm_year;
int tm_wday;
int tm_yday;
int tm_isdst;
};
+#endif
struct tm* localtime(const time_t* clock);
struct tm * __cdecl gmtime(const time_t *clock);
-
-
#ifdef __cplusplus
}
#endif
#endif // __wcecompat__TIME_H__
diff --git a/src/assert.cpp b/src/assert.cpp
index e5d7a8a..9f183aa 100644
--- a/src/assert.cpp
+++ b/src/assert.cpp
@@ -1,31 +1,32 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
-
+#ifndef COREDLL_CORESIOA
void _assert(void* expression, void* file, unsigned line)
{
fprintf(stderr, "Assertion failed: %s, file %s, line %d\n", (char*)expression, file, line);
exit(3);
}
+#endif
|
bhchen/wcecompat
|
ba15e42a2db3188541d5ec5f0e610cf95bd3ef92
|
MIPS build
|
diff --git a/config.pl b/config.pl
index 6e49230..8bf659d 100644
--- a/config.pl
+++ b/config.pl
@@ -1,82 +1,84 @@
#!/usr/local/bin/perl -w
### Input / Variable Parsing ###
die '%OSVERSION% is not defined, see README.txt' if (!defined($ENV{'OSVERSION'}));
die '%TARGETCPU% is not defined, see README.txt' if (!defined($ENV{'TARGETCPU'}));
$wince_version = uc($ENV{'OSVERSION'});
die '%OSVERSION% is not properly set, see README.txt' if ($wince_version !~ /^WCE([1-9])([0-9]{2})$/);
$wince_major_version = $1;
$wince_minor_version=$2;
die '%PLATFORM% is not defined, see README.txt' if ($wince_major_version < 5 && !defined($ENV{'PLATFORM'}));
print "windows ce version:".$wince_version."\n";
print "major:".$wince_major_version."\n";
print "minor:".$wince_minor_version."\n";
$target_cpu = uc($ENV{'TARGETCPU'});
print "Compiling for Architecture:".$target_cpu."\n";
### Setting up variables ###
# first set the version flags for compiling and linking
$compile_flags = " -D_WIN32_WCE=0x".$wince_major_version.$wince_minor_version;
$compile_flags.= " -DUNDER_CE=$wince_major_version$wince_minor_version";
$link_flags = "/SUBSYSTEM:windowsce,$wince_major_version.$wince_minor_version";
$link_target_cpu = $target_cpu;
# each platform has its own compiler and linker flags
if ($target_cpu eq "X86") {
$compile_flags.= " -Dx86 -D_X86_";
$link_flags.= " /NODEFAULTLIB:oldnames.lib coredll.lib corelibc.lib";
}
elsif ($target_cpu eq "X86EM" || $target_cpu eq "X86EMNSET CFG=NONE") {
$compile_flags.= " -Dx86 -D_X86_ -D_WIN32_WCE_EMULATION";
$link_target_cpu = "X86";
}
elsif ($target_cpu eq "SH3") {
$compile_flags.= " -DSH3 -D_SH3_ -DSHx";
}
elsif ($target_cpu eq "SH4") {
$compile_flags.= " -DSH4 -D_SH4_ -DSHx";
}
-elsif ($target_cpu eq "R4100" || $target_cpu eq "R4111" || $target_cpu eq "R4300") {
+elsif ($target_cpu eq "R4100" || $target_cpu eq "R4111" || $target_cpu eq "R4300" || $target_cpu eq "MIPSII" || $target_cpu eq "MIPSIV") {
$compile_flags.= " -DMIPS -D_MIPS_ -DMIPS_R4000";
}
elsif ($target_cpu eq "ARMV4" || $target_cpu eq "ARMV4T" || $target_cpu eq "ARMV4I") {
$compile_flags.= " -DARM -D_ARM_ -D_M_ARM -D$target_cpu -D_$target_cpu\_";
if ($wince_major_version >= 5) {
$link_target_cpu = "THUMB";
}
else {
$link_target_cpu = "ARM";
}
}
# We need to set the machine type for the link flag
if ($link_target_cpu eq "X86" && $wince_major_version < 5) {
$link_target_cpu = "IX86";
+} elsif ($link_target_cpu eq "MIPSII" || $link_target_cpu eq "MIPSIV") {
+ $link_target_cpu = "MIPS";
}
$link_flags.= " /MACHINE:".$link_target_cpu;
print "Compile:".$compile_flags."\n";
print "Link:".$link_flags."\n";
# And finally we write out the configuration
open (FILEHANDLE, '>wceconfig.mak');
#WCEVERSION=200
#WCELDVERSION=2.00
print FILEHANDLE 'WCEVERSION='.$wince_major_version.$wince_minor_version."\n";
print FILEHANDLE 'WCELDVERSION='.$wince_major_version.'.'.$wince_minor_version."\n";
print FILEHANDLE 'WCEPLATFORM=foobar'."\n";
print FILEHANDLE 'WCETARGETDEFS='.$compile_flags."\n";
print FILEHANDLE 'LFLAGS='.$link_flags."\n";
close(FILEHANDLE);
|
bhchen/wcecompat
|
4a0fda018279888921e591f46c67e4c68d934968
|
add support for Windows CE 5 and following
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9605222
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,10 @@
+Makefile.Debug
+Makefile.Release
+debug/*
+vc80.pdb
+release/*
+wceconfig.mak
+lib/*
+obj/*
+tmp/*
+
diff --git a/README b/README
index 08d9a22..581fa5a 100644
--- a/README
+++ b/README
@@ -1,53 +1,55 @@
-wcecompat: Windows CE C Runtime Library "compatibility" library.
-Copyright (C) 2001-2005 Essemer Pty Ltd. All rights reserved.
+wcecompat: Windows CE Runtime Library "compatibility" library.
+LICENSE
+-------
This software is licensed under the terms and conditions in the LICENSE file.
-Contact: Steven Reddie ([email protected])
- Essemer Pty Ltd
- http://www.essemer.com.au/
-
-
-This is a work in progress. You will no doubt find that the code here is a
-real mess at the moment, but it does work. A cleaner version will be available
-shortly, keep an eye on the website.
-
-
-Building
---------
-
-To build, set the environment variables with one of the batch files installed
-with eVC, and then invoke nmake.
-Two libraries are built:
+ABOUT
+-----
+This software is derived from the work by Steven Reddie from Essemer Pty Ltd.
+For contact information, see README_essemer.txt.
+As Essemer was not continuing this project it has been forked and republished.
+The new version is supposed to stay compatible with previous versions plus add
+support for Windows CE 5 and later.
-* wcecompat.lib
-* wcecompatex.lib
-In this initial release, both libraries are identical and contain the
-functionality that is meant for wcecompatex.lib. This library contains
-features to support stdin/stdout/stderr and their redirection. The first
-library will in the future be a leaner library without these features.
-Whilst both libraries will be suitable for releasable applications it is
-assumed that some people will not want these extra features outside of
-debugging, and therefore the cutdown version will be provided.
-
-
-Todo
+NOTE
----
+This is work in progress and the content might change at any point in time.
-Build a version that doesn't contain the stdin/stdout/stderr support.
-Finish and cleanup implementation of existing functions, particularly
-the stream IO.
+REQUIREMENTS
+------------
+wcecompat requires the following software:
+- Perl (to create the makefile configuration)
+- Embedded Visual Studio 3/4 or Visual Studio 2005/following
+- A Windows CE SDK
-Changes
--------
-
-1.2 Add support for ARMV4T and ARMV4I to wcedefs.mak
-
-1.1 Changes to support eVC4 & Pocket PC 2003.
- Added EAGAIN and ENOMEM to errno.h as needed by OpenSSL 0.9.8.
-
-1.0 Initial release
+BUILDING
+--------
+- Ensure your environment is setup for crosscompilation for the target Windows
+ CE platform. Refer to the documentation in case you are not sure how to do so.
+- Depending on your version of Windows CE, you will have to set some environment
+ variables. These are
+ * OSVERSION: A string composed by "WCE" plus the version number ,eg. WCE500
+ Note that eg. Windows Mobile 5 is based on Windows CE 5.01, so
+ be sure about the proper setting. Otherwise runtime issues might
+ occur.
+ * TARGETCPU: The CPU architecture the SDK is designed for (eg. X86, ARMV4I,...)
+ * PLATFORM : For Windows CE previous to 5.00 this might be needed, but is
+ deprecated from this version on.
+- Call 'perl config.pl' to create the makefile configuration for your setup. In case
+ you do not have perl available, continue reading below to create your own
+ configuration without using perl.
+- Call 'nmake'
+- After successful compilation you will have "wcecompat.lib" and "wcecompatex.lib"
+ in your lib directory. These libraries are generated statically and are supposed
+ to be linked into your project.
+
+
+MANUAL MAKEFILE CONFIGURATION
+-----------------------------
+If Perl is not available, following steps are needed to build wcecompat successfully:
+- foo ### TODO:
diff --git a/README_essemer.txt b/README_essemer.txt
new file mode 100644
index 0000000..08d9a22
--- /dev/null
+++ b/README_essemer.txt
@@ -0,0 +1,53 @@
+wcecompat: Windows CE C Runtime Library "compatibility" library.
+Copyright (C) 2001-2005 Essemer Pty Ltd. All rights reserved.
+
+This software is licensed under the terms and conditions in the LICENSE file.
+
+Contact: Steven Reddie ([email protected])
+ Essemer Pty Ltd
+ http://www.essemer.com.au/
+
+
+This is a work in progress. You will no doubt find that the code here is a
+real mess at the moment, but it does work. A cleaner version will be available
+shortly, keep an eye on the website.
+
+
+Building
+--------
+
+To build, set the environment variables with one of the batch files installed
+with eVC, and then invoke nmake.
+
+Two libraries are built:
+
+* wcecompat.lib
+* wcecompatex.lib
+
+In this initial release, both libraries are identical and contain the
+functionality that is meant for wcecompatex.lib. This library contains
+features to support stdin/stdout/stderr and their redirection. The first
+library will in the future be a leaner library without these features.
+Whilst both libraries will be suitable for releasable applications it is
+assumed that some people will not want these extra features outside of
+debugging, and therefore the cutdown version will be provided.
+
+
+Todo
+----
+
+Build a version that doesn't contain the stdin/stdout/stderr support.
+
+Finish and cleanup implementation of existing functions, particularly
+the stream IO.
+
+
+Changes
+-------
+
+1.2 Add support for ARMV4T and ARMV4I to wcedefs.mak
+
+1.1 Changes to support eVC4 & Pocket PC 2003.
+ Added EAGAIN and ENOMEM to errno.h as needed by OpenSSL 0.9.8.
+
+1.0 Initial release
diff --git a/config.pl b/config.pl
new file mode 100644
index 0000000..6e49230
--- /dev/null
+++ b/config.pl
@@ -0,0 +1,82 @@
+#!/usr/local/bin/perl -w
+
+### Input / Variable Parsing ###
+
+die '%OSVERSION% is not defined, see README.txt' if (!defined($ENV{'OSVERSION'}));
+die '%TARGETCPU% is not defined, see README.txt' if (!defined($ENV{'TARGETCPU'}));
+
+$wince_version = uc($ENV{'OSVERSION'});
+die '%OSVERSION% is not properly set, see README.txt' if ($wince_version !~ /^WCE([1-9])([0-9]{2})$/);
+
+$wince_major_version = $1;
+$wince_minor_version=$2;
+
+die '%PLATFORM% is not defined, see README.txt' if ($wince_major_version < 5 && !defined($ENV{'PLATFORM'}));
+
+print "windows ce version:".$wince_version."\n";
+print "major:".$wince_major_version."\n";
+print "minor:".$wince_minor_version."\n";
+
+$target_cpu = uc($ENV{'TARGETCPU'});
+print "Compiling for Architecture:".$target_cpu."\n";
+
+
+### Setting up variables ###
+
+# first set the version flags for compiling and linking
+$compile_flags = " -D_WIN32_WCE=0x".$wince_major_version.$wince_minor_version;
+$compile_flags.= " -DUNDER_CE=$wince_major_version$wince_minor_version";
+$link_flags = "/SUBSYSTEM:windowsce,$wince_major_version.$wince_minor_version";
+$link_target_cpu = $target_cpu;
+
+# each platform has its own compiler and linker flags
+if ($target_cpu eq "X86") {
+ $compile_flags.= " -Dx86 -D_X86_";
+ $link_flags.= " /NODEFAULTLIB:oldnames.lib coredll.lib corelibc.lib";
+ }
+elsif ($target_cpu eq "X86EM" || $target_cpu eq "X86EMNSET CFG=NONE") {
+ $compile_flags.= " -Dx86 -D_X86_ -D_WIN32_WCE_EMULATION";
+ $link_target_cpu = "X86";
+ }
+elsif ($target_cpu eq "SH3") {
+ $compile_flags.= " -DSH3 -D_SH3_ -DSHx";
+ }
+elsif ($target_cpu eq "SH4") {
+ $compile_flags.= " -DSH4 -D_SH4_ -DSHx";
+ }
+elsif ($target_cpu eq "R4100" || $target_cpu eq "R4111" || $target_cpu eq "R4300") {
+ $compile_flags.= " -DMIPS -D_MIPS_ -DMIPS_R4000";
+ }
+elsif ($target_cpu eq "ARMV4" || $target_cpu eq "ARMV4T" || $target_cpu eq "ARMV4I") {
+ $compile_flags.= " -DARM -D_ARM_ -D_M_ARM -D$target_cpu -D_$target_cpu\_";
+ if ($wince_major_version >= 5) {
+ $link_target_cpu = "THUMB";
+ }
+ else {
+ $link_target_cpu = "ARM";
+ }
+}
+
+# We need to set the machine type for the link flag
+if ($link_target_cpu eq "X86" && $wince_major_version < 5) {
+ $link_target_cpu = "IX86";
+}
+$link_flags.= " /MACHINE:".$link_target_cpu;
+
+print "Compile:".$compile_flags."\n";
+print "Link:".$link_flags."\n";
+
+# And finally we write out the configuration
+open (FILEHANDLE, '>wceconfig.mak');
+#WCEVERSION=200
+#WCELDVERSION=2.00
+
+print FILEHANDLE 'WCEVERSION='.$wince_major_version.$wince_minor_version."\n";
+print FILEHANDLE 'WCELDVERSION='.$wince_major_version.'.'.$wince_minor_version."\n";
+print FILEHANDLE 'WCEPLATFORM=foobar'."\n";
+print FILEHANDLE 'WCETARGETDEFS='.$compile_flags."\n";
+print FILEHANDLE 'LFLAGS='.$link_flags."\n";
+
+close(FILEHANDLE);
+
+
diff --git a/makefile b/makefile
index 2c42441..8db6f65 100644
--- a/makefile
+++ b/makefile
@@ -1,57 +1,63 @@
-!INCLUDE <wcedefs.mak>
+!INCLUDE <wceconfig.mak>
-CFLAGS=/W3 /WX /Ox /O2 /Ob2 /GF /Gy /nologo $(WCETARGETDEFS) -DWIN32_PLATFORM_PSPC -DUNICODE -D_UNICODE -DWIN32 -DWIN32_LEAN_AND_MEAN -Iinclude -D_WINDLL -D_DLL /Foobj/ -D_MSC_VER=1200
+CFLAGS=/W3 /WX /Ox /O2 /Ob2 /GF /Gy /nologo $(WCETARGETDEFS) -Zc:wchar_t- -DUNICODE -D_UNICODE -DWIN32 -DWIN32_LEAN_AND_MEAN -Iinclude -D_WINDLL -D_DLL /Foobj/ -D_MSC_VER=1200
SRC = \
src/args.cpp \
src/assert.cpp \
src/ChildData.cpp \
src/env.cpp \
src/errno.cpp \
src/io.cpp \
src/pipe.cpp \
src/process.cpp \
src/redir.cpp \
src/stat.cpp \
src/stdio_extras.cpp \
src/stdlib_extras.cpp \
src/string_extras.cpp \
src/time.cpp \
src/timeb.cpp \
src/ts_string.cpp \
src/winmain.cpp \
src/winsock_extras.cpp
!IF "$(WCEVERSION)"=="211"
SRC = \
$(SRC) \
src/wce211_ctype.c \
src/wce211_string.c
!ENDIF
OBJS = $(SRC:src=obj)
OBJS = $(OBJS:.cpp=.obj)
OBJS = $(OBJS:.c=.obj)
{src}.c{obj}.obj:
- $(CC) $(CFLAGS) -c $<
+ @$(CC) $(CFLAGS) -c $<
{src}.cpp{obj}.obj:
- $(CC) $(CFLAGS) -c $<
+ @$(CC) $(CFLAGS) -c $<
all: lib\wcecompat.lib lib\wcecompatex.lib
# echo $(OBJS)
obj:
@md obj 2> NUL
lib:
@md lib 2> NUL
$(OBJS): makefile obj
+clean:
+ @echo Deleting target libraries...
+ @del lib\*.lib
+ @echo Deleting object files...
+ @del obj\*.obj
+
lib\wcecompat.lib: lib $(OBJS) makefile
- @lib /nologo /out:lib\wcecompat.lib $(OBJS)
+ @lib /nologo /out:lib\wcecompat.lib $(LFLAGS) $(OBJS)
lib\wcecompatex.lib: lib $(OBJS) makefile
@lib /nologo /out:lib\wcecompatex.lib $(OBJS)
diff --git a/src/assert.cpp b/src/assert.cpp
index 801d8c7..e5d7a8a 100644
--- a/src/assert.cpp
+++ b/src/assert.cpp
@@ -1,31 +1,31 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
void _assert(void* expression, void* file, unsigned line)
{
fprintf(stderr, "Assertion failed: %s, file %s, line %d\n", (char*)expression, file, line);
- abort();
+ exit(3);
}
diff --git a/src/env.cpp b/src/env.cpp
index e97c1bb..66fc1a1 100644
--- a/src/env.cpp
+++ b/src/env.cpp
@@ -1,84 +1,86 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include "ts_string.h"
#include "redir.h" // initStdHandles
int num_env_vars;
char** _environ;
wchar_t** _wenviron;
char* getenv(const char* name)
{
+#if _WIN32_WCE < 0x500 || !defined(COREDLL_CORESIOA)
initStdHandles(); // get environment variables from ChildData
+#endif
if (_environ == NULL)
return NULL;
// char** env = _environ;
// while (*env != NULL)
for (int i=0; i<num_env_vars; i++)
{
char** env = &_environ[i];
char* equals = strchr(*env, '=');
if (equals != NULL)
{
if (_strnicmp(name, *env, equals-*env) == 0)
{
return equals+1;
}
}
}
return NULL;
}
int _putenv(const char* str)
{
_environ = (char**)realloc(_environ, (num_env_vars+1)*sizeof(char*));
if (_environ == NULL)
return -1;
_wenviron = (wchar_t**)realloc(_wenviron, (num_env_vars+1)*sizeof(wchar_t*));
if (_wenviron == NULL)
return -1;
num_env_vars++;
_environ[num_env_vars-1] = ts_strdup(str);
_wenviron[num_env_vars-1] = ts_strdup_ascii_to_unicode(str);
return 0;
}
int _wputenv(const wchar_t* str)
{
_environ = (char**)realloc(_environ, (num_env_vars+1)*sizeof(char*));
if (_environ == NULL)
return -1;
_wenviron = (wchar_t**)realloc(_wenviron, (num_env_vars+1)*sizeof(wchar_t*));
if (_wenviron == NULL)
return -1;
num_env_vars++;
_environ[num_env_vars-1] = ts_strdup_unicode_to_ascii(str);
_wenviron[num_env_vars-1] = ts_strdup(str);
return 0;
}
diff --git a/src/io.cpp b/src/io.cpp
index b283ae8..35675d7 100644
--- a/src/io.cpp
+++ b/src/io.cpp
@@ -1,42 +1,42 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <io.h>
#include <stdio.h>
#include <errno.h>
-int access(const char* pathname, int mode)
+int access(const char* /*pathname*/, int /*mode*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: access(pathname=%s, mode=%d)\n", pathname, mode) <= 0)
// printf("NOT IMPLEMENTED: access(pathname=%s, mode=%d)\n", pathname, mode);
errno = -1;
return -1;
}
-int unlink(const char* pathname)
+int unlink(const char* /*pathname*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: unlink(pathname=%s)\n", pathname) <= 0)
// printf("NOT IMPLEMENTED: unlink(pathname=%s)\n", pathname);
errno = -1;
return -1;
}
diff --git a/src/redir.cpp b/src/redir.cpp
index 2fbafb5..3da109c 100644
--- a/src/redir.cpp
+++ b/src/redir.cpp
@@ -1,1669 +1,1698 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
// TODO: so that multiple DLL's and the executable itself, potentially all using this pipe stuff to redirect to/from
// the parent process, can all coexist, we need to write the address of the read/write function into the start of the
// memory mapped buffer, so that subsequent init's of the pipe can pick up the address and use it as the function
// for reading/writing. This will also require a mutex to control access to the first few bytes, when reading/writing
// the address.
//
// Also, if redirection to files is handled by the process rather than the parent, then we need to make sure one
// function is used otherwise different DLL's will overwrite each other's output to the files.
-
#include "redir.h"
#include <stdlib.h>
#include <stdarg.h>
#include <errno.h>
#include <windows.h>
#include <stdio.h>
#include <tchar.h>
#include "ts_string.h"
#include "pipe.h"
#include "ChildData.h"
#include <time.h>
#include <io.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <conio.h>
-
+#include <ceconfig.h>
+#if _WIN32_WCE < 0x500 || !defined(COREDLL_CORESIOA)
/*
extern "C" void wcelog(const char* format, ...)
{
TCHAR* filename = TEXT("\\log.txt");
HANDLE hFile = INVALID_HANDLE_VALUE;
va_list args;
char buffer[4096];
DWORD numWritten;
hFile = CreateFile(filename, GENERIC_READ|GENERIC_WRITE, 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
if (hFile == INVALID_HANDLE_VALUE)
goto cleanup;
if (SetFilePointer(hFile, 0, NULL, FILE_END) == 0xFFFFFFFF)
goto cleanup;
va_start(args, format);
if (_vsnprintf(buffer, sizeof(buffer), format, args) == -1)
buffer[sizeof(buffer)-1] = '\0';
va_end(args);
WriteFile(hFile, buffer, strlen(buffer), &numWritten, NULL);
cleanup:
if (hFile != INVALID_HANDLE_VALUE)
CloseHandle(hFile);
}
*/
#define STDIN (0)
#define STDOUT (1)
#define STDERR (2)
// low-level io
typedef struct _FD_STRUCT {
Pipe* pipe; // if non-NULL, use this instead of hFile
unsigned char pipeChannel; // fd2 of RedirArg for pipe
HANDLE hFile;
BOOL binary;
BOOL eof;
} _FD_STRUCT;
#define FD_MAX (2048)
#define FD_BLOCK_SIZE (32) /* changing this will screw up "in_use" code below */
#define FD_MAX_BLOCKS (FD_MAX/FD_BLOCK_SIZE)
typedef struct _FD_BLOCK {
unsigned long in_use; // bitmask of in-use entries, LSB=fds[0], MSB=fds[31]
_FD_STRUCT fds[FD_BLOCK_SIZE]; // fd's
} _FD_BLOCK;
_FD_BLOCK _fd_block0 = {
0x00000007, // first three in use (reserved)
{
{ NULL, -1, INVALID_HANDLE_VALUE, FALSE, FALSE },
{ NULL, -1, INVALID_HANDLE_VALUE, FALSE, FALSE },
{ NULL, -1, INVALID_HANDLE_VALUE, FALSE, FALSE }
}
};
_FD_BLOCK* _fd_blocks[FD_MAX_BLOCKS] = { &_fd_block0 };
// file stream
typedef struct _FILE_BLOCK _FILE_BLOCK;
typedef struct _FILE {
int file_index;
int fd;
int bufferedChar;
BOOL error;
} _FILE;
#define FILE_MAX (512)
#define FILE_BLOCK_SIZE (32) /* changing this will screw up "in_use" code below */
#define FILE_MAX_BLOCKS (FILE_MAX/FILE_BLOCK_SIZE)
typedef struct _FILE_BLOCK {
unsigned long in_use; // bitmask of in-use entries, LSB=file[0], MSB=file[31]
_FILE files[FILE_BLOCK_SIZE]; // file's
} _FILE_BLOCK;
_FILE_BLOCK _file_block0 = {
0x00000007, // first three in use (reserved)
{
{ 0, STDIN, -1 },
{ 1, STDOUT, -1 },
{ 2, STDERR, -1 },
// maybe there should get initialised at runtime, but that means re-initialising uneccesarily on each use
{ 3 },
{ 4 },
{ 5 },
{ 6 },
{ 7 },
{ 8 },
{ 9 },
{ 10 },
{ 11 },
{ 12 },
{ 13 },
{ 14 },
{ 15 },
{ 16 },
{ 17 },
{ 18 },
{ 19 },
{ 20 },
{ 21 },
{ 22 },
{ 23 },
{ 24 },
{ 25 },
{ 26 },
{ 27 },
{ 28 },
{ 29 },
{ 30 },
{ 31 }
}
};
_FILE_BLOCK* _file_blocks[FILE_MAX_BLOCKS] = { &_file_block0 };
static bool _open_fds(const char* filename, int flags, int mode, _FD_STRUCT* fds);
static bool _wopen_fds(const WCHAR* filename, int flags, int mode, _FD_STRUCT* fds);
static int fd_allocate()
{
for (int block=0; block<FD_MAX_BLOCKS; block++)
{
if (_fd_blocks[block] == NULL)
{ // unused block, allocate it
_fd_blocks[block] = (_FD_BLOCK*)malloc(sizeof(_FD_BLOCK));
if (_fd_blocks[block] == NULL)
return -1;
// we'll use the first index
_fd_blocks[block]->in_use = 0x00000001;
// return fd at first index
return block*FD_BLOCK_SIZE;
}
if (_fd_blocks[block]->in_use != 0xffffffff)
{ // there's an unused entry in this block, find it
int index;
unsigned long index_bit = 0x00000001;
for (index=0; index<FD_BLOCK_SIZE; index++)
{
if ((_fd_blocks[block]->in_use & index_bit) == 0)
break; // found it
index_bit <<= 1;
}
// mark it as in use and return it
_fd_blocks[block]->in_use |= index_bit;
return block*FD_BLOCK_SIZE + index;
}
}
// if we get here there are no free fd's
return -1;
}
static void fd_release(int fd)
{
// mask as not in use
int block = fd / FD_BLOCK_SIZE;
int index = fd % FD_BLOCK_SIZE;
unsigned long index_bit = 1 << index;
_fd_blocks[block]->in_use &= ~index_bit;
}
static _FILE* file_allocate()
{
for (int block=0; block<FILE_MAX_BLOCKS; block++)
{
if (_file_blocks[block] == NULL)
{ // unused block, allocate it
_file_blocks[block] = (_FILE_BLOCK*)malloc(sizeof(_FILE_BLOCK));
if (_file_blocks[block] == NULL)
return NULL;
// we'll use the first index
_file_blocks[block]->in_use = 0x00000001;
// set all file_index's
for (int index=0; index<FILE_BLOCK_SIZE; index++)
_file_blocks[block]->files[index].file_index = block*FILE_BLOCK_SIZE + index;
// return file at first index
return &_file_blocks[block]->files[0];
}
if (_file_blocks[block]->in_use != 0xffffffff)
{ // there's an unused entry in this block, find it
int index;
unsigned long index_bit = 0x00000001;
for (index=0; index<FILE_BLOCK_SIZE; index++)
{
if ((_file_blocks[block]->in_use & index_bit) == 0)
break; // found it
index_bit <<= 1;
}
// mark it as in use and return it
_file_blocks[block]->in_use |= index_bit;
return &_file_blocks[block]->files[index];
}
}
// if we get here there are no free files
return NULL;
}
static void file_release(_FILE* file)
{
if (file == NULL)
return;
// sanity-check file_index
if (file->file_index < 0 || file->file_index >= FILE_MAX)
return;
// mask as not in use
int block = file->file_index / FILE_BLOCK_SIZE;
int index = file->file_index % FILE_BLOCK_SIZE;
unsigned long index_bit = 1 << index;
_file_blocks[block]->in_use &= ~index_bit;
}
#ifdef stdin
#undef stdin
#endif
#ifdef stdout
#undef stdout
#endif
#ifdef stderr
#undef stderr
#endif
#define stdin (&_file_block0.files[0])
#define stdout (&_file_block0.files[1])
#define stderr (&_file_block0.files[2])
#define fd_stdin (&_fd_block0.fds[0])
#define fd_stdout (&_fd_block0.fds[1])
#define fd_stderr (&_fd_block0.fds[2])
static ChildData* g_childData = NULL;
//_FD_STRUCT mystdin = { _FD_STRUCT_KEY, NULL, -1, INVALID_HANDLE_VALUE, FALSE, -1, FALSE, FALSE };
//_FD_STRUCT mystdout = { _FD_STRUCT_KEY, NULL, -1, INVALID_HANDLE_VALUE, FALSE, -1, FALSE, FALSE };
//_FD_STRUCT mystderr = { _FD_STRUCT_KEY, NULL, -1, INVALID_HANDLE_VALUE, FALSE, -1, FALSE, FALSE };
/*
BOOL redirectStdin(const char* filename)
{
FILE* f = fopen(filename, "r");
if (f == NULL)
return FALSE;
memcpy(&mystdin, f, sizeof(_FD_STRUCT));
free(f);
return TRUE;
}
BOOL redirectStdout(const char* filename, BOOL append)
{
FILE* f = fopen(filename, "w");
if (f == NULL)
return FALSE;
memcpy(&mystdout, f, sizeof(_FD_STRUCT));
free(f);
return TRUE;
}
BOOL redirectStderr(const char* filename, BOOL append)
{
FILE* f = fopen(filename, "w");
if (f == NULL)
return FALSE;
memcpy(&mystderr, f, sizeof(_FD_STRUCT));
free(f);
return TRUE;
}
BOOL redirectStdoutStderr(const char* filename, BOOL append)
{
FILE* f = fopen(filename, "w");
if (f == NULL)
return FALSE;
memcpy(&mystdout, f, sizeof(_FD_STRUCT));
memcpy(&mystderr, f, sizeof(_FD_STRUCT));
free(f);
return TRUE;
}
*/
/*
close(int fd)
{
closePipe()
pipe = NULL
}
*/
inline bool valid_fd(int fd)
{
if (fd < FD_BLOCK_SIZE)
return (_fd_block0.in_use & (1 << fd)) == 0 ? false : true;
else
{
int block = fd / FD_BLOCK_SIZE;
if (_fd_blocks[block] == NULL)
return false;
int index = fd % FD_BLOCK_SIZE;
return (_fd_blocks[block]->in_use & (1 << index)) == 0 ? false : true;
}
}
inline _FD_STRUCT* fds_from_index(int fd)
{
if (fd < FD_BLOCK_SIZE)
return &_fd_block0.fds[fd];
else
{
int block = fd / FD_BLOCK_SIZE;
if (_fd_blocks[block] == NULL)
return NULL;
int index = fd % FD_BLOCK_SIZE;
return &_fd_blocks[block]->fds[index];
}
}
inline bool valid_file(int file_index)
{
if (file_index < FILE_BLOCK_SIZE)
return (_file_block0.in_use & (1 << file_index)) == 0 ? false : true;
else
{
int block = file_index / FILE_BLOCK_SIZE;
if (_file_blocks[block] == NULL)
return false;
int index = file_index % FILE_BLOCK_SIZE;
return (_file_blocks[block]->in_use & (1 << index)) == 0 ? false : true;
}
}
static bool initialisedStdHandles = false;
static void uninitStdHandles()
{
if (!initialisedStdHandles)
return;
if (g_childData != NULL)
{
delete g_childData;
g_childData = NULL;
}
if (valid_file(STDIN))
fclose(stdin);
if (valid_file(STDOUT))
fclose(stdout);
if (valid_file(STDERR))
fclose(stderr);
if (valid_fd(STDIN))
close(STDIN);
if (valid_fd(STDOUT))
close(STDOUT);
if (valid_fd(STDERR))
close(STDERR);
initialisedStdHandles = false;
}
static void shutdownIo()
{
// TODO: Flush and close all _FILE's and then _FD_STRUCT's.
// If we implement redirection of handles through other handles then we
// probably need to shutdown all of the redirecting handles first, and
// then the remaining handles.
uninitStdHandles();
}
// returns true only if pipes have been initialised successfully
bool initStdHandles()
{
if (initialisedStdHandles)
return true;
#if 0
// Get I/O redirection arguments from command-line
char* stdoutFilename;
char* stderrFilename;
char* stdinFilename;
BOOL stdoutAppend;
BOOL stderrAppend;
BOOL stdinAppend;
if (getRedirArgs(GetCommandLine(),
&stdinFilename, &stdoutFilename, &stderrFilename, &stdinAppend, &stdoutAppend, &stderrAppend))
{
}
#endif
TCHAR name[100];
HANDLE hFileMapping = NULL;
unsigned char* pBuffer = NULL;
_stprintf(name, TEXT("wcecompat.%08x.child_data"), GetCurrentProcessId());
hFileMapping = CreateFileMapping((HANDLE)INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, 0, 1, name);
if (hFileMapping == NULL)
goto cleanup;
else if (GetLastError() != ERROR_ALREADY_EXISTS)
{
CloseHandle(hFileMapping);
hFileMapping = NULL;
HANDLE hEvent = CreateEvent(NULL, FALSE, FALSE, TEXT("wcecompat.starting_child"));
if (hEvent == NULL)
{ // failed to create named event
}
else if (GetLastError() == ERROR_ALREADY_EXISTS)
{ // we're in DllMain, so do nothing
}
else
{
CloseHandle(hEvent);
}
}
else
{
pBuffer = (unsigned char*)MapViewOfFile(hFileMapping, FILE_MAP_WRITE, 0, 0, 0);
if (pBuffer == NULL)
{ // failed to map buffer
}
else
{
g_childData = new ChildData;
if (g_childData == NULL)
goto cleanup;
if (!g_childData->decode(pBuffer))//, 16384);
goto cleanup;
g_childData->restoreEnvironment();
RedirArg* stdinRedir = g_childData->getRedirArg(0);
RedirArg* stdoutRedir = g_childData->getRedirArg(1);
RedirArg* stderrRedir = g_childData->getRedirArg(2);
if (stdinRedir != NULL && stdinRedir->redirType != RT_NONE)
{
if (stdinRedir->redirType == RT_PIPE_UNSPEC)
{
_FD_STRUCT* fds = fds_from_index(STDIN);
if (fds == NULL)
goto cleanup;
fds->pipe = createPipe(stdinRedir->filename, OPEN_EXISTING);
if (fds->pipe == NULL)
{ // failed to open stdin pipe
goto cleanup;
}
fds->pipeChannel = (unsigned char)stdinRedir->fd2;
}
else if (stdinRedir->redirType == RT_HANDLE)
{
}
else if (stdinRedir->redirType == RT_FILE)
{
// WCHAR* mode = L"r"; // default to "r" for the cases we don't know how to handle
bool r = stdinRedir->openForRead;
bool w = stdinRedir->openForWrite;
bool a = stdinRedir->append;
/*
// rwa mode
// 010 "w"
// 011 "a"
// 100 "r"
// 110 "r+"
// 111 "a+"
if (a)
{
if (r)
mode = L"a+";
else
mode = L"a";
}
else if (r)
{
if (w)
mode = L"r+";
else
mode = L"r";
}
else if (w)
mode = L"w";
FILE* f = _wfopen(stdinRedir->filename, mode);
if (f == NULL)
goto cleanup;
memcpy(&mystdin, f, sizeof(_FD_STRUCT));
free(f);
*/
// rwa mode
// 010 "w" w, CREATE_ALWAYS O_WRONLY O_CREAT|O_TRUNC
// 011 "a" w, OPEN_ALWAYS (APPEND DATA) O_WRONLY O_APPEND O_CREAT
- // 100 "r" r, OPEN_EXISTING O_RDONLY
- // 110 "r+" r/w, OPEN_EXISTING O_RDWR
+ // 100 "r" r, OPEN_EXISTING O_RDONLY
+ // 110 "r+" r/w, OPEN_EXISTING O_RDWR
// 111 "a+" r/w, OPEN_ALWAYS (APPEND DATA) O_RDWR O_APPEND O_CREAT
int flags = 0;
int mode = 0;
if (r && w)
flags |= O_RDWR;
else if (r)
flags |= O_RDONLY;
else if (w)
flags |= O_WRONLY;
if (w)
{
if (!(r && !a))
{
flags |= O_CREAT;
mode = S_IREAD | S_IWRITE;
}
if (!r && !a)
flags |= O_TRUNC;
}
if (a)
flags |= O_APPEND;
_FD_STRUCT* fds = fds_from_index(STDIN);
if (fds == NULL)
goto cleanup;
if (!_wopen_fds(stdinRedir->filename, flags, mode, fds))
goto cleanup;
}
}
if (stdoutRedir != NULL && stdoutRedir->redirType != RT_NONE)
{
if (stdoutRedir->redirType == RT_PIPE_UNSPEC)
{
_FD_STRUCT* fds = fds_from_index(STDOUT);
if (fds == NULL)
goto cleanup;
fds->pipe = createPipe(stdoutRedir->filename, OPEN_EXISTING);
if (fds->pipe == NULL)
{ // failed to open stdout pipe
goto cleanup;
}
fds->pipeChannel = (unsigned char)stdoutRedir->fd2;
}
else if (stdoutRedir->redirType == RT_HANDLE)
{
}
else if (stdoutRedir->redirType == RT_FILE)
{
// WCHAR* mode = L"r"; // default to "r" for the cases we don't know how to handle
bool r = stdoutRedir->openForRead;
bool w = stdoutRedir->openForWrite;
bool a = stdoutRedir->append;
/*
// rwa mode
// 010 "w"
// 011 "a"
// 100 "r"
// 110 "r+"
// 111 "a+"
if (a)
{
if (r)
mode = L"a+";
else
mode = L"a";
}
else if (r)
{
if (w)
mode = L"r+";
else
mode = L"r";
}
else if (w)
mode = L"w";
FILE* f = _wfopen(stdoutRedir->filename, mode);
if (f == NULL)
goto cleanup;
memcpy(&mystdout, f, sizeof(_FD_STRUCT));
free(f);
*/
// rwa mode
// 010 "w" w, CREATE_ALWAYS O_WRONLY O_CREAT|O_TRUNC
// 011 "a" w, OPEN_ALWAYS (APPEND DATA) O_WRONLY O_APPEND O_CREAT
- // 100 "r" r, OPEN_EXISTING O_RDONLY
- // 110 "r+" r/w, OPEN_EXISTING O_RDWR
+ // 100 "r" r, OPEN_EXISTING O_RDONLY
+ // 110 "r+" r/w, OPEN_EXISTING O_RDWR
// 111 "a+" r/w, OPEN_ALWAYS (APPEND DATA) O_RDWR O_APPEND O_CREAT
int flags = 0;
int mode = 0;
if (r && w)
flags |= O_RDWR;
else if (r)
flags |= O_RDONLY;
else if (w)
flags |= O_WRONLY;
if (w)
{
if (!(r && !a))
{
flags |= O_CREAT;
mode = S_IREAD | S_IWRITE;
}
if (!r && !a)
flags |= O_TRUNC;
}
if (a)
flags |= O_APPEND;
_FD_STRUCT* fds = fds_from_index(STDOUT);
if (fds == NULL)
goto cleanup;
if (!_wopen_fds(stdoutRedir->filename, flags, mode, fds))
goto cleanup;
}
}
if (stderrRedir != NULL && stderrRedir->redirType != RT_NONE)
{
if (stderrRedir->redirType == RT_PIPE_UNSPEC)
{
_FD_STRUCT* fds = fds_from_index(STDERR);
if (fds == NULL)
goto cleanup;
if (stdoutRedir != NULL && stdoutRedir->redirType == RT_PIPE_UNSPEC &&
wcscmp(stderrRedir->filename, stdoutRedir->filename) == 0)
{
_FD_STRUCT* fds_stdout = fds_from_index(STDOUT);
if (fds_stdout == NULL)
goto cleanup;
fds->pipe = fds_stdout->pipe;
}
else
{
fds->pipe = createPipe(stderrRedir->filename, OPEN_EXISTING);
if (fds->pipe == NULL)
{ // failed to open stderr pipe
goto cleanup;
}
}
fds->pipeChannel = (unsigned char)stderrRedir->fd2;
}
else if (stderrRedir->redirType == RT_HANDLE)
{
}
else if (stderrRedir->redirType == RT_FILE)
{
// WCHAR* mode = L"r"; // default to "r" for the cases we don't know how to handle
bool r = stderrRedir->openForRead;
bool w = stderrRedir->openForWrite;
bool a = stderrRedir->append;
/*
// rwa mode
// 010 "w"
// 011 "a"
// 100 "r"
// 110 "r+"
// 111 "a+"
if (a)
{
if (r)
mode = L"a+";
else
mode = L"a";
}
else if (r)
{
if (w)
mode = L"r+";
else
mode = L"r";
}
else if (w)
mode = L"w";
FILE* f = _wfopen(stderrRedir->filename, mode);
if (f == NULL)
goto cleanup;
memcpy(&mystderr, f, sizeof(_FD_STRUCT));
free(f);
*/
// rwa mode
// 010 "w" w, CREATE_ALWAYS O_WRONLY O_CREAT|O_TRUNC
// 011 "a" w, OPEN_ALWAYS (APPEND DATA) O_WRONLY O_APPEND O_CREAT
- // 100 "r" r, OPEN_EXISTING O_RDONLY
- // 110 "r+" r/w, OPEN_EXISTING O_RDWR
+ // 100 "r" r, OPEN_EXISTING O_RDONLY
+ // 110 "r+" r/w, OPEN_EXISTING O_RDWR
// 111 "a+" r/w, OPEN_ALWAYS (APPEND DATA) O_RDWR O_APPEND O_CREAT
int flags = 0;
int mode = 0;
if (r && w)
flags |= O_RDWR;
else if (r)
flags |= O_RDONLY;
else if (w)
flags |= O_WRONLY;
if (w)
{
if (!(r && !a))
{
flags |= O_CREAT;
mode = S_IREAD | S_IWRITE;
}
if (!r && !a)
flags |= O_TRUNC;
}
if (a)
flags |= O_APPEND;
_FD_STRUCT* fds = fds_from_index(STDERR);
if (fds == NULL)
goto cleanup;
if (!_wopen_fds(stderrRedir->filename, flags, mode, fds))
goto cleanup;
}
}
}
}
initialisedStdHandles = true;
atexit(shutdownIo);
cleanup:
if (!initialisedStdHandles)
uninitStdHandles();
if (pBuffer != NULL)
UnmapViewOfFile(pBuffer);
if (hFileMapping != NULL)
CloseHandle(hFileMapping);
return initialisedStdHandles;
}
static inline bool initStdHandlesInline()
{
if (initialisedStdHandles)
return true;
return initStdHandles();
}
// returns non-zero if data is available on stdin
int _kbhit(void)
{
if (!valid_fd(STDIN))
{
if (!initStdHandlesInline())
return 0;
if (!valid_fd(STDIN))
return 0;
}
if (fd_stdin->pipe != NULL)
{
return pipeReadable(fd_stdin->pipe) ? 1 : 0;
}
else
return 0;
}
int _open(const char* filename, int flags, int mode)
{
bool result = false;
int fd = -1;
_FD_STRUCT* fds;
fd = fd_allocate();
if (fd == -1)
goto cleanup;
fds = fds_from_index(fd);
if (fds == NULL)
goto cleanup;
if (!_open_fds(filename, flags, mode, fds))
goto cleanup;
result = true;
cleanup:
if (result == false && fd != -1)
{
fd_release(fd);
fd = -1;
}
return fd;
}
int _wopen(const WCHAR* filename, int flags, int mode)
{
bool result = false;
int fd = -1;
_FD_STRUCT* fds;
fd = fd_allocate();
if (fd == -1)
goto cleanup;
fds = fds_from_index(fd);
if (fds == NULL)
goto cleanup;
if (!_wopen_fds(filename, flags, mode, fds))
goto cleanup;
result = true;
cleanup:
if (result == false && fd != -1)
{
fd_release(fd);
fd = -1;
}
return fd;
}
static bool _open_fds(const char* filename, int flags, int mode, _FD_STRUCT* fds)
{
WCHAR filenameW[1024];
ascii2unicode(filename, filenameW, 1024);
return _wopen_fds(filenameW, flags, mode, fds);
}
static bool _wopen_fds(const WCHAR* filename, int flags, int mode, _FD_STRUCT* fds)
{
bool result = false;
bool share_read = false;
DWORD dwDesiredAccess = 0;
DWORD dwShareMode = 0;
DWORD dwCreationDisposition = 0;
DWORD dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL;
HANDLE hFile = INVALID_HANDLE_VALUE;
if (filename == NULL || fds == NULL)
return NULL;
if ((flags & O_BINARY) && (flags & O_TEXT))
goto cleanup;
if (!(flags & O_WRONLY))
{
share_read = true;
dwDesiredAccess |= GENERIC_READ;
}
if ((flags & O_WRONLY) || (flags & O_RDWR))
{
share_read = false;
dwDesiredAccess |= GENERIC_WRITE;
}
if (share_read)
dwShareMode |= FILE_SHARE_READ;
if (flags & O_CREAT)
{
if (flags & O_TRUNC)
dwCreationDisposition = CREATE_ALWAYS;
else if (flags & O_EXCL)
dwCreationDisposition = CREATE_NEW;
else
dwCreationDisposition = OPEN_ALWAYS;
}
else if (flags & O_TRUNC)
dwCreationDisposition = TRUNCATE_EXISTING;
else
dwCreationDisposition = OPEN_EXISTING;
if ((flags & O_CREAT) && !(mode & S_IWRITE))
dwFlagsAndAttributes = FILE_ATTRIBUTE_READONLY;
hFile = CreateFile(filename, dwDesiredAccess, dwShareMode,
NULL, dwCreationDisposition, dwFlagsAndAttributes, NULL);
if (hFile == INVALID_HANDLE_VALUE)
goto cleanup;
if (flags & O_APPEND)
{
if (SetFilePointer(hFile, 0, NULL, FILE_END) == 0xFFFFFFFF)
goto cleanup;
}
fds->pipe = NULL;
fds->pipeChannel = -1;
fds->hFile = hFile;
fds->binary = (flags & O_BINARY);
fds->eof = FALSE;
result = true;
cleanup:
// close file on failure
if (!result && hFile != INVALID_HANDLE_VALUE)
CloseHandle(hFile);
return result;
}
FILE* fopen(const char* filename, const char* mode)
{
bool result = false;
bool mode_r = false;
bool mode_w = false;
bool mode_a = false;
bool mode_r_plus = false;
bool mode_w_plus = false;
bool mode_a_plus = false;
bool mode_t = false;
bool mode_b = false;
int num_rwa;
int flags = 0;
int pmode = 0;
int fd = -1;
_FILE* file = NULL;
if (filename == NULL || mode == NULL)
return NULL;
file = file_allocate();
if (file == NULL)
return NULL;
while (*mode != 0)
{
switch (*mode)
{
case 'r':
if (*(mode+1) == '+')
{
mode_r_plus = true;
mode++;
}
else
mode_r = true;
break;
case 'w':
if (*(mode+1) == '+')
{
mode_w_plus = true;
mode++;
}
else
mode_w = true;
break;
case 'a':
if (*(mode+1) == '+')
{
mode_a_plus = true;
mode++;
}
else
mode_a = true;
break;
case 't':
mode_t = true;
break;
case 'b':
mode_b = true;
break;
}
mode++;
}
num_rwa = 0;
if (mode_r) num_rwa++;
if (mode_w) num_rwa++;
if (mode_a) num_rwa++;
if (mode_r_plus) num_rwa++;
if (mode_w_plus) num_rwa++;
if (mode_a_plus) num_rwa++;
if (num_rwa != 1)
goto cleanup;
if (mode_t && mode_b)
goto cleanup;
// r = O_RDONLY
// w = O_CREAT|O_TRUNC | O_WRONLY
// a = O_CREAT | O_WRONLY | O_APPEND
// r+ = O_RDWR
// w+ = O_CREAT|O_TRUNC | O_RDWR
// a+ = O_CREAT | O_RDWR | O_APPEND
if (mode_w || mode_a || mode_w_plus || mode_a_plus)
{
flags |= O_CREAT;
pmode = S_IREAD | S_IWRITE;
}
if (mode_w || mode_w_plus)
flags |= O_TRUNC;
if (mode_r)
flags |= O_RDONLY;
else if (mode_w || mode_a)
flags |= O_WRONLY;
else
flags |= O_RDWR;
if (mode_a || mode_a_plus)
flags |= O_APPEND;
if (mode_t)
flags |= O_TEXT;
if (mode_b)
flags |= O_BINARY;
fd = open(filename, flags, pmode);
if (fd == -1)
goto cleanup;
file->fd = fd;
file->bufferedChar = -1;
file->error = FALSE;
result = true;
cleanup:
if (result == false)
{
if (file != NULL)
{
file_release(file);
file = NULL; // returned below
}
if (fd != -1)
close(fd);
}
return (FILE*)file;
}
int close(int fd)
{
bool result = false;
_FD_STRUCT* fds;
fds = fds_from_index(fd);
if (fds == NULL)
goto cleanup;
if (!CloseHandle(fds->hFile))
goto cleanup;
fd_release(fd);
result = true;
cleanup:
if (result == false)
errno = -1;
return result ? 0 : -1;
}
int fclose(FILE* stream)
{
_FILE* file = (_FILE*)stream;
bool result = false;
if (file == NULL)
return EOF;
if (close(file->fd) != 0)
goto cleanup;
file_release(file);
result = true;
cleanup:
return result;
}
int read(int fd, void* buffer, unsigned int count)
{
- bool result = false;
_FD_STRUCT* fds;
DWORD numRead;
fds = fds_from_index(fd);
if (fds == NULL)
{
errno = EBADF;
return -1;
}
initStdHandlesInline();
if (fds->pipe != NULL)
{
numRead = pipeRead(fds->pipe, (unsigned char*)buffer, count);
}
else if (fds->hFile != INVALID_HANDLE_VALUE)
{
if (!ReadFile(fds->hFile, buffer, count, &numRead, NULL))
{
if (GetLastError() == ERROR_HANDLE_EOF)
fds->eof = TRUE;
// else
// fds->error = TRUE;
return 0;
}
}
else
return 0;
return (int)numRead;
}
size_t fread(void* buffer, size_t size, size_t count, FILE* stream)
{
_FILE* file = (_FILE*)stream;
int read_result;
DWORD numRead;
if (file == NULL)
return 0;
read_result = read(file->fd, buffer, size*count);
numRead = (read_result == -1) ? 0 : read_result;
if (read_result == -1)
file->error = TRUE;
return numRead/size;
}
int write(int fd, const void* buffer, unsigned int count)
{
- bool result = false;
_FD_STRUCT* fds;
DWORD numWritten;
fds = fds_from_index(fd);
if (fds == NULL)
{
errno = EBADF;
return -1;
}
initStdHandlesInline();
if (fds->pipe != NULL)
{
if (fds->pipeChannel != -1)
{ // write header (for distinguishing stdout from stderr)
unsigned long length = count;
unsigned char header[5];
header[0] = fds->pipeChannel;
memcpy(&header[1], &length, sizeof(length));
- int x = pipeWrite(fds->pipe, header, sizeof(header));
+ /*int x = */
+ pipeWrite(fds->pipe, header, sizeof(header));
}
- int x = pipeWrite(fds->pipe, (unsigned char*)buffer, count);
+ /*int x =*/
+ pipeWrite(fds->pipe, (unsigned char*)buffer, count);
numWritten = count;
}
else if (fds->hFile != INVALID_HANDLE_VALUE)
{
if (!WriteFile(fds->hFile, buffer, count, &numWritten, NULL))
{
// fds->error = TRUE;
return 0;
}
}
else
return 0;
return (int)numWritten;
}
size_t fwrite(const void* buffer, size_t size, size_t count, FILE* stream)
{
_FILE* file = (_FILE*)stream;
int write_result;
DWORD numWritten;
if (file == NULL)
return 0;
write_result = write(file->fd, buffer, size*count);
numWritten = (write_result == -1) ? 0 : write_result;
if (write_result == -1)
file->error = TRUE;
return numWritten/size;
}
FILE* _getstdfilex(int n)
{
switch (n)
{
case STDIN:
return (FILE*)stdin;
case STDOUT:
return (FILE*)stdout;
case STDERR:
return (FILE*)stderr;
default:
return NULL;
}
}
int _fileno(FILE* stream)
{
return ((_FILE*)stream)->file_index;
}
int _commit(int fd)
{
bool result = false;
_FD_STRUCT* fds;
fds = fds_from_index(fd);
if (fds == NULL)
goto cleanup;
if (!FlushFileBuffers(fds->hFile))
goto cleanup;
result = true;
cleanup:
if (result == false)
errno = -1;
return result ? 0 : -1;
}
int fflush(FILE* stream)
{
_FILE* file = (_FILE*)stream;
if (file == NULL)
return EOF;
// TODO: when we implement buffering, this will need to flush
return _commit(file->fd) ? EOF : 0;
}
int _eof(int fd)
{
int result = -1;
_FD_STRUCT* fds;
fds = fds_from_index(fd);
if (fds == NULL)
goto cleanup;
result = fds->eof ? 1 : 0;
cleanup:
if (result == -1)
errno = EBADF;
return result;
}
int feof(FILE* stream)
{
_FILE* file = (_FILE*)stream;
if (file == NULL)
return EOF;
// since we don't have buffering, just return low-level eof
// TODO: when buffering is implemented, this will need more work
return _eof(file->fd) == 1 ? 1 : 0;
}
int ferror(FILE* stream)
{
_FILE* file = (_FILE*)stream;
if (file == NULL)
return 0;
return file->error;
}
void clearerr(FILE* stream)
{
_FILE* file = (_FILE*)stream;
if (file == NULL)
return;
file->error = 0;
}
long _tell(int fd)
{
bool result = false;
_FD_STRUCT* fds;
DWORD dwPos = (DWORD)-1L;
fds = fds_from_index(fd);
if (fds == NULL || fds->hFile == INVALID_HANDLE_VALUE)
goto cleanup;
dwPos = SetFilePointer(fds->hFile, 0, NULL, FILE_CURRENT);
if (dwPos == 0xffffffff)
goto cleanup;
result = true;
cleanup:
if (result == false)
{
errno = EBADF;
dwPos = (DWORD)-1L;
}
return (long)dwPos;
}
long _lseek(int fd, long offset, int whence)
{
bool result = false;
- int _errno = EBADF;
_FD_STRUCT* fds;
DWORD dwMoveMethod;
DWORD newPos;
fds = fds_from_index(fd);
if (fds == NULL || fds->hFile == INVALID_HANDLE_VALUE)
goto cleanup;
if (whence == SEEK_CUR)
dwMoveMethod = FILE_CURRENT;
else if (whence == SEEK_END)
dwMoveMethod = FILE_END;
else if (whence == SEEK_SET)
dwMoveMethod = FILE_BEGIN;
else
{
errno = EINVAL;
goto cleanup;
}
newPos = SetFilePointer(fds->hFile, offset, NULL, dwMoveMethod);
if (newPos == 0xffffffff)
goto cleanup;
result = true;
cleanup:
if (result == false)
newPos = (DWORD)-1L;
return (long)newPos;
}
int fsetpos(FILE* stream, const fpos_t* pos)
{
long longPos = (long)*pos;
return fseek(stream, longPos, SEEK_SET);
}
int fseek(FILE* stream, long offset, int origin)
{
_FILE* file = (_FILE*)stream;
long newPos = -1L;
if (file == NULL)
return EOF;
newPos = _lseek(file->fd, offset, origin);
return (newPos == -1) ? EOF : 0;
}
int fgetpos(FILE* stream, fpos_t* pos)
{
_FILE* file = (_FILE*)stream;
long _pos;
if (file == NULL || pos == NULL)
return -1;
_pos = _tell(file->fd);
if (_pos == -1L)
return -1;
*pos = (fpos_t)_pos;
return 0;
}
long ftell(FILE* stream)
{
_FILE* file = (_FILE*)stream;
long pos;
if (file == NULL)
return -1L;
pos = _tell(file->fd);
return pos;
}
int _setmode(int fd, int mode)
{
_FD_STRUCT* fds;
int prevMode;
fds = fds_from_index(fd);
if (fds == NULL)
return -1;
if (fds->binary)
prevMode = _O_BINARY;
else
prevMode = _O_TEXT;
if (mode == _O_TEXT)
fds->binary = FALSE;
else if (mode == _O_BINARY)
fds->binary = TRUE;
else
return -1;
return prevMode;
}
int fgetc(FILE* stream)
{
_FILE* file = (_FILE*)stream;
int result = EOF;
if (file == NULL)
return EOF;
if (file->bufferedChar != -1)
{
result = file->bufferedChar;
file->bufferedChar = -1;
}
else
{
char ch;
if (fread(&ch, 1, 1, stream) == 1)
result = ch;
}
return result;
}
char* fgets(char* string, int n, FILE* stream)
{
// _FILE* file = (_FILE*)stream;
char* result = string;
char ch;
// if (file == NULL)
// return NULL;
while (!ferror(stream) && !feof(stream) && n > 0)
{
ch = fgetc(stream);
// handle error/EOF
if (ch == EOF)
{
if (result == string) // no characters were read
result = NULL;
break;
}
// ignore CR
if (ch == '\r')
continue;
// add character to string
*string++ = ch;
*string = 0;
n--;
// check for end of line
if (ch == '\n')
break;
}
return result;
}
int fputc(int ch, FILE* stream)
{
char buffer[1] = { ch };
if (fwrite(buffer, 1, 1, stream) == 1)
return ch;
return EOF;
}
int fputs(const char* string, FILE* stream)
{
if (fwrite(string, strlen(string), 1, stream) == 1)
return 0;
return EOF;
}
int ungetc(int c, FILE* stream)
{
_FILE* file = (_FILE*)stream;
int result = EOF;
if (file == NULL)
return EOF;
if (file->bufferedChar == -1)
{
file->bufferedChar = c;
result = c;
}
return result;
}
-int fscanf(FILE* stream, const char* format, ...)
+int fscanf(FILE* /*stream*/, const char* /*format*/, ...)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fscanf(stream=%p, format=\"%s\")\n", stream, format) <= 0)
// printf("NOT IMPLEMENTED: fscanf(stream=%p, format=\"%s\")\n", stream, format);
return EOF;
}
int vfprintf(FILE* stream, const char* format, va_list argptr)
{
// TODO: use smaller buffer for short output, enable longer output
char buffer[4096];
if (_vsnprintf(buffer, sizeof(buffer), format, argptr) == -1)
buffer[sizeof(buffer)-1] = '\0';
return fwrite(buffer, 1, strlen(buffer), stream);
}
int fprintf(FILE* stream, const char* format, ...)
{
int result;
va_list args;
va_start(args, format);
result = vfprintf(stream, format, args);
va_end(args);
return result;
}
-FILE* _wfdopen(void* handle, const wchar_t* mode)
+FILE* _wfdopen(void* /*handle*/, const wchar_t* /*mode*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: _wfdopen(handle=%p, mode=\"%s\")\n", handle, mode) <= 0)
// printf("NOT IMPLEMENTED: _wfdopen(handle=%p, mode=\"%s\")\n", handle, mode);
return NULL;
}
-FILE* _wfreopen(const wchar_t* path, const wchar_t* mode, FILE* stream)
+FILE* _wfreopen(const wchar_t* /*path*/, const wchar_t* /*mode*/, FILE* /*stream*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: _wfreopen(path=\"%s\", mode=\"%s\", stream=%p)\n", path, mode, stream) <= 0)
// printf("NOT IMPLEMENTED: _wfreopen(path=\"%s\", mode=\"%s\", stream=%p)\n", path, mode, stream);
return NULL;
}
-wint_t fgetwc(FILE* stream)
+wint_t fgetwc(FILE* /*stream*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fgetwc(stream=%p)\n", stream) <= 0)
// printf("NOT IMPLEMENTED: fgetwc(stream=%p)\n", stream);
return WEOF;
}
-wint_t fputwc(wint_t ch, FILE* stream)
+wint_t fputwc(wint_t /*ch*/, FILE* /*stream*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fputwc(ch='%c', stream=%p)\n", ch, stream) <= 0)
// printf("NOT IMPLEMENTED: fputwc(ch='%c', stream=%p)\n", ch, stream);
return WEOF;
}
-wint_t ungetwc(wint_t ch, FILE* stream)
+wint_t ungetwc(wint_t /*ch*/, FILE* /*stream*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: ungetwc(ch='%c', stream=%p)\n", ch, stream) <= 0)
// printf("NOT IMPLEMENTED: ungetwc(ch='%c', stream=%p)\n", ch, stream);
return WEOF;
}
-wchar_t* fgetws(wchar_t* string, int n, FILE* stream)
+wchar_t* fgetws(wchar_t* /*string*/, int /*n*/, FILE* /*stream*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fgetws(string=\"%s\", n=%d, stream=%p)\n", string, n, stream) <= 0)
// printf("NOT IMPLEMENTED: fgetws(string=\"%s\", n=%d, stream=%p)\n", string, n, stream);
return NULL;
}
-int fputws(const wchar_t* string, FILE* stream)
+int fputws(const wchar_t* /*string*/, FILE* /*stream*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fputws(string=\"%s\", stream=%p)\n", string, stream) <= 0)
// printf("NOT IMPLEMENTED: fputws(string=\"%s\", stream=%p)\n", string, stream);
return WEOF;
}
FILE* _wfopen(const wchar_t* filename, const wchar_t* mode)
{
char filenameA[1024];
char modeA[10];
unicode2ascii(filename, filenameA, 1024);
unicode2ascii(mode, modeA, 10);
return fopen(filenameA, modeA);
}
-int fwscanf(FILE* stream, const wchar_t* format, ...)
+int fwscanf(FILE* /*stream*/, const wchar_t* /*format*/, ...)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fwscanf(stream=%p, format=\"%s\")\n", stream, format) <= 0)
// printf("NOT IMPLEMENTED: fwscanf(stream=%p, format=\"%s\")\n", stream, format);
return WEOF;
}
-int fwprintf(FILE* stream, const wchar_t* format, ...)
+int fwprintf(FILE* /*stream*/, const wchar_t* /*format*/, ...)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: fwprintf(stream=%p, format=\"%s\")\n", stream, format) <= 0)
// printf("NOT IMPLEMENTED: fwprintf(stream=%p, format=\"%s\")\n", stream, format);
return -1;
}
-int vfwprintf(FILE* stream, const wchar_t* format, va_list argptr)
+int vfwprintf(FILE* /*stream*/, const wchar_t* /*format*/, va_list /*argptr*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: vfwprintf(stream=%p, format=\"%s\")\n", stream, format) <= 0)
// printf("NOT IMPLEMENTED: vfwprintf(stream=%p, format=\"%s\")\n", stream, format);
return -1;
}
int printf(const char *format, ...)
{
int result;
va_list args;
va_start(args, format);
result = vfprintf(stdout, format, args);
va_end(args);
return result;
}
+
+#else
+
+int read(int, void*, unsigned int)
+{
+ errno = EBADF;
+ return -1;
+}
+
+int write(int, const void*, unsigned int)
+{
+ errno = EBADF;
+ return -1;
+}
+
+int close(int)
+{
+ return 0;
+}
+
+long _lseek(int, long, int)
+{
+ return -1;
+}
+
+int _kbhit(void)
+{
+ return 0;
+}
+#endif
diff --git a/src/redir.h b/src/redir.h
index 57e1fc7..0fb6e46 100644
--- a/src/redir.h
+++ b/src/redir.h
@@ -1,38 +1,42 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __wcecompat__redir_h__
#define __wcecompat__redir_h__
+#include <ceconfig.h>
+#if _WIN32_WCE < 0x500 || !defined(COREDLL_CORESIOA)
+
#include "pipe.h"
extern Pipe* stdoutPipe;
extern Pipe* stderrPipe;
extern Pipe* stdinPipe;
// returns true only if pipes have been initialised successfully
bool initStdHandles();
+#endif // COREDLL_CORESIOA
#endif /* __wcecompat__redir_h__ */
diff --git a/src/stat.cpp b/src/stat.cpp
index 6422588..6902864 100644
--- a/src/stat.cpp
+++ b/src/stat.cpp
@@ -1,75 +1,75 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <sys/stat.h>
#include <stdio.h>
#include <errno.h>
#include "ts_string.h"
#include "internal.h"
-int stat(const char* filename, struct stat* st)
+int stat(const char* /*filename*/, struct stat* /*st*/)
{
#if 0
if (filename == NULL || st == NULL)
{
errno = EINVAL;
return -1;
}
TCHAR filenameT[1000];
ts_strcpy(filenameT, filename);
WIN32_FILE_ATTRIBUTE_DATA fad;
if (!GetFileAttributesEx(filenameT, GetFileExInfoStandard, (LPVOID)&fad))
{
errno = ENOENT;
return -1;
}
st->st_dev = 0;
st->st_ino = 0;
st->st_mode = 0;
if (fad.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)
{
st->st_mode |= _S_IFDIR;
st->st_mode |= _S_IEXEC; // search permission
}
else
{
st->st_mode |= _S_IFREG;
if (strlen(filename) >= 4 && _stricmp(filename+strlen(filename)-4, ".exe") == 0)
st->st_mode |= _S_IEXEC; // execute permission
}
st->st_mode |= _S_IREAD; // TODO: assuming readable, but this may not be the case
if (!(fad.dwFileAttributes & FILE_ATTRIBUTE_READONLY))
st->st_mode |= _S_IWRITE;
st->st_nlink = 1; // TODO: NTFS can have links, so get the correct value
st->st_uid = 0;
st->st_gid = 0;
st->st_rdev = 0;
st->st_size = fad.nFileSizeLow;
st->st_atime = w32_filetime_to_time_t(&fad.ftLastAccessTime);
st->st_mtime = w32_filetime_to_time_t(&fad.ftLastWriteTime);
st->st_ctime = w32_filetime_to_time_t(&fad.ftCreationTime);
#endif
return 0;
}
diff --git a/src/stdio_extras.cpp b/src/stdio_extras.cpp
index cf7b53e..51cc088 100644
--- a/src/stdio_extras.cpp
+++ b/src/stdio_extras.cpp
@@ -1,39 +1,42 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <stdio.h>
#include <errno.h>
-
+#include <ceconfig.h>
void perror(const char *prefix)
{
if (prefix == NULL || *prefix == 0)
fprintf(stderr, "errno=%d\n", errno);
else
fprintf(stderr, "%s: errno=%d\n", prefix, errno);
}
-int setvbuf(FILE* stream, char* buffer, int mode, size_t size)
+#if _WIN32_WCE < 0x500 || !defined(COREDLL_CORESTRA)
+int setvbuf(FILE* /*stream*/, char* /*buffer*/, int /*mode*/, size_t /*size*/)
{
// TODO: implement buffering
return 0;
}
+#endif
+
diff --git a/src/timeb.cpp b/src/timeb.cpp
index 71d7ee1..dd99236 100644
--- a/src/timeb.cpp
+++ b/src/timeb.cpp
@@ -1,29 +1,29 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <sys/timeb.h>
-int ftime(struct timeb* tp)
+int ftime(struct timeb* /*tp*/)
{
// TODO: implement
return -1;
}
diff --git a/src/ts_string.cpp b/src/ts_string.cpp
index 8141e32..b3264ed 100644
--- a/src/ts_string.cpp
+++ b/src/ts_string.cpp
@@ -1,145 +1,147 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "ts_string.h"
#include <string.h>
#include <windows.h>
void ascii2unicode(const char* ascii, WCHAR* unicode)
{
if (((unsigned int)unicode & 1) == 0)
{ // word-aligned
while (*ascii != '\0')
*unicode++ = *ascii++;
*unicode = '\0';
}
else
{ // not word-aligned
while (*ascii != '\0')
{
*(char*)unicode = *ascii++;
*(((char*)unicode)+1) = 0;
unicode++;
}
*(char*)unicode = 0;
*(((char*)unicode)+1) = 0;
}
}
void unicode2ascii(const WCHAR* unicode, char* ascii)
{
if (((unsigned int)unicode & 1) == 0)
{ // word-aligned
while (*unicode != '\0')
*ascii++ = (char)*unicode++;
*ascii = '\0';
}
else
{ // not word-aligned
while (*(char*)unicode != 0 || *(((char*)unicode)+1) != 0)
*ascii++ = *(char*)unicode++;
*ascii = '\0';
}
}
void ascii2unicode(const char* ascii, WCHAR* unicode, int maxChars)
{
+ int i = 0;
if (((unsigned int)unicode & 1) == 0)
{ // word-aligned
- for (int i=0; ascii[i] != 0 && i<maxChars; i++)
+ for (i = 0; ascii[i] != 0 && i<maxChars; i++)
unicode[i] = ascii[i];
unicode[i] = 0;
}
else
{ // not word-aligned
- for (int i=0; ascii[i] != 0 && i<maxChars; i++)
+ for (i = 0; ascii[i] != 0 && i<maxChars; i++)
{
*(char*)&unicode[i] = ascii[i];
*(((char*)&unicode[i])+1) = 0;
unicode++;
}
*(char*)&unicode[i] = 0;
*(((char*)&unicode[i])+1) = 0;
}
}
void unicode2ascii(const WCHAR* unicode, char* ascii, int maxChars)
{
+ int i = 0;
if (((unsigned int)unicode & 1) == 0)
{ // word-aligned
- for (int i=0; unicode[i] != 0 && i<maxChars; i++)
+ for (i = 0; unicode[i] != 0 && i<maxChars; i++)
ascii[i] = (char)unicode[i];
ascii[i] = 0;
}
else
{ // not word-aligned
- for (int i=0; (*(char*)&unicode[i] != 0 || *(((char*)&unicode[i])+1) != 0) && i<maxChars; i++)
+ for (i = 0; (*(char*)&unicode[i] != 0 || *(((char*)&unicode[i])+1) != 0) && i<maxChars; i++)
ascii[i] = *(char*)&unicode[i];
ascii[i] = 0;
}
}
//
// ascii/unicode typesafe versions of strcat
//
char* ts_strcat(char* dest, const unsigned short* src)
{
char* p = dest;
while (*p != '\0')
p++;
unicode2ascii(src, p);
return dest;
}
unsigned short* ts_strcat(unsigned short* dest, const char* src)
{
unsigned short* p = dest;
while (*p != '\0')
p++;
ascii2unicode(src, p);
return dest;
}
//
// ascii/unicode typesafe versions of strdup
//
char* ts_strdup_unicode_to_ascii(const unsigned short* str)
{
char* result = (char*)malloc(wcslen(str)+1);
if (result == NULL)
return NULL;
unicode2ascii(str, result);
return result;
}
unsigned short* ts_strdup_ascii_to_unicode(const char* str)
{
unsigned short* result = (unsigned short*)malloc((strlen(str)+1)*2);
if (result == NULL)
return NULL;
ascii2unicode(str, result);
return result;
}
diff --git a/src/winmain.cpp b/src/winmain.cpp
index 3d24d93..9b80e42 100644
--- a/src/winmain.cpp
+++ b/src/winmain.cpp
@@ -1,50 +1,51 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <windows.h>
#include "args.h"
#include "redir.h" // initStdHandles
-
int main(int argc, char* argv[]);
int
WINAPI
WinMain(
- HINSTANCE hInstance,
- HINSTANCE hPrevInstance,
+ HINSTANCE /*hInstance*/,
+ HINSTANCE /*hPrevInstance*/,
LPWSTR lpCmdLine,
- int nShowCmd)
+ int /*nShowCmd*/)
{
int result;
int argc;
char** argv;
// convert program name and lpCmdLine into argc/argv, and handle I/O redirection
argc = processCmdLine(lpCmdLine, &argv);
+#if _WIN32_WCE < 0x500 || !defined(COREDLL_CORESIOA)
initStdHandles(); // get environment variables from ChildData
+#endif
result = main(argc, (char**)argv);
return result;
}
diff --git a/src/winsock_extras.cpp b/src/winsock_extras.cpp
index 4019fcb..3565b90 100644
--- a/src/winsock_extras.cpp
+++ b/src/winsock_extras.cpp
@@ -1,31 +1,31 @@
/* wcecompat: Windows CE C Runtime Library "compatibility" library.
*
* Copyright (C) 2001-2002 Essemer Pty Ltd. All rights reserved.
* http://www.essemer.com.au/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <winsock_extras.h>
#include <stdio.h>
-struct servent* PASCAL getservbyname(const char* name, const char* proto)
+struct servent* PASCAL getservbyname(const char* /*name*/, const char* /*proto*/)
{
// if (fprintf(stderr, "NOT IMPLEMENTED: getservbyname(name=%s, proto=%s)\n", name, proto) <= 0)
// printf("NOT IMPLEMENTED: getservbyname(name=%s, proto=%s)\n", name, proto);
return NULL;
}
diff --git a/wcecompat.pro b/wcecompat.pro
new file mode 100644
index 0000000..3842e8f
--- /dev/null
+++ b/wcecompat.pro
@@ -0,0 +1,65 @@
+TEMPLATE = lib
+TARGET = wcecompat
+DESTDIR = lib
+
+DEPENDPATH += . include src include/sys
+INCLUDEPATH += . include src include/sys
+
+CONFIG += staticlib
+CONFIG -= shared
+QT=
+
+LIBS += -lcorelibc -lcoredll /NODEFAULTLIB:LIBC.LIB
+
+# Input
+HEADERS += include/assert.h \
+ include/conio.h \
+ include/ctype.h \
+ include/errno.h \
+ include/fcntl.h \
+ include/float.h \
+ include/io.h \
+ include/limits.h \
+ include/malloc.h \
+ include/math.h \
+ include/memory.h \
+ include/process.h \
+ include/setjmp.h \
+ include/signal.h \
+ include/stdarg.h \
+ include/stddef.h \
+ include/stdio.h \
+ include/stdlib.h \
+ include/string.h \
+ include/time.h \
+ include/winsock_extras.h \
+ src/args.h \
+ src/ChildData.h \
+ src/internal.h \
+ src/pipe.h \
+ src/redir.h \
+ src/ts_string.h \
+ include/sys/stat.h \
+ include/sys/timeb.h \
+ include/sys/types.h
+SOURCES += src/args.cpp \
+ src/assert.cpp \
+ src/ChildData.cpp \
+ src/env.cpp \
+ src/errno.cpp \
+ src/io.cpp \
+ src/pipe.cpp \
+ src/process.cpp \
+ src/redir.cpp \
+ src/stat.cpp \
+ src/stdio_extras.cpp \
+ src/stdlib_extras.cpp \
+ src/string_extras.cpp \
+ src/time.cpp \
+ src/timeb.cpp \
+ src/ts_string.cpp \
+ src/winmain.cpp \
+ src/winsock_extras.cpp
+
+QMAKE_POST_LINK = copy $$DESTDIR\\$${TARGET}.lib $$DESTDIR\\$${TARGET}ex.lib
+
|
shenoudab/devise_traceable
|
34fc22ccde7fb3ac81810caa31b7da547b465d7b
|
devise_traceable now working
|
diff --git a/Rakefile b/Rakefile
index 842e45e..a7cc11f 100755
--- a/Rakefile
+++ b/Rakefile
@@ -1,44 +1,45 @@
# encoding: UTF-8
require 'rake'
require 'rake/rdoctask'
require 'rake/gempackagetask'
require 'rake/testtask'
+require File.join(File.dirname(__FILE__), 'lib', 'devise_traceable', 'version')
Rake::TestTask.new(:test) do |t|
t.libs << 'lib'
t.libs << 'test'
t.pattern = 'test/**/*_test.rb'
t.verbose = false
end
task :default => :test
Rake::RDocTask.new(:rdoc) do |rdoc|
rdoc.rdoc_dir = 'rdoc'
rdoc.title = 'DeviseTraceable'
rdoc.options << '--line-numbers' << '--inline-source'
rdoc.rdoc_files.include('README.rdoc')
rdoc.rdoc_files.include('lib/**/*.rb')
end
spec = Gem::Specification.new do |s|
s.name = "devise_traceable"
s.summary = "Devise Traceable For Traceing Devise Models"
s.description = "Devise Traceable For Traceing Devise Models Logins and Logouts in separate model table"
s.files = FileList["[A-Z]*", "{lib}/**/*"]
s.require_paths = ["lib"]
s.version = DeviseTraceable::VERSION.dup
s.email = "[email protected]"
s.homepage = "http://github.com/shenoudab/devise_traceable"
s.author = 'Shenouda Bertel'
s.add_dependency("warden")
s.add_dependency("devise")
end
Rake::GemPackageTask.new(spec) do |pkg|
end
desc "Install the gem #{spec.name}-#{spec.version}.gem"
task :install do
system("gem install pkg/#{spec.name}-#{spec.version}.gem --no-ri --no-rdoc")
end
\ No newline at end of file
diff --git a/lib/devise_traceable/hooks/traceable.rb b/lib/devise_traceable/hooks/traceable.rb
index ea9c4d4..1f39da9 100755
--- a/lib/devise_traceable/hooks/traceable.rb
+++ b/lib/devise_traceable/hooks/traceable.rb
@@ -1,19 +1,10 @@
# After each sign in, sign out.
# This is only triggered when the user is explicitly set (with set_user)
# and on authentication. Retrieving the user from session (:fetch) does
# not trigger it.
-Warden::Manager.after_set_user :except => :fetch do |record, warden, options|
- puts "Shenouda AfterSETUser" + record
- if record.respond_to?(:insert_login!) #&& warden.authenticated?(options[:scope])
- puts "WardenRequest" + warden.request
- record.insert_login!(warden.request)
- end
-end
-
Warden::Manager.before_logout do |record, warden, opts|
- puts "Shenouda Before Logout" + record
- if record.respond_to?(:update_logout!)
- record.update_logout!(warden.request)
+ if record.respond_to?(:stamp!)
+ record.stamp!
end
-end
+end
\ No newline at end of file
diff --git a/lib/devise_traceable/model.rb b/lib/devise_traceable/model.rb
index 7b773ee..156a63c 100755
--- a/lib/devise_traceable/model.rb
+++ b/lib/devise_traceable/model.rb
@@ -1,29 +1,18 @@
require 'devise_traceable/hooks/traceable'
module Devise
module Models
# Trace information about your user sign in. It tracks the following columns:
# * resource_id
# * sign_in_at
# * sign_out_at
- # * time
- #
-
+
module Traceable
- def insert_login!(request)
- new_current = Time.now
- self.sign_in_at = new_current
- save(:validate => false)
- end
-
- def update_logout!(request)
+ def stamp!
new_current = Time.now
- self.sign_out_at = new_current
- time = self.sign_out_at - self.sign_in_at
- self.time = time
- save(:validate => false)
+ "#{self.class}Tracing".constantize.create(:sign_in_at => self.current_sign_in_at, :sign_out_at => new_current, "#{self.class}".foreign_key.to_sym => self.id)
end
end
end
end
diff --git a/lib/devise_traceable/version.rb b/lib/devise_traceable/version.rb
index 6ce10fd..b81c336 100755
--- a/lib/devise_traceable/version.rb
+++ b/lib/devise_traceable/version.rb
@@ -1,3 +1,3 @@
module DeviseTraceable
- VERSION = "0.0.3".freeze
+ VERSION = "0.0.5".freeze
end
\ No newline at end of file
diff --git a/lib/generators/devise_traceable/devise_traceable_generator.rb b/lib/generators/devise_traceable/devise_traceable_generator.rb
index 5047299..561893a 100755
--- a/lib/generators/devise_traceable/devise_traceable_generator.rb
+++ b/lib/generators/devise_traceable/devise_traceable_generator.rb
@@ -1,31 +1,57 @@
require 'rails/generators/migration'
class DeviseTraceableGenerator < Rails::Generators::NamedBase
include Rails::Generators::Migration
desc "Generates a model with the given NAME (if one does not exist) with devise " <<
"configuration plus a migration file and devise routes."
def self.source_root
@_devise_source_root ||= File.expand_path("../templates", __FILE__)
end
def self.orm_has_migration?
Rails::Generators.options[:rails][:orm] == :active_record
end
def self.next_migration_number(dirname)
if ActiveRecord::Base.timestamped_migrations
Time.now.utc.strftime("%Y%m%d%H%M%S")
else
"%.3d" % (current_migration_number(dirname) + 1)
end
end
class_option :orm
class_option :migration, :type => :boolean, :default => orm_has_migration?
+ def invoke_orm_model
+ if model_exists?
+ say "* Model already exists."
+ elsif options[:orm].present?
+ invoke "model", ["#{name}Tracing"], :migration => false, :orm => options[:orm]
+
+ unless model_exists?
+ abort "Tried to invoke the model generator for '#{options[:orm]}' but could not find it.\n" <<
+ "Please create your model by hand before calling `rails g devise_traceable #{name}`."
+ end
+ else
+ abort "Cannot create a devise model because config.generators.orm is blank.\n" <<
+ "Please create your model by hand or configure your generators orm before calling `rails g devise_traceable #{name}`."
+ end
+ end
+
def create_migration_file
- migration_template 'migration.rb', "db/migrate/devise_create_#{name.tableize}_tracing.rb"
+ migration_template 'migration.rb', "db/migrate/devise_create_#{name.downcase}_tracings.rb"
+ end
+
+ protected
+
+ def model_exists?
+ File.exists?(File.join(destination_root, model_path))
+ end
+
+ def model_path
+ @model_path ||= File.join("app", "models", "#{file_path}.rb")
end
end
diff --git a/lib/generators/devise_traceable/templates/migration.rb b/lib/generators/devise_traceable/templates/migration.rb
index 003701d..583101b 100755
--- a/lib/generators/devise_traceable/templates/migration.rb
+++ b/lib/generators/devise_traceable/templates/migration.rb
@@ -1,18 +1,17 @@
-class DeviseCreate<%= table_name.camelize %>Tracing < ActiveRecord::Migration
+class DeviseCreate<%= table_name.camelize.singularize %>Tracings < ActiveRecord::Migration
def self.up
- create_table :<%= table_name %>_tracing do |t|
+ create_table :<%= table_name.singularize %>_tracings do |t|
t.integer :<%= table_name.classify.foreign_key %>
- t.datetime :sign_in_at
- t.datetime :sign_out_at
- #Any additional fields here
-
- t.timestamps
+ t.datetime :sign_in_at
+ t.datetime :sign_out_at
+ #Any additional fields here
+ #t.timestamps
end
-add_index :<%= table_name %>_tracing, :<%= table_name.classify.foreign_key %>
+add_index :<%= table_name.singularize %>_tracings, :<%= table_name.classify.foreign_key %>
end
def self.down
- drop_table :<%= table_name %>
+drop_table :<%= table_name.singularize %>_tracings
end
end
|
shenoudab/devise_traceable
|
574c867ac9a535ff562a51c930ff6f722d07bb96
|
solving gem installation
|
diff --git a/Rakefile b/Rakefile
index 42f0dc5..f789e7a 100755
--- a/Rakefile
+++ b/Rakefile
@@ -1,42 +1,43 @@
# encoding: UTF-8
require 'rake'
require 'rake/rdoctask'
require 'rake/gempackagetask'
require 'rake/testtask'
require File.join(File.dirname(__FILE__), 'lib', 'devise_traceable', 'version')
Rake::TestTask.new(:test) do |t|
t.libs << 'lib'
t.libs << 'test'
t.pattern = 'test/**/*_test.rb'
t.verbose = false
end
task :default => :test
Rake::RDocTask.new(:rdoc) do |rdoc|
rdoc.rdoc_dir = 'rdoc'
rdoc.title = 'DeviseTraceable'
rdoc.options << '--line-numbers' << '--inline-source'
rdoc.rdoc_files.include('README.rdoc')
rdoc.rdoc_files.include('lib/**/*.rb')
end
spec = Gem::Specification.new do |s|
s.name = "devise_traceable"
- s.summary = "Devise Traceable"
- s.description = "Devise Traceable"
- s.files = FileList["[A-Z]*", "{app,config,lib}/**/*"]
+ s.summary = "Devise Traceable For Traceing Devise Models"
+ s.description = "Devise Traceable For Traceing Devise Models Logins and Logouts in separate model table"
+ s.files = FileList["[A-Z]*", "{lib}/**/*"]
+ s.require_paths = ["lib"]
s.version = DeviseTraceable::VERSION.dup
s.email = "[email protected]"
s.homepage = "http://github.com/shenoudab/devise_traceable"
s.author = 'Shenouda Bertel'
end
Rake::GemPackageTask.new(spec) do |pkg|
end
desc "Install the gem #{spec.name}-#{spec.version}.gem"
task :install do
system("gem install pkg/#{spec.name}-#{spec.version}.gem --no-ri --no-rdoc")
end
diff --git a/lib/devise_traceable.rb b/lib/devise_traceable.rb
index 45facd1..7385225 100755
--- a/lib/devise_traceable.rb
+++ b/lib/devise_traceable.rb
@@ -1,10 +1,10 @@
unless defined?(Devise)
require 'devise'
end
+Devise.add_module :traceable, :model => 'devise_traceable/model'
+
module DeviseTraceable
- require 'devise_traceable/rails'
end
-Devise.add_module :traceable, :model => 'devise_traceable/model'
-
+require 'devise_traceable/rails'
\ No newline at end of file
diff --git a/lib/devise_traceable/rails.rb b/lib/devise_traceable/rails.rb
index 2885fce..edbbd9d 100755
--- a/lib/devise_traceable/rails.rb
+++ b/lib/devise_traceable/rails.rb
@@ -1,8 +1,7 @@
require 'devise_traceable'
-require 'rails'
module DeviseTraceable
class Engine < ::Rails::Engine
engine_name :devise_taceable
end
end
\ No newline at end of file
|
shenoudab/devise_traceable
|
c4e1b9b53c8bbec0eee3797562bd09dedd69e85d
|
enabling hooks
|
diff --git a/.gitignore b/.gitignore
index c182818..4aedc10 100755
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
.bundle/
log/*.log
pkg/
-test/
+nbproject/
+test/
\ No newline at end of file
diff --git a/lib/devise_traceable.rb b/lib/devise_traceable.rb
index 8e953d6..45facd1 100755
--- a/lib/devise_traceable.rb
+++ b/lib/devise_traceable.rb
@@ -1,6 +1,10 @@
unless defined?(Devise)
require 'devise'
end
-require 'devise_traceable/model'
-require 'devise_traceable/rails'
+module DeviseTraceable
+ require 'devise_traceable/rails'
+end
+
+Devise.add_module :traceable, :model => 'devise_traceable/model'
+
diff --git a/lib/devise_traceable/hooks/traceable.rb b/lib/devise_traceable/hooks/traceable.rb
index b76c214..d597d78 100755
--- a/lib/devise_traceable/hooks/traceable.rb
+++ b/lib/devise_traceable/hooks/traceable.rb
@@ -1,16 +1,18 @@
# After each sign in, sign out.
# This is only triggered when the user is explicitly set (with set_user)
# and on authentication. Retrieving the user from session (:fetch) does
# not trigger it.
Warden::Manager.after_set_user :except => :fetch do |record, warden, options|
+ puts record
if record.respond_to?(:insert_login!) #&& warden.authenticated?(options[:scope])
record.insert_login!(warden.request)
end
end
-Warden::Manager.before_logout do |record, auth, opts|
- if record.respond_to?(:update_logout!) #&& warden.authenticated?(options[:scope])
+Warden::Manager.before_logout do |record, warden, opts|
+ puts record
+ if record.respond_to?(:update_logout!)
record.update_logout!(warden.request)
end
end
diff --git a/lib/devise_traceable/rails.rb b/lib/devise_traceable/rails.rb
index 8a4d01b..2885fce 100755
--- a/lib/devise_traceable/rails.rb
+++ b/lib/devise_traceable/rails.rb
@@ -1,5 +1,8 @@
+require 'devise_traceable'
+require 'rails'
+
module DeviseTraceable
class Engine < ::Rails::Engine
-
+ engine_name :devise_taceable
end
-end
+end
\ No newline at end of file
|
ish/couchish
|
ff468feb5017d55a4b73eaf09ca63d6f5a7a8630
|
Fix problem correctly
|
diff --git a/couchish/filehandling.py b/couchish/filehandling.py
index e37c546..32bfb12 100644
--- a/couchish/filehandling.py
+++ b/couchish/filehandling.py
@@ -1,244 +1,243 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from dottedish import dotted, flatten, dotteddict, api, dottedlist
from couchdbsession import a8n
import base64
import uuid
from schemaish.type import File
from StringIO import StringIO
import shutil
from couchish import jsonutil
def get_attr(prefix, parent=None):
# combine prefix and parent where prefix is a list and parent is a dotted string
if parent is None:
segments = [str(segment) for segment in prefix]
return '.'.join(segments)
if prefix is None:
return parent
segments = [str(segment) for segment in prefix]
if parent != '':
segments += parent.split('.')
attr = '.'.join( segments )
return attr
def get_files(data, original=None, prefix=None):
# scan old data to collect any file refs and then scan new data for file changes
files = {}
inlinefiles = {}
original_files = {}
get_files_from_original(data, original, files, inlinefiles, original_files, prefix)
get_files_from_data(data, original, files, inlinefiles, original_files, prefix)
return data, files, inlinefiles, original_files
def has_unmodified_signature(f):
if f.file is None:
return True
return False
def dotted_or_emptydict(d):
if d is None:
return {}
try:
return dotted(d)
except TypeError:
return d
def get_files_from_data(data, original, files, inlinefiles, original_files, prefix):
if isinstance(data, File):
get_file_from_item(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
if not isinstance(data, dict) and not isinstance(data, list):
return
dd = dotted_or_emptydict(data)
ddoriginal = dotted_or_emptydict(original)
if not dd:
return
file_names = {}
- for d in ddoriginal:
- for k in d:
- if isinstance(d.get(k), File):
- file_names[couch_attachement_to_full_id(d[k])] = d[k]
+ for _, d in flatten(ddoriginal):
+ if isinstance(d, File):
+ file_names[couch_attachement_to_full_id(d)] = d
for k,f in flatten(dd):
if isinstance(f, File):
name = couch_attachement_to_full_id(f)
if name in file_names:
of = file_names[name]
elif isinstance(ddoriginal.get(k), File):
of = ddoriginal[k]
else:
of = None
get_file_from_item(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def couch_attachement_to_full_id(attach):
return "%s/%s"%(attach.doc_id, attach.id)
api.wrap.when_type(a8n.List)(dottedlist.wrap_list)
api.wrap.when_type(a8n.Dictionary)(dotteddict.wrap_dict)
def get_file_from_item(f, of, files, inlinefiles, original_files, fullprefix):
if f.file is None:
# if we have no original data then we presume the file should remain unchanged
f.id = of.id
if f.mimetype is None:
f.mimetype = of.mimetype
if f.filename is None:
f.filename = of.filename
if not hasattr(f, 'metadata') or f.metadata is None or f.metadata=={}:
f.metadata = getattr(of, 'metadata', None)
else:
if of and hasattr(of,'id'):
f.id = of.id
else:
f.id = uuid.uuid4().hex
if getattr(f,'inline',False) is True:
filestore = inlinefiles
else:
filestore = files
if hasattr(f, 'inline'):
del f.inline
# add the files for attachment handling and remove the file data from document
if getattr(f,'b64', None):
filestore[fullprefix] = jsonutil.CouchishFile(f.file, f.filename, f.mimetype, f.id, metadata = f.metadata, b64=True)
del f.b64
else:
fh = StringIO()
shutil.copyfileobj(f.file, fh)
fh.seek(0)
filestore[fullprefix] = jsonutil.CouchishFile(fh, f.filename, f.mimetype, f.id, metadata = f.metadata)
del f.file
def get_file_from_original(f, of, files, inlinefiles, original_files, fullprefix):
if not isinstance(f, File):
original_files[fullprefix] = of
def get_files_from_original(data, original, files, inlinefiles, original_files, prefix):
if isinstance(original, File):
get_file_from_original(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
if not isinstance(original, dict) and not isinstance(original, list):
return
dd = dotted_or_emptydict(data)
ddoriginal = dotted_or_emptydict(original)
if not ddoriginal:
return
for k, of in flatten(ddoriginal):
if isinstance(of, File):
f = dd.get(k)
get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def _parse_changes_for_files(session, deletions, additions, changes):
""" returns deletions, additions """
all_separate_files = {}
all_inline_files = {}
for addition in additions:
addition, files, inlinefiles, original_files_notused = get_files(addition)
if files:
all_separate_files.setdefault(addition['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(addition['_id'],{}).update(inlinefiles)
_extract_inline_attachments(addition, inlinefiles)
deleted_files = {}
changes = list(changes)
for n, changeset in enumerate(changes):
d, cs = changeset
cs = list(cs)
for m, c in enumerate(cs):
if c['action'] in ['edit','create','remove']:
c['value'], files, inlinefiles, original_files = get_files(c.get('value'), original=c.get('was'), prefix=c['path'])
cs[m] = c
if files:
all_separate_files.setdefault(d['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(d['_id'],{}).update(inlinefiles)
final_file_ids = set(find_all_file_names(c['value']))
removed_files = dict((k, original_files[k]) for k in original_files if couch_attachement_to_full_id(original_files[k]) not in final_file_ids)
deleted_files.setdefault(d['_id'], {}).update(removed_files)
_extract_inline_attachments(d, inlinefiles)
changes[n] = (d, cs)
return deleted_files, all_separate_files
def find_all_file_names(src):
if isinstance(src, File):
yield couch_attachement_to_full_id(src)
elif isinstance(src, dict) or isinstance(src, list):
dd = dotted_or_emptydict(src)
for _, item in flatten(dd):
if isinstance(item, File):
yield couch_attachement_to_full_id(item)
def _extract_inline_attachments(doc, files):
"""
Move the any attachment data that we've found into the _attachments attribute
"""
for attr, f in files.items():
if f.b64:
data = f.file.replace('\n', '')
else:
data = base64.encodestring(f.file.read()).replace('\n','')
f.file.close()
del f.file
del f.b64
del f.inline
del f.doc_id
doc.setdefault('_attachments',{})[f.id] = {'content_type': f.mimetype,'data': data}
def _handle_separate_attachments(session, deletions, additions):
"""
add attachments that aren't inline and remove any attachments without references
"""
# XXX This needs to cope with files moving when sequences are re-numbered. We need
# XXX to talk to matt about what a renumbering like this looks like
for id, attrfiles in additions.items():
doc = session.get(id)
stubdoc = {'_id':doc['_id'], '_rev':doc['_rev']}
for attr, f in attrfiles.items():
data = ''
if f.file:
if f.b64:
data = base64.decodestring(f.file)
else:
data = f.file.read()
f.file.close()
session._db.put_attachment(stubdoc, data, filename=f.id, content_type=f.mimetype)
del f.file
del f.b64
del f.inline
del f.doc_id
for id, attrfiles in deletions.items():
# XXX had to use _db because delete attachment freeaked using session version.
doc = session._db.get(id)
for attr, f in attrfiles.items():
session._db.delete_attachment(doc, f.id)
additions = {}
deletions = {}
|
ish/couchish
|
40275cd8bc75b2e5fce30e13218c61ca043e68e6
|
Remove magic fix
|
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index b0e1a83..342bb5e 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,142 +1,141 @@
from cStringIO import StringIO
from jsonish import pythonjson
from schemaish.type import File
import base64
from dottedish import flatten, dotted
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
class AttachmentFileLike(object):
"""
A lazy-loading file-like object that reads the attachment via the session
on first call to read().
This object *must* stay close to the session instance so should never be
serialised by default. Instances are therefore marked un unpicklable,
uncopyable, etc to avoid them accidentally "leaking out".
"""
def __init__(self, session, doc_id, filename):
self.session = session
self.doc_id = doc_id
self.filename = filename
self._file = None
def read(self, *a, **k):
if self._file is None:
- self._filehandle = self.session.get_attachment(self.doc_id, self.filename)
- self._file = StringIO(self._filehandle)
+ self._file = self.session.get_attachment(self.doc_id, self.filename)
return self._file.read(*a, **k)
def close(self):
- if self._filehandle:
- self._filehandle.close()
+ if self._file:
+ self._file.close()
def __getstate__(self):
# Unpicklable
return False
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
elif 'old_name' in d.get('metadata', {}) and not d['id']:
# if there is no doc and attachement id check for old ones in the metadata
d['doc_id'], _, d['id'] = d['metadata']['old_name'][0].partition('/')
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
# Read the file into the dict, but not if it's an AttachmentFileLike that
# only works close to the session.
file = getattr(obj, 'file', None)
if isinstance(file, AttachmentFileLike):
pass
else:
if file and hasattr(obj,'b64'):
d['base64'] = obj.file
else:
if file and obj.file is not None:
d['base64'] = base64.encodestring(file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
def encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
def decode_from_dict(d, session=None):
obj = pythonjson.decode_from_dict(d)
obj = add_id_and_attr_to_files(obj, session)
return obj
def add_id_and_attr_to_files(data, session=None):
if not isinstance(data, dict):
return data
dd = dotted(data)
for k,f in flatten(data):
if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
if session:
f.file = AttachmentFileLike(session, dd['_id'], f.id)
f.doc_id = dd['_id']
f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
f.doc_id = dd[subpath]['_id']
f.rev = dd[subpath]['_rev']
return data
dumps = pythonjson.dumps
loads = pythonjson.loads
|
ish/couchish
|
8641fcbadbf0bd6608bcf2a15b40a154fb4dabc1
|
Fix bug through black magic :/
|
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index 342bb5e..b0e1a83 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,141 +1,142 @@
from cStringIO import StringIO
from jsonish import pythonjson
from schemaish.type import File
import base64
from dottedish import flatten, dotted
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
class AttachmentFileLike(object):
"""
A lazy-loading file-like object that reads the attachment via the session
on first call to read().
This object *must* stay close to the session instance so should never be
serialised by default. Instances are therefore marked un unpicklable,
uncopyable, etc to avoid them accidentally "leaking out".
"""
def __init__(self, session, doc_id, filename):
self.session = session
self.doc_id = doc_id
self.filename = filename
self._file = None
def read(self, *a, **k):
if self._file is None:
- self._file = self.session.get_attachment(self.doc_id, self.filename)
+ self._filehandle = self.session.get_attachment(self.doc_id, self.filename)
+ self._file = StringIO(self._filehandle)
return self._file.read(*a, **k)
def close(self):
- if self._file:
- self._file.close()
+ if self._filehandle:
+ self._filehandle.close()
def __getstate__(self):
# Unpicklable
return False
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
elif 'old_name' in d.get('metadata', {}) and not d['id']:
# if there is no doc and attachement id check for old ones in the metadata
d['doc_id'], _, d['id'] = d['metadata']['old_name'][0].partition('/')
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
# Read the file into the dict, but not if it's an AttachmentFileLike that
# only works close to the session.
file = getattr(obj, 'file', None)
if isinstance(file, AttachmentFileLike):
pass
else:
if file and hasattr(obj,'b64'):
d['base64'] = obj.file
else:
if file and obj.file is not None:
d['base64'] = base64.encodestring(file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
def encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
def decode_from_dict(d, session=None):
obj = pythonjson.decode_from_dict(d)
obj = add_id_and_attr_to_files(obj, session)
return obj
def add_id_and_attr_to_files(data, session=None):
if not isinstance(data, dict):
return data
dd = dotted(data)
for k,f in flatten(data):
if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
if session:
f.file = AttachmentFileLike(session, dd['_id'], f.id)
f.doc_id = dd['_id']
f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
f.doc_id = dd[subpath]['_id']
f.rev = dd[subpath]['_rev']
return data
dumps = pythonjson.dumps
loads = pythonjson.loads
|
ish/couchish
|
eac7379b980c014795d2c8238946192be9654041
|
Handle moved files in sequences better
|
diff --git a/couchish/filehandling.py b/couchish/filehandling.py
index ad3021d..e37c546 100644
--- a/couchish/filehandling.py
+++ b/couchish/filehandling.py
@@ -1,220 +1,244 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from dottedish import dotted, flatten, dotteddict, api, dottedlist
from couchdbsession import a8n
import base64
import uuid
from schemaish.type import File
from StringIO import StringIO
import shutil
from couchish import jsonutil
def get_attr(prefix, parent=None):
# combine prefix and parent where prefix is a list and parent is a dotted string
if parent is None:
segments = [str(segment) for segment in prefix]
return '.'.join(segments)
if prefix is None:
return parent
segments = [str(segment) for segment in prefix]
if parent != '':
segments += parent.split('.')
attr = '.'.join( segments )
return attr
def get_files(data, original=None, prefix=None):
# scan old data to collect any file refs and then scan new data for file changes
files = {}
inlinefiles = {}
original_files = {}
get_files_from_original(data, original, files, inlinefiles, original_files, prefix)
get_files_from_data(data, original, files, inlinefiles, original_files, prefix)
return data, files, inlinefiles, original_files
def has_unmodified_signature(f):
if f.file is None:
return True
return False
def dotted_or_emptydict(d):
if d is None:
return {}
try:
return dotted(d)
except TypeError:
return d
def get_files_from_data(data, original, files, inlinefiles, original_files, prefix):
if isinstance(data, File):
get_file_from_item(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
if not isinstance(data, dict) and not isinstance(data, list):
return
dd = dotted_or_emptydict(data)
ddoriginal = dotted_or_emptydict(original)
if not dd:
return
+ file_names = {}
+ for d in ddoriginal:
+ for k in d:
+ if isinstance(d.get(k), File):
+ file_names[couch_attachement_to_full_id(d[k])] = d[k]
for k,f in flatten(dd):
if isinstance(f, File):
- if isinstance(ddoriginal.get(k), File):
+ name = couch_attachement_to_full_id(f)
+ if name in file_names:
+ of = file_names[name]
+ elif isinstance(ddoriginal.get(k), File):
of = ddoriginal[k]
else:
of = None
get_file_from_item(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
+def couch_attachement_to_full_id(attach):
+ return "%s/%s"%(attach.doc_id, attach.id)
+
+
api.wrap.when_type(a8n.List)(dottedlist.wrap_list)
api.wrap.when_type(a8n.Dictionary)(dotteddict.wrap_dict)
def get_file_from_item(f, of, files, inlinefiles, original_files, fullprefix):
if f.file is None:
# if we have no original data then we presume the file should remain unchanged
f.id = of.id
if f.mimetype is None:
f.mimetype = of.mimetype
if f.filename is None:
f.filename = of.filename
if not hasattr(f, 'metadata') or f.metadata is None or f.metadata=={}:
f.metadata = getattr(of, 'metadata', None)
else:
if of and hasattr(of,'id'):
f.id = of.id
else:
f.id = uuid.uuid4().hex
if getattr(f,'inline',False) is True:
filestore = inlinefiles
else:
filestore = files
if hasattr(f, 'inline'):
del f.inline
# add the files for attachment handling and remove the file data from document
if getattr(f,'b64', None):
filestore[fullprefix] = jsonutil.CouchishFile(f.file, f.filename, f.mimetype, f.id, metadata = f.metadata, b64=True)
del f.b64
else:
fh = StringIO()
shutil.copyfileobj(f.file, fh)
fh.seek(0)
filestore[fullprefix] = jsonutil.CouchishFile(fh, f.filename, f.mimetype, f.id, metadata = f.metadata)
del f.file
def get_file_from_original(f, of, files, inlinefiles, original_files, fullprefix):
if not isinstance(f, File):
original_files[fullprefix] = of
def get_files_from_original(data, original, files, inlinefiles, original_files, prefix):
if isinstance(original, File):
get_file_from_original(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
if not isinstance(original, dict) and not isinstance(original, list):
return
dd = dotted_or_emptydict(data)
ddoriginal = dotted_or_emptydict(original)
if not ddoriginal:
return
for k, of in flatten(ddoriginal):
if isinstance(of, File):
f = dd.get(k)
get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def _parse_changes_for_files(session, deletions, additions, changes):
""" returns deletions, additions """
all_separate_files = {}
all_inline_files = {}
for addition in additions:
addition, files, inlinefiles, original_files_notused = get_files(addition)
if files:
all_separate_files.setdefault(addition['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(addition['_id'],{}).update(inlinefiles)
_extract_inline_attachments(addition, inlinefiles)
-
- all_original_files = {}
+
+ deleted_files = {}
changes = list(changes)
for n, changeset in enumerate(changes):
d, cs = changeset
cs = list(cs)
for m, c in enumerate(cs):
if c['action'] in ['edit','create','remove']:
c['value'], files, inlinefiles, original_files = get_files(c.get('value'), original=c.get('was'), prefix=c['path'])
cs[m] = c
if files:
all_separate_files.setdefault(d['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(d['_id'],{}).update(inlinefiles)
- all_original_files.setdefault(d['_id'], {}).update(original_files)
+ final_file_ids = set(find_all_file_names(c['value']))
+ removed_files = dict((k, original_files[k]) for k in original_files if couch_attachement_to_full_id(original_files[k]) not in final_file_ids)
+ deleted_files.setdefault(d['_id'], {}).update(removed_files)
_extract_inline_attachments(d, inlinefiles)
changes[n] = (d, cs)
- return all_original_files, all_separate_files
+ return deleted_files, all_separate_files
+
+
+def find_all_file_names(src):
+ if isinstance(src, File):
+ yield couch_attachement_to_full_id(src)
+ elif isinstance(src, dict) or isinstance(src, list):
+ dd = dotted_or_emptydict(src)
+ for _, item in flatten(dd):
+ if isinstance(item, File):
+ yield couch_attachement_to_full_id(item)
def _extract_inline_attachments(doc, files):
"""
Move the any attachment data that we've found into the _attachments attribute
"""
for attr, f in files.items():
if f.b64:
data = f.file.replace('\n', '')
else:
data = base64.encodestring(f.file.read()).replace('\n','')
f.file.close()
del f.file
del f.b64
del f.inline
del f.doc_id
doc.setdefault('_attachments',{})[f.id] = {'content_type': f.mimetype,'data': data}
def _handle_separate_attachments(session, deletions, additions):
"""
add attachments that aren't inline and remove any attachments without references
"""
# XXX This needs to cope with files moving when sequences are re-numbered. We need
# XXX to talk to matt about what a renumbering like this looks like
for id, attrfiles in additions.items():
doc = session.get(id)
stubdoc = {'_id':doc['_id'], '_rev':doc['_rev']}
for attr, f in attrfiles.items():
data = ''
if f.file:
if f.b64:
data = base64.decodestring(f.file)
else:
data = f.file.read()
f.file.close()
session._db.put_attachment(stubdoc, data, filename=f.id, content_type=f.mimetype)
del f.file
del f.b64
del f.inline
del f.doc_id
for id, attrfiles in deletions.items():
# XXX had to use _db because delete attachment freeaked using session version.
doc = session._db.get(id)
for attr, f in attrfiles.items():
session._db.delete_attachment(doc, f.id)
additions = {}
deletions = {}
|
ish/couchish
|
a685b280e31448a72d629ad5f05b37269b7ffb3d
|
Add fallback check for doc_id and id of files
|
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index 132b586..01a1a4b 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,135 +1,138 @@
from cStringIO import StringIO
from jsonish import pythonjson
from schemaish.type import File
import base64
from dottedish import flatten, dotted
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
class AttachmentFileLike(object):
"""
A lazy-loading file-like object that reads the attachment via the session
on first call to read().
This object *must* stay close to the session instance so should never be
serialised by default. Instances are therefore marked un unpicklable,
uncopyable, etc to avoid them accidentally "leaking out".
"""
def __init__(self, session, doc_id, filename):
self.session = session
self.doc_id = doc_id
self.filename = filename
self._file = None
def read(self, *a, **k):
if self._file is None:
data = self.session.get_attachment(self.doc_id, self.filename)
self._file = StringIO(data)
return self._file.read(*a, **k)
def __getstate__(self):
# Unpicklable
return False
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
+ elif 'old_name' in d.get('metadata', {}) and not d['id']:
+ # if there is no doc and attachement id check for old ones in the metadata
+ d['doc_id'], _, d['id'] = d['metadata']['old_name'][0].partition('/')
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
# Read the file into the dict, but not if it's an AttachmentFileLike that
# only works close to the session.
file = getattr(obj, 'file', None)
if isinstance(file, AttachmentFileLike):
pass
else:
if file and hasattr(obj,'b64'):
d['base64'] = obj.file
else:
if file and obj.file is not None:
d['base64'] = base64.encodestring(file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
def encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
def decode_from_dict(d, session=None):
obj = pythonjson.decode_from_dict(d)
obj = add_id_and_attr_to_files(obj, session)
return obj
def add_id_and_attr_to_files(data, session=None):
if not isinstance(data, dict):
return data
dd = dotted(data)
for k,f in flatten(data):
if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
if session:
f.file = AttachmentFileLike(session, dd['_id'], f.id)
f.doc_id = dd['_id']
f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
f.doc_id = dd[subpath]['_id']
f.rev = dd[subpath]['_rev']
return data
dumps = pythonjson.dumps
loads = pythonjson.loads
|
ish/couchish
|
d898218517135a81c5a989d8c360fc8409968715
|
couchdb-python >= 0.7 returns a file-like. Also add a close method for completeness.
|
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index 132b586..eff8c23 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,135 +1,138 @@
from cStringIO import StringIO
from jsonish import pythonjson
from schemaish.type import File
import base64
from dottedish import flatten, dotted
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
class AttachmentFileLike(object):
"""
A lazy-loading file-like object that reads the attachment via the session
on first call to read().
This object *must* stay close to the session instance so should never be
serialised by default. Instances are therefore marked un unpicklable,
uncopyable, etc to avoid them accidentally "leaking out".
"""
def __init__(self, session, doc_id, filename):
self.session = session
self.doc_id = doc_id
self.filename = filename
self._file = None
def read(self, *a, **k):
if self._file is None:
- data = self.session.get_attachment(self.doc_id, self.filename)
- self._file = StringIO(data)
+ self._file = self.session.get_attachment(self.doc_id, self.filename)
return self._file.read(*a, **k)
+ def close(self):
+ if self._file:
+ self._file.close()
+
def __getstate__(self):
# Unpicklable
return False
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
# Read the file into the dict, but not if it's an AttachmentFileLike that
# only works close to the session.
file = getattr(obj, 'file', None)
if isinstance(file, AttachmentFileLike):
pass
else:
if file and hasattr(obj,'b64'):
d['base64'] = obj.file
else:
if file and obj.file is not None:
d['base64'] = base64.encodestring(file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
def encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
def decode_from_dict(d, session=None):
obj = pythonjson.decode_from_dict(d)
obj = add_id_and_attr_to_files(obj, session)
return obj
def add_id_and_attr_to_files(data, session=None):
if not isinstance(data, dict):
return data
dd = dotted(data)
for k,f in flatten(data):
if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
if session:
f.file = AttachmentFileLike(session, dd['_id'], f.id)
f.doc_id = dd['_id']
f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
f.doc_id = dd[subpath]['_id']
f.rev = dd[subpath]['_rev']
return data
dumps = pythonjson.dumps
loads = pythonjson.loads
|
ish/couchish
|
8d31c3a6d78934955e62b60dd7e1304d57ef0dfe
|
Change to work with and require couchdb-python >= 0.7.
|
diff --git a/couchish/filestore.py b/couchish/filestore.py
index ccc60ff..d7411e8 100644
--- a/couchish/filestore.py
+++ b/couchish/filestore.py
@@ -1,44 +1,43 @@
from __future__ import with_statement
from cStringIO import StringIO
import couchish
class CouchDBAttachmentSource(object):
"""
A file source for the FileResource to use to read attachments from
documents in a CouchDB database.
Note: the application would be responsible for uploading files.
"""
def __init__(self, couchish_store):
self.couchish = couchish_store
def get(self, key, cache_tag=None):
# XXX This would be much better written using httplib2 and performing a
# single GET to request the image directly, using the ETag as the
# cache_tag (which is the document _rev anyway). But for now ...
try:
doc_id, attachment_name = key.split('/', 1)
except ValueError:
raise KeyError
# Get the document with the attachment to see if we actually need to
# fetch the whole attachment.
try:
with self.couchish.session() as S:
doc = S.doc_by_id(doc_id)
except couchish.NotFound:
raise KeyError(key)
# Check the attachment stub exists.
attachment_stub = doc.get('_attachments', {}).get(attachment_name)
if attachment_stub is None:
raise KeyError(key)
# See if the caller's version is up to date.
if cache_tag and doc['_rev'] == cache_tag:
return (doc['_rev'], [('Content-Type',None)], None)
# Get the attachment content.
with self.couchish.session() as S:
content = S.get_attachment(doc_id, attachment_name)
- return (doc['_rev'], [('Content-Type',attachment_stub['content_type'])], StringIO(content))
-
+ return (doc['_rev'], [('Content-Type',attachment_stub['content_type'])], content)
diff --git a/couchish/tests/test_couchish_store_files.py b/couchish/tests/test_couchish_store_files.py
index 218b782..9109357 100644
--- a/couchish/tests/test_couchish_store_files.py
+++ b/couchish/tests/test_couchish_store_files.py
@@ -1,226 +1,226 @@
from __future__ import with_statement
import unittest
import os.path
import couchdb
from couchish import config, store
from schemaish.type import File
from couchish import jsonutil
def data_filename(filename):
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
if namespace:
namespace = '_%s'%namespace
else:
namespace = ''
return data_filename('test_couchish%s_%s.yaml' % (namespace,type))
db_name = 'test-couchish'
def strip_id_rev(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
return couchdoc
def matches_supplied(test, supplied):
test = dict((key, value) for (key, value) in test.iteritems() if key in supplied)
return test == supplied
class TestFiles(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
def test_addition_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = jsonutil.CouchishFile(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
- assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
+ assert attachment.read() == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
def test_change_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = jsonutil.CouchishFile(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
- assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
+ assert attachment.read() == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
# now lets replace the file
fh = open('couchish/tests/data/files/test-changed.txt','r')
f = jsonutil.CouchishFile(fh, 'test-changed.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = f
fh.close()
new_photo_id = matt.__subject__['photo'].id
sess = self.S.session()
attachment = 'foo'
attachment = sess.session._db.get_attachment(matt_id,new_photo_id)
- assert attachment == 'and now it\'s changed\n'
+ assert attachment.read() == 'and now it\'s changed\n'
assert new_photo_id == first_created_photo_id
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
def test_remove_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
- assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
+ assert attachment.read() == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = None
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert not '_attachments' in matt
assert matt['photo'] == None
def test_moving_in_sequence(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'book', 'first_name': 'Matt', 'last_name': 'Goodall','photo':[ f ]}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'][0].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
- assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
+ assert attachment.read() == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'][0],'id')
fh2 = open('couchish/tests/data/files/test-changed.txt','r')
f2 = File(fh2, 'test2.txt','text/plain')
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'].append( f2 )
fh2.close()
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert matches_supplied(matt['_attachments'][ matt['photo'][0].id ], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
assert matches_supplied(matt['_attachments'][ matt['photo'][1].id ], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
assert len(matt['_attachments']) == 2
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'].pop(0)
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][ matt['photo'][0].id ], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
def test_unchanged_file(self):
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'book', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f }
# create a file
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
- assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
+ assert attachment.read() == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
# now lets replace the file
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = File(None,'test_ADDEDSUFFIX.txt','text/plain')
new_photo_id = matt.__subject__['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, new_photo_id)
assert new_photo_id == first_created_photo_id
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt = matt.__subject__
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
assert matt['photo'].filename == 'test_ADDEDSUFFIX.txt'
diff --git a/setup.py b/setup.py
index b122f9f..d92f2de 100644
--- a/setup.py
+++ b/setup.py
@@ -1,38 +1,39 @@
from setuptools import setup, find_packages
import sys, os
version = '0.2.2'
setup(name='couchish',
version=version,
description="Couchdb library that includes reference cacheing triggers and consolidates updates.",
long_description="""\
A couch wrapper that includes reference info updating, serialisation of complex types into json, filehandling (returning filehandles, storing document filehandles as attachments and many other bits). Get in touch if any of this sounds interesting.
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Tim Parkin & Matt Goodall',
author_email='[email protected]',
url='http://ish.io',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"PyYAML",
+ "couchdb>=0.7",
"couchdb-session",
"dottedish",
"jsonish",
"schemaish",
],
extras_require={
'formish': ['formish'],
},
entry_points="""
# -*- Entry points: -*-
""",
test_suite='couchish.tests',
tests_require=['BeautifulSoup', 'WebOb', 'formish'],
)
|
ish/couchish
|
4aca64567a43c687bd0e1db786430e3983463b7d
|
prevented early consuption of changes/actions iterators in post flush hook
|
diff --git a/couchish/store.py b/couchish/store.py
index b5db300..a3401ab 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,300 +1,304 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from datetime import datetime
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config, pre_flush_hook=None, post_flush_hook=None):
self.db = db
self.config = config
self.pre_flush_hook = pre_flush_hook
self.post_flush_hook = post_flush_hook
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
self._flush_timestamp = None
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view(
'_all_docs',
remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_type(self, type, remove_rows_with_missing_doc=False,
**options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
return self.docs_by_view(
view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_view(self, view, remove_rows_with_missing_doc=False,
**options):
options['include_docs'] = True
results = self.view(view, **options)
docs = (row.doc for row in results.rows)
if remove_rows_with_missing_doc:
docs = (doc for doc in docs if doc is not None)
return docs
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
# We're iterating the sequences multiple time so we might as well just
# turn them into lists and be done with it.
deletions, additions, changes = \
list(deletions), list(additions), list(changes)
if self.store.pre_flush_hook is not None:
self.store.pre_flush_hook(deletions, additions, changes)
# Record ctime and mtime for addited and updated documents.
for doc in additions:
metadata = doc.setdefault('metadata', {})
metadata['ctime'] = metadata['mtime'] = self._flush_timestamp
for doc, _ in changes:
metadata = doc.setdefault('metadata', {})
metadata['mtime'] = self._flush_timestamp
# Record any files that need storing.
file_deletions, file_additions = filehandling._parse_changes_for_files(
session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
# Record the timestamp of the flush, used for all timestamps during the save.
self._flush_timestamp = datetime.utcnow().isoformat()
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def make_refs(self, view, ref_keys):
"""
Build a mapping of ref_keys to refs, where a ref is a dict containing a
'_ref' item and anything else returned as the view's value.
"""
def ref_from_row(row):
ref = row.value
ref['_ref'] = row.key
return ref
rows = self.view(view, keys=ref_keys)
return dict((row.key, ref_from_row(row)) for row in rows)
def make_ref(self, view, ref_key):
"""
Build a ref (see make_refs) for the row with the given ref_key.
"""
return self.make_refs(view, [ref_key])[ref_key]
def _post_flush_hook(self, session, deletions, additions, changes):
+ # XXX generators are being re-used so need to turn them into lists
+ deletions, additions = list(deletions), list(additions)
+ changes = [(doc, list(actions)) for (doc, actions) in changes]
+
if self.store.post_flush_hook is not None:
self.store.post_flush_hook(deletions, additions, changes)
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
db5db3f419eecd172bfcee6115bac6d3182f0c5b
|
prevented early consuption of changes iterators in post flush hooks
|
diff --git a/couchish/store.py b/couchish/store.py
index b5db300..4b807b8 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,300 +1,304 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from datetime import datetime
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config, pre_flush_hook=None, post_flush_hook=None):
self.db = db
self.config = config
self.pre_flush_hook = pre_flush_hook
self.post_flush_hook = post_flush_hook
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
self._flush_timestamp = None
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view(
'_all_docs',
remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_type(self, type, remove_rows_with_missing_doc=False,
**options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
return self.docs_by_view(
view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_view(self, view, remove_rows_with_missing_doc=False,
**options):
options['include_docs'] = True
results = self.view(view, **options)
docs = (row.doc for row in results.rows)
if remove_rows_with_missing_doc:
docs = (doc for doc in docs if doc is not None)
return docs
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
# We're iterating the sequences multiple time so we might as well just
# turn them into lists and be done with it.
deletions, additions, changes = \
list(deletions), list(additions), list(changes)
if self.store.pre_flush_hook is not None:
self.store.pre_flush_hook(deletions, additions, changes)
# Record ctime and mtime for addited and updated documents.
for doc in additions:
metadata = doc.setdefault('metadata', {})
metadata['ctime'] = metadata['mtime'] = self._flush_timestamp
for doc, _ in changes:
metadata = doc.setdefault('metadata', {})
metadata['mtime'] = self._flush_timestamp
# Record any files that need storing.
file_deletions, file_additions = filehandling._parse_changes_for_files(
session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
# Record the timestamp of the flush, used for all timestamps during the save.
self._flush_timestamp = datetime.utcnow().isoformat()
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def make_refs(self, view, ref_keys):
"""
Build a mapping of ref_keys to refs, where a ref is a dict containing a
'_ref' item and anything else returned as the view's value.
"""
def ref_from_row(row):
ref = row.value
ref['_ref'] = row.key
return ref
rows = self.view(view, keys=ref_keys)
return dict((row.key, ref_from_row(row)) for row in rows)
def make_ref(self, view, ref_key):
"""
Build a ref (see make_refs) for the row with the given ref_key.
"""
return self.make_refs(view, [ref_key])[ref_key]
def _post_flush_hook(self, session, deletions, additions, changes):
+ # XXX generators are being re-used so need to turn them into lists
+ deletions, additions = list(deletions), list(additions)
+ changes = [(doc, list(actions)) for (doc, actions) in changes]
+
if self.store.post_flush_hook is not None:
self.store.post_flush_hook(deletions, additions, changes)
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
15fb9b00275e8a42e30bde109265f2885e83d89a
|
Ignore stuff that's the result of running tests.
|
diff --git a/.gitignore b/.gitignore
index abfbc72..26ecae0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
*.pyc
*.swp
*.egg-info
+/*.egg
+/.coverage
|
ish/couchish
|
fb5eaa155317946b3ccf8c7c21c4adf0167e8d7f
|
Fix schemaish attribute creation abitrary args are not passed to the nitializer.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index fbcae6d..62059fe 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,535 +1,532 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
-from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
+from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry, attr_kwargs
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
-
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
- def __init__(self, **k):
- self.refersto = k['attr']['refersto']
+ def __init__(self, refersto, **k):
+ self.refersto = refersto
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
-
class TypeRegistry(SchemaishTypeRegistry):
-
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
- return Reference(**field)
+ return Reference(field['attr']['refersto'], **attr_kwargs(field))
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, field):
if field.value == ['']:
v = self.empty
else:
v = field.value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, field, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, field, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, field):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, field=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
_options = list(options)
_options.sort(lambda x, y: cmp(len(x[0].split('.')), len(y[0].split('.'))))
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in _options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
def to_request_data(self, field, data):
if data is None:
return [None]
return [data['path']]
def from_request_data(self, field, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
def get_none_option_value(self, field):
return self.none_option[0]
def get_options(self, field):
return self.options
def selected(self, option, field):
if field.value is not None and option[0] == field.value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
options = [(key, value['data']['label']) for key, value in full_options]
formish.CheckboxMultiChoiceTree.__init__(self, options, css_class=css_class)
self.full_options = dict(full_options)
def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, field):
if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
def from_request_data(self, field, data):
return [self.full_options[item] for item in data]
class RefInput(formish.Input):
"""
Simple text input field for entering a reference to another object.
"""
type = "RefInput"
def __init__(self, db, **k):
self.db = db
self.additional_fields = k.pop('additional_fields', [])
formish.Input.__init__(self, **k)
def to_request_data(self, field, data):
if data is None:
return ['']
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(data.get(attr, '') for attr in additional_fields)]
def from_request_data(self, field, request_data):
data = request_data[0].strip()
# Extract the id from the content.
id = data.split('|', 1)[0]
# Return default if nothing entered.
if not id:
return self.empty
# Convert the id into a ref and return.
row = iter(self.db.view(field.attr.refersto, key=id)).next()
ref = row.value
ref.update({'_ref': row.key})
return ref
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, field, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, field, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """
    Formish widget registry extended with couchish-specific widgets whose
    options/references are resolved through a couchdb-backed store.
    """
    def __init__(self, store):
        FormishWidgetRegistry.__init__(self)
        # Store exposing .db (couchdb views) and .config (type metadata).
        self.store = store
        self.registry['RefInput'] = self.refinput_factory
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # Fields of schema type 'Reference' default to the couchdb select widget.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory
    def selectchoice_couchdb_factory(self, spec, k):
        # spec: field definition dict (may be None); k: widget kwargs passed through.
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # Explicit widget 'view' wins; otherwise fall back to the reference target.
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.store.db, view, label_template, **k)
    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        widgetSpec = spec.get('widget')
        # Build (id, label) options from the configured couchdb view.
        def options(db, view):
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.store.db, view), **k)
    def refinput_factory(self, spec, k):
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # NOTE(review): 'refersto' is computed but never used here — RefInput
        # reads field.attr.refersto itself; confirm before removing.
        additional_fields = widget_spec.get('additional_fields',[])
        return RefInput(self.store.db, additional_fields=additional_fields, **k)
    def seqreftextarea_factory(self, spec, k):
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        additional_fields = widget_spec.get('additional_fields',[])
        return SeqRefTextArea(self.store.db, view, additional_fields=additional_fields, **k)
    def selectchoice_couchdbfacet_factory(self, spec, k):
        widgetSpec = spec.get('widget')
        # The facet document's 'category' entries become (path, item) options.
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        # Look up the facet's 'all' view from the store configuration.
        config = self.store.config.types['facet_%s'%widgetSpec['facet']]
        view = config['metadata']['views']['all']
        return SelectChoiceFacetTreeCouchDB(options=options(self.store.db, view), **k)
    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        widgetSpec = spec.get('widget')
        # Same facet-document expansion as selectchoice_couchdbfacet_factory.
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        config = self.store.config.types['facet_%s'%widgetSpec['facet']]
        view = config['metadata']['views']['all']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.store.db, view), **k)
    def fileupload_factory(self, spec, k):
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        # Map a stored file (or schemaish File) to the URL path used for previews.
        def url_ident_factory(obj):
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link',False)
        show_file_preview = widget_spec.get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
        identify_size = widget_spec.get('identify_size',False)
        return FileUpload( filestore=filestore.CachedTempFilestore(),
                url_base=url_base,
                image_thumbnail_default=image_thumbnail_default,
                show_download_link=show_download_link,
                show_file_preview=show_file_preview,
                show_image_thumbnail=show_image_thumbnail,
                url_ident_factory=url_ident_factory,
                identify_size=identify_size,
                **k )
def build(definition, store=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish form from a couchish JSON definition.

    When add_id_and_rev is True, hidden '_id' and '_rev' fields are prepended
    so couchdb document identity round-trips through the form.
    """
    if widget_registry is None:
        widget_registry = WidgetRegistry(store)
    if type_registry is None:
        type_registry = TypeRegistry()
    if add_id_and_rev is True:
        # Work on shallow copies so the caller's definition dict is untouched.
        definition = dict(definition)
        fields = list(definition['fields'])
        fields.insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
        fields.insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
        definition['fields'] = fields
    return formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
diff --git a/couchish/schemaish_jsonbuilder.py b/couchish/schemaish_jsonbuilder.py
index 3878dd3..b604aa6 100644
--- a/couchish/schemaish_jsonbuilder.py
+++ b/couchish/schemaish_jsonbuilder.py
@@ -1,240 +1,245 @@
import schemaish
from validatish import validator
# Characters formish key names cannot contain, and their replacements.
KEY_MUNGING = [
    ('-', '__dash__'),
    ('/', '__slash__'),
]
def relative_schemaish_key(item, parent):
    """
    Return the item's key expressed relative to its parent, with characters
    formish disallows replaced per KEY_MUNGING.
    """
    # Only the 'key' entries matter here.
    item_key = splitkey(item['key'])
    parent_key = splitkey(parent['key']) if parent is not None else None
    # Sanity check: the item must live underneath the parent.
    if parent_key is not None and not is_descendant_key(item_key, parent_key):
        raise ValueError("'item' is not a descendant of 'parent'")
    # Drop the parent's prefix — it is already accounted for as a group item.
    if parent_key is not None:
        item_key = descendant_key_part(item_key, parent_key)
    relative = joinkey(item_key)
    # Replace characters that formish doesn't allow.
    for search, replace in KEY_MUNGING:
        relative = relative.replace(search, replace)
    return relative
def full_schemaish_key(item, parents):
    """
    Return the full formish key for 'item' given its chain of ancestors,
    built by joining each link's key relative to its own parent.
    """
    # Reverse the ancestry (item first) and terminate with None so each
    # element can be paired with its parent for relative_schemaish_key.
    chain = list(parents) + [item]
    chain.reverse()
    chain.append(None)
    parts = [relative_schemaish_key(child, ancestor)
             for (child, ancestor) in pairs(chain)]
    parts.reverse()
    return joinkey(parts)
def pairs(s):
    """
    Yield len(s)-1 adjacent pairs from s: (s[0], s[1]), (s[1], s[2]), ...

    Rewritten portably: the original called the Python 2-only ``it.next()``
    and relied on StopIteration escaping the generator body to terminate,
    which PEP 479 turned into a RuntimeError on modern Python.
    """
    it = iter(s)
    try:
        first = next(it)
    except StopIteration:
        return  # empty input yields nothing
    for second in it:
        yield (first, second)
        first = second
def splitkey(key):
    """Break a dotted key string into its list of parts."""
    return key.split('.')
def joinkey(key):
    """Join a sequence of key parts back into a dotted key string."""
    return '.'.join(key)
def is_descendant_key(item, ancestor):
    """True when the key parts 'item' start with the key parts 'ancestor'."""
    prefix = item[:len(ancestor)]
    return prefix == ancestor
def descendant_key_part(item, ancestor):
    """Return the trailing key parts of 'item' not shared with 'ancestor'."""
    shared = len(ancestor)
    return item[shared:]
def strip_stars(key):
    """Remove '*' wildcard segments from a dotted key string."""
    return '.'.join(part for part in key.split('.') if part != '*')
def split_prefix(key):
    """Split a dotted key into (prefix, last segment); prefix is '' at top level."""
    prefix, _, last = key.rpartition('.')
    return prefix, last
def rec_getattr(obj, attr):
    """
    Fetch a dotted attribute path, e.g. rec_getattr(x, 'a.b') == x.a.b.

    Uses an explicit loop instead of the bare ``reduce`` builtin, which only
    exists in Python 2 (Python 3 moved it to functools).
    """
    for name in attr.split('.'):
        obj = getattr(obj, name)
    return obj
def rec_setattr(obj, attr, value):
    """
    Set a dotted attribute path, e.g. rec_setattr(x, 'a.b', v) does x.a.b = v.

    Uses an explicit loop instead of the bare ``reduce`` builtin, which only
    exists in Python 2 (Python 3 moved it to functools).
    """
    parts = attr.split('.')
    # Walk down to the owner of the final attribute, then assign.
    for name in parts[:-1]:
        obj = getattr(obj, name)
    setattr(obj, parts[-1], value)
class SchemaishTypeRegistry(object):
"""
Registry for converting an field's type specification to a schemaish type
instance.
"""
def __init__(self):
self.registry = {
'String': self.string_factory,
'Integer': self.integer_factory,
'Float': self.float_factory,
'Boolean': self.boolean_factory,
'Decimal': self.decimal_factory,
'Date': self.date_factory,
'Time': self.time_factory,
'DateTime': self.datetime_factory,
'File': self.file_factory,
'Sequence': self.list_factory,
'Tuple': self.tuple_factory,
'Structure': self.structure_factory,
}
self.default_type = 'String'
-
def make_schemaish_type(self, field):
field_type = field.get('type',self.default_type)
return self.registry[field_type](field)
-
def string_factory(self, field):
- return schemaish.String(**field)
+ return schemaish.String(**attr_kwargs(field))
def integer_factory(self, field):
- return schemaish.Integer(**field)
+ return schemaish.Integer(**attr_kwargs(field))
def float_factory(self, field):
- return schemaish.Float(**field)
+ return schemaish.Float(**attr_kwargs(field))
def boolean_factory(self, field):
- return schemaish.Boolean(**field)
+ return schemaish.Boolean(**attr_kwargs(field))
def decimal_factory(self, field):
- return schemaish.Decimal(**field)
+ return schemaish.Decimal(**attr_kwargs(field))
def date_factory(self, field):
- return schemaish.Date(**field)
+ return schemaish.Date(**attr_kwargs(field))
def time_factory(self, field):
- return schemaish.Time(**field)
+ return schemaish.Time(**attr_kwargs(field))
def datetime_factory(self, field):
- return schemaish.DateTime(**field)
+ return schemaish.DateTime(**attr_kwargs(field))
def file_factory(self, field):
- return schemaish.File(**field)
+ return schemaish.File(**attr_kwargs(field))
def list_factory(self, field):
field = dict(field)
attr = field.pop('attr')
attr_type = self.make_schemaish_type(attr)
- return schemaish.Sequence(attr_type, **field)
+ return schemaish.Sequence(attr_type, **attr_kwargs(field))
def tuple_factory(self, field):
field = dict(field)
attr = field.pop('attr')
attr_types = []
for a in attr['types']:
attr_types.append(self.make_schemaish_type(a))
- return schemaish.Tuple(attr_types, **field)
+ return schemaish.Tuple(attr_types, **attr_kwargs(field))
def structure_factory(self, field):
- return schemaish.Structure(**field)
+ return schemaish.Structure(**attr_kwargs(field))
+
schemaish_type_registry=SchemaishTypeRegistry()
+
+SCHEMAISH_ATTRIBUTE_KWARGS = ['default', 'description', 'title', 'validator']
+def attr_kwargs(field):
+ """
+ Return the schemaish.Attribute kwargs from the field definition.
+ """
+ return dict((k,v) for (k,v) in field.iteritems()
+ if k in SCHEMAISH_ATTRIBUTE_KWARGS)
+
+
def expand_definition(pre_expand_definition):
    """
    Normalise a definition's 'fields' list into flat field dicts carrying
    key metadata (fullkey/keyprefix/key/starkey) plus title, description,
    type, attr and a Required validator when the field is mandatory.
    """
    expanded = []
    for item in pre_expand_definition['fields']:
        fullkey = strip_stars(item['name'])
        keyprefix, key = split_prefix(fullkey)
        field = {
            'name': item['name'],
            'fullkey': fullkey,
            'keyprefix': keyprefix,
            'key': key,
            # 'starkey' keeps the original name with '*' wildcards intact.
            'starkey': item['name'],
            'title': item.get('title'),
            'description': item.get('description'),
            'type': item.get('type','String'),
            'attr': item.get('attr'),
        }
        if item.get('required') is True:
            field['validator'] = validator.Required()
        else:
            field['validator'] = None
        expanded.append(field)
    return expanded
def get_nested_attr(schema_type):
    """Follow .attr references down to the innermost (leaf) schema type."""
    while hasattr(schema_type, 'attr'):
        schema_type = schema_type.attr
    return schema_type
def build(definition, type_registry=schemaish_type_registry):
    """
    Construct a schemaish.Structure from a JSON-style definition.

    Parent structures must be declared before their sub-fields; otherwise a
    KeyError is raised.
    """
    expanded = expand_definition(definition)
    schema = schemaish.Structure()
    # Maps each field's full key to the structure node children attach to.
    nodes = {'': schema}
    for field in expanded:
        if 'name' not in field:
            continue
        keyprefix = field['keyprefix']
        if keyprefix not in nodes:
            raise KeyError('It is likely that you haven\'t defined your keys in the right order. A field must exist before sub-fields are encountered')
        schema_type = type_registry.make_schemaish_type(field)
        nodes[keyprefix].add(field['key'], schema_type)
        nodes[field['fullkey']] = get_nested_attr(schema_type)
    return schema
-
-
-
-
|
ish/couchish
|
33004c13249e919d85c9bce073538f8ed90a2ddd
|
Don't bypass the superclass's __init__.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 7cb5bb9..d9c3ce0 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,536 +1,535 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
    """
    Return (width, height) of an image file by shelling out to ImageMagick's
    ``identify`` binary and parsing its output.

    Fix: the original left the local ``type`` unbound (raising NameError) when
    the output named none of JPEG/PNG/GIF, and shadowed the builtin ``type``;
    an unrecognised format now raises ValueError instead.
    """
    IDENTIFY = '/usr/bin/identify'
    stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
    image_type = None
    if 'JPEG' in stdout:
        image_type = 'JPEG'
    if 'PNG' in stdout:
        image_type = 'PNG'
    if 'GIF' in stdout:
        image_type = 'GIF'
    if image_type is None:
        raise ValueError('unrecognised image format for %r' % filename)
    # identify output looks like: "file.png PNG 640x480 ...": take the token
    # after the format name and split it on 'x'.
    dims = stdout.split(image_type)[1].split(' ')[1]
    width, height = [int(s) for s in dims.split('x')]
    return width, height
class Reference(schemaish.attr.LeafAttribute):
    """ a generic reference

    Schemaish leaf attribute pointing at another document; the target is
    named by k['attr']['refersto'].
    """
    type = "Reference"
    def __init__(self, **k):
        # View/type name used to resolve the reference.
        self.refersto = k['attr']['refersto']
        #self.uses = k['attr']['uses']
        # NOTE(review): calls Attribute.__init__ directly, skipping any
        # LeafAttribute.__init__ — confirm this bypass is intentional.
        schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
    """Schemaish type registry extended with the couchish 'Reference' type."""
    def __init__(self):
        SchemaishTypeRegistry.__init__(self)
        self.registry['Reference'] = self.reference_factory
    def reference_factory(self, field):
        # Build a Reference attribute straight from the raw field definition.
        return Reference(**field)
UNSET = object()  # sentinel distinguishing "argument not supplied" from None
class FileUpload(formish.FileUpload):
    """
    formish.FileUpload variant that can record image dimensions
    (identify_size) alongside the uploaded file's request data.
    """
    type="ImageFileUpload"
    def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
                 css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
        formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
                 show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
        # When True, measure width/height of image uploads via PIL.
        self.identify_size = identify_size
    def pre_parse_incoming_request_data(self, field, data):
        """
        File uploads are weird; in our case this means asymmetric. We store the
        file in a temporary location and just store an identifier in the field.
        This at least makes the file look symmetric.
        """
        # A 'remove' submission clears every file-related value.
        if data.get('remove', [None])[0] is not None:
            data['name'] = ['']
            data['mimetype'] = ['']
            data['height'] = ['']
            data['width'] = ['']
            return data
        fieldstorage = data.get('file', [''])[0]
        if getattr(fieldstorage,'file',None):
            # Stash the upload in the filestore under a fresh key; the field
            # then only carries the 'tmp/<key>' resource path.
            key = uuid.uuid4().hex
            self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
            data['name'] = [util.encode_file_resource_path('tmp', key)]
            data['mimetype'] = [fieldstorage.type]
        if self.identify_size is True and fieldstorage != '':
            # Rewind before PIL reads the stream a second time.
            fieldstorage.file.seek(0)
            width, height = Image.open(fieldstorage.file).size
            data['width'] = [width]
            data['height'] = [height]
        else:
            data['width'] = [None]
            data['height'] = [None]
        return data
    def from_request_data(self, field, request_data):
        """
        Creates a File object if possible
        """
        # XXX We could add a file converter that converts this to a string data?
        if request_data['name'] == ['']:
            return None
        elif request_data['name'] == request_data['default']:
            # Unchanged upload: an all-None File signals "keep existing".
            return SchemaFile(None, None, None)
        else:
            key = util.decode_file_resource_path(request_data['name'][0])[1]
            try:
                cache_tag, headers, f = self.filestore.get(key)
            except KeyError:
                return None
            headers = dict(headers)
            if self.identify_size == True:
                metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
            else:
                metadata = None
            return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """
    <select> widget whose options come from a couchdb view; selections are
    stored as reference dicts carrying '_ref'.
    """
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'
    def __init__(self, db, view, label_template, **k):
        """
        :arg db: couchdb database used to run the options view
        :arg view: name of the view that yields (id, value) option rows
        :arg label_template: %-format template applied to each row's value
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg css_class: a css class to apply to the field
        """
        none_option = k.pop('none_option', UNSET)
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        self.label_template = label_template
        # Both caches are filled lazily by get_options().
        self.options = None
        self.results = None
    def selected(self, option, field):
        # Template helper: HTML attribute for the currently selected option.
        if field.value == ['']:
            v = self.empty
        else:
            v = field.value[0]
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''
    def to_request_data(self, field, data):
        """
        Before the widget is rendered, the data is converted to string
        format. If the data is None then we return an empty string. The
        sequence is the request data representation.
        """
        if data is None:
            return ['']
        string_data = data.get('_ref')
        return [string_data]
    def from_request_data(self, field, request_data):
        """
        After the form has been submitted, the request data is converted
        back into the schema type.
        """
        # Ensure self.results is populated before the lookup below.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        if isinstance(result, dict):
            result['_ref'] = string_data
            return result
        else:
            # Scalar view values are wrapped so '_ref' can still be carried.
            return {'_ref':string_data, 'data':result}
    def get_none_option_value(self, field):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option
    def get_options(self, field=None):
        """
        Return all of the options for the widget, querying the view only on
        the first call (results are cached on the instance).
        """
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            # NOTE(review): cmp-style sort is Python 2 only.
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """Return the dotted parent key for a list of key segments ('' at root)."""
    if len(segments) > 1:
        return '.'.join(segments[:-1])
    return ''
def mktree(options):
    """
    Arrange flat (dotted-path, label) options into a nested tree of
    {'data': (id, label), 'children': [...]} nodes under a ('root', 'Root')
    node, which is returned.

    Fixes: the Python 2-only cmp-style ``sort(lambda x, y: cmp(...))`` is
    replaced by an equivalent (stable) key= sort, and the unused local
    ``last_segments_len`` is removed.
    """
    # Shallower paths first so every node's parent already exists.
    ordered = sorted(options, key=lambda option: len(option[0].split('.')))
    root = {'': {'data': ('root', 'Root'), 'children': []}}
    for id, label in ordered:
        segments = id.split('.')
        parent = '.'.join(segments[:-1])  # '' for top-level ids
        root[id] = {'data': (id, label), 'children': []}
        root[parent]['children'].append(root[id])
    return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
    """
    Select a single category from a facet using a <select> list.
    """
    template='field.SelectChoice'
    type = "SelectChoiceFacetTree"
    none_option = ('', '- choose -')
    def __init__(self, options, **k):
        # options: sequence of (dotted path, category node) pairs.
        widgets.Widget.__init__(self, **k)
        # "Indent" nodes' labels with one '-' per level of depth.
        def indented_label(key, label):
            return ''.join(['-']*(len(key.split('.'))-1)+[label])
        self.options = [(key, indented_label(key, value['data']['label']))
                        for (key, value) in options]
        # Used to map from chosen item back to category reference.
        self.options_by_path = dict(options)
    ##
    # Request data methods.
    def to_request_data(self, field, data):
        # The category's dotted path is the request-data representation.
        if data is None:
            return [None]
        return [data['path']]
    def from_request_data(self, field, data):
        if data[0] == self.none_option[0]:
            return None
        return self.options_by_path[data[0]]
    ##
    # Methods required by the SelectChoice template
    def get_none_option_value(self, field):
        return self.none_option[0]
    def get_options(self, field):
        return self.options
    def selected(self, option, field):
        # HTML attribute marking the currently selected option.
        if field.value is not None and option[0] == field.value[0]:
            return ' selected="selected"'
        return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
- self.options = [ (key, value['data']['label']) for key, value in full_options]
+ options = [(key, value['data']['label']) for key, value in full_options]
+ formish.CheckboxMultiChoiceTree.__init__(self, options, css_class=css_class)
self.full_options = dict(full_options)
- self.optiontree = mktree(self.options)
- widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, field):
if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
def from_request_data(self, field, data):
return [self.full_options[item] for item in data]
class RefInput(formish.Input):
    """
    Simple text input field for entering a reference to another object.

    Request data uses a '|'-delimited string whose first segment is the
    referenced document id.
    """
    type = "RefInput"
    def __init__(self, db, **k):
        self.db = db
        # Extra attributes appended (after '_ref') to the rendered value.
        self.additional_fields = k.pop('additional_fields', [])
        formish.Input.__init__(self, **k)
    def to_request_data(self, field, data):
        if data is None:
            return ['']
        additional_fields = ['_ref'] + self.additional_fields
        return ['|'.join(data.get(attr, '') for attr in additional_fields)]
    def from_request_data(self, field, request_data):
        data = request_data[0].strip()
        # Extract the id from the content.
        id = data.split('|', 1)[0]
        # Return default if nothing entered.
        if not id:
            return self.empty
        # Convert the id into a ref and return.
        # NOTE(review): .next() is Python 2 only; also assumes the view
        # returns at least one row for the id.
        row = iter(self.db.view(field.attr.refersto, key=id)).next()
        ref = row.value
        ref.update({'_ref': row.key})
        return ref
class SeqRefTextArea(formish.Input):
    """
    Textarea input field holding one reference per line.
    :arg cols: set the cols attr on the textarea element
    :arg rows: set the rows attr on the textarea element
    """
    template = 'field.SeqRefTextArea'
    type="SeqRefTextArea"
    def __init__(self, db, view, **k):
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        # Extra attributes appended (after '_ref') to each rendered line.
        self.additional_fields = k.pop('additional_fields', [])
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        # NOTE(review): has_key() is Python 2 only.
        if not self.converter_options.has_key('delimiter'):
            self.converter_options['delimiter'] = '\n'
    def to_request_data(self, field, data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        if data is None:
            return []
        additional_fields = ['_ref'] + self.additional_fields
        return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
    def from_request_data(self, field, request_data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        # Extract the list of ids from the content, discarding empty lines.
        rows = request_data[0].splitlines()
        rows = (row.strip() for row in rows)
        rows = (row for row in rows if row)
        rows = (row.split('|', 1) for row in rows)
        ids = [row[0] for row in rows]
        # Return default if nothing entered.
        if not ids:
            return self.empty
        # Convert the ids into refs.
        rows = self.db.view(self.view, keys=ids)
        for row in rows:
            row.value.update({'_ref': row.key})
        return [row.value for row in rows]
    def __repr__(self):
        # Only attributes differing from the defaults are shown.
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r'%self.strip)
        if self.converter_options != {'delimiter':','}:
            attributes.append('converter_options=%r'%self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r'%self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r'%self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """
    Formish widget registry extended with couchish-specific widgets whose
    options/references are resolved through a couchdb-backed store.
    """
    def __init__(self, store):
        FormishWidgetRegistry.__init__(self)
        # Store exposing .db (couchdb views) and .config (type metadata).
        self.store = store
        self.registry['RefInput'] = self.refinput_factory
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # Fields of schema type 'Reference' default to the couchdb select widget.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory
    def selectchoice_couchdb_factory(self, spec, k):
        # spec: field definition dict (may be None); k: widget kwargs passed through.
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # Explicit widget 'view' wins; otherwise fall back to the reference target.
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.store.db, view, label_template, **k)
    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        widgetSpec = spec.get('widget')
        # Build (id, label) options from the configured couchdb view.
        def options(db, view):
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.store.db, view), **k)
    def refinput_factory(self, spec, k):
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # NOTE(review): 'refersto' is computed but never used here — RefInput
        # reads field.attr.refersto itself; confirm before removing.
        additional_fields = widget_spec.get('additional_fields',[])
        return RefInput(self.store.db, additional_fields=additional_fields, **k)
    def seqreftextarea_factory(self, spec, k):
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        additional_fields = widget_spec.get('additional_fields',[])
        return SeqRefTextArea(self.store.db, view, additional_fields=additional_fields, **k)
    def selectchoice_couchdbfacet_factory(self, spec, k):
        widgetSpec = spec.get('widget')
        # The facet document's 'category' entries become (path, item) options.
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        # Look up the facet's 'all' view from the store configuration.
        config = self.store.config.types['facet_%s'%widgetSpec['facet']]
        view = config['metadata']['views']['all']
        return SelectChoiceFacetTreeCouchDB(options=options(self.store.db, view), **k)
    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        widgetSpec = spec.get('widget')
        # Same facet-document expansion as selectchoice_couchdbfacet_factory.
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        config = self.store.config.types['facet_%s'%widgetSpec['facet']]
        view = config['metadata']['views']['all']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.store.db, view), **k)
    def fileupload_factory(self, spec, k):
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        # Map a stored file (or schemaish File) to the URL path used for previews.
        def url_ident_factory(obj):
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link',False)
        show_file_preview = widget_spec.get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
        identify_size = widget_spec.get('identify_size',False)
        return FileUpload( filestore=filestore.CachedTempFilestore(),
                url_base=url_base,
                image_thumbnail_default=image_thumbnail_default,
                show_download_link=show_download_link,
                show_file_preview=show_file_preview,
                show_image_thumbnail=show_image_thumbnail,
                url_ident_factory=url_ident_factory,
                identify_size=identify_size,
                **k )
def build(definition, store=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish form from a couchish JSON definition.

    When add_id_and_rev is True, hidden '_id' and '_rev' fields are prepended
    so couchdb document identity round-trips through the form.
    """
    if widget_registry is None:
        widget_registry = WidgetRegistry(store)
    if type_registry is None:
        type_registry = TypeRegistry()
    if add_id_and_rev is True:
        # Work on shallow copies so the caller's definition dict is untouched.
        definition = dict(definition)
        fields = list(definition['fields'])
        fields.insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
        fields.insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
        definition['fields'] = fields
    return formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
|
ish/couchish
|
8c01269c5c7aaa5921b677c4bc51a688c9ba1ffa
|
Remove unused default_value.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index f33e136..7cb5bb9 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,537 +1,536 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
    """
    Return (width, height) of an image file by shelling out to ImageMagick's
    ``identify`` binary and parsing its output.

    Fix: the original left the local ``type`` unbound (raising NameError) when
    the output named none of JPEG/PNG/GIF, and shadowed the builtin ``type``;
    an unrecognised format now raises ValueError instead.
    """
    IDENTIFY = '/usr/bin/identify'
    stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
    image_type = None
    if 'JPEG' in stdout:
        image_type = 'JPEG'
    if 'PNG' in stdout:
        image_type = 'PNG'
    if 'GIF' in stdout:
        image_type = 'GIF'
    if image_type is None:
        raise ValueError('unrecognised image format for %r' % filename)
    # identify output looks like: "file.png PNG 640x480 ...": take the token
    # after the format name and split it on 'x'.
    dims = stdout.split(image_type)[1].split(' ')[1]
    width, height = [int(s) for s in dims.split('x')]
    return width, height
class Reference(schemaish.attr.LeafAttribute):
    """ a generic reference

    Schemaish leaf attribute pointing at another document; the target is
    named by k['attr']['refersto'].
    """
    type = "Reference"
    def __init__(self, **k):
        # View/type name used to resolve the reference.
        self.refersto = k['attr']['refersto']
        #self.uses = k['attr']['uses']
        # NOTE(review): calls Attribute.__init__ directly, skipping any
        # LeafAttribute.__init__ — confirm this bypass is intentional.
        schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
    """Schemaish type registry extended with the couchish 'Reference' type."""
    def __init__(self):
        SchemaishTypeRegistry.__init__(self)
        self.registry['Reference'] = self.reference_factory
    def reference_factory(self, field):
        # Build a Reference attribute straight from the raw field definition.
        return Reference(**field)
UNSET = object()  # sentinel distinguishing "argument not supplied" from None
class FileUpload(formish.FileUpload):
    """
    formish.FileUpload variant that can record image dimensions
    (identify_size) alongside the uploaded file's request data.
    """
    type="ImageFileUpload"
    def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
                 css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
        formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
                 show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
        # When True, measure width/height of image uploads via PIL.
        self.identify_size = identify_size
    def pre_parse_incoming_request_data(self, field, data):
        """
        File uploads are weird; in our case this means asymmetric. We store the
        file in a temporary location and just store an identifier in the field.
        This at least makes the file look symmetric.
        """
        # A 'remove' submission clears every file-related value.
        if data.get('remove', [None])[0] is not None:
            data['name'] = ['']
            data['mimetype'] = ['']
            data['height'] = ['']
            data['width'] = ['']
            return data
        fieldstorage = data.get('file', [''])[0]
        if getattr(fieldstorage,'file',None):
            # Stash the upload in the filestore under a fresh key; the field
            # then only carries the 'tmp/<key>' resource path.
            key = uuid.uuid4().hex
            self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
            data['name'] = [util.encode_file_resource_path('tmp', key)]
            data['mimetype'] = [fieldstorage.type]
        if self.identify_size is True and fieldstorage != '':
            # Rewind before PIL reads the stream a second time.
            fieldstorage.file.seek(0)
            width, height = Image.open(fieldstorage.file).size
            data['width'] = [width]
            data['height'] = [height]
        else:
            data['width'] = [None]
            data['height'] = [None]
        return data
    def from_request_data(self, field, request_data):
        """
        Creates a File object if possible
        """
        # XXX We could add a file converter that converts this to a string data?
        if request_data['name'] == ['']:
            return None
        elif request_data['name'] == request_data['default']:
            # Unchanged upload: an all-None File signals "keep existing".
            return SchemaFile(None, None, None)
        else:
            key = util.decode_file_resource_path(request_data['name'][0])[1]
            try:
                cache_tag, headers, f = self.filestore.get(key)
            except KeyError:
                return None
            headers = dict(headers)
            if self.identify_size == True:
                metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
            else:
                metadata = None
            return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """
    <select> widget whose options come from a couchdb view; selections are
    stored as reference dicts carrying '_ref'.
    """
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'
    def __init__(self, db, view, label_template, **k):
        """
        :arg db: couchdb database used to run the options view
        :arg view: name of the view that yields (id, value) option rows
        :arg label_template: %-format template applied to each row's value
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg css_class: a css class to apply to the field
        """
        none_option = k.pop('none_option', UNSET)
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        self.label_template = label_template
        # Both caches are filled lazily by get_options().
        self.options = None
        self.results = None
    def selected(self, option, field):
        # Template helper: HTML attribute for the currently selected option.
        if field.value == ['']:
            v = self.empty
        else:
            v = field.value[0]
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''
    def to_request_data(self, field, data):
        """
        Before the widget is rendered, the data is converted to string
        format. If the data is None then we return an empty string. The
        sequence is the request data representation.
        """
        if data is None:
            return ['']
        string_data = data.get('_ref')
        return [string_data]
    def from_request_data(self, field, request_data):
        """
        After the form has been submitted, the request data is converted
        back into the schema type.
        """
        # Ensure self.results is populated before the lookup below.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        if isinstance(result, dict):
            result['_ref'] = string_data
            return result
        else:
            # Scalar view values are wrapped so '_ref' can still be carried.
            return {'_ref':string_data, 'data':result}
    def get_none_option_value(self, field):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option
    def get_options(self, field=None):
        """
        Return all of the options for the widget, querying the view only on
        the first call (results are cached on the instance).
        """
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            # NOTE(review): cmp-style sort is Python 2 only.
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """
    Return the dotted id of the parent given a list of id segments,
    or '' for a top-level id (a single segment).
    """
    if len(segments) > 1:
        return '.'.join(segments[:-1])
    return ''
def mktree(options):
    """
    Build a nested tree from flat (dotted-id, label) pairs.

    Each node is ``{'data': (id, label), 'children': [...]}``; the returned
    node is a synthetic root with data ('root', 'Root'). Assumes every id's
    parent appears among the options (or is the root).
    """
    ordered = list(options)
    # Parents have fewer dotted segments, so sorting by segment count
    # guarantees a parent node exists before its children are attached.
    ordered.sort(key=lambda pair: len(pair[0].split('.')))
    nodes = {'': {'data': ('root', 'Root'), 'children': []}}
    for node_id, label in ordered:
        segments = node_id.split('.')
        node = {'data': (node_id, label), 'children': []}
        nodes[node_id] = node
        nodes[get_parent(segments)]['children'].append(node)
    return nodes['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
    """
    Select a single category from a facet using a <select> list.

    Options are (path, category-dict) pairs; nesting depth is shown by
    prefixing labels with one '-' per dotted path segment.
    """

    template = 'field.SelectChoice'
    type = "SelectChoiceFacetTree"
    none_option = ('', '- choose -')

    def __init__(self, options, **k):
        widgets.Widget.__init__(self, **k)

        def indented_label(key, label):
            # One '-' per level of nesting below the root.
            depth = len(key.split('.')) - 1
            return '-' * depth + label

        self.options = [(key, indented_label(key, node['data']['label']))
                        for (key, node) in options]
        # Used to map from chosen item back to category reference.
        self.options_by_path = dict(options)

    ##
    # Request data methods.

    def to_request_data(self, field, data):
        if data is None:
            return [None]
        return [data['path']]

    def from_request_data(self, field, data):
        chosen = data[0]
        if chosen == self.none_option[0]:
            return None
        return self.options_by_path[chosen]

    ##
    # Methods required by the SelectChoice template.

    def get_none_option_value(self, field):
        return self.none_option[0]

    def get_options(self, field):
        return self.options

    def selected(self, option, field):
        if field.value is None:
            return ''
        return ' selected="selected"' if option[0] == field.value[0] else ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """
    Checkbox-tree widget whose options come from a CouchDB facet document.

    ``full_options`` is a sequence of (path, category-dict) pairs; each
    category dict is expected to carry ``['data']['label']`` and ``['path']``.
    """
    template = 'field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"
    # NOTE(review): this line carried a stray diff "-" marker in the source
    # (`- default_value = []`), which is a SyntaxError; restored as a plain
    # class-level attribute.
    default_value = []

    def __init__(self, full_options, css_class=None):
        # Flat (path, label) pairs drive the rendered checkboxes.
        self.options = [(key, value['data']['label']) for key, value in full_options]
        # Full category dicts, keyed by path, for from_request_data().
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        widgets.Widget.__init__(self, css_class=css_class)

    def to_request_data(self, field, data):
        # Selected categories are identified in the form by their paths.
        if data is None:
            return []
        return [c['path'] for c in data]

    def checked(self, option, field):
        # HTML attribute fragment for pre-checking previously selected paths.
        if field.value is not None and option[0] in field.value:
            return ' checked="checked"'
        else:
            return ''

    def from_request_data(self, field, data):
        # Map submitted paths back to the full category dicts.
        return [self.full_options[item] for item in data]
class RefInput(formish.Input):
    """
    Simple text input field for entering a reference to another object.

    Rendered as "<id>|<field>|..." where the extra fields come from
    ``additional_fields``; only the id is used when converting back.
    """

    type = "RefInput"

    def __init__(self, db, **k):
        self.db = db
        self.additional_fields = k.pop('additional_fields', [])
        formish.Input.__init__(self, **k)

    def to_request_data(self, field, data):
        if data is None:
            return ['']
        names = ['_ref'] + self.additional_fields
        values = [data.get(name, '') for name in names]
        return ['|'.join(values)]

    def from_request_data(self, field, request_data):
        raw = request_data[0].strip()
        # The id is everything before the first '|'.
        id = raw.split('|', 1)[0]
        # Return default if nothing entered.
        if not id:
            return self.empty
        # Resolve the id through the attr's "refersto" view and build a ref.
        row = next(iter(self.db.view(field.attr.refersto, key=id)))
        ref = row.value
        ref.update({'_ref': row.key})
        return ref
class SeqRefTextArea(formish.Input):
    """
    Textarea input field holding one object reference per line.

    :arg cols: set the cols attr on the textarea element
    :arg rows: set the cols attr on the textarea element
    """

    template = 'field.SeqRefTextArea'
    type = "SeqRefTextArea"

    def __init__(self, db, view, **k):
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        self.additional_fields = k.pop('additional_fields', [])
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        # Sequence items are serialised one per line by default.
        if 'delimiter' not in self.converter_options:
            self.converter_options['delimiter'] = '\n'

    def to_request_data(self, field, data):
        """
        Serialise each ref dict as "<id>|<field>|..." — one line per item.
        """
        if data is None:
            return []
        names = ['_ref'] + self.additional_fields
        return ['|'.join(item.get(name, '') for name in names)
                for item in data]

    def from_request_data(self, field, request_data):
        """
        Parse the textarea back into a list of refs, discarding blank lines.
        """
        # One reference per non-blank line; the id precedes the first '|'.
        stripped = (line.strip() for line in request_data[0].splitlines())
        ids = [line.split('|', 1)[0] for line in stripped if line]
        # Return default if nothing entered.
        if not ids:
            return self.empty
        # Convert the ids into refs via the view.
        rows = self.db.view(self.view, keys=ids)
        for row in rows:
            row.value.update({'_ref': row.key})
        return [row.value for row in rows]

    def __repr__(self):
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r' % self.strip)
        if self.converter_options != {'delimiter': ','}:
            attributes.append('converter_options=%r' % self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r' % self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r' % self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)' % (
            self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """
    Formish widget registry extended with CouchDB-backed widgets.

    Each factory takes the field's JSON spec plus the keyword dict ``k``
    built by the base registry and returns a widget instance.
    """

    def __init__(self, store):
        FormishWidgetRegistry.__init__(self)
        self.store = store
        self.registry['RefInput'] = self.refinput_factory
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        self.defaults['Reference'] = self.selectchoice_couchdb_factory

    def selectchoice_couchdb_factory(self, spec, k):
        """Build a SelectChoiceCouchDB; the view falls back to attr.refersto."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr', {})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.store.db, view, label_template, **k)

    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        """Build a plain CheckboxMultiChoiceTree from a CouchDB view's docs."""
        widget_spec = spec.get('widget')
        view = widget_spec['options']
        # (id, label) pairs from the view's included documents.
        options = [(item.id, item.doc['label'])
                   for item in list(self.store.db.view(view, include_docs=True))]
        return formish.CheckboxMultiChoiceTree(options=options, **k)

    def refinput_factory(self, spec, k):
        """Build a RefInput for a reference attr."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        # NOTE(review): the original computed a "refersto" value from
        # spec['attr']['attr'] here but never used it — RefInput reads
        # field.attr.refersto at runtime instead. Dead code removed.
        additional_fields = widget_spec.get('additional_fields', [])
        return RefInput(self.store.db, additional_fields=additional_fields, **k)

    def seqreftextarea_factory(self, spec, k):
        """Build a SeqRefTextArea; the view falls back to the nested attr's refersto."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr', {}).get('attr', {})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        additional_fields = widget_spec.get('additional_fields', [])
        return SeqRefTextArea(self.store.db, view, additional_fields=additional_fields, **k)

    def _facet_options(self, facet_name):
        """Return (path, category) pairs for the named facet's categories.

        Shared by both facet factories (previously duplicated inline).
        """
        config = self.store.config.types['facet_%s' % facet_name]
        view = config['metadata']['views']['all']
        facet = list(self.store.db.view(view, include_docs=True))[0].doc
        return [(category['path'], category) for category in facet['category']]

    def selectchoice_couchdbfacet_factory(self, spec, k):
        """Build a single-select widget over a facet's category tree."""
        widget_spec = spec.get('widget')
        return SelectChoiceFacetTreeCouchDB(
            options=self._facet_options(widget_spec['facet']), **k)

    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        """Build a multi-select checkbox tree over a facet's category tree."""
        widget_spec = spec.get('widget')
        return CheckboxMultiChoiceTreeCouchDB(
            full_options=self._facet_options(widget_spec['facet']), **k)

    def fileupload_factory(self, spec, k):
        """Build a FileUpload widget backed by a cached temp filestore."""
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        def url_ident_factory(obj):
            # Stored files are addressed as "<doc id>/<attachment id>".
            if isinstance(obj, schemaish.type.File):
                return '%s/%s' % (obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base', None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default', '/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link', False)
        show_file_preview = widget_spec.get('show_file_preview', True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail', False)
        identify_size = widget_spec.get('identify_size', False)
        return FileUpload(filestore=filestore.CachedTempFilestore(),
                          url_base=url_base,
                          image_thumbnail_default=image_thumbnail_default,
                          show_download_link=show_download_link,
                          show_file_preview=show_file_preview,
                          show_image_thumbnail=show_image_thumbnail,
                          url_ident_factory=url_ident_factory,
                          identify_size=identify_size,
                          **k)
def build(definition, store=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish form from a couchish JSON definition.

    When ``add_id_and_rev`` is True, hidden ``_id`` and ``_rev`` fields are
    prepended so CouchDB document identity survives the form round trip.
    """
    if widget_registry is None:
        widget_registry = WidgetRegistry(store)
    if type_registry is None:
        type_registry = TypeRegistry()
    if add_id_and_rev is True:
        # Copy the definition dict and its fields list so the caller's
        # spec is not mutated by the injected hidden fields.
        definition = dict(definition)
        fields = list(definition['fields'])
        fields.insert(0, {'name': '_rev', 'widget': {'type': 'Hidden'}})
        fields.insert(0, {'name': '_id', 'widget': {'type': 'Hidden'}})
        definition['fields'] = fields
    return formish_build(definition, name=name, defaults=defaults,
                         errors=errors, action=action,
                         widget_registry=widget_registry,
                         type_registry=type_registry)
|
ish/couchish
|
efef253e9c83b6f5831eb2592d85c498b2658a97
|
added release info to setup
|
diff --git a/setup.py b/setup.py
index 2195546..72c22d4 100644
--- a/setup.py
+++ b/setup.py
@@ -1,37 +1,37 @@
from setuptools import setup, find_packages
import sys, os
version = '0.2.1'
setup(name='couchish',
version=version,
description="",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Tim Parkin & Matt Goodall',
author_email='[email protected]',
- url='',
+ url='http://ish.io',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"PyYAML",
"couchdb-session",
"dottedish",
"jsonish",
"schemaish",
],
extras_require={
'formish': ['formish'],
},
entry_points="""
# -*- Entry points: -*-
""",
test_suite='couchish.tests',
tests_require=['BeautifulSoup', 'WebOb', 'formish'],
)
|
ish/couchish
|
e6886bb1853b151a1d1f91ce6f7875818a86d120
|
Remove egg-info directory from repo.
|
diff --git a/.gitignore b/.gitignore
index c9b568f..abfbc72 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
*.pyc
*.swp
+*.egg-info
diff --git a/couchish.egg-info/PKG-INFO b/couchish.egg-info/PKG-INFO
deleted file mode 100644
index 097e5e6..0000000
--- a/couchish.egg-info/PKG-INFO
+++ /dev/null
@@ -1,10 +0,0 @@
-Metadata-Version: 1.0
-Name: couchish
-Version: 0.2.1
-Summary: UNKNOWN
-Home-page: UNKNOWN
-Author: Tim Parkin & Matt Goodall
-Author-email: [email protected]
-License: UNKNOWN
-Description: UNKNOWN
-Platform: UNKNOWN
diff --git a/couchish.egg-info/SOURCES.txt b/couchish.egg-info/SOURCES.txt
deleted file mode 100644
index 71ef926..0000000
--- a/couchish.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,104 +0,0 @@
-.gitignore
-README
-TODO
-run
-setup.py
-unittests
-couchish/__init__.py
-couchish/config.py
-couchish/couchish_formish_jsonbuilder.py
-couchish/couchish_jsonbuilder.py
-couchish/create_view.py
-couchish/errors.py
-couchish/filehandling.py
-couchish/filestore.py
-couchish/formish_jsonbuilder.py
-couchish/jsonutil.py
-couchish/schemaish_jsonbuilder.py
-couchish/store.py
-couchish/sync_categories.py
-couchish.egg-info/PKG-INFO
-couchish.egg-info/SOURCES.txt
-couchish.egg-info/dependency_links.txt
-couchish.egg-info/entry_points.txt
-couchish.egg-info/not-zip-safe
-couchish.egg-info/requires.txt
-couchish.egg-info/top_level.txt
-couchish/tests/__init__.py
-couchish/tests/test_couchish_formish_jsonbuilder.py
-couchish/tests/test_couchish_jsonbuilder.py
-couchish/tests/test_couchish_store.py
-couchish/tests/test_couchish_store_files.py
-couchish/tests/test_filestore.py
-couchish/tests/test_formish_jsonbuilder.py
-couchish/tests/test_schemaish_jsonbuilder.py
-couchish/tests/test_store.py
-couchish/tests/util.py
-couchish/tests/data/categories.yaml
-couchish/tests/data/test_couchish_author.yaml
-couchish/tests/data/test_couchish_book.yaml
-couchish/tests/data/test_couchish_dvd.yaml
-couchish/tests/data/test_couchish_post.yaml
-couchish/tests/data/test_couchish_simple.yaml
-couchish/tests/data/test_couchish_views.yaml
-couchish/tests/data/test_upload.yaml
-couchish/tests/data/autoviews/test_couchish_author.yaml
-couchish/tests/data/autoviews/test_couchish_post.yaml
-couchish/tests/data/autoviews/test_couchish_views.yaml
-couchish/tests/data/by/test_couchish_by_author.yaml
-couchish/tests/data/by/test_couchish_by_post.yaml
-couchish/tests/data/by/test_couchish_by_views.yaml
-couchish/tests/data/deepref/test_couchish_author.yaml
-couchish/tests/data/deepref/test_couchish_book.yaml
-couchish/tests/data/deepref/test_couchish_views.yaml
-couchish/tests/data/files/test-changed.txt
-couchish/tests/data/files/test.txt
-couchish/tests/data/formish_jsonbuilder/test_sequence.yaml
-couchish/tests/data/formish_jsonbuilder/test_sequenceofstructures.yaml
-couchish/tests/data/formish_jsonbuilder/test_simple.yaml
-couchish/tests/data/formish_jsonbuilder/test_substructure.yaml
-couchish/tests/data/formish_jsonbuilder/test_types.yaml
-couchish/tests/data/formish_jsonbuilder/test_widgets.yaml
-couchish/tests/data/nestedrefinnestedseq/test_couchish_author.yaml
-couchish/tests/data/nestedrefinnestedseq/test_couchish_book.yaml
-couchish/tests/data/nestedrefinnestedseq/test_couchish_views.yaml
-couchish/tests/data/nestedrefinseq/test_couchish_author.yaml
-couchish/tests/data/nestedrefinseq/test_couchish_book.yaml
-couchish/tests/data/nestedrefinseq/test_couchish_views.yaml
-couchish/tests/data/refinseq/test_couchish_author.yaml
-couchish/tests/data/refinseq/test_couchish_book.yaml
-couchish/tests/data/refinseq/test_couchish_views.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_sequence.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_sequenceofstructures.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_simple.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_substructure.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_types.yaml
-docs-build/Makefile
-docs-build/conf.py
-docs-build/future.rst
-docs-build/index.rst
-docs/doctrees/environment.pickle
-docs/doctrees/future.doctree
-docs/doctrees/index.doctree
-docs/html/future.html
-docs/html/genindex.html
-docs/html/index.html
-docs/html/objects.inv
-docs/html/search.html
-docs/html/searchindex.js
-docs/html/_sources/future.txt
-docs/html/_sources/index.txt
-docs/html/_static/contents.png
-docs/html/_static/default.css
-docs/html/_static/doctools.js
-docs/html/_static/file.png
-docs/html/_static/jquery.js
-docs/html/_static/minus.png
-docs/html/_static/navigation.png
-docs/html/_static/plus.png
-docs/html/_static/pygments.css
-docs/html/_static/rightsidebar.css
-docs/html/_static/searchtools.js
-docs/html/_static/sphinxdoc.css
-docs/html/_static/stickysidebar.css
-docs/html/_static/traditional.css
\ No newline at end of file
diff --git a/couchish.egg-info/dependency_links.txt b/couchish.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/couchish.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/couchish.egg-info/entry_points.txt b/couchish.egg-info/entry_points.txt
deleted file mode 100644
index 5d3e5f6..0000000
--- a/couchish.egg-info/entry_points.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-
- # -*- Entry points: -*-
-
\ No newline at end of file
diff --git a/couchish.egg-info/not-zip-safe b/couchish.egg-info/not-zip-safe
deleted file mode 100644
index 8b13789..0000000
--- a/couchish.egg-info/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/couchish.egg-info/requires.txt b/couchish.egg-info/requires.txt
deleted file mode 100644
index 85a1332..0000000
--- a/couchish.egg-info/requires.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-PyYAML
-couchdb-session
-dottedish
-jsonish
-schemaish
-
-[formish]
-formish
\ No newline at end of file
diff --git a/couchish.egg-info/top_level.txt b/couchish.egg-info/top_level.txt
deleted file mode 100644
index d23e4cb..0000000
--- a/couchish.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-couchish
|
ish/couchish
|
67cfafac77314da7bc108ae1a4e5266755b78a8e
|
Remove .egg-info directory
|
diff --git a/.gitignore b/.gitignore
index c9b568f..abfbc72 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
*.pyc
*.swp
+*.egg-info
diff --git a/couchish.egg-info/PKG-INFO b/couchish.egg-info/PKG-INFO
deleted file mode 100644
index 097e5e6..0000000
--- a/couchish.egg-info/PKG-INFO
+++ /dev/null
@@ -1,10 +0,0 @@
-Metadata-Version: 1.0
-Name: couchish
-Version: 0.2.1
-Summary: UNKNOWN
-Home-page: UNKNOWN
-Author: Tim Parkin & Matt Goodall
-Author-email: [email protected]
-License: UNKNOWN
-Description: UNKNOWN
-Platform: UNKNOWN
diff --git a/couchish.egg-info/SOURCES.txt b/couchish.egg-info/SOURCES.txt
deleted file mode 100644
index 71ef926..0000000
--- a/couchish.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,104 +0,0 @@
-.gitignore
-README
-TODO
-run
-setup.py
-unittests
-couchish/__init__.py
-couchish/config.py
-couchish/couchish_formish_jsonbuilder.py
-couchish/couchish_jsonbuilder.py
-couchish/create_view.py
-couchish/errors.py
-couchish/filehandling.py
-couchish/filestore.py
-couchish/formish_jsonbuilder.py
-couchish/jsonutil.py
-couchish/schemaish_jsonbuilder.py
-couchish/store.py
-couchish/sync_categories.py
-couchish.egg-info/PKG-INFO
-couchish.egg-info/SOURCES.txt
-couchish.egg-info/dependency_links.txt
-couchish.egg-info/entry_points.txt
-couchish.egg-info/not-zip-safe
-couchish.egg-info/requires.txt
-couchish.egg-info/top_level.txt
-couchish/tests/__init__.py
-couchish/tests/test_couchish_formish_jsonbuilder.py
-couchish/tests/test_couchish_jsonbuilder.py
-couchish/tests/test_couchish_store.py
-couchish/tests/test_couchish_store_files.py
-couchish/tests/test_filestore.py
-couchish/tests/test_formish_jsonbuilder.py
-couchish/tests/test_schemaish_jsonbuilder.py
-couchish/tests/test_store.py
-couchish/tests/util.py
-couchish/tests/data/categories.yaml
-couchish/tests/data/test_couchish_author.yaml
-couchish/tests/data/test_couchish_book.yaml
-couchish/tests/data/test_couchish_dvd.yaml
-couchish/tests/data/test_couchish_post.yaml
-couchish/tests/data/test_couchish_simple.yaml
-couchish/tests/data/test_couchish_views.yaml
-couchish/tests/data/test_upload.yaml
-couchish/tests/data/autoviews/test_couchish_author.yaml
-couchish/tests/data/autoviews/test_couchish_post.yaml
-couchish/tests/data/autoviews/test_couchish_views.yaml
-couchish/tests/data/by/test_couchish_by_author.yaml
-couchish/tests/data/by/test_couchish_by_post.yaml
-couchish/tests/data/by/test_couchish_by_views.yaml
-couchish/tests/data/deepref/test_couchish_author.yaml
-couchish/tests/data/deepref/test_couchish_book.yaml
-couchish/tests/data/deepref/test_couchish_views.yaml
-couchish/tests/data/files/test-changed.txt
-couchish/tests/data/files/test.txt
-couchish/tests/data/formish_jsonbuilder/test_sequence.yaml
-couchish/tests/data/formish_jsonbuilder/test_sequenceofstructures.yaml
-couchish/tests/data/formish_jsonbuilder/test_simple.yaml
-couchish/tests/data/formish_jsonbuilder/test_substructure.yaml
-couchish/tests/data/formish_jsonbuilder/test_types.yaml
-couchish/tests/data/formish_jsonbuilder/test_widgets.yaml
-couchish/tests/data/nestedrefinnestedseq/test_couchish_author.yaml
-couchish/tests/data/nestedrefinnestedseq/test_couchish_book.yaml
-couchish/tests/data/nestedrefinnestedseq/test_couchish_views.yaml
-couchish/tests/data/nestedrefinseq/test_couchish_author.yaml
-couchish/tests/data/nestedrefinseq/test_couchish_book.yaml
-couchish/tests/data/nestedrefinseq/test_couchish_views.yaml
-couchish/tests/data/refinseq/test_couchish_author.yaml
-couchish/tests/data/refinseq/test_couchish_book.yaml
-couchish/tests/data/refinseq/test_couchish_views.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_sequence.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_sequenceofstructures.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_simple.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_substructure.yaml
-couchish/tests/data/schemaish_jsonbuilder/test_types.yaml
-docs-build/Makefile
-docs-build/conf.py
-docs-build/future.rst
-docs-build/index.rst
-docs/doctrees/environment.pickle
-docs/doctrees/future.doctree
-docs/doctrees/index.doctree
-docs/html/future.html
-docs/html/genindex.html
-docs/html/index.html
-docs/html/objects.inv
-docs/html/search.html
-docs/html/searchindex.js
-docs/html/_sources/future.txt
-docs/html/_sources/index.txt
-docs/html/_static/contents.png
-docs/html/_static/default.css
-docs/html/_static/doctools.js
-docs/html/_static/file.png
-docs/html/_static/jquery.js
-docs/html/_static/minus.png
-docs/html/_static/navigation.png
-docs/html/_static/plus.png
-docs/html/_static/pygments.css
-docs/html/_static/rightsidebar.css
-docs/html/_static/searchtools.js
-docs/html/_static/sphinxdoc.css
-docs/html/_static/stickysidebar.css
-docs/html/_static/traditional.css
\ No newline at end of file
diff --git a/couchish.egg-info/dependency_links.txt b/couchish.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/couchish.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/couchish.egg-info/entry_points.txt b/couchish.egg-info/entry_points.txt
deleted file mode 100644
index 5d3e5f6..0000000
--- a/couchish.egg-info/entry_points.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-
- # -*- Entry points: -*-
-
\ No newline at end of file
diff --git a/couchish.egg-info/not-zip-safe b/couchish.egg-info/not-zip-safe
deleted file mode 100644
index 8b13789..0000000
--- a/couchish.egg-info/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/couchish.egg-info/requires.txt b/couchish.egg-info/requires.txt
deleted file mode 100644
index 85a1332..0000000
--- a/couchish.egg-info/requires.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-PyYAML
-couchdb-session
-dottedish
-jsonish
-schemaish
-
-[formish]
-formish
\ No newline at end of file
diff --git a/couchish.egg-info/top_level.txt b/couchish.egg-info/top_level.txt
deleted file mode 100644
index d23e4cb..0000000
--- a/couchish.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-couchish
|
ish/couchish
|
a571ffc3f7f4bfc5080fc20dca2e79dbd5395fdc
|
fixes for testing relating to formish changes
|
diff --git a/couchish/tests/test_formish_jsonbuilder.py b/couchish/tests/test_formish_jsonbuilder.py
index ad0c0a6..cd57430 100644
--- a/couchish/tests/test_formish_jsonbuilder.py
+++ b/couchish/tests/test_formish_jsonbuilder.py
@@ -1,94 +1,94 @@
import unittest
from couchish.formish_jsonbuilder import build
import yaml
DATADIR = 'couchish/tests/data/formish_jsonbuilder/%s'
def get_schema(filename):
definition = yaml.load( open(DATADIR%filename).read() )
form = build(definition)
return form.structure.attr
def get_form(filename):
definition = yaml.load( open(DATADIR%filename).read() )
form = build(definition)
return form
class Test(unittest.TestCase):
def test_simple(self):
schema = get_schema('test_simple.yaml')
assert 'schemaish.Structure' in repr(schema)
assert len(schema.attrs) == 3
keys = [ k for k,v in schema.attrs]
assert keys == ['first_name','last_name','birthday']
first_name = schema.attrs[0][1]
last_name = schema.attrs[1][1]
birthday = schema.attrs[2][1]
assert 'schemaish.String' in repr(first_name)
assert 'schemaish.String' in repr(last_name)
assert 'schemaish.Date' in repr(birthday)
def test_types(self):
schema = get_schema('test_types.yaml')
keys = [ k for k,v in schema.attrs]
expected = ['string', 'integer', 'float', 'boolean', 'decimal', 'date',
'file', 'sequence_string', 'sequence_integer', 'sequence_date','sequence_sequence_string']
assert keys == expected
for attr in schema.attrs:
firstbit = attr[0].split('_')[0]
assert firstbit in repr(attr[1]).lower()
if len(attr[0]) > len(firstbit):
nextbit = attr[0].split('_')[1]
assert nextbit in repr(attr[1].attr).lower()
if len(attr[0]) > len(firstbit+'_'+nextbit):
nextbit = attr[0].split('_')[2]
assert nextbit in repr(attr[1].attr.attr).lower()
def test_substructure(self):
schema = get_schema('test_substructure.yaml')
assert 'schemaish.Structure' in repr(schema)
assert len(schema.attrs) == 3
keys = [ k for k,v in schema.attrs]
assert keys == ['first_name','last_name','address']
first_name = schema.attrs[0][1]
last_name = schema.attrs[1][1]
address = schema.attrs[2][1]
assert 'schemaish.String' in repr(first_name)
assert 'schemaish.String' in repr(last_name)
assert 'schemaish.Structure' in repr(address)
address = address.attrs
assert len(address) == 4
for attr in address:
assert 'schemaish.String' in repr(attr[1])
def test_sequence(self):
schema = get_schema('test_sequence.yaml')
countries = schema.attrs[2][1]
assert 'schemaish.Sequence' in repr(countries)
assert 'schemaish.String' in repr(countries.attr)
def test_sequenceofstructs(self):
schema = get_schema('test_sequenceofstructures.yaml')
addresses = schema.attrs[2][1]
assert 'schemaish.Sequence' in repr(addresses)
address = addresses.attr.attrs
assert len(address) == 4
for attr in address:
assert 'schemaish.String' in repr(attr[1])
def test_widgets(self):
form = get_form('test_widgets.yaml')
assert repr(form['input'].widget) == "BoundWidget(widget=formish.Input(), field=formish.Field(name='input', attr=schemaish.String()))"
assert repr(form['hidden'].widget) == "BoundWidget(widget=formish.Hidden(), field=formish.Field(name='hidden', attr=schemaish.String()))"
assert repr(form['textarea'].widget) == "BoundWidget(widget=formish.TextArea(), field=formish.Field(name='textarea', attr=schemaish.String()))"
- assert repr(form['selectchoice'].widget) == "BoundWidget(widget=formish.SelectChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three')], none_option=(None, '- choose -')), field=formish.Field(name='selectchoice', attr=schemaish.String()))"
- assert repr(form['selectwithotherchoice'].widget) == "BoundWidget(widget=formish.SelectWithOtherChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three')], none_option=[None, '- choose -']), field=formish.Field(name='selectwithotherchoice', attr=schemaish.String()))"
- assert repr(form['radiochoice'].widget) == "BoundWidget(widget=formish.RadioChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three')], none_option=[None, '- choose -']), field=formish.Field(name='radiochoice', attr=schemaish.String()))"
+ assert repr(form['selectchoice'].widget) == "BoundWidget(widget=formish.SelectChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three')], none_option=('', '- choose -')), field=formish.Field(name='selectchoice', attr=schemaish.String()))"
+ assert repr(form['selectwithotherchoice'].widget) == "BoundWidget(widget=formish.SelectWithOtherChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three')], none_option=['', '- choose -']), field=formish.Field(name='selectwithotherchoice', attr=schemaish.String()))"
+ assert repr(form['radiochoice'].widget) == "BoundWidget(widget=formish.RadioChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three')]), field=formish.Field(name='radiochoice', attr=schemaish.String()))"
assert repr(form['checkboxmultichoice'].widget) == "BoundWidget(widget=formish.CheckboxMultiChoice(options=[('One', 'One'), ('Two', 'Two'), ('Three', 'Three'), ('Four', 'Four'), ('Five', 'Five')]), field=formish.Sequence(name='checkboxmultichoice', attr=schemaish.Sequence(schemaish.String())))"
diff --git a/couchish/tests/test_store.py b/couchish/tests/test_store.py
index 48b83e6..b93a2e5 100644
--- a/couchish/tests/test_store.py
+++ b/couchish/tests/test_store.py
@@ -1,483 +1,482 @@
from __future__ import with_statement
import os.path
import time
import unittest
import couchdb
from couchish import config, errors, store
from couchish.tests import util
def data_filename(filename, namespace=None):
if namespace:
return os.path.join('couchish/tests/data/%s'%namespace, filename)
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
return data_filename('test_couchish_%s.yaml' % type, namespace)
db_name = 'test-couchish'
def strip_id_rev_meta(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
# Clean up the metadata.
del couchdoc['metadata']['ctime']
del couchdoc['metadata']['mtime']
if not couchdoc['metadata']:
del couchdoc['metadata']
return couchdoc
class TestStore(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestStore, self).setUp()
self.store = store.CouchishStore(self.db, config.Config({}, {}))
def test_session(self):
S = self.store.session()
doc_id = S.create({})
S.flush()
assert self.db.get(doc_id)
def test_with_session(self):
with self.store.session() as S:
S.create({'_id': 'foo'})
assert self.db.get('foo')
def test_flush_again(self):
doc_id = self.db.create({'model_type': 'foo'})
S = self.store.session()
doc = S.doc_by_id(doc_id)
doc['num'] = 1
S.flush()
doc['num'] = 2
S.flush()
assert self.db.get(doc_id)['num'] == 2
def test_with_session_exc(self):
try:
with self.store.session() as S:
doc_id = S.create({'_id': 'foo'})
bang
except NameError:
pass
else:
self.fail("Should have raised an exception")
assert self.db.get('foo') is None
class TestMetadata(util.TempDatabaseMixin, unittest.TestCase):
    """Tests for the ctime/mtime metadata CouchishStore stamps onto documents."""

    def setUp(self):
        super(TestMetadata, self).setUp()
        # Empty types/views config; metadata stamping is built into the store.
        self.store = store.CouchishStore(self.db, config.Config({}, {}))

    def test_create(self):
        # A freshly created doc gets both timestamps, equal at creation.
        S = self.store.session()
        doc_id = S.create({})
        S.flush()
        doc = self.db.get(doc_id)
        assert doc['metadata']['ctime']
        assert doc['metadata']['mtime']
        assert doc['metadata']['ctime'] == doc['metadata']['mtime']

    def test_create2(self):
        S = self.store.session()
        doc1_id = S.create({})
        time.sleep(.5)
        doc2_id = S.create({})
        S.flush()
        doc1 = self.db.get(doc1_id)
        doc2 = self.db.get(doc2_id)
        # NOTE(review): this compares doc1 with itself so it can never fail;
        # presumably the intent was doc1 vs doc2 (stamped in the same flush)
        # -- confirm and fix the assertion.
        assert doc1['metadata']['ctime'] == doc1['metadata']['ctime']

    def test_update(self):
        # Flushing a modification advances mtime while ctime stays put.
        S = self.store.session()
        doc_id = S.create({'model_type': 'test'})
        S.flush()
        doc = S.doc_by_id(doc_id)
        doc['foo'] = ['bar']
        S.flush()
        doc = self.db.get(doc_id)
        assert doc['metadata']['ctime']
        assert doc['metadata']['mtime']
        assert doc['metadata']['mtime'] > doc['metadata']['ctime']

    def test_graceful_upgrade(self):
        # Docs created outside couchish (no metadata) still get an mtime on
        # update, without a ctime being invented retroactively.
        doc_id = self.db.create({'model_type': 'foo'})
        S = self.store.session()
        doc = S.doc_by_id(doc_id)
        doc['foo'] = 'bar'
        S.flush()
        assert 'ctime' not in doc['metadata']
        assert doc['metadata']['mtime']

    def test_non_destructive(self):
        # Pre-existing metadata keys (e.g. schema_version) survive flushes.
        S = self.store.session()
        docid = S.create({'model_type': 'test', 'metadata': {'schema_version': '1.1'}})
        S.flush()
        doc = S.doc_by_id(docid)
        assert doc['metadata']['ctime']
        assert doc['metadata']['mtime']
        assert doc['metadata']['schema_version'] == '1.1'
        doc['foo'] = 'bar'
        S.flush()
        assert doc['metadata']['ctime']
        assert doc['metadata']['mtime']
        assert doc['metadata']['schema_version'] == '1.1'
class Test(unittest.TestCase):
    """Reference creation and propagation using the full YAML configuration."""

    def setUp(self):
        # Fresh database per test; views come from the per-type YAML configs.
        server = couchdb.Server()
        if db_name in server:
            del server[db_name]
        self.db = server.create(db_name)
        self.S = store.CouchishStore(self.db, config.Config.from_yaml(
            dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
            data_filename('test_couchish_views.yaml')
            ))
        self.S.sync_views()

    def test_make_refs(self):
        # make_refs/make_ref resolve ids to {'_ref': id, <summary fields>}.
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        sess.flush()
        refs = sess.make_refs('customdes/author_name', [matt_id, tim_id])
        assert refs == {matt_id: {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
                        tim_id: {'_ref': tim_id, 'first_name': 'Tim', 'last_name': 'Parkin'}}
        ref = sess.make_ref('customdes/author_name', matt_id)
        assert ref == {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}

    def test_simple_reference(self):
        # Changing a referenced author updates the denormalised copy held by
        # the book, but only the fields each reference actually carries.
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        book = {'model_type': 'book', 'title': 'Title',
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
                'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title',
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
                'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}

    def test_simple_reference_addingdictionary(self):
        # A referenced field may change from a scalar to a dict; the new value
        # is copied into the reference verbatim.
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        book = {'model_type': 'book', 'title': 'Title',
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
                'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
        assert book == {'model_type': 'book', 'title': 'Title',
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
                'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}

    def test_multiple_changes(self):
        # Two references to the same doc within one book both get updated.
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        book = {'model_type': 'book', 'title': 'Title',
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
                'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title',
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
                'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}

    def test_doc_by_id_not_found(self):
        # Unknown ids surface as couchish NotFound, not as None.
        sess = self.S.session()
        self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
class TestDeep(unittest.TestCase):
    """References nested inside a sub-dictionary are still kept up to date."""

    def setUp(self):
        server = couchdb.Server()
        if db_name in server:
            del server[db_name]
        self.db = server.create(db_name)
        # 'deepref' configs place the references under a nested 'metadata' dict.
        self.S = store.CouchishStore(self.db, config.Config.from_yaml(
            dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
            type_filename('views','deepref')
            ))
        self.S.sync_views()

    def test_simple(self):
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        book = {'model_type': 'book', 'title': 'Title', 'metadata': {
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
                'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        # The nested reference inside book['metadata'] picks up the rename.
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
                'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
                'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):

    def test_missing_ref_container(self):
        """
        Check references inside non-existant containers.

        The flush hook drills into the document hunting for references but it
        should check that whatever a reference is inside actually exists first.
        """
        # 'book' declares both a scalar 'author' ref and an 'authors' sequence;
        # the created doc only has 'author', so the flush must not trip over
        # the absent 'authors' container.
        cfg = config.Config({
            'author': {'fields': [
                {'name': 'name'}
                ]},
            'book': {'fields': [
                {'name': 'title'},
                {'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
                {'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
                ]},
            },
            [{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
        couchish_store = store.CouchishStore(self.db, cfg)
        couchish_store.sync_views()
        S = couchish_store.session()
        author_id = S.create({'model_type': 'author', 'name': 'Matt'})
        book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
                            'author': {'_ref': author_id, 'name': 'Matt'}})
        S.flush()
        # XXX Shouldn't need to do create a new session to make more changes.
        S = couchish_store.session()
        author = S.doc_by_id(author_id)
        author['name'] = 'Jessica'
        # Test passes if this flush does not raise.
        S.flush()
class TestRefsInSequences(unittest.TestCase):
    """References stored as items of a sequence are kept up to date."""

    def setUp(self):
        server = couchdb.Server()
        if db_name in server:
            del server[db_name]
        self.db = server.create(db_name)
        self.S = store.CouchishStore(self.db, config.Config.from_yaml(
            dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
            type_filename('views','refinseq')
            ))
        self.S.sync_views()

    def test_simple(self):
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        # 'authors' is a list of reference dicts rather than a single ref.
        book = {'model_type': 'book', 'title': 'Title', 'authors':[
                {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
                {'_ref': tim_id, 'last_name': 'Parkin'}]}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
class TestNestedRefsInSequences(unittest.TestCase):
    """References nested inside dicts that are items of a sequence."""

    def setUp(self):
        server = couchdb.Server()
        if db_name in server:
            del server[db_name]
        self.db = server.create(db_name)
        self.S = store.CouchishStore(self.db, config.Config.from_yaml(
            dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
            type_filename('views','nestedrefinseq')
            ))
        self.S.sync_views()

    def test_simple(self):
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        # Each sequence item wraps the reference under a 'nested' key.
        book = {'model_type': 'book', 'title': 'Title', 'authors':[
                {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
                {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}

    def test_twoentries(self):
        # Two nested references to the same author are both rewritten.
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        book = {'model_type': 'book', 'title': 'Title', 'authors':[
                {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
                {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
class TestNestedRefsInNestedSequences(unittest.TestCase):
    """References nested inside dicts within a sequence within a sequence."""

    def setUp(self):
        server = couchdb.Server()
        if db_name in server:
            del server[db_name]
        self.db = server.create(db_name)
        self.S = store.CouchishStore(self.db, config.Config.from_yaml(
            dict((name,type_filename(name,'nestedrefinnestedseq')) for name in ['book', 'author']),
            type_filename('views','nestedrefinnestedseq')
            ))
        self.S.sync_views()

    def test_simple(self):
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
        tim_id = sess.create(tim)
        # 'people' is a sequence whose items each hold an 'authors' sequence.
        book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
                {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
                {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}

    def test_twoentries(self):
        # Both doubly-nested references to the same author are rewritten.
        sess = self.S.session()
        matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
        matt_id = sess.create(matt)
        book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
                {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
                {'nested': {'_ref': matt_id, 'first_name': 'Matt','last_name': 'Goodall'}}]}]}
        book_id = sess.create(book)
        sess.flush()
        sess = self.S.session()
        matt = sess.doc_by_id(matt_id)
        matt['last_name'] = 'Woodall'
        sess.flush()
        matt = strip_id_rev_meta(self.db[matt_id])
        book = strip_id_rev_meta(self.db[book_id])
        assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
        assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name':'Matt','last_name': 'Woodall'}}]}]}
class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
    """docs_by_id/docs_by_view behaviour when some requested ids are missing."""

    def setUp(self):
        super(TestMissingKeys, self).setUp()
        couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
        couchish_store.sync_views()
        self.session = couchish_store.session()
        # Create docs '0'..'4'; id '5' is deliberately left missing.
        for i in range(5):
            self.session.create({'_id': str(i)})
        self.session.flush()

    def test_docs_by_id(self):
        # By default a missing id yields a None placeholder.
        docs = list(self.session.docs_by_id(['3', '4', '5']))
        assert docs[-1] is None

    def test_docs_by_view(self):
        docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
        assert docs[-1] is None

    def test_docs_by_id_filtered(self):
        # remove_rows_with_missing_doc=True drops missing rows entirely.
        # (Removed a leftover Python 2 debug `print docs` statement here.)
        docs = list(self.session.docs_by_id(['3', '4', '5'], remove_rows_with_missing_doc=True))
        assert len(docs) == 2
        assert None not in docs

    def test_docs_by_view_filtered(self):
        docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5'], remove_rows_with_missing_doc=True))
        assert len(docs) == 2
        assert None not in docs
|
ish/couchish
|
4258d17332ae2f18f8d3eef063de5753e91a4541
|
changed attribute on Reference to be a LeafAttribute. relates to schemaish change
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index aeffac4..f33e136 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,537 +1,537 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
    """Return (width, height) of an image by shelling out to ImageMagick's
    ``identify`` command.

    :raises ValueError: if the output does not mention JPEG, PNG or GIF.
    """
    IDENTIFY = '/usr/bin/identify'
    stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
    # Scan in the original order; a later match overrides an earlier one,
    # preserving the previous behaviour when multiple names appear.
    image_type = None
    for candidate in ('JPEG', 'PNG', 'GIF'):
        if candidate in stdout:
            image_type = candidate
    if image_type is None:
        # Previously this fell through to a confusing NameError on `type`
        # (which also shadowed the builtin); fail explicitly instead.
        raise ValueError('unrecognised image type for %r' % filename)
    # identify output looks like "file.png PNG 640x480 ..." -- take the WxH token.
    dims = stdout.split(image_type)[1].split(' ')[1]
    width, height = [int(s) for s in dims.split('x')]
    return width, height
class Reference(schemaish.attr.LeafAttribute):
    """ a generic reference

    The field spec must carry ``attr.refersto`` naming the CouchDB view used
    to resolve the reference.
    """
    type = "Reference"

    def __init__(self, **k):
        self.refersto = k['attr']['refersto']
        #self.uses = k['attr']['uses']
        # Initialise via the actual base class. The previous code still called
        # schemaish.attr.Attribute.__init__ after the base changed to
        # LeafAttribute, bypassing any LeafAttribute-specific initialisation.
        schemaish.attr.LeafAttribute.__init__(self, **k)
class TypeRegistry(SchemaishTypeRegistry):
    """Schemaish type registry extended with the couchish Reference type."""

    def __init__(self):
        SchemaishTypeRegistry.__init__(self)
        self.registry['Reference'] = self.reference_factory

    def reference_factory(self, field):
        # Build a Reference attribute directly from the YAML field spec.
        return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
    """File upload widget that can additionally record image dimensions
    (via PIL) alongside the uploaded file."""
    type="ImageFileUpload"

    def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
                 css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
        formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
                 show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
        # When True, width/height are read from the upload with PIL and kept
        # in the request data / file metadata.
        self.identify_size = identify_size

    def pre_parse_incoming_request_data(self, field, data):
        """
        File uploads are weird; in our case this means assymetric. We store the
        file in a temporary location and just store an identifier in the field.
        This at least makes the file look symmetric.
        """
        # "remove" checkbox wins over any upload: blank out all file fields.
        if data.get('remove', [None])[0] is not None:
            data['name'] = ['']
            data['mimetype'] = ['']
            data['height'] = ['']
            data['width'] = ['']
            return data
        fieldstorage = data.get('file', [''])[0]
        if getattr(fieldstorage,'file',None):
            # Stash the upload in the temp filestore under a random key and
            # record a 'tmp' resource path so later phases can find it.
            key = uuid.uuid4().hex
            self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
            data['name'] = [util.encode_file_resource_path('tmp', key)]
            data['mimetype'] = [fieldstorage.type]
        if self.identify_size is True and fieldstorage != '':
            # Rewind before PIL reads the stream (the filestore put consumed it).
            fieldstorage.file.seek(0)
            width, height = Image.open(fieldstorage.file).size
            data['width'] = [width]
            data['height'] = [height]
        else:
            data['width'] = [None]
            data['height'] = [None]
        return data

    def from_request_data(self, field, request_data):
        """
        Creates a File object if possible
        """
        # XXX We could add a file converter that converts this to a string data?
        if request_data['name'] == ['']:
            # Explicitly cleared (or never set): no file.
            return None
        elif request_data['name'] == request_data['default']:
            # Unchanged from the default: an "empty" File marker.
            return SchemaFile(None, None, None)
        else:
            key = util.decode_file_resource_path(request_data['name'][0])[1]
            try:
                cache_tag, headers, f = self.filestore.get(key)
            except KeyError:
                # Temp file expired or was never stored.
                return None
            headers = dict(headers)
            if self.identify_size == True:
                metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
            else:
                metadata = None
            return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """<select> widget whose options are the rows of a CouchDB view.

    Option values are document ids; the value stored on the document is a
    reference dict ({'_ref': id, ...view row value...}).
    """
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'

    def __init__(self, db, view, label_template, **k):
        """
        :arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg css_class: a css class to apply to the field
        """
        none_option = k.pop('none_option', UNSET)
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        # label_template is %-interpolated against each view row's value.
        self.label_template = label_template
        # Options and the id->value map are lazily built and then cached for
        # the lifetime of the widget (see get_options).
        self.options = None
        self.results = None

    def selected(self, option, field):
        # Template helper: return the HTML attribute for the chosen option.
        if field.value == ['']:
            v = self.empty
        else:
            v = field.value[0]
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''

    def to_request_data(self, field, data):
        """
        Before the widget is rendered, the data is converted to a string
        format.If the data is None then we return an empty string. The sequence
        is request data representation.
        """
        if data is None:
            return ['']
        string_data = data.get('_ref')
        return [string_data]

    def from_request_data(self, field, request_data):
        """
        after the form has been submitted, the request data is converted into
        to the schema type.
        """
        # Ensure the id->value cache is populated before lookup.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        # Dict view values become the reference itself; scalar values are
        # wrapped under a 'data' key.
        if isinstance(result, dict):
            result['_ref'] = string_data
            return result
        else:
            return {'_ref':string_data, 'data':result}

    def get_none_option_value(self, field):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option

    def get_options(self, field=None):
        """
        Return all of the options for the widget
        """
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            # Sort by rendered label (Python 2 cmp-style comparator).
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            # The "empty" value renders as '' so the browser submits a blank.
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """Return the dotted parent path for a list of path segments ('' for roots)."""
    parent_segments = segments[:-1]
    if not parent_segments:
        return ''
    return '.'.join(parent_segments)
def mktree(options):
    """Build a nested ``{'data': (id, label), 'children': [...]}`` tree from
    (dotted-id, label) pairs, rooted at a synthetic ('root', 'Root') node."""
    # Stable sort by path depth guarantees parents are inserted before children.
    ordered = sorted(options, key=lambda option: len(option[0].split('.')))
    nodes = {'': {'data': ('root', 'Root'), 'children': []}}
    for node_id, label in ordered:
        segments = node_id.split('.')
        # Dotted prefix of the id ('' for a top-level node).
        parent = '.'.join(segments[:-1])
        nodes[node_id] = {'data': (node_id, label), 'children': []}
        nodes[parent]['children'].append(nodes[node_id])
    return nodes['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
    """
    Select a single category from a facet using a <select> list.
    """
    template='field.SelectChoice'
    type = "SelectChoiceFacetTree"
    none_option = ('', '- choose -')

    def __init__(self, options, **k):
        # options is a list of (dotted path, category dict) pairs.
        widgets.Widget.__init__(self, **k)
        # "Indent" nodes' labels.
        def indented_label(key, label):
            return ''.join(['-']*(len(key.split('.'))-1)+[label])
        self.options = [(key, indented_label(key, value['data']['label']))
                        for (key, value) in options]
        # Used to map from chosen item back to category reference.
        self.options_by_path = dict(options)

    ##
    # Request data methods.

    def to_request_data(self, field, data):
        # The submitted value is the category's dotted path.
        if data is None:
            return [None]
        return [data['path']]

    def from_request_data(self, field, data):
        if data[0] == self.none_option[0]:
            return None
        return self.options_by_path[data[0]]

    ##
    # Methods required by the SelectChoice template

    def get_none_option_value(self, field):
        return self.none_option[0]

    def get_options(self, field):
        return self.options

    def selected(self, option, field):
        if field.value is not None and option[0] == field.value[0]:
            return ' selected="selected"'
        return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """Checkbox tree for picking multiple categories from a facet document."""
    template='field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"
    # NOTE(review): mutable class attribute shared by all instances -- safe
    # only if never mutated in place; confirm.
    default_value = []

    def __init__(self, full_options, css_class=None):
        # full_options is a list of (dotted path, category dict) pairs.
        self.options = [ (key, value['data']['label']) for key, value in full_options]
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        widgets.Widget.__init__(self,css_class=css_class)

    def to_request_data(self, field, data):
        # Stored categories are rendered as their dotted paths.
        if data is None:
            return []
        return [c['path'] for c in data]

    def checked(self, option, field):
        # Template helper for the checkbox "checked" attribute.
        if field.value is not None and option[0] in field.value:
            return ' checked="checked"'
        else:
            return ''

    def from_request_data(self, field, data):
        # Map submitted paths back to the full category dicts.
        return [self.full_options[item] for item in data]
class RefInput(formish.Input):
    """
    Simple text input field for entering a reference to another object.
    """
    type = "RefInput"

    def __init__(self, db, **k):
        self.db = db
        # Extra reference fields shown after the id, pipe-separated.
        self.additional_fields = k.pop('additional_fields', [])
        formish.Input.__init__(self, **k)

    def to_request_data(self, field, data):
        # Render as "id|field1|field2..." so the extra fields are visible.
        if data is None:
            return ['']
        additional_fields = ['_ref'] + self.additional_fields
        return ['|'.join(data.get(attr, '') for attr in additional_fields)]

    def from_request_data(self, field, request_data):
        data = request_data[0].strip()
        # Extract the id from the content.
        id = data.split('|', 1)[0]
        # Return default if nothing entered.
        if not id:
            return self.empty
        # Convert the id into a ref and return.
        # (Python 2 iterator protocol: .next(); raises StopIteration if the
        # id is not found in the view -- presumably validated upstream.)
        row = iter(self.db.view(field.attr.refersto, key=id)).next()
        ref = row.value
        ref.update({'_ref': row.key})
        return ref
class SeqRefTextArea(formish.Input):
    """
    Textarea input field for a sequence of references, one per line.

    :arg cols: set the cols attr on the textarea element
    :arg rows: set the cols attr on the textarea element
    """
    template = 'field.SeqRefTextArea'
    type="SeqRefTextArea"

    def __init__(self, db, view, **k):
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        # Extra reference fields rendered after each id, pipe-separated.
        self.additional_fields = k.pop('additional_fields', [])
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        # Sequence items are newline-delimited in the textarea.
        if not self.converter_options.has_key('delimiter'):
            self.converter_options['delimiter'] = '\n'

    def to_request_data(self, field, data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        if data is None:
            return []
        additional_fields = ['_ref'] + self.additional_fields
        return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]

    def from_request_data(self, field, request_data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        # Extract the list of ids from the content, discarding empty lines.
        rows = request_data[0].splitlines()
        rows = (row.strip() for row in rows)
        rows = (row for row in rows if row)
        rows = (row.split('|', 1) for row in rows)
        ids = [row[0] for row in rows]
        # Return default if nothing entered.
        if not ids:
            return self.empty
        # Convert the ids into refs.
        rows = self.db.view(self.view, keys=ids)
        for row in rows:
            row.value.update({'_ref': row.key})
        return [row.value for row in rows]

    def __repr__(self):
        # NOTE(review): self.strip is presumably set by formish.Input.__init__
        # -- confirm.
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r'%self.strip)
        if self.converter_options != {'delimiter':','}:
            attributes.append('converter_options=%r'%self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r'%self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r'%self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """Formish widget registry extended with couchish's CouchDB-backed
    widgets; each factory builds a widget from a YAML field spec."""

    def __init__(self, store):
        FormishWidgetRegistry.__init__(self)
        self.store = store
        self.registry['RefInput'] = self.refinput_factory
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # Reference fields default to a CouchDB-backed select box.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory

    def selectchoice_couchdb_factory(self, spec, k):
        # Select box fed by the view named in widget.view or attr.refersto.
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.store.db, view, label_template, **k)

    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        # Checkbox tree whose options come straight from a view's docs.
        widgetSpec = spec.get('widget')
        def options(db, view):
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.store.db, view), **k)

    def refinput_factory(self, spec, k):
        # Free-text reference entry ("id|field|field...").
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        additional_fields = widget_spec.get('additional_fields',[])
        return RefInput(self.store.db, additional_fields=additional_fields, **k)

    def seqreftextarea_factory(self, spec, k):
        # Textarea holding one reference per line.
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        additional_fields = widget_spec.get('additional_fields',[])
        return SeqRefTextArea(self.store.db, view, additional_fields=additional_fields, **k)

    def selectchoice_couchdbfacet_factory(self, spec, k):
        # Single-category facet picker; options come from the facet doc's
        # 'category' list, looked up via the facet type's "all" view.
        widgetSpec = spec.get('widget')
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        config = self.store.config.types['facet_%s'%widgetSpec['facet']]
        view = config['metadata']['views']['all']
        return SelectChoiceFacetTreeCouchDB(options=options(self.store.db, view), **k)

    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        # Multi-category facet picker; same option source as above.
        widgetSpec = spec.get('widget')
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        config = self.store.config.types['facet_%s'%widgetSpec['facet']]
        view = config['metadata']['views']['all']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.store.db, view), **k)

    def fileupload_factory(self, spec, k):
        # File/image upload backed by a cached temp filestore.
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        def url_ident_factory(obj):
            # Persisted files are addressed as "<doc id>/<attachment id>";
            # anything else is assumed to already be a path-ish identifier.
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link',False)
        show_file_preview = widget_spec.get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
        identify_size = widget_spec.get('identify_size',False)
        return FileUpload( filestore=filestore.CachedTempFilestore(),
                url_base=url_base,
                image_thumbnail_default=image_thumbnail_default,
                show_download_link=show_download_link,
                show_file_preview=show_file_preview,
                show_image_thumbnail=show_image_thumbnail,
                url_ident_factory=url_ident_factory,
                identify_size=identify_size,
                **k )
def build(definition, store=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """Build a formish Form from a couchish form definition.

    When add_id_and_rev is True, hidden _id/_rev fields are prepended so the
    form round-trips CouchDB document identity.
    """
    if widget_registry is None:
        widget_registry=WidgetRegistry(store)
    if type_registry is None:
        type_registry=TypeRegistry()
    if add_id_and_rev is True:
        # Copy the definition dict and its fields item so we can make changes
        # without affecting the spec.
        definition = dict(definition)
        definition['fields'] = list(definition['fields'])
        definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
        definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
    form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
    return form
|
ish/couchish
|
d75ec916c9d8d9ca3d16e1d4d3bd0fafdd5987c7
|
added pre and post flush hooks to allow custom handlers
|
diff --git a/couchish/store.py b/couchish/store.py
index 7aba937..b5db300 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,294 +1,300 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from datetime import datetime
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
    """Wraps a couchdb database with couchish config, views and sessions.

    pre_flush_hook/post_flush_hook are optional callables invoked around each
    session flush (see CouchishStoreSession) for custom handling.
    """

    def __init__(self, db, config, pre_flush_hook=None, post_flush_hook=None):
        self.db = db
        self.config = config
        self.pre_flush_hook = pre_flush_hook
        self.post_flush_hook = post_flush_hook

    def sync_views(self):
        # Push every configured view into its design document, creating or
        # updating as needed.
        for url, view in self.config.viewdata['views'].items():
            # URLs look like "<designdoc>/<view name possibly with slashes>".
            segments = url.split('/')
            designdoc = segments[0]
            name = '/'.join(segments[1:])
            view = ViewDefinition(designdoc, name, view[0], view[1])
            view.get_doc(self.db)
            view.sync(self.db)

    def session(self):
        """
        Create an editing session.
        """
        return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
self._flush_timestamp = None
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view(
'_all_docs',
remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_type(self, type, remove_rows_with_missing_doc=False,
**options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
return self.docs_by_view(
view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_view(self, view, remove_rows_with_missing_doc=False,
**options):
options['include_docs'] = True
results = self.view(view, **options)
docs = (row.doc for row in results.rows)
if remove_rows_with_missing_doc:
docs = (doc for doc in docs if doc is not None)
return docs
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
# We're iterating the sequences multiple time so we might as well just
# turn them into lists and be done with it.
deletions, additions, changes = \
list(deletions), list(additions), list(changes)
+ if self.store.pre_flush_hook is not None:
+ self.store.pre_flush_hook(deletions, additions, changes)
# Record ctime and mtime for addited and updated documents.
for doc in additions:
metadata = doc.setdefault('metadata', {})
metadata['ctime'] = metadata['mtime'] = self._flush_timestamp
for doc, _ in changes:
metadata = doc.setdefault('metadata', {})
metadata['mtime'] = self._flush_timestamp
# Record any files that need storing.
file_deletions, file_additions = filehandling._parse_changes_for_files(
session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
# Record the timestamp of the flush, used for all timestamps during the save.
self._flush_timestamp = datetime.utcnow().isoformat()
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def make_refs(self, view, ref_keys):
"""
Build a mapping of ref_keys to refs, where a ref is a dict containing a
'_ref' item and anything else returned as the view's value.
"""
def ref_from_row(row):
ref = row.value
ref['_ref'] = row.key
return ref
rows = self.view(view, keys=ref_keys)
return dict((row.key, ref_from_row(row)) for row in rows)
def make_ref(self, view, ref_key):
"""
Build a ref (see make_refs) for the row with the given ref_key.
"""
return self.make_refs(view, [ref_key])[ref_key]
def _post_flush_hook(self, session, deletions, additions, changes):
+ if self.store.post_flush_hook is not None:
+ self.store.post_flush_hook(deletions, additions, changes)
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
2d69fac390944dec22b180fa3251118d57c47002
|
fixed to allow wrongly ordered cat paths in categories
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index f638aac..aeffac4 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,533 +1,537 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, field):
if field.value == ['']:
v = self.empty
else:
v = field.value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, field, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, field, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, field):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, field=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
+ _options = list(options)
+ _options.sort(lambda x, y: cmp(len(x[0].split('.')), len(y[0].split('.'))))
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
- for id, label in options:
+ for id, label in _options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
+
+
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
def to_request_data(self, field, data):
if data is None:
return [None]
return [data['path']]
def from_request_data(self, field, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
def get_none_option_value(self, field):
return self.none_option[0]
def get_options(self, field):
return self.options
def selected(self, option, field):
if field.value is not None and option[0] == field.value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
default_value = []
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, field):
if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
def from_request_data(self, field, data):
return [self.full_options[item] for item in data]
class RefInput(formish.Input):
"""
Simple text input field for entering a reference to another object.
"""
type = "RefInput"
def __init__(self, db, **k):
self.db = db
self.additional_fields = k.pop('additional_fields', [])
formish.Input.__init__(self, **k)
def to_request_data(self, field, data):
if data is None:
return ['']
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(data.get(attr, '') for attr in additional_fields)]
def from_request_data(self, field, request_data):
data = request_data[0].strip()
# Extract the id from the content.
id = data.split('|', 1)[0]
# Return default if nothing entered.
if not id:
return self.empty
# Convert the id into a ref and return.
row = iter(self.db.view(field.attr.refersto, key=id)).next()
ref = row.value
ref.update({'_ref': row.key})
return ref
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, field, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, field, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, store):
FormishWidgetRegistry.__init__(self)
self.store = store
self.registry['RefInput'] = self.refinput_factory
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.store.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.store.db, view), **k)
def refinput_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
additional_fields = widget_spec.get('additional_fields',[])
return RefInput(self.store.db, additional_fields=additional_fields, **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields',[])
return SeqRefTextArea(self.store.db, view, additional_fields=additional_fields, **k)
def selectchoice_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
config = self.store.config.types['facet_%s'%widgetSpec['facet']]
view = config['metadata']['views']['all']
return SelectChoiceFacetTreeCouchDB(options=options(self.store.db, view), **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
config = self.store.config.types['facet_%s'%widgetSpec['facet']]
view = config['metadata']['views']['all']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.store.db, view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, store=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(store)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
5f49971247efbddaf41416ef0af529e8beb0053a
|
Support better customisation of the TextArea widget.
|
diff --git a/couchish/formish_jsonbuilder.py b/couchish/formish_jsonbuilder.py
index 9c686e6..6d23f62 100644
--- a/couchish/formish_jsonbuilder.py
+++ b/couchish/formish_jsonbuilder.py
@@ -1,248 +1,251 @@
from couchish.schemaish_jsonbuilder import build as schema_build, schemaish_type_registry, strip_stars, split_prefix
import formish
from formish import filestore
from validatish import validator
class FormishWidgetRegistry(object):
"""
A registry for mapping a widget specifiction to a Formish widget factory,
including sensible user-friendly defaults instead of the "developer"
versions Formish defaults to.
"""
def __init__(self):
self.registry = {
'Input': self.input_factory,
'Hidden': self.hidden_factory,
'TextArea': self.textarea_factory,
'SelectChoice': self.selectchoice_factory,
'SelectWithOtherChoice': self.selectwithotherchoice_factory,
'Checkbox': self.checkbox_factory,
'CheckboxMultiChoice': self.checkboxmultichoice_factory,
'RadioChoice': self.radiochoice_factory,
'DateParts': self.dateparts_factory,
}
self.defaults = {
'Date': self.dateparts_factory,
'String': self.input_factory,
'Integer': self.input_factory,
'File': self.fileupload_factory,
'Boolean': self.checkbox_factory,
}
def make_formish_widget(self,item):
"""
Create and return a Formish widget factory for the item type and widget
specifiction.
If widget_spec is provided then it is used to locate/create and return a
widget factory.
If widget_spec is None then either a user-friendly default for the
item_type is returned or it's left to Formish to decide.
The widget_spec dictionary must contain a 'type' key, as well as any
other information needed to build the widget.
Parameters:
item_type: the type of the value (string)
widget_spec: a dictionary containing a widget specification
"""
widget_spec = item.get('widget')
item_type = item.get('type')
# If there is a widget spec then that takes precedence
k = {}
if widget_spec:
if 'css_class' in widget_spec:
k['css_class'] = widget_spec['css_class']
if 'type' in widget_spec:
return self.registry[widget_spec['type']](item, k)
# No widget spec so see if there's a user-friendly default for the data type
default = self.defaults.get(item_type)
if default is not None:
return default(item, k)
# OK, so leave it for Formish to decide then
return None
def input_factory(self, spec, k):
"""
TextInput widget factory.
Specification attributes:
None
"""
return formish.Input(**k)
def hidden_factory(self, spec, k):
"""
Hidden widget factory.
Specification attributes:
None
"""
return formish.Hidden(**k)
def textarea_factory(self, spec, k):
"""
TextArea widget factory.
Specification attributes:
None
"""
- return formish.TextArea(**k)
+ widget_spec = dict(spec['widget'])
+ widget_spec.pop('type')
+ widget_spec.update(k)
+ return formish.TextArea(**widget_spec)
def selectchoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.SelectChoice(options=options, **k)
def radiochoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.RadioChoice(options=options, **k)
def selectwithotherchoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of strings
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.SelectWithOtherChoice(options=options, **k)
def checkboxmultichoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.CheckboxMultiChoice(options=options, **k)
def checkbox_factory(self, spec, k):
"""
Checkbox widget factory.
Specification attributes:
None
"""
return formish.Checkbox(**k)
def dateparts_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
None
"""
return formish.DateParts(day_first=True, **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
root_dir = widget_spec.get('root_dir',None)
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
return formish.FileUpload(
filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
**k )
formish_widget_registry = FormishWidgetRegistry()
def expand_definition(pre_expand_definition):
definition = []
for item in pre_expand_definition['fields']:
field = {}
field['name'] = item['name']
field['fullkey'] = strip_stars(item['name'])
field['keyprefix'], field['key'] = split_prefix(field['fullkey'])
field['starkey'] = item['name']
field['title'] = item.get('title')
field['description'] = item.get('description')
field['type'] = item.get('type','String')
if 'default' in item:
field['default'] = item['default']
field['attr'] = item.get('attr')
if item.get('required') is True:
field['validator'] = validator.Required()
else:
field['validator'] = None
field['widget'] = item.get('widget')
definition.append(field)
return definition
def build(definition, name=None, defaults=None, errors=None, action='', widget_registry=formish_widget_registry, type_registry=schemaish_type_registry):
schema = schema_build(definition, type_registry=type_registry)
definition = expand_definition(definition)
form = formish.Form(schema, name=name, defaults=defaults, errors=errors, action_url=action)
for item in definition:
w = widget_registry.make_formish_widget(item)
if w is not None:
form[item['name']].widget = w
if 'default' in item:
form[item['name']].default = item['default']
return form
|
ish/couchish
|
04ecad29523b4b5931d18f444348bb7994c90ba7
|
Fix idiotic error when moving make_ref/make_refs to the session.
|
diff --git a/couchish/store.py b/couchish/store.py
index e52853e..7aba937 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,294 +1,294 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from datetime import datetime
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
    """Couples a CouchDB database with a couchish configuration."""

    def __init__(self, db, config):
        self.db = db
        self.config = config

    def sync_views(self):
        """Push every view defined in the config up to the database."""
        for url, view in self.config.viewdata['views'].items():
            # A view url is "<designdoc>/<name...>"; everything after the
            # first slash is the view name.
            designdoc, _, name = url.partition('/')
            defn = ViewDefinition(designdoc, name, view[0], view[1])
            defn.get_doc(self.db)
            defn.sync(self.db)

    def session(self):
        """
        Create an editing session.
        """
        return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
self._flush_timestamp = None
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view(
'_all_docs',
remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_type(self, type, remove_rows_with_missing_doc=False,
**options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
return self.docs_by_view(
view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_view(self, view, remove_rows_with_missing_doc=False,
**options):
options['include_docs'] = True
results = self.view(view, **options)
docs = (row.doc for row in results.rows)
if remove_rows_with_missing_doc:
docs = (doc for doc in docs if doc is not None)
return docs
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
# We're iterating the sequences multiple time so we might as well just
# turn them into lists and be done with it.
deletions, additions, changes = \
list(deletions), list(additions), list(changes)
# Record ctime and mtime for addited and updated documents.
for doc in additions:
metadata = doc.setdefault('metadata', {})
metadata['ctime'] = metadata['mtime'] = self._flush_timestamp
for doc, _ in changes:
metadata = doc.setdefault('metadata', {})
metadata['mtime'] = self._flush_timestamp
# Record any files that need storing.
file_deletions, file_additions = filehandling._parse_changes_for_files(
session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
# Record the timestamp of the flush, used for all timestamps during the save.
self._flush_timestamp = datetime.utcnow().isoformat()
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
    def make_refs(self, view, ref_keys):
        """
        Build a mapping of ref_keys to refs, where a ref is a dict containing a
        '_ref' item and anything else returned as the view's value.
        """
        def ref_from_row(row):
            # The view's value becomes the body of the ref; the row key
            # identifies which document it refers to.
            ref = row.value
            ref['_ref'] = row.key
            return ref
        rows = self.view(view, keys=ref_keys)
        return dict((row.key, ref_from_row(row)) for row in rows)
def make_ref(self, view, ref_key):
"""
Build a ref (see make_refs) for the row with the given ref_key.
"""
- return make_refs(self, view, [ref_key])[ref_key]
+ return self.make_refs(view, [ref_key])[ref_key]
def _post_flush_hook(self, session, deletions, additions, changes):
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
    # a8n change tracker that leaves File objects untracked: their
    # payloads are handled separately by the filehandling module, not by
    # document change-sets.
    def _track(self, obj, path):
        if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
            return obj
        return super(Tracker, self)._track(obj, path)
class Session(session.Session):
    # couchdbsession Session wired up to use the File-aware Tracker.
    tracker_factory = Tracker
diff --git a/couchish/tests/test_store.py b/couchish/tests/test_store.py
index 9362be3..48b83e6 100644
--- a/couchish/tests/test_store.py
+++ b/couchish/tests/test_store.py
@@ -1,471 +1,483 @@
from __future__ import with_statement
import os.path
import time
import unittest
import couchdb
from couchish import config, errors, store
from couchish.tests import util
def data_filename(filename, namespace=None):
if namespace:
return os.path.join('couchish/tests/data/%s'%namespace, filename)
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
return data_filename('test_couchish_%s.yaml' % type, namespace)
db_name = 'test-couchish'
def strip_id_rev_meta(doc):
    """
    Return a copy of a couch document with '_id', '_rev' and the
    ctime/mtime metadata removed, for easy comparison in tests.

    The input document is left untouched: the nested 'metadata' dict is
    copied before anything is deleted.  (The previous shallow dict(doc)
    copy shared, and therefore mutated, the caller's 'metadata' dict.)
    """
    couchdoc = dict(doc)
    couchdoc.pop('_id')
    couchdoc.pop('_rev')
    # Copy before deleting so the caller's metadata dict is not mutated.
    metadata = dict(couchdoc['metadata'])
    del metadata['ctime']
    del metadata['mtime']
    if metadata:
        couchdoc['metadata'] = metadata
    else:
        # Drop an empty metadata block entirely.
        del couchdoc['metadata']
    return couchdoc
class TestStore(util.TempDatabaseMixin, unittest.TestCase):
    """Basic CouchishStore session behaviour against a temporary database."""

    def setUp(self):
        super(TestStore, self).setUp()
        # Empty config: no types, no views.
        self.store = store.CouchishStore(self.db, config.Config({}, {}))

    def test_session(self):
        # A created doc is persisted once the session is flushed.
        S = self.store.session()
        doc_id = S.create({})
        S.flush()
        assert self.db.get(doc_id)

    def test_with_session(self):
        # The "with" form flushes automatically on clean exit.
        with self.store.session() as S:
            S.create({'_id': 'foo'})
        assert self.db.get('foo')

    def test_flush_again(self):
        # A session can be flushed more than once; later edits win.
        doc_id = self.db.create({'model_type': 'foo'})
        S = self.store.session()
        doc = S.doc_by_id(doc_id)
        doc['num'] = 1
        S.flush()
        doc['num'] = 2
        S.flush()
        assert self.db.get(doc_id)['num'] == 2

    def test_with_session_exc(self):
        # An exception inside "with" resets the session: nothing persisted.
        try:
            with self.store.session() as S:
                doc_id = S.create({'_id': 'foo'})
                bang
        except NameError:
            pass
        else:
            self.fail("Should have raised an exception")
        assert self.db.get('foo') is None
class TestMetadata(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestMetadata, self).setUp()
self.store = store.CouchishStore(self.db, config.Config({}, {}))
def test_create(self):
S = self.store.session()
doc_id = S.create({})
S.flush()
doc = self.db.get(doc_id)
assert doc['metadata']['ctime']
assert doc['metadata']['mtime']
assert doc['metadata']['ctime'] == doc['metadata']['mtime']
def test_create2(self):
S = self.store.session()
doc1_id = S.create({})
time.sleep(.5)
doc2_id = S.create({})
S.flush()
doc1 = self.db.get(doc1_id)
doc2 = self.db.get(doc2_id)
assert doc1['metadata']['ctime'] == doc1['metadata']['ctime']
def test_update(self):
S = self.store.session()
doc_id = S.create({'model_type': 'test'})
S.flush()
doc = S.doc_by_id(doc_id)
doc['foo'] = ['bar']
S.flush()
doc = self.db.get(doc_id)
assert doc['metadata']['ctime']
assert doc['metadata']['mtime']
assert doc['metadata']['mtime'] > doc['metadata']['ctime']
def test_graceful_upgrade(self):
doc_id = self.db.create({'model_type': 'foo'})
S = self.store.session()
doc = S.doc_by_id(doc_id)
doc['foo'] = 'bar'
S.flush()
assert 'ctime' not in doc['metadata']
assert doc['metadata']['mtime']
def test_non_destructive(self):
S = self.store.session()
docid = S.create({'model_type': 'test', 'metadata': {'schema_version': '1.1'}})
S.flush()
doc = S.doc_by_id(docid)
assert doc['metadata']['ctime']
assert doc['metadata']['mtime']
assert doc['metadata']['schema_version'] == '1.1'
doc['foo'] = 'bar'
S.flush()
assert doc['metadata']['ctime']
assert doc['metadata']['mtime']
assert doc['metadata']['schema_version'] == '1.1'
class Test(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
+ def test_make_refs(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ sess.flush()
+ refs = sess.make_refs('customdes/author_name', [matt_id, tim_id])
+ assert refs == {matt_id: {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
+ tim_id: {'_ref': tim_id, 'first_name': 'Tim', 'last_name': 'Parkin'}}
+ ref = sess.make_ref('customdes/author_name', matt_id)
+ assert ref == {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}
def test_simple_reference(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_simple_reference_addingdictionary(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_multiple_changes(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}
def test_doc_by_id_not_found(self):
sess = self.S.session()
self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
class TestDeep(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
type_filename('views','deepref')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):
def test_missing_ref_container(self):
"""
Check references inside non-existant containers.
The flush hook drills into the document hunting for references but it
should check that whatever a reference is inside actually exists first.
"""
cfg = config.Config({
'author': {'fields': [
{'name': 'name'}
]},
'book': {'fields': [
{'name': 'title'},
{'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
{'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
]},
},
[{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
couchish_store = store.CouchishStore(self.db, cfg)
couchish_store.sync_views()
S = couchish_store.session()
author_id = S.create({'model_type': 'author', 'name': 'Matt'})
book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
'author': {'_ref': author_id, 'name': 'Matt'}})
S.flush()
# XXX Shouldn't need to do create a new session to make more changes.
S = couchish_store.session()
author = S.doc_by_id(author_id)
author['name'] = 'Jessica'
S.flush()
class TestRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
type_filename('views','refinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
{'_ref': tim_id, 'last_name': 'Parkin'}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
class TestNestedRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
class TestNestedRefsInNestedSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinnestedseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinnestedseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt','last_name': 'Goodall'}}]}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev_meta(self.db[matt_id])
book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name':'Matt','last_name': 'Woodall'}}]}]}
class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestMissingKeys, self).setUp()
couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
couchish_store.sync_views()
self.session = couchish_store.session()
for i in range(5):
self.session.create({'_id': str(i)})
self.session.flush()
def test_docs_by_id(self):
docs = list(self.session.docs_by_id(['3', '4', '5']))
assert docs[-1] is None
def test_docs_by_view(self):
docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
assert docs[-1] is None
def test_docs_by_id_filtered(self):
docs = list(self.session.docs_by_id(['3', '4', '5'], remove_rows_with_missing_doc=True))
print docs
assert len(docs) == 2
assert None not in docs
def test_docs_by_view_filtered(self):
docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5'], remove_rows_with_missing_doc=True))
assert len(docs) == 2
assert None not in docs
|
ish/couchish
|
24a5d10ceaefbf2de268b222b99f98d9e814a31b
|
Record ctime and mtime timestamps in a 'metadata' block.
|
diff --git a/couchish/filehandling.py b/couchish/filehandling.py
index 29ea0ee..ad3021d 100644
--- a/couchish/filehandling.py
+++ b/couchish/filehandling.py
@@ -1,223 +1,220 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from dottedish import dotted, flatten, dotteddict, api, dottedlist
from couchdbsession import a8n
import base64
import uuid
from schemaish.type import File
from StringIO import StringIO
import shutil
from couchish import jsonutil
def get_attr(prefix, parent=None):
    """
    Combine a prefix (a list of path segments) and a parent (a dotted
    string) into a single dotted attribute path.

    :arg prefix: list of segments (each passed through str()), or None.
    :arg parent: dotted string path, or None.
    :return: dotted path string; '' when both arguments are None/empty.
    """
    if parent is None:
        # Callers sometimes reach here with prefix=None as well (e.g.
        # get_files() defaults prefix to None for additions); previously
        # that raised TypeError.  Treat it as the document root.
        if prefix is None:
            return ''
        return '.'.join(str(segment) for segment in prefix)
    if prefix is None:
        return parent
    segments = [str(segment) for segment in prefix]
    # An empty parent contributes no segments.
    if parent != '':
        segments += parent.split('.')
    return '.'.join(segments)
def get_files(data, original=None, prefix=None):
    """
    Scan `original` and then `data` for schemaish File instances.

    Returns (data, files, inlinefiles, original_files) where:
      - files: separate attachments to store, keyed by dotted attr path
      - inlinefiles: attachments destined for the doc's _attachments
      - original_files: previous file values (for deletion detection)
    Note: the scans mutate File objects inside `data` in place (payload
    attributes are detached by get_file_from_item).
    """
    # scan old data to collect any file refs and then scan new data for file changes
    files = {}
    inlinefiles = {}
    original_files = {}
    get_files_from_original(data, original, files, inlinefiles, original_files, prefix)
    get_files_from_data(data, original, files, inlinefiles, original_files, prefix)
    return data, files, inlinefiles, original_files
def has_unmodified_signature(f):
    """Return True when the file object carries no new payload (f.file is None)."""
    return f.file is None
def dotted_or_emptydict(d):
    """
    Wrap d in a dottedish `dotted` accessor.  None becomes {}, and values
    that dottedish cannot wrap (TypeError) are returned unchanged.
    """
    if d is None:
        return {}
    try:
        wrapped = dotted(d)
    except TypeError:
        return d
    return wrapped
def get_files_from_data(data, original, files, inlinefiles, original_files, prefix):
    """
    Walk `data` (new document state) collecting File instances into
    `files`/`inlinefiles` via get_file_from_item, pairing each with the
    original File at the same dotted key (if any) so unchanged payloads
    can keep their ids.
    """
    if isinstance(data, File):
        # The whole value is a single file field.
        get_file_from_item(data, original, files, inlinefiles, original_files, get_attr(prefix))
        return
    if not isinstance(data, dict) and not isinstance(data, list):
        return
    dd = dotted_or_emptydict(data)
    ddoriginal = dotted_or_emptydict(original)
    if not dd:
        return
    for k,f in flatten(dd):
        if isinstance(f, File):
            # Pair with the original file at the same dotted key, if present.
            if isinstance(ddoriginal.get(k), File):
                of = ddoriginal[k]
            else:
                of = None
            get_file_from_item(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
api.wrap.when_type(a8n.List)(dottedlist.wrap_list)
api.wrap.when_type(a8n.Dictionary)(dotteddict.wrap_dict)
def get_file_from_item(f, of, files, inlinefiles, original_files, fullprefix):
    """
    Detach a File's payload from the document and queue it for attachment
    handling, reusing id/metadata from the original file `of` when the new
    File carries no fresh payload.

    Mutates `f` in place (payload attributes are removed) and records the
    payload under `fullprefix` in either `inlinefiles` or `files`.
    """
    if f.file is None:
        # No new payload: carry the identity and any missing metadata
        # forward from the original file.
        # if we have no original data then we presume the file should remain unchanged
        f.id = of.id
        if f.mimetype is None:
            f.mimetype = of.mimetype
        if f.filename is None:
            f.filename = of.filename
        if not hasattr(f, 'metadata') or f.metadata is None or f.metadata=={}:
            f.metadata = getattr(of, 'metadata', None)
    else:
        # New payload: keep the original id when replacing an existing
        # file, otherwise mint a fresh one.
        if of and hasattr(of,'id'):
            f.id = of.id
        else:
            f.id = uuid.uuid4().hex
        if getattr(f,'inline',False) is True:
            filestore = inlinefiles
        else:
            filestore = files
        if hasattr(f, 'inline'):
            del f.inline
        # add the files for attachment handling and remove the file data from document
        if getattr(f,'b64', None):
            # Payload is already base64 text; queue it as-is.
            filestore[fullprefix] = jsonutil.CouchishFile(f.file, f.filename, f.mimetype, f.id, metadata = f.metadata, b64=True)
            del f.b64
        else:
            # Copy the stream so the queued file is independent of the
            # caller's file handle.
            fh = StringIO()
            shutil.copyfileobj(f.file, fh)
            fh.seek(0)
            filestore[fullprefix] = jsonutil.CouchishFile(fh, f.filename, f.mimetype, f.id, metadata = f.metadata)
        del f.file
def get_file_from_original(f, of, files, inlinefiles, original_files, fullprefix):
    """
    Record the original file `of` when the new value `f` is no longer a
    File (the attachment was removed or replaced by other data), so the
    flush can clean up the stored attachment.
    """
    if not isinstance(f, File):
        original_files[fullprefix] = of
def get_files_from_original(data, original, files, inlinefiles, original_files, prefix):
    """
    Walk `original` (previous document state) and, via
    get_file_from_original, record every File whose counterpart in `data`
    is no longer a File -- candidates for attachment deletion.
    """
    if isinstance(original, File):
        # The whole original value was a single file field.
        get_file_from_original(data, original, files, inlinefiles, original_files, get_attr(prefix))
        return
    if not isinstance(original, dict) and not isinstance(original, list):
        return
    dd = dotted_or_emptydict(data)
    ddoriginal = dotted_or_emptydict(original)
    if not ddoriginal:
        return
    for k, of in flatten(ddoriginal):
        if isinstance(of, File):
            # Look up the new value at the same dotted key.
            f = dd.get(k)
            get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def _parse_changes_for_files(session, deletions, additions, changes):
""" returns deletions, additions """
- additions = list(additions)
- changes = list(changes)
- deletions = list(deletions)
all_separate_files = {}
all_inline_files = {}
for addition in additions:
addition, files, inlinefiles, original_files_notused = get_files(addition)
if files:
all_separate_files.setdefault(addition['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(addition['_id'],{}).update(inlinefiles)
_extract_inline_attachments(addition, inlinefiles)
all_original_files = {}
changes = list(changes)
for n, changeset in enumerate(changes):
d, cs = changeset
cs = list(cs)
for m, c in enumerate(cs):
if c['action'] in ['edit','create','remove']:
c['value'], files, inlinefiles, original_files = get_files(c.get('value'), original=c.get('was'), prefix=c['path'])
cs[m] = c
if files:
all_separate_files.setdefault(d['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(d['_id'],{}).update(inlinefiles)
all_original_files.setdefault(d['_id'], {}).update(original_files)
_extract_inline_attachments(d, inlinefiles)
changes[n] = (d, cs)
return all_original_files, all_separate_files
def _extract_inline_attachments(doc, files):
    """
    Move any attachment data we've found into the doc's '_attachments'
    attribute, base64-encoded as CouchDB expects, stripping the transport
    bookkeeping attributes from each file object.
    """
    for attr, f in files.items():
        if f.b64:
            # Payload is already base64 text; just strip newlines.
            data = f.file.replace('\n', '')
        else:
            # encodestring is the Python 2 API (removed in Python 3.9).
            data = base64.encodestring(f.file.read()).replace('\n','')
        # NOTE(review): when f.b64 is set, f.file looks like a plain string
        # (see get_file_from_item), so close() would raise AttributeError
        # -- confirm CouchishFile's behaviour here.
        f.file.close()
        # Remove bookkeeping attributes before the File is serialised as
        # part of the document.
        del f.file
        del f.b64
        del f.inline
        del f.doc_id
        doc.setdefault('_attachments',{})[f.id] = {'content_type': f.mimetype,'data': data}
def _handle_separate_attachments(session, deletions, additions):
    """
    add attachments that aren't inline and remove any attachments without references

    :arg session: the couchish session (its raw _db is used for attachment
        operations).
    :arg deletions: {doc_id: {attr: file}} of attachments to delete.
    :arg additions: {doc_id: {attr: file}} of attachments to store.
    """
    # XXX This needs to cope with files moving when sequences are re-numbered. We need
    # XXX to talk to matt about what a renumbering like this looks like
    for id, attrfiles in additions.items():
        doc = session.get(id)
        # Only _id/_rev are needed to PUT an attachment.
        stubdoc = {'_id':doc['_id'], '_rev':doc['_rev']}
        for attr, f in attrfiles.items():
            data = ''
            if f.file:
                if f.b64:
                    # Payload stored as base64 text; decode to raw bytes.
                    data = base64.decodestring(f.file)
                else:
                    data = f.file.read()
                    f.file.close()
            session._db.put_attachment(stubdoc, data, filename=f.id, content_type=f.mimetype)
            # Strip transport bookkeeping attributes once stored.
            del f.file
            del f.b64
            del f.inline
            del f.doc_id
    for id, attrfiles in deletions.items():
        # XXX had to use _db because delete attachment freeaked using session version.
        doc = session._db.get(id)
        for attr, f in attrfiles.items():
            session._db.delete_attachment(doc, f.id)
    # NOTE(review): these rebind local names only and have no effect on the
    # caller's dicts -- presumably dead code left over from a refactor.
    additions = {}
    deletions = {}
diff --git a/couchish/store.py b/couchish/store.py
index 36f0f0f..e52853e 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,277 +1,294 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
+from datetime import datetime
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
+ self._flush_timestamp = None
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view(
'_all_docs',
remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_type(self, type, remove_rows_with_missing_doc=False,
**options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
return self.docs_by_view(
view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_view(self, view, remove_rows_with_missing_doc=False,
**options):
options['include_docs'] = True
results = self.view(view, **options)
docs = (row.doc for row in results.rows)
if remove_rows_with_missing_doc:
docs = (doc for doc in docs if doc is not None)
return docs
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
- file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
+ # We're iterating the sequences multiple time so we might as well just
+ # turn them into lists and be done with it.
+ deletions, additions, changes = \
+ list(deletions), list(additions), list(changes)
+ # Record ctime and mtime for addited and updated documents.
+ for doc in additions:
+ metadata = doc.setdefault('metadata', {})
+ metadata['ctime'] = metadata['mtime'] = self._flush_timestamp
+ for doc, _ in changes:
+ metadata = doc.setdefault('metadata', {})
+ metadata['mtime'] = self._flush_timestamp
+ # Record any files that need storing.
+ file_deletions, file_additions = filehandling._parse_changes_for_files(
+ session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
+ # Record the timestamp of the flush, used for all timestamps during the save.
+ self._flush_timestamp = datetime.utcnow().isoformat()
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def make_refs(self, view, ref_keys):
"""
Build a mapping of ref_keys to refs, where a ref is a dict containing a
'_ref' item and anything else returned as the view's value.
"""
def ref_from_row(row):
ref = row.value
ref['_ref'] = row.key
return ref
rows = self.view(view, keys=ref_keys)
return dict((row.key, ref_from_row(row)) for row in rows)
def make_ref(self, view, ref_key):
"""
Build a ref (see make_refs) for the row with the given ref_key.
"""
return make_refs(self, view, [ref_key])[ref_key]
def _post_flush_hook(self, session, deletions, additions, changes):
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
diff --git a/couchish/tests/test_store.py b/couchish/tests/test_store.py
index 111cd27..9362be3 100644
--- a/couchish/tests/test_store.py
+++ b/couchish/tests/test_store.py
@@ -1,404 +1,471 @@
from __future__ import with_statement
-import unittest
import os.path
+import time
+import unittest
import couchdb
from couchish import config, errors, store
from couchish.tests import util
def data_filename(filename, namespace=None):
if namespace:
return os.path.join('couchish/tests/data/%s'%namespace, filename)
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
return data_filename('test_couchish_%s.yaml' % type, namespace)
db_name = 'test-couchish'
-def strip_id_rev(doc):
+def strip_id_rev_meta(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
+ # Clean up the metadata.
+ del couchdoc['metadata']['ctime']
+ del couchdoc['metadata']['mtime']
+ if not couchdoc['metadata']:
+ del couchdoc['metadata']
return couchdoc
class TestStore(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestStore, self).setUp()
self.store = store.CouchishStore(self.db, config.Config({}, {}))
def test_session(self):
S = self.store.session()
doc_id = S.create({})
S.flush()
assert self.db.get(doc_id)
def test_with_session(self):
with self.store.session() as S:
S.create({'_id': 'foo'})
assert self.db.get('foo')
def test_flush_again(self):
doc_id = self.db.create({'model_type': 'foo'})
S = self.store.session()
doc = S.doc_by_id(doc_id)
doc['num'] = 1
S.flush()
doc['num'] = 2
S.flush()
assert self.db.get(doc_id)['num'] == 2
def test_with_session_exc(self):
try:
with self.store.session() as S:
doc_id = S.create({'_id': 'foo'})
bang
except NameError:
pass
else:
self.fail("Should have raised an exception")
assert self.db.get('foo') is None
+class TestMetadata(util.TempDatabaseMixin, unittest.TestCase):
+
+ def setUp(self):
+ super(TestMetadata, self).setUp()
+ self.store = store.CouchishStore(self.db, config.Config({}, {}))
+
+ def test_create(self):
+ S = self.store.session()
+ doc_id = S.create({})
+ S.flush()
+ doc = self.db.get(doc_id)
+ assert doc['metadata']['ctime']
+ assert doc['metadata']['mtime']
+ assert doc['metadata']['ctime'] == doc['metadata']['mtime']
+
+ def test_create2(self):
+ S = self.store.session()
+ doc1_id = S.create({})
+ time.sleep(.5)
+ doc2_id = S.create({})
+ S.flush()
+ doc1 = self.db.get(doc1_id)
+ doc2 = self.db.get(doc2_id)
+ assert doc1['metadata']['ctime'] == doc1['metadata']['ctime']
+
+ def test_update(self):
+ S = self.store.session()
+ doc_id = S.create({'model_type': 'test'})
+ S.flush()
+ doc = S.doc_by_id(doc_id)
+ doc['foo'] = ['bar']
+ S.flush()
+ doc = self.db.get(doc_id)
+ assert doc['metadata']['ctime']
+ assert doc['metadata']['mtime']
+ assert doc['metadata']['mtime'] > doc['metadata']['ctime']
+
+ def test_graceful_upgrade(self):
+ doc_id = self.db.create({'model_type': 'foo'})
+ S = self.store.session()
+ doc = S.doc_by_id(doc_id)
+ doc['foo'] = 'bar'
+ S.flush()
+ assert 'ctime' not in doc['metadata']
+ assert doc['metadata']['mtime']
+
+ def test_non_destructive(self):
+ S = self.store.session()
+ docid = S.create({'model_type': 'test', 'metadata': {'schema_version': '1.1'}})
+ S.flush()
+ doc = S.doc_by_id(docid)
+ assert doc['metadata']['ctime']
+ assert doc['metadata']['mtime']
+ assert doc['metadata']['schema_version'] == '1.1'
+ doc['foo'] = 'bar'
+ S.flush()
+ assert doc['metadata']['ctime']
+ assert doc['metadata']['mtime']
+ assert doc['metadata']['schema_version'] == '1.1'
+
+
class Test(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
def test_simple_reference(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_simple_reference_addingdictionary(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_multiple_changes(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}
def test_doc_by_id_not_found(self):
sess = self.S.session()
self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
class TestDeep(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
type_filename('views','deepref')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):
def test_missing_ref_container(self):
"""
Check references inside non-existant containers.
The flush hook drills into the document hunting for references but it
should check that whatever a reference is inside actually exists first.
"""
cfg = config.Config({
'author': {'fields': [
{'name': 'name'}
]},
'book': {'fields': [
{'name': 'title'},
{'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
{'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
]},
},
[{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
couchish_store = store.CouchishStore(self.db, cfg)
couchish_store.sync_views()
S = couchish_store.session()
author_id = S.create({'model_type': 'author', 'name': 'Matt'})
book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
'author': {'_ref': author_id, 'name': 'Matt'}})
S.flush()
# XXX Shouldn't need to do create a new session to make more changes.
S = couchish_store.session()
author = S.doc_by_id(author_id)
author['name'] = 'Jessica'
S.flush()
class TestRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
type_filename('views','refinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
{'_ref': tim_id, 'last_name': 'Parkin'}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
class TestNestedRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
class TestNestedRefsInNestedSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinnestedseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinnestedseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt','last_name': 'Goodall'}}]}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
+ matt = strip_id_rev_meta(self.db[matt_id])
+ book = strip_id_rev_meta(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name':'Matt','last_name': 'Woodall'}}]}]}
class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestMissingKeys, self).setUp()
couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
couchish_store.sync_views()
self.session = couchish_store.session()
for i in range(5):
self.session.create({'_id': str(i)})
self.session.flush()
def test_docs_by_id(self):
docs = list(self.session.docs_by_id(['3', '4', '5']))
assert docs[-1] is None
def test_docs_by_view(self):
docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
assert docs[-1] is None
def test_docs_by_id_filtered(self):
docs = list(self.session.docs_by_id(['3', '4', '5'], remove_rows_with_missing_doc=True))
print docs
assert len(docs) == 2
assert None not in docs
def test_docs_by_view_filtered(self):
docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5'], remove_rows_with_missing_doc=True))
assert len(docs) == 2
assert None not in docs
|
ish/couchish
|
ca1089c1382e580057011d9f82ffa5a15114918b
|
Move all store tests into a single test module.
|
diff --git a/couchish/tests/test_couchish_store.py b/couchish/tests/test_couchish_store.py
deleted file mode 100644
index 2689534..0000000
--- a/couchish/tests/test_couchish_store.py
+++ /dev/null
@@ -1,365 +0,0 @@
-from __future__ import with_statement
-import unittest
-import os.path
-import couchdb
-from couchish import config, errors, store
-from couchish.tests import util
-
-def data_filename(filename, namespace=None):
- if namespace:
- return os.path.join('couchish/tests/data/%s'%namespace, filename)
- return os.path.join('couchish/tests/data', filename)
-
-def type_filename(type,namespace=None):
- return data_filename('test_couchish_%s.yaml' % type, namespace)
-
-db_name = 'test-couchish'
-
-def strip_id_rev(doc):
- couchdoc = dict(doc)
- couchdoc.pop('_id')
- couchdoc.pop('_rev')
- return couchdoc
-
-
-class Test(unittest.TestCase):
-
- def setUp(self):
- server = couchdb.Server()
- if db_name in server:
- del server[db_name]
- self.db = server.create(db_name)
- self.S = store.CouchishStore(self.db, config.Config.from_yaml(
- dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
- data_filename('test_couchish_views.yaml')
- ))
- self.S.sync_views()
-
-
- def test_simple_reference(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
- tim_id = sess.create(tim)
- book = {'model_type': 'book', 'title': 'Title',
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
- 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title',
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
- 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
-
- def test_simple_reference_addingdictionary(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
- tim_id = sess.create(tim)
- book = {'model_type': 'book', 'title': 'Title',
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
- 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
- assert book == {'model_type': 'book', 'title': 'Title',
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
- 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
-
- def test_multiple_changes(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- book = {'model_type': 'book', 'title': 'Title',
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
- 'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title',
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
- 'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}
-
- def test_doc_by_id_not_found(self):
- sess = self.S.session()
- self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
-
-
-class TestDeep(unittest.TestCase):
-
- def setUp(self):
- server = couchdb.Server()
- if db_name in server:
- del server[db_name]
- self.db = server.create(db_name)
- self.S = store.CouchishStore(self.db, config.Config.from_yaml(
- dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
- type_filename('views','deepref')
- ))
- self.S.sync_views()
-
- def test_simple(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
- tim_id = sess.create(tim)
- book = {'model_type': 'book', 'title': 'Title', 'metadata': {
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
- 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
- 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
- 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
-
-
-class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):
-
- def test_missing_ref_container(self):
- """
- Check references inside non-existant containers.
-
- The flush hook drills into the document hunting for references but it
- should check that whatever a reference is inside actually exists first.
- """
- cfg = config.Config({
- 'author': {'fields': [
- {'name': 'name'}
- ]},
- 'book': {'fields': [
- {'name': 'title'},
- {'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
- {'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
- ]},
- },
- [{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
- couchish_store = store.CouchishStore(self.db, cfg)
- couchish_store.sync_views()
- S = couchish_store.session()
- author_id = S.create({'model_type': 'author', 'name': 'Matt'})
- book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
- 'author': {'_ref': author_id, 'name': 'Matt'}})
- S.flush()
- # XXX Shouldn't need to do create a new session to make more changes.
- S = couchish_store.session()
- author = S.doc_by_id(author_id)
- author['name'] = 'Jessica'
- S.flush()
-
-
-class TestRefsInSequences(unittest.TestCase):
-
-
- def setUp(self):
- server = couchdb.Server()
- if db_name in server:
- del server[db_name]
- self.db = server.create(db_name)
- self.S = store.CouchishStore(self.db, config.Config.from_yaml(
- dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
- type_filename('views','refinseq')
- ))
- self.S.sync_views()
-
- def test_simple(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
- tim_id = sess.create(tim)
- book = {'model_type': 'book', 'title': 'Title', 'authors':[
- {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
- {'_ref': tim_id, 'last_name': 'Parkin'}]}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
-
-
-class TestNestedRefsInSequences(unittest.TestCase):
-
-
- def setUp(self):
- server = couchdb.Server()
- if db_name in server:
- del server[db_name]
- self.db = server.create(db_name)
- self.S = store.CouchishStore(self.db, config.Config.from_yaml(
- dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
- type_filename('views','nestedrefinseq')
- ))
- self.S.sync_views()
-
- def test_simple(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
- tim_id = sess.create(tim)
- book = {'model_type': 'book', 'title': 'Title', 'authors':[
- {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
- {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
-
- def test_twoentries(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- book = {'model_type': 'book', 'title': 'Title', 'authors':[
- {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
- {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
-
-class TestNestedRefsInNestedSequences(unittest.TestCase):
-
-
- def setUp(self):
- server = couchdb.Server()
- if db_name in server:
- del server[db_name]
- self.db = server.create(db_name)
- self.S = store.CouchishStore(self.db, config.Config.from_yaml(
- dict((name,type_filename(name,'nestedrefinnestedseq')) for name in ['book', 'author']),
- type_filename('views','nestedrefinnestedseq')
- ))
- self.S.sync_views()
-
- def test_simple(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
- tim_id = sess.create(tim)
- book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
- {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
- {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
-
- def test_twoentries(self):
- sess = self.S.session()
- matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
- matt_id = sess.create(matt)
- book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
- {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
- {'nested': {'_ref': matt_id, 'first_name': 'Matt','last_name': 'Goodall'}}]}]}
- book_id = sess.create(book)
- sess.flush()
-
- sess = self.S.session()
- matt = sess.doc_by_id(matt_id)
- matt['last_name'] = 'Woodall'
- sess.flush()
-
- matt = strip_id_rev(self.db[matt_id])
- book = strip_id_rev(self.db[book_id])
- assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
- assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name':'Matt','last_name': 'Woodall'}}]}]}
-
-
-class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
-
- def setUp(self):
- super(TestMissingKeys, self).setUp()
- couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
- couchish_store.sync_views()
- self.session = couchish_store.session()
- for i in range(5):
- self.session.create({'_id': str(i)})
- self.session.flush()
-
- def test_docs_by_id(self):
- docs = list(self.session.docs_by_id(['3', '4', '5']))
- assert docs[-1] is None
-
- def test_docs_by_view(self):
- docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
- assert docs[-1] is None
-
- def test_docs_by_id_filtered(self):
- docs = list(self.session.docs_by_id(['3', '4', '5'], remove_rows_with_missing_doc=True))
- print docs
- assert len(docs) == 2
- assert None not in docs
-
- def test_docs_by_view_filtered(self):
- docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5'], remove_rows_with_missing_doc=True))
- assert len(docs) == 2
- assert None not in docs
-
diff --git a/couchish/tests/test_store.py b/couchish/tests/test_store.py
index e468eec..111cd27 100644
--- a/couchish/tests/test_store.py
+++ b/couchish/tests/test_store.py
@@ -1,50 +1,404 @@
from __future__ import with_statement
-
import unittest
-
-from couchish import config, store
+import os.path
+import couchdb
+from couchish import config, errors, store
from couchish.tests import util
+def data_filename(filename, namespace=None):
+ if namespace:
+ return os.path.join('couchish/tests/data/%s'%namespace, filename)
+ return os.path.join('couchish/tests/data', filename)
+
+def type_filename(type,namespace=None):
+ return data_filename('test_couchish_%s.yaml' % type, namespace)
+
+db_name = 'test-couchish'
+
+def strip_id_rev(doc):
+ couchdoc = dict(doc)
+ couchdoc.pop('_id')
+ couchdoc.pop('_rev')
+ return couchdoc
+
class TestStore(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestStore, self).setUp()
self.store = store.CouchishStore(self.db, config.Config({}, {}))
def test_session(self):
S = self.store.session()
doc_id = S.create({})
S.flush()
assert self.db.get(doc_id)
def test_with_session(self):
with self.store.session() as S:
S.create({'_id': 'foo'})
assert self.db.get('foo')
def test_flush_again(self):
doc_id = self.db.create({'model_type': 'foo'})
S = self.store.session()
doc = S.doc_by_id(doc_id)
doc['num'] = 1
S.flush()
doc['num'] = 2
S.flush()
assert self.db.get(doc_id)['num'] == 2
def test_with_session_exc(self):
try:
with self.store.session() as S:
doc_id = S.create({'_id': 'foo'})
bang
except NameError:
pass
else:
self.fail("Should have raised an exception")
assert self.db.get('foo') is None
-if __name__ == '__main__':
- unittest.main()
+class Test(unittest.TestCase):
+
+ def setUp(self):
+ server = couchdb.Server()
+ if db_name in server:
+ del server[db_name]
+ self.db = server.create(db_name)
+ self.S = store.CouchishStore(self.db, config.Config.from_yaml(
+ dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
+ data_filename('test_couchish_views.yaml')
+ ))
+ self.S.sync_views()
+
+
+ def test_simple_reference(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ book = {'model_type': 'book', 'title': 'Title',
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
+ 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title',
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
+ 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
+
+ def test_simple_reference_addingdictionary(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ book = {'model_type': 'book', 'title': 'Title',
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
+ 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
+ assert book == {'model_type': 'book', 'title': 'Title',
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
+ 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
+
+ def test_multiple_changes(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ book = {'model_type': 'book', 'title': 'Title',
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
+ 'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title',
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
+ 'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}
+
+ def test_doc_by_id_not_found(self):
+ sess = self.S.session()
+ self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
+
+
+class TestDeep(unittest.TestCase):
+
+ def setUp(self):
+ server = couchdb.Server()
+ if db_name in server:
+ del server[db_name]
+ self.db = server.create(db_name)
+ self.S = store.CouchishStore(self.db, config.Config.from_yaml(
+ dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
+ type_filename('views','deepref')
+ ))
+ self.S.sync_views()
+
+ def test_simple(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ book = {'model_type': 'book', 'title': 'Title', 'metadata': {
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
+ 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
+ 'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
+ 'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
+
+
+class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):
+
+ def test_missing_ref_container(self):
+ """
+ Check references inside non-existant containers.
+
+ The flush hook drills into the document hunting for references but it
+ should check that whatever a reference is inside actually exists first.
+ """
+ cfg = config.Config({
+ 'author': {'fields': [
+ {'name': 'name'}
+ ]},
+ 'book': {'fields': [
+ {'name': 'title'},
+ {'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
+ {'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
+ ]},
+ },
+ [{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
+ couchish_store = store.CouchishStore(self.db, cfg)
+ couchish_store.sync_views()
+ S = couchish_store.session()
+ author_id = S.create({'model_type': 'author', 'name': 'Matt'})
+ book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
+ 'author': {'_ref': author_id, 'name': 'Matt'}})
+ S.flush()
+ # XXX Shouldn't need to do create a new session to make more changes.
+ S = couchish_store.session()
+ author = S.doc_by_id(author_id)
+ author['name'] = 'Jessica'
+ S.flush()
+
+
+class TestRefsInSequences(unittest.TestCase):
+
+
+ def setUp(self):
+ server = couchdb.Server()
+ if db_name in server:
+ del server[db_name]
+ self.db = server.create(db_name)
+ self.S = store.CouchishStore(self.db, config.Config.from_yaml(
+ dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
+ type_filename('views','refinseq')
+ ))
+ self.S.sync_views()
+
+ def test_simple(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ book = {'model_type': 'book', 'title': 'Title', 'authors':[
+ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
+ {'_ref': tim_id, 'last_name': 'Parkin'}]}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
+
+
+class TestNestedRefsInSequences(unittest.TestCase):
+
+
+ def setUp(self):
+ server = couchdb.Server()
+ if db_name in server:
+ del server[db_name]
+ self.db = server.create(db_name)
+ self.S = store.CouchishStore(self.db, config.Config.from_yaml(
+ dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
+ type_filename('views','nestedrefinseq')
+ ))
+ self.S.sync_views()
+
+ def test_simple(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ book = {'model_type': 'book', 'title': 'Title', 'authors':[
+ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
+ {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
+
+ def test_twoentries(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ book = {'model_type': 'book', 'title': 'Title', 'authors':[
+ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
+ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
+
+class TestNestedRefsInNestedSequences(unittest.TestCase):
+
+
+ def setUp(self):
+ server = couchdb.Server()
+ if db_name in server:
+ del server[db_name]
+ self.db = server.create(db_name)
+ self.S = store.CouchishStore(self.db, config.Config.from_yaml(
+ dict((name,type_filename(name,'nestedrefinnestedseq')) for name in ['book', 'author']),
+ type_filename('views','nestedrefinnestedseq')
+ ))
+ self.S.sync_views()
+
+ def test_simple(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
+ tim_id = sess.create(tim)
+ book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
+ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
+ {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
+
+ def test_twoentries(self):
+ sess = self.S.session()
+ matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
+ matt_id = sess.create(matt)
+ book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
+ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
+ {'nested': {'_ref': matt_id, 'first_name': 'Matt','last_name': 'Goodall'}}]}]}
+ book_id = sess.create(book)
+ sess.flush()
+
+ sess = self.S.session()
+ matt = sess.doc_by_id(matt_id)
+ matt['last_name'] = 'Woodall'
+ sess.flush()
+
+ matt = strip_id_rev(self.db[matt_id])
+ book = strip_id_rev(self.db[book_id])
+ assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
+ assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name':'Matt','last_name': 'Woodall'}}]}]}
+
+
+class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
+
+ def setUp(self):
+ super(TestMissingKeys, self).setUp()
+ couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
+ couchish_store.sync_views()
+ self.session = couchish_store.session()
+ for i in range(5):
+ self.session.create({'_id': str(i)})
+ self.session.flush()
+
+ def test_docs_by_id(self):
+ docs = list(self.session.docs_by_id(['3', '4', '5']))
+ assert docs[-1] is None
+
+ def test_docs_by_view(self):
+ docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
+ assert docs[-1] is None
+
+ def test_docs_by_id_filtered(self):
+ docs = list(self.session.docs_by_id(['3', '4', '5'], remove_rows_with_missing_doc=True))
+ print docs
+ assert len(docs) == 2
+ assert None not in docs
+
+ def test_docs_by_view_filtered(self):
+ docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5'], remove_rows_with_missing_doc=True))
+ assert len(docs) == 2
+ assert None not in docs
|
ish/couchish
|
5b49b2b39d7ee0df3ac472da6b0f031bef1c6eb3
|
Add make_ref/make_refs - utility functions to build ref objects from views.
|
diff --git a/couchish/store.py b/couchish/store.py
index a598e91..36f0f0f 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,259 +1,277 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view(
'_all_docs',
remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_type(self, type, remove_rows_with_missing_doc=False,
**options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
return self.docs_by_view(
view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
**options)
def docs_by_view(self, view, remove_rows_with_missing_doc=False,
**options):
options['include_docs'] = True
results = self.view(view, **options)
docs = (row.doc for row in results.rows)
if remove_rows_with_missing_doc:
docs = (doc for doc in docs if doc is not None)
return docs
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
+ def make_refs(self, view, ref_keys):
+ """
+ Build a mapping of ref_keys to refs, where a ref is a dict containing a
+ '_ref' item and anything else returned as the view's value.
+ """
+ def ref_from_row(row):
+ ref = row.value
+ ref['_ref'] = row.key
+ return ref
+ rows = self.view(view, keys=ref_keys)
+ return dict((row.key, ref_from_row(row)) for row in rows)
+
+ def make_ref(self, view, ref_key):
+ """
+ Build a ref (see make_refs) for the row with the given ref_key.
+ """
+ return make_refs(self, view, [ref_key])[ref_key]
+
def _post_flush_hook(self, session, deletions, additions, changes):
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
68796de18024e44832e600adc308b230417526cb
|
Add options to remove rows with no, i.e. deleted, doc from docs_by_XXX methods.
|
diff --git a/couchish/store.py b/couchish/store.py
index 8f976bd..a598e91 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,249 +1,259 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
- def docs_by_id(self, ids, **options):
+ def docs_by_id(self, ids, remove_rows_with_missing_doc=False, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
- return self.docs_by_view('_all_docs', **options)
+ return self.docs_by_view(
+ '_all_docs',
+ remove_rows_with_missing_doc=remove_rows_with_missing_doc,
+ **options)
- def docs_by_type(self, type, **options):
+ def docs_by_type(self, type, remove_rows_with_missing_doc=False,
+ **options):
"""
Generate the sequence of docs of a given type.
"""
config = self.store.config.types[type]
view = config.get('metadata', {}).get('views', {}).get('all')
if not view:
view = '%s/all'%type
- return self.docs_by_view(view, **options)
+ return self.docs_by_view(
+ view, remove_rows_with_missing_doc=remove_rows_with_missing_doc,
+ **options)
- def docs_by_view(self, view, **options):
+ def docs_by_view(self, view, remove_rows_with_missing_doc=False,
+ **options):
options['include_docs'] = True
results = self.view(view, **options)
- return (row.doc for row in results.rows)
+ docs = (row.doc for row in results.rows)
+ if remove_rows_with_missing_doc:
+ docs = (doc for doc in docs if doc is not None)
+ return docs
    def view(self, view, **options):
        """
        Call a view by name, passing any CouchDB query options through,
        and return the result object.
        """
        return self.session.view(view, **options)
    def _pre_flush_hook(self, session, deletions, additions, changes):
        """
        Session pre-flush hook: scan the pending deletions/additions/changes
        for file attachments and queue them for processing after the flush.
        """
        file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
        self.file_deletions.update(file_deletions)
        self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
    def reset(self):
        """
        Reset the session, forgetting everything it knows.
        """
        self.session.reset()
    def _post_flush_hook(self, session, deletions, additions, changes):
        """
        Session post-flush hook: propagate edits to documents into every
        document that holds a denormalised reference to them.

        For each changed document it works out which configured "ref" views
        are affected by the edited attributes, finds the referring documents
        via the reversed view, and patches the cached reference data in
        place via _find_and_match_nested_item.
        """
        # Sentinel to indicate we haven't retrieved the ref view data yet.
        NO_REF_DATA = object()
        # Easy access to the config.
        views_by_viewname = self.store.config.viewdata['views_by_viewname']
        viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
        attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
        # Updates any documents that refer to documents that have been changed.
        for doc, actions in changes:
            doc_type = doc['model_type']
            # Edited attributes, qualified by type, e.g. "author.last_name".
            edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
                         for action in actions if action['action'] == 'edit')
            # Build a set of all the views affected by the changed attributes.
            views = set()
            for attr in edited:
                views.update(viewnames_by_attribute.get(attr, []))
            for view in views:
                # Lazy load the ref_data.
                ref_data = NO_REF_DATA
                attrs_by_type = attributes_by_viewname[view]
                view_url = views_by_viewname[view]['url']
                # XXX should build a full key here, but let's assume just the
                # id for a moment.
                ref_key = doc['_id']
                # The "-rev" view maps referenced id -> referring documents.
                for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
                    # Fetch the ref data for this ref view, if we don't already
                    # have it.
                    if ref_data is NO_REF_DATA:
                        ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
                        # Normalise non-dict view values into a dict so the
                        # '_ref' marker can always be carried alongside.
                        if isinstance(ref_data, dict):
                            ref_data['_ref'] = ref_key
                        else:
                            ref_data = {'_ref': ref_key, 'data': ref_data}
                    for attr in attrs_by_type[ref_doc['model_type']]:
                        # Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
                        # e.g. we may have authors*. or metadata*.authors*
                        self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
    def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
        """
        Walk ``segments`` (an attribute path, where a trailing '*' on a
        segment marks a sequence) down into ``ref_doc``; at the end of the
        path, update any reference dict whose '_ref' matches
        ``ref_data['_ref']`` with the fresh ``ref_data``.
        """
        # Initialise or copy the prefix list, because we're about to change it.
        # NOTE(review): prefix is maintained and passed down but never read
        # here — presumably kept for debugging/future use.
        if prefix is None:
            prefix = []
        else:
            prefix = list(prefix)
        if segments == []:
            # End of the path: patch the reference in place if it points at
            # the changed document.
            if ref_doc['_ref'] == ref_data['_ref']:
                ref_doc.update(ref_data)
        else:
            current, segments = segments[0], segments[1:]
            # A '*' suffix means this level is a sequence of items.
            if current.endswith('*'):
                is_seq = True
            else:
                is_seq = False
            current = current.replace('*','')
            prefix.append(current)
            current_ref = ref_doc.get(current)
            if current_ref is None:
                # The container is absent in this document; nothing to update.
                return
            if is_seq:
                for ref_doc_ref in current_ref:
                    self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
            else:
                self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
    # Change tracker that leaves file objects untracked: files are handled
    # separately by the attachment machinery, so wrapping them for change
    # detection is unnecessary.
    def _track(self, obj, path):
        if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
            return obj
        return super(Tracker, self)._track(obj, path)
class Session(session.Session):
    # couchdbsession Session specialised to use the file-aware Tracker above.
    tracker_factory = Tracker
diff --git a/couchish/tests/test_couchish_store.py b/couchish/tests/test_couchish_store.py
index 689351e..2689534 100644
--- a/couchish/tests/test_couchish_store.py
+++ b/couchish/tests/test_couchish_store.py
@@ -1,354 +1,365 @@
from __future__ import with_statement
import unittest
import os.path
import couchdb
from couchish import config, errors, store
from couchish.tests import util
def data_filename(filename, namespace=None):
if namespace:
return os.path.join('couchish/tests/data/%s'%namespace, filename)
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
return data_filename('test_couchish_%s.yaml' % type, namespace)
db_name = 'test-couchish'
def strip_id_rev(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
return couchdoc
class Test(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
def test_simple_reference(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_simple_reference_addingdictionary(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_multiple_changes(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}
def test_doc_by_id_not_found(self):
sess = self.S.session()
self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
class TestDeep(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
type_filename('views','deepref')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):
def test_missing_ref_container(self):
"""
Check references inside non-existant containers.
The flush hook drills into the document hunting for references but it
should check that whatever a reference is inside actually exists first.
"""
cfg = config.Config({
'author': {'fields': [
{'name': 'name'}
]},
'book': {'fields': [
{'name': 'title'},
{'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
{'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
]},
},
[{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
couchish_store = store.CouchishStore(self.db, cfg)
couchish_store.sync_views()
S = couchish_store.session()
author_id = S.create({'model_type': 'author', 'name': 'Matt'})
book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
'author': {'_ref': author_id, 'name': 'Matt'}})
S.flush()
# XXX Shouldn't need to do create a new session to make more changes.
S = couchish_store.session()
author = S.doc_by_id(author_id)
author['name'] = 'Jessica'
S.flush()
class TestRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
type_filename('views','refinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
{'_ref': tim_id, 'last_name': 'Parkin'}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
class TestNestedRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
class TestNestedRefsInNestedSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinnestedseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinnestedseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'people':[ {'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt','last_name': 'Goodall'}}]}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name':'Matt','last_name': 'Woodall'}}]}]}
class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
def setUp(self):
super(TestMissingKeys, self).setUp()
couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
couchish_store.sync_views()
self.session = couchish_store.session()
for i in range(5):
self.session.create({'_id': str(i)})
self.session.flush()
def test_docs_by_id(self):
docs = list(self.session.docs_by_id(['3', '4', '5']))
assert docs[-1] is None
def test_docs_by_view(self):
docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
assert docs[-1] is None
+ def test_docs_by_id_filtered(self):
+ docs = list(self.session.docs_by_id(['3', '4', '5'], remove_rows_with_missing_doc=True))
+ print docs
+ assert len(docs) == 2
+ assert None not in docs
+
+ def test_docs_by_view_filtered(self):
+ docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5'], remove_rows_with_missing_doc=True))
+ assert len(docs) == 2
+ assert None not in docs
+
|
ish/couchish
|
ddbb9d0f706db66c22d86f25f8ee60fa1afe728c
|
added defaults for all views
|
diff --git a/couchish/config.py b/couchish/config.py
index 2408302..8a93f8b 100644
--- a/couchish/config.py
+++ b/couchish/config.py
@@ -1,25 +1,28 @@
"""
Couchish configuration.
"""
from couchish.couchish_jsonbuilder import get_views
class Config(object):
def __init__(self, types, views):
self.types = types
self.views = views
self.viewdata = get_views(types, views)
@classmethod
def from_yaml(cls, types, views):
"""
Load config from a set of YAML config files.
"""
import yaml
types = dict((name,yaml.load(file(filename)))
for (name, filename) in types.iteritems())
+ for name, value in types.items():
+ if not value.get('metadata', {}).get('views', {}).get('all'):
+ value.setdefault('metadata', {}).setdefault('views',{})['all'] = '%s/all'%name
views = yaml.load(file(views))
return cls(types, views)
|
ish/couchish
|
35237b9f381e77446ac90ee503b07016aaa34e6a
|
Various changes to make views more configurable.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 3a6f92b..10a80dc 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,534 +1,534 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, field):
if field.value == ['']:
v = self.empty
else:
v = field.value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, field, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, field, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, field):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, field=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
def to_request_data(self, field, data):
if data is None:
return [None]
return [data['path']]
def from_request_data(self, field, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
def get_none_option_value(self, field):
return self.none_option[0]
def get_options(self, field):
return self.options
def selected(self, option, field):
if field.value is not None and option[0] == field.value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
default_value = []
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, field):
if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
def from_request_data(self, field, data):
data = data or []
return [self.full_options[item] for item in data]
class RefInput(formish.Input):
"""
Simple text input field for entering a reference to another object.
"""
type = "RefInput"
def __init__(self, db, **k):
self.db = db
self.additional_fields = k.pop('additional_fields', [])
formish.Input.__init__(self, **k)
def to_request_data(self, field, data):
if data is None:
return ['']
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(data.get(attr, '') for attr in additional_fields)]
def from_request_data(self, field, request_data):
data = request_data[0].strip()
# Extract the id from the content.
id = data.split('|', 1)[0]
# Return default if nothing entered.
if not id:
return self.empty
# Convert the id into a ref and return.
row = iter(self.db.view(field.attr.refersto, key=id)).next()
ref = row.value
ref.update({'_ref': row.key})
return ref
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, field, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, field, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
- def __init__(self, db=None):
- self.db = db
+ def __init__(self, store):
FormishWidgetRegistry.__init__(self)
+ self.store = store
self.registry['RefInput'] = self.refinput_factory
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
- return SelectChoiceCouchDB(self.db, view, label_template, **k)
+ return SelectChoiceCouchDB(self.store.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
- return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
+ return formish.CheckboxMultiChoiceTree(options=options(self.store.db, view), **k)
def refinput_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
additional_fields = widget_spec.get('additional_fields',[])
- return RefInput(self.db, additional_fields=additional_fields, **k)
+ return RefInput(self.store.db, additional_fields=additional_fields, **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields',[])
- return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
+ return SeqRefTextArea(self.store.db, view, additional_fields=additional_fields, **k)
def selectchoice_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
- view = 'facet_%s/all'%widgetSpec['facet']
-
- return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
+ config = self.store.config.types['facet_%s'%widgetSpec['facet']]
+ view = config['metadata']['views']['all']
+ return SelectChoiceFacetTreeCouchDB(options=options(self.store.db, view), **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
- view = 'facet_%s/all'%widgetSpec['facet']
-
- return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
+ config = self.store.config.types['facet_%s'%widgetSpec['facet']]
+ view = config['metadata']['views']['all']
+ return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.store.db, view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
-def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
+def build(definition, store=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
- widget_registry=WidgetRegistry(db)
+ widget_registry=WidgetRegistry(store)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
diff --git a/couchish/store.py b/couchish/store.py
index 990c166..8f976bd 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,245 +1,249 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view('_all_docs', **options)
def docs_by_type(self, type, **options):
"""
Generate the sequence of docs of a given type.
"""
- return self.docs_by_view('%s/all'%type, **options)
+ config = self.store.config.types[type]
+ view = config.get('metadata', {}).get('views', {}).get('all')
+ if not view:
+ view = '%s/all'%type
+ return self.docs_by_view(view, **options)
def docs_by_view(self, view, **options):
options['include_docs'] = True
results = self.view(view, **options)
return (row.doc for row in results.rows)
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def _post_flush_hook(self, session, deletions, additions, changes):
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
6c1b3595c1b52bd85183d4b2ad39cd405e645a5b
|
Optionally add a file-like to File instances in documents that can be read from as long as the object is still close to the session.
|
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index ac43e4b..132b586 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,100 +1,135 @@
+from cStringIO import StringIO
from jsonish import pythonjson
from schemaish.type import File
import base64
from dottedish import flatten, dotted
+
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
+class AttachmentFileLike(object):
+ """
+ A lazy-loading file-like object that reads the attachment via the session
+ on first call to read().
+
+ This object *must* stay close to the session instance so should never be
+ serialised by default. Instances are therefore marked un unpicklable,
+ uncopyable, etc to avoid them accidentally "leaking out".
+ """
+
+ def __init__(self, session, doc_id, filename):
+ self.session = session
+ self.doc_id = doc_id
+ self.filename = filename
+ self._file = None
+
+ def read(self, *a, **k):
+ if self._file is None:
+ data = self.session.get_attachment(self.doc_id, self.filename)
+ self._file = StringIO(data)
+ return self._file.read(*a, **k)
+
+ def __getstate__(self):
+ # Unpicklable
+ return False
+
+
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
- if hasattr(obj,'file') and hasattr(obj,'b64'):
- d['base64'] = obj.file
+ # Read the file into the dict, but not if it's an AttachmentFileLike that
+ # only works close to the session.
+ file = getattr(obj, 'file', None)
+ if isinstance(file, AttachmentFileLike):
+ pass
else:
- if hasattr(obj,'file') and obj.file is not None:
- d['base64'] = base64.encodestring(obj.file.read())
+ if file and hasattr(obj,'b64'):
+ d['base64'] = obj.file
+ else:
+ if file and obj.file is not None:
+ d['base64'] = base64.encodestring(file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
-def wrap_encode_to_dict(obj):
+def encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
-def wrap_decode_from_dict(d):
+
+def decode_from_dict(d, session=None):
obj = pythonjson.decode_from_dict(d)
- obj = add_id_and_attr_to_files(obj)
+ obj = add_id_and_attr_to_files(obj, session)
return obj
-encode_to_dict = wrap_encode_to_dict
-decode_from_dict = wrap_decode_from_dict
-
-def add_id_and_attr_to_files(data):
+def add_id_and_attr_to_files(data, session=None):
if not isinstance(data, dict):
return data
dd = dotted(data)
for k,f in flatten(data):
if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
+ if session:
+ f.file = AttachmentFileLike(session, dd['_id'], f.id)
f.doc_id = dd['_id']
f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
f.doc_id = dd[subpath]['_id']
f.rev = dd[subpath]['_rev']
-
return data
+
dumps = pythonjson.dumps
loads = pythonjson.loads
diff --git a/couchish/store.py b/couchish/store.py
index 5bfcd77..990c166 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,245 +1,245 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
- decode_doc=jsonutil.decode_from_dict)
+ decode_doc=lambda d: jsonutil.decode_from_dict(d, self))
self.file_additions = {}
self.file_deletions = {}
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
return self.docs_by_view('_all_docs', **options)
def docs_by_type(self, type, **options):
"""
Generate the sequence of docs of a given type.
"""
return self.docs_by_view('%s/all'%type, **options)
def docs_by_view(self, view, **options):
options['include_docs'] = True
results = self.view(view, **options)
return (row.doc for row in results.rows)
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def _post_flush_hook(self, session, deletions, additions, changes):
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
14955d32d7a4a2899d1564a518df09d023f2c75a
|
Remove debug print.
|
diff --git a/couchish/tests/test_couchish_store_files.py b/couchish/tests/test_couchish_store_files.py
index 7fa6578..218b782 100644
--- a/couchish/tests/test_couchish_store_files.py
+++ b/couchish/tests/test_couchish_store_files.py
@@ -1,227 +1,226 @@
from __future__ import with_statement
import unittest
import os.path
import couchdb
from couchish import config, store
from schemaish.type import File
from couchish import jsonutil
def data_filename(filename):
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
if namespace:
namespace = '_%s'%namespace
else:
namespace = ''
return data_filename('test_couchish%s_%s.yaml' % (namespace,type))
db_name = 'test-couchish'
def strip_id_rev(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
return couchdoc
def matches_supplied(test, supplied):
test = dict((key, value) for (key, value) in test.iteritems() if key in supplied)
return test == supplied
class TestFiles(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
def test_addition_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = jsonutil.CouchishFile(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- print matt['_attachments']
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
def test_change_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = jsonutil.CouchishFile(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
# now lets replace the file
fh = open('couchish/tests/data/files/test-changed.txt','r')
f = jsonutil.CouchishFile(fh, 'test-changed.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = f
fh.close()
new_photo_id = matt.__subject__['photo'].id
sess = self.S.session()
attachment = 'foo'
attachment = sess.session._db.get_attachment(matt_id,new_photo_id)
assert attachment == 'and now it\'s changed\n'
assert new_photo_id == first_created_photo_id
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
def test_remove_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = None
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert not '_attachments' in matt
assert matt['photo'] == None
def test_moving_in_sequence(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'book', 'first_name': 'Matt', 'last_name': 'Goodall','photo':[ f ]}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'][0].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'][0],'id')
fh2 = open('couchish/tests/data/files/test-changed.txt','r')
f2 = File(fh2, 'test2.txt','text/plain')
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'].append( f2 )
fh2.close()
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert matches_supplied(matt['_attachments'][ matt['photo'][0].id ], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
assert matches_supplied(matt['_attachments'][ matt['photo'][1].id ], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
assert len(matt['_attachments']) == 2
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'].pop(0)
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][ matt['photo'][0].id ], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
def test_unchanged_file(self):
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'book', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f }
# create a file
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
# now lets replace the file
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = File(None,'test_ADDEDSUFFIX.txt','text/plain')
new_photo_id = matt.__subject__['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, new_photo_id)
assert new_photo_id == first_created_photo_id
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt = matt.__subject__
assert len(matt['_attachments']) == 1
assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
assert matt['photo'].filename == 'test_ADDEDSUFFIX.txt'
|
ish/couchish
|
e65304751a667408c53c03872f254fd8bd01c6a0
|
Ignore 'revpos' (and anything else of no interest) in attachment stubs.
|
diff --git a/couchish/tests/test_couchish_store_files.py b/couchish/tests/test_couchish_store_files.py
index cdd0bcb..7fa6578 100644
--- a/couchish/tests/test_couchish_store_files.py
+++ b/couchish/tests/test_couchish_store_files.py
@@ -1,221 +1,227 @@
from __future__ import with_statement
import unittest
import os.path
import couchdb
from couchish import config, store
from schemaish.type import File
from couchish import jsonutil
def data_filename(filename):
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
if namespace:
namespace = '_%s'%namespace
else:
namespace = ''
return data_filename('test_couchish%s_%s.yaml' % (namespace,type))
db_name = 'test-couchish'
def strip_id_rev(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
return couchdoc
+def matches_supplied(test, supplied):
+ test = dict((key, value) for (key, value) in test.iteritems() if key in supplied)
+ return test == supplied
+
+
class TestFiles(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
def test_addition_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = jsonutil.CouchishFile(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][matt['photo'].id] == {'stub': True, 'length': 78, 'content_type': 'text/plain'}
+ print matt['_attachments']
+ assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
def test_change_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = jsonutil.CouchishFile(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][matt['photo'].id] == {'stub': True, 'length': 78, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
# now lets replace the file
fh = open('couchish/tests/data/files/test-changed.txt','r')
f = jsonutil.CouchishFile(fh, 'test-changed.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = f
fh.close()
new_photo_id = matt.__subject__['photo'].id
sess = self.S.session()
attachment = 'foo'
attachment = sess.session._db.get_attachment(matt_id,new_photo_id)
assert attachment == 'and now it\'s changed\n'
assert new_photo_id == first_created_photo_id
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][matt['photo'].id] == {'stub': True, 'length': 21, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
def test_remove_file(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][matt['photo'].id] == {'stub': True, 'length': 78, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = None
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert not '_attachments' in matt
assert matt['photo'] == None
def test_moving_in_sequence(self):
# create a file
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'book', 'first_name': 'Matt', 'last_name': 'Goodall','photo':[ f ]}
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'][0].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'][0],'id')
fh2 = open('couchish/tests/data/files/test-changed.txt','r')
f2 = File(fh2, 'test2.txt','text/plain')
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'].append( f2 )
fh2.close()
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
- assert matt['_attachments'][ matt['photo'][0].id ] == {'stub': True, 'length': 78, 'content_type': 'text/plain'}
- assert matt['_attachments'][ matt['photo'][1].id ] == {'stub': True, 'length': 21, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][ matt['photo'][0].id ], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
+ assert matches_supplied(matt['_attachments'][ matt['photo'][1].id ], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
assert len(matt['_attachments']) == 2
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'].pop(0)
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][ matt['photo'][0].id ] == {'stub': True, 'length': 21, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][ matt['photo'][0].id ], {'stub': True, 'length': 21, 'content_type': 'text/plain'})
def test_unchanged_file(self):
fh = open('couchish/tests/data/files/test.txt','r')
f = File(fh, 'test.txt','text/plain')
matt = {'model_type': 'book', 'first_name': 'Matt', 'last_name': 'Goodall','photo': f }
# create a file
with self.S.session() as S:
matt_id = S.create(matt)
fh.close()
# check the attachment
first_created_photo_id = matt['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, first_created_photo_id)
assert attachment == 'this is a test for the file attachment processing test in test_couchish_store\n'
assert hasattr(matt['photo'],'id')
# get the doc back out using couchish and check it's OK
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][matt['photo'].id] == {'stub': True, 'length': 78, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
# now lets replace the file
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt['photo'] = File(None,'test_ADDEDSUFFIX.txt','text/plain')
new_photo_id = matt.__subject__['photo'].id
sess = self.S.session()
attachment = sess.session._db.get_attachment(matt_id, new_photo_id)
assert new_photo_id == first_created_photo_id
with self.S.session() as S:
matt = S.doc_by_id(matt_id)
matt = matt.__subject__
assert len(matt['_attachments']) == 1
- assert matt['_attachments'][matt['photo'].id] == {'stub': True, 'length': 78, 'content_type': 'text/plain'}
+ assert matches_supplied(matt['_attachments'][matt['photo'].id], {'stub': True, 'length': 78, 'content_type': 'text/plain'})
assert matt['photo'].filename == 'test_ADDEDSUFFIX.txt'
|
ish/couchish
|
ff9ab5d73ebd71cc34e045924693f9fe6f688556
|
Add a "RefInput" widget - an input designed to be JS-enhanced as an autocomplete (unless you enjoy entering uids).
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 7f2e2c8..3a6f92b 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,488 +1,534 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, field):
if field.value == ['']:
v = self.empty
else:
v = field.value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, field, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, field, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, field):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, field=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
def to_request_data(self, field, data):
if data is None:
return [None]
return [data['path']]
def from_request_data(self, field, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
def get_none_option_value(self, field):
return self.none_option[0]
def get_options(self, field):
return self.options
def selected(self, option, field):
if field.value is not None and option[0] == field.value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
default_value = []
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, field):
if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
def from_request_data(self, field, data):
data = data or []
return [self.full_options[item] for item in data]
+class RefInput(formish.Input):
+ """
+ Simple text input field for entering a reference to another object.
+ """
+
+ type = "RefInput"
+
+ def __init__(self, db, **k):
+ self.db = db
+ self.additional_fields = k.pop('additional_fields', [])
+ formish.Input.__init__(self, **k)
+
+ def to_request_data(self, field, data):
+ if data is None:
+ return ['']
+ additional_fields = ['_ref'] + self.additional_fields
+ return ['|'.join(data.get(attr, '') for attr in additional_fields)]
+
+ def from_request_data(self, field, request_data):
+ data = request_data[0].strip()
+ # Extract the id from the content.
+ id = data.split('|', 1)[0]
+ # Return default if nothing entered.
+ if not id:
+ return self.empty
+ # Convert the id into a ref and return.
+ row = iter(self.db.view(field.attr.refersto, key=id)).next()
+ ref = row.value
+ ref.update({'_ref': row.key})
+ return ref
+
+
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, field, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, field, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
+ self.registry['RefInput'] = self.refinput_factory
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
-
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
+ def refinput_factory(self, spec, k):
+ if spec is None:
+ spec = {}
+ widget_spec = spec.get('widget')
+ if widget_spec is None:
+ widget_spec = {}
+ attr = spec.get('attr',{}).get('attr',{})
+ if attr is None:
+ refersto = None
+ else:
+ refersto = attr.get('refersto')
+ additional_fields = widget_spec.get('additional_fields',[])
+ return RefInput(self.db, additional_fields=additional_fields, **k)
+
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields',[])
return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
def selectchoice_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
0afa4baf1ba6988323e86cd5066b1c105dc85de5
|
Fix broken test after formish form name became optional.
|
diff --git a/couchish/tests/test_couchish_formish_jsonbuilder.py b/couchish/tests/test_couchish_formish_jsonbuilder.py
index 9ea36a7..05a66e7 100644
--- a/couchish/tests/test_couchish_formish_jsonbuilder.py
+++ b/couchish/tests/test_couchish_formish_jsonbuilder.py
@@ -1,78 +1,78 @@
import unittest
from couchish.couchish_formish_jsonbuilder import build
import yaml
import webob
from BeautifulSoup import BeautifulSoup
import urllib
from dottedish import flatten
DATADIR = 'couchish/tests/data/%s'
class Test(unittest.TestCase):
def request(self, d):
r = webob.Request.blank('http://localhost/')
r.method = 'POST'
r.content_type = 'application/x-www-form-urlencoded'
- kvpairs = [('__formish_form__', 'form')]
+ kvpairs = []
for k,v in flatten(d):
lastsegment = k.split('.')[-1]
try:
int(lastsegment)
k = '.'.join(k.split('.')[:-1])
except ValueError:
pass
for v in d[k]:
kvpairs.append( (k,v) )
r.body = urllib.urlencode(kvpairs)
return r
def assertRoundTrip(self, f, testdata):
r = self.request(f._get_request_data())
d = f.validate(r)
self.assertEquals(d, testdata)
def assertIdHasValue(self, f, id, v):
soup = BeautifulSoup(f())
self.assertEquals(soup.find(id=id)['value'],v)
def assertIdAttrHasValue(self, f, id, attr, v):
soup = BeautifulSoup(f())
s = soup.find(id=id)
assert 'attr' in s
self.assertEquals(s['attr'],v)
def assertIdAttrHasNoValue(self, f, id, attr):
soup = BeautifulSoup(f())
s = soup.find(id=id)
assert 'attr' not in s
def test_simple(self):
book_definition = yaml.load( open(DATADIR%'test_couchish_book.yaml').read() )
dvd_definition = yaml.load( open(DATADIR%'test_couchish_dvd.yaml').read() )
post_definition = yaml.load( open(DATADIR%'test_couchish_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'test_couchish_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'test_couchish_views.yaml').read() )
f = build(author_definition)
- self.assertIdHasValue(f, 'form-first_name', '')
+ self.assertIdHasValue(f, 'first_name', '')
# Test None data
f = build(author_definition)
testdata = {'first_name': None, 'last_name': None}
f.defaults = testdata
- self.assertIdHasValue(f, 'form-first_name', '')
+ self.assertIdHasValue(f, 'first_name', '')
self.assertRoundTrip(f, testdata)
# Test sample data
f = build(author_definition)
testdata = {'first_name': None, 'last_name': 'Goodall'}
f.defaults = testdata
- self.assertIdHasValue(f, 'form-last_name', 'Goodall')
+ self.assertIdHasValue(f, 'last_name', 'Goodall')
self.assertRoundTrip(f, testdata)
def test_fileupload(self):
upload_definition = yaml.load( open(DATADIR%'test_upload.yaml').read() )
f = build(upload_definition)
|
ish/couchish
|
4b780b225a7a762acc5fe7cf792db811add71dd5
|
removed redundant checker on from_request_data
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 7f2e2c8..44225d7 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,488 +1,487 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """
    <select> widget whose options come from a CouchDB view.

    Option values are document ids and labels are built by interpolating
    the view row's value into ``label_template``. The schema value is a
    reference dict carrying the chosen id under '_ref'.
    """
    # Class-level default; instances may override via the 'none_option' kwarg.
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'
    def __init__(self, db, view, label_template, **k):
        """
        :arg db: CouchDB database used to resolve the view
        :arg view: name of the view that yields (id, value) option rows
        :arg label_template: ``%``-format template applied to each row's value
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg sort: if True, sort options alphabetically by label
        :arg css_class: a css class to apply to the field
        """
        # UNSET sentinel distinguishes "not passed" from an explicit None.
        none_option = k.pop('none_option', UNSET)
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        self.label_template = label_template
        # Lazily populated by get_options(); None means "not fetched yet".
        self.options = None
        self.results = None
    def selected(self, option, field):
        # Template helper: returns the selected attribute for the <option>
        # matching the field's current value ('' submits map to self.empty).
        if field.value == ['']:
            v = self.empty
        else:
            v = field.value[0]
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''
    def to_request_data(self, field, data):
        """
        Before the widget is rendered, the data is converted to a string
        format. If the data is None then we return an empty string. The
        sequence is the request data representation.
        """
        if data is None:
            return ['']
        # Only the '_ref' id is round-tripped through the form.
        string_data = data.get('_ref')
        return [string_data]
    def from_request_data(self, field, request_data):
        """
        After the form has been submitted, the request data is converted
        back to the schema type (a reference dict with a '_ref' key).
        """
        # Ensure self.results is populated before looking up the submission.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        if isinstance(result, dict):
            result['_ref'] = string_data
            return result
        else:
            # Scalar view values are wrapped so the reference shape is uniform.
            return {'_ref':string_data, 'data':result}
    def get_none_option_value(self, field):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option
    def get_options(self, field=None):
        """
        Return all of the options for the widget, fetching and caching the
        view rows on first use.
        """
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        # id -> value mapping used by from_request_data().
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            # Python 2 cmp-style sort by label.
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            # The empty value renders as '' so the browser submits a blank.
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """
    Return the dotted id of the parent node for a path split into segments.

    A single-segment path sits at the top level; its parent is the synthetic
    root identified by the empty string.
    """
    parent_segments = segments[:-1]
    if not parent_segments:
        return ''
    return '.'.join(parent_segments)
def mktree(options):
    """
    Build a nested tree from a flat list of ``(id, label)`` options.

    Each id is a dotted path (e.g. ``'a.b.c'``); every node is appended to
    the 'children' list of its parent path. Returns the synthetic root node,
    a dict of the form ``{'data': (id, label), 'children': [...]}``.

    Parents must appear before their children in ``options`` or the parent
    lookup raises KeyError.
    """
    # Dropped the unused `last_segments_len` local from the original.
    root = {'': {'data':('root', 'Root'), 'children':[]} }
    for id, label in options:
        segments = id.split('.')
        parent = get_parent(segments)
        root[id] = {'data': (id, label), 'children':[]}
        root[parent]['children'].append(root[id])
    return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
    """
    Select a single category from a facet using a <select> list.

    Options are (path, category) pairs; each option label is prefixed with
    one '-' per level of nesting so the flat <select> reads as a tree.
    """
    template='field.SelectChoice'
    type = "SelectChoiceFacetTree"
    none_option = ('', '- choose -')

    def __init__(self, options, **k):
        widgets.Widget.__init__(self, **k)

        # "Indent" nodes' labels: one leading '-' per dotted segment of depth.
        def indented_label(key, label):
            depth = len(key.split('.')) - 1
            return '-' * depth + label

        self.options = []
        for (key, value) in options:
            self.options.append((key, indented_label(key, value['data']['label'])))
        # Used to map from chosen item back to category reference.
        self.options_by_path = dict(options)

    ##
    # Request data methods.

    def to_request_data(self, field, data):
        """Serialise a category reference to its path (or [None] when unset)."""
        if data is None:
            return [None]
        return [data['path']]

    def from_request_data(self, field, data):
        """Map the submitted path back to the full category reference."""
        chosen = data[0]
        if chosen == self.none_option[0]:
            return None
        return self.options_by_path[chosen]

    ##
    # Methods required by the SelectChoice template

    def get_none_option_value(self, field):
        return self.none_option[0]

    def get_options(self, field):
        return self.options

    def selected(self, option, field):
        if field.value is None:
            return ''
        if option[0] == field.value[0]:
            return ' selected="selected"'
        return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """
    Checkbox tree widget for selecting multiple facet categories.

    ``full_options`` is a sequence of ``(path, category)`` pairs; the tree
    shown in the template is rebuilt from the dotted paths via ``mktree``.
    """
    template='field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"
    # Unselected fields default to an empty selection rather than None.
    default_value = []
    def __init__(self, full_options, css_class=None):
        # Flat (path, label) pairs drive the checkbox rendering ...
        self.options = [ (key, value['data']['label']) for key, value in full_options]
        # ... while the full category dicts are kept for mapping back.
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        # NOTE(review): calls widgets.Widget.__init__ directly, bypassing
        # formish.CheckboxMultiChoiceTree.__init__ -- presumably intentional;
        # confirm against the formish base class.
        widgets.Widget.__init__(self,css_class=css_class)
    def to_request_data(self, field, data):
        # Serialise the selected categories to their paths.
        if data is None:
            return []
        return [c['path'] for c in data]
    def checked(self, option, field):
        # Template helper: mark the checkbox when its path is selected.
        if field.value is not None and option[0] in field.value:
            return ' checked="checked"'
        else:
            return ''
    def from_request_data(self, field, data):
        - data = data or []
        return [self.full_options[item] for item in data]
class SeqRefTextArea(formish.Input):
    """
    Textarea widget that edits a sequence of references, one per line.

    Each line has the form ``ref|extra|...`` where ``ref`` is the referenced
    document id and the trailing fields come from ``additional_fields``.

    :arg cols: set the cols attr on the textarea element
    :arg rows: set the rows attr on the textarea element
    :arg additional_fields: extra reference attributes shown after the id
    """
    template = 'field.SeqRefTextArea'
    type="SeqRefTextArea"
    def __init__(self, db, view, **k):
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        self.additional_fields = k.pop('additional_fields', [])
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        # Sequence items are newline separated unless the caller overrides.
        # (`in` replaces the deprecated dict.has_key, removed in Python 3.)
        if 'delimiter' not in self.converter_options:
            self.converter_options['delimiter'] = '\n'
    def to_request_data(self, field, data):
        """
        Render each reference dict as a '|'-joined line of its fields,
        '_ref' first.
        """
        if data is None:
            return []
        additional_fields = ['_ref'] + self.additional_fields
        return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
    def from_request_data(self, field, request_data):
        """
        Parse the submitted text back into reference dicts, discarding blank
        lines and resolving the ids through the configured CouchDB view.
        """
        # Extract the list of ids from the content, discarding empty lines.
        rows = request_data[0].splitlines()
        rows = (row.strip() for row in rows)
        rows = (row for row in rows if row)
        rows = (row.split('|', 1) for row in rows)
        ids = [row[0] for row in rows]
        # Return default if nothing entered.
        if not ids:
            return self.empty
        # Convert the ids into refs.
        rows = self.db.view(self.view, keys=ids)
        for row in rows:
            row.value.update({'_ref': row.key})
        return [row.value for row in rows]
    def __repr__(self):
        # Only report attributes that differ from the defaults.
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r'%self.strip)
        if self.converter_options != {'delimiter':','}:
            attributes.append('converter_options=%r'%self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r'%self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r'%self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """
    Formish widget registry extended with the CouchDB-backed widgets.

    Each ``*_factory`` method receives a field spec dict and widget kwargs
    ``k`` and returns a constructed widget instance.
    """
    def __init__(self, db=None):
        # Database handle is shared by every CouchDB-backed widget built here.
        self.db = db
        FormishWidgetRegistry.__init__(self)
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # 'Reference' schema attrs default to the select-choice widget.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory
    def selectchoice_couchdb_factory(self, spec, k):
        # Build a SelectChoiceCouchDB; the view defaults to attr.refersto.
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.db, view, label_template, **k)
    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        # Options come from a view listing (doc id, doc label) pairs.
        widgetSpec = spec.get('widget')
        def options(db, view):
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
    def seqreftextarea_factory(self, spec, k):
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        # NOTE(review): double .get('attr') looks odd -- presumably the
        # refersto lives on a nested attr for sequences; confirm against
        # the schema spec format.
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        additional_fields = widget_spec.get('additional_fields',[])
        return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
    def selectchoice_couchdbfacet_factory(self, spec, k):
        # Options are (path, category) pairs from the facet's single doc.
        widgetSpec = spec.get('widget')
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        view = 'facet_%s/all'%widgetSpec['facet']
        return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        # Same option extraction as the select-choice facet factory above.
        widgetSpec = spec.get('widget')
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        view = 'facet_%s/all'%widgetSpec['facet']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
    def fileupload_factory(self, spec, k):
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        def url_ident_factory(obj):
            # Stored files are addressed as "<doc_id>/<attachment id>".
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link',False)
        show_file_preview = widget_spec.get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
        identify_size = widget_spec.get('identify_size',False)
        return FileUpload( filestore=filestore.CachedTempFilestore(),
                url_base=url_base,
                image_thumbnail_default=image_thumbnail_default,
                show_download_link=show_download_link,
                show_file_preview=show_file_preview,
                show_image_thumbnail=show_image_thumbnail,
                url_ident_factory=url_ident_factory,
                identify_size=identify_size,
                **k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish form from a couchish definition dict.

    Falls back to the couchish WidgetRegistry/TypeRegistry when none are
    supplied, and optionally prepends hidden '_id'/'_rev' fields so the
    form round-trips CouchDB document identity.
    """
    if widget_registry is None:
        widget_registry = WidgetRegistry(db)
    if type_registry is None:
        type_registry = TypeRegistry()
    if add_id_and_rev is True:
        # Copy the definition dict and its fields item so we can make changes
        # without affecting the spec.
        definition = dict(definition)
        fields = list(definition['fields'])
        fields.insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
        fields.insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
        definition['fields'] = fields
    return formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
|
ish/couchish
|
4565d29327f778aed90fda4655aaf25dad288e5d
|
Add correct default_item to multichoice widget.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 78ae3cb..7f2e2c8 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,487 +1,488 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, field):
if field.value == ['']:
v = self.empty
else:
v = field.value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, field, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, field, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, field):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, field=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
def to_request_data(self, field, data):
if data is None:
return [None]
return [data['path']]
def from_request_data(self, field, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
def get_none_option_value(self, field):
return self.none_option[0]
def get_options(self, field):
return self.options
def selected(self, option, field):
if field.value is not None and option[0] == field.value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
+ default_value = []
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, field):
if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
def from_request_data(self, field, data):
data = data or []
return [self.full_options[item] for item in data]
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, field, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, field, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields',[])
return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
def selectchoice_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
38a0f820fa21f8309f0342bf80cca1b71e8af990
|
added TODO for auto generated all and all count config
|
diff --git a/TODO b/TODO
index 025047c..88fd195 100644
--- a/TODO
+++ b/TODO
@@ -1,49 +1,58 @@
+Change the 'type' config in adminish
+------------------------------------
+
+You should be able to specify where the magic 'all' and 'all_count' views come
+from. You should also be able to specify what the map and reduce are. This
+means that the 'type' is now just a marker that allows the system to know which
+view to use for all and all_count views. If the map is not specified for a
+type, an auto 'all' or 'all_count' is generated'. You should be able to pass in
+a key for the auto generated view so that you can set the order of items.
Change the Sequence Setup
-------------------------
Currently there is a little bit of magic going on when setting up sequences...
- name: foo
type: Sequence(String())
Which was nice until we realised that you couldn't apply a widget separately to
the Sequence and the String. The syntax for a sequence of sequences of strings
should change to ..
- name: foo
type: Sequence()
widget:
type: CustomSequenceWidget()
attr:
type: Sequence()
attr:
type: String()
- name: foo.*.*
widget:
type: CustomStringItemWidget()
Removing the parentheses?
-----------------------
I think the parentheses might be removable after the "Change the Sequence Setup"
is fixed. The 'type' used to try to look like the Python code but that turned
out to be wrong; after the change 'type' becomes just the name of the factory
(class name, typically) to call.
- name: foo
type: Sequence
widget:
type: CustomSequenceWidget
attr:
type: Sequence
attr:
type: String
- name: foo.*.*
widget:
type: CustomStringItemWidget
|
ish/couchish
|
05fc32de8e4b744cf684fc02523bd27e1f464eee
|
fixed default value for additional fields on seqreftextarea
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 87dd14b..78ae3cb 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,487 +1,487 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """
    <select> widget whose options are loaded from a CouchDB view.

    Option values are the view rows' ids; labels are built by applying
    ``label_template`` to each row's value. The chosen option is returned as
    a dict carrying a ``_ref`` key (the referenced document id).
    """
    # Default 'unselected' option; overridable via the none_option kwarg.
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'
    def __init__(self, db, view, label_template, **k):
        """
        :arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg css_class: a css class to apply to the field
        """
        none_option = k.pop('none_option', UNSET)
        # NOTE: self.sort stays as the UNSET sentinel (not False) when the
        # caller does not pass 'sort'; get_options() compares it to True.
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        self.label_template = label_template
        # Lazily populated caches -- filled on first get_options() call.
        self.options = None
        self.results = None
    def selected(self, option, field):
        # Return the HTML attribute text when `option` matches the field's
        # current request-data value, otherwise an empty string.
        if field.value == ['']:
            v = self.empty
        else:
            v = field.value[0]
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''
    def to_request_data(self, field, data):
        """
        Before the widget is rendered, the data is converted to a string
        format.If the data is None then we return an empty string. The sequence
        is request data representation.
        """
        if data is None:
            return ['']
        string_data = data.get('_ref')
        return [string_data]
    def from_request_data(self, field, request_data):
        """
        after the form has been submitted, the request data is converted into
        to the schema type.
        """
        # Ensure the id -> value cache (self.results) has been populated.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        # Dict results are annotated in place with the reference id; scalar
        # results are wrapped in a {'_ref': ..., 'data': ...} dict.
        if isinstance(result, dict):
            result['_ref'] = string_data
            return result
        else:
            return {'_ref':string_data, 'data':result}
    def get_none_option_value(self, field):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option
    def get_options(self, field=None):
        """
        Return all of the options for the widget
        """
        # Cached after the first call; the CouchDB view is queried only once.
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            # The 'empty' value is represented as '' in request data.
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """Return the dotted id of a node's parent ('' when the node is top-level)."""
    parent_segments = segments[:-1]
    if not parent_segments:
        return ''
    return '.'.join(parent_segments)
def mktree(options):
    """
    Build a tree of ``{'data': (id, label), 'children': [...]}`` nodes from a
    flat list of ``(dotted_id, label)`` options and return the synthetic root
    node (``('root', 'Root')``).

    Parents must appear before their children in ``options``: each node is
    attached to the already-registered entry for its parent id (top-level ids
    attach directly to the root).
    """
    # Index of every node by its dotted id; '' is the synthetic root.
    root = {'': {'data': ('root', 'Root'), 'children': []}}
    for id, label in options:
        segments = id.split('.')
        # Parent id is the dotted path minus the last segment ('' at top level).
        if len(segments) == 1:
            parent = ''
        else:
            parent = '.'.join(segments[:-1])
        root[id] = {'data': (id, label), 'children': []}
        root[parent]['children'].append(root[id])
    return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
    """
    Select a single category from a facet using a <select> list.

    ``options`` is a sequence of (path, category) pairs; labels are indented
    with '-' marks according to the depth of the dotted path.
    """
    template = 'field.SelectChoice'
    type = "SelectChoiceFacetTree"
    none_option = ('', '- choose -')

    def __init__(self, options, **k):
        widgets.Widget.__init__(self, **k)
        # "Indent" each node's label to reflect its depth in the tree.
        indented = []
        for path, category in options:
            depth = len(path.split('.')) - 1
            indented.append((path, '-' * depth + category['data']['label']))
        self.options = indented
        # Maps a submitted path back to its category reference.
        self.options_by_path = dict(options)

    ##
    # Request data methods.
    def to_request_data(self, field, data):
        """Convert a category reference to request data (its path)."""
        if data is None:
            return [None]
        return [data['path']]

    def from_request_data(self, field, data):
        """Convert submitted request data back to a category reference."""
        chosen = data[0]
        if chosen == self.none_option[0]:
            return None
        return self.options_by_path[chosen]

    ##
    # Methods required by the SelectChoice template
    def get_none_option_value(self, field):
        """Return the value of the 'unselected' option."""
        return self.none_option[0]

    def get_options(self, field):
        """Return the (value, label) option pairs for rendering."""
        return self.options

    def selected(self, option, field):
        """Return the selected attribute text when `option` is the current value."""
        value = field.value
        if value is not None and option[0] == value[0]:
            return ' selected="selected"'
        return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """
    Checkbox tree for selecting multiple facet categories.

    ``full_options`` is a sequence of (path, category) pairs; selections are
    round-tripped through request data as lists of paths.
    """
    template = 'field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"

    def __init__(self, full_options, css_class=None):
        self.options = [(path, category['data']['label'])
                        for path, category in full_options]
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        widgets.Widget.__init__(self, css_class=css_class)

    def to_request_data(self, field, data):
        """Convert the list of category references into a list of paths."""
        if data is None:
            return []
        return [category['path'] for category in data]

    def checked(self, option, field):
        """Return the checked attribute text when `option` is currently selected."""
        values = field.value
        if values is not None and option[0] in values:
            return ' checked="checked"'
        return ''

    def from_request_data(self, field, data):
        """Map the submitted paths back to their category references."""
        return [self.full_options[path] for path in (data or [])]
class SeqRefTextArea(formish.Input):
    """
    Textarea input field
    :arg cols: set the cols attr on the textarea element
    :arg rows: set the cols attr on the textarea element

    Renders a sequence of document references, one per line, in the form
    ``ref|field1|field2|...`` and resolves them back through a CouchDB view.
    """
    template = 'field.SeqRefTextArea'
    type="SeqRefTextArea"
    def __init__(self, db, view, **k):
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        # Extra attributes (besides '_ref') rendered per line in the textarea.
        self.additional_fields = k.pop('additional_fields', [])
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        # Default to one reference per line.
        if not self.converter_options.has_key('delimiter'):
            self.converter_options['delimiter'] = '\n'
    def to_request_data(self, field, data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        if data is None:
            return []
        additional_fields = ['_ref'] + self.additional_fields
        # Each item becomes one '|'-separated line: ref|field1|field2|...
        return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
    def from_request_data(self, field, request_data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        # Extract the list of ids from the content, discarding empty lines.
        # Only the text before the first '|' (the id) is kept from each line.
        rows = request_data[0].splitlines()
        rows = (row.strip() for row in rows)
        rows = (row for row in rows if row)
        rows = (row.split('|', 1) for row in rows)
        ids = [row[0] for row in rows]
        # Return default if nothing entered.
        if not ids:
            return self.empty
        # Convert the ids into refs.
        rows = self.db.view(self.view, keys=ids)
        for row in rows:
            row.value.update({'_ref': row.key})
        return [row.value for row in rows]
    def __repr__(self):
        # Constructor-style representation listing only the attributes that
        # differ from their defaults.
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r'%self.strip)
        if self.converter_options != {'delimiter':','}:
            attributes.append('converter_options=%r'%self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r'%self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r'%self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """
    Formish widget registry extended with CouchDB-backed widgets.

    Registers widget factories that need access to the couch database in
    order to build their option lists (references, facets, trees).

    Fix: lines carrying leaked git-diff markers ('-'/'+') inside
    seqreftextarea_factory were resolved to the intended '+' version, which
    supplies a [] default for 'additional_fields'.
    """
    def __init__(self, db=None):
        self.db = db
        FormishWidgetRegistry.__init__(self)
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # Reference attributes default to the couch-backed select widget.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory
    def selectchoice_couchdb_factory(self, spec, k):
        """Build a SelectChoiceCouchDB widget from a field spec."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # An explicit 'view' wins; otherwise fall back to the reference target.
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.db, view, label_template, **k)
    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        """Build a checkbox tree whose options come from a CouchDB view."""
        widgetSpec = spec.get('widget')
        def options(db, view):
            # (id, label) pairs from the view's documents.
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
    def seqreftextarea_factory(self, spec, k):
        """Build a SeqRefTextArea widget from a field spec."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        additional_fields = widget_spec.get('additional_fields',[])
        return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
    def selectchoice_couchdbfacet_factory(self, spec, k):
        """Build a facet-tree <select> widget; options come from the facet doc."""
        widgetSpec = spec.get('widget')
        def options(db, view):
            # The facet document's categories become (path, category) pairs.
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        view = 'facet_%s/all'%widgetSpec['facet']
        return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        """Build a facet-tree checkbox widget; options come from the facet doc."""
        widgetSpec = spec.get('widget')
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        view = 'facet_%s/all'%widgetSpec['facet']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
    def fileupload_factory(self, spec, k):
        """Build the couchish FileUpload widget (overrides the base factory)."""
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        def url_ident_factory(obj):
            # Stored files are addressed as '<doc_id>/<attachment_id>'.
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link',False)
        show_file_preview = widget_spec.get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
        identify_size = widget_spec.get('identify_size',False)
        return FileUpload( filestore=filestore.CachedTempFilestore(),
                           url_base=url_base,
                           image_thumbnail_default=image_thumbnail_default,
                           show_download_link=show_download_link,
                           show_file_preview=show_file_preview,
                           show_image_thumbnail=show_image_thumbnail,
                           url_ident_factory=url_ident_factory,
                           identify_size=identify_size,
                           **k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish form from a couchish JSON definition.

    When ``add_id_and_rev`` is True, hidden '_id' and '_rev' fields are
    prepended to a copy of the definition so the caller's spec is untouched.
    """
    if widget_registry is None:
        widget_registry = WidgetRegistry(db)
    if type_registry is None:
        type_registry = TypeRegistry()
    if add_id_and_rev is True:
        # Shallow-copy the definition dict and its fields list so the
        # insertions below do not mutate the original spec.
        definition = dict(definition)
        fields = list(definition['fields'])
        fields.insert(0, {'name': '_rev', 'widget': {'type': 'Hidden'}})
        fields.insert(0, {'name': '_id', 'widget': {'type': 'Hidden'}})
        definition['fields'] = fields
    return formish_build(definition, name=name, defaults=defaults,
                         errors=errors, action=action,
                         widget_registry=widget_registry,
                         type_registry=type_registry)
|
ish/couchish
|
36f9460438c21b74e4171ba563f7d47ab9970cbe
|
removed debugging print statements
|
diff --git a/couchish/filehandling.py b/couchish/filehandling.py
index 5f71047..29ea0ee 100644
--- a/couchish/filehandling.py
+++ b/couchish/filehandling.py
@@ -1,230 +1,223 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from dottedish import dotted, flatten, dotteddict, api, dottedlist
from couchdbsession import a8n
import base64
import uuid
from schemaish.type import File
from StringIO import StringIO
import shutil
from couchish import jsonutil
def get_attr(prefix, parent=None):
# combine prefix and parent where prefix is a list and parent is a dotted string
if parent is None:
segments = [str(segment) for segment in prefix]
return '.'.join(segments)
if prefix is None:
return parent
segments = [str(segment) for segment in prefix]
if parent != '':
segments += parent.split('.')
attr = '.'.join( segments )
return attr
def get_files(data, original=None, prefix=None):
- print 'in get_files (data, original, prefix)', data, original, prefix
# scan old data to collect any file refs and then scan new data for file changes
files = {}
inlinefiles = {}
original_files = {}
get_files_from_original(data, original, files, inlinefiles, original_files, prefix)
get_files_from_data(data, original, files, inlinefiles, original_files, prefix)
return data, files, inlinefiles, original_files
def has_unmodified_signature(f):
if f.file is None:
return True
return False
def dotted_or_emptydict(d):
if d is None:
return {}
try:
return dotted(d)
except TypeError:
return d
def get_files_from_data(data, original, files, inlinefiles, original_files, prefix):
if isinstance(data, File):
get_file_from_item(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
if not isinstance(data, dict) and not isinstance(data, list):
return
dd = dotted_or_emptydict(data)
ddoriginal = dotted_or_emptydict(original)
- print '3 type(dd)',type(dd)
if not dd:
return
for k,f in flatten(dd):
- print '4 Scanning files', k,f, type(f)
if isinstance(f, File):
if isinstance(ddoriginal.get(k), File):
of = ddoriginal[k]
else:
of = None
- print 'FOUND FILE on original',of
get_file_from_item(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
api.wrap.when_type(a8n.List)(dottedlist.wrap_list)
api.wrap.when_type(a8n.Dictionary)(dotteddict.wrap_dict)
def get_file_from_item(f, of, files, inlinefiles, original_files, fullprefix):
if f.file is None:
# if we have no original data then we presume the file should remain unchanged
f.id = of.id
if f.mimetype is None:
f.mimetype = of.mimetype
if f.filename is None:
f.filename = of.filename
if not hasattr(f, 'metadata') or f.metadata is None or f.metadata=={}:
f.metadata = getattr(of, 'metadata', None)
else:
if of and hasattr(of,'id'):
f.id = of.id
else:
f.id = uuid.uuid4().hex
if getattr(f,'inline',False) is True:
filestore = inlinefiles
else:
filestore = files
if hasattr(f, 'inline'):
del f.inline
# add the files for attachment handling and remove the file data from document
if getattr(f,'b64', None):
filestore[fullprefix] = jsonutil.CouchishFile(f.file, f.filename, f.mimetype, f.id, metadata = f.metadata, b64=True)
del f.b64
else:
fh = StringIO()
shutil.copyfileobj(f.file, fh)
fh.seek(0)
filestore[fullprefix] = jsonutil.CouchishFile(fh, f.filename, f.mimetype, f.id, metadata = f.metadata)
del f.file
def get_file_from_original(f, of, files, inlinefiles, original_files, fullprefix):
if not isinstance(f, File):
original_files[fullprefix] = of
def get_files_from_original(data, original, files, inlinefiles, original_files, prefix):
- print 'in get_files_from_original'
if isinstance(original, File):
get_file_from_original(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
- print 'type(origoinal)',type(original)
if not isinstance(original, dict) and not isinstance(original, list):
return
dd = dotted_or_emptydict(data)
ddoriginal = dotted_or_emptydict(original)
- print 'about to iteratie on flattened orig'
if not ddoriginal:
return
for k, of in flatten(ddoriginal):
if isinstance(of, File):
f = dd.get(k)
get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def _parse_changes_for_files(session, deletions, additions, changes):
""" returns deletions, additions """
additions = list(additions)
changes = list(changes)
deletions = list(deletions)
all_separate_files = {}
all_inline_files = {}
for addition in additions:
addition, files, inlinefiles, original_files_notused = get_files(addition)
if files:
all_separate_files.setdefault(addition['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(addition['_id'],{}).update(inlinefiles)
_extract_inline_attachments(addition, inlinefiles)
all_original_files = {}
changes = list(changes)
for n, changeset in enumerate(changes):
d, cs = changeset
cs = list(cs)
for m, c in enumerate(cs):
if c['action'] in ['edit','create','remove']:
c['value'], files, inlinefiles, original_files = get_files(c.get('value'), original=c.get('was'), prefix=c['path'])
cs[m] = c
if files:
all_separate_files.setdefault(d['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(d['_id'],{}).update(inlinefiles)
all_original_files.setdefault(d['_id'], {}).update(original_files)
_extract_inline_attachments(d, inlinefiles)
changes[n] = (d, cs)
return all_original_files, all_separate_files
def _extract_inline_attachments(doc, files):
"""
Move the any attachment data that we've found into the _attachments attribute
"""
for attr, f in files.items():
if f.b64:
data = f.file.replace('\n', '')
else:
data = base64.encodestring(f.file.read()).replace('\n','')
f.file.close()
del f.file
del f.b64
del f.inline
del f.doc_id
doc.setdefault('_attachments',{})[f.id] = {'content_type': f.mimetype,'data': data}
def _handle_separate_attachments(session, deletions, additions):
"""
add attachments that aren't inline and remove any attachments without references
"""
# XXX This needs to cope with files moving when sequences are re-numbered. We need
# XXX to talk to matt about what a renumbering like this looks like
for id, attrfiles in additions.items():
doc = session.get(id)
stubdoc = {'_id':doc['_id'], '_rev':doc['_rev']}
for attr, f in attrfiles.items():
data = ''
if f.file:
if f.b64:
data = base64.decodestring(f.file)
else:
data = f.file.read()
f.file.close()
session._db.put_attachment(stubdoc, data, filename=f.id, content_type=f.mimetype)
del f.file
del f.b64
del f.inline
del f.doc_id
for id, attrfiles in deletions.items():
# XXX had to use _db because delete attachment freeaked using session version.
doc = session._db.get(id)
for attr, f in attrfiles.items():
session._db.delete_attachment(doc, f.id)
additions = {}
deletions = {}
|
ish/couchish
|
cb97e6670df2b4bbb865b4d64867a48194747931
|
changed to use new simplegeneric driven dottedish
|
diff --git a/couchish.egg-info/PKG-INFO b/couchish.egg-info/PKG-INFO
index 64c70ef..097e5e6 100644
--- a/couchish.egg-info/PKG-INFO
+++ b/couchish.egg-info/PKG-INFO
@@ -1,10 +1,10 @@
Metadata-Version: 1.0
Name: couchish
-Version: 0.2
+Version: 0.2.1
Summary: UNKNOWN
Home-page: UNKNOWN
Author: Tim Parkin & Matt Goodall
Author-email: [email protected]
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 9c456b1..87dd14b 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,487 +1,487 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
- def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
+ def pre_parse_incoming_request_data(self, field, data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
- def from_request_data(self, schema_type, request_data):
+ def from_request_data(self, field, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
- def selected(self, option, value, schemaType):
- if value == ['']:
+ def selected(self, option, field):
+ if field.value == ['']:
v = self.empty
else:
- v = value[0]
+ v = field.value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
- def to_request_data(self, schema_type, data):
+ def to_request_data(self, field, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
- def from_request_data(self, schema_type, request_data):
+ def from_request_data(self, field, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
- def get_none_option_value(self, schema_type):
+ def get_none_option_value(self, field):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
- def get_options(self, schema_type=None):
+ def get_options(self, field=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
- def to_request_data(self, schema_type, data):
+ def to_request_data(self, field, data):
if data is None:
return [None]
return [data['path']]
- def from_request_data(self, schema_type, data):
+ def from_request_data(self, field, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
- def get_none_option_value(self, schema_type):
+ def get_none_option_value(self, field):
return self.none_option[0]
- def get_options(self, schema_type):
+ def get_options(self, field):
return self.options
- def selected(self, option, value, schema_type):
- if value is not None and option[0] == value[0]:
+ def selected(self, option, field):
+ if field.value is not None and option[0] == field.value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
- def to_request_data(self, schema_type, data):
+ def to_request_data(self, field, data):
if data is None:
return []
return [c['path'] for c in data]
- def checked(self, option, values, schema_type):
- if values is not None and option[0] in values:
+ def checked(self, option, field):
+ if field.value is not None and option[0] in field.value:
return ' checked="checked"'
else:
return ''
- def from_request_data(self, schema_type, data):
+ def from_request_data(self, field, data):
data = data or []
return [self.full_options[item] for item in data]
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
- def to_request_data(self, schema_type, data):
+ def to_request_data(self, field, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
- def from_request_data(self, schema_type, request_data):
+ def from_request_data(self, field, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields')
return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
def selectchoice_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
diff --git a/couchish/filehandling.py b/couchish/filehandling.py
index 4cd4c81..5f71047 100644
--- a/couchish/filehandling.py
+++ b/couchish/filehandling.py
@@ -1,209 +1,230 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
-from dottedish import dotted
+from dottedish import dotted, flatten, dotteddict, api, dottedlist
+from couchdbsession import a8n
import base64
import uuid
from schemaish.type import File
from StringIO import StringIO
import shutil
from couchish import jsonutil
def get_attr(prefix, parent=None):
# combine prefix and parent where prefix is a list and parent is a dotted string
if parent is None:
segments = [str(segment) for segment in prefix]
return '.'.join(segments)
if prefix is None:
return parent
segments = [str(segment) for segment in prefix]
if parent != '':
segments += parent.split('.')
attr = '.'.join( segments )
return attr
def get_files(data, original=None, prefix=None):
+ print 'in get_files (data, original, prefix)', data, original, prefix
# scan old data to collect any file refs and then scan new data for file changes
files = {}
inlinefiles = {}
original_files = {}
get_files_from_original(data, original, files, inlinefiles, original_files, prefix)
get_files_from_data(data, original, files, inlinefiles, original_files, prefix)
return data, files, inlinefiles, original_files
def has_unmodified_signature(f):
if f.file is None:
return True
return False
-def make_dotted_or_emptydict(d):
- return dotted(d)
+def dotted_or_emptydict(d):
+ if d is None:
+ return {}
+ try:
+ return dotted(d)
+ except TypeError:
+ return d
def get_files_from_data(data, original, files, inlinefiles, original_files, prefix):
if isinstance(data, File):
get_file_from_item(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
if not isinstance(data, dict) and not isinstance(data, list):
return
- dd = dotted(data)
- ddoriginal = make_dotted_or_emptydict(original)
- for k,f in dd.dotteditems():
+ dd = dotted_or_emptydict(data)
+ ddoriginal = dotted_or_emptydict(original)
+ print '3 type(dd)',type(dd)
+ if not dd:
+ return
+ for k,f in flatten(dd):
+
+ print '4 Scanning files', k,f, type(f)
if isinstance(f, File):
if isinstance(ddoriginal.get(k), File):
of = ddoriginal[k]
else:
of = None
+ print 'FOUND FILE on original',of
get_file_from_item(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
-
+
+
+api.wrap.when_type(a8n.List)(dottedlist.wrap_list)
+api.wrap.when_type(a8n.Dictionary)(dotteddict.wrap_dict)
def get_file_from_item(f, of, files, inlinefiles, original_files, fullprefix):
if f.file is None:
# if we have no original data then we presume the file should remain unchanged
f.id = of.id
if f.mimetype is None:
f.mimetype = of.mimetype
if f.filename is None:
f.filename = of.filename
if not hasattr(f, 'metadata') or f.metadata is None or f.metadata=={}:
f.metadata = getattr(of, 'metadata', None)
else:
if of and hasattr(of,'id'):
f.id = of.id
else:
f.id = uuid.uuid4().hex
if getattr(f,'inline',False) is True:
filestore = inlinefiles
else:
filestore = files
if hasattr(f, 'inline'):
del f.inline
# add the files for attachment handling and remove the file data from document
if getattr(f,'b64', None):
filestore[fullprefix] = jsonutil.CouchishFile(f.file, f.filename, f.mimetype, f.id, metadata = f.metadata, b64=True)
del f.b64
else:
fh = StringIO()
shutil.copyfileobj(f.file, fh)
fh.seek(0)
filestore[fullprefix] = jsonutil.CouchishFile(fh, f.filename, f.mimetype, f.id, metadata = f.metadata)
del f.file
def get_file_from_original(f, of, files, inlinefiles, original_files, fullprefix):
if not isinstance(f, File):
original_files[fullprefix] = of
def get_files_from_original(data, original, files, inlinefiles, original_files, prefix):
+ print 'in get_files_from_original'
if isinstance(original, File):
get_file_from_original(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
+ print 'type(origoinal)',type(original)
if not isinstance(original, dict) and not isinstance(original, list):
return
- dd = make_dotted_or_emptydict(data)
- ddoriginal = dotted(original)
- for k, of in ddoriginal.dotteditems():
+ dd = dotted_or_emptydict(data)
+ ddoriginal = dotted_or_emptydict(original)
+ print 'about to iteratie on flattened orig'
+ if not ddoriginal:
+ return
+ for k, of in flatten(ddoriginal):
if isinstance(of, File):
f = dd.get(k)
get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def _parse_changes_for_files(session, deletions, additions, changes):
""" returns deletions, additions """
additions = list(additions)
changes = list(changes)
deletions = list(deletions)
all_separate_files = {}
all_inline_files = {}
for addition in additions:
addition, files, inlinefiles, original_files_notused = get_files(addition)
if files:
all_separate_files.setdefault(addition['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(addition['_id'],{}).update(inlinefiles)
_extract_inline_attachments(addition, inlinefiles)
all_original_files = {}
changes = list(changes)
for n, changeset in enumerate(changes):
d, cs = changeset
cs = list(cs)
for m, c in enumerate(cs):
if c['action'] in ['edit','create','remove']:
c['value'], files, inlinefiles, original_files = get_files(c.get('value'), original=c.get('was'), prefix=c['path'])
cs[m] = c
if files:
all_separate_files.setdefault(d['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(d['_id'],{}).update(inlinefiles)
all_original_files.setdefault(d['_id'], {}).update(original_files)
_extract_inline_attachments(d, inlinefiles)
changes[n] = (d, cs)
return all_original_files, all_separate_files
def _extract_inline_attachments(doc, files):
"""
Move the any attachment data that we've found into the _attachments attribute
"""
for attr, f in files.items():
if f.b64:
data = f.file.replace('\n', '')
else:
data = base64.encodestring(f.file.read()).replace('\n','')
f.file.close()
del f.file
del f.b64
del f.inline
del f.doc_id
doc.setdefault('_attachments',{})[f.id] = {'content_type': f.mimetype,'data': data}
def _handle_separate_attachments(session, deletions, additions):
"""
add attachments that aren't inline and remove any attachments without references
"""
# XXX This needs to cope with files moving when sequences are re-numbered. We need
# XXX to talk to matt about what a renumbering like this looks like
for id, attrfiles in additions.items():
doc = session.get(id)
stubdoc = {'_id':doc['_id'], '_rev':doc['_rev']}
for attr, f in attrfiles.items():
data = ''
if f.file:
if f.b64:
data = base64.decodestring(f.file)
else:
data = f.file.read()
f.file.close()
session._db.put_attachment(stubdoc, data, filename=f.id, content_type=f.mimetype)
del f.file
del f.b64
del f.inline
del f.doc_id
for id, attrfiles in deletions.items():
# XXX had to use _db because delete attachment freeaked using session version.
doc = session._db.get(id)
for attr, f in attrfiles.items():
session._db.delete_attachment(doc, f.id)
additions = {}
deletions = {}
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index 99cb750..ac43e4b 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,101 +1,100 @@
from jsonish import pythonjson
from schemaish.type import File
import base64
-from dottedish import dotted
+from dottedish import flatten, dotted
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
if hasattr(obj,'file') and hasattr(obj,'b64'):
d['base64'] = obj.file
else:
if hasattr(obj,'file') and obj.file is not None:
d['base64'] = base64.encodestring(obj.file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
def wrap_encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
def wrap_decode_from_dict(d):
obj = pythonjson.decode_from_dict(d)
obj = add_id_and_attr_to_files(obj)
return obj
encode_to_dict = wrap_encode_to_dict
decode_from_dict = wrap_decode_from_dict
def add_id_and_attr_to_files(data):
if not isinstance(data, dict):
return data
dd = dotted(data)
- for k,f in dd.dotteditems():
+ for k,f in flatten(data):
if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
f.doc_id = dd['_id']
f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
f.doc_id = dd[subpath]['_id']
f.rev = dd[subpath]['_rev']
- data = dd.data
return data
dumps = pythonjson.dumps
loads = pythonjson.loads
diff --git a/couchish/tests/test_couchish_formish_jsonbuilder.py b/couchish/tests/test_couchish_formish_jsonbuilder.py
index 490a3bb..9ea36a7 100644
--- a/couchish/tests/test_couchish_formish_jsonbuilder.py
+++ b/couchish/tests/test_couchish_formish_jsonbuilder.py
@@ -1,77 +1,78 @@
import unittest
from couchish.couchish_formish_jsonbuilder import build
import yaml
import webob
from BeautifulSoup import BeautifulSoup
import urllib
+from dottedish import flatten
DATADIR = 'couchish/tests/data/%s'
class Test(unittest.TestCase):
def request(self, d):
r = webob.Request.blank('http://localhost/')
r.method = 'POST'
r.content_type = 'application/x-www-form-urlencoded'
kvpairs = [('__formish_form__', 'form')]
- for k in d.dottedkeys():
+ for k,v in flatten(d):
lastsegment = k.split('.')[-1]
try:
int(lastsegment)
k = '.'.join(k.split('.')[:-1])
except ValueError:
pass
for v in d[k]:
kvpairs.append( (k,v) )
r.body = urllib.urlencode(kvpairs)
return r
def assertRoundTrip(self, f, testdata):
r = self.request(f._get_request_data())
d = f.validate(r)
self.assertEquals(d, testdata)
def assertIdHasValue(self, f, id, v):
soup = BeautifulSoup(f())
self.assertEquals(soup.find(id=id)['value'],v)
def assertIdAttrHasValue(self, f, id, attr, v):
soup = BeautifulSoup(f())
s = soup.find(id=id)
assert 'attr' in s
self.assertEquals(s['attr'],v)
def assertIdAttrHasNoValue(self, f, id, attr):
soup = BeautifulSoup(f())
s = soup.find(id=id)
assert 'attr' not in s
def test_simple(self):
book_definition = yaml.load( open(DATADIR%'test_couchish_book.yaml').read() )
dvd_definition = yaml.load( open(DATADIR%'test_couchish_dvd.yaml').read() )
post_definition = yaml.load( open(DATADIR%'test_couchish_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'test_couchish_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'test_couchish_views.yaml').read() )
f = build(author_definition)
self.assertIdHasValue(f, 'form-first_name', '')
# Test None data
f = build(author_definition)
testdata = {'first_name': None, 'last_name': None}
f.defaults = testdata
self.assertIdHasValue(f, 'form-first_name', '')
self.assertRoundTrip(f, testdata)
# Test sample data
f = build(author_definition)
testdata = {'first_name': None, 'last_name': 'Goodall'}
f.defaults = testdata
self.assertIdHasValue(f, 'form-last_name', 'Goodall')
self.assertRoundTrip(f, testdata)
def test_fileupload(self):
upload_definition = yaml.load( open(DATADIR%'test_upload.yaml').read() )
f = build(upload_definition)
diff --git a/setup.py b/setup.py
index ca0f1d0..2195546 100644
--- a/setup.py
+++ b/setup.py
@@ -1,37 +1,37 @@
from setuptools import setup, find_packages
import sys, os
-version = '0.2'
+version = '0.2.1'
setup(name='couchish',
version=version,
description="",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Tim Parkin & Matt Goodall',
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"PyYAML",
"couchdb-session",
"dottedish",
"jsonish",
"schemaish",
],
extras_require={
'formish': ['formish'],
},
entry_points="""
# -*- Entry points: -*-
""",
test_suite='couchish.tests',
tests_require=['BeautifulSoup', 'WebOb', 'formish'],
)
|
ish/couchish
|
969e8123e60d217e031e9f9f19730ded4a38af99
|
removed unused dottedish module and used dotteditems item instead of doing a second lookup
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 8f61496..9c456b1 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,488 +1,487 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
-from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == ['']:
v = self.empty
else:
v = value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class SelectChoiceFacetTreeCouchDB(widgets.Widget):
"""
Select a single category from a facet using a <select> list.
"""
template='field.SelectChoice'
type = "SelectChoiceFacetTree"
none_option = ('', '- choose -')
def __init__(self, options, **k):
widgets.Widget.__init__(self, **k)
# "Indent" nodes' labels.
def indented_label(key, label):
return ''.join(['-']*(len(key.split('.'))-1)+[label])
self.options = [(key, indented_label(key, value['data']['label']))
for (key, value) in options]
# Used to map from chosen item back to category reference.
self.options_by_path = dict(options)
##
# Request data methods.
def to_request_data(self, schema_type, data):
if data is None:
return [None]
return [data['path']]
def from_request_data(self, schema_type, data):
if data[0] == self.none_option[0]:
return None
return self.options_by_path[data[0]]
##
# Methods required by the SelectChoice template
def get_none_option_value(self, schema_type):
return self.none_option[0]
def get_options(self, schema_type):
return self.options
def selected(self, option, value, schema_type):
if value is not None and option[0] == value[0]:
return ' selected="selected"'
return ''
# XXX Rename to include "Facet"
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
def from_request_data(self, schema_type, data):
data = data or []
return [self.full_options[item] for item in data]
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields')
return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
def selectchoice_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
diff --git a/couchish/jsonutil.py b/couchish/jsonutil.py
index 893c1c0..99cb750 100644
--- a/couchish/jsonutil.py
+++ b/couchish/jsonutil.py
@@ -1,101 +1,101 @@
from jsonish import pythonjson
from schemaish.type import File
import base64
from dottedish import dotted
class CouchishFile(File):
def __init__(self, file, filename, mimetype, id=None, doc_id=None, inline=False, b64=False, metadata=None):
self.file = file
self.filename = filename
self.mimetype = mimetype
self.id = id
self.doc_id = doc_id
self.inline = inline
self.b64 = b64
if metadata is None:
metadata = {}
self.metadata = metadata
def __repr__(self):
return '<couchish.jsonutil.CouchishFile file="%r" filename="%s", mimetype="%s", id="%s", doc_id="%s", inline="%s", b64="%s", metadata="%r" >' % (getattr(self,'file',None), self.filename, self.mimetype, self.id, getattr(self, 'doc_id',None), getattr(self,'inline',None), getattr(self,'b64', None), getattr(self, 'metadata', {}))
def file_to_dict(obj):
d = {
'__type__': 'file',
'filename': obj.filename,
'mimetype': obj.mimetype,
'id': getattr(obj, 'id', None),
}
if hasattr(obj, 'metadata') and obj.metadata:
d['metadata'] = obj.metadata
if hasattr(obj,'doc_id') and obj.doc_id is not None:
d['doc_id'] = obj.doc_id
if hasattr(obj, 'inline') and obj.inline is not False:
d['inline'] = obj.inline
if hasattr(obj,'file') and hasattr(obj,'b64'):
d['base64'] = obj.file
else:
if hasattr(obj,'file') and obj.file is not None:
d['base64'] = base64.encodestring(obj.file.read())
return d
def file_from_dict(obj):
filename = obj['filename']
mimetype = obj['mimetype']
inline = obj.get('inline', False)
id = obj.get('id')
doc_id = obj.get('doc_id')
metadata = obj.get('metadata',{})
if 'base64' in obj:
data = obj['base64']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, b64=True, metadata=metadata)
elif 'file' in obj:
data = obj['file']
return CouchishFile(data, filename, mimetype, id=id, doc_id=doc_id, inline=inline, metadata=metadata)
else:
return CouchishFile(None, filename, mimetype, id=id, doc_id=doc_id, metadata=metadata)
pythonjson.json.register_type(File, file_to_dict, file_from_dict, "file")
pythonjson.json.register_type(CouchishFile, file_to_dict, file_from_dict, "file")
pythonjson.decode_mapping['file'] = file_from_dict
pythonjson.encode_mapping[File] = ('file',file_to_dict)
pythonjson.encode_mapping[CouchishFile] = ('file',file_to_dict)
def wrap_encode_to_dict(obj):
return pythonjson.encode_to_dict(obj)
def wrap_decode_from_dict(d):
obj = pythonjson.decode_from_dict(d)
obj = add_id_and_attr_to_files(obj)
return obj
encode_to_dict = wrap_encode_to_dict
decode_from_dict = wrap_decode_from_dict
def add_id_and_attr_to_files(data):
if not isinstance(data, dict):
return data
dd = dotted(data)
- for k in dd.dottedkeys():
- if isinstance(dd[k],File):
+ for k,f in dd.dotteditems():
+ if isinstance(f,File):
if '_id' in dd and '_rev' in dd:
- dd[k].doc_id = dd['_id']
- dd[k].rev = dd['_rev']
+ f.doc_id = dd['_id']
+ f.rev = dd['_rev']
segments = k.split('.')
for n in xrange(1,len(segments)):
subpath = '.'.join(segments[:-n])
if '_id' in dd[subpath] and '_rev' in dd[subpath]:
- dd[k].doc_id = dd[subpath]['_id']
- dd[k].rev = dd[subpath]['_rev']
+ f.doc_id = dd[subpath]['_id']
+ f.rev = dd[subpath]['_rev']
data = dd.data
return data
dumps = pythonjson.dumps
loads = pythonjson.loads
|
ish/couchish
|
20ef26efd1af387efe58502df167b6683cdebb88
|
Add new widget for selecting a single category using a <select> field.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index bc4e3ba..8f61496 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,424 +1,488 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
    """Return (width, height) of an image file using ImageMagick's identify.

    Recognises JPEG, PNG and GIF from identify's output.

    :arg filename: path to the image file on disk.
    :raises ValueError: if the format cannot be determined from the output
        (the original code raised UnboundLocalError in this case).
    """
    IDENTIFY = '/usr/bin/identify'
    stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
    # Last match wins, mirroring the original sequential checks.
    image_type = None
    for candidate in ('JPEG', 'PNG', 'GIF'):
        if candidate in stdout:
            image_type = candidate
    if image_type is None:
        raise ValueError('unrecognised image format for %r: %r' % (filename, stdout))
    # identify output looks like: "<file> <TYPE> <WxH> ..."; take the token
    # following the type name and split it into dimensions.
    dims = stdout.split(image_type)[1].split(' ')[1]
    width, height = [int(s) for s in dims.split('x')]
    return width, height
class Reference(schemaish.attr.Attribute):
    """A schemaish attribute representing a generic reference to another item."""

    type = "Reference"

    def __init__(self, **k):
        # The referenced type/view name is carried inside the 'attr' spec.
        self.refersto = k['attr']['refersto']
        schemaish.attr.Attribute.__init__(self, **k)
class TypeRegistry(SchemaishTypeRegistry):
    """Schemaish type registry extended with the couchish 'Reference' type."""
    def __init__(self):
        SchemaishTypeRegistry.__init__(self)
        # Map the 'Reference' type name in form definitions to our factory.
        self.registry['Reference'] = self.reference_factory
    def reference_factory(self, field):
        # Build a Reference attribute from its JSON field definition.
        return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
    """File-upload widget that can also record image dimensions.

    Extends formish's FileUpload: when ``identify_size`` is True the uploaded
    image is opened with PIL and its width/height are carried through the
    request data and stored as File metadata.
    """
    type="ImageFileUpload"
    def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
                 css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
        # Forward display/storage options to the base widget; only
        # identify_size is handled locally.
        formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
                 show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
        self.identify_size = identify_size
    def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
        """
        File uploads are weird; in our case this means asymmetric. We store the
        file in a temporary location and just store an identifier in the field.
        This at least makes the file look symmetric.
        """
        # The 'remove' checkbox wins: blank out every file-related field.
        if data.get('remove', [None])[0] is not None:
            data['name'] = ['']
            data['mimetype'] = ['']
            data['height'] = ['']
            data['width'] = ['']
            return data
        fieldstorage = data.get('file', [''])[0]
        if getattr(fieldstorage,'file',None):
            # A new file was posted: stash it in the filestore under a fresh
            # key and record an opaque resource path instead of the raw file.
            key = uuid.uuid4().hex
            self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
            data['name'] = [util.encode_file_resource_path('tmp', key)]
            data['mimetype'] = [fieldstorage.type]
        if self.identify_size is True and fieldstorage != '':
            # Rewind the stream so PIL can read the image header for its size.
            fieldstorage.file.seek(0)
            width, height = Image.open(fieldstorage.file).size
            data['width'] = [width]
            data['height'] = [height]
        else:
            data['width'] = [None]
            data['height'] = [None]
        return data
    def from_request_data(self, schema_type, request_data):
        """
        Creates a File object if possible
        """
        # XXX We could add a file converter that converts this to a string data?
        if request_data['name'] == ['']:
            # Nothing uploaded (or explicitly removed): no file.
            return None
        elif request_data['name'] == request_data['default']:
            # Unchanged from the default: an empty SchemaFile signals
            # "keep the existing stored file".
            return SchemaFile(None, None, None)
        else:
            key = util.decode_file_resource_path(request_data['name'][0])[1]
            try:
                cache_tag, headers, f = self.filestore.get(key)
            except KeyError:
                # Temp file expired or was never stored; treat as no upload.
                return None
            headers = dict(headers)
            if self.identify_size == True:
                metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
            else:
                metadata = None
            return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """<select> widget whose options are loaded lazily from a CouchDB view.

    Option values are view-row ids; the selected value round-trips as a dict
    carrying a '_ref' key alongside the referenced row's data.
    """
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'
    def __init__(self, db, view, label_template, **k):
        """
        :arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg css_class: a css class to apply to the field
        """
        none_option = k.pop('none_option', UNSET)
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        # %-style template applied to each row's value to build its label.
        self.label_template = label_template
        # Both caches are populated on first call to get_options().
        self.options = None
        self.results = None
    def selected(self, option, value, schemaType):
        # Template helper: value is request data (a one-element list).
        if value == ['']:
            v = self.empty
        else:
            v = value[0]
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''
    def to_request_data(self, schema_type, data):
        """
        Before the widget is rendered, the data is converted to a string
        format.If the data is None then we return an empty string. The sequence
        is request data representation.
        """
        if data is None:
            return ['']
        string_data = data.get('_ref')
        return [string_data]
    def from_request_data(self, schema_type, request_data):
        """
        after the form has been submitted, the request data is converted into
        to the schema type.
        """
        # Ensure self.results is populated before looking up the chosen id.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        if isinstance(result, dict):
            # NOTE: mutates the cached result dict in place to attach the ref.
            result['_ref'] = string_data
            return result
        else:
            return {'_ref':string_data, 'data':result}
    def get_none_option_value(self, schema_type):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option
    def get_options(self, schema_type=None):
        """
        Return all of the options for the widget
        """
        # Cached after the first call; the view is queried only once.
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            # Sort by rendered label.
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            # The widget's "empty" value is rendered as the '' option.
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """Return the dotted parent path for a list of path segments ('' at root)."""
    return '.'.join(segments[:-1]) if len(segments) > 1 else ''

def mktree(options):
    """Arrange (dotted-id, label) pairs into a nested node tree.

    Each node is ``{'data': (id, label), 'children': [...]}``; the returned
    root node has data ``('root', 'Root')``. Parents must appear before their
    children in ``options``.
    """
    root = {'': {'data': ('root', 'Root'), 'children': []}}
    for node_id, label in options:
        node = {'data': (node_id, label), 'children': []}
        root[node_id] = node
        root[get_parent(node_id.split('.'))]['children'].append(node)
    return root['']
+
+class SelectChoiceFacetTreeCouchDB(widgets.Widget):
+ """
+ Select a single category from a facet using a <select> list.
+ """
+
+ template='field.SelectChoice'
+ type = "SelectChoiceFacetTree"
+
+ none_option = ('', '- choose -')
+
+ def __init__(self, options, **k):
+ widgets.Widget.__init__(self, **k)
+ # "Indent" nodes' labels.
+ def indented_label(key, label):
+ return ''.join(['-']*(len(key.split('.'))-1)+[label])
+ self.options = [(key, indented_label(key, value['data']['label']))
+ for (key, value) in options]
+ # Used to map from chosen item back to category reference.
+ self.options_by_path = dict(options)
+
+ ##
+ # Request data methods.
+
+ def to_request_data(self, schema_type, data):
+ if data is None:
+ return [None]
+ return [data['path']]
+
+ def from_request_data(self, schema_type, data):
+ if data[0] == self.none_option[0]:
+ return None
+ return self.options_by_path[data[0]]
+
+ ##
+ # Methods required by the SelectChoice template
+
+ def get_none_option_value(self, schema_type):
+ return self.none_option[0]
+
+ def get_options(self, schema_type):
+ return self.options
+
+ def selected(self, option, value, schema_type):
+ if value is not None and option[0] == value[0]:
+ return ' selected="selected"'
+ return ''
+
+
+# XXX Rename to include "Facet"
+
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """Checkbox tree for facet categories, round-tripping values as path refs."""

    template='field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"

    def __init__(self, full_options, css_class=None):
        # full_options is a sequence of (path, category-dict) pairs.
        labelled = []
        for path, category in full_options:
            labelled.append((path, category['data']['label']))
        self.options = labelled
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        widgets.Widget.__init__(self, css_class=css_class)

    def to_request_data(self, schema_type, data):
        # Render each selected category as its dotted path.
        if data is None:
            return []
        return [category['path'] for category in data]

    def checked(self, option, values, schema_type):
        # Template helper: mark this option when its path is among the
        # selected values.
        if values is None:
            return ''
        return ' checked="checked"' if option[0] in values else ''

    def from_request_data(self, schema_type, data):
        # Map submitted paths back to the full category dicts.
        return [self.full_options[path] for path in (data or [])]
class SeqRefTextArea(formish.Input):
    """
    Textarea input field holding one reference per line.

    :arg cols: set the cols attr on the textarea element
    :arg rows: set the rows attr on the textarea element
    """
    template = 'field.SeqRefTextArea'
    type="SeqRefTextArea"
    def __init__(self, db, view, **k):
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        # Extra per-item attributes appended after the id on each line
        # ('|'-separated), e.g. for client-side JavaScript.
        self.additional_fields = k.pop('additional_fields', [])
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        if not self.converter_options.has_key('delimiter'):
            self.converter_options['delimiter'] = '\n'
    def to_request_data(self, schema_type, data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        if data is None:
            return []
        # Each line becomes "<_ref>|<field1>|<field2>..." for the template.
        additional_fields = ['_ref'] + self.additional_fields
        return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
    def from_request_data(self, schema_type, request_data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        # Extract the list of ids from the content, discarding empty lines.
        rows = request_data[0].splitlines()
        rows = (row.strip() for row in rows)
        rows = (row for row in rows if row)
        rows = (row.split('|', 1) for row in rows)
        ids = [row[0] for row in rows]
        # Return default if nothing entered.
        if not ids:
            return self.empty
        # Convert the ids into refs by looking them up in the CouchDB view.
        rows = self.db.view(self.view, keys=ids)
        for row in rows:
            row.value.update({'_ref': row.key})
        return [row.value for row in rows]
    def __repr__(self):
        # Only include attributes that differ from their defaults.
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r'%self.strip)
        if self.converter_options != {'delimiter':','}:
            attributes.append('converter_options=%r'%self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r'%self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r'%self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
+ self.registry['SelectChoiceFacetTreeCouchDB'] = self.selectchoice_couchdbfacet_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
    def selectchoice_couchdb_factory(self, spec, k):
        # Factory for SelectChoiceCouchDB widgets; also serves as the default
        # widget for 'Reference' attributes (see WidgetRegistry.__init__).
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        # %-style template used to render each view row's value as a label.
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # The widget's own 'view' wins; otherwise fall back to the referenced
        # type's view name.
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields')
return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
+ def selectchoice_couchdbfacet_factory(self, spec, k):
+ widgetSpec = spec.get('widget')
+ def options(db, view):
+ facet = list(db.view(view, include_docs=True))[0].doc
+ options = []
+ for item in facet['category']:
+ options.append( (item['path'],item) )
+ return options
+ view = 'facet_%s/all'%widgetSpec['facet']
+
+ return SelectChoiceFacetTreeCouchDB(options=options(self.db,view), **k)
+
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
    def fileupload_factory(self, spec, k):
        # Factory for the image-aware FileUpload widget defined above.
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        def url_ident_factory(obj):
            # Map a stored file (or raw identifier) to the resource path used
            # when building its download/thumbnail URL.
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        url_base = widget_spec.get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('show_download_link',False)
        show_file_preview = widget_spec.get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
        identify_size = widget_spec.get('identify_size',False)
        return FileUpload( filestore=filestore.CachedTempFilestore(),
                url_base=url_base,
                image_thumbnail_default=image_thumbnail_default,
                show_download_link=show_download_link,
                show_file_preview=show_file_preview,
                show_image_thumbnail=show_image_thumbnail,
                url_ident_factory=url_ident_factory,
                identify_size=identify_size,
                **k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """Build a formish form from a couchish form definition.

    Falls back to the couchish widget/type registries when none are given.
    With ``add_id_and_rev=True``, hidden '_id' and '_rev' fields are prepended
    so CouchDB document identity survives the edit round trip.
    """
    if widget_registry is None:
        widget_registry = WidgetRegistry(db)
    if type_registry is None:
        type_registry = TypeRegistry()
    if add_id_and_rev is True:
        # Shallow-copy the definition dict and its fields list so the
        # caller's spec is left untouched.
        definition = dict(definition)
        hidden = [{'name': '_id', 'widget': {'type': 'Hidden'}},
                  {'name': '_rev', 'widget': {'type': 'Hidden'}}]
        definition['fields'] = hidden + list(definition['fields'])
    return formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
|
ish/couchish
|
2f8e387a1f64f2e38d7b9c17c7c9887fc34cfa0d
|
Fix to match new behaviour in formish's SelectChoice template.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index e028978..bc4e3ba 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,424 +1,424 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
- if value == '':
+ if value == ['']:
v = self.empty
else:
- v = value
+ v = value[0]
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
def from_request_data(self, schema_type, data):
data = data or []
return [self.full_options[item] for item in data]
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
additional_fields = ['_ref'] + self.additional_fields
return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
# Extract the list of ids from the content, discarding empty lines.
rows = request_data[0].splitlines()
rows = (row.strip() for row in rows)
rows = (row for row in rows if row)
rows = (row.split('|', 1) for row in rows)
ids = [row[0] for row in rows]
# Return default if nothing entered.
if not ids:
return self.empty
# Convert the ids into refs.
rows = self.db.view(self.view, keys=ids)
for row in rows:
row.value.update({'_ref': row.key})
return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
additional_fields = widget_spec.get('additional_fields')
return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('show_download_link',False)
show_file_preview = widget_spec.get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
de44031a11aef495b4c820d1ebccc52341ff1dfc
|
Extend the SeqRefTextArea widget to provide additional per-item information that JavaScript might need.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 33125b1..0d6da9d 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,421 +1,424 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
    """
    Select widget whose options are loaded lazily from a CouchDB view.

    Submitted values are reference mappings carrying a ``_ref`` key (the
    selected row's id); labels are produced by interpolating the row value
    into ``label_template``.
    """
    # Default "unselected" entry; overridable via the none_option keyword.
    none_option = (None, '- choose -')
    type="SelectChoice"
    template='field.SelectChoice'
    def __init__(self, db, view, label_template, **k):
        """
        :arg db: CouchDB database handle used to run the view
        :arg view: name of the view that yields the options
        :arg label_template: ``%``-style template applied to each row value
        :arg none_option: a tuple of ``(value, label)`` to use as the unselected option
        :arg sort: when True, sort options alphabetically by label
        :arg css_class: a css class to apply to the field
        """
        none_option = k.pop('none_option', UNSET)
        self.sort = k.pop('sort', UNSET)
        if none_option is not UNSET:
            self.none_option = none_option
        widgets.Widget.__init__(self, **k)
        self.db = db
        self.view = view
        self.label_template = label_template
        # Lazily populated caches -- see get_options().
        self.options = None
        self.results = None
    def selected(self, option, value, schemaType):
        # Template helper: emit the selected attribute for the active option.
        # self.empty is inherited from widgets.Widget.
        if value == '':
            v = self.empty
        else:
            v = value
        if option[0] == v:
            return ' selected="selected"'
        else:
            return ''
    def to_request_data(self, schema_type, data):
        """
        Before the widget is rendered, the data is converted to a string
        format. If the data is None then we return an empty string. The
        sequence is the request data representation.
        """
        if data is None:
            return ['']
        # Only the reference id travels through the form.
        string_data = data.get('_ref')
        return [string_data]
    def from_request_data(self, schema_type, request_data):
        """
        After the form has been submitted, the request data is converted into
        the schema type.
        """
        # Ensure the id->value cache is populated before lookup.
        self.get_options()
        string_data = request_data[0]
        if string_data == '':
            return self.empty
        result = self.results[string_data]
        if isinstance(result, dict):
            # Dict row values become the reference mapping directly.
            result['_ref'] = string_data
            return result
        else:
            # Scalar row values are wrapped under a 'data' key.
            return {'_ref':string_data, 'data':result}
    def get_none_option_value(self, schema_type):
        """
        Get the default option (the 'unselected' option)
        """
        none_option = self.none_option[0]
        if none_option is self.empty:
            return ''
        return none_option
    def get_options(self, schema_type=None):
        """
        Return all of the options for the widget, querying the CouchDB view
        on first use and caching both the option list and an id->value map.
        """
        if self.options is not None:
            return self.options
        results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
        self.results = dict((result['id'], result['value']) for result in results)
        _options = [ (result['id'], self.label_template%result['value']) for result in results]
        if self.sort == True:
            _options.sort(lambda x, y: cmp(x[1], y[1]))
        self.options = []
        for (value, label) in _options:
            # The empty sentinel renders as '' so the browser round-trips it.
            if value == self.empty:
                self.options.append( ('',label) )
            else:
                self.options.append( (value,label) )
        return self.options
def get_parent(segments):
    """Return the dotted parent id for a list of id segments ('' at the root)."""
    if len(segments) > 1:
        return '.'.join(segments[:-1])
    return ''
def mktree(options):
    """
    Build a nested tree of ``{'data': (id, label), 'children': [...]}`` nodes
    from a flat sequence of dotted-id options.

    :arg options: sequence of ``(id, label)`` pairs where ``id`` is a dotted
        path such as ``'a.b.c'``. A parent id must appear before any of its
        children, otherwise the parent lookup raises KeyError.
    :returns: the synthetic root node ``{'data': ('root', 'Root'), 'children': [...]}``.
    """
    # Removed: unused local `last_segments_len` from the original.
    root = {'': {'data': ('root', 'Root'), 'children': []}}
    for id, label in options:
        segments = id.split('.')
        # Attach each node to its parent, found via the dotted prefix.
        parent = get_parent(segments)
        root[id] = {'data': (id, label), 'children': []}
        root[parent]['children'].append(root[id])
    return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """
    Checkbox tree widget whose values are full option mappings rather than
    bare ids.

    ``full_options`` is a sequence of ``(path, value)`` pairs; each value is
    presumably a mapping with ``value['data']['label']`` and a ``'path'``
    key -- TODO confirm against the facet documents couchish produces.
    """
    template='field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"
    def __init__(self, full_options, css_class=None):
        # (path, label) pairs drive the rendered checkbox tree.
        self.options = [ (key, value['data']['label']) for key, value in full_options]
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        # NOTE(review): deliberately calls widgets.Widget.__init__ rather
        # than the CheckboxMultiChoiceTree initialiser -- looks like this
        # skips the parent's option processing; confirm before changing.
        widgets.Widget.__init__(self,css_class=css_class)
    def to_request_data(self, schema_type, data):
        # Serialise the selected option mappings down to their path strings.
        if data is None:
            return []
        return [c['path'] for c in data]
    def checked(self, option, values, schema_type):
        # Template helper: mark the checkbox for any selected path.
        if values is not None and option[0] in values:
            return ' checked="checked"'
        else:
            return ''
    def from_request_data(self, schema_type, data):
        # Map submitted paths back to their full option mappings; tolerate
        # a missing/None submission (nothing ticked).
        data = data or []
        return [self.full_options[item] for item in data]
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
- self.strip = k.pop('strip', True)
+ self.additional_fields = k.pop('additional_fields', [])
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
- string_data = [d['_ref'] for d in data]
- return string_data
+ additional_fields = ['_ref'] + self.additional_fields
+ return ['|'.join(d.get(attr, '') for attr in additional_fields) for d in data]
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
- string_data = request_data[0]
- if self.strip is True:
- string_data = string_data.strip()
- if string_data == '':
+ # Extract the list of ids from the content, discarding empty lines.
+ rows = request_data[0].splitlines()
+ rows = (row.strip() for row in rows)
+ rows = (row for row in rows if row)
+ rows = (row.split('|', 1) for row in rows)
+ ids = [row[0] for row in rows]
+ # Return default if nothing entered.
+ if not ids:
return self.empty
- ids = [s.strip() for s in string_data.splitlines()]
- docs = self.db.view(self.view, keys=ids)
- out = []
- for d in docs:
- d.value.update({'_ref': d.key})
- out.append(d.value)
- return out
+ # Convert the ids into refs.
+ rows = self.db.view(self.view, keys=ids)
+ for row in rows:
+ row.value.update({'_ref': row.key})
+ return [row.value for row in rows]
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    def __init__(self, db=None):
        # Keep a handle on the CouchDB database so widget factories can
        # run views when building their options.
        self.db = db
        FormishWidgetRegistry.__init__(self)
        # Register the couchish-specific widget factories on top of the
        # stock formish ones.
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # Reference attributes render as a CouchDB-backed select by default.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        # Builds the plain formish tree widget; options come from a CouchDB
        # view whose documents are assumed to carry a 'label' field --
        # TODO confirm against the view definitions.
        widgetSpec = spec.get('widget')
        def options(db, view):
            # Materialise (doc id, label) pairs from the view rows.
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
- return SeqRefTextArea(self.db, view, **k)
+ additional_fields = widget_spec.get('additional_fields')
+ return SeqRefTextArea(self.db, view, additional_fields=additional_fields, **k)
    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        # Variant backed by a "facet" document: the first (and presumably
        # only) doc of the facet_<name>/all view holds the category list.
        widgetSpec = spec.get('widget')
        def options(db, view):
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            # Each category item becomes a (path, item) option pair.
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        view = 'facet_%s/all'%widgetSpec['facet']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('options',{}).get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
identify_size = widget_spec.get('options',{}).get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish ``Form`` from a couchish JSON form definition.

    :arg definition: form definition mapping (must contain a 'fields' list)
    :arg db: CouchDB database handle, used by the default ``WidgetRegistry``
    :arg add_id_and_rev: when True, prepend hidden '_id'/'_rev' fields so the
        form round-trips CouchDB document identity.
    """
    if widget_registry is None:
        widget_registry=WidgetRegistry(db)
    if type_registry is None:
        type_registry=TypeRegistry()
    if add_id_and_rev is True:
        # Copy the definition dict and its fields item so we can make changes
        # without affecting the spec.
        definition = dict(definition)
        definition['fields'] = list(definition['fields'])
        definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
        definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
    form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
    return form
|
ish/couchish
|
951f9ceec0ca527290e9a5f64353c40a8206de07
|
Remove use of options subkey in fileupload
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index e8a32b1..56b7a64 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,422 +1,422 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == '':
v = self.empty
else:
v = value
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
def from_request_data(self, schema_type, data):
out = []
for item in data:
out.append(self.full_options[item])
return out
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.strip = k.pop('strip', True)
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
string_data = [d['_ref'] for d in data]
return string_data
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
string_data = request_data[0]
if self.strip is True:
string_data = string_data.strip()
if string_data == '':
return self.empty
ids = [s.strip() for s in string_data.splitlines()]
docs = self.db.view(self.view, keys=ids)
out = []
for d in docs:
d.value.update({'_ref': d.key})
out.append(d.value)
return out
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SeqRefTextArea(self.db, view, **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
- url_base = widget_spec.get('options',{}).get('url_base',None)
+ url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
- show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
- show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
- show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
- identify_size = widget_spec.get('options',{}).get('identify_size',False)
+ show_download_link = widget_spec.get('show_download_link',False)
+ show_file_preview = widget_spec.get('show_file_preview',True)
+ show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
+ identify_size = widget_spec.get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
diff --git a/couchish/formish_jsonbuilder.py b/couchish/formish_jsonbuilder.py
index 55a2a34..9c686e6 100644
--- a/couchish/formish_jsonbuilder.py
+++ b/couchish/formish_jsonbuilder.py
@@ -1,248 +1,248 @@
from couchish.schemaish_jsonbuilder import build as schema_build, schemaish_type_registry, strip_stars, split_prefix
import formish
from formish import filestore
from validatish import validator
class FormishWidgetRegistry(object):
"""
A registry for mapping a widget specifiction to a Formish widget factory,
including sensible user-friendly defaults instead of the "developer"
versions Formish defaults to.
"""
def __init__(self):
self.registry = {
'Input': self.input_factory,
'Hidden': self.hidden_factory,
'TextArea': self.textarea_factory,
'SelectChoice': self.selectchoice_factory,
'SelectWithOtherChoice': self.selectwithotherchoice_factory,
'Checkbox': self.checkbox_factory,
'CheckboxMultiChoice': self.checkboxmultichoice_factory,
'RadioChoice': self.radiochoice_factory,
'DateParts': self.dateparts_factory,
}
self.defaults = {
'Date': self.dateparts_factory,
'String': self.input_factory,
'Integer': self.input_factory,
'File': self.fileupload_factory,
'Boolean': self.checkbox_factory,
}
def make_formish_widget(self,item):
"""
Create and return a Formish widget factory for the item type and widget
specifiction.
If widget_spec is provided then it is used to locate/create and return a
widget factory.
If widget_spec is None then either a user-friendly default for the
item_type is returned or it's left to Formish to decide.
The widget_spec dictionary must contain a 'type' key, as well as any
other information needed to build the widget.
Parameters:
item_type: the type of the value (string)
widget_spec: a dictionary containing a widget specification
"""
widget_spec = item.get('widget')
item_type = item.get('type')
# If there is a widget spec then that takes precedence
k = {}
if widget_spec:
if 'css_class' in widget_spec:
k['css_class'] = widget_spec['css_class']
if 'type' in widget_spec:
return self.registry[widget_spec['type']](item, k)
# No widget spec so see if there's a user-friendly default for the data type
default = self.defaults.get(item_type)
if default is not None:
return default(item, k)
# OK, so leave it for Formish to decide then
return None
def input_factory(self, spec, k):
"""
TextInput widget factory.
Specification attributes:
None
"""
return formish.Input(**k)
def hidden_factory(self, spec, k):
"""
Hidden widget factory.
Specification attributes:
None
"""
return formish.Hidden(**k)
def textarea_factory(self, spec, k):
"""
TextArea widget factory.
Specification attributes:
None
"""
return formish.TextArea(**k)
def selectchoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.SelectChoice(options=options, **k)
def radiochoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.RadioChoice(options=options, **k)
def selectwithotherchoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of strings
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.SelectWithOtherChoice(options=options, **k)
def checkboxmultichoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.CheckboxMultiChoice(options=options, **k)
def checkbox_factory(self, spec, k):
"""
Checkbox widget factory.
Specification attributes:
None
"""
return formish.Checkbox(**k)
def dateparts_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
None
"""
return formish.DateParts(day_first=True, **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
- root_dir = widget_spec.get('options',{}).get('root_dir',None)
- url_base = widget_spec.get('options',{}).get('url_base',None)
+ root_dir = widget_spec.get('root_dir',None)
+ url_base = widget_spec.get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
- show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
- show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
- show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
+ show_download_link = widget_spec.get('show_download_link',False)
+ show_file_preview = widget_spec.get('show_file_preview',True)
+ show_image_thumbnail = widget_spec.get('show_image_thumbnail',False)
return formish.FileUpload(
filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
**k )
formish_widget_registry = FormishWidgetRegistry()
def expand_definition(pre_expand_definition):
definition = []
for item in pre_expand_definition['fields']:
field = {}
field['name'] = item['name']
field['fullkey'] = strip_stars(item['name'])
field['keyprefix'], field['key'] = split_prefix(field['fullkey'])
field['starkey'] = item['name']
field['title'] = item.get('title')
field['description'] = item.get('description')
field['type'] = item.get('type','String')
if 'default' in item:
field['default'] = item['default']
field['attr'] = item.get('attr')
if item.get('required') is True:
field['validator'] = validator.Required()
else:
field['validator'] = None
field['widget'] = item.get('widget')
definition.append(field)
return definition
def build(definition, name=None, defaults=None, errors=None, action='', widget_registry=formish_widget_registry, type_registry=schemaish_type_registry):
schema = schema_build(definition, type_registry=type_registry)
definition = expand_definition(definition)
form = formish.Form(schema, name=name, defaults=defaults, errors=errors, action_url=action)
for item in definition:
w = widget_registry.make_formish_widget(item)
if w is not None:
form[item['name']].widget = w
if 'default' in item:
form[item['name']].default = item['default']
return form
|
ish/couchish
|
31f17563b2b38b651817fc5fb9722ec804ace461
|
Fix bug in category multi-select to cope with no request data.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index e8a32b1..33125b1 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,422 +1,421 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="ImageFileUpload"
def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
data['height'] = ['']
data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
else:
data['width'] = [None]
data['height'] = [None]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == '':
v = self.empty
else:
v = value
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
def from_request_data(self, schema_type, data):
- out = []
- for item in data:
- out.append(self.full_options[item])
- return out
+ data = data or []
+ return [self.full_options[item] for item in data]
+
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.strip = k.pop('strip', True)
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
string_data = [d['_ref'] for d in data]
return string_data
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
string_data = request_data[0]
if self.strip is True:
string_data = string_data.strip()
if string_data == '':
return self.empty
ids = [s.strip() for s in string_data.splitlines()]
docs = self.db.view(self.view, keys=ids)
out = []
for d in docs:
d.value.update({'_ref': d.key})
out.append(d.value)
return out
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SeqRefTextArea(self.db, view, **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
url_base = widget_spec.get('options',{}).get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
identify_size = widget_spec.get('options',{}).get('identify_size',False)
return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
ede8d244a5f8b93a83b48f039df4b28e5057de30
|
Don't alter the field definition dicts so things don't get deleted on first use.
|
diff --git a/couchish/schemaish_jsonbuilder.py b/couchish/schemaish_jsonbuilder.py
index 7c00fdd..3878dd3 100644
--- a/couchish/schemaish_jsonbuilder.py
+++ b/couchish/schemaish_jsonbuilder.py
@@ -1,238 +1,240 @@
import schemaish
from validatish import validator
KEY_MUNGING = [
('-', '__dash__'),
('/', '__slash__'),
]
def relative_schemaish_key(item, parent):
"""
Calculate and return the item's key relative to the parent.
"""
# We only care about the keys
item = splitkey(item['key'])
if parent is not None:
parent = splitkey(parent['key'])
# Do basic sanity checks
if parent is not None and not is_descendant_key(item, parent):
raise ValueError("'item' is not a descendant of 'parent'")
# Remove the parent's part of the key, that should already have
# been accounted for as a group item.
if parent is not None:
item = descendant_key_part(item, parent)
# Turn the item key back into a string.
item = joinkey(item)
# Replace characters that formish doesn't allow
for search, replace in KEY_MUNGING:
item = item.replace(search, replace)
return item
def full_schemaish_key(item, parents):
"""
Calculate and return the full formish key of the item specified by the item
chain.
"""
# Build a chain of items with a None at the end that is convenient for
# feeding to relative_schemaish_key.
itemchain = list(parents)
itemchain.append(item)
itemchain.reverse()
itemchain.append(None)
# Build the full key from the relative formish key for each of the pairs,
# all joined together again.
fullkey = [relative_schemaish_key(item, parent) for (item, parent) in pairs(itemchain)]
fullkey.reverse()
return joinkey(fullkey)
def pairs(s):
"""
Simple generator that yields len(s)-1 pairs of items, i.e. each item except
the last is yielded as the first item.
"""
unset = object()
first, second = unset, unset
it = iter(s)
while True:
if first is unset:
first = it.next()
second = it.next()
yield (first, second)
first = second
def splitkey(key):
"""
Split a key in string form into its parts.
"""
return key.split('.')
def joinkey(key):
"""
Join a key's parts to create a key in string form.
"""
return '.'.join(key)
def is_descendant_key(item, ancestor):
"""
Test if item is a descendant of ancestor.
"""
return item[:len(ancestor)] == ancestor
def descendant_key_part(item, ancestor):
"""
Return the part of the item key that is not shared with the ancestor.
"""
return item[len(ancestor):]
def strip_stars(key):
outkey = []
for k in key.split('.'):
if k != '*':
outkey.append(k)
return '.'.join(outkey)
def split_prefix(key):
segments = key.split('.')
return '.'.join(segments[:-1]), segments[-1]
def rec_getattr(obj, attr):
return reduce(getattr, attr.split('.'), obj)
def rec_setattr(obj, attr, value):
attrs = attr.split('.')
setattr(reduce(getattr, attrs[:-1], obj), attrs[-1], value)
class SchemaishTypeRegistry(object):
"""
Registry for converting an field's type specification to a schemaish type
instance.
"""
def __init__(self):
self.registry = {
'String': self.string_factory,
'Integer': self.integer_factory,
'Float': self.float_factory,
'Boolean': self.boolean_factory,
'Decimal': self.decimal_factory,
'Date': self.date_factory,
'Time': self.time_factory,
'DateTime': self.datetime_factory,
'File': self.file_factory,
'Sequence': self.list_factory,
'Tuple': self.tuple_factory,
'Structure': self.structure_factory,
}
self.default_type = 'String'
def make_schemaish_type(self, field):
field_type = field.get('type',self.default_type)
return self.registry[field_type](field)
def string_factory(self, field):
return schemaish.String(**field)
def integer_factory(self, field):
return schemaish.Integer(**field)
def float_factory(self, field):
return schemaish.Float(**field)
def boolean_factory(self, field):
return schemaish.Boolean(**field)
def decimal_factory(self, field):
return schemaish.Decimal(**field)
def date_factory(self, field):
return schemaish.Date(**field)
def time_factory(self, field):
return schemaish.Time(**field)
def datetime_factory(self, field):
return schemaish.DateTime(**field)
def file_factory(self, field):
return schemaish.File(**field)
def list_factory(self, field):
+ field = dict(field)
attr = field.pop('attr')
attr_type = self.make_schemaish_type(attr)
return schemaish.Sequence(attr_type, **field)
def tuple_factory(self, field):
+ field = dict(field)
attr = field.pop('attr')
attr_types = []
for a in attr['types']:
attr_types.append(self.make_schemaish_type(a))
return schemaish.Tuple(attr_types, **field)
def structure_factory(self, field):
return schemaish.Structure(**field)
schemaish_type_registry=SchemaishTypeRegistry()
def expand_definition(pre_expand_definition):
definition = []
for item in pre_expand_definition['fields']:
field = {}
field['name'] = item['name']
field['fullkey'] = strip_stars(item['name'])
field['keyprefix'], field['key'] = split_prefix(field['fullkey'])
field['starkey'] = item['name']
field['title'] = item.get('title')
field['description'] = item.get('description')
field['type'] = item.get('type','String')
field['attr'] = item.get('attr')
if item.get('required') is True:
field['validator'] = validator.Required()
else:
field['validator'] = None
definition.append(field)
return definition
def get_nested_attr(schema_type):
if hasattr(schema_type, 'attr'):
return get_nested_attr(schema_type.attr)
else:
return schema_type
def build(definition, type_registry=schemaish_type_registry):
definition = expand_definition(definition)
schema = schemaish.Structure()
schema_pointer_hash = {'': schema}
for field in definition:
if 'name' not in field:
continue
fullkey = field['fullkey']
keyprefix = field['keyprefix']
key = field['key']
try:
S = schema_pointer_hash[keyprefix]
except KeyError:
raise KeyError('It is likely that you haven\'t defined your keys in the right order. A field must exist before sub-fields are encountered')
schema_type = type_registry.make_schemaish_type(field)
S.add( key, schema_type )
schema_pointer_hash[fullkey] = get_nested_attr(schema_type)
return schema
|
ish/couchish
|
d57d3f576046d883b028dbea988ea83ece9b943d
|
fixed problem round tripping height and width on couchdb file upload
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 7b9faca..e8a32b1 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,418 +1,422 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
- type="FileUpload"
+ type="ImageFileUpload"
- def __init__(self, filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
+ def __init__(self, filestore=filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
- formish.FileUpload.__init__(self, filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
+ formish.FileUpload.__init__(self, filestore=filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
+ data['height'] = ['']
+ data['width'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
key = uuid.uuid4().hex
self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
+ else:
+ data['width'] = [None]
+ data['height'] = [None]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == '':
v = self.empty
else:
v = value
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
def from_request_data(self, schema_type, data):
out = []
for item in data:
out.append(self.full_options[item])
return out
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.strip = k.pop('strip', True)
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
string_data = [d['_ref'] for d in data]
return string_data
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
string_data = request_data[0]
if self.strip is True:
string_data = string_data.strip()
if string_data == '':
return self.empty
ids = [s.strip() for s in string_data.splitlines()]
docs = self.db.view(self.view, keys=ids)
out = []
for d in docs:
d.value.update({'_ref': d.key})
out.append(d.value)
return out
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SeqRefTextArea(self.db, view, **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
- root_dir = widget_spec.get('options',{}).get('root_dir','cache')
url_base = widget_spec.get('options',{}).get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
identify_size = widget_spec.get('options',{}).get('identify_size',False)
- return FileUpload( filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
+ return FileUpload( filestore=filestore.CachedTempFilestore(),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
4d4710fde6166a21a6ed2ded7e026e6f77bbd009
|
Fixed cache hit tests
|
diff --git a/couchish/filestore.py b/couchish/filestore.py
index b914b11..ccc60ff 100644
--- a/couchish/filestore.py
+++ b/couchish/filestore.py
@@ -1,45 +1,44 @@
from __future__ import with_statement
from cStringIO import StringIO
import couchish
class CouchDBAttachmentSource(object):
"""
A file source for the FileResource to use to read attachments from
documents in a CouchDB database.
Note: the application would be responsible for uploading files.
"""
- def __init__(self, couchish_store, name):
+ def __init__(self, couchish_store):
self.couchish = couchish_store
- self.name = name
def get(self, key, cache_tag=None):
# XXX This would be much better written using httplib2 and performing a
# single GET to request the image directly, using the ETag as the
# cache_tag (which is the document _rev anyway). But for now ...
try:
doc_id, attachment_name = key.split('/', 1)
except ValueError:
raise KeyError
# Get the document with the attachment to see if we actually need to
# fetch the whole attachment.
try:
with self.couchish.session() as S:
doc = S.doc_by_id(doc_id)
except couchish.NotFound:
raise KeyError(key)
# Check the attachment stub exists.
attachment_stub = doc.get('_attachments', {}).get(attachment_name)
if attachment_stub is None:
raise KeyError(key)
# See if the caller's version is up to date.
if cache_tag and doc['_rev'] == cache_tag:
return (doc['_rev'], [('Content-Type',None)], None)
# Get the attachment content.
with self.couchish.session() as S:
content = S.get_attachment(doc_id, attachment_name)
return (doc['_rev'], [('Content-Type',attachment_stub['content_type'])], StringIO(content))
diff --git a/couchish/tests/test_filestore.py b/couchish/tests/test_filestore.py
index 8a9b83a..7274dba 100644
--- a/couchish/tests/test_filestore.py
+++ b/couchish/tests/test_filestore.py
@@ -1,48 +1,50 @@
import unittest
import uuid
import couchdb
from couchish import config, filestore, store
class TestSource(unittest.TestCase):
def setUp(self):
self.db_name = 't' + uuid.uuid4().hex
self.server = couchdb.Server()
self.db = self.server.create(self.db_name)
self.store = store.CouchishStore(self.db, config.Config({}, []))
- self.source = filestore.CouchDBAttachmentSource(self.store, None)
+ self.source = filestore.CouchDBAttachmentSource(self.store)
def tearDown(self):
del self.server[self.db_name]
def test_get_cache_hit(self):
self.db['doc'] = {}
doc = self.db['doc']
self.db.put_attachment(doc, 'Yay!', 'foo.txt', 'text/plain')
- (cache_tag, content_type, f) = self.source.get('doc/foo.txt', cache_tag=doc['_rev'])
+ (cache_tag, headers, f) = self.source.get('doc/foo.txt', cache_tag=doc['_rev'])
+ headers = dict(headers)
assert cache_tag == doc['_rev']
- assert content_type is None
+ assert headers['Content-Type'] is None
assert f is None
def test_get_cache_miss(self):
self.db['doc'] = {}
doc = self.db['doc']
self.db.put_attachment(doc, 'Yay!', 'foo', 'text/plain')
- (cache_tag, content_type, f) = self.source.get('doc/foo', cache_tag='miss')
+ (cache_tag, headers, f) = self.source.get('doc/foo', cache_tag='miss')
+ headers = dict(headers)
try:
assert cache_tag == doc['_rev']
- assert content_type == 'text/plain'
+ assert headers['Content-Type'] == 'text/plain'
assert f is not None
assert f.read() == 'Yay!'
finally:
f.close()
def test_missing_doc(self):
self.assertRaises(KeyError, self.source.get, 'missing_doc/attachment')
def test_missing_attachment(self):
self.db['doc'] = {}
self.assertRaises(KeyError, self.source.get, 'doc/missing_attachment')
|
ish/couchish
|
5898fc0d820fa05d467382bae6e75347f37f44cb
|
cope with new formish file api and to fix problems with sequences of files
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index bde9019..7b9faca 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,418 +1,418 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
-from formish import widgets, filestore, safefilename
+from formish import widgets, filestore, safefilename, util
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="FileUpload"
def __init__(self, filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
- filename = '%s-%s'%(uuid.uuid4().hex,fieldstorage.filename)
- self.filestore.put(filename, fieldstorage.file, fieldstorage.type, uuid.uuid4().hex)
- data['name'] = [filename]
+ key = uuid.uuid4().hex
+ self.filestore.put(key, fieldstorage.file, uuid.uuid4().hex, [('Content-Type',fieldstorage.type),('Filename',fieldstorage.filename)])
+ data['name'] = [util.encode_file_resource_path('tmp', key)]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
- filename = request_data['name'][0]
+ key = util.decode_file_resource_path(request_data['name'][0])[1]
try:
- content_type, cache_tag, f = self.filestore.get(filename)
+ cache_tag, headers, f = self.filestore.get(key)
except KeyError:
return None
+ headers = dict(headers)
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
- return SchemaFile(f, filename, content_type, metadata=metadata)
+ return SchemaFile(f, headers['Filename'], headers['Content-Type'],metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == '':
v = self.empty
else:
v = value
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
widgets.Widget.__init__(self,css_class=css_class)
def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
def from_request_data(self, schema_type, data):
out = []
for item in data:
out.append(self.full_options[item])
return out
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.strip = k.pop('strip', True)
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
string_data = [d['_ref'] for d in data]
return string_data
def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
string_data = request_data[0]
if self.strip is True:
string_data = string_data.strip()
if string_data == '':
return self.empty
ids = [s.strip() for s in string_data.splitlines()]
docs = self.db.view(self.view, keys=ids)
out = []
for d in docs:
d.value.update({'_ref': d.key})
out.append(d.value)
return out
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SeqRefTextArea(self.db, view, **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
- root_dir = widget_spec.get('options',{}).get('root_dir',None)
+ root_dir = widget_spec.get('options',{}).get('root_dir','cache')
url_base = widget_spec.get('options',{}).get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
identify_size = widget_spec.get('options',{}).get('identify_size',False)
- return FileUpload(
- filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
+ return FileUpload( filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
diff --git a/couchish/filehandling.py b/couchish/filehandling.py
index a809f2c..4cd4c81 100644
--- a/couchish/filehandling.py
+++ b/couchish/filehandling.py
@@ -1,212 +1,209 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from dottedish import dotted
import base64
import uuid
from schemaish.type import File
from StringIO import StringIO
import shutil
from couchish import jsonutil
def get_attr(prefix, parent=None):
# combine prefix and parent where prefix is a list and parent is a dotted string
if parent is None:
segments = [str(segment) for segment in prefix]
return '.'.join(segments)
if prefix is None:
return parent
segments = [str(segment) for segment in prefix]
if parent != '':
segments += parent.split('.')
attr = '.'.join( segments )
return attr
def get_files(data, original=None, prefix=None):
# scan old data to collect any file refs and then scan new data for file changes
files = {}
inlinefiles = {}
original_files = {}
get_files_from_original(data, original, files, inlinefiles, original_files, prefix)
get_files_from_data(data, original, files, inlinefiles, original_files, prefix)
return data, files, inlinefiles, original_files
def has_unmodified_signature(f):
if f.file is None:
return True
return False
def make_dotted_or_emptydict(d):
- if isinstance(d, dict):
- return dotted(d)
- return dotted({})
+ return dotted(d)
def get_files_from_data(data, original, files, inlinefiles, original_files, prefix):
if isinstance(data, File):
get_file_from_item(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
- if not isinstance(data, dict):
+ if not isinstance(data, dict) and not isinstance(data, list):
return
dd = dotted(data)
ddoriginal = make_dotted_or_emptydict(original)
for k,f in dd.dotteditems():
if isinstance(f, File):
if isinstance(ddoriginal.get(k), File):
of = ddoriginal[k]
else:
of = None
get_file_from_item(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def get_file_from_item(f, of, files, inlinefiles, original_files, fullprefix):
if f.file is None:
# if we have no original data then we presume the file should remain unchanged
f.id = of.id
if f.mimetype is None:
f.mimetype = of.mimetype
if f.filename is None:
f.filename = of.filename
if not hasattr(f, 'metadata') or f.metadata is None or f.metadata=={}:
f.metadata = getattr(of, 'metadata', None)
else:
if of and hasattr(of,'id'):
f.id = of.id
else:
f.id = uuid.uuid4().hex
if getattr(f,'inline',False) is True:
filestore = inlinefiles
else:
filestore = files
if hasattr(f, 'inline'):
del f.inline
# add the files for attachment handling and remove the file data from document
if getattr(f,'b64', None):
filestore[fullprefix] = jsonutil.CouchishFile(f.file, f.filename, f.mimetype, f.id, metadata = f.metadata, b64=True)
del f.b64
else:
fh = StringIO()
shutil.copyfileobj(f.file, fh)
fh.seek(0)
filestore[fullprefix] = jsonutil.CouchishFile(fh, f.filename, f.mimetype, f.id, metadata = f.metadata)
del f.file
def get_file_from_original(f, of, files, inlinefiles, original_files, fullprefix):
if not isinstance(f, File):
original_files[fullprefix] = of
def get_files_from_original(data, original, files, inlinefiles, original_files, prefix):
if isinstance(original, File):
get_file_from_original(data, original, files, inlinefiles, original_files, get_attr(prefix))
return
- if not isinstance(original, dict):
+ if not isinstance(original, dict) and not isinstance(original, list):
return
dd = make_dotted_or_emptydict(data)
ddoriginal = dotted(original)
for k, of in ddoriginal.dotteditems():
if isinstance(of, File):
f = dd.get(k)
- get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, kparent))
+ get_file_from_original(f, of, files, inlinefiles, original_files, get_attr(prefix, k))
def _parse_changes_for_files(session, deletions, additions, changes):
""" returns deletions, additions """
additions = list(additions)
changes = list(changes)
deletions = list(deletions)
all_separate_files = {}
all_inline_files = {}
for addition in additions:
addition, files, inlinefiles, original_files_notused = get_files(addition)
if files:
all_separate_files.setdefault(addition['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(addition['_id'],{}).update(inlinefiles)
_extract_inline_attachments(addition, inlinefiles)
all_original_files = {}
changes = list(changes)
for n, changeset in enumerate(changes):
d, cs = changeset
cs = list(cs)
for m, c in enumerate(cs):
if c['action'] in ['edit','create','remove']:
c['value'], files, inlinefiles, original_files = get_files(c.get('value'), original=c.get('was'), prefix=c['path'])
cs[m] = c
if files:
all_separate_files.setdefault(d['_id'],{}).update(files)
if inlinefiles:
all_inline_files.setdefault(d['_id'],{}).update(inlinefiles)
all_original_files.setdefault(d['_id'], {}).update(original_files)
_extract_inline_attachments(d, inlinefiles)
changes[n] = (d, cs)
-
return all_original_files, all_separate_files
def _extract_inline_attachments(doc, files):
"""
Move the any attachment data that we've found into the _attachments attribute
"""
for attr, f in files.items():
if f.b64:
data = f.file.replace('\n', '')
else:
data = base64.encodestring(f.file.read()).replace('\n','')
f.file.close()
del f.file
del f.b64
del f.inline
del f.doc_id
doc.setdefault('_attachments',{})[f.id] = {'content_type': f.mimetype,'data': data}
def _handle_separate_attachments(session, deletions, additions):
"""
add attachments that aren't inline and remove any attachments without references
"""
# XXX This needs to cope with files moving when sequences are re-numbered. We need
# XXX to talk to matt about what a renumbering like this looks like
for id, attrfiles in additions.items():
doc = session.get(id)
stubdoc = {'_id':doc['_id'], '_rev':doc['_rev']}
for attr, f in attrfiles.items():
data = ''
if f.file:
if f.b64:
data = base64.decodestring(f.file)
else:
data = f.file.read()
f.file.close()
session._db.put_attachment(stubdoc, data, filename=f.id, content_type=f.mimetype)
del f.file
del f.b64
del f.inline
del f.doc_id
for id, attrfiles in deletions.items():
# XXX had to use _db because delete attachment freeaked using session version.
doc = session._db.get(id)
for attr, f in attrfiles.items():
session._db.delete_attachment(doc, f.id)
additions = {}
deletions = {}
diff --git a/couchish/filestore.py b/couchish/filestore.py
index 2d6efda..b914b11 100644
--- a/couchish/filestore.py
+++ b/couchish/filestore.py
@@ -1,45 +1,45 @@
from __future__ import with_statement
from cStringIO import StringIO
import couchish
class CouchDBAttachmentSource(object):
"""
A file source for the FileResource to use to read attachments from
documents in a CouchDB database.
Note: the application would be responsible for uploading files.
"""
def __init__(self, couchish_store, name):
self.couchish = couchish_store
self.name = name
def get(self, key, cache_tag=None):
# XXX This would be much better written using httplib2 and performing a
# single GET to request the image directly, using the ETag as the
# cache_tag (which is the document _rev anyway). But for now ...
try:
doc_id, attachment_name = key.split('/', 1)
except ValueError:
raise KeyError
# Get the document with the attachment to see if we actually need to
# fetch the whole attachment.
try:
with self.couchish.session() as S:
doc = S.doc_by_id(doc_id)
except couchish.NotFound:
raise KeyError(key)
# Check the attachment stub exists.
attachment_stub = doc.get('_attachments', {}).get(attachment_name)
if attachment_stub is None:
raise KeyError(key)
# See if the caller's version is up to date.
if cache_tag and doc['_rev'] == cache_tag:
- return (doc['_rev'], None, None)
+ return (doc['_rev'], [('Content-Type',None)], None)
# Get the attachment content.
with self.couchish.session() as S:
content = S.get_attachment(doc_id, attachment_name)
- return (doc['_rev'], attachment_stub['content_type'], StringIO(content))
+ return (doc['_rev'], [('Content-Type',attachment_stub['content_type'])], StringIO(content))
|
ish/couchish
|
45238bf16c19239d8391da6486cd372d2cecac7f
|
Fix after change to formish's filestore stuff.
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 60eac14..bde9019 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,417 +1,418 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="FileUpload"
def __init__(self, filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
filename = '%s-%s'%(uuid.uuid4().hex,fieldstorage.filename)
self.filestore.put(filename, fieldstorage.file, fieldstorage.type, uuid.uuid4().hex)
data['name'] = [filename]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
return data
def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
filename = request_data['name'][0]
try:
content_type, cache_tag, f = self.filestore.get(filename)
except KeyError:
return None
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, filename, content_type, metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
type="SelectChoice"
template='field.SelectChoice'
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == '':
v = self.empty
else:
v = value
if option[0] == v:
return ' selected="selected"'
else:
return ''
def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
    """
    Checkbox-tree widget whose options come from CouchDB facet documents.

    ``full_options`` is a sequence of (path, node) pairs where each node
    carries a 'data' dict with at least a 'label' key. Selections round-trip
    as lists of category paths.
    """
    template='field.CheckboxMultiChoiceTreeCouchDB'
    type = "CheckboxMultiChoiceTree"

    def __init__(self, full_options, css_class=None):
        self.options = [(path, node['data']['label']) for path, node in full_options]
        self.full_options = dict(full_options)
        self.optiontree = mktree(self.options)
        widgets.Widget.__init__(self,css_class=css_class)

    def to_request_data(self, schema_type, data):
        # Represent the current selection as its list of category paths.
        if data is None:
            return []
        return [category['path'] for category in data]

    def checked(self, option, values, schema_type):
        # HTML attribute fragment for a selected checkbox, else ''.
        if values is None:
            return ''
        return ' checked="checked"' if option[0] in values else ''

    def from_request_data(self, schema_type, data):
        # Map submitted paths back onto the full option documents.
        return [self.full_options[path] for path in data]
class SeqRefTextArea(formish.Input):
    """
    Textarea input field

    Holds one CouchDB document id per line; on submission the ids are
    resolved against a view and returned as a sequence of reference dicts,
    each carrying '_ref' plus the view row's value fields.

    :arg cols: set the cols attr on the textarea element
    :arg rows: set the cols attr on the textarea element
    """
    template = 'field.SeqRefTextArea'
    type="SeqRefTextArea"

    def __init__(self, db, view, **k):
        # cols/rows/strip are presentation options popped off before the
        # remaining keywords are handed to formish.Input.
        self.cols = k.pop('cols', None)
        self.rows = k.pop('rows', None)
        self.strip = k.pop('strip', True)
        self.db = db
        self.view = view
        formish.Input.__init__(self, **k)
        # Sequence items are newline-delimited unless configured otherwise.
        if not self.converter_options.has_key('delimiter'):
            self.converter_options['delimiter'] = '\n'

    def to_request_data(self, schema_type, data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        # Render only the reference ids, one per line in the textarea.
        if data is None:
            return []
        string_data = [d['_ref'] for d in data]
        return string_data

    def from_request_data(self, schema_type, request_data):
        """
        We're using the converter options to allow processing sequence data
        using the csv module
        """
        string_data = request_data[0]
        if self.strip is True:
            string_data = string_data.strip()
        if string_data == '':
            return self.empty
        # One document id per line; resolve them all with a single keyed
        # view query. Each row's value is annotated with its '_ref' id.
        ids = [s.strip() for s in string_data.splitlines()]
        docs = self.db.view(self.view, keys=ids)
        out = []
        for d in docs:
            d.value.update({'_ref': d.key})
            out.append(d.value)
        return out

    def __repr__(self):
        # Only attributes differing from their defaults are shown.
        attributes = []
        if self.strip is False:
            attributes.append('strip=%r'%self.strip)
        if self.converter_options != {'delimiter':','}:
            attributes.append('converter_options=%r'%self.converter_options)
        if self.css_class:
            attributes.append('css_class=%r'%self.css_class)
        if self.empty is not None:
            attributes.append('empty=%r'%self.empty)
        return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
    """
    Widget registry extended with CouchDB-aware widgets.

    Adds reference/select/checkbox-tree widget factories that read their
    options from CouchDB views, and a file-upload factory backed by a
    cached temporary filestore.
    """
    def __init__(self, db=None):
        self.db = db
        FormishWidgetRegistry.__init__(self)
        self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
        self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
        self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
        # Reference attributes default to a CouchDB-backed select.
        self.defaults['Reference'] = self.selectchoice_couchdb_factory

    def selectchoice_couchdb_factory(self, spec, k):
        """Build a SelectChoiceCouchDB widget from a field spec."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        label_template = widget_spec.get('label', '%s')
        k['sort'] = widget_spec.get('sort')
        attr = spec.get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        # An explicit widget 'view' wins; otherwise fall back to the
        # attribute's referenced view.
        view = widget_spec.get('view', refersto)
        return SelectChoiceCouchDB(self.db, view, label_template, **k)

    def checkboxmultichoicetree_couchdb_factory(self, spec, k):
        """Build a checkbox tree whose options come from a CouchDB view."""
        widgetSpec = spec.get('widget')
        def options(db, view):
            # (id, label) pairs pulled from the view's documents.
            return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
        view = widgetSpec['options']
        return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)

    def seqreftextarea_factory(self, spec, k):
        """Build a SeqRefTextArea widget from a field spec."""
        if spec is None:
            spec = {}
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        attr = spec.get('attr',{}).get('attr',{})
        if attr is None:
            refersto = None
        else:
            refersto = attr.get('refersto')
        view = widget_spec.get('view', refersto)
        return SeqRefTextArea(self.db, view, **k)

    def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
        """Build a checkbox tree from a facet document's categories."""
        widgetSpec = spec.get('widget')
        def options(db, view):
            # The first document of the facet view carries the category list.
            facet = list(db.view(view, include_docs=True))[0].doc
            options = []
            for item in facet['category']:
                options.append( (item['path'],item) )
            return options
        view = 'facet_%s/all'%widgetSpec['facet']
        return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)

    def fileupload_factory(self, spec, k):
        """Build a FileUpload widget backed by a cached temp filestore."""
        widget_spec = spec.get('widget')
        if widget_spec is None:
            widget_spec = {}
        def url_ident_factory(obj):
            # Stored files are addressed as '<doc_id>/<attachment id>'.
            if isinstance(obj,schemaish.type.File):
                return '%s/%s'%(obj.doc_id, obj.id)
            elif obj:
                return obj
            else:
                return None
        root_dir = widget_spec.get('options',{}).get('root_dir',None)
        url_base = widget_spec.get('options',{}).get('url_base',None)
        image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
        show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
        show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
        show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
        identify_size = widget_spec.get('options',{}).get('identify_size',False)
        # NOTE(review): the original text contained unresolved diff markers
        # here; resolved to the newer form where CachedTempFilestore wraps a
        # FileSystemHeaderedFilestore instead of taking root_dir directly.
        return FileUpload(
            filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
            url_base=url_base,
            image_thumbnail_default=image_thumbnail_default,
            show_download_link=show_download_link,
            show_file_preview=show_file_preview,
            show_image_thumbnail=show_image_thumbnail,
            url_ident_factory=url_ident_factory,
            identify_size=identify_size,
            **k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
    """
    Build a formish Form from a couchish JSON definition.

    When ``add_id_and_rev`` is True, hidden '_id' and '_rev' fields are
    prepended so CouchDB document identity survives the form round trip.
    The caller's definition is copied before modification.
    """
    if widget_registry is None:
        widget_registry=WidgetRegistry(db)
    if type_registry is None:
        type_registry=TypeRegistry()
    if add_id_and_rev is True:
        # Copy the definition dict and its fields list so we can make
        # changes without mutating the caller's spec.
        definition = dict(definition)
        fields = list(definition['fields'])
        fields.insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
        fields.insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
        definition['fields'] = fields
    return formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
diff --git a/couchish/formish_jsonbuilder.py b/couchish/formish_jsonbuilder.py
index 2677c8a..55a2a34 100644
--- a/couchish/formish_jsonbuilder.py
+++ b/couchish/formish_jsonbuilder.py
@@ -1,247 +1,248 @@
from couchish.schemaish_jsonbuilder import build as schema_build, schemaish_type_registry, strip_stars, split_prefix
import formish
from formish import filestore
from validatish import validator
class FormishWidgetRegistry(object):
"""
A registry for mapping a widget specifiction to a Formish widget factory,
including sensible user-friendly defaults instead of the "developer"
versions Formish defaults to.
"""
def __init__(self):
self.registry = {
'Input': self.input_factory,
'Hidden': self.hidden_factory,
'TextArea': self.textarea_factory,
'SelectChoice': self.selectchoice_factory,
'SelectWithOtherChoice': self.selectwithotherchoice_factory,
'Checkbox': self.checkbox_factory,
'CheckboxMultiChoice': self.checkboxmultichoice_factory,
'RadioChoice': self.radiochoice_factory,
'DateParts': self.dateparts_factory,
}
self.defaults = {
'Date': self.dateparts_factory,
'String': self.input_factory,
'Integer': self.input_factory,
'File': self.fileupload_factory,
'Boolean': self.checkbox_factory,
}
def make_formish_widget(self,item):
"""
Create and return a Formish widget factory for the item type and widget
specifiction.
If widget_spec is provided then it is used to locate/create and return a
widget factory.
If widget_spec is None then either a user-friendly default for the
item_type is returned or it's left to Formish to decide.
The widget_spec dictionary must contain a 'type' key, as well as any
other information needed to build the widget.
Parameters:
item_type: the type of the value (string)
widget_spec: a dictionary containing a widget specification
"""
widget_spec = item.get('widget')
item_type = item.get('type')
# If there is a widget spec then that takes precedence
k = {}
if widget_spec:
if 'css_class' in widget_spec:
k['css_class'] = widget_spec['css_class']
if 'type' in widget_spec:
return self.registry[widget_spec['type']](item, k)
# No widget spec so see if there's a user-friendly default for the data type
default = self.defaults.get(item_type)
if default is not None:
return default(item, k)
# OK, so leave it for Formish to decide then
return None
def input_factory(self, spec, k):
"""
TextInput widget factory.
Specification attributes:
None
"""
return formish.Input(**k)
def hidden_factory(self, spec, k):
"""
Hidden widget factory.
Specification attributes:
None
"""
return formish.Hidden(**k)
def textarea_factory(self, spec, k):
"""
TextArea widget factory.
Specification attributes:
None
"""
return formish.TextArea(**k)
def selectchoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.SelectChoice(options=options, **k)
def radiochoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.RadioChoice(options=options, **k)
def selectwithotherchoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of strings
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.SelectWithOtherChoice(options=options, **k)
def checkboxmultichoice_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
'options': a sequence of mappings containing 'name' and
'description' keys.
"""
widget_spec = spec.get('widget')
first = widget_spec['options'][0]
if isinstance(first, dict):
options = [(o['name'], o['description']) for o in widget_spec['options']]
elif isinstance(first, tuple) or isinstance(first, list):
options = [(o[0], o[1]) for o in widget_spec['options']]
else:
options = [(o, o) for o in widget_spec['options']]
return formish.CheckboxMultiChoice(options=options, **k)
def checkbox_factory(self, spec, k):
"""
Checkbox widget factory.
Specification attributes:
None
"""
return formish.Checkbox(**k)
def dateparts_factory(self, spec, k):
"""
SelectChoice widget factory.
Specification attributes:
None
"""
return formish.DateParts(day_first=True, **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
root_dir = widget_spec.get('options',{}).get('root_dir',None)
url_base = widget_spec.get('options',{}).get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
- return formish.FileUpload(filestore.CachedTempFilestore(root_dir=root_dir), \
+ return formish.FileUpload(
+ filestore.CachedTempFilestore(filestore.FileSystemHeaderedFilestore(root_dir=root_dir)),
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
**k )
formish_widget_registry = FormishWidgetRegistry()
def expand_definition(pre_expand_definition):
definition = []
for item in pre_expand_definition['fields']:
field = {}
field['name'] = item['name']
field['fullkey'] = strip_stars(item['name'])
field['keyprefix'], field['key'] = split_prefix(field['fullkey'])
field['starkey'] = item['name']
field['title'] = item.get('title')
field['description'] = item.get('description')
field['type'] = item.get('type','String')
if 'default' in item:
field['default'] = item['default']
field['attr'] = item.get('attr')
if item.get('required') is True:
field['validator'] = validator.Required()
else:
field['validator'] = None
field['widget'] = item.get('widget')
definition.append(field)
return definition
def build(definition, name=None, defaults=None, errors=None, action='', widget_registry=formish_widget_registry, type_registry=schemaish_type_registry):
schema = schema_build(definition, type_registry=type_registry)
definition = expand_definition(definition)
form = formish.Form(schema, name=name, defaults=defaults, errors=errors, action_url=action)
for item in definition:
w = widget_registry.make_formish_widget(item)
if w is not None:
form[item['name']].widget = w
if 'default' in item:
form[item['name']].default = item['default']
return form
|
ish/couchish
|
501abf003a603a149b0e1d64755365eabdbefcee
|
fixed form labels to cope with recent formish changes
|
diff --git a/couchish/couchish_formish_jsonbuilder.py b/couchish/couchish_formish_jsonbuilder.py
index 02862df..60eac14 100644
--- a/couchish/couchish_formish_jsonbuilder.py
+++ b/couchish/couchish_formish_jsonbuilder.py
@@ -1,417 +1,417 @@
import schemaish, formish, subprocess, uuid, os
from jsonish import pythonjson as json
from couchish.formish_jsonbuilder import build as formish_build
from couchish.schemaish_jsonbuilder import SchemaishTypeRegistry
from couchish.formish_jsonbuilder import FormishWidgetRegistry
from formish import widgets, filestore, safefilename
from PIL import Image
from schemaish.type import File as SchemaFile
from dottedish import get_dict_from_dotted_dict
from convertish.convert import string_converter
def get_size(filename):
IDENTIFY = '/usr/bin/identify'
stdout = subprocess.Popen([IDENTIFY, filename], stdout=subprocess.PIPE).communicate()[0]
if 'JPEG' in stdout:
type = 'JPEG'
if 'PNG' in stdout:
type = 'PNG'
if 'GIF' in stdout:
type = 'GIF'
dims = stdout.split(type)[1].split(' ')[1]
width, height = [int(s) for s in dims.split('x')]
return width, height
class Reference(schemaish.attr.Attribute):
""" a generic reference
"""
type = "Reference"
def __init__(self, **k):
self.refersto = k['attr']['refersto']
#self.uses = k['attr']['uses']
schemaish.attr.Attribute.__init__(self,**k)
class TypeRegistry(SchemaishTypeRegistry):
def __init__(self):
SchemaishTypeRegistry.__init__(self)
self.registry['Reference'] = self.reference_factory
def reference_factory(self, field):
return Reference(**field)
UNSET = object()
class FileUpload(formish.FileUpload):
type="FileUpload"
def __init__(self, filestore, show_file_preview=True, show_download_link=False, show_image_thumbnail=False, url_base=None, \
css_class=None, image_thumbnail_default=None, url_ident_factory=None, identify_size=False):
formish.FileUpload.__init__(self, filestore, show_file_preview=show_file_preview, show_download_link=show_download_link, \
show_image_thumbnail=show_image_thumbnail, url_base=url_base, css_class=css_class, image_thumbnail_default=image_thumbnail_default, url_ident_factory=url_ident_factory)
self.identify_size = identify_size
- def pre_parse_request(self, schema_type, data, full_request_data):
+ def pre_parse_incoming_request_data(self, schema_type, data, full_request_data):
"""
File uploads are wierd; in out case this means assymetric. We store the
file in a temporary location and just store an identifier in the field.
This at least makes the file look symmetric.
"""
if data.get('remove', [None])[0] is not None:
data['name'] = ['']
data['mimetype'] = ['']
return data
fieldstorage = data.get('file', [''])[0]
if getattr(fieldstorage,'file',None):
filename = '%s-%s'%(uuid.uuid4().hex,fieldstorage.filename)
self.filestore.put(filename, fieldstorage.file, fieldstorage.type, uuid.uuid4().hex)
data['name'] = [filename]
data['mimetype'] = [fieldstorage.type]
if self.identify_size is True and fieldstorage != '':
fieldstorage.file.seek(0)
width, height = Image.open(fieldstorage.file).size
data['width'] = [width]
data['height'] = [height]
return data
- def convert(self, schema_type, request_data):
+ def from_request_data(self, schema_type, request_data):
"""
Creates a File object if possible
"""
# XXX We could add a file converter that converts this to a string data?
if request_data['name'] == ['']:
return None
elif request_data['name'] == request_data['default']:
return SchemaFile(None, None, None)
else:
filename = request_data['name'][0]
try:
content_type, cache_tag, f = self.filestore.get(filename)
except KeyError:
return None
if self.identify_size == True:
metadata = {'width':request_data['width'][0], 'height': request_data['height'][0]}
else:
metadata = None
return SchemaFile(f, filename, content_type, metadata=metadata)
class SelectChoiceCouchDB(widgets.Widget):
none_option = (None, '- choose -')
- _template='SelectChoice'
-
type="SelectChoice"
+ template='field.SelectChoice'
+
def __init__(self, db, view, label_template, **k):
"""
:arg options: either a list of values ``[value,]`` where value is used for the label or a list of tuples of the form ``[(value, label),]``
:arg none_option: a tuple of ``(value, label)`` to use as the unselected option
:arg css_class: a css class to apply to the field
"""
none_option = k.pop('none_option', UNSET)
self.sort = k.pop('sort', UNSET)
if none_option is not UNSET:
self.none_option = none_option
widgets.Widget.__init__(self, **k)
self.db = db
self.view = view
self.label_template = label_template
self.options = None
self.results = None
def selected(self, option, value, schemaType):
if value == '':
v = self.empty
else:
v = value
if option[0] == v:
return ' selected="selected"'
else:
return ''
- def pre_render(self, schema_type, data):
+ def to_request_data(self, schema_type, data):
"""
Before the widget is rendered, the data is converted to a string
format.If the data is None then we return an empty string. The sequence
is request data representation.
"""
if data is None:
return ['']
string_data = data.get('_ref')
return [string_data]
- def convert(self, schema_type, request_data):
+ def from_request_data(self, schema_type, request_data):
"""
after the form has been submitted, the request data is converted into
to the schema type.
"""
self.get_options()
string_data = request_data[0]
if string_data == '':
return self.empty
result = self.results[string_data]
if isinstance(result, dict):
result['_ref'] = string_data
return result
else:
return {'_ref':string_data, 'data':result}
def get_none_option_value(self, schema_type):
"""
Get the default option (the 'unselected' option)
"""
none_option = self.none_option[0]
if none_option is self.empty:
return ''
return none_option
def get_options(self, schema_type=None):
"""
Return all of the options for the widget
"""
if self.options is not None:
return self.options
results = [json.decode_from_dict(item) for item in self.db.view(self.view)]
self.results = dict((result['id'], result['value']) for result in results)
_options = [ (result['id'], self.label_template%result['value']) for result in results]
if self.sort == True:
_options.sort(lambda x, y: cmp(x[1], y[1]))
self.options = []
for (value, label) in _options:
if value == self.empty:
self.options.append( ('',label) )
else:
self.options.append( (value,label) )
return self.options
def get_parent(segments):
if len(segments) == 1:
return ''
else:
return '.'.join(segments[:-1])
def mktree(options):
last_segments_len = 1
root = {'': {'data':('root', 'Root'), 'children':[]} }
for id, label in options:
segments = id.split('.')
parent = get_parent(segments)
root[id] = {'data': (id, label), 'children':[]}
root[parent]['children'].append(root[id])
return root['']
class CheckboxMultiChoiceTreeCouchDB(formish.CheckboxMultiChoiceTree):
- _template='CheckboxMultiChoiceTreeCouchDB'
+ template='field.CheckboxMultiChoiceTreeCouchDB'
type = "CheckboxMultiChoiceTree"
- def __init__(self, full_options, cssClass=None):
+ def __init__(self, full_options, css_class=None):
self.options = [ (key, value['data']['label']) for key, value in full_options]
self.full_options = dict(full_options)
self.optiontree = mktree(self.options)
- widgets.Widget.__init__(self,cssClass=cssClass)
+ widgets.Widget.__init__(self,css_class=css_class)
- def pre_render(self, schema_type, data):
+ def to_request_data(self, schema_type, data):
if data is None:
return []
return [c['path'] for c in data]
def checked(self, option, values, schema_type):
if values is not None and option[0] in values:
return ' checked="checked"'
else:
return ''
- def convert(self, schema_type, data):
+ def from_request_data(self, schema_type, data):
out = []
for item in data:
out.append(self.full_options[item])
return out
class SeqRefTextArea(formish.Input):
"""
Textarea input field
:arg cols: set the cols attr on the textarea element
:arg rows: set the cols attr on the textarea element
"""
- _template = 'SeqRefTextArea'
+ template = 'field.SeqRefTextArea'
type="SeqRefTextArea"
def __init__(self, db, view, **k):
self.cols = k.pop('cols', None)
self.rows = k.pop('rows', None)
self.strip = k.pop('strip', True)
self.db = db
self.view = view
formish.Input.__init__(self, **k)
if not self.converter_options.has_key('delimiter'):
self.converter_options['delimiter'] = '\n'
- def pre_render(self, schema_type, data):
+ def to_request_data(self, schema_type, data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
if data is None:
return []
string_data = [d['_ref'] for d in data]
return string_data
- def convert(self, schema_type, request_data):
+ def from_request_data(self, schema_type, request_data):
"""
We're using the converter options to allow processing sequence data
using the csv module
"""
string_data = request_data[0]
if self.strip is True:
string_data = string_data.strip()
if string_data == '':
return self.empty
ids = [s.strip() for s in string_data.splitlines()]
docs = self.db.view(self.view, keys=ids)
out = []
for d in docs:
d.value.update({'_ref': d.key})
out.append(d.value)
return out
def __repr__(self):
attributes = []
if self.strip is False:
attributes.append('strip=%r'%self.strip)
if self.converter_options != {'delimiter':','}:
attributes.append('converter_options=%r'%self.converter_options)
if self.css_class:
attributes.append('css_class=%r'%self.css_class)
if self.empty is not None:
attributes.append('empty=%r'%self.empty)
return 'couchish_formish_jsonbuilder.%s(%s)'%(self.__class__.__name__, ', '.join(attributes))
class WidgetRegistry(FormishWidgetRegistry):
def __init__(self, db=None):
self.db = db
FormishWidgetRegistry.__init__(self)
self.registry['SeqRefTextArea'] = self.seqreftextarea_factory
self.registry['SelectChoiceCouchDB'] = self.selectchoice_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDB'] = self.checkboxmultichoicetree_couchdb_factory
self.registry['CheckboxMultiChoiceTreeCouchDBFacet'] = self.checkboxmultichoicetree_couchdbfacet_factory
self.defaults['Reference'] = self.selectchoice_couchdb_factory
def selectchoice_couchdb_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
label_template = widget_spec.get('label', '%s')
k['sort'] = widget_spec.get('sort')
attr = spec.get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SelectChoiceCouchDB(self.db, view, label_template, **k)
def checkboxmultichoicetree_couchdb_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
return [(item.id,item.doc['label']) for item in list(db.view(view, include_docs=True))]
view = widgetSpec['options']
return formish.CheckboxMultiChoiceTree(options=options(self.db,view), **k)
def seqreftextarea_factory(self, spec, k):
if spec is None:
spec = {}
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
attr = spec.get('attr',{}).get('attr',{})
if attr is None:
refersto = None
else:
refersto = attr.get('refersto')
view = widget_spec.get('view', refersto)
return SeqRefTextArea(self.db, view, **k)
def checkboxmultichoicetree_couchdbfacet_factory(self, spec, k):
widgetSpec = spec.get('widget')
def options(db, view):
facet = list(db.view(view, include_docs=True))[0].doc
options = []
for item in facet['category']:
options.append( (item['path'],item) )
return options
view = 'facet_%s/all'%widgetSpec['facet']
return CheckboxMultiChoiceTreeCouchDB(full_options=options(self.db,view), **k)
def fileupload_factory(self, spec, k):
widget_spec = spec.get('widget')
if widget_spec is None:
widget_spec = {}
def url_ident_factory(obj):
if isinstance(obj,schemaish.type.File):
return '%s/%s'%(obj.doc_id, obj.id)
elif obj:
return obj
else:
return None
root_dir = widget_spec.get('options',{}).get('root_dir',None)
url_base = widget_spec.get('options',{}).get('url_base',None)
image_thumbnail_default = widget_spec.get('image_thumbnail_default','/images/missing-image.jpg')
show_download_link = widget_spec.get('options',{}).get('show_download_link',False)
show_file_preview = widget_spec.get('options',{}).get('show_file_preview',True)
show_image_thumbnail = widget_spec.get('options',{}).get('show_image_thumbnail',False)
identify_size = widget_spec.get('options',{}).get('identify_size',False)
return FileUpload(filestore.CachedTempFilestore(root_dir=root_dir), \
url_base=url_base,
image_thumbnail_default=image_thumbnail_default,
show_download_link=show_download_link,
show_file_preview=show_file_preview,
show_image_thumbnail=show_image_thumbnail,
url_ident_factory=url_ident_factory,
identify_size=identify_size,
**k )
def build(definition, db=None, name=None, defaults=None, errors=None, action='', widget_registry=None, type_registry=None, add_id_and_rev=False):
if widget_registry is None:
widget_registry=WidgetRegistry(db)
if type_registry is None:
type_registry=TypeRegistry()
if add_id_and_rev is True:
# Copy the definition fict and its fields item so we can make changes
# without affecting the spec.
definition = dict(definition)
definition['fields'] = list(definition['fields'])
definition['fields'].insert(0, {'name': '_rev', 'widget':{'type': 'Hidden'}})
definition['fields'].insert(0, {'name': '_id', 'widget':{'type': 'Hidden'}})
form = formish_build(definition, name=name, defaults=defaults, errors=errors, action=action, widget_registry=widget_registry, type_registry=type_registry)
return form
|
ish/couchish
|
7d85de3eb0404e5c1827770384ca70d9a470dba8
|
fixed problem with deeply nested sequences
|
diff --git a/couchish/couchish_jsonbuilder.py b/couchish/couchish_jsonbuilder.py
index b98a04a..ff7cfd0 100644
--- a/couchish/couchish_jsonbuilder.py
+++ b/couchish/couchish_jsonbuilder.py
@@ -1,193 +1,196 @@
from couchish.create_view import getjs
from couchish.schemaish_jsonbuilder import strip_stars
from string import Template
def buildview(view):
"""
function (doc) {
if (doc.model_type == 'book'){
for (var i1 in doc.metadata) {
for (var i2 in doc.metadata[i1].authors) {
emit(doc.metadata[i1].authors[i2]._ref, null);
}
}
}
}
"""
main_template = Template( \
""" function (doc) {
$body
}""")
if_template = Template( \
""" if (doc.model_type == '$type'){
$body
}
""")
for_template = Template( \
""" for (var i$n in doc$attr) {
$body
}""")
emit_template = Template( \
""" emit(doc$attr._ref, null);""")
out = ''
for type, attrs in view.items():
out_fors = ''
for attr in attrs:
templ_if = if_template.substitute({'type': type, 'body':'$body'})
segments = attr.replace('.*','*').split('.')
cleansegments = attr.replace('.*','').split('.')
out_attr = ''
templ_fors = '$body\n'
for n,segment in enumerate(segments):
if segment.endswith('*'):
out_loop_var = out_attr + '.%s'%cleansegments[n]
out_attr += '.%s[i%s]'%(cleansegments[n], n)
templ_for = for_template.substitute(n=n, attr=out_loop_var, body='$body')
templ_fors = Template(templ_fors).substitute(body=templ_for)
else:
out_attr += '.%s'%cleansegments[n]
out_emit = emit_template.substitute(attr=out_attr)
out_fors += Template(templ_fors).substitute(body=out_emit)
out += Template(templ_if).substitute(body=out_fors)
return (main_template.substitute(body=out), None)
def build_refersto_view(uses):
model_types = set()
if isinstance(uses, basestring):
model_type = uses.split('.')[0]
uses = [uses]
else:
for use in uses:
mt = use.split('.')[0]
model_types.add(mt)
if len(model_types) > 1:
raise ValueError('Can only use one model type in "uses" at the moment')
model_type = list(model_types)[0]
viewdef = 'function (doc) {\n'
viewdef += ' if (doc.model_type == \''+model_type+'\'){\n'
viewdef += ' emit(doc._id, %s )\n'%getjs(uses)
viewdef += ' }\n'
viewdef += '}\n'
return viewdef
def get_view(view, views, views_by_viewname, model_type=None):
if model_type is None:
# Then we have to have an explicit model type set if we want to use auto built views
model_type = view.get('model_type')
if 'designdoc' not in view:
# Then we use the type as the design doc
view['designdoc'] = model_type
if 'map' in view:
# Then we have explicit javascript functions
map = view['map']
reduce = view.get('reduce')
elif 'type' in view:
# Then we're auto building views if possible
if 'name' not in view:
# Use the view type for the name
view['name'] = view['type']
if view['type'] == 'all':
map, reduce = ("function(doc) { if (doc.model_type == '%s') { emit(doc._id, null); } }"%model_type,None)
if view['type'] == 'all_count':
map, reduce = ("function(doc) { if (doc.model_type == '%s') { emit(doc._id, 1); } }"%model_type, "function(keys, values) { return sum(values); }")
else:
map = build_refersto_view(view['uses'])
reduce = view.get('reduce')
if 'url' not in view:
# Then we need to provide one
if view['designdoc'] is None:
# Then we use the couchish namespace
raise KeyError('Cannot work out a design doc for view %s'%view.get('name'))
else:
view['url'] = '%s/%s'%(view['designdoc'],view['name'])
views_by_viewname[view['url']] = {'url':view['url'], 'key': view.get('key','_id'), 'uses': view.get('uses')}
views_by_viewname[view['url']]['map'] = (map,reduce)
views[view['url']] = (map,reduce)
+def get_reference(field):
+ if 'attr' not in field:
+ return field.get('refersto',None), field.get('uses',None)
+ return get_reference(field['attr'])
+
def get_views(models_definition, views_definition):
views = {}
views_by_viewname = {}
views_by_uses = {}
viewnames_by_attribute = {}
attributes_by_viewname = {}
for view in views_definition:
get_view(view, views, views_by_viewname)
for model_type, definition in models_definition.items():
for view in definition.get('views',[]):
get_view(view, views, views_by_viewname, model_type=model_type)
-
-
parents = []
field_to_view = {}
for model_type, definition in models_definition.items():
for field in definition['fields']:
# some uses need to know whether the attr is composed of any sequences
field['key'] = strip_stars(field['name'])
if field.get('type','').startswith('Sequence'):
fieldname = '%s.*'%field['name']
else:
fieldname = field['name']
# If we have any references, build the appropriate lookups
- if 'attr' in field and 'refersto' in field['attr']:
- refersto = field['attr']['refersto']
- view = views_by_viewname[refersto]
- if 'uses' in field['attr']:
- uses = field['attr']['uses']
- else:
- uses = view['uses']
- # Build the reference views dynamically if not explicit
-
- if isinstance(uses, basestring):
- views_by_uses.setdefault(view['url']+'-rev',{}).setdefault(model_type,[]).append( fieldname )
- viewnames_by_attribute.setdefault(uses, set()).add(refersto)
- attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,set()).add( fieldname.replace('.*','*') )
- else:
- views_by_uses.setdefault(view['url']+'-rev',{}).setdefault(model_type,[]).append( fieldname )
- attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,set()).add( fieldname.replace('.*','*') )
- for use in uses:
- viewnames_by_attribute.setdefault(use, set()).add(refersto)
+ if 'attr' in field:
+ refersto, uses = get_reference(field['attr'])
+
+ if refersto:
+ view = views_by_viewname[refersto]
+ if not uses:
+ uses = view['uses']
+
+ # Build the reference views dynamically if not explicit
+ if isinstance(uses, basestring):
+ views_by_uses.setdefault(view['url']+'-rev',{}).setdefault(model_type,[]).append( fieldname )
+ viewnames_by_attribute.setdefault(uses, set()).add(refersto)
+ attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,set()).add( fieldname.replace('.*','*') )
+ else:
+ views_by_uses.setdefault(view['url']+'-rev',{}).setdefault(model_type,[]).append( fieldname )
+ attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,set()).add( fieldname.replace('.*','*') )
+ for use in uses:
+ viewnames_by_attribute.setdefault(use, set()).add(refersto)
# Create any 'viewby' views
if 'viewby' in field:
if '*' in fieldname:
raise Exception('Can\'t generate viewby views on attributes in sequences')
if field['viewby'] == True:
url = '%s/by_%s'%(model_type,fieldname)
else:
url = field['viewby']
views[url] = ("function(doc) { if (doc.model_type=='%s') { emit(doc.%s, null ); } }"%(model_type,field['name']),None)
if 'viewby_count' in field:
if field['viewby_count'] == True:
url = '%s/by_%s_count'%(model_type,fieldname)
else:
url = field['viewby_count']
views[url] = ("function(doc) { if (doc.model_type == '%s') { emit(doc._id, 1); } }"%model_type, "function(keys, values) { return sum(values); }")
# Generate dynamic views for reference reverse lookups
for url, view in views_by_uses.items():
views[url] = buildview(view)
out = {'views': views,'views_by_viewname': views_by_viewname, 'viewnames_by_attribute': viewnames_by_attribute, 'attributes_by_viewname':attributes_by_viewname,'views_by_uses':views_by_uses}
return out
|
ish/couchish
|
6bb08b367391b955b7c2eeb5b52f00e749b3eebb
|
Fix model definition to match changes made to the file format.
|
diff --git a/couchish/tests/data/test_couchish_post.yaml b/couchish/tests/data/test_couchish_post.yaml
index 7e5d447..8ee107b 100644
--- a/couchish/tests/data/test_couchish_post.yaml
+++ b/couchish/tests/data/test_couchish_post.yaml
@@ -1,7 +1,8 @@
fields:
- name: subject
- name: author
type: Reference
- refersto: customdes/author_name
+ attr:
+ refersto: customdes/author_name
|
ish/couchish
|
a3171ff8ecd55adca1e5943aa23dc36334501308
|
Fix test.
|
diff --git a/couchish/tests/test_couchish_formish_jsonbuilder.py b/couchish/tests/test_couchish_formish_jsonbuilder.py
index 7a495c3..490a3bb 100644
--- a/couchish/tests/test_couchish_formish_jsonbuilder.py
+++ b/couchish/tests/test_couchish_formish_jsonbuilder.py
@@ -1,77 +1,77 @@
import unittest
from couchish.couchish_formish_jsonbuilder import build
import yaml
import webob
from BeautifulSoup import BeautifulSoup
import urllib
DATADIR = 'couchish/tests/data/%s'
class Test(unittest.TestCase):
def request(self, d):
r = webob.Request.blank('http://localhost/')
r.method = 'POST'
r.content_type = 'application/x-www-form-urlencoded'
- kvpairs = []
+ kvpairs = [('__formish_form__', 'form')]
for k in d.dottedkeys():
lastsegment = k.split('.')[-1]
try:
int(lastsegment)
k = '.'.join(k.split('.')[:-1])
except ValueError:
pass
for v in d[k]:
kvpairs.append( (k,v) )
r.body = urllib.urlencode(kvpairs)
return r
def assertRoundTrip(self, f, testdata):
r = self.request(f._get_request_data())
d = f.validate(r)
self.assertEquals(d, testdata)
def assertIdHasValue(self, f, id, v):
soup = BeautifulSoup(f())
self.assertEquals(soup.find(id=id)['value'],v)
def assertIdAttrHasValue(self, f, id, attr, v):
soup = BeautifulSoup(f())
s = soup.find(id=id)
assert 'attr' in s
self.assertEquals(s['attr'],v)
def assertIdAttrHasNoValue(self, f, id, attr):
soup = BeautifulSoup(f())
s = soup.find(id=id)
assert 'attr' not in s
def test_simple(self):
book_definition = yaml.load( open(DATADIR%'test_couchish_book.yaml').read() )
dvd_definition = yaml.load( open(DATADIR%'test_couchish_dvd.yaml').read() )
post_definition = yaml.load( open(DATADIR%'test_couchish_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'test_couchish_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'test_couchish_views.yaml').read() )
f = build(author_definition)
self.assertIdHasValue(f, 'form-first_name', '')
# Test None data
f = build(author_definition)
testdata = {'first_name': None, 'last_name': None}
f.defaults = testdata
self.assertIdHasValue(f, 'form-first_name', '')
self.assertRoundTrip(f, testdata)
# Test sample data
f = build(author_definition)
testdata = {'first_name': None, 'last_name': 'Goodall'}
f.defaults = testdata
self.assertIdHasValue(f, 'form-last_name', 'Goodall')
self.assertRoundTrip(f, testdata)
def test_fileupload(self):
upload_definition = yaml.load( open(DATADIR%'test_upload.yaml').read() )
f = build(upload_definition)
|
ish/couchish
|
408e3ce4401248b707455e426e62a496456d6313
|
Rewrite docs_by_(id|type) in terms of docs_by_view, and docs_by_view in terms of view.
|
diff --git a/couchish/store.py b/couchish/store.py
index b3646ee..5bfcd77 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,249 +1,245 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=jsonutil.decode_from_dict)
self.file_additions = {}
self.file_deletions = {}
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
    def delete(self, doc_or_tuple):
        """
        Delete the given document.

        :arg doc_or_tuple: either a document dict (carrying couch id and
            revision) or a 2-tuple of ``(id, rev)``.
        :returns: whatever the underlying session's delete returns.
        """
        if isinstance(doc_or_tuple, tuple):
            id, rev = doc_or_tuple
            # NOTE(review): couch documents normally carry '_rev', not 'rev';
            # confirm the session accepts this key before relying on the
            # tuple form.
            doc = {'_id': id, 'rev': rev}
        else:
            doc = doc_or_tuple
        return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, **options):
"""
Generate the sequence of documents with the given ids.
"""
options['keys'] = ids
- options['include_docs'] = True
- results = self.session.view('_all_docs', **options)
- return (row.doc for row in results.rows)
+ return self.docs_by_view('_all_docs', **options)
def docs_by_type(self, type, **options):
"""
Generate the sequence of docs of a given type.
"""
- options['include_docs'] = True
- results = self.session.view('%s/all'%type, **options)
- return (row.doc for row in results.rows)
+ return self.docs_by_view('%s/all'%type, **options)
def docs_by_view(self, view, **options):
options['include_docs'] = True
- results = self.session.view(view, **options)
+ results = self.view(view, **options)
return (row.doc for row in results.rows)
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
    def _post_flush_hook(self, session, deletions, additions, changes):
        """
        Propagate edits into documents that hold denormalised copies
        ("references") of the changed documents.

        For every changed document this works out, via the config's lookup
        tables, which reference views are affected, then rewrites the cached
        reference data inside each referring document.

        :arg session: the underlying couchdb session (lookups here go
            through ``self`` instead).
        :arg deletions: deleted docs; not processed by this hook.
        :arg additions: added docs; not processed by this hook.
        :arg changes: sequence of ``(doc, actions)`` pairs from the change
            tracker.
        """
        # Sentinel to indicate we haven't retrieved the ref view data yet.
        NO_REF_DATA = object()
        # Easy access to the config.
        views_by_viewname = self.store.config.viewdata['views_by_viewname']
        viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
        attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
        # Updates any documents that refer to documents that have been changed.
        for doc, actions in changes:
            doc_type = doc['model_type']
            # Attributes edited on this doc, qualified with the model type,
            # e.g. 'author.last_name'.
            edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
                         for action in actions if action['action'] == 'edit')
            # Build a set of all the views affected by the changed attributes.
            views = set()
            for attr in edited:
                views.update(viewnames_by_attribute.get(attr, []))
            for view in views:
                # Lazy load the ref_data.
                ref_data = NO_REF_DATA
                attrs_by_type = attributes_by_viewname[view]
                view_url = views_by_viewname[view]['url']
                # XXX should build a full key here, but let's assume just the
                # id for a moment.
                ref_key = doc['_id']
                # The '-rev' view maps referenced id -> referring documents.
                for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
                    # Fetch the ref data for this ref view, if we don't already
                    # have it.
                    if ref_data is NO_REF_DATA:
                        ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
                        # Normalise to a dict carrying the '_ref' marker so it
                        # can be matched against the referring doc's copy.
                        if isinstance(ref_data, dict):
                            ref_data['_ref'] = ref_key
                        else:
                            ref_data = {'_ref': ref_key, 'data': ref_data}
                    for attr in attrs_by_type[ref_doc['model_type']]:
                        # Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
                        # e.g. we may have authors*. or metadata*.authors*
                        self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
    def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
        """
        Walk ``ref_doc`` along the attribute path ``segments`` and, wherever
        a reference matching ``ref_data['_ref']`` is found at the end of the
        path, update it in place with the new reference data.

        A segment ending in ``*`` marks a sequence: the walk fans out over
        every item of the list stored at that attribute.

        :arg ref_doc: (sub-)document currently being inspected.
        :arg segments: remaining attribute path, e.g. ``['metadata', 'authors*']``.
        :arg ref_data: replacement reference data; must carry '_ref'.
        :arg prefix: path segments already consumed (bookkeeping only).
        """
        # Initialise or copy the prefix list, because we're about to change it.
        if prefix is None:
            prefix = []
        else:
            prefix = list(prefix)
        if segments == []:
            # End of the path: update only if this is the matching reference.
            if ref_doc['_ref'] == ref_data['_ref']:
                ref_doc.update(ref_data)
        else:
            current, segments = segments[0], segments[1:]
            # A trailing '*' flags that this attribute holds a sequence.
            if current.endswith('*'):
                is_seq = True
            else:
                is_seq = False
            current = current.replace('*','')
            prefix.append(current)
            current_ref = ref_doc.get(current)
            if current_ref is None:
                # The container doesn't exist on this document; nothing to do.
                return
            if is_seq:
                for ref_doc_ref in current_ref:
                    self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
            else:
                self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
    """
    Change tracker that leaves file payloads untracked — presumably so the
    separate attachment handling sees the raw file objects; TODO confirm.
    """
    def _track(self, obj, path):
        # File objects are returned as-is instead of being wrapped in the
        # tracking proxy.
        if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
            return obj
        return super(Tracker, self)._track(obj, path)
class Session(session.Session):
    """
    couchdbsession Session configured to build its change tracker from the
    file-skipping ``Tracker`` defined in this module.
    """
    tracker_factory = Tracker
|
ish/couchish
|
d875d1499d0615f91e7911b729ea378fcd609370
|
Remove pointless copying.
|
diff --git a/couchish/store.py b/couchish/store.py
index 070190a..b3646ee 100644
--- a/couchish/store.py
+++ b/couchish/store.py
@@ -1,252 +1,249 @@
"""
Views we can build:
* by type, one view should be ok
* x_by_y views, from config (optional)
* ref and ref reversed views, one pair per relationship
"""
from couchdb.design import ViewDefinition
from couchdbsession import a8n, session
import schemaish.type
from couchish import filehandling, errors, jsonutil
class CouchishStore(object):
def __init__(self, db, config):
self.db = db
self.config = config
def sync_views(self):
for url, view in self.config.viewdata['views'].items():
segments = url.split('/')
designdoc = segments[0]
name = '/'.join(segments[1:])
view = ViewDefinition(designdoc, name, view[0], view[1])
view.get_doc(self.db)
view.sync(self.db)
def session(self):
"""
Create an editing session.
"""
return CouchishStoreSession(self)
class CouchishStoreSession(object):
def __init__(self, store):
self.store = store
self.session = Session(store.db,
pre_flush_hook=self._pre_flush_hook,
post_flush_hook=self._post_flush_hook,
encode_doc=jsonutil.encode_to_dict,
decode_doc=jsonutil.decode_from_dict)
self.file_additions = {}
self.file_deletions = {}
def __enter__(self):
"""
"with" statement entry.
"""
return self
def __exit__(self, type, value, traceback):
"""
"with" statement exit.
"""
if type is None:
self.flush()
else:
self.reset()
def create(self, doc):
"""
Create a document.
"""
return self.session.create(doc)
def delete(self, doc_or_tuple):
"""
Delete the given document.
"""
if isinstance(doc_or_tuple, tuple):
id, rev = doc_or_tuple
doc = {'_id': id, 'rev': rev}
else:
doc = doc_or_tuple
return self.session.delete(doc)
def get_attachment(self, id_or_doc, filename):
return self.session._db.get_attachment(id_or_doc, filename)
def put_attachment(self, doc, content, filename=None, content_type=None):
return self.session._db.put_attachment(doc, content,
filename=filename, content_type=content_type)
def delete_attachment(self, doc, filename):
return self.session._db.delete_attachment(doc, filename)
def doc_by_id(self, id):
"""
Return a single document, given it's ID.
"""
doc = self.session.get(id)
if doc is None:
raise errors.NotFound("No document with id %r" % (id,))
return doc
def doc_by_view(self, view, key=None):
if key is not None:
results = self.session.view(view, startkey=key, endkey=key, limit=2,
include_docs=True)
else:
results = self.session.view(view, limit=2, include_docs=True)
rows = results.rows
if len(rows) == 0:
message = "No document in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.NotFound(message)
elif len(rows) == 2:
message = "Too many documents in view %r"%view
if key is not None:
message += " with key %r"%key
raise errors.TooMany(message)
return rows[0].doc
def docs_by_id(self, ids, **options):
"""
Generate the sequence of documents with the given ids.
"""
- options = dict(options)
options['keys'] = ids
options['include_docs'] = True
results = self.session.view('_all_docs', **options)
return (row.doc for row in results.rows)
def docs_by_type(self, type, **options):
"""
Generate the sequence of docs of a given type.
"""
- options = dict(options)
options['include_docs'] = True
results = self.session.view('%s/all'%type, **options)
return (row.doc for row in results.rows)
def docs_by_view(self, view, **options):
- options = dict(options)
options['include_docs'] = True
results = self.session.view(view, **options)
return (row.doc for row in results.rows)
def view(self, view, **options):
"""
Call and return a view.
"""
return self.session.view(view, **options)
def _pre_flush_hook(self, session, deletions, additions, changes):
file_deletions, file_additions = filehandling._parse_changes_for_files(session, deletions, additions, changes)
self.file_deletions.update(file_deletions)
self.file_additions.update(file_additions)
def flush(self):
"""
Flush the session.
"""
returnvalue = self.session.flush()
filehandling._handle_separate_attachments(self.session, self.file_deletions, self.file_additions)
self.file_additions = {}
self.file_deletions = {}
return returnvalue
def reset(self):
"""
Reset the session, forgetting everything it knows.
"""
self.session.reset()
def _post_flush_hook(self, session, deletions, additions, changes):
# Sentinel to indicate we haven't retrieved the ref view data yet.
NO_REF_DATA = object()
# Easy access to the config.
views_by_viewname = self.store.config.viewdata['views_by_viewname']
viewnames_by_attribute = self.store.config.viewdata['viewnames_by_attribute']
attributes_by_viewname = self.store.config.viewdata['attributes_by_viewname']
# Updates any documents that refer to documents that have been changed.
for doc, actions in changes:
doc_type = doc['model_type']
edited = set('.'.join([doc_type, '.'.join(str(p) for p in action['path'])])
for action in actions if action['action'] == 'edit')
# Build a set of all the views affected by the changed attributes.
views = set()
for attr in edited:
views.update(viewnames_by_attribute.get(attr, []))
for view in views:
# Lazy load the ref_data.
ref_data = NO_REF_DATA
attrs_by_type = attributes_by_viewname[view]
view_url = views_by_viewname[view]['url']
# XXX should build a full key here, but let's assume just the
# id for a moment.
ref_key = doc['_id']
for ref_doc in self.docs_by_view(view_url+'-rev', startkey=ref_key, endkey=ref_key):
# Fetch the ref data for this ref view, if we don't already
# have it.
if ref_data is NO_REF_DATA:
ref_data = self.view(view_url, startkey=ref_key, limit=1).rows[0].value
if isinstance(ref_data, dict):
ref_data['_ref'] = ref_key
else:
ref_data = {'_ref': ref_key, 'data': ref_data}
for attr in attrs_by_type[ref_doc['model_type']]:
# Any of the attrs sections could be a sequence.. we need to iterate over them all to find matches..
# e.g. we may have authors*. or metadata*.authors*
self._find_and_match_nested_item(ref_doc, attr.split('.'), ref_data)
def _find_and_match_nested_item(self, ref_doc, segments, ref_data, prefix=None):
# Initialise of copy the prefix list, because we're about to change it.
if prefix is None:
prefix = []
else:
prefix = list(prefix)
if segments == []:
if ref_doc['_ref'] == ref_data['_ref']:
ref_doc.update(ref_data)
else:
current, segments = segments[0], segments[1:]
if current.endswith('*'):
is_seq = True
else:
is_seq = False
current = current.replace('*','')
prefix.append(current)
current_ref = ref_doc.get(current)
if current_ref is None:
return
if is_seq:
for ref_doc_ref in current_ref:
self._find_and_match_nested_item(ref_doc_ref, segments, ref_data, prefix)
else:
self._find_and_match_nested_item(current_ref, segments, ref_data, prefix)
class Tracker(a8n.Tracker):
def _track(self, obj, path):
if isinstance(obj, (jsonutil.CouchishFile, schemaish.type.File)):
return obj
return super(Tracker, self)._track(obj, path)
class Session(session.Session):
tracker_factory = Tracker
|
ish/couchish
|
a2abd9d391a96301b5648c31b3f14e35c9917b6e
|
Add tests for looking up docs by missing key.
|
diff --git a/couchish/tests/test_couchish_store.py b/couchish/tests/test_couchish_store.py
index c2e1416..689351e 100644
--- a/couchish/tests/test_couchish_store.py
+++ b/couchish/tests/test_couchish_store.py
@@ -1,336 +1,354 @@
from __future__ import with_statement
import unittest
import os.path
import couchdb
from couchish import config, errors, store
from couchish.tests import util
def data_filename(filename, namespace=None):
if namespace:
return os.path.join('couchish/tests/data/%s'%namespace, filename)
return os.path.join('couchish/tests/data', filename)
def type_filename(type,namespace=None):
return data_filename('test_couchish_%s.yaml' % type, namespace)
db_name = 'test-couchish'
def strip_id_rev(doc):
couchdoc = dict(doc)
couchdoc.pop('_id')
couchdoc.pop('_rev')
return couchdoc
class Test(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name)) for name in ['book', 'author', 'post', 'dvd']),
data_filename('test_couchish_views.yaml')
))
self.S.sync_views()
def test_simple_reference(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_simple_reference_addingdictionary(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = {'firstpart':'Woo','lastpart':'dall'}
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': {'firstpart':'Woo','lastpart':'dall'}},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}
def test_multiple_changes(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Goodall'}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title',
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': matt_id, 'last_name': 'Woodall'}}
def test_doc_by_id_not_found(self):
sess = self.S.session()
self.assertRaises(errors.NotFound, sess.doc_by_id, 'missing')
class TestDeep(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'deepref')) for name in ['book', 'author']),
type_filename('views','deepref')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'metadata': {
'writtenby': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'},
'coauthored': {'_ref': tim_id, 'last_name': 'Parkin'}}}
class TestDeep2(util.TempDatabaseMixin, unittest.TestCase):
def test_missing_ref_container(self):
"""
Check references inside non-existant containers.
The flush hook drills into the document hunting for references but it
should check that whatever a reference is inside actually exists first.
"""
cfg = config.Config({
'author': {'fields': [
{'name': 'name'}
]},
'book': {'fields': [
{'name': 'title'},
{'name': 'author', 'type': 'Reference()', 'refersto': 'test/author_summary'},
{'name': 'authors', 'type': 'Sequence(Reference())', 'refersto': 'test/author_summary'},
]},
},
[{'name': 'author_summary', 'designdoc': 'test', 'uses': ['author.name']}])
couchish_store = store.CouchishStore(self.db, cfg)
couchish_store.sync_views()
S = couchish_store.session()
author_id = S.create({'model_type': 'author', 'name': 'Matt'})
book_id = S.create({'model_type': 'book', 'title': 'My First Colouring Book',
'author': {'_ref': author_id, 'name': 'Matt'}})
S.flush()
# XXX Shouldn't need to do create a new session to make more changes.
S = couchish_store.session()
author = S.doc_by_id(author_id)
author['name'] = 'Jessica'
S.flush()
class TestRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'refinseq')) for name in ['book', 'author']),
type_filename('views','refinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'},
{'_ref': tim_id, 'last_name': 'Parkin'}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}, {'_ref': tim_id, 'last_name': 'Parkin'}]}
class TestNestedRefsInSequences(unittest.TestCase):
def setUp(self):
server = couchdb.Server()
if db_name in server:
del server[db_name]
self.db = server.create(db_name)
self.S = store.CouchishStore(self.db, config.Config.from_yaml(
dict((name,type_filename(name,'nestedrefinseq')) for name in ['book', 'author']),
type_filename('views','nestedrefinseq')
))
self.S.sync_views()
def test_simple(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
tim = {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'}
tim_id = sess.create(tim)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}
def test_twoentries(self):
sess = self.S.session()
matt = {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'}
matt_id = sess.create(matt)
book = {'model_type': 'book', 'title': 'Title', 'authors':[
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
{'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}}]}
book_id = sess.create(book)
sess.flush()
sess = self.S.session()
matt = sess.doc_by_id(matt_id)
matt['last_name'] = 'Woodall'
sess.flush()
matt = strip_id_rev(self.db[matt_id])
book = strip_id_rev(self.db[book_id])
assert matt == {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Woodall'}
assert book == {'model_type': 'book', 'title': 'Title', 'authors': [ {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}, {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}
class TestNestedRefsInNestedSequences(unittest.TestCase):
    """Changes to a referenced document must propagate into references
    stored inside a sequence that is itself nested in another sequence."""

    def setUp(self):
        # Recreate the test database from scratch so each test is isolated.
        server = couchdb.Server()
        if db_name in server:
            del server[db_name]
        self.db = server.create(db_name)
        model_files = dict(
            (name, type_filename(name, 'nestedrefinnestedseq'))
            for name in ['book', 'author'])
        self.S = store.CouchishStore(self.db, config.Config.from_yaml(
            model_files,
            type_filename('views', 'nestedrefinnestedseq')))
        self.S.sync_views()

    def test_simple(self):
        session = self.S.session()
        matt_id = session.create(
            {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'})
        tim_id = session.create(
            {'model_type': 'author', 'first_name': 'Tim', 'last_name': 'Parkin'})
        authors = [
            {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
            {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}},
        ]
        book_id = session.create({'model_type': 'book', 'title': 'Title',
                                  'people': [{'authors': authors}]})
        session.flush()
        # Rename the referenced author in a fresh session.
        session = self.S.session()
        author = session.doc_by_id(matt_id)
        author['last_name'] = 'Woodall'
        session.flush()
        stored_author = strip_id_rev(self.db[matt_id])
        stored_book = strip_id_rev(self.db[book_id])
        assert stored_author == {'model_type': 'author', 'first_name': 'Matt',
                                 'last_name': 'Woodall'}
        assert stored_book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [
            {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}},
            {'nested': {'_ref': tim_id, 'last_name': 'Parkin'}}]}]}

    def test_twoentries(self):
        # Two references to the same author inside the nested sequence:
        # both copies must be refreshed on flush.
        session = self.S.session()
        matt_id = session.create(
            {'model_type': 'author', 'first_name': 'Matt', 'last_name': 'Goodall'})
        book = {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [
            {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
            {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Goodall'}},
        ]}]}
        book_id = session.create(book)
        session.flush()
        session = self.S.session()
        author = session.doc_by_id(matt_id)
        author['last_name'] = 'Woodall'
        session.flush()
        stored_author = strip_id_rev(self.db[matt_id])
        stored_book = strip_id_rev(self.db[book_id])
        assert stored_author == {'model_type': 'author', 'first_name': 'Matt',
                                 'last_name': 'Woodall'}
        assert stored_book == {'model_type': 'book', 'title': 'Title', 'people': [{'authors': [
            {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}},
            {'nested': {'_ref': matt_id, 'first_name': 'Matt', 'last_name': 'Woodall'}}]}]}
+class TestMissingKeys(util.TempDatabaseMixin, unittest.TestCase):
+
+ def setUp(self):
+ super(TestMissingKeys, self).setUp()
+ couchish_store = store.CouchishStore(self.db, config.Config({}, {}))
+ couchish_store.sync_views()
+ self.session = couchish_store.session()
+ for i in range(5):
+ self.session.create({'_id': str(i)})
+ self.session.flush()
+
+ def test_docs_by_id(self):
+ docs = list(self.session.docs_by_id(['3', '4', '5']))
+ assert docs[-1] is None
+
+ def test_docs_by_view(self):
+ docs = list(self.session.docs_by_view('_all_docs', keys=['3', '4', '5']))
+ assert docs[-1] is None
|
ish/couchish
|
eae7b99b7d1442e1204da893ca8a5dbbf8574bc9
|
Fix dependencies.
|
diff --git a/couchish.egg-info/requires.txt b/couchish.egg-info/requires.txt
index a00502f..85a1332 100644
--- a/couchish.egg-info/requires.txt
+++ b/couchish.egg-info/requires.txt
@@ -1,2 +1,8 @@
PyYAML
-couchdb-session
\ No newline at end of file
+couchdb-session
+dottedish
+jsonish
+schemaish
+
+[formish]
+formish
\ No newline at end of file
diff --git a/setup.py b/setup.py
index a5d2d67..ca0f1d0 100644
--- a/setup.py
+++ b/setup.py
@@ -1,28 +1,37 @@
from setuptools import setup, find_packages
import sys, os
version = '0.2'
setup(name='couchish',
version=version,
description="",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Tim Parkin & Matt Goodall',
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"PyYAML",
"couchdb-session",
+ "dottedish",
+ "jsonish",
+ "schemaish",
],
+ extras_require={
+ 'formish': ['formish'],
+ },
entry_points="""
# -*- Entry points: -*-
""",
+ test_suite='couchish.tests',
+ tests_require=['BeautifulSoup', 'WebOb', 'formish'],
)
+
|
ish/couchish
|
50d8e5d325d3b0a08bb441205a886bbef8b61367
|
Replace sets.Set with set builtin.
|
diff --git a/couchish/couchish_jsonbuilder.py b/couchish/couchish_jsonbuilder.py
index 87dc959..b98a04a 100644
--- a/couchish/couchish_jsonbuilder.py
+++ b/couchish/couchish_jsonbuilder.py
@@ -1,194 +1,193 @@
-from sets import Set
from couchish.create_view import getjs
from couchish.schemaish_jsonbuilder import strip_stars
from string import Template
def buildview(view):
    """
    Generate the javascript map function for a reverse-reference view.

    `view` maps a model type name to a list of dotted attribute paths;
    a '*'/'.* ' segment marks a sequence that needs a JS for-loop, e.g.:

    function (doc) {
        if (doc.model_type == 'book'){
            for (var i1 in doc.metadata) {
                for (var i2 in doc.metadata[i1].authors) {
                    emit(doc.metadata[i1].authors[i2]._ref, null);
                }
            }
        }
    }

    Returns a (map_source, reduce) tuple; reduce is always None.
    """
    # Templates for the pieces of the generated function; '$body' is the
    # insertion point that is filled in progressively below.
    main_template = Template( \
""" function (doc) {
$body
}""")
    if_template = Template( \
""" if (doc.model_type == '$type'){
$body
 }
""")
    for_template = Template( \
""" for (var i$n in doc$attr) {
$body
 }""")
    emit_template = Template( \
""" emit(doc$attr._ref, null);""")
    out = ''
    for type, attrs in view.items():
        out_fors = ''
        for attr in attrs:
            # One if-block per model type (identical for every attr of
            # that type; the last assignment is the one used below).
            templ_if = if_template.substitute({'type': type, 'body':'$body'})
            # 'a.*.b' -> segments ['a*', 'b'], cleansegments ['a', 'b'].
            segments = attr.replace('.*','*').split('.')
            cleansegments = attr.replace('.*','').split('.')
            out_attr = ''
            templ_fors = '$body\n'
            for n,segment in enumerate(segments):
                if segment.endswith('*'):
                    # Sequence segment: wrap the body in a JS for-loop and
                    # index into the sequence for deeper segments.
                    out_loop_var = out_attr + '.%s'%cleansegments[n]
                    out_attr += '.%s[i%s]'%(cleansegments[n], n)
                    templ_for = for_template.substitute(n=n, attr=out_loop_var, body='$body')
                    templ_fors = Template(templ_fors).substitute(body=templ_for)
                else:
                    # Plain attribute access: just extend the dotted path.
                    out_attr += '.%s'%cleansegments[n]
            # Innermost body: emit the reference id for this attribute path.
            out_emit = emit_template.substitute(attr=out_attr)
            out_fors += Template(templ_fors).substitute(body=out_emit)
        out += Template(templ_if).substitute(body=out_fors)
    return (main_template.substitute(body=out), None)
def build_refersto_view(uses):
    """
    Build the javascript map function for a 'refersto' lookup view.

    `uses` is either a single dotted attribute path ('modeltype.attr')
    or a list of such paths; all paths must share one model type.
    Emits (doc._id, <object built by getjs(uses)>) for docs of that type.

    :raises ValueError: if the paths span more than one model type.
    """
    model_types = set()
    if isinstance(uses, basestring):
        # Single path: the model type is the leading segment.
        model_type = uses.split('.')[0]
        uses = [uses]
    else:
        for use in uses:
            mt = use.split('.')[0]
            model_types.add(mt)
        if len(model_types) > 1:
            raise ValueError('Can only use one model type in "uses" at the moment')
        model_type = list(model_types)[0]
    viewdef = 'function (doc) {\n'
    viewdef += '   if (doc.model_type == \''+model_type+'\'){\n'
    viewdef += '       emit(doc._id, %s )\n'%getjs(uses)
    viewdef += '   }\n'
    viewdef += '}\n'
    return viewdef
def get_view(view, views, views_by_viewname, model_type=None):
    """
    Normalise a single view definition and register it.

    Fills in defaults on `view` in place (name, designdoc, url), resolves
    its map/reduce sources, and records the result in `views`
    (url -> (map, reduce)) and `views_by_viewname` (url -> metadata).

    :param view: mutable view-definition dict; updated in place.
    :param views: accumulator mapping view url to (map, reduce) source.
    :param views_by_viewname: accumulator of per-view metadata dicts.
    :param model_type: owning model type; when None it is read from
        view['model_type'].
    :raises KeyError: if no design doc can be determined for the view.
    """
    if model_type is None:
        # Then we have to have an explicit model type set if we want to use auto built views
        model_type = view.get('model_type')
    if 'designdoc' not in view:
        # Then we use the type as the design doc
        view['designdoc'] = model_type
    if 'map' in view:
        # Then we have explicit javascript functions
        map = view['map']
        reduce = view.get('reduce')
    elif 'type' in view:
        # Then we're auto building views if possible
        if 'name' not in view:
            # Use the view type for the name
            view['name'] = view['type']
        if view['type'] == 'all':
            map, reduce = ("function(doc) { if (doc.model_type == '%s') { emit(doc._id, null); } }"%model_type,None)
        elif view['type'] == 'all_count':
            # BUG FIX: this was a second independent `if`, so for type
            # 'all' its `else` clobbered the map with
            # build_refersto_view(view['uses']) (KeyError when 'uses' is
            # absent). `elif` keeps the three types mutually exclusive.
            map, reduce = ("function(doc) { if (doc.model_type == '%s') { emit(doc._id, 1); } }"%model_type, "function(keys, values) { return sum(values); }")
        else:
            # Any other type is a refersto-style lookup built from 'uses'.
            map = build_refersto_view(view['uses'])
            reduce = view.get('reduce')
    if 'url' not in view:
        # Then we need to provide one
        if view['designdoc'] is None:
            # Then we use the couchish namespace
            raise KeyError('Cannot work out a design doc for view %s'%view.get('name'))
        else:
            view['url'] = '%s/%s'%(view['designdoc'],view['name'])
    views_by_viewname[view['url']] = {'url':view['url'], 'key': view.get('key','_id'), 'uses': view.get('uses')}
    views_by_viewname[view['url']]['map'] = (map,reduce)
    views[view['url']] = (map,reduce)
def get_views(models_definition, views_definition):
views = {}
views_by_viewname = {}
views_by_uses = {}
viewnames_by_attribute = {}
attributes_by_viewname = {}
for view in views_definition:
get_view(view, views, views_by_viewname)
for model_type, definition in models_definition.items():
for view in definition.get('views',[]):
get_view(view, views, views_by_viewname, model_type=model_type)
parents = []
field_to_view = {}
for model_type, definition in models_definition.items():
for field in definition['fields']:
# some uses need to know whether the attr is composed of any sequences
field['key'] = strip_stars(field['name'])
if field.get('type','').startswith('Sequence'):
fieldname = '%s.*'%field['name']
else:
fieldname = field['name']
# If we have any references, build the appropriate lookups
if 'attr' in field and 'refersto' in field['attr']:
refersto = field['attr']['refersto']
view = views_by_viewname[refersto]
if 'uses' in field['attr']:
uses = field['attr']['uses']
else:
uses = view['uses']
# Build the reference views dynamically if not explicit
if isinstance(uses, basestring):
views_by_uses.setdefault(view['url']+'-rev',{}).setdefault(model_type,[]).append( fieldname )
- viewnames_by_attribute.setdefault(uses, Set()).add(refersto)
- attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,Set()).add( fieldname.replace('.*','*') )
+ viewnames_by_attribute.setdefault(uses, set()).add(refersto)
+ attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,set()).add( fieldname.replace('.*','*') )
else:
views_by_uses.setdefault(view['url']+'-rev',{}).setdefault(model_type,[]).append( fieldname )
- attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,Set()).add( fieldname.replace('.*','*') )
+ attributes_by_viewname.setdefault(refersto, {}).setdefault(model_type,set()).add( fieldname.replace('.*','*') )
for use in uses:
- viewnames_by_attribute.setdefault(use, Set()).add(refersto)
+ viewnames_by_attribute.setdefault(use, set()).add(refersto)
# Create any 'viewby' views
if 'viewby' in field:
if '*' in fieldname:
raise Exception('Can\'t generate viewby views on attributes in sequences')
if field['viewby'] == True:
url = '%s/by_%s'%(model_type,fieldname)
else:
url = field['viewby']
views[url] = ("function(doc) { if (doc.model_type=='%s') { emit(doc.%s, null ); } }"%(model_type,field['name']),None)
if 'viewby_count' in field:
if field['viewby_count'] == True:
url = '%s/by_%s_count'%(model_type,fieldname)
else:
url = field['viewby_count']
views[url] = ("function(doc) { if (doc.model_type == '%s') { emit(doc._id, 1); } }"%model_type, "function(keys, values) { return sum(values); }")
# Generate dynamic views for reference reverse lookups
for url, view in views_by_uses.items():
views[url] = buildview(view)
out = {'views': views,'views_by_viewname': views_by_viewname, 'viewnames_by_attribute': viewnames_by_attribute, 'attributes_by_viewname':attributes_by_viewname,'views_by_uses':views_by_uses}
return out
diff --git a/couchish/tests/test_couchish_jsonbuilder.py b/couchish/tests/test_couchish_jsonbuilder.py
index 34a31c4..288f69b 100644
--- a/couchish/tests/test_couchish_jsonbuilder.py
+++ b/couchish/tests/test_couchish_jsonbuilder.py
@@ -1,96 +1,95 @@
import unittest
from couchish.couchish_jsonbuilder import get_views
import yaml
-from sets import Set
from couchish import sync_categories
import couchdb
from couchdb.design import ViewDefinition
DATADIR = 'couchish/tests/data/%s'
def simplifyjs(string):
    """Normalise generated javascript for comparison by stripping
    semicolons, spaces and newlines."""
    for unwanted in (';', ' ', '\n'):
        string = string.replace(unwanted, '')
    return string
# Module-level test fixture: drop and recreate the 'test-couchish'
# database once at import time, so all TestCase classes in this module
# share one fresh db.  NOTE(review): requires a CouchDB server listening
# on localhost:5984 — confirm before running the suite.
server = couchdb.Server('http://localhost:5984')
if 'test-couchish' in server:
    del server['test-couchish']
db = server.create('test-couchish')
class Test(unittest.TestCase):
def setUp(self):
self.db = db
def test_simple(self):
book_definition = yaml.load( open(DATADIR%'test_couchish_book.yaml').read() )
dvd_definition = yaml.load( open(DATADIR%'test_couchish_dvd.yaml').read() )
post_definition = yaml.load( open(DATADIR%'test_couchish_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'test_couchish_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'test_couchish_views.yaml').read() )
models_definition = {'book': book_definition, 'author': author_definition,'post': post_definition, 'dvd': dvd_definition}
viewdata = get_views(models_definition, views_definition)
- assert viewdata['viewnames_by_attribute'] == {'author.first_name': Set(['customdes/author_name']), 'author.last_name': Set(['customdesigndoc/author_surname', 'customdes/author_name'])}
- assert viewdata['attributes_by_viewname'] == {'customdesigndoc/author_surname': {'dvd': Set(['writtenby']), \
- 'book': Set(['coauthored'])}, \
- 'customdes/author_name': {'post': Set(['author']), \
- 'book': Set(['writtenby'])}}
+ assert viewdata['viewnames_by_attribute'] == {'author.first_name': set(['customdes/author_name']), 'author.last_name': set(['customdesigndoc/author_surname', 'customdes/author_name'])}
+ assert viewdata['attributes_by_viewname'] == {'customdesigndoc/author_surname': {'dvd': set(['writtenby']), \
+ 'book': set(['coauthored'])}, \
+ 'customdes/author_name': {'post': set(['author']), \
+ 'book': set(['writtenby'])}}
views = viewdata['views']
assert simplifyjs(views['customdes/author_name'][0]) == "function(doc){if(doc.model_type=='author'){emit(doc._id,{first_name:doc.first_name,last_name:doc.last_name})}}"
assert simplifyjs(views['customdes/author_name-rev'][0]) == "function(doc){if(doc.model_type=='post'){emit(doc.author._ref,null)}if(doc.model_type=='book'){emit(doc.writtenby._ref,null)}}"
assert simplifyjs(views['customdesigndoc/author_surname'][0]) == "function(doc){if(doc.model_type=='author'){emit(doc._id,{last_name:doc.last_name})}}"
assert simplifyjs(views['customdesigndoc/author_surname-rev'][0]) == "function(doc){if(doc.model_type=='dvd'){emit(doc.writtenby._ref,null)}if(doc.model_type=='book'){emit(doc.coauthored._ref,null)}}"
def test_viewby(self):
post_definition = yaml.load( open(DATADIR%'by/test_couchish_by_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'by/test_couchish_by_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'by/test_couchish_by_views.yaml').read() )
models_definition = {'author': author_definition, 'post': post_definition}
viewdata = get_views(models_definition, views_definition)
assert simplifyjs(viewdata['views']['author/by_last_name'][0]) == "function(doc){if(doc.model_type=='author'){emit(doc.last_name,null)}}"
assert simplifyjs(viewdata['views']['post/all'][0]) == "function(doc){if(doc.model_type=='post'){emit(doc._id,null)}}"
def test_categories_creation(self):
categories_definition = yaml.load( open(DATADIR%'categories.yaml').read() )
sync_categories.sync(self.db, categories_definition)
def test_views_creation(self):
book_definition = yaml.load( open(DATADIR%'test_couchish_book.yaml').read() )
dvd_definition = yaml.load( open(DATADIR%'test_couchish_dvd.yaml').read() )
post_definition = yaml.load( open(DATADIR%'test_couchish_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'test_couchish_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'test_couchish_views.yaml').read() )
models_definition = {'book': book_definition, 'author': author_definition,'post': post_definition, 'dvd': dvd_definition}
viewdata = get_views(models_definition, views_definition)
for url, view in viewdata['views'].items():
designdoc = url.split('/')[0]
view = ViewDefinition(designdoc, url, view[0])
view.get_doc(self.db)
view.sync(self.db)
def test_autoviews(self):
post_definition = yaml.load( open(DATADIR%'autoviews/test_couchish_post.yaml').read() )
author_definition = yaml.load( open(DATADIR%'autoviews/test_couchish_author.yaml').read() )
views_definition = yaml.load( open(DATADIR%'autoviews/test_couchish_views.yaml').read() )
models_definition = {'author': author_definition, 'post': post_definition}
viewdata = get_views(models_definition, views_definition)
views = viewdata['views']
assert simplifyjs(views['couchish/author_name'][0]) == "function(doc){if(doc.model_type=='author'){emit(doc._id,{first_name:doc.first_name,last_name:doc.last_name})}}"
assert simplifyjs(views['couchish/author_name-rev'][0]) == "function(doc){if(doc.model_type=='post'){emit(doc.author._ref,null)}}"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.