repo (string) | commit (string) | message (string) | diff (string)
---|---|---|---
marook/tagfs
|
6f5337cd73528796b176af3b491ea6a120cd1cf3
|
refactored Config: removed not used code
|
diff --git a/src/modules/tagfs/config.py b/src/modules/tagfs/config.py
index 4fc8903..9653d30 100644
--- a/src/modules/tagfs/config.py
+++ b/src/modules/tagfs/config.py
@@ -1,92 +1,62 @@
#!/usr/bin/env python
#
-# Copyright 2009, 2010 Markus Pielmeier
+# Copyright 2009, 2010, 2012 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import ConfigParser
import logging
import os
-class ConfigError(Exception):
+def parseConfig(itemsDir):
+ config = ConfigParser.SafeConfigParser({
+ 'tagFileName': '.tag',
+ 'enableValueFilters': 'False',
+ 'enableRootItemLinks': 'False',
+ })
+ config.add_section(Config.GLOBAL_SECTION)
- pass
+ parsedFiles = config.read([os.path.join(itemsDir, '.tagfs', 'tagfs.conf'),
+ os.path.expanduser(os.path.join('~', '.tagfs', 'tagfs.conf')),
+ os.path.join('/', 'etc', 'tagfs', 'tagfs.conf')])
-class Config(object):
-
- GLOBAL_SECTION = 'global'
-
- def applyDefaults(self):
- self.tagFileName = '.tag'
- self.enableValueFilters = False
- self.enableRootItemLinks = False
-
- def __init__(self, itemsDir):
- self._config = ConfigParser.SafeConfigParser({
- 'tagFileName': '.tag',
- 'enableValueFilters': False,
- 'enableRootItemLinks': False,
- })
- self._config.add_section(Config.GLOBAL_SECTION)
-
- self.itemsDir = itemsDir
+ logging.debug('Parsed the following config files: %s' % ', '.join(parsedFiles))
- self.applyDefaults()
+ return Config(config)
- parsedFiles = self._config.read([os.path.join(itemsDir, '.tagfs', 'tagfs.conf'),
- os.path.expanduser(os.path.join('~', '.tagfs', 'tagfs.conf')),
- os.path.join('/', 'etc', 'tagfs', 'tagfs.conf')])
+class Config(object):
- logging.debug('Parsed the following config files: %s' % ', '.join(parsedFiles))
+ GLOBAL_SECTION = 'global'
- def _boolToStr(self, b):
- if b is True:
- return 'true'
- elif b is False:
- return 'false'
- else:
- # TODO make error more verbose
- raise ConfigError()
+ def __init__(self, _config):
+ self._config = _config
@property
def tagFileName(self):
return self._config.get(Config.GLOBAL_SECTION, 'tagFileName')
- @tagFileName.setter
- def tagFileName(self, tagFileName):
- self._config.set(Config.GLOBAL_SECTION, 'tagFileName', tagFileName)
-
- # TODO implement generic approach to get/set boolean values
@property
def enableValueFilters(self):
return self._config.getboolean(Config.GLOBAL_SECTION, 'enableValueFilters')
- @enableValueFilters.setter
- def enableValueFilters(self, enableValueFilters):
- self._config.set(Config.GLOBAL_SECTION, 'enableValueFilters', self._boolToStr(enableValueFilters))
-
@property
def enableRootItemLinks(self):
return self._config.getboolean(Config.GLOBAL_SECTION, 'enableRootItemLinks')
- @enableRootItemLinks.setter
- def enableRootItemLinks(self, enableRootItemLinks):
- self._config.set(Config.GLOBAL_SECTION, 'enableRootItemLinks', self._boolToStr(enableRootItemLinks))
-
def __str__(self):
#return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['tagFileName', 'enableValueFilters', 'enableRootItemLinks']]) + ']'
return '[tagFileName: %s, enableValueFilters: %s, enableRootItemLinks: %s]' % (self.tagFileName, self.enableValueFilters, self.enableRootItemLinks)
diff --git a/src/modules/tagfs/main.py b/src/modules/tagfs/main.py
index 352e0c1..a76dead 100644
--- a/src/modules/tagfs/main.py
+++ b/src/modules/tagfs/main.py
@@ -1,180 +1,180 @@
#!/usr/bin/env python
#
# Copyright 2009, 2010 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
#
# = tag fs =
# == glossary ==
# * item: An item is a directory in the item container directory. Items can be
# tagged using a tag file.
# * tag: A tag is a text string which can be assigned to an item. Tags can
# consist of any character except newlines.
import os
import stat
import errno
import exceptions
import time
import functools
import logging
import fuse
if not hasattr(fuse, '__version__'):
raise RuntimeError, \
"your fuse-py doesn't know of fuse.__version__, probably it's too old."
fuse.fuse_python_api = (0, 2)
from view import View
from cache import cache
from item_access import ItemAccess
-from config import Config
+from config import parseConfig
from log import logException
class TagFS(fuse.Fuse):
def __init__(self, initwd, *args, **kw):
fuse.Fuse.__init__(self, *args, **kw)
self._initwd = initwd
self._itemsRoot = None
# TODO change command line arguments structure
# goal: tagfs <items dir> <mount dir>
self.parser.add_option('-i',
'--items-dir',
dest = 'itemsDir',
help = 'items directory',
metavar = 'dir')
self.parser.add_option('-t',
'--tag-file',
dest = 'tagFileName',
help = 'tag file name',
metavar = 'file',
default = None)
self.parser.add_option('--value-filter',
action = 'store_true',
dest = 'enableValueFilters',
help = 'Displays value filter directories on toplevel instead of only context entries',
default = None)
self.parser.add_option('--root-items',
action = 'store_true',
dest = 'enableRootItemLinks',
help = 'Display item links in tagfs root directory.',
default = None)
def getItemAccess(self):
# Maybe we should move the parser run from main here.
# Or we should at least check if it was run once...
opts, args = self.cmdline
# Maybe we should add expand user? Maybe even vars???
assert opts.itemsDir != None and opts.itemsDir != ''
itemsRoot = os.path.normpath(
os.path.join(self._initwd, opts.itemsDir))
# TODO rel https://github.com/marook/tagfs/issues#issue/2
# Ensure that mount-point and items dir are disjoined.
# Something along
# assert not os.path.normpath(itemsDir).startswith(itemsRoot)
# try/except here?
try:
return ItemAccess(itemsRoot, self.config.tagFileName)
except OSError, e:
logging.error("Can't create item access from items directory %s. Reason: %s",
itemsRoot, str(e.strerror))
raise
@property
@cache
def config(self):
opts, args = self.cmdline
- c = Config(os.path.normpath(os.path.join(self._initwd, opts.itemsDir)))
+ c = parseConfig(os.path.normpath(os.path.join(self._initwd, opts.itemsDir)))
if opts.tagFileName:
c.tagFileName = opts.tagFileName
if opts.enableValueFilters:
c.enableValueFilters = opts.enableValueFilters
if opts.enableRootItemLinks:
c.enableRootItemLinks = opts.enableRootItemLinks
logging.debug('Using configuration %s' % c)
return c
@property
@cache
def view(self):
itemAccess = self.getItemAccess()
return View(itemAccess, self.config)
@logException
def getattr(self, path):
return self.view.getattr(path)
@logException
def readdir(self, path, offset):
return self.view.readdir(path, offset)
@logException
def readlink(self, path):
return self.view.readlink(path)
@logException
def open(self, path, flags):
return self.view.open(path, flags)
@logException
def read(self, path, size, offset):
return self.view.read(path, size, offset)
@logException
def write(self, path, data, pos):
return self.view.write(path, data, pos)
@logException
def symlink(self, path, linkPath):
return self.view.symlink(path, linkPath)
def main():
fs = TagFS(os.getcwd(),
version = "%prog " + fuse.__version__,
dash_s_do = 'setsingle')
fs.parse(errex = 1)
opts, args = fs.cmdline
if opts.itemsDir == None:
fs.parser.print_help()
# items dir should probably be an arg, not an option.
print "Error: Missing items directory option."
# Quickfix rel https://github.com/marook/tagfs/issues/#issue/3
# FIXME: since we run main via sys.exit(main()), this should
# probably be handled via some return code.
import sys
sys.exit()
return fs.main()
if __name__ == '__main__':
import sys
sys.exit(main())
|
marook/tagfs
|
b37cf071d51a9a6e50e5ab0bcba28da40b33bedb
|
the README now suggests to execute the unit tests before installing
|
diff --git a/README b/README
index 42c36dc..2245258 100644
--- a/README
+++ b/README
@@ -1,120 +1,120 @@
tagfs - tag file system
1) Introduction
2) Requirements
3) Installation
4) Usage
5) Configuration
5.1) Options
5.1.1) tagFileName
5.1.2) enableValueFilters
5.1.3) enableRootItemLinks
6) Further Reading
7) Contact
---------------------------------------------------------------------
Introduction
tagfs is used to organize your files using tags.
This document contains basic usage instructions for users. To develop or debug
tagfs see the README.dev file.
---------------------------------------------------------------------
Requirements
* python 2.5, 2.6, 2.7
* Linux kernel with fuse enabled
* python-fuse installed
---------------------------------------------------------------------
Installation
To install tagfs into your home directory type the following:
-$ python setup.py install --home ~/.local
+$ python setup.py test install --home ~/.local
If you haven't already extended your local python path then add the following
to your environment configuration script. For example to your ~/.bashrc:
export PYTHONPATH=~/.local/lib/python:$PYTHONPATH
---------------------------------------------------------------------
Usage
After installation tagfs can be started the following way.
Mount a tagged directory:
$ tagfs -i /path/to/my/items/directory /path/to/my/mount/point
Unmount a tagged directory:
$ fusermount -u /path/to/my/mount/point
---------------------------------------------------------------------
Configuration
tagfs can be configured through configuration files. Configuration files are
searched in different locations by tagfs. The following locations are used.
Locations with higher priority come first:
- <items directory>/.tagfs/tagfs.conf
- ~/.tagfs/tagfs.conf
- /etc/tagfs/tagfs.conf
Right now the following configuration options are supported.
---------------------------------------------------------------------
Configuration - Options - tagFileName
Through this option the name of the parsed tag files can be specified. The
default value is '.tag'.
Example:
[global]
tagFileName = ABOUT
---------------------------------------------------------------------
Configuration - Options - enableValueFilters
You can enable or disable value filters. If you enable value filters you will
see filter directories for each tag value. For value filters the tag's
context can be anyone. The default value is 'false'.
Example:
[global]
enableValueFilters = true
---------------------------------------------------------------------
Configuration - Options - enableRootItemLinks
To show links to all items in the tagfs '/' directory enable this option. The
default value is 'false'.
Example:
[global]
enableRootItemLinks = true
---------------------------------------------------------------------
Further Reading
Using a file system for my bank account (Markus Pielmeier)
http://pielmeier.blogspot.com/2010/08/using-file-system-for-my-bank-account.html
---------------------------------------------------------------------
Contact
* homepage: http://wiki.github.com/marook/tagfs
* user group: http://groups.google.com/group/tagfs
* author: Markus Pielmeier <[email protected]>
|
marook/tagfs
|
4a07c91fefe4b27ebf64586f695beac6c108ce4c
|
now setup.py fails if there are failing unit tests
|
diff --git a/setup.py b/setup.py
index bd3bea0..af7e1d6 100644
--- a/setup.py
+++ b/setup.py
@@ -1,254 +1,264 @@
#!/usr/bin/env python
#
# Copyright 2009 Peter Prohaska
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from distutils.core import setup, Command
import sys
import os
from os.path import (
basename,
dirname,
abspath,
splitext,
join as pjoin
)
from glob import glob
from unittest import TestLoader, TextTestRunner
import re
import datetime
projectdir = dirname(abspath(__file__))
reportdir = pjoin(projectdir, 'reports')
srcdir = pjoin(projectdir, 'src')
bindir = pjoin(srcdir, 'bin')
moddir = pjoin(srcdir, 'modules')
testdir = pjoin(srcdir, 'test')
testdatadir = pjoin(projectdir, 'etc', 'test', 'events')
testmntdir = pjoin(projectdir, 'mnt')
assert os.path.isdir(srcdir)
assert os.path.isdir(bindir)
assert os.path.isdir(moddir)
assert os.path.isdir(testdir)
assert os.path.isdir(testdatadir)
class Report(object):
def __init__(self):
self.reportDateTime = datetime.datetime.utcnow()
self.reportDir = os.path.join(reportdir, self.reportDateTime.strftime('%Y-%m-%d_%H_%M_%S'))
# fails when dir already exists which is nice
os.makedirs(self.reportDir)
@property
def coverageReportFileName(self):
return os.path.join(self.reportDir, 'coverage.txt')
@property
def unitTestReportFileName(self):
return os.path.join(self.reportDir, 'tests.txt')
def sourceFiles():
yield os.path.join(bindir, 'tagfs')
sourceFilePattern = re.compile('^.*[.]py$')
for root, dirs, files in os.walk(moddir):
for f in files:
if(not sourceFilePattern.match(f)):
continue
if(f.startswith('.#')):
continue
yield os.path.join(root, f)
def fullSplit(p):
head, tail = os.path.split(p)
if(len(head) > 0):
for n in fullSplit(head):
yield n
yield tail
def testModules():
testFilePattern = re.compile('^(test.*)[.]py$', re.IGNORECASE)
for root, dirs, files in os.walk(testdir):
for f in files:
m = testFilePattern.match(f)
if(not m):
continue
relDir = os.path.relpath(root, testdir)
yield '.'.join([n for n in fullSplit(relDir)] + [m.group(1), ])
def printFile(fileName):
if(not os.path.exists(fileName)):
# TODO maybe we should not silently return?
return
with open(fileName, 'r') as f:
for line in f:
sys.stdout.write(line)
+class TestFailException(Exception):
+    '''Indicates that at least one of the unit tests has failed
+ '''
+
+ pass
+
class test(Command):
description = 'run tests'
user_options = []
def initialize_options(self):
self._cwd = os.getcwd()
self._verbosity = 2
def finalize_options(self): pass
def run(self):
report = Report()
tests = [m for m in testModules()]
print "..using:"
print " moddir:", moddir
print " testdir:", testdir
print " testdatadir:", testdatadir
print " testmntdir:", testmntdir
print " tests:", tests
print " sys.path:", sys.path
print
# insert project lookup paths at index 0 to make sure they are used
# over global libraries
sys.path.insert(0, moddir)
sys.path.insert(0, testdir)
# TODO try to import all test cases here. the TestLoader is throwing
# very confusing errors when imports can't be resolved.
# configure logging
# TODO not sure how to enable this... it's a bit complicate to enable
# logging only for 'make mt' and disable it then for
# 'python setup.py test'. 'python setup.py test' is such a gabber...
#if 'DEBUG' in os.environ:
# from tagfs import log_config
# log_config.setUpLogging()
if 'DEBUG' in os.environ:
import logging
logging.basicConfig(level = logging.DEBUG)
suite = TestLoader().loadTestsFromNames(tests)
- with open(report.unitTestReportFileName, 'w') as testResultsFile:
- r = TextTestRunner(stream = testResultsFile, verbosity = self._verbosity)
-
- def runTests():
- r.run(suite)
-
- try:
- import coverage
-
- c = coverage.coverage()
- c.start()
- runTests()
- c.stop()
-
- with open(report.coverageReportFileName, 'w') as reportFile:
- c.report([f for f in sourceFiles()], file = reportFile)
+ try:
+ with open(report.unitTestReportFileName, 'w') as testResultsFile:
+ r = TextTestRunner(stream = testResultsFile, verbosity = self._verbosity)
- except ImportError:
- # TODO ImportErrors from runTests() may look like coverage is missing
+ def runTests():
+ result = r.run(suite)
- print ''
- print 'coverage module not found.'
- print 'To view source coverage stats install http://nedbatchelder.com/code/coverage/'
- print ''
+ if(not result.wasSuccessful()):
+ raise TestFailException()
- runTests()
+ try:
+ import coverage
- # TODO use two streams instead of printing files after writing
- printFile(report.unitTestReportFileName)
- printFile(report.coverageReportFileName)
+ c = coverage.coverage()
+ c.start()
+ runTests()
+ c.stop()
+
+ with open(report.coverageReportFileName, 'w') as reportFile:
+ c.report([f for f in sourceFiles()], file = reportFile)
+
+ except ImportError:
+ # TODO ImportErrors from runTests() may look like coverage is missing
+
+ print ''
+ print 'coverage module not found.'
+ print 'To view source coverage stats install http://nedbatchelder.com/code/coverage/'
+ print ''
+
+ runTests()
+ finally:
+ # TODO use two streams instead of printing files after writing
+ printFile(report.unitTestReportFileName)
+ printFile(report.coverageReportFileName)
# Overrides default clean (which cleans from build runs)
# This clean should probably be hooked into that somehow.
class clean_pyc(Command):
description = 'remove *.pyc files from source directory'
user_options = []
def initialize_options(self):
self._delete = []
for cwd, dirs, files in os.walk(projectdir):
self._delete.extend(
pjoin(cwd, f) for f in files if f.endswith('.pyc')
)
def finalize_options(self):
pass
def run(self):
for f in self._delete:
try:
os.unlink(f)
except OSError, e:
print "Strange '%s': %s" % (f, e)
# Could be a directory.
# Can we detect file in use errors or are they OSErrors
# as well?
# Shall we catch all?
setup(
cmdclass = {
'test': test,
'clean_pyc': clean_pyc,
},
name = 'tagfs',
version = '0.1',
url = 'http://wiki.github.com/marook/tagfs',
description = '',
long_description = '',
author = 'Markus Pielmeier',
author_email = '[email protected]',
license = 'GPLv3',
download_url = 'http://github.com/marook/tagfs/downloads/tagfs_0.1-src.tar.bz2',
platforms = 'Linux',
requires = [],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Filesystems'
],
data_files = [
(pjoin('share', 'doc', 'tagfs'), ['AUTHORS', 'COPYING', 'README'])
],
# TODO maybe we should include src/bin/*?
scripts = [pjoin(bindir, 'tagfs')],
packages = ['tagfs'],
package_dir = {'': moddir},
)
|
marook/tagfs
|
a40fa4d0d5949d276e4668d88055422546fe96b7
|
fixed unit tests
|
diff --git a/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py b/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py
index 76e5b0c..177cbd4 100644
--- a/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py
+++ b/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py
@@ -1,73 +1,81 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from unittest import TestCase
from tagfs.node_filter_context import ContextValueFilterDirectoryNode
from tagfs_test.node_asserter import validateDirectoryInterface, validateLinkInterface
from tagfs_test.item_access_mock import ItemAccessMock
from tagfs_test.item_mock import ItemMock
class TagMock(object):
def __init__(self, context, value):
self.context = context
self.value = value
class TaggedItemMock(ItemMock):
def __init__(self, name, context, value):
super(TaggedItemMock, self).__init__(name, [TagMock(context, value), ])
self._context = context
self._value = value
def isTaggedWithContextValue(self, context, value):
return self._context == context and self._value == value
class ParentNodeMock(object):
def __init__(self, items):
self.items = items
+class ConfigMock(object):
+
+ @property
+ def enableValueFilters(self):
+ return False
+
class TestContextValueFilterDirectoryNode(TestCase):
def setUp(self):
self.context = 'c1'
self.value = 'v1'
self.itemAccess = ItemAccessMock()
self.itemAccess.taggedItems = [TaggedItemMock('item1', self.context, self.value), ]
+ self.config = ConfigMock()
+
self.parentNode = ParentNodeMock(self.itemAccess.taggedItems)
- self.node = ContextValueFilterDirectoryNode(self.itemAccess, self.parentNode, self.context, self.value)
+ self.node = ContextValueFilterDirectoryNode(self.itemAccess, self.config, self.parentNode, self.context, self.value)
def testNodeAttrMTimeIsItemAccessParseTime(self):
attr = self.node.attr
self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
def testNodeIsDirectory(self):
validateDirectoryInterface(self, self.node)
def testMatchingItemIsAvailableAsLink(self):
e = self.node.entries['item1']
validateLinkInterface(self, e)
diff --git a/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py b/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py
index 2768196..4e3b9ea 100644
--- a/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py
+++ b/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py
@@ -1,48 +1,48 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from unittest import TestCase
from tagfs.node_filter_context import ContextValueListDirectoryNode
from tagfs_test.node_asserter import validateDirectoryInterface, validateLinkInterface
from tagfs_test.item_access_mock import ItemAccessMock
from tagfs_test.item_mock import createItemMocks
class ParentNodeMock(object):
pass
class TestContextValueListDirectoryNode(TestCase):
def setUp(self):
self.itemAccess = ItemAccessMock()
self.itemAccess.taggedItems = createItemMocks(['item1'])
self.parentNode = ParentNodeMock()
self.context = 'c1'
- self.node = ContextValueListDirectoryNode(self.itemAccess, self.parentNode, self.context)
+ self.node = ContextValueListDirectoryNode(self.itemAccess, None, self.parentNode, self.context)
def testNodeAttrMTimeIsItemAccessParseTime(self):
attr = self.node.attr
self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
def testNodeIsDirectory(self):
validateDirectoryInterface(self, self.node)
diff --git a/src/test/tagfs_test_small/test_root_directory_node.py b/src/test/tagfs_test_small/test_root_directory_node.py
index 0d84ca2..6631a1c 100644
--- a/src/test/tagfs_test_small/test_root_directory_node.py
+++ b/src/test/tagfs_test_small/test_root_directory_node.py
@@ -1,76 +1,88 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from unittest import TestCase
from tagfs.node_root import RootDirectoryNode
from tagfs_test.node_asserter import validateDirectoryInterface, validateLinkInterface
from tagfs_test.item_access_mock import ItemAccessMock
from tagfs_test.item_mock import createItemMocks
+class ConfigMock(object):
+
+ @property
+ def enableValueFilters(self):
+ return False
+
+ @property
+ def enableRootItemLinks(self):
+ return True
+
class AbstractRootDirectoryNodeTest(TestCase):
@property
def _itemNames(self):
return self._taggedItemNames
def setUp(self):
self._taggedItemNames = ['item1']
self.itemAccess = ItemAccessMock()
self.itemAccess.taggedItems = createItemMocks(self._itemNames)
- self.node = RootDirectoryNode(self.itemAccess)
+ self.config = ConfigMock()
+
+ self.node = RootDirectoryNode(self.itemAccess, self.config)
class TestRootDirectoryNode(AbstractRootDirectoryNodeTest):
@property
def _itemNames(self):
return self._taggedItemNames + ['.untagged']
def testNodeAttrMTimeIsItemAccessParseTime(self):
attr = self.node.attr
self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
def testNodeIsDirectory(self):
validateDirectoryInterface(self, self.node)
def testItemLinksReplaceUntaggedDirectory(self):
untaggedNode = self.node.entries['.untagged']
# untagged node must be a link as the untagged directory node
# weights less than the '.untagged' item from the tagged items.
validateLinkInterface(self, untaggedNode)
def testNodeContainerContainsTaggedNodeLinks(self):
entries = self.node.entries
for itemName in self._taggedItemNames:
self.assertTrue(itemName in entries)
validateLinkInterface(self, entries[itemName])
class TestRootDirectoryNodeUntaggedDirectory(AbstractRootDirectoryNodeTest):
def testNodeContainsUntaggedDirectory(self):
untaggedNode = self.node.entries['.untagged']
validateDirectoryInterface(self, untaggedNode)
|
marook/tagfs
|
a6fe8695e64d8d94a966f4cf2d2fd1c411efc309
|
implemented root item links flag feature
|
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
index e1d45ae..4e0ed85 100644
--- a/src/modules/tagfs/node_filter.py
+++ b/src/modules/tagfs/node_filter.py
@@ -1,90 +1,95 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from cache import cache
from node import Stat, ItemLinkNode, DirectoryNode
from node_export import ExportDirectoryNode
class FilterDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
self.config = config
@property
def attr(self):
s = super(FilterDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def contexts(self):
c = set()
for item in self.items:
for t in item.tags:
context = t.context
if context is None:
continue
c.add(t.context)
return c
+ @property
+ def _enableItemLinks(self):
+ return True
+
@property
def _entries(self):
# the import is not global because we want to prevent a cyclic
# dependency (ugly but works)
from node_filter_context import ContextValueListDirectoryNode
from node_filter_value import ValueFilterDirectoryNode
yield ExportDirectoryNode(self.itemAccess, self)
if(self.config.enableValueFilters):
for value in self.itemAccess.values:
yield ValueFilterDirectoryNode(self.itemAccess, self.config, self, value)
for context in self.contexts:
yield ContextValueListDirectoryNode(self.itemAccess, self.config, self, context)
- for item in self.items:
- yield ItemLinkNode(item)
+ if(self._enableItemLinks):
+ for item in self.items:
+ yield ItemLinkNode(item)
def addsValue(self, parentItems):
itemsLen = len(list(self.items))
if(itemsLen == 0):
return False
# TODO we should not compare the lengths but whether the child and
# parent items are different
parentItemsLen = len(list(parentItems))
return itemsLen != parentItemsLen
def _addsValue(self, child):
return child.addsValue(self.items)
diff --git a/src/modules/tagfs/node_root.py b/src/modules/tagfs/node_root.py
index 15025d8..03aa4bf 100644
--- a/src/modules/tagfs/node_root.py
+++ b/src/modules/tagfs/node_root.py
@@ -1,37 +1,41 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from node_filter import FilterDirectoryNode
from node_untagged_items import UntaggedItemsDirectoryNode
class RootDirectoryNode(FilterDirectoryNode):
def __init__(self, itemAccess, config):
super(RootDirectoryNode, self).__init__(itemAccess, config)
@property
def items(self):
return self.itemAccess.taggedItems
+ @property
+ def _enableItemLinks(self):
+ return self.config.enableRootItemLinks
+
@property
def _entries(self):
yield UntaggedItemsDirectoryNode('.untagged', self.itemAccess)
for e in super(RootDirectoryNode, self)._entries:
yield e
|
marook/tagfs
|
4821699b4ca1e980e7ec7d07a32c43aeea914eeb
|
fixed tagfs imports
|
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
index a482c94..eeed313 100644
--- a/src/modules/tagfs/item_access.py
+++ b/src/modules/tagfs/item_access.py
@@ -1,292 +1,292 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import time
import traceback
-from tagfs.cache import cache
+from cache import cache
class Tag(object):
def __init__(self, value, context = None):
if context == None:
self.context = None
else:
self.context = context.strip()
self.value = value.strip()
if not self.context == None and len(self.context) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
if len(self.value) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
def __hash__(self):
return (self.context, self.value).__hash__()
def __eq__(self, other):
return self.value == other.value and self.context == other.context
def __repr__(self):
return '<Tag %s: %s>' % (self.context, self.value)
def parseTagsFromFile(tagFileName):
"""Parses the tags from the specified file.
@return: The parsed values are returned as a set containing Tag objects.
@see: Tag
"""
tags = set()
tagFile = open(tagFileName, 'r')
try:
for rawTag in tagFile.readlines():
rawTag = rawTag.strip()
try:
if len(rawTag) == 0:
continue
tagTuple = rawTag.split(':', 1)
if len(tagTuple) == 1:
tagContext = None
tagValue = tagTuple[0]
else:
tagContext = tagTuple[0]
tagValue = tagTuple[1]
tag = Tag(tagValue, context = tagContext)
tags.add(tag)
except:
logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
finally:
tagFile.close()
return tags
class Item(object):
def __init__(self, name, itemAccess):
self.name = name
self.itemAccess = itemAccess
# TODO register at file system to receive tag file change events.
@property
@cache
def itemDirectory(self):
return os.path.join(self.itemAccess.dataDirectory, self.name)
@property
@cache
def _tagFileName(self):
"""Returns the name of the tag file for this item.
"""
itemDirectory = self.itemDirectory
return os.path.join(itemDirectory, self.itemAccess.tagFileName)
def __parseTags(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return parseTagsFromFile(tagFileName)
@property
@cache
def tagsCreationTime(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getctime(self._tagFileName)
@property
@cache
def tagsModificationTime(self):
"""Returns the last time when the tags have been modified.
"""
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getmtime(tagFileName)
@property
@cache
def tags(self):
"""Returns the tags as a list for this item.
"""
return self.__parseTags()
def getTagsByContext(self, context):
for t in self.tags:
if context != t.context:
continue
yield t
def isTaggedWithContextValue(self, context, value):
for t in self.getTagsByContext(context):
if value == t.value:
return True
return False
def isTaggedWithContext(self, context):
# TODO don't create whole list... just check wheather list is empty
return (len([c for c in self.getTagsByContext(context)]) > 0)
def isTaggedWithValue(self, value):
for t in self.tags:
if value == t.value:
return True
return False
@property
@cache
def tagged(self):
return os.path.exists(self._tagFileName)
def __repr__(self):
return '<Item %s>' % self.name
class ItemAccess(object):
"""This is the access point to the Items.
"""
def __init__(self, dataDirectory, tagFileName):
self.dataDirectory = dataDirectory
self.tagFileName = tagFileName
self.__items = None
self.__tags = None
self.__taggedItems = None
self.__untaggedItems = None
self.parseTime = 0
def __parseItems(self):
items = {}
logging.debug('Start parsing items from dir: %s', self.dataDirectory)
for itemName in os.listdir(self.dataDirectory):
if itemName == '.tagfs':
# skip directory with configuration
continue
try:
item = Item(itemName, self)
items[itemName] = item
except IOError, (error, strerror):
logging.error('Can \'t read tags for item %s: %s',
itemName,
strerror)
logging.debug('Found %s items', len(items))
self.parseTime = time.time()
return items
@property
@cache
def items(self):
return self.__parseItems()
@property
@cache
def tags(self):
tags = set()
for item in self.items.itervalues():
if not item.tagged:
continue
tags = tags | item.tags
return tags
@property
@cache
def taggedItems(self):
return set([item for item in self.items.itervalues() if item.tagged])
@property
@cache
def untaggedItems(self):
return set([item for item in self.items.itervalues() if not item.tagged])
def getItemDirectory(self, item):
return os.path.join(self.dataDirectory, item)
def contextTags(self, context):
contextTags = set()
for tag in self.tags:
if tag.context == context:
contextTags.add(tag)
return contextTags
@property
@cache
def contexts(self):
contexts = set()
for tag in self.tags:
if tag.context == None:
continue
contexts.add(tag.context)
return contexts
@property
@cache
def values(self):
values = set()
for tag in self.tags:
values.add(tag.value)
return values
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
diff --git a/src/modules/tagfs/main.py b/src/modules/tagfs/main.py
index c7d3949..352e0c1 100644
--- a/src/modules/tagfs/main.py
+++ b/src/modules/tagfs/main.py
@@ -1,180 +1,180 @@
#!/usr/bin/env python
#
# Copyright 2009, 2010 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
#
# = tag fs =
# == glossary ==
# * item: An item is a directory in the item container directory. Items can be
# tagged using a tag file.
# * tag: A tag is a text string which can be assigned to an item. Tags can
# consist of any character except newlines.
import os
import stat
import errno
import exceptions
import time
import functools
import logging
import fuse
if not hasattr(fuse, '__version__'):
raise RuntimeError, \
"your fuse-py doesn't know of fuse.__version__, probably it's too old."
fuse.fuse_python_api = (0, 2)
-from tagfs.view import View
-from tagfs.cache import cache
-from tagfs.item_access import ItemAccess
+from view import View
+from cache import cache
+from item_access import ItemAccess
from config import Config
from log import logException
class TagFS(fuse.Fuse):
def __init__(self, initwd, *args, **kw):
fuse.Fuse.__init__(self, *args, **kw)
self._initwd = initwd
self._itemsRoot = None
# TODO change command line arguments structure
# goal: tagfs <items dir> <mount dir>
self.parser.add_option('-i',
'--items-dir',
dest = 'itemsDir',
help = 'items directory',
metavar = 'dir')
self.parser.add_option('-t',
'--tag-file',
dest = 'tagFileName',
help = 'tag file name',
metavar = 'file',
default = None)
self.parser.add_option('--value-filter',
action = 'store_true',
dest = 'enableValueFilters',
help = 'Displays value filter directories on toplevel instead of only context entries',
default = None)
self.parser.add_option('--root-items',
action = 'store_true',
dest = 'enableRootItemLinks',
help = 'Display item links in tagfs root directory.',
default = None)
def getItemAccess(self):
# Maybe we should move the parser run from main here.
# Or we should at least check if it was run once...
opts, args = self.cmdline
# Maybe we should add expand user? Maybe even vars???
assert opts.itemsDir != None and opts.itemsDir != ''
itemsRoot = os.path.normpath(
os.path.join(self._initwd, opts.itemsDir))
# TODO rel https://github.com/marook/tagfs/issues#issue/2
# Ensure that mount-point and items dir are disjoined.
# Something along
# assert not os.path.normpath(itemsDir).startswith(itemsRoot)
# try/except here?
try:
return ItemAccess(itemsRoot, self.config.tagFileName)
except OSError, e:
logging.error("Can't create item access from items directory %s. Reason: %s",
itemsRoot, str(e.strerror))
raise
@property
@cache
def config(self):
opts, args = self.cmdline
c = Config(os.path.normpath(os.path.join(self._initwd, opts.itemsDir)))
if opts.tagFileName:
c.tagFileName = opts.tagFileName
if opts.enableValueFilters:
c.enableValueFilters = opts.enableValueFilters
if opts.enableRootItemLinks:
c.enableRootItemLinks = opts.enableRootItemLinks
logging.debug('Using configuration %s' % c)
return c
@property
@cache
def view(self):
itemAccess = self.getItemAccess()
return View(itemAccess, self.config)
@logException
def getattr(self, path):
return self.view.getattr(path)
@logException
def readdir(self, path, offset):
return self.view.readdir(path, offset)
@logException
def readlink(self, path):
return self.view.readlink(path)
@logException
def open(self, path, flags):
return self.view.open(path, flags)
@logException
def read(self, path, size, offset):
return self.view.read(path, size, offset)
@logException
def write(self, path, data, pos):
return self.view.write(path, data, pos)
@logException
def symlink(self, path, linkPath):
return self.view.symlink(path, linkPath)
def main():
fs = TagFS(os.getcwd(),
version = "%prog " + fuse.__version__,
dash_s_do = 'setsingle')
fs.parse(errex = 1)
opts, args = fs.cmdline
if opts.itemsDir == None:
fs.parser.print_help()
# items dir should probably be an arg, not an option.
print "Error: Missing items directory option."
# Quickfix rel https://github.com/marook/tagfs/issues/#issue/3
# FIXME: since we run main via sys.exit(main()), this should
# probably be handled via some return code.
import sys
sys.exit()
return fs.main()
if __name__ == '__main__':
import sys
sys.exit(main())
diff --git a/src/modules/tagfs/node.py b/src/modules/tagfs/node.py
index 8f23a06..e15c2df 100644
--- a/src/modules/tagfs/node.py
+++ b/src/modules/tagfs/node.py
@@ -1,90 +1,90 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import fuse
import stat
-from tagfs.cache import cache
+from cache import cache
class Stat(fuse.Stat):
def __init__(self):
self.st_mode = 0
self.st_ino = 0
self.st_dev = 0
self.st_nlink = 0
self.st_uid = 0
self.st_gid = 0
self.st_size = 0
self.st_atime = 0
self.st_mtime = 0
self.st_ctime = 0
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in self.__dict__]) + ']'
class ItemLinkNode(object):
def __init__(self, item):
self.item = item
@property
def name(self):
return self.item.name
@property
def attr(self):
s = Stat()
s.st_mode = stat.S_IFLNK | 0444
s.st_nlink = 2
return s
def addsValue(self, items):
return True
@property
def link(self):
return self.item.itemDirectory
class DirectoryNode(object):
@property
def attr(self):
s = Stat()
s.st_mode = stat.S_IFDIR | 0555
s.st_mtime = 0
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
def addsValue(self, items):
return True
def _addsValue(self, child):
return True
@property
@cache
def entries(self):
return dict([[e.name, e] for e in self._entries if self._addsValue(e)])
diff --git a/src/modules/tagfs/node_export.py b/src/modules/tagfs/node_export.py
index 23ecab1..36fc57d 100644
--- a/src/modules/tagfs/node_export.py
+++ b/src/modules/tagfs/node_export.py
@@ -1,55 +1,55 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
-from tagfs.cache import cache
-from tagfs.node import Stat, ItemLinkNode, DirectoryNode
-from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
-from tagfs.node_export_csv import ExportCsvFileNode
+from cache import cache
+from node import Stat, ItemLinkNode, DirectoryNode
+from node_untagged_items import UntaggedItemsDirectoryNode
+from node_export_csv import ExportCsvFileNode
class ExportDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, parentNode):
self.itemAccess = itemAccess
self.parentNode = parentNode
@property
def name(self):
return '.export'
@property
def attr(self):
s = super(ExportDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def items(self):
return self.parentNode.items
@property
def _entries(self):
yield ExportCsvFileNode(self.itemAccess, self.parentNode)
diff --git a/src/modules/tagfs/node_export_csv.py b/src/modules/tagfs/node_export_csv.py
index c740a02..c493989 100644
--- a/src/modules/tagfs/node_export_csv.py
+++ b/src/modules/tagfs/node_export_csv.py
@@ -1,107 +1,107 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import stat
-from tagfs.cache import cache
-from tagfs.node import Stat
+from cache import cache
+from node import Stat
class ExportCsvFileNode(object):
COL_SEPARATOR = ';'
TEXT_CHAR = '"'
ROW_SEPARATOR = '\n'
TAG_VALUE_SEPARATOR = '\n'
def __init__(self, itemAccess, parentNode):
self.itemAccess = itemAccess
self.parentNode = parentNode
@property
def name(self):
return 'export.csv'
@property
def items(self):
return self.parentNode.items
def formatRow(self, row):
first = True
for col in row:
if first:
first = False
else:
yield ExportCsvFileNode.COL_SEPARATOR
# TODO escape TEXT_CHAR in col string
yield ExportCsvFileNode.TEXT_CHAR
yield str(col)
yield ExportCsvFileNode.TEXT_CHAR
yield ExportCsvFileNode.ROW_SEPARATOR
@property
def _content(self):
contexts = set()
for i in self.items:
for t in i.tags:
contexts.add(t.context)
headline = ['name', ]
for c in contexts:
headline.append(c)
for s in self.formatRow(headline):
yield s
for i in self.items:
row = [i.name, ]
for c in contexts:
row.append(ExportCsvFileNode.TAG_VALUE_SEPARATOR.join([t.value for t in i.getTagsByContext(c)]))
for s in self.formatRow(row):
yield s
@property
@cache
def content(self):
return ''.join(self._content)
@property
def attr(self):
s = Stat()
s.st_mode = stat.S_IFREG | 0444
s.st_nlink = 2
# TODO replace with memory saving size calculation
import array
s.st_size = len(array.array('c', self.content))
return s
def open(self, path, flags):
return
def read(self, path, size, offset):
return self.content[offset:offset + size]
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
index e024f9f..e1d45ae 100644
--- a/src/modules/tagfs/node_filter.py
+++ b/src/modules/tagfs/node_filter.py
@@ -1,90 +1,90 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
-from tagfs.cache import cache
-from tagfs.node import Stat, ItemLinkNode, DirectoryNode
-from tagfs.node_export import ExportDirectoryNode
+from cache import cache
+from node import Stat, ItemLinkNode, DirectoryNode
+from node_export import ExportDirectoryNode
class FilterDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
self.config = config
@property
def attr(self):
s = super(FilterDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def contexts(self):
c = set()
for item in self.items:
for t in item.tags:
context = t.context
if context is None:
continue
c.add(t.context)
return c
@property
def _entries(self):
# the import is not global because we want to prevent a cyclic
# dependency (ugly but works)
- from tagfs.node_filter_context import ContextValueListDirectoryNode
- from tagfs.node_filter_value import ValueFilterDirectoryNode
+ from node_filter_context import ContextValueListDirectoryNode
+ from node_filter_value import ValueFilterDirectoryNode
yield ExportDirectoryNode(self.itemAccess, self)
if(self.config.enableValueFilters):
for value in self.itemAccess.values:
yield ValueFilterDirectoryNode(self.itemAccess, self.config, self, value)
for context in self.contexts:
yield ContextValueListDirectoryNode(self.itemAccess, self.config, self, context)
for item in self.items:
yield ItemLinkNode(item)
def addsValue(self, parentItems):
itemsLen = len(list(self.items))
if(itemsLen == 0):
return False
# TODO we should not compare the lengths but whether the child and
# parent items are different
parentItemsLen = len(list(parentItems))
return itemsLen != parentItemsLen
def _addsValue(self, child):
return child.addsValue(self.items)
diff --git a/src/modules/tagfs/node_filter_context.py b/src/modules/tagfs/node_filter_context.py
index 5da749c..70f5703 100644
--- a/src/modules/tagfs/node_filter_context.py
+++ b/src/modules/tagfs/node_filter_context.py
@@ -1,102 +1,102 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
-from tagfs.cache import cache
-from tagfs.node import Stat, ItemLinkNode, DirectoryNode
-from tagfs.node_filter import FilterDirectoryNode
-from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+from cache import cache
+from node import Stat, ItemLinkNode, DirectoryNode
+from node_filter import FilterDirectoryNode
+from node_untagged_items import UntaggedItemsDirectoryNode
class ContextValueFilterDirectoryNode(FilterDirectoryNode):
def __init__(self, itemAccess, config, parentNode, context, value):
super(ContextValueFilterDirectoryNode, self).__init__(itemAccess, config)
self.parentNode = parentNode
self.context = context
self.value = value
@property
def name(self):
return self.value
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithContextValue(self.context, self.value):
continue
yield item
class ContextValueListDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, config, parentNode, context):
self.itemAccess = itemAccess
self.config = config
self.parentNode = parentNode
self.context = context
@property
def name(self):
return self.context
@property
def attr(self):
s = super(ContextValueListDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithContext(self.context):
continue
yield item
@property
def contextValues(self):
values = set()
for item in self.parentNode.items:
for tag in item.getTagsByContext(self.context):
values.add(tag.value)
return values
@property
def _entries(self):
for value in self.contextValues:
yield ContextValueFilterDirectoryNode(self.itemAccess, self.config, self, self.context, value)
def addsValue(self, parentItems):
if(super(ContextValueListDirectoryNode, self).addsValue(parentItems)):
return True
for e in self._entries:
if(e.addsValue(parentItems)):
return True
return False
diff --git a/src/modules/tagfs/node_filter_value.py b/src/modules/tagfs/node_filter_value.py
index a5b2c68..17156ec 100644
--- a/src/modules/tagfs/node_filter_value.py
+++ b/src/modules/tagfs/node_filter_value.py
@@ -1,40 +1,40 @@
#
# Copyright 2012 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
-from tagfs.node_filter import FilterDirectoryNode
+from node_filter import FilterDirectoryNode
class ValueFilterDirectoryNode(FilterDirectoryNode):
def __init__(self, itemAccess, config, parentNode, value):
super(ValueFilterDirectoryNode, self).__init__(itemAccess, config)
self.parentNode = parentNode
self.value = value
@property
def name(self):
return self.value
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithValue(self.value):
continue
yield item
diff --git a/src/modules/tagfs/node_root.py b/src/modules/tagfs/node_root.py
index abe0a98..15025d8 100644
--- a/src/modules/tagfs/node_root.py
+++ b/src/modules/tagfs/node_root.py
@@ -1,39 +1,37 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
-from tagfs.cache import cache
-from tagfs.node import Stat, ItemLinkNode
-from tagfs.node_filter import FilterDirectoryNode
-from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+from node_filter import FilterDirectoryNode
+from node_untagged_items import UntaggedItemsDirectoryNode
class RootDirectoryNode(FilterDirectoryNode):
def __init__(self, itemAccess, config):
super(RootDirectoryNode, self).__init__(itemAccess, config)
@property
def items(self):
return self.itemAccess.taggedItems
@property
def _entries(self):
yield UntaggedItemsDirectoryNode('.untagged', self.itemAccess)
for e in super(RootDirectoryNode, self)._entries:
yield e
diff --git a/src/modules/tagfs/node_untagged_items.py b/src/modules/tagfs/node_untagged_items.py
index 9ffcf8f..fbe44d6 100644
--- a/src/modules/tagfs/node_untagged_items.py
+++ b/src/modules/tagfs/node_untagged_items.py
@@ -1,46 +1,46 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
-from tagfs.cache import cache
-from tagfs.node import Stat, ItemLinkNode, DirectoryNode
+from cache import cache
+from node import Stat, ItemLinkNode, DirectoryNode
class UntaggedItemsDirectoryNode(DirectoryNode):
def __init__(self, name, itemAccess):
self.name = name
self.itemAccess = itemAccess
@property
def attr(self):
s = super(UntaggedItemsDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def _entries(self):
for item in self.itemAccess.untaggedItems:
yield ItemLinkNode(item)
diff --git a/src/modules/tagfs/view.py b/src/modules/tagfs/view.py
index a18a5ca..bb89462 100644
--- a/src/modules/tagfs/view.py
+++ b/src/modules/tagfs/view.py
@@ -1,174 +1,174 @@
#!/usr/bin/env python
#
# Copyright 2009, 2010 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import errno
import logging
import os
-from tagfs.log import logCall, logException
-from tagfs.cache import cache
-from tagfs.transient_dict import TransientDict
-from tagfs.node_root import RootDirectoryNode
+from log import logCall, logException
+from cache import cache
+from transient_dict import TransientDict
+from node_root import RootDirectoryNode
from fuse import Direntry
class View(object):
"""Abstraction layer from fuse API.
This class is an abstraction layer from the fuse API. This should ease
writing test cases for the file system.
"""
DEFAULT_NODES = {
# directory icons for rox filer
'.DirIcon': None,
# launch script for rox filer application directories
'AppRun': None
}
def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
self.config = config
self._entryCache = TransientDict(100)
@property
@cache
def rootNode(self):
return RootDirectoryNode(self.itemAccess, self.config)
def getNode(self, path):
if path in self._entryCache:
# simple path name based caching is implemented here
logging.debug('tagfs _entryCache hit')
return self._entryCache[path]
# ps contains the path segments
ps = [x for x in os.path.normpath(path).split(os.sep) if x != '']
psLen = len(ps)
if psLen > 0:
lastSegment = ps[psLen - 1]
if lastSegment in View.DEFAULT_NODES:
logging.debug('Using default node for path ' + path)
return View.DEFAULT_NODES[lastSegment]
e = self.rootNode
for pe in path.split('/')[1:]:
if pe == '':
continue
entries = e.entries
if not pe in entries:
# it seems like we are trying to fetch a node for an illegal
# path
return None
e = entries[pe]
logging.debug('tagfs _entryCache miss')
self._entryCache[path] = e
return e
@logCall
def getattr(self, path):
e = self.getNode(path)
if not e:
logging.debug('Try to read attributes from not existing node: ' + path)
return -errno.ENOENT
return e.attr
@logCall
def readdir(self, path, offset):
e = self.getNode(path)
if not e:
logging.warn('Try to read not existing directory: ' + path)
return -errno.ENOENT
# TODO care about offset parameter
return [Direntry(name) for name in e.entries.iterkeys()]
@logCall
def readlink(self, path):
n = self.getNode(path)
if not n:
logging.warn('Try to read not existing link from node: ' + path)
return -errno.ENOENT
return n.link
@logCall
def symlink(self, path, linkPath):
linkPathSegs = linkPath.split('/')
n = self.getNode('/'.join(linkPathSegs[0:len(linkPathSegs) - 2]))
if not n:
return -errno.ENOENT
return n.symlink(path, linkPath)
@logCall
def open(self, path, flags):
n = self.getNode(path)
if not n:
logging.warn('Try to open not existing node: ' + path)
return -errno.ENOENT
return n.open(path, flags)
@logCall
def read(self, path, len, offset):
n = self.getNode(path)
if not n:
logging.warn('Try to read from not existing node: ' + path)
return -errno.ENOENT
return n.read(path, len, offset)
@logCall
def write(self, path, data, pos):
n = self.getNode(path)
if not n:
logging.warn('Try to write to not existing node: ' + path)
return -errno.ENOENT
return n.write(path, data, pos)
|
marook/tagfs
|
26ed39dd4400ea80b81ba762e256324d0e49ad7d
|
filter directories will only show if they reduce the number of items
|
diff --git a/src/modules/tagfs/node.py b/src/modules/tagfs/node.py
index a8ded1e..8f23a06 100644
--- a/src/modules/tagfs/node.py
+++ b/src/modules/tagfs/node.py
@@ -1,81 +1,90 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import fuse
import stat
from tagfs.cache import cache
class Stat(fuse.Stat):
def __init__(self):
self.st_mode = 0
self.st_ino = 0
self.st_dev = 0
self.st_nlink = 0
self.st_uid = 0
self.st_gid = 0
self.st_size = 0
self.st_atime = 0
self.st_mtime = 0
self.st_ctime = 0
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in self.__dict__]) + ']'
class ItemLinkNode(object):
def __init__(self, item):
self.item = item
@property
def name(self):
return self.item.name
@property
def attr(self):
s = Stat()
s.st_mode = stat.S_IFLNK | 0444
s.st_nlink = 2
return s
+ def addsValue(self, items):
+ return True
+
@property
def link(self):
return self.item.itemDirectory
class DirectoryNode(object):
@property
def attr(self):
s = Stat()
s.st_mode = stat.S_IFDIR | 0555
s.st_mtime = 0
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
+ def addsValue(self, items):
+ return True
+
+ def _addsValue(self, child):
+ return True
+
@property
@cache
def entries(self):
- return dict([[e.name, e] for e in self._entries])
+ return dict([[e.name, e] for e in self._entries if self._addsValue(e)])
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
index 444ea7b..e024f9f 100644
--- a/src/modules/tagfs/node_filter.py
+++ b/src/modules/tagfs/node_filter.py
@@ -1,76 +1,90 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
from tagfs.node_export import ExportDirectoryNode
class FilterDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
self.config = config
@property
def attr(self):
s = super(FilterDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def contexts(self):
c = set()
for item in self.items:
for t in item.tags:
context = t.context
if context is None:
continue
c.add(t.context)
return c
@property
def _entries(self):
# the import is not global because we want to prevent a cyclic
# dependency (ugly but works)
from tagfs.node_filter_context import ContextValueListDirectoryNode
from tagfs.node_filter_value import ValueFilterDirectoryNode
yield ExportDirectoryNode(self.itemAccess, self)
if(self.config.enableValueFilters):
for value in self.itemAccess.values:
yield ValueFilterDirectoryNode(self.itemAccess, self.config, self, value)
for context in self.contexts:
yield ContextValueListDirectoryNode(self.itemAccess, self.config, self, context)
for item in self.items:
yield ItemLinkNode(item)
+
+ def addsValue(self, parentItems):
+ itemsLen = len(list(self.items))
+ if(itemsLen == 0):
+ return False
+
+ # TODO we should not compare the lengths but whether the child and
+ # parent items are different
+ parentItemsLen = len(list(parentItems))
+
+ return itemsLen != parentItemsLen
+
+ def _addsValue(self, child):
+ return child.addsValue(self.items)
diff --git a/src/modules/tagfs/node_filter_context.py b/src/modules/tagfs/node_filter_context.py
index d608cf4..5da749c 100644
--- a/src/modules/tagfs/node_filter_context.py
+++ b/src/modules/tagfs/node_filter_context.py
@@ -1,92 +1,102 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
from tagfs.node_filter import FilterDirectoryNode
from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
class ContextValueFilterDirectoryNode(FilterDirectoryNode):
def __init__(self, itemAccess, config, parentNode, context, value):
super(ContextValueFilterDirectoryNode, self).__init__(itemAccess, config)
self.parentNode = parentNode
self.context = context
self.value = value
@property
def name(self):
return self.value
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithContextValue(self.context, self.value):
continue
yield item
class ContextValueListDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, config, parentNode, context):
self.itemAccess = itemAccess
self.config = config
self.parentNode = parentNode
self.context = context
@property
def name(self):
return self.context
@property
def attr(self):
s = super(ContextValueListDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithContext(self.context):
continue
yield item
@property
def contextValues(self):
values = set()
for item in self.parentNode.items:
for tag in item.getTagsByContext(self.context):
values.add(tag.value)
return values
@property
def _entries(self):
for value in self.contextValues:
yield ContextValueFilterDirectoryNode(self.itemAccess, self.config, self, self.context, value)
+
+ def addsValue(self, parentItems):
+ if(super(ContextValueListDirectoryNode, self).addsValue(parentItems)):
+ return True
+
+ for e in self._entries:
+ if(e.addsValue(parentItems)):
+ return True
+
+ return False
|
marook/tagfs
|
4c1e2cd4d4eb5cfb7e3af4d6db95d3cffe13bf8b
|
fixed value filters
|
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
index 3b2beff..a482c94 100644
--- a/src/modules/tagfs/item_access.py
+++ b/src/modules/tagfs/item_access.py
@@ -1,285 +1,292 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import time
import traceback
from tagfs.cache import cache
class Tag(object):
def __init__(self, value, context = None):
if context == None:
self.context = None
else:
self.context = context.strip()
self.value = value.strip()
if not self.context == None and len(self.context) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
if len(self.value) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
def __hash__(self):
return (self.context, self.value).__hash__()
def __eq__(self, other):
return self.value == other.value and self.context == other.context
def __repr__(self):
return '<Tag %s: %s>' % (self.context, self.value)
def parseTagsFromFile(tagFileName):
"""Parses the tags from the specified file.
@return: The parsed values are returned as a set containing Tag objects.
@see: Tag
"""
tags = set()
tagFile = open(tagFileName, 'r')
try:
for rawTag in tagFile.readlines():
rawTag = rawTag.strip()
try:
if len(rawTag) == 0:
continue
tagTuple = rawTag.split(':', 1)
if len(tagTuple) == 1:
tagContext = None
tagValue = tagTuple[0]
else:
tagContext = tagTuple[0]
tagValue = tagTuple[1]
tag = Tag(tagValue, context = tagContext)
tags.add(tag)
except:
logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
finally:
tagFile.close()
return tags
class Item(object):
def __init__(self, name, itemAccess):
self.name = name
self.itemAccess = itemAccess
# TODO register at file system to receive tag file change events.
@property
@cache
def itemDirectory(self):
return os.path.join(self.itemAccess.dataDirectory, self.name)
@property
@cache
def _tagFileName(self):
"""Returns the name of the tag file for this item.
"""
itemDirectory = self.itemDirectory
return os.path.join(itemDirectory, self.itemAccess.tagFileName)
def __parseTags(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return parseTagsFromFile(tagFileName)
@property
@cache
def tagsCreationTime(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getctime(self._tagFileName)
@property
@cache
def tagsModificationTime(self):
"""Returns the last time when the tags have been modified.
"""
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getmtime(tagFileName)
@property
@cache
def tags(self):
"""Returns the tags as a list for this item.
"""
return self.__parseTags()
def getTagsByContext(self, context):
for t in self.tags:
if context != t.context:
continue
yield t
def isTaggedWithContextValue(self, context, value):
for t in self.getTagsByContext(context):
if value == t.value:
return True
return False
def isTaggedWithContext(self, context):
# TODO don't create whole list... just check wheather list is empty
return (len([c for c in self.getTagsByContext(context)]) > 0)
+
+ def isTaggedWithValue(self, value):
+ for t in self.tags:
+ if value == t.value:
+ return True
+
+ return False
@property
@cache
def tagged(self):
return os.path.exists(self._tagFileName)
def __repr__(self):
return '<Item %s>' % self.name
class ItemAccess(object):
"""This is the access point to the Items.
"""
def __init__(self, dataDirectory, tagFileName):
self.dataDirectory = dataDirectory
self.tagFileName = tagFileName
self.__items = None
self.__tags = None
self.__taggedItems = None
self.__untaggedItems = None
self.parseTime = 0
def __parseItems(self):
items = {}
logging.debug('Start parsing items from dir: %s', self.dataDirectory)
for itemName in os.listdir(self.dataDirectory):
if itemName == '.tagfs':
# skip directory with configuration
continue
try:
item = Item(itemName, self)
items[itemName] = item
except IOError, (error, strerror):
logging.error('Can \'t read tags for item %s: %s',
itemName,
strerror)
logging.debug('Found %s items', len(items))
self.parseTime = time.time()
return items
@property
@cache
def items(self):
return self.__parseItems()
@property
@cache
def tags(self):
tags = set()
for item in self.items.itervalues():
if not item.tagged:
continue
tags = tags | item.tags
return tags
@property
@cache
def taggedItems(self):
return set([item for item in self.items.itervalues() if item.tagged])
@property
@cache
def untaggedItems(self):
return set([item for item in self.items.itervalues() if not item.tagged])
def getItemDirectory(self, item):
return os.path.join(self.dataDirectory, item)
def contextTags(self, context):
contextTags = set()
for tag in self.tags:
if tag.context == context:
contextTags.add(tag)
return contextTags
@property
@cache
def contexts(self):
contexts = set()
for tag in self.tags:
if tag.context == None:
continue
contexts.add(tag.context)
return contexts
@property
@cache
def values(self):
values = set()
for tag in self.tags:
values.add(tag.value)
return values
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
index b037b50..444ea7b 100644
--- a/src/modules/tagfs/node_filter.py
+++ b/src/modules/tagfs/node_filter.py
@@ -1,75 +1,76 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
from tagfs.node_export import ExportDirectoryNode
class FilterDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
self.config = config
@property
def attr(self):
s = super(FilterDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def contexts(self):
c = set()
for item in self.items:
for t in item.tags:
context = t.context
if context is None:
continue
c.add(t.context)
return c
@property
def _entries(self):
# the import is not global because we want to prevent a cyclic
- # dependency
- from tagfs.node_filter_context import ContextValueFilterDirectoryNode, ContextValueListDirectoryNode
+ # dependency (ugly but works)
+ from tagfs.node_filter_context import ContextValueListDirectoryNode
+ from tagfs.node_filter_value import ValueFilterDirectoryNode
yield ExportDirectoryNode(self.itemAccess, self)
if(self.config.enableValueFilters):
for value in self.itemAccess.values:
- yield ContextValueFilterDirectoryNode(self.itemAccess, self.config, self, None, value)
+ yield ValueFilterDirectoryNode(self.itemAccess, self.config, self, value)
for context in self.contexts:
yield ContextValueListDirectoryNode(self.itemAccess, self.config, self, context)
for item in self.items:
yield ItemLinkNode(item)
diff --git a/src/modules/tagfs/node_filter_value.py b/src/modules/tagfs/node_filter_value.py
new file mode 100644
index 0000000..a5b2c68
--- /dev/null
+++ b/src/modules/tagfs/node_filter_value.py
@@ -0,0 +1,40 @@
+#
+# Copyright 2012 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.node_filter import FilterDirectoryNode
+
+class ValueFilterDirectoryNode(FilterDirectoryNode):
+
+ def __init__(self, itemAccess, config, parentNode, value):
+ super(ValueFilterDirectoryNode, self).__init__(itemAccess, config)
+ self.parentNode = parentNode
+ self.value = value
+
+ @property
+ def name(self):
+ return self.value
+
+ @property
+ def items(self):
+ for item in self.parentNode.items:
+ if not item.isTaggedWithValue(self.value):
+ continue
+
+ yield item
+
|
marook/tagfs
|
3b4dfd8edc0ed74a70e03106486f5cb7eccf881a
|
value filters can now be disabled
|
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
index 886b5dc..3b2beff 100644
--- a/src/modules/tagfs/item_access.py
+++ b/src/modules/tagfs/item_access.py
@@ -1,275 +1,285 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import time
import traceback
from tagfs.cache import cache
class Tag(object):
def __init__(self, value, context = None):
if context == None:
self.context = None
else:
self.context = context.strip()
self.value = value.strip()
if not self.context == None and len(self.context) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
if len(self.value) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
def __hash__(self):
return (self.context, self.value).__hash__()
def __eq__(self, other):
return self.value == other.value and self.context == other.context
def __repr__(self):
return '<Tag %s: %s>' % (self.context, self.value)
def parseTagsFromFile(tagFileName):
"""Parses the tags from the specified file.
@return: The parsed values are returned as a set containing Tag objects.
@see: Tag
"""
tags = set()
tagFile = open(tagFileName, 'r')
try:
for rawTag in tagFile.readlines():
rawTag = rawTag.strip()
try:
if len(rawTag) == 0:
continue
tagTuple = rawTag.split(':', 1)
if len(tagTuple) == 1:
tagContext = None
tagValue = tagTuple[0]
else:
tagContext = tagTuple[0]
tagValue = tagTuple[1]
tag = Tag(tagValue, context = tagContext)
tags.add(tag)
except:
logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
finally:
tagFile.close()
return tags
class Item(object):
def __init__(self, name, itemAccess):
self.name = name
self.itemAccess = itemAccess
# TODO register at file system to receive tag file change events.
@property
@cache
def itemDirectory(self):
return os.path.join(self.itemAccess.dataDirectory, self.name)
@property
@cache
def _tagFileName(self):
"""Returns the name of the tag file for this item.
"""
itemDirectory = self.itemDirectory
return os.path.join(itemDirectory, self.itemAccess.tagFileName)
def __parseTags(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return parseTagsFromFile(tagFileName)
@property
@cache
def tagsCreationTime(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getctime(self._tagFileName)
@property
@cache
def tagsModificationTime(self):
"""Returns the last time when the tags have been modified.
"""
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getmtime(tagFileName)
@property
@cache
def tags(self):
"""Returns the tags as a list for this item.
"""
return self.__parseTags()
def getTagsByContext(self, context):
for t in self.tags:
if context != t.context:
continue
yield t
def isTaggedWithContextValue(self, context, value):
for t in self.getTagsByContext(context):
if value == t.value:
return True
return False
def isTaggedWithContext(self, context):
# TODO don't create whole list... just check wheather list is empty
return (len([c for c in self.getTagsByContext(context)]) > 0)
@property
@cache
def tagged(self):
return os.path.exists(self._tagFileName)
def __repr__(self):
return '<Item %s>' % self.name
class ItemAccess(object):
"""This is the access point to the Items.
"""
def __init__(self, dataDirectory, tagFileName):
self.dataDirectory = dataDirectory
self.tagFileName = tagFileName
self.__items = None
self.__tags = None
self.__taggedItems = None
self.__untaggedItems = None
self.parseTime = 0
def __parseItems(self):
items = {}
logging.debug('Start parsing items from dir: %s', self.dataDirectory)
for itemName in os.listdir(self.dataDirectory):
if itemName == '.tagfs':
# skip directory with configuration
continue
try:
item = Item(itemName, self)
items[itemName] = item
except IOError, (error, strerror):
logging.error('Can \'t read tags for item %s: %s',
itemName,
strerror)
logging.debug('Found %s items', len(items))
self.parseTime = time.time()
return items
@property
@cache
def items(self):
return self.__parseItems()
@property
@cache
def tags(self):
tags = set()
for item in self.items.itervalues():
if not item.tagged:
continue
tags = tags | item.tags
return tags
@property
@cache
def taggedItems(self):
return set([item for item in self.items.itervalues() if item.tagged])
@property
@cache
def untaggedItems(self):
return set([item for item in self.items.itervalues() if not item.tagged])
def getItemDirectory(self, item):
return os.path.join(self.dataDirectory, item)
def contextTags(self, context):
contextTags = set()
for tag in self.tags:
if tag.context == context:
contextTags.add(tag)
return contextTags
@property
@cache
def contexts(self):
contexts = set()
for tag in self.tags:
if tag.context == None:
continue
contexts.add(tag.context)
return contexts
+ @property
+ @cache
+ def values(self):
+ values = set()
+
+ for tag in self.tags:
+ values.add(tag.value)
+
+ return values
+
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
index d9317e0..b037b50 100644
--- a/src/modules/tagfs/node_filter.py
+++ b/src/modules/tagfs/node_filter.py
@@ -1,70 +1,75 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
from tagfs.node_export import ExportDirectoryNode
class FilterDirectoryNode(DirectoryNode):
- def __init__(self, itemAccess):
+ def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
+ self.config = config
@property
def attr(self):
s = super(FilterDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def contexts(self):
c = set()
for item in self.items:
for t in item.tags:
context = t.context
if context is None:
continue
c.add(t.context)
return c
@property
def _entries(self):
# the import is not global because we want to prevent a cyclic
# dependency
- from tagfs.node_filter_context import ContextValueListDirectoryNode
+ from tagfs.node_filter_context import ContextValueFilterDirectoryNode, ContextValueListDirectoryNode
yield ExportDirectoryNode(self.itemAccess, self)
+ if(self.config.enableValueFilters):
+ for value in self.itemAccess.values:
+ yield ContextValueFilterDirectoryNode(self.itemAccess, self.config, self, None, value)
+
for context in self.contexts:
- yield ContextValueListDirectoryNode(self.itemAccess, self, context)
+ yield ContextValueListDirectoryNode(self.itemAccess, self.config, self, context)
for item in self.items:
yield ItemLinkNode(item)
diff --git a/src/modules/tagfs/node_filter_context.py b/src/modules/tagfs/node_filter_context.py
index 407a951..d608cf4 100644
--- a/src/modules/tagfs/node_filter_context.py
+++ b/src/modules/tagfs/node_filter_context.py
@@ -1,91 +1,92 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
from tagfs.node_filter import FilterDirectoryNode
from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
class ContextValueFilterDirectoryNode(FilterDirectoryNode):
- def __init__(self, itemAccess, parentNode, context, value):
- super(ContextValueFilterDirectoryNode, self).__init__(itemAccess)
+ def __init__(self, itemAccess, config, parentNode, context, value):
+ super(ContextValueFilterDirectoryNode, self).__init__(itemAccess, config)
self.parentNode = parentNode
self.context = context
self.value = value
@property
def name(self):
return self.value
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithContextValue(self.context, self.value):
continue
yield item
class ContextValueListDirectoryNode(DirectoryNode):
- def __init__(self, itemAccess, parentNode, context):
+ def __init__(self, itemAccess, config, parentNode, context):
self.itemAccess = itemAccess
+ self.config = config
self.parentNode = parentNode
self.context = context
@property
def name(self):
return self.context
@property
def attr(self):
s = super(ContextValueListDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def items(self):
for item in self.parentNode.items:
if not item.isTaggedWithContext(self.context):
continue
yield item
@property
def contextValues(self):
values = set()
for item in self.parentNode.items:
for tag in item.getTagsByContext(self.context):
values.add(tag.value)
return values
@property
def _entries(self):
for value in self.contextValues:
- yield ContextValueFilterDirectoryNode(self.itemAccess, self, self.context, value)
+ yield ContextValueFilterDirectoryNode(self.itemAccess, self.config, self, self.context, value)
diff --git a/src/modules/tagfs/node_root.py b/src/modules/tagfs/node_root.py
index ed0a933..abe0a98 100644
--- a/src/modules/tagfs/node_root.py
+++ b/src/modules/tagfs/node_root.py
@@ -1,40 +1,39 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode
from tagfs.node_filter import FilterDirectoryNode
from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
-from tagfs.node_filter_context import ContextValueListDirectoryNode
class RootDirectoryNode(FilterDirectoryNode):
-
- def __init__(self, itemAccess):
- super(RootDirectoryNode, self).__init__(itemAccess)
+
+ def __init__(self, itemAccess, config):
+ super(RootDirectoryNode, self).__init__(itemAccess, config)
@property
def items(self):
return self.itemAccess.taggedItems
@property
def _entries(self):
yield UntaggedItemsDirectoryNode('.untagged', self.itemAccess)
for e in super(RootDirectoryNode, self)._entries:
yield e
diff --git a/src/modules/tagfs/view.py b/src/modules/tagfs/view.py
index a24ffef..a18a5ca 100644
--- a/src/modules/tagfs/view.py
+++ b/src/modules/tagfs/view.py
@@ -1,175 +1,174 @@
#!/usr/bin/env python
#
# Copyright 2009, 2010 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import errno
import logging
import os
from tagfs.log import logCall, logException
from tagfs.cache import cache
from tagfs.transient_dict import TransientDict
from tagfs.node_root import RootDirectoryNode
from fuse import Direntry
class View(object):
"""Abstraction layer from fuse API.
This class is an abstraction layer from the fuse API. This should ease
writing test cases for the file system.
"""
DEFAULT_NODES = {
# directory icons for rox filer
'.DirIcon': None,
# launch script for rox filer application directories
'AppRun': None
}
def __init__(self, itemAccess, config):
self.itemAccess = itemAccess
self.config = config
self._entryCache = TransientDict(100)
@property
@cache
def rootNode(self):
- # TODO return node.RootNode(self.itemAccess, self.config)
- return RootDirectoryNode(self.itemAccess)
+ return RootDirectoryNode(self.itemAccess, self.config)
def getNode(self, path):
if path in self._entryCache:
# simple path name based caching is implemented here
logging.debug('tagfs _entryCache hit')
return self._entryCache[path]
# ps contains the path segments
ps = [x for x in os.path.normpath(path).split(os.sep) if x != '']
psLen = len(ps)
if psLen > 0:
lastSegment = ps[psLen - 1]
if lastSegment in View.DEFAULT_NODES:
logging.debug('Using default node for path ' + path)
return View.DEFAULT_NODES[lastSegment]
e = self.rootNode
for pe in path.split('/')[1:]:
if pe == '':
continue
entries = e.entries
if not pe in entries:
# it seems like we are trying to fetch a node for an illegal
# path
return None
e = entries[pe]
logging.debug('tagfs _entryCache miss')
self._entryCache[path] = e
return e
@logCall
def getattr(self, path):
e = self.getNode(path)
if not e:
logging.debug('Try to read attributes from not existing node: ' + path)
return -errno.ENOENT
return e.attr
@logCall
def readdir(self, path, offset):
e = self.getNode(path)
if not e:
logging.warn('Try to read not existing directory: ' + path)
return -errno.ENOENT
# TODO care about offset parameter
return [Direntry(name) for name in e.entries.iterkeys()]
@logCall
def readlink(self, path):
n = self.getNode(path)
if not n:
logging.warn('Try to read not existing link from node: ' + path)
return -errno.ENOENT
return n.link
@logCall
def symlink(self, path, linkPath):
linkPathSegs = linkPath.split('/')
n = self.getNode('/'.join(linkPathSegs[0:len(linkPathSegs) - 2]))
if not n:
return -errno.ENOENT
return n.symlink(path, linkPath)
@logCall
def open(self, path, flags):
n = self.getNode(path)
if not n:
logging.warn('Try to open not existing node: ' + path)
return -errno.ENOENT
return n.open(path, flags)
@logCall
def read(self, path, len, offset):
n = self.getNode(path)
if not n:
logging.warn('Try to read from not existing node: ' + path)
return -errno.ENOENT
return n.read(path, len, offset)
@logCall
def write(self, path, data, pos):
n = self.getNode(path)
if not n:
logging.warn('Try to write to not existing node: ' + path)
return -errno.ENOENT
return n.write(path, data, pos)
|
marook/tagfs
|
7c8902e2aab5c0d7831b8322e7e6d1a08acab2b1
|
now there are two empty lines after every chapter in docs
|
diff --git a/README b/README
index be85d57..9e613c6 100644
--- a/README
+++ b/README
@@ -1,119 +1,130 @@
tagfs - tag file system
1) Introduction
2) Requirements
3) Installation
4) Usage
5) Configuration
5.1) Options
5.1.1) tagFileName
5.1.2) enableValueFilters
5.1.3) enableRootItemLinks
6) Tests
7) Further Reading
8) Contact
+
---------------------------------------------------------------------
Introduction
tagfs is used to organize your documents using tags.
This document contains basic usage instructions for users. To develop or debug
tagfs see the README.dev file.
+
---------------------------------------------------------------------
Requirements
* python 2.5, 2.6, 2.7
* Linux kernel with fuse enabled
* python-fuse installed
+
---------------------------------------------------------------------
Installation
To install tagfs into your home directory type the following:
$ python setup.py install --home ~/.local
If you haven't already extended your local python path then add the following
to your environment configuration script. For example to your .bashrc:
export PYTHONPATH=~/.local/lib/python:$PYTHONPATH
+
---------------------------------------------------------------------
Usage
After installation tagfs can be started the following way.
Mount a tagged directory:
$ tagfs -i /path/to/my/items/directory /path/to/my/mount/point
Unmount a tagged directory:
$ fusermount -u /path/to/my/mount/point
+
---------------------------------------------------------------------
Configuration
tagfs can be configured through configuration files. Configuration files are
searched in different locations by tagfs. The following locations are used.
Locations with higher priority come first:
- <items directory>/.tagfs/tagfs.conf
- ~/.tagfs/tagfs.conf
- /etc/tagfs/tagfs.conf
Right now the following configuration options are supported.
+
---------------------------------------------------------------------
Configuration - Options - tagFileName
Through this option the name of the parsed tag files can be specified. The
default value is '.tag'.
Example:
[global]
tagFileName = ABOUT
+
---------------------------------------------------------------------
Configuration - Options - enableValueFilters
You can enable or disable value filters. If you enable value filters you will
see filter directories for each tag value. For value filters the tag's
context can be anyone. The default value is 'false'.
Example:
[global]
enableValueFilters = true
+
---------------------------------------------------------------------
Configuration - Options - enableRootItemLinks
To show links to all items in the tagfs '/' directory enable this option. The
default value is 'false'.
Example:
[global]
enableRootItemLinks = true
+
---------------------------------------------------------------------
Tests
You can execute the test cases via the setup.py script in the project's root
directory.
$ python setup.py test
+
---------------------------------------------------------------------
Further Reading
Using a file system for my bank account (Markus Pielmeier)
http://pielmeier.blogspot.com/2010/08/using-file-system-for-my-bank-account.html
+
---------------------------------------------------------------------
Contact
* homepage: http://wiki.github.com/marook/tagfs
* user group: http://groups.google.com/group/tagfs
* author: Markus Pielmeier <[email protected]>
diff --git a/README.dev b/README.dev
index 7f8f673..195702b 100644
--- a/README.dev
+++ b/README.dev
@@ -1,65 +1,70 @@
tagfs - tag file system
developer readme
1) Logging
2) Profiling
3) Tracing
4) Distribution
4.1) tar Distribution
5) Code Coverage
+
---------------------------------------------------------------------
Logging
You can enable logging by setting a debug environment variable before you
launch tagfs:
$ export DEBUG=1
tagfs will log to the console and the file /tmp/tagfs.log
+
---------------------------------------------------------------------
Profiling
You can enable profiling by setting a profile environment variable before you
launch tagfs:
$ export PROFILE=1
After unmounting your tagfs file system a profile file will be written. The
profile file will be written to the current directory. The profile file will
be named 'tagfs.profile'.
+
---------------------------------------------------------------------
Tracing
Tracing is done via the log output. There is a utility script to analyze the
log files. To analyze a log file execute the following
$ util/trace_logfiles.py /tmp/tagfs.log
The tracing script will output some statistics.
+
---------------------------------------------------------------------
tar Distribution
The tagfs project contains scripts for creating source distribution packages.
To create a tar distribution package you execute the following:
$ make distsnapshot
The make call will create an archive within the target directory. The created
tar file is used for tagfs source distribution.
+
---------------------------------------------------------------------
Code Coverage
The tagfs unit tests can be executed with code coverage measurement enabled.
setup.py will measure the code coverage if the coverage lib is installed.
The coverage lib is available here: http://nedbatchelder.com/code/coverage
If you're a debian user you can try:
$ apt-get install python-coverage
The code coverage will be written below the reports directory after executing
the test cases:
$ python setup.py test
|
marook/tagfs
|
565d23c271f41684bdb4fb4f3f7d1d65a04aac2a
|
added python 2.7 support in documentation
|
diff --git a/README b/README
index 610c47e..be85d57 100644
--- a/README
+++ b/README
@@ -1,119 +1,119 @@
tagfs - tag file system
1) Introduction
2) Requirements
3) Installation
4) Usage
5) Configuration
5.1) Options
5.1.1) tagFileName
5.1.2) enableValueFilters
5.1.3) enableRootItemLinks
6) Tests
7) Further Reading
8) Contact
---------------------------------------------------------------------
Introduction
tagfs is used to organize your documents using tags.
This document contains basic usage instructions for users. To develop or debug
tagfs see the README.dev file.
---------------------------------------------------------------------
Requirements
-* python 2.5, 2.6
+* python 2.5, 2.6, 2.7
* Linux kernel with fuse enabled
* python-fuse installed
---------------------------------------------------------------------
Installation
To install tagfs into your home directory type the following:
$ python setup.py install --home ~/.local
If you haven't already extended your local python path then add the following
to your environment configuration script. For example to your .bashrc:
export PYTHONPATH=~/.local/lib/python:$PYTHONPATH
---------------------------------------------------------------------
Usage
After installation tagfs can be started the following way.
Mount a tagged directory:
$ tagfs -i /path/to/my/items/directory /path/to/my/mount/point
Unmount a tagged directory:
$ fusermount -u /path/to/my/mount/point
---------------------------------------------------------------------
Configuration
tagfs can be configured through configuration files. Configuration files are
searched in different locations by tagfs. The following locations are used.
Locations with higher priority come first:
- <items directory>/.tagfs/tagfs.conf
- ~/.tagfs/tagfs.conf
- /etc/tagfs/tagfs.conf
Right now the following configuration options are supported.
---------------------------------------------------------------------
Configuration - Options - tagFileName
Through this option the name of the parsed tag files can be specified. The
default value is '.tag'.
Example:
[global]
tagFileName = ABOUT
---------------------------------------------------------------------
Configuration - Options - enableValueFilters
You can enable or disable value filters. If you enable value filters you will
see filter directories for each tag value. For value filters the tag's
context can be anyone. The default value is 'false'.
Example:
[global]
enableValueFilters = true
---------------------------------------------------------------------
Configuration - Options - enableRootItemLinks
To show links to all items in the tagfs '/' directory enable this option. The
default value is 'false'.
Example:
[global]
enableRootItemLinks = true
---------------------------------------------------------------------
Tests
You can execute the test cases via the setup.py script in the project's root
directory.
$ python setup.py test
---------------------------------------------------------------------
Further Reading
Using a file system for my bank account (Markus Pielmeier)
http://pielmeier.blogspot.com/2010/08/using-file-system-for-my-bank-account.html
---------------------------------------------------------------------
Contact
* homepage: http://wiki.github.com/marook/tagfs
* user group: http://groups.google.com/group/tagfs
* author: Markus Pielmeier <[email protected]>
|
marook/tagfs
|
16ae7d2dd34e2c44720456a0ee76feabb7b3260c
|
removed no longer used filter code
|
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
index 7052e44..886b5dc 100644
--- a/src/modules/tagfs/item_access.py
+++ b/src/modules/tagfs/item_access.py
@@ -1,351 +1,275 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import time
import traceback
from tagfs.cache import cache
class Tag(object):
def __init__(self, value, context = None):
if context == None:
self.context = None
else:
self.context = context.strip()
self.value = value.strip()
if not self.context == None and len(self.context) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
if len(self.value) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
def __hash__(self):
return (self.context, self.value).__hash__()
def __eq__(self, other):
return self.value == other.value and self.context == other.context
def __repr__(self):
return '<Tag %s: %s>' % (self.context, self.value)
def parseTagsFromFile(tagFileName):
"""Parses the tags from the specified file.
@return: The parsed values are returned as a set containing Tag objects.
@see: Tag
"""
tags = set()
tagFile = open(tagFileName, 'r')
try:
for rawTag in tagFile.readlines():
rawTag = rawTag.strip()
try:
if len(rawTag) == 0:
continue
tagTuple = rawTag.split(':', 1)
if len(tagTuple) == 1:
tagContext = None
tagValue = tagTuple[0]
else:
tagContext = tagTuple[0]
tagValue = tagTuple[1]
tag = Tag(tagValue, context = tagContext)
tags.add(tag)
except:
logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
finally:
tagFile.close()
return tags
class Item(object):
def __init__(self, name, itemAccess):
self.name = name
self.itemAccess = itemAccess
# TODO register at file system to receive tag file change events.
@property
@cache
def itemDirectory(self):
return os.path.join(self.itemAccess.dataDirectory, self.name)
@property
@cache
def _tagFileName(self):
"""Returns the name of the tag file for this item.
"""
itemDirectory = self.itemDirectory
return os.path.join(itemDirectory, self.itemAccess.tagFileName)
def __parseTags(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return parseTagsFromFile(tagFileName)
@property
@cache
def tagsCreationTime(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getctime(self._tagFileName)
@property
@cache
def tagsModificationTime(self):
"""Returns the last time when the tags have been modified.
"""
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getmtime(tagFileName)
@property
@cache
def tags(self):
"""Returns the tags as a list for this item.
"""
return self.__parseTags()
def getTagsByContext(self, context):
for t in self.tags:
if context != t.context:
continue
yield t
def isTaggedWithContextValue(self, context, value):
for t in self.getTagsByContext(context):
if value == t.value:
return True
return False
def isTaggedWithContext(self, context):
# TODO don't create whole list... just check wheather list is empty
return (len([c for c in self.getTagsByContext(context)]) > 0)
@property
@cache
def tagged(self):
return os.path.exists(self._tagFileName)
def __repr__(self):
return '<Item %s>' % self.name
-class TagValueFilter(object):
-
- def __init__(self, tagValue):
- self.tagValue = tagValue
-
- def filterItems(self, items):
- droppedItems = set()
-
- for item in items:
- hasTagValue = False
-
- for itemTag in item.tags:
- if itemTag.value == self.tagValue:
- hasTagValue = True
-
- break
-
- if not hasTagValue:
- droppedItems.add(item)
-
- items -= droppedItems
-
-class TagFilter(object):
-
- def __init__(self, tag):
- self.tag = tag
-
- def filterItems(self, items):
- droppedItems = set()
-
- for item in items:
- if not self.tag in item.tags:
- droppedItems.add(item)
-
- items -= droppedItems
-
-class AndFilter(object):
- """Concatenates two filters with a logical 'and'.
- """
-
- def __init__(self, subFilters):
- self.subFilters = subFilters
-
- def filterItems(self, items):
- for subFilter in self.subFilters:
- subFilter.filterItems(items)
-
-class NoneFilter(object):
-
- def filterItems(self, items):
- pass
-
-class NotContextFilter(object):
-
- def __init__(self, context):
- self.context = context
-
- def filterItems(self, items):
- droppedItems = set()
-
- for item in items:
- for tag in item.tags:
- if self.context == tag.context:
- droppedItems.add(item)
-
- break
-
- items -= droppedItems
-
class ItemAccess(object):
"""This is the access point to the Items.
"""
def __init__(self, dataDirectory, tagFileName):
self.dataDirectory = dataDirectory
self.tagFileName = tagFileName
self.__items = None
self.__tags = None
self.__taggedItems = None
self.__untaggedItems = None
self.parseTime = 0
def __parseItems(self):
items = {}
logging.debug('Start parsing items from dir: %s', self.dataDirectory)
for itemName in os.listdir(self.dataDirectory):
if itemName == '.tagfs':
# skip directory with configuration
continue
try:
item = Item(itemName, self)
items[itemName] = item
except IOError, (error, strerror):
logging.error('Can \'t read tags for item %s: %s',
itemName,
strerror)
logging.debug('Found %s items', len(items))
self.parseTime = time.time()
return items
@property
@cache
def items(self):
return self.__parseItems()
@property
@cache
def tags(self):
tags = set()
for item in self.items.itervalues():
if not item.tagged:
continue
tags = tags | item.tags
return tags
@property
@cache
def taggedItems(self):
return set([item for item in self.items.itervalues() if item.tagged])
@property
@cache
def untaggedItems(self):
return set([item for item in self.items.itervalues() if not item.tagged])
def getItemDirectory(self, item):
return os.path.join(self.dataDirectory, item)
- def filterItems(self, filter):
- resultItems = set([item for item in self.taggedItems])
-
- filter.filterItems(resultItems)
-
- return resultItems
-
def contextTags(self, context):
contextTags = set()
for tag in self.tags:
if tag.context == context:
contextTags.add(tag)
return contextTags
@property
@cache
def contexts(self):
contexts = set()
for tag in self.tags:
if tag.context == None:
continue
contexts.add(tag.context)
return contexts
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
|
marook/tagfs
|
061fe693d949b0b95ccbbdc713fcee3328c0f0b2
|
migrated csv export from tagfs master branch
|
diff --git a/src/modules/tagfs/node_export.py b/src/modules/tagfs/node_export.py
index 8301282..23ecab1 100644
--- a/src/modules/tagfs/node_export.py
+++ b/src/modules/tagfs/node_export.py
@@ -1,54 +1,55 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+from tagfs.node_export_csv import ExportCsvFileNode
class ExportDirectoryNode(DirectoryNode):
def __init__(self, itemAccess, parentNode):
self.itemAccess = itemAccess
self.parentNode = parentNode
@property
def name(self):
return '.export'
@property
def attr(self):
s = super(ExportDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def items(self):
return self.parentNode.items
@property
def _entries(self):
- return []
+ yield ExportCsvFileNode(self.itemAccess, self.parentNode)
diff --git a/src/modules/tagfs/node_export_csv.py b/src/modules/tagfs/node_export_csv.py
new file mode 100644
index 0000000..c740a02
--- /dev/null
+++ b/src/modules/tagfs/node_export_csv.py
@@ -0,0 +1,107 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import stat
+
+from tagfs.cache import cache
+from tagfs.node import Stat
+
+class ExportCsvFileNode(object):
+
+ COL_SEPARATOR = ';'
+
+ TEXT_CHAR = '"'
+
+ ROW_SEPARATOR = '\n'
+
+ TAG_VALUE_SEPARATOR = '\n'
+
+ def __init__(self, itemAccess, parentNode):
+ self.itemAccess = itemAccess
+ self.parentNode = parentNode
+
+ @property
+ def name(self):
+ return 'export.csv'
+
+ @property
+ def items(self):
+ return self.parentNode.items
+
+ def formatRow(self, row):
+ first = True
+
+ for col in row:
+ if first:
+ first = False
+ else:
+ yield ExportCsvFileNode.COL_SEPARATOR
+
+ # TODO escape TEXT_CHAR in col string
+ yield ExportCsvFileNode.TEXT_CHAR
+ yield str(col)
+ yield ExportCsvFileNode.TEXT_CHAR
+
+ yield ExportCsvFileNode.ROW_SEPARATOR
+
+ @property
+ def _content(self):
+ contexts = set()
+ for i in self.items:
+ for t in i.tags:
+ contexts.add(t.context)
+
+ headline = ['name', ]
+ for c in contexts:
+ headline.append(c)
+ for s in self.formatRow(headline):
+ yield s
+
+ for i in self.items:
+ row = [i.name, ]
+
+ for c in contexts:
+ row.append(ExportCsvFileNode.TAG_VALUE_SEPARATOR.join([t.value for t in i.getTagsByContext(c)]))
+
+ for s in self.formatRow(row):
+ yield s
+
+ @property
+ @cache
+ def content(self):
+ return ''.join(self._content)
+
+ @property
+ def attr(self):
+ s = Stat()
+
+ s.st_mode = stat.S_IFREG | 0444
+ s.st_nlink = 2
+
+ # TODO replace with memory saving size calculation
+ import array
+ s.st_size = len(array.array('c', self.content))
+
+ return s
+
+ def open(self, path, flags):
+ return
+
+ def read(self, path, size, offset):
+ return self.content[offset:offset + size]
|
marook/tagfs
|
fb250ddf0bf41e1b9c917191b537dacb4123f0db
|
introduced ExportDirectoryNode
|
diff --git a/src/modules/tagfs/node_export.py b/src/modules/tagfs/node_export.py
new file mode 100644
index 0000000..8301282
--- /dev/null
+++ b/src/modules/tagfs/node_export.py
@@ -0,0 +1,54 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.cache import cache
+from tagfs.node import Stat, ItemLinkNode, DirectoryNode
+from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+
+class ExportDirectoryNode(DirectoryNode):
+
+ def __init__(self, itemAccess, parentNode):
+ self.itemAccess = itemAccess
+ self.parentNode = parentNode
+
+ @property
+ def name(self):
+ return '.export'
+
+ @property
+ def attr(self):
+ s = super(ExportDirectoryNode, self).attr
+
+ # TODO why nlink == 2?
+ s.st_nlink = 2
+
+ # TODO write test case which tests st_mtime == itemAccess.parseTime
+ s.st_mtime = self.itemAccess.parseTime
+ s.st_ctime = s.st_mtime
+ s.st_atime = s.st_mtime
+
+ return s
+
+ @property
+ def items(self):
+ return self.parentNode.items
+
+ @property
+ def _entries(self):
+ return []
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
index 4da8ee5..d9317e0 100644
--- a/src/modules/tagfs/node_filter.py
+++ b/src/modules/tagfs/node_filter.py
@@ -1,67 +1,70 @@
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from tagfs.cache import cache
from tagfs.node import Stat, ItemLinkNode, DirectoryNode
+from tagfs.node_export import ExportDirectoryNode
class FilterDirectoryNode(DirectoryNode):
def __init__(self, itemAccess):
self.itemAccess = itemAccess
@property
def attr(self):
s = super(FilterDirectoryNode, self).attr
# TODO why nlink == 2?
s.st_nlink = 2
# TODO write test case which tests st_mtime == itemAccess.parseTime
s.st_mtime = self.itemAccess.parseTime
s.st_ctime = s.st_mtime
s.st_atime = s.st_mtime
return s
@property
def contexts(self):
c = set()
for item in self.items:
for t in item.tags:
context = t.context
if context is None:
continue
c.add(t.context)
return c
@property
def _entries(self):
# the import is not global because we want to prevent a cyclic
# dependency
from tagfs.node_filter_context import ContextValueListDirectoryNode
+ yield ExportDirectoryNode(self.itemAccess, self)
+
for context in self.contexts:
yield ContextValueListDirectoryNode(self.itemAccess, self, context)
for item in self.items:
yield ItemLinkNode(item)
|
marook/tagfs
|
c5c57eaa8f8e3ad53ac2f7c515f040492083e8d7
|
report snapshot
|
diff --git a/reports/2011-06-06_05_28_41/coverage.txt b/reports/2011-06-06_05_28_41/coverage.txt
new file mode 100644
index 0000000..9e8542a
--- /dev/null
+++ b/reports/2011-06-06_05_28_41/coverage.txt
@@ -0,0 +1,20 @@
+Name Stmts Exec Cover Missing
+---------------------------------------------------------------------
+src/bin/tagfs 15 0 0% 21-42
+src/modules/tagfs/__init__ 1 0 0% 1
+src/modules/tagfs/cache 33 7 21% 21-35, 38-75, 90-92
+src/modules/tagfs/config 38 0 0% 21-92
+src/modules/tagfs/item_access 195 0 0% 19-351
+src/modules/tagfs/log 24 0 0% 20-60
+src/modules/tagfs/log_config 15 0 0% 21-51
+src/modules/tagfs/main 77 0 0% 28-180
+src/modules/tagfs/node 40 23 57% 20-27, 39-44, 47, 51, 60-66, 78-79
+src/modules/tagfs/node_export 19 0 0% 20-53
+src/modules/tagfs/node_filter 27 19 70% 20-25, 28, 42, 51, 57
+src/modules/tagfs/node_filter_context 45 17 37% 20-27, 33-37, 41, 45-47, 52, 56, 70-91
+src/modules/tagfs/node_root 14 5 35% 20-28, 31, 35
+src/modules/tagfs/node_untagged_items 16 10 62% 20-25, 29, 43
+src/modules/tagfs/transient_dict 51 38 74% 20-24, 27, 30-32, 36, 43, 47, 54, 64, 73, 92
+src/modules/tagfs/view 81 0 0% 21-174
+---------------------------------------------------------------------
+TOTAL 691 119 17%
diff --git a/reports/2011-06-06_05_28_41/tests.txt b/reports/2011-06-06_05_28_41/tests.txt
new file mode 100644
index 0000000..fd73d63
--- /dev/null
+++ b/reports/2011-06-06_05_28_41/tests.txt
@@ -0,0 +1,21 @@
+testForgettValuesWhenDictSizeExceeded (tagfs_test_small.test_transient_dict.TestTransientDict) ... ok
+testGetAndSetValues (tagfs_test_small.test_transient_dict.TestTransientDict) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testNodeHasName (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testUntaggedItemAccessItemsAreUntaggedItemsDirectoryEntries (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testItemLinksReplaceUntaggedDirectory (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeContainerContainsTaggedNodeLinks (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeContainsUntaggedDirectory (tagfs_test_small.test_root_directory_node.TestRootDirectoryNodeUntaggedDirectory) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_filter_context_value_list_directory_node.TestContextValueListDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_filter_context_value_list_directory_node.TestContextValueListDirectoryNode) ... ok
+testMatchingItemIsAvailableAsLink (tagfs_test_small.test_filter_context_value_filter_directory_node.TestContextValueFilterDirectoryNode) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_filter_context_value_filter_directory_node.TestContextValueFilterDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_filter_context_value_filter_directory_node.TestContextValueFilterDirectoryNode) ... ok
+
+----------------------------------------------------------------------
+Ran 16 tests in 0.006s
+
+OK
|
marook/tagfs
|
a0c8f2d34abea505df7eaedf2b7dcbdab8b31e0d
|
implemented test for ContextValueFilterDirectoryNode
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..bd44ede
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+target
+build
diff --git a/.project b/.project
new file mode 100644
index 0000000..188e611
--- /dev/null
+++ b/.project
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>tagfs</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.python.pydev.PyDevBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.python.pydev.pythonNature</nature>
+ </natures>
+</projectDescription>
diff --git a/.pydevproject b/.pydevproject
new file mode 100644
index 0000000..80b5d5d
--- /dev/null
+++ b/.pydevproject
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?>
+
+<pydev_project>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/tagfs/src</path>
+<path>/tagfs/test</path>
+</pydev_pathproperty>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.5</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+</pydev_project>
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..1a2c117
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,2 @@
+Markus Pielmeier <[email protected]>
+Peter Prohaska <[email protected]>
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..37e498f
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ The tagfs is a virtual file system for filtering tagged directories.
+ Copyright (C) 2009 Markus Pielmeier
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ tagfs Copyright (C) 2009 Markus Pielmeier
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e17582a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,105 @@
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+prefix = /usr/local
+bindir = $(prefix)/bin
+docdir = $(prefix)/share/doc/tagfs
+installdirs = $(bindir) $(docdir)
+
+srcdir = .
+targetdir = $(srcdir)/target
+
+testdatadir = $(srcdir)/etc/test/events
+testmntdir = $(shell pwd)/mnt
+
+pymoddir = $(srcdir)/src/modules
+
+PYTHON = python
+INSTALL = install
+INSTALL_DATA = $(INSTALL) -m 644
+INSTALL_PROGRAM = $(INSTALL)
+
+DOCS = AUTHORS COPYING README VERSION
+
+VERSION = `cat VERSION`
+TSTAMP = `date '+%Y%m%d_%H%M'`
+
+.PHONY: all
+all:
+ @echo "42. That's all."
+ @echo "Try 'make mounttest' for something more interesting."
+
+.PHONY: clean
+clean:
+ find $(srcdir) -name '*.pyc' -type f -exec rm {} \;
+ @if test "`mount | grep -e 'tagfs.*on.*$(testmntdir)'`"; then \
+ echo "tagfs mounted on '$(testmntdir)' -- keeping it."; \
+ elif test -d '$(testmntdir)'; then \
+ echo 'removing $(testmntdir)'; \
+ rmdir '$(testmntdir)'; \
+ fi
+
+ rm -r -- "$(targetdir)"
+
+.PHONY: test
+test:
+ $(PYTHON) $(srcdir)/test/test_all.py
+
+$(installdirs):
+ $(INSTALL) -d $(installdirs)
+
+.PHONY: install
+install: $(installdirs)
+ $(INSTALL_PROGRAM) $(srcdir)/src/tagfs $(bindir)/tagfs
+ $(INSTALL_DATA) $(DOCS) $(docdir)
+
+.PHONY: uninstall
+uninstall:
+ rm -- $(bindir)/tagfs
+ rm -r -- $(docdir)
+
+$(testmntdir):
+ mkdir -p $@
+
+.PHONY: mounttest
+mounttest: $(testmntdir)
+ PYTHONPATH=$(pymoddir):$(PYTHONPATH) \
+ $(PYTHON) $(srcdir)/src/bin/tagfs -i $(testdatadir) $(testmntdir)
+
+.PHONY: unmounttest
+unmounttest:
+ fusermount -u $(testmntdir)
+ rmdir -- $(testmntdir)
+
+.PHONY: umounttest
+umounttest: unmounttest
+
+.PHONY: mt
+mt: mounttest
+
+.PHONY: umt
+umt: unmounttest
+
+.PHONY: distsnapshot
+distsnapshot:
+ mkdir -p -- "$(targetdir)/tagfs_$(VERSION)_snapshot_$(TSTAMP)"
+
+ cp -a $(DOCS) etc src test util setup.py README.dev Makefile "$(targetdir)/tagfs_$(VERSION)_snapshot_$(TSTAMP)"
+
+ tar cjf $(targetdir)/tagfs_$(VERSION)_snapshot_$(TSTAMP)-src.tar.bz2 '--exclude=*~' '--exclude=*.pyc' -C "$(targetdir)" "tagfs_$(VERSION)_snapshot_$(TSTAMP)"
diff --git a/README b/README
new file mode 100644
index 0000000..9a8cbe4
--- /dev/null
+++ b/README
@@ -0,0 +1,119 @@
+tagfs - tag file system
+
+1) Introduction
+2) Requirements
+3) Installation
+4) Usage
+5) Configuration
+5.1) Options
+5.1.1) tagFileName
+5.1.2) enableValueFilters
+5.1.3) enableRootItemLinks
+6) Tests
+7) Further Reading
+8) Contact
+
+---------------------------------------------------------------------
+Introduction
+
+tagfs is used to organize your documents using tags.
+
+This document contains basic usage instructions for users. To develop or debug
+tagfs see the README.dev file.
+
+---------------------------------------------------------------------
+Requirements
+
+* python 2.5, 2.6
+* Linux kernel with fuse enabled
+* python-fuse installed
+
+---------------------------------------------------------------------
+Installation
+
+To install tagfs into your home directory type the following:
+
+$ python setup.py install --home ~/.local
+
+If you haven't already extended your local python path then add the following
+to your environment configuration script. For example to your .bashrc:
+
+export PYTHONPATH=~/.local/lib/python:$PYTHONPATH
+
+---------------------------------------------------------------------
+Usage
+
+After installation tagfs can be started the following way.
+
+Mount a tagged directory:
+$ tagfs -i /path/to/my/items/directory /path/to/my/mount/point
+
+Unmount a tagged directory:
+$ fusermount -u /path/to/my/mount/point
+
+---------------------------------------------------------------------
+Configuration
+
+tagfs can be configured through configuration files. Configuration files are
+searched in different locations by tagfs. The following locations are used.
+Locations with higher priority come first:
+- <items directory>/.tagfs/tagfs.conf
+- ~/.tagfs/tagfs.conf
+- /etc/tagfs/tagfs.conf
+
+Right now the following configuration options are supported.
+
+---------------------------------------------------------------------
+Configuration - Options - tagFileName
+
+Through this option the name of the parsed tag files can be specified. The
+default value is '.tag'.
+
+Example:
+
+[global]
+tagFileName = ABOUT
+
+---------------------------------------------------------------------
+Configuration - Options - enableValueFilters
+
+You can enable or disable value filters. If you enable value filters you will
+see filter directories for each tag value. For value filters the tag's
+context can be anyone. The default value is 'false'.
+
+Example:
+
+[global]
+enableValueFilters = true
+
+---------------------------------------------------------------------
+Configuration - Options - enableRootItemLinks
+
+To show links to all items in the tagfs '/' directory enable this option. The
+default value is 'false'.
+
+Example:
+
+[global]
+enableRootItemLinks = true
+
+---------------------------------------------------------------------
+Tests
+
+You can execute the test cases via the setup.py script in the project's root
+directory.
+
+$ python setup.py test
+
+---------------------------------------------------------------------
+Further Reading
+
+Using a file system for my bank account (Markus Pielmeier)
+http://pielmeier.blogspot.com/2010/08/using-file-system-for-my-bank-account.html
+
+---------------------------------------------------------------------
+Contact
+
+* homepage: http://wiki.github.com/marook/tagfs
+* user group: http://groups.google.com/group/tagfs
+* author: Markus Pielmeier <[email protected]>
diff --git a/README.dev b/README.dev
new file mode 100644
index 0000000..7f8f673
--- /dev/null
+++ b/README.dev
@@ -0,0 +1,65 @@
+tagfs - tag file system
+developer readme
+
+1) Logging
+2) Profiling
+3) Tracing
+4) Distribution
+4.1) tar Distribution
+5) Code Coverage
+
+---------------------------------------------------------------------
+Logging
+
+You can enable logging by setting a debug environment variable before you
+launch tagfs:
+$ export DEBUG=1
+
+tagfs will log to the console and the file /tmp/tagfs.log
+
+---------------------------------------------------------------------
+Profiling
+
+You can enable profiling by setting a profile environment variable before you
+launch tagfs:
+$ export PROFILE=1
+
+After unmounting your tagfs file system a profile file will be written. The
+profile file will be written to the current directory. The profile file will
+be named 'tagfs.profile'.
+
+---------------------------------------------------------------------
+Tracing
+
+Tracing is done via the log output. There is a utility script to analyze the
+log files. To analyze a log file execute the following
+
+$ util/trace_logfiles.py /tmp/tagfs.log
+
+The tracing script will output some statistics.
+
+---------------------------------------------------------------------
+tar Distribution
+
+The tagfs project contains scripts for creating source distribution packages.
+To create a tar distribution package you execute the following:
+
+$ make distsnapshot
+
+The make call will create an archive within the target directory. The created
+tar file is used for tagfs source distribution.
+
+---------------------------------------------------------------------
+Code Coverage
+
+The tagfs unit tests can be executed with code coverage measurement enabled.
+setup.py will measure the code coverage if the coverage lib is installed.
+
+The coverage lib is available here: http://nedbatchelder.com/code/coverage
+
+If you're a debian user you can try:
+$ apt-get install python-coverage
+
+The code coverage will be written below the reports directory after executing
+the test cases:
+$ python setup.py test
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..49d5957
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.1
diff --git a/etc/demo/README b/etc/demo/README
new file mode 100644
index 0000000..94226fc
--- /dev/null
+++ b/etc/demo/README
@@ -0,0 +1,145 @@
+The demo subdirectory contains the file structure for a little example. It shows
+what tagfs is doing and how you can use it.
+
+---------------------------------------------------------------------
+tagfs is used to organize your documents using tags. tagfs requires you to
+keep your files in a simple directory structure.
+
+In our example we are organizing some holiday pictures from india and south korea.
+So we create two item directories below the events directory:
+* 2008-03-29 - holiday south korea
+* 2008-12-25 - holiday india
+
+The names of the item directories can be anything you want but it's recommended
+to add date timestamps. These timestamps allow you to have a look at your
+documents in a chronological order and prevent you from specifying duplicate
+names. For tagfs the timestamp is irrelevant.
+
+Now that we have created the item directories below the event directory we can
+tag them. To do so we add .tag files within them. And to make it more exciting
+we add some images which represent our documents. Then we have a directory
+structure like this:
+
+events/
+|-- 2008-03-29 - holiday south korea
+| |-- .tag
+| `-- 00_IMG008.jpg
+`-- 2008-12-25 - holiday india
+ |-- .tag
+ `-- cimg1029.jpg
+
+In this example the directory structure below the item directories is flat. In
+the real world the content and directory structure below the item directories
+is not limited. Except that the tag file must be named .tag.
+
+As already mentioned the .tag files contain the tags. The .tag file for the
+south korea holiday looks like this:
+
+holiday
+airport
+korea
+
+As you can imagine we have applied three tags: holiday, airport and korea. The
+tags are newline separated and can contain spaces too. Empty lines are ignored.
+For the india holiday we use the following .tag file:
+
+holiday
+airport
+india
+
+Now that we have organized our documents and applied tags on them we can start
+to search for our data. To do so we first mount the tagfs. Open your bash, enter
+the demo directory and execute the following:
+
+$ tagfs.py -i events tags
+
+This will mount the tagfs below the tags directory. The event directory contains
+the item directories which will be parsed for tags. As a result you will get the
+following directory tree below the tags directory:
+
+tags/
+|-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+|-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+|-- airport
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| |-- holiday
+| | |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | |-- india
+| | | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- korea
+| | `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- india
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- holiday
+| | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- korea
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| `-- holiday
+| `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+|-- holiday
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| |-- airport
+| | |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | |-- india
+| | | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- korea
+| | `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- india
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- airport
+| | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- korea
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| `-- airport
+| `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+|-- india
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| |-- airport
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- holiday
+| | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- holiday
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- airport
+| `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+`-- korea
+ |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ |-- airport
+ | |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ | `-- holiday
+ | `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ `-- holiday
+ |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ `-- airport
+ `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+
+OK... that's a lot! The idea behind the tagfs is a simple directory based filter
+system. If you want to see anything relevant for the tags india you type:
+
+$ ls -1 tags/india
+
+The output will be:
+
+2008-12-25 - holiday india
+airport
+holiday
+
+The output will show you all item directories as links which are tagged with
+india. Additionally you will see all tags which can be further combined with
+india and show you further results. The tag korea is not shown as there would
+be no results if you filter by india and korea.
+
+Filtering for multiple tags at once can be done like this:
+
+$ ls -1 tags/india/holiday
+
+You will get the output:
+
+2008-12-25 - holiday india
+airport
+
+I hope this explains the concept. Now it's your turn :-) Try tagfs yourself!
diff --git a/etc/demo/events/2008-03-29 - holiday south korea/.tag b/etc/demo/events/2008-03-29 - holiday south korea/.tag
new file mode 100644
index 0000000..6559d90
--- /dev/null
+++ b/etc/demo/events/2008-03-29 - holiday south korea/.tag
@@ -0,0 +1,3 @@
+holiday
+airport
+location: korea
diff --git a/etc/demo/events/2008-03-29 - holiday south korea/00_IMG008.jpg b/etc/demo/events/2008-03-29 - holiday south korea/00_IMG008.jpg
new file mode 100644
index 0000000..87cbd57
Binary files /dev/null and b/etc/demo/events/2008-03-29 - holiday south korea/00_IMG008.jpg differ
diff --git a/etc/demo/events/2008-12-25 - holiday india/.tag b/etc/demo/events/2008-12-25 - holiday india/.tag
new file mode 100644
index 0000000..5a0c76a
--- /dev/null
+++ b/etc/demo/events/2008-12-25 - holiday india/.tag
@@ -0,0 +1,3 @@
+holiday
+airport
+location: india
diff --git a/etc/demo/events/2008-12-25 - holiday india/cimg1029.jpg b/etc/demo/events/2008-12-25 - holiday india/cimg1029.jpg
new file mode 100644
index 0000000..08ace17
Binary files /dev/null and b/etc/demo/events/2008-12-25 - holiday india/cimg1029.jpg differ
diff --git a/etc/demo/events/2009-07-29 - no tags/emptydir b/etc/demo/events/2009-07-29 - no tags/emptydir
new file mode 100644
index 0000000..e69de29
diff --git a/etc/demo/tags/.deleteme b/etc/demo/tags/.deleteme
new file mode 100644
index 0000000..e69de29
diff --git a/etc/rdf/example-rdf.xml b/etc/rdf/example-rdf.xml
new file mode 100644
index 0000000..947273d
--- /dev/null
+++ b/etc/rdf/example-rdf.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:dc="http://purl.org/dc/elements/1.1/">
+ <!-- TODO define tfs XML namespace -->
+  <!-- TODO how to separate between context and tag descriptions? -->
+
+ <rdf:Description rdf:about="location">
+ <!-- this is a context description -->
+
+ <dc:title>location</dc:title>
+
+ <!-- TODO -->
+ </rdf:Description>
+
+ <rdf:Description rdf:about="holiday">
+ <!-- this is a tag description -->
+
+ <dc:title>holiday</dc:title>
+
+ <!-- TODO -->
+ </rdf:Description>
+
+ <rdf:Description rdf:about="airport">
+ <!-- this is a tag description -->
+
+ <dc:title>airport</dc:title>
+
+ <!-- TODO -->
+ </rdf:Description>
+
+ <rdf:Description rdf:about="file://path/to/my/events/2008-03-29 - holiday south korea">
+ <!-- this is an item description -->
+
+ <dc:title>2008-03-29 - holiday south korea</dc:title>
+
+ <tfs:tagging>
+ <tfs:context rfd:resource="location" />
+ <tfs:tag rdf:resource="holiday" />
+ </tfs:tagging>
+
+ <tfs:tagging>
+ <tfs:tag rdf:resource="airport" />
+ </tfs:tagging>
+
+ <!-- TODO applied tags -->
+ </rdf:Description>
+</rdf:RDF>
diff --git a/etc/test/events/.tagfs/tagfs.conf b/etc/test/events/.tagfs/tagfs.conf
new file mode 100644
index 0000000..b355dd9
--- /dev/null
+++ b/etc/test/events/.tagfs/tagfs.conf
@@ -0,0 +1,5 @@
+
+[global]
+
+enableValueFilters = true
+enableRootItemLinks = true
diff --git a/etc/test/events/2008-03-29 - holiday south korea/.tag b/etc/test/events/2008-03-29 - holiday south korea/.tag
new file mode 100644
index 0000000..2732670
--- /dev/null
+++ b/etc/test/events/2008-03-29 - holiday south korea/.tag
@@ -0,0 +1,5 @@
+holiday
+airport
+korea
+creator: Markus Pielmeier
+object: tube
diff --git a/etc/test/events/2008-03-29 - holiday south korea/00_IMG008.jpg b/etc/test/events/2008-03-29 - holiday south korea/00_IMG008.jpg
new file mode 100644
index 0000000..87cbd57
Binary files /dev/null and b/etc/test/events/2008-03-29 - holiday south korea/00_IMG008.jpg differ
diff --git a/etc/test/events/2008-11-11 - airport underground railway/.tag b/etc/test/events/2008-11-11 - airport underground railway/.tag
new file mode 100644
index 0000000..bbe1207
--- /dev/null
+++ b/etc/test/events/2008-11-11 - airport underground railway/.tag
@@ -0,0 +1,4 @@
+airport
+creator: Tama Yuri
+source: flickr
+object: tube
diff --git a/etc/test/events/2008-11-11 - airport underground railway/airport.jpg b/etc/test/events/2008-11-11 - airport underground railway/airport.jpg
new file mode 100644
index 0000000..c1b1b43
Binary files /dev/null and b/etc/test/events/2008-11-11 - airport underground railway/airport.jpg differ
diff --git a/etc/test/events/2008-12-25 - holiday india/.tag b/etc/test/events/2008-12-25 - holiday india/.tag
new file mode 100644
index 0000000..55769b0
--- /dev/null
+++ b/etc/test/events/2008-12-25 - holiday india/.tag
@@ -0,0 +1,6 @@
+holiday
+airport
+
+india
+creator: Markus Pielmeier
+empty test:
diff --git a/etc/test/events/2008-12-25 - holiday india/cimg1029.jpg b/etc/test/events/2008-12-25 - holiday india/cimg1029.jpg
new file mode 100644
index 0000000..08ace17
Binary files /dev/null and b/etc/test/events/2008-12-25 - holiday india/cimg1029.jpg differ
diff --git a/etc/test/events/2009-07-29 - no tags/emptydir b/etc/test/events/2009-07-29 - no tags/emptydir
new file mode 100644
index 0000000..e69de29
diff --git a/reports/2011-02-07_08_53_03/coverage.txt b/reports/2011-02-07_08_53_03/coverage.txt
new file mode 100644
index 0000000..f8e4ff8
--- /dev/null
+++ b/reports/2011-02-07_08_53_03/coverage.txt
@@ -0,0 +1,15 @@
+Name Stmts Exec Cover Missing
+----------------------------------------------------------------
+src/modules/tagfs/__init__ 1 0 0% 1
+src/modules/tagfs/cache 33 8 24% 21-35, 38-75, 92
+src/modules/tagfs/config 38 20 52% 21-33, 38, 56, 63-65, 69, 74, 78, 82, 86, 90
+src/modules/tagfs/item_access 182 115 63% 19-27, 38, 43-45, 48, 51-54, 88-90, 96-97, 101-102, 111, 119-120, 127-130, 142-143, 150, 157-158, 162-167, 170, 187-189, 192, 201-205, 208, 212-214, 217-219, 222, 234-238, 248, 259-260, 270-271, 275-276, 288-289, 293-294, 298-301, 308, 317-318, 330-331
+src/modules/tagfs/log 9 4 44% 20-25, 34
+src/modules/tagfs/log_config 15 0 0% 21-51
+src/modules/tagfs/node 346 220 63% 20-30, 42, 45-47, 57-61, 65-66, 70, 74-76, 80-82, 92-93, 100, 108-115, 122, 125, 134-135, 141-142, 152-154, 159, 166, 173-178, 185-186, 193, 196-209, 214, 230, 234-235, 245, 267-268, 272, 279-284, 295-296, 303, 306, 309-311, 316, 326, 330-334, 338, 348-350, 354, 371-373, 379, 383-384, 388, 413-415, 421, 425-426, 430, 450-452, 458, 462-463, 467, 492-494, 499, 503-504, 508, 525-532, 538, 545, 549, 553-554, 558, 573-575, 580, 584, 588, 603-605, 610-611, 615, 649-661
+src/modules/tagfs/tagfs 77 0 0% 28-173
+src/modules/tagfs/transient_dict 51 38 74% 20-24, 27, 30-32, 36, 43, 47, 54, 64, 73, 92
+src/modules/tagfs/view 78 40 51% 21-41, 46, 50, 82, 89, 100, 105-107, 111, 116-118, 122-133, 138-140, 144, 149-151, 155-164
+src/tagfs 14 0 0% 21-41
+----------------------------------------------------------------
+TOTAL 844 445 52%
diff --git a/reports/2011-02-07_10_10_40/coverage.txt b/reports/2011-02-07_10_10_40/coverage.txt
new file mode 100644
index 0000000..6836d37
--- /dev/null
+++ b/reports/2011-02-07_10_10_40/coverage.txt
@@ -0,0 +1,15 @@
+Name Stmts Exec Cover Missing
+----------------------------------------------------------------
+src/bin/tagfs 14 0 0% 21-41
+src/modules/tagfs/__init__ 1 0 0% 1
+src/modules/tagfs/cache 33 8 24% 21-35, 38-75, 92
+src/modules/tagfs/config 38 20 52% 21-33, 38, 56, 63-65, 69, 74, 78, 82, 86, 90
+src/modules/tagfs/item_access 182 115 63% 19-27, 38, 43-45, 48, 51-54, 88-90, 96-97, 101-102, 111, 119-120, 127-130, 142-143, 150, 157-158, 162-167, 170, 187-189, 192, 201-205, 208, 212-214, 217-219, 222, 234-238, 248, 259-260, 270-271, 275-276, 288-289, 293-294, 298-301, 308, 317-318, 330-331
+src/modules/tagfs/log 9 4 44% 20-25, 34
+src/modules/tagfs/log_config 15 0 0% 21-51
+src/modules/tagfs/node 346 220 63% 20-30, 42, 45-47, 57-61, 65-66, 70, 74-76, 80-82, 92-93, 100, 108-115, 122, 125, 134-135, 141-142, 152-154, 159, 166, 173-178, 185-186, 193, 196-209, 214, 230, 234-235, 245, 267-268, 272, 279-284, 295-296, 303, 306, 309-311, 316, 326, 330-334, 338, 348-350, 354, 371-373, 379, 383-384, 388, 413-415, 421, 425-426, 430, 450-452, 458, 462-463, 467, 492-494, 499, 503-504, 508, 525-532, 538, 545, 549, 553-554, 558, 573-575, 580, 584, 588, 603-605, 610-611, 615, 649-661
+src/modules/tagfs/tagfs 77 0 0% 28-173
+src/modules/tagfs/transient_dict 51 38 74% 20-24, 27, 30-32, 36, 43, 47, 54, 64, 73, 92
+src/modules/tagfs/view 78 40 51% 21-41, 46, 50, 82, 89, 100, 105-107, 111, 116-118, 122-133, 138-140, 144, 149-151, 155-164
+----------------------------------------------------------------
+TOTAL 844 445 52%
diff --git a/reports/2011-02-07_10_10_40/tests.txt b/reports/2011-02-07_10_10_40/tests.txt
new file mode 100644
index 0000000..dc36480
--- /dev/null
+++ b/reports/2011-02-07_10_10_40/tests.txt
@@ -0,0 +1,41 @@
+Tests an item with tags assigned to. ... ok
+Tests the results for items which got no tags assigned. ... ok
+Tests the results for items which don't exist. ... ok
+Tests AndFilter filter arguments at once. ... ok
+Tests TagValueFilter filter argument. ... ok
+Test the items property of ItemAccess. ... FAIL
+Test the items property of ItemAccess. ... ok
+Test the tag property of ItemAccess ... ok
+Test the untaggedItems property of ItemAccess ... FAIL
+testItemNodeInterface (test.test_all.TestItemNode) ... ok
+testRecurse (test.test_all.TestNodeRecurse) ... ok
+Tests the parseTagsFromFile(...) method. ... ok
+testRootNode (test.test_all.TestRootNode) ... ok
+testTagNode (test.test_all.TestTagNode) ... ok
+Makes sure that the events directory is accessible. ... ok
+testUntaggedItemsNodeInterface (test.test_all.TestUntaggedItemsNode) ... ok
+Test the forgett feature ... ok
+Test some simple get, set an in calls. ... ok
+Testing view interface ... ok
+testConfig (test.test_config.TestConfig) ... ok
+
+======================================================================
+FAIL: Test the items property of ItemAccess.
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "/home/marook/work/devel/projects/tagfs/src/test/test_all.py", line 147, in testItems
+ set(items))
+AssertionError: set(['2008-12-25 - holiday india', '2009-07-29 - no tags', '2008-03-29 - holiday south korea', '2008-11-11 - airport underground railway']) != set(['.tagfs', '2008-12-25 - holiday india', '2008-03-29 - holiday south korea', '2009-07-29 - no tags', '2008-11-11 - airport underground railway'])
+
+======================================================================
+FAIL: Test the untaggedItems property of ItemAccess
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "/home/marook/work/devel/projects/tagfs/src/test/test_all.py", line 184, in testUntaggedItems
+ set([item.name for item in untaggedItems]))
+AssertionError: set(['2009-07-29 - no tags']) != set(['.tagfs', '2009-07-29 - no tags'])
+
+----------------------------------------------------------------------
+Ran 20 tests in 1.370s
+
+FAILED (failures=2)
diff --git a/reports/2011-05-31_19_05_56/coverage.txt b/reports/2011-05-31_19_05_56/coverage.txt
new file mode 100644
index 0000000..3b00f82
--- /dev/null
+++ b/reports/2011-05-31_19_05_56/coverage.txt
@@ -0,0 +1,18 @@
+Name Stmts Exec Cover Missing
+---------------------------------------------------------------------
+src/bin/tagfs 15 0 0% 21-42
+src/modules/tagfs/__init__ 1 0 0% 1
+src/modules/tagfs/cache 33 8 24% 21-35, 38-75, 92
+src/modules/tagfs/config 38 0 0% 21-92
+src/modules/tagfs/item_access 195 0 0% 19-351
+src/modules/tagfs/log 24 0 0% 20-60
+src/modules/tagfs/log_config 15 0 0% 21-51
+src/modules/tagfs/main 77 0 0% 28-180
+src/modules/tagfs/node 40 23 57% 20-27, 39-44, 47, 51, 60-66, 78-79
+src/modules/tagfs/node_filter_context 56 9 16% 20-66, 71-75, 89-111
+src/modules/tagfs/node_root 32 16 50% 20-27, 30, 44-45, 49, 55-60, 64, 69
+src/modules/tagfs/node_untagged_items 16 10 62% 20-25, 29, 43
+src/modules/tagfs/transient_dict 51 38 74% 20-24, 27, 30-32, 36, 43, 47, 54, 64, 73, 92
+src/modules/tagfs/view 81 0 0% 21-174
+---------------------------------------------------------------------
+TOTAL 674 104 15%
diff --git a/reports/2011-05-31_19_05_56/tests.txt b/reports/2011-05-31_19_05_56/tests.txt
new file mode 100644
index 0000000..066909c
--- /dev/null
+++ b/reports/2011-05-31_19_05_56/tests.txt
@@ -0,0 +1,18 @@
+testForgettValuesWhenDictSizeExceeded (tagfs_test_small.test_transient_dict.TestTransientDict) ... ok
+testGetAndSetValues (tagfs_test_small.test_transient_dict.TestTransientDict) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testNodeHasName (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testUntaggedItemAccessItemsAreUntaggedItemsDirectoryEntries (tagfs_test_small.test_untagged_items_directory_node.TestUntaggedItemsDirectoryNode) ... ok
+testItemLinksReplaceUntaggedDirectory (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeContainerContainsTaggedNodeLinks (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_root_directory_node.TestRootDirectoryNode) ... ok
+testNodeContainsUntaggedDirectory (tagfs_test_small.test_root_directory_node.TestRootDirectoryNodeUntaggedDirectory) ... ok
+testNodeAttrMTimeIsItemAccessParseTime (tagfs_test_small.test_filter_context_value_list_directory_node.TestContextValueListDirectoryNode) ... ok
+testNodeIsDirectory (tagfs_test_small.test_filter_context_value_list_directory_node.TestContextValueListDirectoryNode) ... ok
+
+----------------------------------------------------------------------
+Ran 13 tests in 0.005s
+
+OK
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..bd3bea0
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Peter Prohaska
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from distutils.core import setup, Command
+import sys
+import os
+from os.path import (
+ basename,
+ dirname,
+ abspath,
+ splitext,
+ join as pjoin
+)
+from glob import glob
+from unittest import TestLoader, TextTestRunner
+import re
+import datetime
+
+projectdir = dirname(abspath(__file__))
+reportdir = pjoin(projectdir, 'reports')
+
+srcdir = pjoin(projectdir, 'src')
+bindir = pjoin(srcdir, 'bin')
+moddir = pjoin(srcdir, 'modules')
+testdir = pjoin(srcdir, 'test')
+
+testdatadir = pjoin(projectdir, 'etc', 'test', 'events')
+testmntdir = pjoin(projectdir, 'mnt')
+
+assert os.path.isdir(srcdir)
+assert os.path.isdir(bindir)
+assert os.path.isdir(moddir)
+assert os.path.isdir(testdir)
+
+assert os.path.isdir(testdatadir)
+
+class Report(object):
+
+ def __init__(self):
+ self.reportDateTime = datetime.datetime.utcnow()
+ self.reportDir = os.path.join(reportdir, self.reportDateTime.strftime('%Y-%m-%d_%H_%M_%S'))
+
+ # fails when dir already exists which is nice
+ os.makedirs(self.reportDir)
+
+ @property
+ def coverageReportFileName(self):
+ return os.path.join(self.reportDir, 'coverage.txt')
+
+ @property
+ def unitTestReportFileName(self):
+ return os.path.join(self.reportDir, 'tests.txt')
+
+def sourceFiles():
+ yield os.path.join(bindir, 'tagfs')
+
+ sourceFilePattern = re.compile('^.*[.]py$')
+ for root, dirs, files in os.walk(moddir):
+ for f in files:
+ if(not sourceFilePattern.match(f)):
+ continue
+
+ if(f.startswith('.#')):
+ continue
+
+ yield os.path.join(root, f)
+
+def fullSplit(p):
+ head, tail = os.path.split(p)
+
+ if(len(head) > 0):
+ for n in fullSplit(head):
+ yield n
+
+ yield tail
+
+def testModules():
+ testFilePattern = re.compile('^(test.*)[.]py$', re.IGNORECASE)
+
+ for root, dirs, files in os.walk(testdir):
+ for f in files:
+ m = testFilePattern.match(f)
+
+ if(not m):
+ continue
+
+ relDir = os.path.relpath(root, testdir)
+
+ yield '.'.join([n for n in fullSplit(relDir)] + [m.group(1), ])
+
+def printFile(fileName):
+ if(not os.path.exists(fileName)):
+ # TODO maybe we should not silently return?
+ return
+
+ with open(fileName, 'r') as f:
+ for line in f:
+ sys.stdout.write(line)
+
+class test(Command):
+ description = 'run tests'
+ user_options = []
+
+ def initialize_options(self):
+ self._cwd = os.getcwd()
+ self._verbosity = 2
+
+ def finalize_options(self): pass
+
+ def run(self):
+ report = Report()
+
+ tests = [m for m in testModules()]
+
+ print "..using:"
+ print " moddir:", moddir
+ print " testdir:", testdir
+ print " testdatadir:", testdatadir
+ print " testmntdir:", testmntdir
+ print " tests:", tests
+ print " sys.path:", sys.path
+ print
+
+ # insert project lookup paths at index 0 to make sure they are used
+ # over global libraries
+ sys.path.insert(0, moddir)
+ sys.path.insert(0, testdir)
+
+ # TODO try to import all test cases here. the TestLoader is throwing
+ # very confusing errors when imports can't be resolved.
+
+ # configure logging
+        # TODO not sure how to enable this... it's a bit complicated to enable
+ # logging only for 'make mt' and disable it then for
+ # 'python setup.py test'. 'python setup.py test' is such a gabber...
+ #if 'DEBUG' in os.environ:
+ # from tagfs import log_config
+ # log_config.setUpLogging()
+
+ if 'DEBUG' in os.environ:
+ import logging
+ logging.basicConfig(level = logging.DEBUG)
+
+ suite = TestLoader().loadTestsFromNames(tests)
+
+ with open(report.unitTestReportFileName, 'w') as testResultsFile:
+ r = TextTestRunner(stream = testResultsFile, verbosity = self._verbosity)
+
+ def runTests():
+ r.run(suite)
+
+ try:
+ import coverage
+
+ c = coverage.coverage()
+ c.start()
+ runTests()
+ c.stop()
+
+ with open(report.coverageReportFileName, 'w') as reportFile:
+ c.report([f for f in sourceFiles()], file = reportFile)
+
+ except ImportError:
+ # TODO ImportErrors from runTests() may look like coverage is missing
+
+ print ''
+ print 'coverage module not found.'
+ print 'To view source coverage stats install http://nedbatchelder.com/code/coverage/'
+ print ''
+
+ runTests()
+
+ # TODO use two streams instead of printing files after writing
+ printFile(report.unitTestReportFileName)
+ printFile(report.coverageReportFileName)
+
+# Overrides default clean (which cleans from build runs)
+# This clean should probably be hooked into that somehow.
+class clean_pyc(Command):
+ description = 'remove *.pyc files from source directory'
+ user_options = []
+
+ def initialize_options(self):
+ self._delete = []
+ for cwd, dirs, files in os.walk(projectdir):
+ self._delete.extend(
+ pjoin(cwd, f) for f in files if f.endswith('.pyc')
+ )
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ for f in self._delete:
+ try:
+ os.unlink(f)
+ except OSError, e:
+ print "Strange '%s': %s" % (f, e)
+ # Could be a directory.
+ # Can we detect file in use errors or are they OSErrors
+ # as well?
+ # Shall we catch all?
+
+setup(
+ cmdclass = {
+ 'test': test,
+ 'clean_pyc': clean_pyc,
+ },
+ name = 'tagfs',
+ version = '0.1',
+ url = 'http://wiki.github.com/marook/tagfs',
+ description = '',
+ long_description = '',
+ author = 'Markus Pielmeier',
+ author_email = '[email protected]',
+ license = 'GPLv3',
+ download_url = 'http://github.com/marook/tagfs/downloads/tagfs_0.1-src.tar.bz2',
+ platforms = 'Linux',
+ requires = [],
+ classifiers = [
+ 'Development Status :: 2 - Pre-Alpha',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: GNU General Public License (GPL)',
+ 'Natural Language :: English',
+ 'Operating System :: POSIX :: Linux',
+ 'Programming Language :: Python',
+ 'Topic :: System :: Filesystems'
+ ],
+ data_files = [
+ (pjoin('share', 'doc', 'tagfs'), ['AUTHORS', 'COPYING', 'README'])
+ ],
+ # TODO maybe we should include src/bin/*?
+ scripts = [pjoin(bindir, 'tagfs')],
+ packages = ['tagfs'],
+ package_dir = {'': moddir},
+)
diff --git a/src/bin/tagfs b/src/bin/tagfs
new file mode 100755
index 0000000..d5024f6
--- /dev/null
+++ b/src/bin/tagfs
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.main import main
+from tagfs.log_config import setUpLogging
+
+if __name__ == '__main__':
+ from os import environ as env
+ if 'DEBUG' in env:
+ setUpLogging()
+
+ import logging
+ import sys
+
+ if 'PROFILE' in env:
+ logging.info('Enabled tagfs profiling')
+
+ import cProfile
+ import os
+
+ profileFile = os.path.join(os.getcwd(), 'tagfs.profile')
+
+ sys.exit(cProfile.run('main()', profileFile))
+ else:
+ sys.exit(main())
+
diff --git a/src/modules/tagfs/__init__.py b/src/modules/tagfs/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/modules/tagfs/cache.py b/src/modules/tagfs/cache.py
new file mode 100644
index 0000000..02bf18b
--- /dev/null
+++ b/src/modules/tagfs/cache.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import time
+import functools
+
+class NoCacheStrategy(object):
+ """This cache strategy reloads the cache on every call.
+ """
+
+ def isCacheValid(self, f, *args, **kwargs):
+ return False
+
+class NoReloadStrategy(object):
+ """This cache strategy never reloads the cache.
+ """
+
+ def isCacheValid(self, f, *args, **kwargs):
+ return True
+
+class TimeoutReloadStrategy(object):
+
+ def __init__(self, timeoutDuration = 10 * 60):
+ self.timeoutDuration = timeoutDuration
+
+ def isCacheValid(self, f, *args, **kwargs):
+ obj = args[0]
+
+ timestampFieldName = '__' + f.__name__ + 'Timestamp'
+ now = time.time()
+
+ if not hasattr(obj, timestampFieldName):
+ setattr(obj, timestampFieldName, now)
+
+ return False
+
+ lastTime = getattr(obj, timestampFieldName)
+
+ if now - lastTime < self.timeoutDuration:
+ return True
+
+ setattr(obj, timestampFieldName, now)
+
+ return False
+
+
+def cache(f, reloadStrategy = NoReloadStrategy()):
+ """This annotation is used to cache the result of a method call.
+
+ @param f: This is the wrapped function which's return value will be cached.
+ @param reload: This is the reload strategy. This function returns True when
+ the cache should be reloaded. Otherwise False.
+ @attention: The cache is never deleted. The first call initializes the
+ cache. The method's parameters just passed to the called method. The cache
+ is not evaluating the parameters.
+ """
+
+ @functools.wraps(f)
+ def cacher(*args, **kwargs):
+ obj = args[0]
+
+ cacheMemberName = '__' + f.__name__ + 'Cache'
+
+ # the reload(...) call has to be first as we always have to call the
+ # method. not only when there is a cache member available in the object.
+ if (not reloadStrategy.isCacheValid(f, *args, **kwargs)) or (not hasattr(obj, cacheMemberName)):
+ value = f(*args, **kwargs)
+
+ setattr(obj, cacheMemberName, value)
+
+ return value
+
+ return getattr(obj, cacheMemberName)
+
+ return cacher
+
diff --git a/src/modules/tagfs/config.py b/src/modules/tagfs/config.py
new file mode 100644
index 0000000..4fc8903
--- /dev/null
+++ b/src/modules/tagfs/config.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import ConfigParser
+import logging
+import os
+
+class ConfigError(Exception):
+
+ pass
+
+class Config(object):
+
+ GLOBAL_SECTION = 'global'
+
+ def applyDefaults(self):
+ self.tagFileName = '.tag'
+ self.enableValueFilters = False
+ self.enableRootItemLinks = False
+
+ def __init__(self, itemsDir):
+ self._config = ConfigParser.SafeConfigParser({
+ 'tagFileName': '.tag',
+ 'enableValueFilters': False,
+ 'enableRootItemLinks': False,
+ })
+ self._config.add_section(Config.GLOBAL_SECTION)
+
+ self.itemsDir = itemsDir
+
+ self.applyDefaults()
+
+ parsedFiles = self._config.read([os.path.join(itemsDir, '.tagfs', 'tagfs.conf'),
+ os.path.expanduser(os.path.join('~', '.tagfs', 'tagfs.conf')),
+ os.path.join('/', 'etc', 'tagfs', 'tagfs.conf')])
+
+ logging.debug('Parsed the following config files: %s' % ', '.join(parsedFiles))
+
+ def _boolToStr(self, b):
+ if b is True:
+ return 'true'
+ elif b is False:
+ return 'false'
+ else:
+ # TODO make error more verbose
+ raise ConfigError()
+
+ @property
+ def tagFileName(self):
+ return self._config.get(Config.GLOBAL_SECTION, 'tagFileName')
+
+ @tagFileName.setter
+ def tagFileName(self, tagFileName):
+ self._config.set(Config.GLOBAL_SECTION, 'tagFileName', tagFileName)
+
+ # TODO implement generic approach to get/set boolean values
+ @property
+ def enableValueFilters(self):
+ return self._config.getboolean(Config.GLOBAL_SECTION, 'enableValueFilters')
+
+ @enableValueFilters.setter
+ def enableValueFilters(self, enableValueFilters):
+ self._config.set(Config.GLOBAL_SECTION, 'enableValueFilters', self._boolToStr(enableValueFilters))
+
+ @property
+ def enableRootItemLinks(self):
+ return self._config.getboolean(Config.GLOBAL_SECTION, 'enableRootItemLinks')
+
+ @enableRootItemLinks.setter
+ def enableRootItemLinks(self, enableRootItemLinks):
+ self._config.set(Config.GLOBAL_SECTION, 'enableRootItemLinks', self._boolToStr(enableRootItemLinks))
+
+ def __str__(self):
+ #return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['tagFileName', 'enableValueFilters', 'enableRootItemLinks']]) + ']'
+ return '[tagFileName: %s, enableValueFilters: %s, enableRootItemLinks: %s]' % (self.tagFileName, self.enableValueFilters, self.enableRootItemLinks)
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
new file mode 100644
index 0000000..7052e44
--- /dev/null
+++ b/src/modules/tagfs/item_access.py
@@ -0,0 +1,351 @@
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+import os
+import time
+import traceback
+
+from tagfs.cache import cache
+
+class Tag(object):
+
+ def __init__(self, value, context = None):
+ if context == None:
+ self.context = None
+ else:
+ self.context = context.strip()
+
+ self.value = value.strip()
+
+ if not self.context == None and len(self.context) == 0:
+ # we don't allow empty strings as they can't be represented as a
+ # directory very well
+ raise ValueError()
+
+ if len(self.value) == 0:
+ # we don't allow empty strings as they can't be represented as a
+ # directory very well
+ raise ValueError()
+
+ def __hash__(self):
+ return (self.context, self.value).__hash__()
+
+ def __eq__(self, other):
+ return self.value == other.value and self.context == other.context
+
+ def __repr__(self):
+ return '<Tag %s: %s>' % (self.context, self.value)
+
+def parseTagsFromFile(tagFileName):
+ """Parses the tags from the specified file.
+
+ @return: The parsed values are returned as a set containing Tag objects.
+ @see: Tag
+ """
+
+ tags = set()
+
+ tagFile = open(tagFileName, 'r')
+ try:
+ for rawTag in tagFile.readlines():
+ rawTag = rawTag.strip()
+
+ try:
+ if len(rawTag) == 0:
+ continue
+
+ tagTuple = rawTag.split(':', 1)
+
+ if len(tagTuple) == 1:
+ tagContext = None
+ tagValue = tagTuple[0]
+ else:
+ tagContext = tagTuple[0]
+ tagValue = tagTuple[1]
+
+ tag = Tag(tagValue, context = tagContext)
+
+ tags.add(tag)
+ except:
+ logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
+
+ finally:
+ tagFile.close()
+
+ return tags
+
+class Item(object):
+
+ def __init__(self, name, itemAccess):
+ self.name = name
+ self.itemAccess = itemAccess
+
+ # TODO register at file system to receive tag file change events.
+
+ @property
+ @cache
+ def itemDirectory(self):
+ return os.path.join(self.itemAccess.dataDirectory, self.name)
+
+ @property
+ @cache
+ def _tagFileName(self):
+ """Returns the name of the tag file for this item.
+ """
+
+ itemDirectory = self.itemDirectory
+
+ return os.path.join(itemDirectory, self.itemAccess.tagFileName)
+
+ def __parseTags(self):
+ tagFileName = self._tagFileName
+
+ if not os.path.exists(tagFileName):
+ return None
+
+ return parseTagsFromFile(tagFileName)
+
+ @property
+ @cache
+ def tagsCreationTime(self):
+ tagFileName = self._tagFileName
+
+ if not os.path.exists(tagFileName):
+ return None
+
+ return os.path.getctime(self._tagFileName)
+
+ @property
+ @cache
+ def tagsModificationTime(self):
+ """Returns the last time when the tags have been modified.
+ """
+
+ tagFileName = self._tagFileName
+
+ if not os.path.exists(tagFileName):
+ return None
+
+ return os.path.getmtime(tagFileName)
+
+ @property
+ @cache
+ def tags(self):
+ """Returns the tags as a list for this item.
+ """
+
+ return self.__parseTags()
+
+ def getTagsByContext(self, context):
+ for t in self.tags:
+ if context != t.context:
+ continue
+
+ yield t
+
+ def isTaggedWithContextValue(self, context, value):
+ for t in self.getTagsByContext(context):
+ if value == t.value:
+ return True
+
+ return False
+
+ def isTaggedWithContext(self, context):
+ # TODO don't create whole list... just check wheather list is empty
+ return (len([c for c in self.getTagsByContext(context)]) > 0)
+
+ @property
+ @cache
+ def tagged(self):
+ return os.path.exists(self._tagFileName)
+
+ def __repr__(self):
+ return '<Item %s>' % self.name
+
+class TagValueFilter(object):
+
+ def __init__(self, tagValue):
+ self.tagValue = tagValue
+
+ def filterItems(self, items):
+ droppedItems = set()
+
+ for item in items:
+ hasTagValue = False
+
+ for itemTag in item.tags:
+ if itemTag.value == self.tagValue:
+ hasTagValue = True
+
+ break
+
+ if not hasTagValue:
+ droppedItems.add(item)
+
+ items -= droppedItems
+
+class TagFilter(object):
+
+ def __init__(self, tag):
+ self.tag = tag
+
+ def filterItems(self, items):
+ droppedItems = set()
+
+ for item in items:
+ if not self.tag in item.tags:
+ droppedItems.add(item)
+
+ items -= droppedItems
+
+class AndFilter(object):
+ """Concatenates two filters with a logical 'and'.
+ """
+
+ def __init__(self, subFilters):
+ self.subFilters = subFilters
+
+ def filterItems(self, items):
+ for subFilter in self.subFilters:
+ subFilter.filterItems(items)
+
+class NoneFilter(object):
+
+ def filterItems(self, items):
+ pass
+
+class NotContextFilter(object):
+
+ def __init__(self, context):
+ self.context = context
+
+ def filterItems(self, items):
+ droppedItems = set()
+
+ for item in items:
+ for tag in item.tags:
+ if self.context == tag.context:
+ droppedItems.add(item)
+
+ break
+
+ items -= droppedItems
+
+class ItemAccess(object):
+ """This is the access point to the Items.
+ """
+
+ def __init__(self, dataDirectory, tagFileName):
+ self.dataDirectory = dataDirectory
+ self.tagFileName = tagFileName
+
+ self.__items = None
+ self.__tags = None
+ self.__taggedItems = None
+ self.__untaggedItems = None
+ self.parseTime = 0
+
+ def __parseItems(self):
+ items = {}
+
+ logging.debug('Start parsing items from dir: %s', self.dataDirectory)
+
+ for itemName in os.listdir(self.dataDirectory):
+ if itemName == '.tagfs':
+ # skip directory with configuration
+ continue
+
+ try:
+ item = Item(itemName, self)
+
+ items[itemName] = item
+
+ except IOError, (error, strerror):
+ logging.error('Can \'t read tags for item %s: %s',
+ itemName,
+ strerror)
+
+ logging.debug('Found %s items', len(items))
+
+ self.parseTime = time.time()
+
+ return items
+
+ @property
+ @cache
+ def items(self):
+ return self.__parseItems()
+
+ @property
+ @cache
+ def tags(self):
+ tags = set()
+
+ for item in self.items.itervalues():
+ if not item.tagged:
+ continue
+
+ tags = tags | item.tags
+
+ return tags
+
+ @property
+ @cache
+ def taggedItems(self):
+ return set([item for item in self.items.itervalues() if item.tagged])
+
+ @property
+ @cache
+ def untaggedItems(self):
+ return set([item for item in self.items.itervalues() if not item.tagged])
+
+ def getItemDirectory(self, item):
+ return os.path.join(self.dataDirectory, item)
+
+ def filterItems(self, filter):
+ resultItems = set([item for item in self.taggedItems])
+
+ filter.filterItems(resultItems)
+
+ return resultItems
+
+ def contextTags(self, context):
+ contextTags = set()
+
+ for tag in self.tags:
+ if tag.context == context:
+ contextTags.add(tag)
+
+ return contextTags
+
+ @property
+ @cache
+ def contexts(self):
+ contexts = set()
+
+ for tag in self.tags:
+ if tag.context == None:
+ continue
+
+ contexts.add(tag.context)
+
+ return contexts
+
+ def __str__(self):
+ return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
diff --git a/src/modules/tagfs/log.py b/src/modules/tagfs/log.py
new file mode 100644
index 0000000..8caa611
--- /dev/null
+++ b/src/modules/tagfs/log.py
@@ -0,0 +1,60 @@
+#
+# Copyright 2010, 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import functools
+import logging
+
+def getLogger(*args):
+ o = args[0]
+
+ logger = logging.getLogger(o.__class__.__name__)
+
+ return logger
+
+
+def logCall(f):
+
+ @functools.wraps(f)
+ def logCall(*args, **kwargs):
+ logger = getLogger(*args)
+
+ if(logger.isEnabledFor(logging.DEBUG)):
+ logger.debug(f.__name__ + '(' + (', '.join('\'' + str(a) + '\'' for a in args[1:])) + ')')
+
+ return f(*args, **kwargs)
+
+ return logCall
+
+def logException(f):
+
+ @functools.wraps(f)
+ def logException(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except:
+ logger = getLogger(*args)
+
+ if(logger.isEnabledFor(logging.ERROR)):
+ import traceback
+
+ logger.warn(traceback.format_exc())
+
+ raise
+
+ return logException
diff --git a/src/modules/tagfs/log_config.py b/src/modules/tagfs/log_config.py
new file mode 100644
index 0000000..ff159ae
--- /dev/null
+++ b/src/modules/tagfs/log_config.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import logging
+import sys
+
+def setUpLogging():
+ def exceptionCallback(eType, eValue, eTraceBack):
+ import cgitb
+
+ txt = cgitb.text((eType, eValue, eTraceBack))
+
+ logging.critical(txt)
+
+ # sys.exit(1)
+
+ # configure file logger
+ logging.basicConfig(level = logging.DEBUG,
+ format = '%(asctime)s %(levelname)s %(message)s',
+ filename = '/tmp/tagfs.log',
+ filemode = 'a')
+
+ # configure console logger
+ consoleHandler = logging.StreamHandler(sys.stdout)
+ consoleHandler.setLevel(logging.DEBUG)
+
+ consoleFormatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+ consoleHandler.setFormatter(consoleFormatter)
+ logging.getLogger().addHandler(consoleHandler)
+
+ # replace default exception handler
+ sys.excepthook = exceptionCallback
+
+ logging.debug('Logging and exception handling has been set up')
diff --git a/src/modules/tagfs/main.py b/src/modules/tagfs/main.py
new file mode 100644
index 0000000..c7d3949
--- /dev/null
+++ b/src/modules/tagfs/main.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+#
+# = tag fs =
+# == glossary ==
+# * item: An item is a directory in the item container directory. Items can be
+# tagged using a tag file.
+# * tag: A tag is a text string which can be assigned to an item. Tags can
+# consist of any character except newlines.
+
+import os
+import stat
+import errno
+import exceptions
+import time
+import functools
+import logging
+
+import fuse
+if not hasattr(fuse, '__version__'):
+ raise RuntimeError, \
+ "your fuse-py doesn't know of fuse.__version__, probably it's too old."
+fuse.fuse_python_api = (0, 2)
+
+from tagfs.view import View
+from tagfs.cache import cache
+from tagfs.item_access import ItemAccess
+from config import Config
+from log import logException
+
+class TagFS(fuse.Fuse):
+
+ def __init__(self, initwd, *args, **kw):
+ fuse.Fuse.__init__(self, *args, **kw)
+
+ self._initwd = initwd
+ self._itemsRoot = None
+
+ # TODO change command line arguments structure
+ # goal: tagfs <items dir> <mount dir>
+ self.parser.add_option('-i',
+ '--items-dir',
+ dest = 'itemsDir',
+ help = 'items directory',
+ metavar = 'dir')
+ self.parser.add_option('-t',
+ '--tag-file',
+ dest = 'tagFileName',
+ help = 'tag file name',
+ metavar = 'file',
+ default = None)
+ self.parser.add_option('--value-filter',
+ action = 'store_true',
+ dest = 'enableValueFilters',
+ help = 'Displays value filter directories on toplevel instead of only context entries',
+ default = None)
+ self.parser.add_option('--root-items',
+ action = 'store_true',
+ dest = 'enableRootItemLinks',
+ help = 'Display item links in tagfs root directory.',
+ default = None)
+
+ def getItemAccess(self):
+ # Maybe we should move the parser run from main here.
+ # Or we should at least check if it was run once...
+ opts, args = self.cmdline
+
+ # Maybe we should add expand user? Maybe even vars???
+ assert opts.itemsDir != None and opts.itemsDir != ''
+ itemsRoot = os.path.normpath(
+ os.path.join(self._initwd, opts.itemsDir))
+
+ # TODO rel https://github.com/marook/tagfs/issues#issue/2
+ # Ensure that mount-point and items dir are disjoined.
+ # Something along
+ # assert not os.path.normpath(itemsDir).startswith(itemsRoot)
+
+ # try/except here?
+ try:
+ return ItemAccess(itemsRoot, self.config.tagFileName)
+ except OSError, e:
+ logging.error("Can't create item access from items directory %s. Reason: %s",
+ itemsRoot, str(e.strerror))
+ raise
+
+ @property
+ @cache
+ def config(self):
+ opts, args = self.cmdline
+
+ c = Config(os.path.normpath(os.path.join(self._initwd, opts.itemsDir)))
+
+ if opts.tagFileName:
+ c.tagFileName = opts.tagFileName
+
+ if opts.enableValueFilters:
+ c.enableValueFilters = opts.enableValueFilters
+
+ if opts.enableRootItemLinks:
+ c.enableRootItemLinks = opts.enableRootItemLinks
+
+ logging.debug('Using configuration %s' % c)
+
+ return c
+
+ @property
+ @cache
+ def view(self):
+ itemAccess = self.getItemAccess()
+
+ return View(itemAccess, self.config)
+
+ @logException
+ def getattr(self, path):
+ return self.view.getattr(path)
+
+ @logException
+ def readdir(self, path, offset):
+ return self.view.readdir(path, offset)
+
+ @logException
+ def readlink(self, path):
+ return self.view.readlink(path)
+
+ @logException
+ def open(self, path, flags):
+ return self.view.open(path, flags)
+
+ @logException
+ def read(self, path, size, offset):
+ return self.view.read(path, size, offset)
+
+ @logException
+ def write(self, path, data, pos):
+ return self.view.write(path, data, pos)
+
+ @logException
+ def symlink(self, path, linkPath):
+ return self.view.symlink(path, linkPath)
+
+def main():
+ fs = TagFS(os.getcwd(),
+ version = "%prog " + fuse.__version__,
+ dash_s_do = 'setsingle')
+
+ fs.parse(errex = 1)
+ opts, args = fs.cmdline
+
+ if opts.itemsDir == None:
+ fs.parser.print_help()
+ # items dir should probably be an arg, not an option.
+ print "Error: Missing items directory option."
+ # Quickfix rel https://github.com/marook/tagfs/issues/#issue/3
+ # FIXME: since we run main via sys.exit(main()), this should
+ # probably be handled via some return code.
+ import sys
+ sys.exit()
+
+ return fs.main()
+
+if __name__ == '__main__':
+ import sys
+ sys.exit(main())
diff --git a/src/modules/tagfs/node.py b/src/modules/tagfs/node.py
new file mode 100644
index 0000000..a8ded1e
--- /dev/null
+++ b/src/modules/tagfs/node.py
@@ -0,0 +1,81 @@
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import fuse
+import stat
+
+from tagfs.cache import cache
+
+class Stat(fuse.Stat):
+
+ def __init__(self):
+ self.st_mode = 0
+ self.st_ino = 0
+ self.st_dev = 0
+ self.st_nlink = 0
+ self.st_uid = 0
+ self.st_gid = 0
+ self.st_size = 0
+ self.st_atime = 0
+ self.st_mtime = 0
+ self.st_ctime = 0
+
+ def __str__(self):
+ return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in self.__dict__]) + ']'
+
+class ItemLinkNode(object):
+
+ def __init__(self, item):
+ self.item = item
+
+ @property
+ def name(self):
+ return self.item.name
+
+ @property
+ def attr(self):
+ s = Stat()
+
+ s.st_mode = stat.S_IFLNK | 0444
+ s.st_nlink = 2
+
+ return s
+
+ @property
+ def link(self):
+ return self.item.itemDirectory
+
+class DirectoryNode(object):
+
+ @property
+ def attr(self):
+ s = Stat()
+
+ s.st_mode = stat.S_IFDIR | 0555
+
+ s.st_mtime = 0
+ s.st_ctime = s.st_mtime
+ s.st_atime = s.st_mtime
+
+ return s
+
+ @property
+ @cache
+ def entries(self):
+ return dict([[e.name, e] for e in self._entries])
diff --git a/src/modules/tagfs/node_filter.py b/src/modules/tagfs/node_filter.py
new file mode 100644
index 0000000..4da8ee5
--- /dev/null
+++ b/src/modules/tagfs/node_filter.py
@@ -0,0 +1,67 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.cache import cache
+from tagfs.node import Stat, ItemLinkNode, DirectoryNode
+
+class FilterDirectoryNode(DirectoryNode):
+
+ def __init__(self, itemAccess):
+ self.itemAccess = itemAccess
+
+ @property
+ def attr(self):
+ s = super(FilterDirectoryNode, self).attr
+
+ # TODO why nlink == 2?
+ s.st_nlink = 2
+
+ # TODO write test case which tests st_mtime == itemAccess.parseTime
+ s.st_mtime = self.itemAccess.parseTime
+ s.st_ctime = s.st_mtime
+ s.st_atime = s.st_mtime
+
+ return s
+
+ @property
+ def contexts(self):
+ c = set()
+
+ for item in self.items:
+ for t in item.tags:
+ context = t.context
+
+ if context is None:
+ continue
+
+ c.add(t.context)
+
+ return c
+
+ @property
+ def _entries(self):
+ # the import is not global because we want to prevent a cyclic
+ # dependency
+ from tagfs.node_filter_context import ContextValueListDirectoryNode
+
+ for context in self.contexts:
+ yield ContextValueListDirectoryNode(self.itemAccess, self, context)
+
+ for item in self.items:
+ yield ItemLinkNode(item)
diff --git a/src/modules/tagfs/node_filter_context.py b/src/modules/tagfs/node_filter_context.py
new file mode 100644
index 0000000..407a951
--- /dev/null
+++ b/src/modules/tagfs/node_filter_context.py
@@ -0,0 +1,91 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.cache import cache
+from tagfs.node import Stat, ItemLinkNode, DirectoryNode
+from tagfs.node_filter import FilterDirectoryNode
+from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+
+class ContextValueFilterDirectoryNode(FilterDirectoryNode):
+
+ def __init__(self, itemAccess, parentNode, context, value):
+ super(ContextValueFilterDirectoryNode, self).__init__(itemAccess)
+ self.parentNode = parentNode
+ self.context = context
+ self.value = value
+
+ @property
+ def name(self):
+ return self.value
+
+ @property
+ def items(self):
+ for item in self.parentNode.items:
+ if not item.isTaggedWithContextValue(self.context, self.value):
+ continue
+
+ yield item
+
+class ContextValueListDirectoryNode(DirectoryNode):
+
+ def __init__(self, itemAccess, parentNode, context):
+ self.itemAccess = itemAccess
+ self.parentNode = parentNode
+ self.context = context
+
+ @property
+ def name(self):
+ return self.context
+
+ @property
+ def attr(self):
+ s = super(ContextValueListDirectoryNode, self).attr
+
+ # TODO why nlink == 2?
+ s.st_nlink = 2
+
+ # TODO write test case which tests st_mtime == itemAccess.parseTime
+ s.st_mtime = self.itemAccess.parseTime
+ s.st_ctime = s.st_mtime
+ s.st_atime = s.st_mtime
+
+ return s
+
+ @property
+ def items(self):
+ for item in self.parentNode.items:
+ if not item.isTaggedWithContext(self.context):
+ continue
+
+ yield item
+
+ @property
+ def contextValues(self):
+ values = set()
+
+ for item in self.parentNode.items:
+ for tag in item.getTagsByContext(self.context):
+ values.add(tag.value)
+
+ return values
+
+ @property
+ def _entries(self):
+ for value in self.contextValues:
+ yield ContextValueFilterDirectoryNode(self.itemAccess, self, self.context, value)
diff --git a/src/modules/tagfs/node_root.py b/src/modules/tagfs/node_root.py
new file mode 100644
index 0000000..ed0a933
--- /dev/null
+++ b/src/modules/tagfs/node_root.py
@@ -0,0 +1,40 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.cache import cache
+from tagfs.node import Stat, ItemLinkNode
+from tagfs.node_filter import FilterDirectoryNode
+from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+from tagfs.node_filter_context import ContextValueListDirectoryNode
+
+class RootDirectoryNode(FilterDirectoryNode):
+
+ def __init__(self, itemAccess):
+ super(RootDirectoryNode, self).__init__(itemAccess)
+
+ @property
+ def items(self):
+ return self.itemAccess.taggedItems
+
+ @property
+ def _entries(self):
+ yield UntaggedItemsDirectoryNode('.untagged', self.itemAccess)
+
+ for e in super(RootDirectoryNode, self)._entries:
+ yield e
diff --git a/src/modules/tagfs/node_untagged_items.py b/src/modules/tagfs/node_untagged_items.py
new file mode 100644
index 0000000..9ffcf8f
--- /dev/null
+++ b/src/modules/tagfs/node_untagged_items.py
@@ -0,0 +1,46 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs.cache import cache
+from tagfs.node import Stat, ItemLinkNode, DirectoryNode
+
+class UntaggedItemsDirectoryNode(DirectoryNode):
+
+ def __init__(self, name, itemAccess):
+ self.name = name
+ self.itemAccess = itemAccess
+
+ @property
+ def attr(self):
+ s = super(UntaggedItemsDirectoryNode, self).attr
+
+ # TODO why nlink == 2?
+ s.st_nlink = 2
+
+ # TODO write test case which tests st_mtime == itemAccess.parseTime
+ s.st_mtime = self.itemAccess.parseTime
+ s.st_ctime = s.st_mtime
+ s.st_atime = s.st_mtime
+
+ return s
+
+ @property
+ def _entries(self):
+ for item in self.itemAccess.untaggedItems:
+ yield ItemLinkNode(item)
diff --git a/src/modules/tagfs/transient_dict.py b/src/modules/tagfs/transient_dict.py
new file mode 100644
index 0000000..7d20461
--- /dev/null
+++ b/src/modules/tagfs/transient_dict.py
@@ -0,0 +1,93 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+class TransientDict(object):
+
+ class Version(object):
+
+ def __init__(self, key):
+ self.key = key
+
+ def touch(self, version):
+ self.version = version
+
+ class Value(object):
+
+ def __init__(self, value, version):
+ self.value = value
+ self.version = version
+
+ def __init__(self, averageCapacity):
+ self.averageCapacity = averageCapacity
+ self.nextVersion = 0
+ self.setCounter = 0
+ self.data = {}
+ self.versions = []
+
+ def __getitem__(self, k):
+ v = self.data[k]
+
+ if not v:
+ return None
+
+ v.version.touch(self.nextVersion)
+ self.nextVersion += 1
+
+ return v.value
+
+ def _cleanUpCache(self):
+ if len(self.data) < self.averageCapacity:
+ return
+
+ def versionCmp(a, b):
+ if a.version < b.version:
+ return 1
+ if b.version < a.version:
+ return -1
+
+ return 0
+
+ self.versions.sort(versionCmp)
+
+ while len(self.versions) > self.averageCapacity:
+ version = self.versions.pop()
+
+ self.data.pop(version.key)
+
+ def __setitem__(self, k, v):
+ if k in self.data:
+ value = self.data[k]
+
+ value.value = v
+ else:
+ self.setCounter += 1
+ if self.setCounter % self.averageCapacity == 0:
+ self._cleanUpCache()
+
+ version = TransientDict.Version(k)
+ self.versions.append(version)
+
+ value = TransientDict.Value(v, version)
+ self.data[k] = value
+
+ value.version.touch(self.nextVersion)
+ self.nextVersion += 1
+
+ def __contains__(self, k):
+ return k in self.data
diff --git a/src/modules/tagfs/view.py b/src/modules/tagfs/view.py
new file mode 100644
index 0000000..a24ffef
--- /dev/null
+++ b/src/modules/tagfs/view.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import errno
+import logging
+import os
+from tagfs.log import logCall, logException
+from tagfs.cache import cache
+from tagfs.transient_dict import TransientDict
+from tagfs.node_root import RootDirectoryNode
+from fuse import Direntry
+
+class View(object):
+ """Abstraction layer from fuse API.
+
+ This class is an abstraction layer from the fuse API. This should ease
+ writing test cases for the file system.
+ """
+
+ DEFAULT_NODES = {
+ # directory icons for rox filer
+ '.DirIcon': None,
+
+ # launch script for rox filer application directories
+ 'AppRun': None
+ }
+
+ def __init__(self, itemAccess, config):
+ self.itemAccess = itemAccess
+ self.config = config
+ self._entryCache = TransientDict(100)
+
+ @property
+ @cache
+ def rootNode(self):
+ # TODO return node.RootNode(self.itemAccess, self.config)
+ return RootDirectoryNode(self.itemAccess)
+
+ def getNode(self, path):
+ if path in self._entryCache:
+ # simple path name based caching is implemented here
+
+ logging.debug('tagfs _entryCache hit')
+
+ return self._entryCache[path]
+
+ # ps contains the path segments
+ ps = [x for x in os.path.normpath(path).split(os.sep) if x != '']
+
+ psLen = len(ps)
+ if psLen > 0:
+ lastSegment = ps[psLen - 1]
+
+ if lastSegment in View.DEFAULT_NODES:
+ logging.debug('Using default node for path ' + path)
+
+ return View.DEFAULT_NODES[lastSegment]
+
+ e = self.rootNode
+
+ for pe in path.split('/')[1:]:
+ if pe == '':
+ continue
+
+ entries = e.entries
+
+ if not pe in entries:
+ # it seems like we are trying to fetch a node for an illegal
+ # path
+
+ return None
+
+ e = entries[pe]
+
+ logging.debug('tagfs _entryCache miss')
+ self._entryCache[path] = e
+
+ return e
+
+ @logCall
+ def getattr(self, path):
+ e = self.getNode(path)
+
+ if not e:
+ logging.debug('Try to read attributes from not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return e.attr
+
+ @logCall
+ def readdir(self, path, offset):
+ e = self.getNode(path)
+
+ if not e:
+ logging.warn('Try to read not existing directory: ' + path)
+
+ return -errno.ENOENT
+
+ # TODO care about offset parameter
+
+ return [Direntry(name) for name in e.entries.iterkeys()]
+
+ @logCall
+ def readlink(self, path):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to read not existing link from node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.link
+
+ @logCall
+ def symlink(self, path, linkPath):
+ linkPathSegs = linkPath.split('/')
+
+ n = self.getNode('/'.join(linkPathSegs[0:len(linkPathSegs) - 2]))
+
+ if not n:
+ return -errno.ENOENT
+
+ return n.symlink(path, linkPath)
+
+ @logCall
+ def open(self, path, flags):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to open not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.open(path, flags)
+
+ @logCall
+ def read(self, path, len, offset):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to read from not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.read(path, len, offset)
+
+ @logCall
+ def write(self, path, data, pos):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to write to not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.write(path, data, pos)
+
diff --git a/src/test/tagfs_test/__init__.py b/src/test/tagfs_test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/test/tagfs_test/item_access_mock.py b/src/test/tagfs_test/item_access_mock.py
new file mode 100644
index 0000000..2e3878d
--- /dev/null
+++ b/src/test/tagfs_test/item_access_mock.py
@@ -0,0 +1,27 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs_test.item_mock import ItemMock
+
+class ItemAccessMock(object):
+
+ def __init__(self):
+ self.parseTime = 42
+ self.taggedItems = []
+ self.untaggedItems = []
diff --git a/src/test/tagfs_test/item_mock.py b/src/test/tagfs_test/item_mock.py
new file mode 100644
index 0000000..c78252c
--- /dev/null
+++ b/src/test/tagfs_test/item_mock.py
@@ -0,0 +1,27 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+class ItemMock(object):
+
+ def __init__(self, name, tags = []):
+ self.name = name
+ self.tags = tags
+
+def createItemMocks(itemNames):
+ return [ItemMock(name, []) for name in itemNames]
diff --git a/src/test/tagfs_test/node_asserter.py b/src/test/tagfs_test/node_asserter.py
new file mode 100644
index 0000000..2786f27
--- /dev/null
+++ b/src/test/tagfs_test/node_asserter.py
@@ -0,0 +1,44 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import stat
+
+def hasMode(attr, mode):
+ return (attr.st_mode & mode > 0)
+
+def validateNodeInterface(test, node):
+ attr = node.attr
+
+ test.assertTrue(attr.st_atime >= 0)
+ test.assertTrue(attr.st_mtime >= 0)
+ test.assertTrue(attr.st_ctime >= 0)
+
+def validateDirectoryInterface(test, node):
+ attr = node.attr
+
+ test.assertTrue(hasMode(attr, stat.S_IFDIR))
+
+ validateNodeInterface(test, node)
+
+def validateLinkInterface(test, node):
+ attr = node.attr
+
+ test.assertTrue(hasMode(attr, stat.S_IFLNK))
+
+ validateNodeInterface(test, node)
diff --git a/src/test/tagfs_test_small/__init__.py b/src/test/tagfs_test_small/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py b/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py
new file mode 100644
index 0000000..76e5b0c
--- /dev/null
+++ b/src/test/tagfs_test_small/test_filter_context_value_filter_directory_node.py
@@ -0,0 +1,73 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from unittest import TestCase
+
+from tagfs.node_filter_context import ContextValueFilterDirectoryNode
+
+from tagfs_test.node_asserter import validateDirectoryInterface, validateLinkInterface
+from tagfs_test.item_access_mock import ItemAccessMock
+from tagfs_test.item_mock import ItemMock
+
+class TagMock(object):
+
+ def __init__(self, context, value):
+ self.context = context
+ self.value = value
+
+class TaggedItemMock(ItemMock):
+
+ def __init__(self, name, context, value):
+ super(TaggedItemMock, self).__init__(name, [TagMock(context, value), ])
+
+ self._context = context
+ self._value = value
+
+ def isTaggedWithContextValue(self, context, value):
+ return self._context == context and self._value == value
+
+class ParentNodeMock(object):
+
+ def __init__(self, items):
+ self.items = items
+
+class TestContextValueFilterDirectoryNode(TestCase):
+
+ def setUp(self):
+ self.context = 'c1'
+ self.value = 'v1'
+
+ self.itemAccess = ItemAccessMock()
+ self.itemAccess.taggedItems = [TaggedItemMock('item1', self.context, self.value), ]
+
+ self.parentNode = ParentNodeMock(self.itemAccess.taggedItems)
+ self.node = ContextValueFilterDirectoryNode(self.itemAccess, self.parentNode, self.context, self.value)
+
+ def testNodeAttrMTimeIsItemAccessParseTime(self):
+ attr = self.node.attr
+
+ self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
+
+ def testNodeIsDirectory(self):
+ validateDirectoryInterface(self, self.node)
+
+ def testMatchingItemIsAvailableAsLink(self):
+ e = self.node.entries['item1']
+
+ validateLinkInterface(self, e)
diff --git a/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py b/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py
new file mode 100644
index 0000000..2768196
--- /dev/null
+++ b/src/test/tagfs_test_small/test_filter_context_value_list_directory_node.py
@@ -0,0 +1,48 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from unittest import TestCase
+
+from tagfs.node_filter_context import ContextValueListDirectoryNode
+
+from tagfs_test.node_asserter import validateDirectoryInterface, validateLinkInterface
+from tagfs_test.item_access_mock import ItemAccessMock
+from tagfs_test.item_mock import createItemMocks
+
+class ParentNodeMock(object):
+
+ pass
+
+class TestContextValueListDirectoryNode(TestCase):
+
+ def setUp(self):
+ self.itemAccess = ItemAccessMock()
+ self.itemAccess.taggedItems = createItemMocks(['item1'])
+
+ self.parentNode = ParentNodeMock()
+ self.context = 'c1'
+ self.node = ContextValueListDirectoryNode(self.itemAccess, self.parentNode, self.context)
+
+ def testNodeAttrMTimeIsItemAccessParseTime(self):
+ attr = self.node.attr
+
+ self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
+
+ def testNodeIsDirectory(self):
+ validateDirectoryInterface(self, self.node)
diff --git a/src/test/tagfs_test_small/test_root_directory_node.py b/src/test/tagfs_test_small/test_root_directory_node.py
new file mode 100644
index 0000000..0d84ca2
--- /dev/null
+++ b/src/test/tagfs_test_small/test_root_directory_node.py
@@ -0,0 +1,76 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from unittest import TestCase
+
+from tagfs.node_root import RootDirectoryNode
+
+from tagfs_test.node_asserter import validateDirectoryInterface, validateLinkInterface
+from tagfs_test.item_access_mock import ItemAccessMock
+from tagfs_test.item_mock import createItemMocks
+
+class AbstractRootDirectoryNodeTest(TestCase):
+
+ @property
+ def _itemNames(self):
+ return self._taggedItemNames
+
+ def setUp(self):
+ self._taggedItemNames = ['item1']
+
+ self.itemAccess = ItemAccessMock()
+ self.itemAccess.taggedItems = createItemMocks(self._itemNames)
+
+ self.node = RootDirectoryNode(self.itemAccess)
+
+class TestRootDirectoryNode(AbstractRootDirectoryNodeTest):
+
+ @property
+ def _itemNames(self):
+ return self._taggedItemNames + ['.untagged']
+
+ def testNodeAttrMTimeIsItemAccessParseTime(self):
+ attr = self.node.attr
+
+ self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
+
+ def testNodeIsDirectory(self):
+ validateDirectoryInterface(self, self.node)
+
+ def testItemLinksReplaceUntaggedDirectory(self):
+ untaggedNode = self.node.entries['.untagged']
+
+ # untagged node must be a link as the untagged directory node
+ # weights less than the '.untagged' item from the tagged items.
+ validateLinkInterface(self, untaggedNode)
+
+ def testNodeContainerContainsTaggedNodeLinks(self):
+ entries = self.node.entries
+
+ for itemName in self._taggedItemNames:
+ self.assertTrue(itemName in entries)
+
+ validateLinkInterface(self, entries[itemName])
+
+class TestRootDirectoryNodeUntaggedDirectory(AbstractRootDirectoryNodeTest):
+
+ def testNodeContainsUntaggedDirectory(self):
+ untaggedNode = self.node.entries['.untagged']
+
+ validateDirectoryInterface(self, untaggedNode)
diff --git a/src/test/tagfs_test_small/test_transient_dict.py b/src/test/tagfs_test_small/test_transient_dict.py
new file mode 100644
index 0000000..ac8ed10
--- /dev/null
+++ b/src/test/tagfs_test_small/test_transient_dict.py
@@ -0,0 +1,52 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import unittest
+from tagfs.transient_dict import TransientDict
+
+class TestTransientDict(unittest.TestCase):
+
+ def testGetAndSetValues(self):
+ d = TransientDict(10)
+
+ self.assertTrue('1' not in d)
+
+ d['1'] = 'a'
+ d['2'] = 'b'
+
+ self.assertTrue(d['1'] == 'a')
+ self.assertTrue(d['2'] == 'b')
+
+ self.assertTrue('1' in d)
+ self.assertTrue('3' not in d)
+ self.assertTrue('a' not in d)
+
+ def testForgettValuesWhenDictSizeExceeded(self):
+ d = TransientDict(2)
+
+ d['1'] = 'a'
+ d['2'] = 'b'
+ d['1'] = 'a'
+ d['3'] = 'c'
+ d['1'] = 'a'
+ d['4'] = 'c'
+
+ self.assertTrue('1' in d)
+ self.assertTrue('2' not in d)
+ self.assertTrue('4' in d)
diff --git a/src/test/tagfs_test_small/test_untagged_items_directory_node.py b/src/test/tagfs_test_small/test_untagged_items_directory_node.py
new file mode 100644
index 0000000..e2f695f
--- /dev/null
+++ b/src/test/tagfs_test_small/test_untagged_items_directory_node.py
@@ -0,0 +1,56 @@
+#
+# Copyright 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from unittest import TestCase
+
+from tagfs.node_untagged_items import UntaggedItemsDirectoryNode
+
+from tagfs_test.node_asserter import validateLinkInterface, validateDirectoryInterface
+from tagfs_test.item_access_mock import ItemAccessMock
+from tagfs_test.item_mock import createItemMocks
+
+class TestUntaggedItemsDirectoryNode(TestCase):
+
+ def setUp(self):
+ self.itemAccess = ItemAccessMock()
+ self.itemAccess.untaggedItems = createItemMocks(['item1', 'item2'])
+
+ self.nodeName = 'e'
+ self.node = UntaggedItemsDirectoryNode(self.nodeName, self.itemAccess)
+
+ def testNodeAttrMTimeIsItemAccessParseTime(self):
+ attr = self.node.attr
+
+ self.assertEqual(self.itemAccess.parseTime, attr.st_mtime)
+
+ def testNodeIsDirectory(self):
+ validateDirectoryInterface(self, self.node)
+
+ def testUntaggedItemAccessItemsAreUntaggedItemsDirectoryEntries(self):
+ entries = self.node.entries
+
+ self.assertEqual(len(self.itemAccess.untaggedItems), len(entries))
+
+ for i in self.itemAccess.untaggedItems:
+ self.assertTrue(i.name in entries)
+
+ validateLinkInterface(self, entries[i.name])
+
+ def testNodeHasName(self):
+ self.assertEqual(self.nodeName, self.node.name)
diff --git a/util/trace_logfiles.py b/util/trace_logfiles.py
new file mode 100755
index 0000000..cea3b23
--- /dev/null
+++ b/util/trace_logfiles.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import logging
+import re
+
+class TraceLogEntry(object):
+
+ def __init__(self, context, path):
+ self.context = context
+ self.path = path
+
+class TraceLog(object):
+
+ LINE_BUFFER_SIZE = 100000
+
+ TRACE_PATTERN = re.compile('[0-9\-,: ]+DEBUG (readlink|getattr|readdir) (.*)$')
+
+ def __init__(self):
+ self.entries = []
+
+ def _readLogLine(self, line):
+ m = TraceLog.TRACE_PATTERN.match(line)
+
+ if not m:
+ return
+
+ context = m.group(1)
+ path = m.group(2)
+
+ self.entries.append(TraceLogEntry(context, path))
+
+ def readLogFile(self, fileName):
+ logging.info('Reading logfile ' + fileName)
+
+ f = open(fileName)
+
+ while True:
+ lines = f.readlines(TraceLog.LINE_BUFFER_SIZE)
+ if not lines:
+ break;
+
+ for line in lines:
+ self._readLogLine(line)
+
+class TraceResult(object):
+
+ def __init__(self):
+ self.contextHistogram = {}
+ self.contextPathHistogram = {}
+
+ def _analyzeContextHistogram(self, traceLog):
+ for e in traceLog.entries:
+ if not e.context in self.contextHistogram:
+ self.contextHistogram[e.context] = 0
+
+ self.contextHistogram[e.context] += 1
+
+ def _analyzeContextPathHistogram(self, traceLog):
+ for e in traceLog.entries:
+ if not e.context in self.contextPathHistogram:
+ self.contextPathHistogram[e.context] = {}
+
+ ph = self.contextPathHistogram[e.context]
+
+ if not e.path in ph:
+ ph[e.path] = 0
+
+ ph[e.path] += 1
+
+
+ def _analyzeTraceLog(self, traceLog):
+ self._analyzeContextHistogram(traceLog)
+ self._analyzeContextPathHistogram(traceLog)
+
+ def analyzeLogFile(self, fileName):
+ tl = TraceLog()
+ tl.readLogFile(fileName)
+
+ self._analyzeTraceLog(tl)
+
+def usage():
+ # TODO print usage
+
+ pass
+
+def writeCSV(fileName, pathHistogram):
+ import csv
+
+ w = csv.writer(open(fileName, 'w'))
+
+ for path, histogram in pathHistogram.iteritems():
+ w.writerow([path, histogram])
+
+if __name__ == '__main__':
+ logging.basicConfig(level = logging.DEBUG)
+
+ import getopt
+ import sys
+
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "", [])
+ except getopt.GetoptError:
+ usage()
+
+ sys.exit(1)
+
+ for opt, arg in opts:
+ if opt in ("-h", "--help"):
+ usage()
+ sys.exit()
+
+ tr = TraceResult()
+
+ for fileName in args:
+ tr.analyzeLogFile(fileName)
+
+ print "Context Histogram"
+ for context, calls in tr.contextHistogram.iteritems():
+ print ' %s: %s' % (context, calls)
+
+ for context, pathHistogram in tr.contextPathHistogram.iteritems():
+ writeCSV('pathHistogram_' + context + '.csv', pathHistogram)
|
marook/tagfs
|
20c8cc8ca5fdee41b88df78170ec47e474463680
|
migrated test runner from https://github.com/marook/minecraft-world-io to tagfs
|
diff --git a/setup.py b/setup.py
index 4c3334e..5c99920 100644
--- a/setup.py
+++ b/setup.py
@@ -1,213 +1,250 @@
#!/usr/bin/env python
#
# Copyright 2009 Peter Prohaska
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
from distutils.core import setup, Command
import sys
import os
from os.path import (
basename,
dirname,
abspath,
splitext,
join as pjoin
)
from glob import glob
from unittest import TestLoader, TextTestRunner
import re
import datetime
projectdir = dirname(abspath(__file__))
reportdir = pjoin(projectdir, 'reports')
srcdir = pjoin(projectdir, 'src')
bindir = pjoin(srcdir, 'bin')
moddir = pjoin(srcdir, 'modules')
testdir = pjoin(srcdir, 'test')
testdatadir = pjoin(projectdir, 'etc', 'test', 'events')
testmntdir = pjoin(projectdir, 'mnt')
+assert os.path.isdir(srcdir)
+assert os.path.isdir(bindir)
+assert os.path.isdir(moddir)
+assert os.path.isdir(testdir)
+
+assert os.path.isdir(testdatadir)
+
class Report(object):
def __init__(self):
self.reportDateTime = datetime.datetime.utcnow()
self.reportDir = os.path.join(reportdir, self.reportDateTime.strftime('%Y-%m-%d_%H_%M_%S'))
# fails when dir already exists which is nice
os.makedirs(self.reportDir)
@property
def coverageReportFileName(self):
return os.path.join(self.reportDir, 'coverage.txt')
@property
def unitTestReportFileName(self):
return os.path.join(self.reportDir, 'tests.txt')
def sourceFiles():
yield os.path.join(bindir, 'tagfs')
sourceFilePattern = re.compile('^.*[.]py$')
for root, dirs, files in os.walk(moddir):
for f in files:
if(not sourceFilePattern.match(f)):
continue
+ if(f.startswith('.#')):
+ continue
+
yield os.path.join(root, f)
+def fullSplit(p):
+ head, tail = os.path.split(p)
+
+ if(len(head) > 0):
+ for n in fullSplit(head):
+ yield n
+
+ yield tail
+
+def testModules():
+ testFilePattern = re.compile('^(test.*)[.]py$', re.IGNORECASE)
+
+ for root, dirs, files in os.walk(testdir):
+ for f in files:
+ m = testFilePattern.match(f)
+
+ if(not m):
+ continue
+
+ relDir = os.path.relpath(root, testdir)
+
+ print 'relDir %s' % relDir
+
+ yield '.'.join([n for n in fullSplit(relDir)] + [m.group(1), ])
+
def printFile(fileName):
if(not os.path.exists(fileName)):
# TODO maybe we should not silently return?
return
with open(fileName, 'r') as f:
for line in f:
sys.stdout.write(line)
class test(Command):
description = 'run tests'
user_options = []
def initialize_options(self):
self._cwd = os.getcwd()
self._verbosity = 2
def finalize_options(self): pass
def run(self):
report = Report()
- testPyMatcher = re.compile('(.*/)?test[^/]*[.]py', re.IGNORECASE)
-
- tests = ['.'.join([
- basename(testdir), splitext(basename(f))[0]
- ]) for f in glob(pjoin(
- testdir, '*.py'
- )) if testPyMatcher.match(f)]
+ tests = [m for m in testModules()]
print "..using:"
print " testdir:", testdir
print " testdatadir:", testdatadir
print " testmntdir:", testmntdir
print " tests:", tests
print " sys.path:", sys.path
print
sys.path.insert(0, moddir)
- sys.path.insert(0, srcdir)
+ sys.path.insert(0, testdir)
+
+ # TODO try to import all test cases here. the TestLoader is throwing
+ # very confusing errors when imports can't be resolved.
# configure logging
# TODO not sure how to enable this... it's a bit complicate to enable
# logging only for 'make mt' and disable it then for
# 'python setup.py test'. 'python setup.py test' is such a gabber...
#if 'DEBUG' in os.environ:
# from tagfs import log_config
# log_config.setUpLogging()
+ if 'DEBUG' in os.environ:
+ import logging
+ logging.basicConfig(level = logging.DEBUG)
+
suite = TestLoader().loadTestsFromNames(tests)
with open(report.unitTestReportFileName, 'w') as testResultsFile:
r = TextTestRunner(stream = testResultsFile, verbosity = self._verbosity)
def runTests():
r.run(suite)
try:
import coverage
c = coverage.coverage()
c.start()
runTests()
c.stop()
with open(report.coverageReportFileName, 'w') as reportFile:
c.report([f for f in sourceFiles()], file = reportFile)
except ImportError:
print ''
print 'coverage module not found.'
print 'To view source coverage stats install http://nedbatchelder.com/code/coverage/'
print ''
runTests()
# TODO use two streams instead of printing files after writing
printFile(report.unitTestReportFileName)
printFile(report.coverageReportFileName)
# Overrides default clean (which cleans from build runs)
# This clean should probably be hooked into that somehow.
class clean_pyc(Command):
description = 'remove *.pyc files from source directory'
user_options = []
def initialize_options(self):
self._delete = []
for cwd, dirs, files in os.walk(projectdir):
self._delete.extend(
pjoin(cwd, f) for f in files if f.endswith('.pyc')
)
- def finalize_options(self): pass
+ def finalize_options(self):
+ pass
def run(self):
for f in self._delete:
try:
os.unlink(f)
except OSError, e:
print "Strange '%s': %s" % (f, e)
# Could be a directory.
# Can we detect file in use errors or are they OSErrors
# as well?
# Shall we catch all?
setup(
cmdclass = {
'test': test,
'clean_pyc': clean_pyc,
},
name = 'tagfs',
version = '0.1',
url = 'http://wiki.github.com/marook/tagfs',
description = '',
long_description = '',
author = 'Markus Pielmeier',
author_email = '[email protected]',
license = 'GPLv3',
download_url = 'http://github.com/marook/tagfs/downloads/tagfs_0.1-src.tar.bz2',
platforms = 'Linux',
requires = [],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Filesystems'
],
data_files = [
(pjoin('share', 'doc', 'tagfs'), ['AUTHORS', 'COPYING', 'README'])
],
# TODO maybe we should include src/bin/*?
scripts = [pjoin(bindir, 'tagfs')],
packages = ['tagfs'],
package_dir = {'': moddir},
)
diff --git a/src/test/__init__.py b/src/test/tagfs_test/__init__.py
similarity index 100%
rename from src/test/__init__.py
rename to src/test/tagfs_test/__init__.py
diff --git a/src/test/env.py b/src/test/tagfs_test/env.py
similarity index 96%
rename from src/test/env.py
rename to src/test/tagfs_test/env.py
index 11bcae6..404cf50 100644
--- a/src/test/env.py
+++ b/src/test/tagfs_test/env.py
@@ -1,41 +1,41 @@
#
# Copyright 2010 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
#
import sys
def setupenv():
from os.path import dirname, abspath, exists, join as pjoin, split as psplit
global eventsdir
global projectdir
# TODO this code is duplicate with path generation from setup.py
testdir = dirname(abspath(__file__))
- projectdir = pjoin(testdir, '..', '..')
+ projectdir = pjoin(testdir, '..', '..', '..')
srcdir = pjoin(projectdir, 'src')
moddir = pjoin(srcdir, 'modules')
eventsdir = pjoin(projectdir, 'etc', 'test', 'events')
for x in (testdir, eventsdir, projectdir):
assert exists(x), "Directory not found: %s" % x
sys.path.insert(0, testdir)
sys.path.insert(0, moddir)
setupenv()
diff --git a/src/test/mock_config.py b/src/test/tagfs_test/mock_config.py
similarity index 100%
rename from src/test/mock_config.py
rename to src/test/tagfs_test/mock_config.py
diff --git a/src/test/setup.py b/src/test/tagfs_test/setup.py
similarity index 100%
rename from src/test/setup.py
rename to src/test/tagfs_test/setup.py
diff --git a/src/test/test_all.py b/src/test/tagfs_test/test_all.py
similarity index 100%
rename from src/test/test_all.py
rename to src/test/tagfs_test/test_all.py
diff --git a/src/test/test_config.py b/src/test/tagfs_test/test_config.py
similarity index 100%
rename from src/test/test_config.py
rename to src/test/tagfs_test/test_config.py
diff --git a/src/test/test_transient_dict.py b/src/test/tagfs_test/test_transient_dict.py
similarity index 100%
rename from src/test/test_transient_dict.py
rename to src/test/tagfs_test/test_transient_dict.py
diff --git a/src/test/test_view.py b/src/test/tagfs_test/test_view.py
similarity index 100%
rename from src/test/test_view.py
rename to src/test/tagfs_test/test_view.py
|
marook/tagfs
|
f52d508dd19aeccb1f245e04e688b313fb66ec49
|
now ignorring tagfs configuration directory as item
|
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
index 82f6cfa..45ad9b6 100644
--- a/src/modules/tagfs/item_access.py
+++ b/src/modules/tagfs/item_access.py
@@ -1,336 +1,340 @@
#
# Copyright 2009 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import time
import traceback
from cache import cache
class Tag(object):
def __init__(self, value, context = None):
if context == None:
self.context = None
else:
self.context = context.strip()
self.value = value.strip()
if not self.context == None and len(self.context) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
if len(self.value) == 0:
# we don't allow empty strings as they can't be represented as a
# directory very well
raise ValueError()
def __hash__(self):
return (self.context, self.value).__hash__()
def __eq__(self, other):
return self.value == other.value and self.context == other.context
def __repr__(self):
return '<Tag %s: %s>' % (self.context, self.value)
def parseTagsFromFile(tagFileName):
"""Parses the tags from the specified file.
@return: The parsed values are returned as a set containing Tag objects.
@see: Tag
"""
tags = set()
tagFile = open(tagFileName, 'r')
try:
for rawTag in tagFile.readlines():
rawTag = rawTag.strip()
try:
if len(rawTag) == 0:
continue
tagTuple = rawTag.split(':', 1)
if len(tagTuple) == 1:
tagContext = None
tagValue = tagTuple[0]
else:
tagContext = tagTuple[0]
tagValue = tagTuple[1]
tag = Tag(tagValue, context = tagContext)
tags.add(tag)
except:
logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
finally:
tagFile.close()
return tags
class Item(object):
def __init__(self, name, itemAccess):
self.name = name
self.itemAccess = itemAccess
# TODO register at file system to receive tag file change events.
@property
@cache
def itemDirectory(self):
return os.path.join(self.itemAccess.dataDirectory, self.name)
@property
@cache
def _tagFileName(self):
"""Returns the name of the tag file for this item.
"""
itemDirectory = self.itemDirectory
return os.path.join(itemDirectory, self.itemAccess.tagFileName)
def __parseTags(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return parseTagsFromFile(tagFileName)
@property
@cache
def tagsCreationTime(self):
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getctime(self._tagFileName)
@property
@cache
def tagsModificationTime(self):
"""Returns the last time when the tags have been modified.
"""
tagFileName = self._tagFileName
if not os.path.exists(tagFileName):
return None
return os.path.getmtime(tagFileName)
@property
@cache
def tags(self):
"""Returns the tags as a list for this item.
"""
return self.__parseTags()
def getTagsByContext(self, context):
for t in self.tags:
if context != t.context:
continue
yield t.value
@property
@cache
def tagged(self):
return os.path.exists(self._tagFileName)
def __repr__(self):
return '<Item %s>' % self.name
class TagValueFilter(object):
def __init__(self, tagValue):
self.tagValue = tagValue
def filterItems(self, items):
droppedItems = set()
for item in items:
hasTagValue = False
for itemTag in item.tags:
if itemTag.value == self.tagValue:
hasTagValue = True
break
if not hasTagValue:
droppedItems.add(item)
items -= droppedItems
class TagFilter(object):
def __init__(self, tag):
self.tag = tag
def filterItems(self, items):
droppedItems = set()
for item in items:
if not self.tag in item.tags:
droppedItems.add(item)
items -= droppedItems
class AndFilter(object):
"""Concatenates two filters with a logical 'and'.
"""
def __init__(self, subFilters):
self.subFilters = subFilters
def filterItems(self, items):
for subFilter in self.subFilters:
subFilter.filterItems(items)
class NoneFilter(object):
def filterItems(self, items):
pass
class NotContextFilter(object):
def __init__(self, context):
self.context = context
def filterItems(self, items):
droppedItems = set()
for item in items:
for tag in item.tags:
if self.context == tag.context:
droppedItems.add(item)
break
items -= droppedItems
class ItemAccess(object):
"""This is the access point to the Items.
"""
def __init__(self, dataDirectory, tagFileName):
self.dataDirectory = dataDirectory
self.tagFileName = tagFileName
self.__items = None
self.__tags = None
self.__taggedItems = None
self.__untaggedItems = None
self.parseTime = 0
def __parseItems(self):
items = {}
logging.debug('Start parsing items from dir: %s', self.dataDirectory)
for itemName in os.listdir(self.dataDirectory):
+ if itemName == '.tagfs':
+ # skip directory with configuration
+ continue
+
try:
item = Item(itemName, self)
items[itemName] = item
except IOError, (error, strerror):
logging.error('Can \'t read tags for item %s: %s',
itemName,
strerror)
logging.debug('Found %s items', len(items))
self.parseTime = time.time()
return items
@property
@cache
def items(self):
return self.__parseItems()
@property
@cache
def tags(self):
tags = set()
for item in self.items.itervalues():
if not item.tagged:
continue
tags = tags | item.tags
return tags
@property
@cache
def taggedItems(self):
return set([item for item in self.items.itervalues() if item.tagged])
@property
@cache
def untaggedItems(self):
return set([item for item in self.items.itervalues() if not item.tagged])
def getItemDirectory(self, item):
return os.path.join(self.dataDirectory, item)
def filterItems(self, filter):
resultItems = set([item for item in self.taggedItems])
filter.filterItems(resultItems)
return resultItems
def contextTags(self, context):
contextTags = set()
for tag in self.tags:
if tag.context == context:
contextTags.add(tag)
return contextTags
@property
@cache
def contexts(self):
contexts = set()
for tag in self.tags:
if tag.context == None:
continue
contexts.add(tag.context)
return contexts
def __str__(self):
return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
|
marook/tagfs
|
8bb718dddd5eedd75ace5dcb63cc1bbcc1373dc5
|
added comments to the default nodes
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..bd44ede
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+target
+build
diff --git a/.project b/.project
new file mode 100644
index 0000000..188e611
--- /dev/null
+++ b/.project
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>tagfs</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.python.pydev.PyDevBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.python.pydev.pythonNature</nature>
+ </natures>
+</projectDescription>
diff --git a/.pydevproject b/.pydevproject
new file mode 100644
index 0000000..80b5d5d
--- /dev/null
+++ b/.pydevproject
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?>
+
+<pydev_project>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/tagfs/src</path>
+<path>/tagfs/test</path>
+</pydev_pathproperty>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.5</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+</pydev_project>
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..1a2c117
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,2 @@
+Markus Pielmeier <[email protected]>
+Peter Prohaska <[email protected]>
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..37e498f
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ The tagfs is a virtual file system for filtering tagged directories.
+ Copyright (C) 2009 Markus Pielmeier
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ tagfs Copyright (C) 2009 Markus Pielmeier
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e17582a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,105 @@
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+prefix = /usr/local
+bindir = $(prefix)/bin
+docdir = $(prefix)/share/doc/tagfs
+installdirs = $(bindir) $(docdir)
+
+srcdir = .
+targetdir = $(srcdir)/target
+
+testdatadir = $(srcdir)/etc/test/events
+testmntdir = $(shell pwd)/mnt
+
+pymoddir = $(srcdir)/src/modules
+
+PYTHON = python
+INSTALL = install
+INSTALL_DATA = $(INSTALL) -m 644
+INSTALL_PROGRAM = $(INSTALL)
+
+DOCS = AUTHORS COPYING README VERSION
+
+VERSION = `cat VERSION`
+TSTAMP = `date '+%Y%m%d_%H%M'`
+
+.PHONY: all
+all:
+ @echo "42. That's all."
+ @echo "Try 'make mounttest' for something more interesting."
+
+.PHONY: clean
+clean:
+ find $(srcdir) -name '*.pyc' -type f -exec rm {} \;
+ @if test "`mount | grep -e 'tagfs.*on.*$(testmntdir)'`"; then \
+ echo "tagfs mounted on '$(testmntdir)' -- keeping it."; \
+ elif test -d '$(testmntdir)'; then \
+ echo 'removing $(testmntdir)'; \
+ rmdir '$(testmntdir)'; \
+ fi
+
+ rm -r -- "$(targetdir)"
+
+.PHONY: test
+test:
+ $(PYTHON) $(srcdir)/test/test_all.py
+
+$(installdirs):
+ $(INSTALL) -d $(installdirs)
+
+.PHONY: install
+install: $(installdirs)
+ $(INSTALL_PROGRAM) $(srcdir)/src/tagfs $(bindir)/tagfs
+ $(INSTALL_DATA) $(DOCS) $(docdir)
+
+.PHONY: uninstall
+uninstall:
+ rm -- $(bindir)/tagfs
+ rm -r -- $(docdir)
+
+$(testmntdir):
+ mkdir -p $@
+
+.PHONY: mounttest
+mounttest: $(testmntdir)
+ PYTHONPATH=$(pymoddir):$(PYTHONPATH) \
+ $(PYTHON) $(srcdir)/src/bin/tagfs -i $(testdatadir) $(testmntdir)
+
+.PHONY: unmounttest
+unmounttest:
+ fusermount -u $(testmntdir)
+ rmdir -- $(testmntdir)
+
+.PHONY: umounttest
+umounttest: unmounttest
+
+.PHONY: mt
+mt: mounttest
+
+.PHONY: umt
+umt: unmounttest
+
+.PHONY: distsnapshot
+distsnapshot:
+ mkdir -p -- "$(targetdir)/tagfs_$(VERSION)_snapshot_$(TSTAMP)"
+
+ cp -a $(DOCS) etc src test util setup.py README.dev Makefile "$(targetdir)/tagfs_$(VERSION)_snapshot_$(TSTAMP)"
+
+ tar cjf $(targetdir)/tagfs_$(VERSION)_snapshot_$(TSTAMP)-src.tar.bz2 '--exclude=*~' '--exclude=*.pyc' -C "$(targetdir)" "tagfs_$(VERSION)_snapshot_$(TSTAMP)"
diff --git a/README b/README
new file mode 100644
index 0000000..9a8cbe4
--- /dev/null
+++ b/README
@@ -0,0 +1,119 @@
+tagfs - tag file system
+
+1) Introduction
+2) Requirements
+3) Installation
+4) Usage
+5) Configuration
+5.1) Options
+5.1.1) tagFileName
+5.1.2) enableValueFilters
+5.1.3) enableRootItemLinks
+6) Tests
+7) Further Reading
+8) Contact
+
+---------------------------------------------------------------------
+Introduction
+
+tagfs is used to organize your documents using tags.
+
+This document contains basic usage instructions for users. To develop or debug
+tagfs see the README.dev file.
+
+---------------------------------------------------------------------
+Requirements
+
+* python 2.5, 2.6
+* Linux kernel with fuse enabled
+* python-fuse installed
+
+---------------------------------------------------------------------
+Installation
+
+To install tagfs into your home directory type the following:
+
+$ python setup.py install --home ~/.local
+
+If you haven't already extended your local python path then add the following
+to your environment configuration script. For example to your .bashrc:
+
+export PYTHONPATH=~/.local/lib/python:$PYTHONPATH
+
+---------------------------------------------------------------------
+Usage
+
+After installation tagfs can be started the following way.
+
+Mount a tagged directory:
+$ tagfs -i /path/to/my/items/directory /path/to/my/mount/point
+
+Unmount a tagged directory:
+$ fusermount -u /path/to/my/mount/point
+
+---------------------------------------------------------------------
+Configuration
+
+tagfs can be configured through configuration files. Configuration files are
+searched in different locations by tagfs. The following locations are used.
+Locations with higher priority come first:
+- <items directory>/.tagfs/tagfs.conf
+- ~/.tagfs/tagfs.conf
+- /etc/tagfs/tagfs.conf
+
+Right now the following configuration options are supported.
+
+---------------------------------------------------------------------
+Configuration - Options - tagFileName
+
+Through this option the name of the parsed tag files can be specified. The
+default value is '.tag'.
+
+Example:
+
+[global]
+tagFileName = ABOUT
+
+---------------------------------------------------------------------
+Configuration - Options - enableValueFilters
+
+You can enable or disable value filters. If you enable value filters you will
+see filter directories for each tag value. For value filters the tag's
+context can be anything. The default value is 'false'.
+
+Example:
+
+[global]
+enableValueFilters = true
+
+---------------------------------------------------------------------
+Configuration - Options - enableRootItemLinks
+
+To show links to all items in the tagfs '/' directory enable this option. The
+default value is 'false'.
+
+Example:
+
+[global]
+enableRootItemLinks = true
+
+---------------------------------------------------------------------
+Tests
+
+You can execute the test cases via the setup.py script in the project's root
+directory.
+
+$ python setup.py test
+
+---------------------------------------------------------------------
+Further Reading
+
+Using a file system for my bank account (Markus Pielmeier)
+http://pielmeier.blogspot.com/2010/08/using-file-system-for-my-bank-account.html
+
+---------------------------------------------------------------------
+Contact
+
+* homepage: http://wiki.github.com/marook/tagfs
+* user group: http://groups.google.com/group/tagfs
+* author: Markus Pielmeier <[email protected]>
diff --git a/README.dev b/README.dev
new file mode 100644
index 0000000..7f8f673
--- /dev/null
+++ b/README.dev
@@ -0,0 +1,65 @@
+tagfs - tag file system
+developer readme
+
+1) Logging
+2) Profiling
+3) Tracing
+4) Distribution
+4.1) tar Distribution
+5) Code Coverage
+
+---------------------------------------------------------------------
+Logging
+
+You can enable logging by setting a debug environment variable before you
+launch tagfs:
+$ export DEBUG=1
+
+tagfs will log to the console and the file /tmp/tagfs.log
+
+---------------------------------------------------------------------
+Profiling
+
+You can enable profiling by setting a profile environment variable before you
+launch tagfs:
+$ export PROFILE=1
+
+After unmounting your tagfs file system a profile file will be written. The
+profile file will be written to the current directory. The profile file will
+be named 'tagfs.profile'.
+
+---------------------------------------------------------------------
+Tracing
+
+Tracing is done via the log output. There is a utility script to analyze the
+log files. To analyze a log file execute the following
+
+$ util/trace_logfiles.py /tmp/tagfs.log
+
+The tracing script will output some statistics.
+
+---------------------------------------------------------------------
+tar Distribution
+
+The tagfs project contains scripts for creating source distribution packages.
+To create a tar distribution package you execute the following:
+
+$ make distsnapshot
+
+The make call will create an archive within the target directory. The created
+tar file is used for tagfs source distribution.
+
+---------------------------------------------------------------------
+Code Coverage
+
+The tagfs unit tests can be executed with code coverage measurement enabled.
+setup.py will measure the code coverage if the coverage lib is installed.
+
+The coverage lib is available here: http://nedbatchelder.com/code/coverage
+
+If you're a debian user you can try:
+$ apt-get install python-coverage
+
+The code coverage will be written below the reports directory after executing
+the test cases:
+$ python setup.py test
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..49d5957
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.1
diff --git a/etc/demo/README b/etc/demo/README
new file mode 100644
index 0000000..94226fc
--- /dev/null
+++ b/etc/demo/README
@@ -0,0 +1,145 @@
+The demo subdirectory contains the file structure for a little example. It shows
+what tagfs is doing and how you can use it.
+
+---------------------------------------------------------------------
+tagfs is used to organize your documents using tags. tagfs requires you to
+keep your files in a simple directory structure.
+
+In our example we are organizing some holiday pictures from india and south korea.
+So we create two item directories below the events directory:
+* 2008-03-29 - holiday south korea
+* 2008-12-25 - holiday india
+
+The names of the item directories can be anything you want but it's recommended
+to add date timestamps. These timestamps allow you to have a look at your
+documents in a chronological order and prevent you from specifying duplicate
+names. For tagfs the timestamp is irrelevant.
+
+Now that we have created the item directories below the event directory we can
+tag them. To do so we add .tag files within them. And to make it more exciting
+we add some images which represent our documents. Then we have a directory
+structure like this:
+
+events/
+|-- 2008-03-29 - holiday south korea
+| |-- .tag
+| `-- 00_IMG008.jpg
+`-- 2008-12-25 - holiday india
+ |-- .tag
+ `-- cimg1029.jpg
+
+In this example the directory structure below the item directories is flat. In
+the real world the content and directory structure below the item directories
+is not limited. Except that the tag file must be named .tag.
+
+As already mentioned the .tag files contain the tags. The .tag file for the
+south korea holiday looks like this:
+
+holiday
+airport
+korea
+
+As you can imagine we have applied three tags: holiday, airport and korea. The
+tags are newline separated and can contain spaces too. Empty lines are ignored.
+For the india holiday we use the following .tag file:
+
+holiday
+airport
+india
+
+Now that we have organized our documents and applied tags on them we can start
+to search for our data. To do so we first mount the tagfs. Open your bash, enter
+the demo directory and execute the following:
+
+$ tagfs.py -i events tags
+
+This will mount the tagfs below the tags directory. The event directory contains
+the item directories which will be parsed for tags. As a result you will get the
+following directory tree below the tags directory:
+
+tags/
+|-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+|-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+|-- airport
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| |-- holiday
+| | |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | |-- india
+| | | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- korea
+| | `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- india
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- holiday
+| | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- korea
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| `-- holiday
+| `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+|-- holiday
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| |-- airport
+| | |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | |-- india
+| | | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- korea
+| | `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| |-- india
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- airport
+| | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- korea
+| |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+| `-- airport
+| `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+|-- india
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| |-- airport
+| | |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| | `-- holiday
+| | `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- holiday
+| |-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+| `-- airport
+| `-- 2008-12-25 - holiday india -> /demo/events/2008-12-25 - holiday india
+`-- korea
+ |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ |-- airport
+ | |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ | `-- holiday
+ | `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ `-- holiday
+ |-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+ `-- airport
+ `-- 2008-03-29 - holiday south korea -> /demo/events/2008-03-29 - holiday south korea
+
+OK... that's a lot! The idea behind the tagfs is a simple directory based filter
+system. If you want to see anything relevant for the tags india you type:
+
+$ ls -1 tags/india
+
+The output will be:
+
+2008-12-25 - holiday india
+airport
+holiday
+
+The output will show you all item directories as links which are tagged with
+india. Additionally you will see all tags which can be further combined with
+india and show you further results. The tag korea is not shown as there would
+be no results if you filter by india and korea.
+
+Filtering for multiple tags at once can be done like this:
+
+$ ls -1 tags/india/holiday
+
+You will get the output:
+
+2008-12-25 - holiday india
+airport
+
+I hope this explains the concept. Now it's your turn :-) Try tagfs yourself!
diff --git a/etc/demo/events/2008-03-29 - holiday south korea/.tag b/etc/demo/events/2008-03-29 - holiday south korea/.tag
new file mode 100644
index 0000000..6559d90
--- /dev/null
+++ b/etc/demo/events/2008-03-29 - holiday south korea/.tag
@@ -0,0 +1,3 @@
+holiday
+airport
+location: korea
diff --git a/etc/demo/events/2008-03-29 - holiday south korea/00_IMG008.jpg b/etc/demo/events/2008-03-29 - holiday south korea/00_IMG008.jpg
new file mode 100644
index 0000000..87cbd57
Binary files /dev/null and b/etc/demo/events/2008-03-29 - holiday south korea/00_IMG008.jpg differ
diff --git a/etc/demo/events/2008-12-25 - holiday india/.tag b/etc/demo/events/2008-12-25 - holiday india/.tag
new file mode 100644
index 0000000..5a0c76a
--- /dev/null
+++ b/etc/demo/events/2008-12-25 - holiday india/.tag
@@ -0,0 +1,3 @@
+holiday
+airport
+location: india
diff --git a/etc/demo/events/2008-12-25 - holiday india/cimg1029.jpg b/etc/demo/events/2008-12-25 - holiday india/cimg1029.jpg
new file mode 100644
index 0000000..08ace17
Binary files /dev/null and b/etc/demo/events/2008-12-25 - holiday india/cimg1029.jpg differ
diff --git a/etc/demo/events/2009-07-29 - no tags/emptydir b/etc/demo/events/2009-07-29 - no tags/emptydir
new file mode 100644
index 0000000..e69de29
diff --git a/etc/demo/tags/.deleteme b/etc/demo/tags/.deleteme
new file mode 100644
index 0000000..e69de29
diff --git a/etc/rdf/example-rdf.xml b/etc/rdf/example-rdf.xml
new file mode 100644
index 0000000..947273d
--- /dev/null
+++ b/etc/rdf/example-rdf.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:dc="http://purl.org/dc/elements/1.1/">
+ <!-- TODO define tfs XML namespace -->
+ <!-- TODO how to separate between context and tag descriptions? -->
+
+ <rdf:Description rdf:about="location">
+ <!-- this is a context description -->
+
+ <dc:title>location</dc:title>
+
+ <!-- TODO -->
+ </rdf:Description>
+
+ <rdf:Description rdf:about="holiday">
+ <!-- this is a tag description -->
+
+ <dc:title>holiday</dc:title>
+
+ <!-- TODO -->
+ </rdf:Description>
+
+ <rdf:Description rdf:about="airport">
+ <!-- this is a tag description -->
+
+ <dc:title>airport</dc:title>
+
+ <!-- TODO -->
+ </rdf:Description>
+
+ <rdf:Description rdf:about="file://path/to/my/events/2008-03-29 - holiday south korea">
+ <!-- this is an item description -->
+
+ <dc:title>2008-03-29 - holiday south korea</dc:title>
+
+ <tfs:tagging>
+ <tfs:context rdf:resource="location" />
+ <tfs:tag rdf:resource="holiday" />
+ </tfs:tagging>
+
+ <tfs:tagging>
+ <tfs:tag rdf:resource="airport" />
+ </tfs:tagging>
+
+ <!-- TODO applied tags -->
+ </rdf:Description>
+</rdf:RDF>
diff --git a/etc/test/events/.tagfs/tagfs.conf b/etc/test/events/.tagfs/tagfs.conf
new file mode 100644
index 0000000..b355dd9
--- /dev/null
+++ b/etc/test/events/.tagfs/tagfs.conf
@@ -0,0 +1,5 @@
+
+[global]
+
+enableValueFilters = true
+enableRootItemLinks = true
diff --git a/etc/test/events/2008-03-29 - holiday south korea/.tag b/etc/test/events/2008-03-29 - holiday south korea/.tag
new file mode 100644
index 0000000..2732670
--- /dev/null
+++ b/etc/test/events/2008-03-29 - holiday south korea/.tag
@@ -0,0 +1,5 @@
+holiday
+airport
+korea
+creator: Markus Pielmeier
+object: tube
diff --git a/etc/test/events/2008-03-29 - holiday south korea/00_IMG008.jpg b/etc/test/events/2008-03-29 - holiday south korea/00_IMG008.jpg
new file mode 100644
index 0000000..87cbd57
Binary files /dev/null and b/etc/test/events/2008-03-29 - holiday south korea/00_IMG008.jpg differ
diff --git a/etc/test/events/2008-11-11 - airport underground railway/.tag b/etc/test/events/2008-11-11 - airport underground railway/.tag
new file mode 100644
index 0000000..bbe1207
--- /dev/null
+++ b/etc/test/events/2008-11-11 - airport underground railway/.tag
@@ -0,0 +1,4 @@
+airport
+creator: Tama Yuri
+source: flickr
+object: tube
diff --git a/etc/test/events/2008-11-11 - airport underground railway/airport.jpg b/etc/test/events/2008-11-11 - airport underground railway/airport.jpg
new file mode 100644
index 0000000..c1b1b43
Binary files /dev/null and b/etc/test/events/2008-11-11 - airport underground railway/airport.jpg differ
diff --git a/etc/test/events/2008-12-25 - holiday india/.tag b/etc/test/events/2008-12-25 - holiday india/.tag
new file mode 100644
index 0000000..55769b0
--- /dev/null
+++ b/etc/test/events/2008-12-25 - holiday india/.tag
@@ -0,0 +1,6 @@
+holiday
+airport
+
+india
+creator: Markus Pielmeier
+empty test:
diff --git a/etc/test/events/2008-12-25 - holiday india/cimg1029.jpg b/etc/test/events/2008-12-25 - holiday india/cimg1029.jpg
new file mode 100644
index 0000000..08ace17
Binary files /dev/null and b/etc/test/events/2008-12-25 - holiday india/cimg1029.jpg differ
diff --git a/etc/test/events/2009-07-29 - no tags/emptydir b/etc/test/events/2009-07-29 - no tags/emptydir
new file mode 100644
index 0000000..e69de29
diff --git a/reports/2011-02-07_08_53_03/coverage.txt b/reports/2011-02-07_08_53_03/coverage.txt
new file mode 100644
index 0000000..f8e4ff8
--- /dev/null
+++ b/reports/2011-02-07_08_53_03/coverage.txt
@@ -0,0 +1,15 @@
+Name Stmts Exec Cover Missing
+----------------------------------------------------------------
+src/modules/tagfs/__init__ 1 0 0% 1
+src/modules/tagfs/cache 33 8 24% 21-35, 38-75, 92
+src/modules/tagfs/config 38 20 52% 21-33, 38, 56, 63-65, 69, 74, 78, 82, 86, 90
+src/modules/tagfs/item_access 182 115 63% 19-27, 38, 43-45, 48, 51-54, 88-90, 96-97, 101-102, 111, 119-120, 127-130, 142-143, 150, 157-158, 162-167, 170, 187-189, 192, 201-205, 208, 212-214, 217-219, 222, 234-238, 248, 259-260, 270-271, 275-276, 288-289, 293-294, 298-301, 308, 317-318, 330-331
+src/modules/tagfs/log 9 4 44% 20-25, 34
+src/modules/tagfs/log_config 15 0 0% 21-51
+src/modules/tagfs/node 346 220 63% 20-30, 42, 45-47, 57-61, 65-66, 70, 74-76, 80-82, 92-93, 100, 108-115, 122, 125, 134-135, 141-142, 152-154, 159, 166, 173-178, 185-186, 193, 196-209, 214, 230, 234-235, 245, 267-268, 272, 279-284, 295-296, 303, 306, 309-311, 316, 326, 330-334, 338, 348-350, 354, 371-373, 379, 383-384, 388, 413-415, 421, 425-426, 430, 450-452, 458, 462-463, 467, 492-494, 499, 503-504, 508, 525-532, 538, 545, 549, 553-554, 558, 573-575, 580, 584, 588, 603-605, 610-611, 615, 649-661
+src/modules/tagfs/tagfs 77 0 0% 28-173
+src/modules/tagfs/transient_dict 51 38 74% 20-24, 27, 30-32, 36, 43, 47, 54, 64, 73, 92
+src/modules/tagfs/view 78 40 51% 21-41, 46, 50, 82, 89, 100, 105-107, 111, 116-118, 122-133, 138-140, 144, 149-151, 155-164
+src/tagfs 14 0 0% 21-41
+----------------------------------------------------------------
+TOTAL 844 445 52%
diff --git a/reports/2011-02-07_10_10_40/coverage.txt b/reports/2011-02-07_10_10_40/coverage.txt
new file mode 100644
index 0000000..6836d37
--- /dev/null
+++ b/reports/2011-02-07_10_10_40/coverage.txt
@@ -0,0 +1,15 @@
+Name Stmts Exec Cover Missing
+----------------------------------------------------------------
+src/bin/tagfs 14 0 0% 21-41
+src/modules/tagfs/__init__ 1 0 0% 1
+src/modules/tagfs/cache 33 8 24% 21-35, 38-75, 92
+src/modules/tagfs/config 38 20 52% 21-33, 38, 56, 63-65, 69, 74, 78, 82, 86, 90
+src/modules/tagfs/item_access 182 115 63% 19-27, 38, 43-45, 48, 51-54, 88-90, 96-97, 101-102, 111, 119-120, 127-130, 142-143, 150, 157-158, 162-167, 170, 187-189, 192, 201-205, 208, 212-214, 217-219, 222, 234-238, 248, 259-260, 270-271, 275-276, 288-289, 293-294, 298-301, 308, 317-318, 330-331
+src/modules/tagfs/log 9 4 44% 20-25, 34
+src/modules/tagfs/log_config 15 0 0% 21-51
+src/modules/tagfs/node 346 220 63% 20-30, 42, 45-47, 57-61, 65-66, 70, 74-76, 80-82, 92-93, 100, 108-115, 122, 125, 134-135, 141-142, 152-154, 159, 166, 173-178, 185-186, 193, 196-209, 214, 230, 234-235, 245, 267-268, 272, 279-284, 295-296, 303, 306, 309-311, 316, 326, 330-334, 338, 348-350, 354, 371-373, 379, 383-384, 388, 413-415, 421, 425-426, 430, 450-452, 458, 462-463, 467, 492-494, 499, 503-504, 508, 525-532, 538, 545, 549, 553-554, 558, 573-575, 580, 584, 588, 603-605, 610-611, 615, 649-661
+src/modules/tagfs/tagfs 77 0 0% 28-173
+src/modules/tagfs/transient_dict 51 38 74% 20-24, 27, 30-32, 36, 43, 47, 54, 64, 73, 92
+src/modules/tagfs/view 78 40 51% 21-41, 46, 50, 82, 89, 100, 105-107, 111, 116-118, 122-133, 138-140, 144, 149-151, 155-164
+----------------------------------------------------------------
+TOTAL 844 445 52%
diff --git a/reports/2011-02-07_10_10_40/tests.txt b/reports/2011-02-07_10_10_40/tests.txt
new file mode 100644
index 0000000..dc36480
--- /dev/null
+++ b/reports/2011-02-07_10_10_40/tests.txt
@@ -0,0 +1,41 @@
+Tests an item with tags assigned to. ... ok
+Tests the results for items which got no tags assigned. ... ok
+Tests the results for items which don't exist. ... ok
+Tests AndFilter filter arguments at once. ... ok
+Tests TagValueFilter filter argument. ... ok
+Test the items property of ItemAccess. ... FAIL
+Test the items property of ItemAccess. ... ok
+Test the tag property of ItemAccess ... ok
+Test the untaggedItems property of ItemAccess ... FAIL
+testItemNodeInterface (test.test_all.TestItemNode) ... ok
+testRecurse (test.test_all.TestNodeRecurse) ... ok
+Tests the parseTagsFromFile(...) method. ... ok
+testRootNode (test.test_all.TestRootNode) ... ok
+testTagNode (test.test_all.TestTagNode) ... ok
+Makes sure that the events directory is accessible. ... ok
+testUntaggedItemsNodeInterface (test.test_all.TestUntaggedItemsNode) ... ok
+Test the forgett feature ... ok
+Test some simple get, set an in calls. ... ok
+Testing view interface ... ok
+testConfig (test.test_config.TestConfig) ... ok
+
+======================================================================
+FAIL: Test the items property of ItemAccess.
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "/home/marook/work/devel/projects/tagfs/src/test/test_all.py", line 147, in testItems
+ set(items))
+AssertionError: set(['2008-12-25 - holiday india', '2009-07-29 - no tags', '2008-03-29 - holiday south korea', '2008-11-11 - airport underground railway']) != set(['.tagfs', '2008-12-25 - holiday india', '2008-03-29 - holiday south korea', '2009-07-29 - no tags', '2008-11-11 - airport underground railway'])
+
+======================================================================
+FAIL: Test the untaggedItems property of ItemAccess
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "/home/marook/work/devel/projects/tagfs/src/test/test_all.py", line 184, in testUntaggedItems
+ set([item.name for item in untaggedItems]))
+AssertionError: set(['2009-07-29 - no tags']) != set(['.tagfs', '2009-07-29 - no tags'])
+
+----------------------------------------------------------------------
+Ran 20 tests in 1.370s
+
+FAILED (failures=2)
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..22ef5a9
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Peter Prohaska
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from distutils.core import setup, Command
+import sys
+import os
+from os.path import (
+ basename,
+ dirname,
+ abspath,
+ splitext,
+ join as pjoin
+)
+from glob import glob
+from unittest import TestLoader, TextTestRunner
+import re
+import datetime
+
+projectdir = dirname(abspath(__file__))
+reportdir = pjoin(projectdir, 'reports')
+
+srcdir = pjoin(projectdir, 'src')
+bindir = pjoin(srcdir, 'bin')
+moddir = pjoin(srcdir, 'modules')
+testdir = pjoin(srcdir, 'test')
+
+testdatadir = pjoin(projectdir, 'etc', 'test', 'events')
+testmntdir = pjoin(projectdir, 'mnt')
+
+class Report(object):
+
+ def __init__(self):
+ self.reportDateTime = datetime.datetime.utcnow()
+ self.reportDir = os.path.join(reportdir, self.reportDateTime.strftime('%Y-%m-%d_%H_%M_%S'))
+
+ # fails when dir already exists which is nice
+ os.makedirs(self.reportDir)
+
+ @property
+ def coverageReportFileName(self):
+ return os.path.join(self.reportDir, 'coverage.txt')
+
+ @property
+ def unitTestReportFileName(self):
+ return os.path.join(self.reportDir, 'tests.txt')
+
+def sourceFiles():
+ yield os.path.join(bindir, 'tagfs')
+
+ sourceFilePattern = re.compile('^.*[.]py$')
+ for root, dirs, files in os.walk(moddir):
+ for f in files:
+ if(not sourceFilePattern.match(f)):
+ continue
+
+ yield os.path.join(root, f)
+
+def printFile(fileName):
+ if(not os.path.exists(fileName)):
+ # TODO maybe we should not silently return?
+ return
+
+ with open(fileName, 'r') as f:
+ for line in f:
+ sys.stdout.write(line)
+
+class test(Command):
+ description = 'run tests'
+ user_options = []
+
+ def initialize_options(self):
+ self._cwd = os.getcwd()
+ self._verbosity = 2
+
+ def finalize_options(self): pass
+
+ def run(self):
+ report = Report()
+
+ testPyMatcher = re.compile('(.*/)?test[^/]*[.]py', re.IGNORECASE)
+
+ tests = ['.'.join([
+ basename(testdir), splitext(basename(f))[0]
+ ]) for f in glob(pjoin(
+ testdir, '*.py'
+ )) if testPyMatcher.match(f)]
+
+ print "..using:"
+ print " testdir:", testdir
+ print " testdatadir:", testdatadir
+ print " testmntdir:", testmntdir
+ print " tests:", tests
+ print " sys.path:", sys.path
+ print
+ sys.path.insert(0, moddir)
+ sys.path.insert(0, srcdir)
+
+ # configure logging
+ # TODO not sure how to enable this... it's a bit complicate to enable
+ # logging only for 'make mt' and disable it then for
+ # 'python setup.py test'. 'python setup.py test' is such a gabber...
+ #if 'DEBUG' in os.environ:
+ # from tagfs import log_config
+ # log_config.setUpLogging()
+
+ suite = TestLoader().loadTestsFromNames(tests)
+
+ with open(report.unitTestReportFileName, 'w') as testResultsFile:
+ r = TextTestRunner(stream = testResultsFile, verbosity = self._verbosity)
+
+ def runTests():
+ r.run(suite)
+
+ try:
+ import coverage
+
+ c = coverage.coverage()
+ c.start()
+ runTests()
+ c.stop()
+
+ with open(report.coverageReportFileName, 'w') as reportFile:
+ c.report([f for f in sourceFiles()], file = reportFile)
+
+ except ImportError:
+ print ''
+ print 'coverage module not found.'
+ print 'To view source coverage stats install http://nedbatchelder.com/code/coverage/'
+ print ''
+
+ runTests()
+
+ # TODO use two streams instead of printing files after writing
+ printFile(report.unitTestReportFileName)
+ printFile(report.coverageReportFileName)
+
+# Overrides default clean (which cleans from build runs)
+# This clean should probably be hooked into that somehow.
+class clean_pyc(Command):
+ description = 'remove *.pyc files from source directory'
+ user_options = []
+
+ def initialize_options(self):
+ self._delete = []
+ for cwd, dirs, files in os.walk(projectdir):
+ self._delete.extend(
+ pjoin(cwd, f) for f in files if f.endswith('.pyc')
+ )
+
+ def finalize_options(self): pass
+
+ def run(self):
+ for f in self._delete:
+ try:
+ os.unlink(f)
+ except OSError, e:
+ print "Strange '%s': %s" % (f, e)
+ # Could be a directory.
+ # Can we detect file in use errors or are they OSErrors
+ # as well?
+ # Shall we catch all?
+
+setup(
+ cmdclass = {
+ 'test': test,
+ 'clean_pyc': clean_pyc,
+ },
+ name = 'tagfs',
+ version = '0.1',
+ url = 'http://wiki.github.com/marook/tagfs',
+ description = '',
+ long_description = '',
+ author = 'Markus Pielmeier',
+ author_email = '[email protected]',
+ license = 'GPLv3',
+ download_url = 'http://github.com/marook/tagfs/downloads/tagfs_0.1-src.tar.bz2',
+ platforms = 'Linux',
+ requires = [],
+ classifiers = [
+ 'Development Status :: 2 - Pre-Alpha',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: GNU General Public License (GPL)',
+ 'Natural Language :: English',
+ 'Operating System :: POSIX :: Linux',
+ 'Programming Language :: Python',
+ 'Topic :: System :: Filesystems'
+ ],
+ data_files = [
+ (pjoin('share', 'doc', 'tagfs'), ['AUTHORS', 'COPYING', 'README'])
+ ],
+ # TODO maybe we should include src/bin/*?
+ scripts = [pjoin(bindir, 'tagfs')],
+ packages = ['tagfs'],
+ package_dir = {'': moddir},
+)
diff --git a/src/bin/tagfs b/src/bin/tagfs
new file mode 100755
index 0000000..c486ede
--- /dev/null
+++ b/src/bin/tagfs
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from tagfs import main, log_config
+
+if __name__ == '__main__':
+ from os import environ as env
+ if 'DEBUG' in env:
+ log_config.setUpLogging()
+
+ import logging
+ import sys
+
+ if 'PROFILE' in env:
+ logging.info('Enabled tagfs profiling')
+
+ import cProfile
+ import os
+
+ profileFile = os.path.join(os.getcwd(), 'tagfs.profile')
+
+ sys.exit(cProfile.run('main()', profileFile))
+ else:
+ sys.exit(main())
+
diff --git a/src/modules/tagfs/__init__.py b/src/modules/tagfs/__init__.py
new file mode 100644
index 0000000..d1b6175
--- /dev/null
+++ b/src/modules/tagfs/__init__.py
@@ -0,0 +1 @@
+from tagfs import *
diff --git a/src/modules/tagfs/cache.py b/src/modules/tagfs/cache.py
new file mode 100644
index 0000000..02bf18b
--- /dev/null
+++ b/src/modules/tagfs/cache.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import time
+import functools
+
+class NoCacheStrategy(object):
+ """This cache strategy reloads the cache on every call.
+ """
+
+ def isCacheValid(self, f, *args, **kwargs):
+ return False
+
+class NoReloadStrategy(object):
+ """This cache strategy never reloads the cache.
+ """
+
+ def isCacheValid(self, f, *args, **kwargs):
+ return True
+
+class TimeoutReloadStrategy(object):
+
+ def __init__(self, timeoutDuration = 10 * 60):
+ self.timeoutDuration = timeoutDuration
+
+ def isCacheValid(self, f, *args, **kwargs):
+ obj = args[0]
+
+ timestampFieldName = '__' + f.__name__ + 'Timestamp'
+ now = time.time()
+
+ if not hasattr(obj, timestampFieldName):
+ setattr(obj, timestampFieldName, now)
+
+ return False
+
+ lastTime = getattr(obj, timestampFieldName)
+
+ if now - lastTime < self.timeoutDuration:
+ return True
+
+ setattr(obj, timestampFieldName, now)
+
+ return False
+
+
+def cache(f, reloadStrategy = NoReloadStrategy()):
+ """This annotation is used to cache the result of a method call.
+
+ @param f: This is the wrapped function whose return value will be cached.
+ @param reloadStrategy: This is the reload strategy. Its isCacheValid method
+ returns True while the cached value is still valid; otherwise False.
+ @attention: The cache is never deleted. The first call initializes the
+ cache. The method's parameters are just passed to the called method. The cache
+ is not evaluating the parameters.
+ """
+
+ @functools.wraps(f)
+ def cacher(*args, **kwargs):
+ obj = args[0]
+
+ cacheMemberName = '__' + f.__name__ + 'Cache'
+
+ # the reload(...) call has to be first as we always have to call the
+ # method. not only when there is a cache member available in the object.
+ if (not reloadStrategy.isCacheValid(f, *args, **kwargs)) or (not hasattr(obj, cacheMemberName)):
+ value = f(*args, **kwargs)
+
+ setattr(obj, cacheMemberName, value)
+
+ return value
+
+ return getattr(obj, cacheMemberName)
+
+ return cacher
+
diff --git a/src/modules/tagfs/config.py b/src/modules/tagfs/config.py
new file mode 100644
index 0000000..4fc8903
--- /dev/null
+++ b/src/modules/tagfs/config.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import ConfigParser
+import logging
+import os
+
+class ConfigError(Exception):
+
+ pass
+
+class Config(object):
+
+ GLOBAL_SECTION = 'global'
+
+ def applyDefaults(self):
+ self.tagFileName = '.tag'
+ self.enableValueFilters = False
+ self.enableRootItemLinks = False
+
+ def __init__(self, itemsDir):
+ self._config = ConfigParser.SafeConfigParser({
+ 'tagFileName': '.tag',
+ 'enableValueFilters': False,
+ 'enableRootItemLinks': False,
+ })
+ self._config.add_section(Config.GLOBAL_SECTION)
+
+ self.itemsDir = itemsDir
+
+ self.applyDefaults()
+
+ parsedFiles = self._config.read([os.path.join(itemsDir, '.tagfs', 'tagfs.conf'),
+ os.path.expanduser(os.path.join('~', '.tagfs', 'tagfs.conf')),
+ os.path.join('/', 'etc', 'tagfs', 'tagfs.conf')])
+
+ logging.debug('Parsed the following config files: %s' % ', '.join(parsedFiles))
+
+ def _boolToStr(self, b):
+ if b is True:
+ return 'true'
+ elif b is False:
+ return 'false'
+ else:
+ # TODO make error more verbose
+ raise ConfigError()
+
+ @property
+ def tagFileName(self):
+ return self._config.get(Config.GLOBAL_SECTION, 'tagFileName')
+
+ @tagFileName.setter
+ def tagFileName(self, tagFileName):
+ self._config.set(Config.GLOBAL_SECTION, 'tagFileName', tagFileName)
+
+ # TODO implement generic approach to get/set boolean values
+ @property
+ def enableValueFilters(self):
+ return self._config.getboolean(Config.GLOBAL_SECTION, 'enableValueFilters')
+
+ @enableValueFilters.setter
+ def enableValueFilters(self, enableValueFilters):
+ self._config.set(Config.GLOBAL_SECTION, 'enableValueFilters', self._boolToStr(enableValueFilters))
+
+ @property
+ def enableRootItemLinks(self):
+ return self._config.getboolean(Config.GLOBAL_SECTION, 'enableRootItemLinks')
+
+ @enableRootItemLinks.setter
+ def enableRootItemLinks(self, enableRootItemLinks):
+ self._config.set(Config.GLOBAL_SECTION, 'enableRootItemLinks', self._boolToStr(enableRootItemLinks))
+
+ def __str__(self):
+ #return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['tagFileName', 'enableValueFilters', 'enableRootItemLinks']]) + ']'
+ return '[tagFileName: %s, enableValueFilters: %s, enableRootItemLinks: %s]' % (self.tagFileName, self.enableValueFilters, self.enableRootItemLinks)
diff --git a/src/modules/tagfs/item_access.py b/src/modules/tagfs/item_access.py
new file mode 100644
index 0000000..82f6cfa
--- /dev/null
+++ b/src/modules/tagfs/item_access.py
@@ -0,0 +1,336 @@
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+import os
+import time
+import traceback
+
+from cache import cache
+
+class Tag(object):
+
+ def __init__(self, value, context = None):
+ if context == None:
+ self.context = None
+ else:
+ self.context = context.strip()
+
+ self.value = value.strip()
+
+ if not self.context == None and len(self.context) == 0:
+ # we don't allow empty strings as they can't be represented as a
+ # directory very well
+ raise ValueError()
+
+ if len(self.value) == 0:
+ # we don't allow empty strings as they can't be represented as a
+ # directory very well
+ raise ValueError()
+
+ def __hash__(self):
+ return (self.context, self.value).__hash__()
+
+ def __eq__(self, other):
+ return self.value == other.value and self.context == other.context
+
+ def __repr__(self):
+ return '<Tag %s: %s>' % (self.context, self.value)
+
+def parseTagsFromFile(tagFileName):
+ """Parses the tags from the specified file.
+
+ @return: The parsed values are returned as a set containing Tag objects.
+ @see: Tag
+ """
+
+ tags = set()
+
+ tagFile = open(tagFileName, 'r')
+ try:
+ for rawTag in tagFile.readlines():
+ rawTag = rawTag.strip()
+
+ try:
+ if len(rawTag) == 0:
+ continue
+
+ tagTuple = rawTag.split(':', 1)
+
+ if len(tagTuple) == 1:
+ tagContext = None
+ tagValue = tagTuple[0]
+ else:
+ tagContext = tagTuple[0]
+ tagValue = tagTuple[1]
+
+ tag = Tag(tagValue, context = tagContext)
+
+ tags.add(tag)
+ except:
+ logging.warning('Skipping tagging \'%s\' from file \'%s\' as it can\'t be parsed\n%s.' % (rawTag, tagFileName, traceback.format_exc()))
+
+ finally:
+ tagFile.close()
+
+ return tags
+
+class Item(object):
+
+ def __init__(self, name, itemAccess):
+ self.name = name
+ self.itemAccess = itemAccess
+
+ # TODO register at file system to receive tag file change events.
+
+ @property
+ @cache
+ def itemDirectory(self):
+ return os.path.join(self.itemAccess.dataDirectory, self.name)
+
+ @property
+ @cache
+ def _tagFileName(self):
+ """Returns the name of the tag file for this item.
+ """
+
+ itemDirectory = self.itemDirectory
+
+ return os.path.join(itemDirectory, self.itemAccess.tagFileName)
+
+ def __parseTags(self):
+ tagFileName = self._tagFileName
+
+ if not os.path.exists(tagFileName):
+ return None
+
+ return parseTagsFromFile(tagFileName)
+
+ @property
+ @cache
+ def tagsCreationTime(self):
+ tagFileName = self._tagFileName
+
+ if not os.path.exists(tagFileName):
+ return None
+
+ return os.path.getctime(self._tagFileName)
+
+ @property
+ @cache
+ def tagsModificationTime(self):
+ """Returns the last time when the tags have been modified.
+ """
+
+ tagFileName = self._tagFileName
+
+ if not os.path.exists(tagFileName):
+ return None
+
+ return os.path.getmtime(tagFileName)
+
+ @property
+ @cache
+ def tags(self):
+ """Returns the tags as a list for this item.
+ """
+
+ return self.__parseTags()
+
+ def getTagsByContext(self, context):
+ for t in self.tags:
+ if context != t.context:
+ continue
+
+ yield t.value
+
+ @property
+ @cache
+ def tagged(self):
+ return os.path.exists(self._tagFileName)
+
+ def __repr__(self):
+ return '<Item %s>' % self.name
+
+class TagValueFilter(object):
+
+ def __init__(self, tagValue):
+ self.tagValue = tagValue
+
+ def filterItems(self, items):
+ droppedItems = set()
+
+ for item in items:
+ hasTagValue = False
+
+ for itemTag in item.tags:
+ if itemTag.value == self.tagValue:
+ hasTagValue = True
+
+ break
+
+ if not hasTagValue:
+ droppedItems.add(item)
+
+ items -= droppedItems
+
+class TagFilter(object):
+
+ def __init__(self, tag):
+ self.tag = tag
+
+ def filterItems(self, items):
+ droppedItems = set()
+
+ for item in items:
+ if not self.tag in item.tags:
+ droppedItems.add(item)
+
+ items -= droppedItems
+
+class AndFilter(object):
+ """Concatenates two filters with a logical 'and'.
+ """
+
+ def __init__(self, subFilters):
+ self.subFilters = subFilters
+
+ def filterItems(self, items):
+ for subFilter in self.subFilters:
+ subFilter.filterItems(items)
+
+class NoneFilter(object):
+
+ def filterItems(self, items):
+ pass
+
+class NotContextFilter(object):
+
+ def __init__(self, context):
+ self.context = context
+
+ def filterItems(self, items):
+ droppedItems = set()
+
+ for item in items:
+ for tag in item.tags:
+ if self.context == tag.context:
+ droppedItems.add(item)
+
+ break
+
+ items -= droppedItems
+
+class ItemAccess(object):
+ """This is the access point to the Items.
+ """
+
+ def __init__(self, dataDirectory, tagFileName):
+ self.dataDirectory = dataDirectory
+ self.tagFileName = tagFileName
+
+ self.__items = None
+ self.__tags = None
+ self.__taggedItems = None
+ self.__untaggedItems = None
+ self.parseTime = 0
+
+ def __parseItems(self):
+ items = {}
+
+ logging.debug('Start parsing items from dir: %s', self.dataDirectory)
+
+ for itemName in os.listdir(self.dataDirectory):
+ try:
+ item = Item(itemName, self)
+
+ items[itemName] = item
+
+ except IOError, (error, strerror):
+ logging.error('Can \'t read tags for item %s: %s',
+ itemName,
+ strerror)
+
+ logging.debug('Found %s items', len(items))
+
+ self.parseTime = time.time()
+
+ return items
+
+ @property
+ @cache
+ def items(self):
+ return self.__parseItems()
+
+ @property
+ @cache
+ def tags(self):
+ tags = set()
+
+ for item in self.items.itervalues():
+ if not item.tagged:
+ continue
+
+ tags = tags | item.tags
+
+ return tags
+
+ @property
+ @cache
+ def taggedItems(self):
+ return set([item for item in self.items.itervalues() if item.tagged])
+
+ @property
+ @cache
+ def untaggedItems(self):
+ return set([item for item in self.items.itervalues() if not item.tagged])
+
+ def getItemDirectory(self, item):
+ return os.path.join(self.dataDirectory, item)
+
+ def filterItems(self, filter):
+ resultItems = set([item for item in self.taggedItems])
+
+ filter.filterItems(resultItems)
+
+ return resultItems
+
+ def contextTags(self, context):
+ contextTags = set()
+
+ for tag in self.tags:
+ if tag.context == context:
+ contextTags.add(tag)
+
+ return contextTags
+
+ @property
+ @cache
+ def contexts(self):
+ contexts = set()
+
+ for tag in self.tags:
+ if tag.context == None:
+ continue
+
+ contexts.add(tag.context)
+
+ return contexts
+
+ def __str__(self):
+ return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['dataDirectory', 'tagFileName']]) + ']'
diff --git a/src/modules/tagfs/log.py b/src/modules/tagfs/log.py
new file mode 100644
index 0000000..6581584
--- /dev/null
+++ b/src/modules/tagfs/log.py
@@ -0,0 +1,60 @@
+#
+# Copyright 2010, 2011 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import functools
+import logging
+
+def getLogger(*args):
+ o = args[0]
+
+ logger = logging.getLogger(o.__class__.__name__)
+
+ return logger
+
+
+def logCall(f):
+
+ @functools.wraps(f)
+ def logCall(*args, **kwargs):
+ logger = getLogger(*args)
+
+ if(logger.isEnabledFor(logging.DEBUG)):
+ logger.debug(f.__name__ + '(' + (', '.join('\'' + str(a) + '\'' for a in args[1:])) + ')')
+
+ return f(*args, **kwargs)
+
+ return logCall
+
+def logException(f):
+
+ @functools.wraps(f)
+ def logException(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except:
+ logger = getLogger(*args)
+
+ if(logger.isEnabledFor(logging.ERROR)):
+ import traceback
+
+ logger.error(traceback.format_exc())
+
+ raise
+
+ return logException
diff --git a/src/modules/tagfs/log_config.py b/src/modules/tagfs/log_config.py
new file mode 100644
index 0000000..ff159ae
--- /dev/null
+++ b/src/modules/tagfs/log_config.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import logging
+import sys
+
+def setUpLogging():
+ def exceptionCallback(eType, eValue, eTraceBack):
+ import cgitb
+
+ txt = cgitb.text((eType, eValue, eTraceBack))
+
+ logging.critical(txt)
+
+ # sys.exit(1)
+
+ # configure file logger
+ logging.basicConfig(level = logging.DEBUG,
+ format = '%(asctime)s %(levelname)s %(message)s',
+ filename = '/tmp/tagfs.log',
+ filemode = 'a')
+
+ # configure console logger
+ consoleHandler = logging.StreamHandler(sys.stdout)
+ consoleHandler.setLevel(logging.DEBUG)
+
+ consoleFormatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+ consoleHandler.setFormatter(consoleFormatter)
+ logging.getLogger().addHandler(consoleHandler)
+
+ # replace default exception handler
+ sys.excepthook = exceptionCallback
+
+ logging.debug('Logging and exception handling has been set up')
diff --git a/src/modules/tagfs/node.py b/src/modules/tagfs/node.py
new file mode 100644
index 0000000..20f2a8b
--- /dev/null
+++ b/src/modules/tagfs/node.py
@@ -0,0 +1,663 @@
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import fuse
+import logging
+import stat
+import time
+
+from cache import cache
+import item_access
+
+class MyStat(fuse.Stat):
+
+ def __init__(self):
+ self.st_mode = 0
+ self.st_ino = 0
+ self.st_dev = 0
+ self.st_nlink = 0
+ self.st_uid = 0
+ self.st_gid = 0
+ self.st_size = 0
+ self.st_atime = 0
+ self.st_mtime = 0
+ self.st_ctime = 0
+
+ def __str__(self):
+ return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in self.__dict__]) + ']'
+
+class Node(object):
+
+ def _addSubNodes(self, subNodes, nodeNames, nodes):
+ """Adds the supplied nodes to the sub nodes.
+
+ @param subNodes: This dict is extended with the nodes.
+ @param nodeNames: This is a logical name for the added nodes. It's just
+ used for logging.
+ @param nodes: This list contains the added nodes.
+ """
+ for node in nodes:
+ if node.name in subNodes:
+ logging.debug('%s is shadowed by %s',
+ nodeNames,
+ subNodes[node.name])
+
+ continue
+
+ subNodes[node.name] = node
+
+ @property
+ @cache
+ def subNodes(self):
+ return [node for name, node in self._getSubNodesDict().iteritems()]
+
+ def getSubNode(self, pathElement):
+ subNodesDict = self._getSubNodesDict()
+
+ if not pathElement in subNodesDict:
+ logging.warning('Unknown path element requested ' + pathElement)
+
+ return None
+
+ return subNodesDict[pathElement]
+
+class DirectoryNode(Node):
+
+ def getattr(self, path):
+ st = MyStat()
+ st.st_mode = stat.S_IFDIR | 0555
+ st.st_nlink = 2
+ st.st_mtime = self.itemAccess.parseTime
+ st.st_ctime = st.st_mtime
+ st.st_atime = st.st_mtime
+
+ return st
+
+ @property
+ @cache
+ def direntry(self):
+ e = fuse.Direntry(self.name)
+ e.type = stat.S_IFDIR
+
+ return e
+
+ def readdir(self, path, offset):
+ yield fuse.Direntry('.')
+ yield fuse.Direntry('..')
+
+ for n in self.subNodes:
+ yield fuse.Direntry(n.name)
+
+
+class ContainerNode(DirectoryNode):
+ """Abstract base node for nodes which contain items and or tags.
+
+ Facts about ContainerNodes:
+ * container nodes are always represented as directories
+ """
+
+ def _addSubContainerNodes(self, subNodes, nodeNames, containerNodes):
+ items = self.items
+
+ self._addSubNodes(subNodes,
+ nodeNames,
+ [n for n in containerNodes if n.required(items)])
+
+ def __init__(self, parentNode):
+ self.parentNode = parentNode
+
+ @cache
+ # DANGER! we can only cache this method because we know that the items from
+ # our parent will never ever change!
+ def required(self, items):
+ selfItems = self.items
+ selfItemsLen = len(self.items)
+
+ return not selfItemsLen == 0 and not selfItemsLen == len(items)
+
+ @property
+ @cache
+ def filter(self):
+ parentFilter = self.parentNode.filter
+
+ return item_access.AndFilter([parentFilter, self._myFilter])
+
+ @property
+ @cache
+ def items(self):
+ items = self.itemAccess.filterItems(self.filter)
+
+ logging.debug('Items request for %s: %s',
+ self,
+ [item.name for item in items])
+
+ return items
+
+class ItemNode(Node):
+
+ def __init__(self, item, itemAccess, prefix = None):
+ self.item = item
+ self.itemAccess = itemAccess
+ self.prefix = prefix
+
+ @property
+ def name(self):
+ if not self.prefix:
+ return self.item.name
+
+ return self.prefix + self.item.name
+
+ @property
+ def subNodes(self):
+ """Returns always [] because we don't have sub nodes.
+ """
+
+ return []
+
+ def getSubNode(self, pathElement):
+ """Returns always None as item nodes don't have sub nodes.
+ """
+ return None
+
+ def getattr(self, path):
+ st = MyStat()
+ st.st_mode = stat.S_IFLNK | 0444
+ st.st_nlink = 2
+
+ return st
+
+ @property
+ @cache
+ def direntry(self):
+ e = fuse.Direntry(self.name)
+ e.type = stat.S_IFLNK
+
+ return e
+
+ def readlink(self, path):
+ return self.item.itemDirectory
+
+ def __repr__(self):
+ return '<ItemNode %s>' % self.name
+
+class CsvExportNode(Node):
+
+ COL_SEPARATOR = ';'
+
+ TEXT_CHAR = '"'
+
+ ROW_SEPARATOR = '\n'
+
+ TAG_VALUE_SEPARATOR = '\n'
+
+ def __init__(self, parentNode, itemAccess):
+ self.name = 'export.csv'
+ self.parentNode = parentNode
+ self.itemAccess = itemAccess
+
+ def formatRow(self, row):
+ first = True
+
+ for col in row:
+ if first:
+ first = False
+ else:
+ yield CsvExportNode.COL_SEPARATOR
+
+ # TODO escape TEXT_CHAR in col string
+ yield CsvExportNode.TEXT_CHAR
+ yield str(col)
+ yield CsvExportNode.TEXT_CHAR
+
+ yield CsvExportNode.ROW_SEPARATOR
+
+ @property
+ def filter(self):
+ return self.parentNode.filter
+
+ @property
+ @cache
+ def items(self):
+ items = self.itemAccess.filterItems(self.filter)
+
+ logging.debug('Items request for %s: %s',
+ self,
+ [item.name for item in items])
+
+ return items
+
+ @property
+ def _content(self):
+ contexts = set()
+ for i in self.items:
+ for t in i.tags:
+ contexts.add(t.context)
+
+ headline = ['name', ]
+ for c in contexts:
+ headline.append(c)
+ for s in self.formatRow(headline):
+ yield s
+
+ for i in self.items:
+ row = [i.name, ]
+
+ for c in contexts:
+ row.append(CsvExportNode.TAG_VALUE_SEPARATOR.join(i.getTagsByContext(c)))
+
+ for s in self.formatRow(row):
+ yield s
+
+ @property
+ @cache
+ def content(self):
+ return ''.join(self._content)
+
+ @property
+ def subNodes(self):
+ """Returns always [] because we don't have sub nodes.
+ """
+
+ return []
+
+ def getSubNode(self, pathElement):
+ """Returns always None as file nodes don't have sub nodes.
+ """
+ return None
+
+ def getattr(self, path):
+ a = MyStat()
+ a.st_mode = stat.S_IFREG | 0444
+ a.st_nlink = 2
+
+ # TODO replace with memory saving size calculation
+ import array
+ a.st_size = len(array.array('c', self.content))
+
+ return a
+
+ @property
+ @cache
+ def direntry(self):
+ e = fuse.Direntry(self.name)
+ e.type = stat.S_IFREG
+
+ return e
+
+ def open(self, path, flags):
+ return
+
+ def read(self, path, size, offset):
+ return self.content[offset:offset + size]
+
+class ExportDirectoryNode(DirectoryNode):
+
+ def __init__(self, name, parentNode, itemAccess):
+ self.name = name
+ self.parentNode = parentNode
+ self.itemAccess = itemAccess
+
+ @cache
+ def _getSubNodesDict(self):
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'csv',
+ [CsvExportNode(self, self.itemAccess), ])
+
+ return subNodes
+
+ @property
+ def filter(self):
+ return self.parentNode.filter
+
+class UntaggedItemsNode(DirectoryNode):
+ """Represents a node which contains not tagged items.
+ """
+
+ def __init__(self, name, itemAccess):
+ self.name = name
+ self.itemAccess = itemAccess
+
+ @cache
+ def _getSubNodesDict(self):
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess) for item in self.itemAccess.untaggedItems])
+
+ return subNodes
+
+class ReviewItemsNode(DirectoryNode):
+
+ def __init__(self, name, itemAccess):
+ self.name = name
+ self.itemAccess = itemAccess
+
+ @cache
+ def _getSubNodesDict(self):
+ items = [i for i in self.itemAccess.items.itervalues() if i.tagged]
+ items.sort(key = lambda i: i.tagsModificationTime)
+ prefixWidth = len(str(len(items)))
+
+ def prefix(i):
+ return str(i).rjust(prefixWidth, '0')
+
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess, prefix = prefix(i) + ' ') for i, item in enumerate(items)])
+
+ return subNodes
+
+class TagValueNode(ContainerNode):
+
+ def __init__(self, parentNode, tagValue, itemAccess, config):
+ super(TagValueNode, self).__init__(parentNode)
+ self.tagValue = tagValue
+ self.itemAccess = itemAccess
+ self.config = config
+
+ @property
+ def name(self):
+ return self.tagValue
+
+ @property
+ @cache
+ def _myFilter(self):
+ return item_access.TagValueFilter(self.tagValue)
+
+ @cache
+ def _getSubNodesDict(self):
+ items = self.items
+
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'export',
+ [ExportDirectoryNode('.export', self, self.itemAccess), ])
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess) for item in items])
+ self._addSubContainerNodes(subNodes,
+ 'contexts',
+ [ContextContainerNode(self, context, self.itemAccess, self.config) for context in self.itemAccess.contexts])
+
+ if self.config.enableValueFilters:
+ self._addSubContainerNodes(subNodes,
+ 'tags',
+ [TagValueNode(self, tag.value, self.itemAccess, self.config) for tag in self.itemAccess.tags])
+
+ logging.debug('Sub nodes for tag value %s: %s', self.tagValue, subNodes)
+
+ return subNodes
+
+class TagNode(ContainerNode):
+
+ def __init__(self, parentNode, tag, itemAccess, config):
+ super(TagNode, self).__init__(parentNode)
+ self.tag = tag
+ self.itemAccess = itemAccess
+ self.config = config
+
+ @property
+ def name(self):
+ return self.tag.value
+
+ @property
+ @cache
+ def _myFilter(self):
+ return item_access.TagFilter(self.tag)
+
+ @cache
+ def _getSubNodesDict(self):
+ items = self.items
+
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'export',
+ [ExportDirectoryNode('.export', self, self.itemAccess), ])
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess) for item in items])
+ self._addSubContainerNodes(subNodes,
+ 'tags',
+ [TagValueNode(self, tag.value, self.itemAccess, self.config) for tag in self.itemAccess.tags])
+
+ logging.debug('Sub nodes for %s: %s', self.tag, subNodes)
+
+ return subNodes
+
+class ContextTagNode(ContainerNode):
+
+ def __init__(self, parentNode, tag, itemAccess, config):
+ super(ContextTagNode, self).__init__(parentNode)
+ self.tag = tag
+ self.itemAccess = itemAccess
+ self.config = config
+
+ @property
+ def name(self):
+ return self.tag.value
+
+ @property
+ @cache
+ def _myFilter(self):
+ return item_access.TagFilter(self.tag)
+
+ @cache
+ def _getSubNodesDict(self):
+ items = self.items
+
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'export',
+ [ExportDirectoryNode('.export', self, self.itemAccess), ])
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess) for item in items])
+ self._addSubContainerNodes(subNodes,
+ 'contexts',
+ [ContextContainerNode(self, context, self.itemAccess, self.config) for context in self.itemAccess.contexts])
+
+ if self.config.enableValueFilters:
+ self._addSubContainerNodes(subNodes,
+ 'tags',
+ [TagNode(self, tag, self.itemAccess, self.config) for tag in self.itemAccess.tags])
+
+ logging.debug('Sub nodes for %s: %s', self, subNodes)
+
+ return subNodes
+
+class ContextNotSetNode(ContainerNode):
+
+ def __init__(self, parentNode, context, itemAccess):
+ super(ContextNotSetNode, self).__init__(parentNode)
+ self.context = context
+ self.itemAccess = itemAccess
+
+ @property
+ def name(self):
+ return '.unset'
+
+ @property
+ @cache
+ def _myFilter(self):
+ return item_access.NotContextFilter(self.context)
+
+ @cache
+ def _getSubNodesDict(self):
+ items = self.items
+
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'export',
+ [ExportDirectoryNode('.export', self, self.itemAccess), ])
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess) for item in items])
+
+ logging.debug('Sub nodes for %s: %s', self, subNodes)
+
+ return subNodes
+
+class ContextContainerNode(ContainerNode):
+ """Contains directories for the target context's values.
+
+ @attention: This node can only be contained by nodes which got an items
+ property. Reason is parentNode.items call in contextTagNodes(self) method.
+ """
+
+ def __init__(self, parentNode, context, itemAccess, config):
+ super(ContextContainerNode, self).__init__(parentNode)
+ self.context = context
+ self.itemAccess = itemAccess
+ self.config = config
+
+ def required(self, items):
+ for tagNode in self.contextTagNodes:
+ if tagNode.required(items):
+ return True
+
+ return False
+
+ @property
+ def name(self):
+ return self.context
+
+ @property
+ def filter(self):
+ return self.parentNode.filter
+
+ @property
+ @cache
+ def contextTagNodes(self):
+ return [ContextTagNode(self, tag, self.itemAccess, self.config) for tag in self.itemAccess.contextTags(self.context)]
+
+ @cache
+ def _getSubNodesDict(self):
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'not_context',
+ [ContextNotSetNode(self, self.context, self.itemAccess),])
+ self._addSubNodes(subNodes,
+ 'tags',
+ self.contextTagNodes)
+
+ logging.debug('Sub nodes for %s: %s', self, subNodes)
+
+ return subNodes
+
+class TagValueContainerNode(ContainerNode):
+
+ def __init__(self, parentNode, itemAccess, config):
+ super(TagValueContainerNode, self).__init__(parentNode)
+ self.itemAccess = itemAccess
+ self.config = config
+
+ @property
+ def name(self):
+ return '.any_context'
+
+ @property
+ def filter(self):
+ return self.parentNode.filter
+
+ @cache
+ def _getSubNodesDict(self):
+ items = self.items
+
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'export',
+ [ExportDirectoryNode('.export', self, self.itemAccess), ])
+ self._addSubContainerNodes(subNodes,
+ 'tags',
+ [TagValueNode(self, tag.value, self.itemAccess, self.config) for tag in self.itemAccess.tags])
+
+ return subNodes
+
+class RootNode(DirectoryNode):
+
+ def __init__(self, itemAccess, config):
+ self.itemAccess = itemAccess
+ self.config = config
+ self.filterTags = []
+
+ @property
+ @cache
+ def filter(self):
+ return item_access.NoneFilter()
+
+ @cache
+ def _getSubNodesDict(self):
+ subNodes = {}
+
+ self._addSubNodes(subNodes,
+ 'untagged items',
+ [UntaggedItemsNode('.untagged', self.itemAccess), ])
+ self._addSubNodes(subNodes,
+ 'review items',
+ [ReviewItemsNode('.review', self.itemAccess), ])
+ self._addSubNodes(subNodes,
+ 'export',
+ [ExportDirectoryNode('.export', self, self.itemAccess), ])
+
+ if self.config.enableRootItemLinks:
+ self._addSubNodes(subNodes,
+ 'items',
+ [ItemNode(item, self.itemAccess) for item in self.itemAccess.items.itervalues()])
+
+ self._addSubNodes(subNodes,
+ 'contexts',
+ [ContextContainerNode(self, context, self.itemAccess, self.config) for context in self.itemAccess.contexts])
+
+ if self.config.enableValueFilters:
+ self._addSubNodes(subNodes,
+ 'tags',
+ [TagValueNode(self, tag.value, self.itemAccess, self.config) for tag in self.itemAccess.tags])
+ else:
+ self._addSubNodes(subNodes,
+ 'tags container',
+ [TagValueContainerNode(self, self.itemAccess, self.config), ])
+
+ return subNodes
+
+ @property
+ @cache
+ def attr(self):
+ st = MyStat()
+ st.st_mode = stat.S_IFDIR | 0555
+ st.st_nlink = 2
+ st.st_mtime = time.time()
+ st.st_ctime = st.st_mtime
+ st.st_atime = st.st_mtime
+
+ return st
+
+ @property
+ def direntry(self):
+ return None
diff --git a/src/modules/tagfs/tagfs.py b/src/modules/tagfs/tagfs.py
new file mode 100644
index 0000000..21790f7
--- /dev/null
+++ b/src/modules/tagfs/tagfs.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+#
+# = tag fs =
+# == glossary ==
+# * item: An item is a directory in the item container directory. Items can be
+# tagged using a tag file.
+# * tag: A tag is a text string which can be assigned to an item. Tags can
+# consist of any character except newlines.
+
+import os
+import stat
+import errno
+import exceptions
+import time
+import functools
+import logging
+
+import fuse
+if not hasattr(fuse, '__version__'):
+ raise RuntimeError, \
+ "your fuse-py doesn't know of fuse.__version__, probably it's too old."
+fuse.fuse_python_api = (0, 2)
+
+import view
+from cache import cache
+import item_access
+import node
+import config
+from log import logException
+
+class TagFS(fuse.Fuse):
+
+ def __init__(self, initwd, *args, **kw):
+ fuse.Fuse.__init__(self, *args, **kw)
+
+ self._initwd = initwd
+ self._itemsRoot = None
+
+ # TODO change command line arguments structure
+ # goal: tagfs <items dir> <mount dir>
+ self.parser.add_option('-i',
+ '--items-dir',
+ dest = 'itemsDir',
+ help = 'items directory',
+ metavar = 'dir')
+ self.parser.add_option('-t',
+ '--tag-file',
+ dest = 'tagFileName',
+ help = 'tag file name',
+ metavar = 'file',
+ default = None)
+ self.parser.add_option('--value-filter',
+ action = 'store_true',
+ dest = 'enableValueFilters',
+ help = 'Displays value filter directories on toplevel instead of only context entries',
+ default = None)
+ self.parser.add_option('--root-items',
+ action = 'store_true',
+ dest = 'enableRootItemLinks',
+ help = 'Display item links in tagfs root directory.',
+ default = None)
+
+ def getItemAccess(self):
+ # Maybe we should move the parser run from main here.
+ # Or we should at least check if it was run once...
+ opts, args = self.cmdline
+
+ # Maybe we should add expand user? Maybe even vars???
+ assert opts.itemsDir != None and opts.itemsDir != ''
+ itemsRoot = os.path.normpath(
+ os.path.join(self._initwd, opts.itemsDir))
+
+ # TODO rel https://github.com/marook/tagfs/issues#issue/2
+ # Ensure that mount-point and items dir are disjoined.
+ # Something along
+ # assert not os.path.normpath(itemsDir).startswith(itemsRoot)
+
+ # try/except here?
+ try:
+ return item_access.ItemAccess(itemsRoot, self.config.tagFileName)
+ except OSError, e:
+ logging.error("Can't create item access from items directory %s. Reason: %s",
+ itemsRoot, str(e.strerror))
+ raise
+
+ @property
+ @cache
+ def config(self):
+ opts, args = self.cmdline
+
+ c = config.Config(os.path.normpath(os.path.join(self._initwd, opts.itemsDir)))
+
+ if opts.tagFileName:
+ c.tagFileName = opts.tagFileName
+
+ if opts.enableValueFilters:
+ c.enableValueFilters = opts.enableValueFilters
+
+ if opts.enableRootItemLinks:
+ c.enableRootItemLinks = opts.enableRootItemLinks
+
+ logging.debug('Using configuration %s' % c)
+
+ return c
+
+ @property
+ @cache
+ def view(self):
+ itemAccess = self.getItemAccess()
+
+ return view.View(itemAccess, self.config)
+
+ @logException
+ def getattr(self, path):
+ return self.view.getattr(path)
+
+ @logException
+ def readdir(self, path, offset):
+ return self.view.readdir(path, offset)
+
+ @logException
+ def readlink(self, path):
+ return self.view.readlink(path)
+
+ @logException
+ def open(self, path, flags):
+ return self.view.open(path, flags)
+
+ @logException
+ def read(self, path, size, offset):
+ return self.view.read(path, size, offset)
+
+ @logException
+ def write(self, path, data, pos):
+ return self.view.write(path, data, pos)
+
+ @logException
+ def symlink(self, path, linkPath):
+ return self.view.symlink(path, linkPath)
+
+def main():
+ fs = TagFS(os.getcwd(),
+ version = "%prog " + fuse.__version__,
+ dash_s_do = 'setsingle')
+
+ fs.parse(errex = 1)
+ opts, args = fs.cmdline
+
+ if opts.itemsDir == None:
+ fs.parser.print_help()
+ # items dir should probably be an arg, not an option.
+ print "Error: Missing items directory option."
+ # Quickfix rel https://github.com/marook/tagfs/issues/#issue/3
+ # FIXME: since we run main via sys.exit(main()), this should
+ # probably be handled via some return code.
+ import sys
+ sys.exit()
+
+ return fs.main()
+
+if __name__ == '__main__':
+ import sys
+ sys.exit(main())
diff --git a/src/modules/tagfs/transient_dict.py b/src/modules/tagfs/transient_dict.py
new file mode 100644
index 0000000..7d20461
--- /dev/null
+++ b/src/modules/tagfs/transient_dict.py
@@ -0,0 +1,93 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+class TransientDict(object):
+
+ class Version(object):
+
+ def __init__(self, key):
+ self.key = key
+
+ def touch(self, version):
+ self.version = version
+
+ class Value(object):
+
+ def __init__(self, value, version):
+ self.value = value
+ self.version = version
+
+ def __init__(self, averageCapacity):
+ self.averageCapacity = averageCapacity
+ self.nextVersion = 0
+ self.setCounter = 0
+ self.data = {}
+ self.versions = []
+
+ def __getitem__(self, k):
+ v = self.data[k]
+
+ if not v:
+ return None
+
+ v.version.touch(self.nextVersion)
+ self.nextVersion += 1
+
+ return v.value
+
+ def _cleanUpCache(self):
+ if len(self.data) < self.averageCapacity:
+ return
+
+ def versionCmp(a, b):
+ if a.version < b.version:
+ return 1
+ if b.version < a.version:
+ return -1
+
+ return 0
+
+ self.versions.sort(versionCmp)
+
+ while len(self.versions) > self.averageCapacity:
+ version = self.versions.pop()
+
+ self.data.pop(version.key)
+
+ def __setitem__(self, k, v):
+ if k in self.data:
+ value = self.data[k]
+
+ value.value = v
+ else:
+ self.setCounter += 1
+ if self.setCounter % self.averageCapacity == 0:
+ self._cleanUpCache()
+
+ version = TransientDict.Version(k)
+ self.versions.append(version)
+
+ value = TransientDict.Value(v, version)
+ self.data[k] = value
+
+ value.version.touch(self.nextVersion)
+ self.nextVersion += 1
+
+ def __contains__(self, k):
+ return k in self.data
diff --git a/src/modules/tagfs/view.py b/src/modules/tagfs/view.py
new file mode 100644
index 0000000..893edf6
--- /dev/null
+++ b/src/modules/tagfs/view.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from cache import cache
+import errno
+import log
+import logging
+import node
+import os
+from transient_dict import TransientDict
+
+class View(object):
+ """Abstraction layer from fuse API.
+
+ This class is an abstraction layer from the fuse API. This should ease
+ writing test cases for the file system.
+ """
+
+ DEFAULT_NODES = {
+ # directory icons for rox filer
+ '.DirIcon': None,
+
+ # launch script for rox filer application directories
+ 'AppRun': None
+ }
+
+ def __init__(self, itemAccess, config):
+ self.itemAccess = itemAccess
+ self.config = config
+ self._nodeCache = TransientDict(100)
+
+ @cache
+ def getRootNode(self):
+ return node.RootNode(self.itemAccess, self.config)
+
+ def getNode(self, path):
+ if path in self._nodeCache:
+ # simple path name based caching is implemented here
+
+ logging.debug('tagfs _nodeCache hit')
+
+ return self._nodeCache[path]
+
+ # ps contains the path segments
+ ps = [x for x in os.path.normpath(path).split(os.sep) if x != '']
+
+ psLen = len(ps)
+ if psLen > 0:
+ lastSegment = ps[psLen - 1]
+
+ if lastSegment in View.DEFAULT_NODES:
+ logging.debug('Using default node for path ' + path)
+
+ return View.DEFAULT_NODES[lastSegment]
+
+ n = self.getRootNode()
+
+ for e in path.split('/')[1:]:
+ if e == '':
+ continue
+
+ n = n.getSubNode(e)
+
+ if not n:
+ # it seems like we are trying to fetch a node for an illegal
+ # path
+
+ break
+
+ logging.debug('tagfs _nodeCache miss')
+ self._nodeCache[path] = n
+
+ return n
+
+ @log.logCall
+ def getattr(self, path):
+ n = self.getNode(path)
+
+ if not n:
+ logging.debug('Try to read attributes from not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.getattr(path)
+
+ @log.logCall
+ def readdir(self, path, offset):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to read not existing directory: ' + path)
+
+ return -errno.ENOENT
+
+ return n.readdir(path, offset)
+
+ @log.logCall
+ def readlink(self, path):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to read not existing link from node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.readlink(path)
+
+ @log.logCall
+ def symlink(self, path, linkPath):
+ linkPathSegs = linkPath.split('/')
+
+ n = self.getNode('/'.join(linkPathSegs[0:len(linkPathSegs) - 2]))
+
+ if not n:
+ return -errno.ENOENT
+
+ return n.symlink(path, linkPath)
+
+ @log.logCall
+ def open(self, path, flags):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to open not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.open(path, flags)
+
+ @log.logCall
+ def read(self, path, len, offset):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to read from not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.read(path, len, offset)
+
+ @log.logCall
+ def write(self, path, data, pos):
+ n = self.getNode(path)
+
+ if not n:
+ logging.warn('Try to write to not existing node: ' + path)
+
+ return -errno.ENOENT
+
+ return n.write(path, data, pos)
+
diff --git a/src/test/__init__.py b/src/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/test/env.py b/src/test/env.py
new file mode 100644
index 0000000..11bcae6
--- /dev/null
+++ b/src/test/env.py
@@ -0,0 +1,41 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import sys
+
+def setupenv():
+ from os.path import dirname, abspath, exists, join as pjoin, split as psplit
+
+ global eventsdir
+ global projectdir
+
+ # TODO this code is duplicate with path generation from setup.py
+ testdir = dirname(abspath(__file__))
+ projectdir = pjoin(testdir, '..', '..')
+ srcdir = pjoin(projectdir, 'src')
+ moddir = pjoin(srcdir, 'modules')
+ eventsdir = pjoin(projectdir, 'etc', 'test', 'events')
+
+ for x in (testdir, eventsdir, projectdir):
+ assert exists(x), "Directory not found: %s" % x
+
+ sys.path.insert(0, testdir)
+ sys.path.insert(0, moddir)
+
+setupenv()
diff --git a/src/test/mock_config.py b/src/test/mock_config.py
new file mode 100644
index 0000000..fb15377
--- /dev/null
+++ b/src/test/mock_config.py
@@ -0,0 +1,28 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+class Config(object):
+
+ def __init__(self, tagFileName, enableValueFilters, enableRootItemLinks):
+ self.tagFileName = tagFileName
+ self.enableValueFilters = enableValueFilters
+ self.enableRootItemLinks = enableRootItemLinks
+
+ def __str__(self):
+ return '[' + ', '.join([field + ': ' + str(self.__dict__[field]) for field in ['tagFileName', 'enableValueFilters', 'enableRootItemLinks']]) + ']'
diff --git a/src/test/setup.py b/src/test/setup.py
new file mode 100644
index 0000000..6d31bc8
--- /dev/null
+++ b/src/test/setup.py
@@ -0,0 +1,27 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import env
+import os
+
+# this is a list with paths to valid item directories
+validItemDirectories = [
+ os.path.join(env.projectdir, 'etc', 'test', 'events'),
+ os.path.join(env.projectdir, 'etc', 'demo', 'events')
+]
diff --git a/src/test/test_all.py b/src/test/test_all.py
new file mode 100644
index 0000000..92ad9dc
--- /dev/null
+++ b/src/test/test_all.py
@@ -0,0 +1,341 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import logging
+import unittest
+import os
+import env
+
+import tagfs
+import tagfs.item_access as item_access
+import tagfs.node as node
+
+import mock_config
+
+def createTestItemAccess():
+ return item_access.ItemAccess(env.eventsdir, '.tag')
+
+class TestTestCaseEnvironment(unittest.TestCase):
+ """Makes sure the environment for the test case is set up right.
+ """
+
+ def testCwd(self):
+ """Makes sure that the events directory is accessible.
+ """
+
+ import os
+
+ self.assertTrue(os.path.exists(env.eventsdir))
+
+class TestParseTagsFromFile(unittest.TestCase):
+
+ def testParse(self):
+ """Tests the parseTagsFromFile(...) method.
+ """
+
+ tagFileName = os.path.join(env.eventsdir,
+ '2008-03-29 - holiday south korea',
+ '.tag')
+
+ tags = item_access.parseTagsFromFile(tagFileName)
+
+ expectedTags = set([item_access.Tag('holiday'),
+ item_access.Tag('airport'),
+ item_access.Tag('korea'),
+ item_access.Tag('tube', context = 'object'),
+ item_access.Tag('Markus Pielmeier', context = 'creator')])
+ self.assertEqual(expectedTags, tags)
+
+
+class TestItem(unittest.TestCase):
+ """This is a test case for the Item class.
+ """
+
+ class MyItemAccess(object):
+ """This is a mock object for the ItemAccess class.
+ """
+
+ def __init__(self):
+ self.dataDirectory = env.eventsdir
+ self.tagFileName = '.tag'
+
+ def setUp(self):
+ unittest.TestCase.setUp(self)
+
+ self.itemAccess = TestItem.MyItemAccess()
+
+ def testItemNotExists(self):
+ """Tests the results for items which don't exist.
+ """
+
+ item = item_access.Item('no such item', self.itemAccess)
+
+ self.assertFalse(os.path.isdir(item.itemDirectory))
+
+ self.assertEqual(None, item.tags)
+ self.assertEqual(None, item.tagsModificationTime)
+ self.assertEqual(None, item.tagsCreationTime)
+ self.assertFalse(item.tagged)
+
+ def testItemNoTagsItem(self):
+ """Tests the results for items which got no tags assigned.
+ """
+
+ item = item_access.Item('2009-07-29 - no tags', self.itemAccess)
+
+ self.assertTrue(os.path.isdir(item.itemDirectory))
+
+ self.assertEqual(None, item.tags)
+ self.assertEqual(None, item.tagsModificationTime)
+ self.assertEqual(None, item.tagsCreationTime)
+ self.assertFalse(item.tagged)
+
+ def testItem(self):
+ """Tests an item with tags assigned to.
+ """
+
+ item = item_access.Item('2008-12-25 - holiday india', self.itemAccess)
+
+ self.assertTrue(os.path.isdir(item.itemDirectory))
+
+ expectedTags = set([item_access.Tag('holiday'),
+ item_access.Tag('airport'),
+ item_access.Tag('india'),
+ item_access.Tag('Markus Pielmeier', context = 'creator')])
+ self.assertEqual(expectedTags, item.tags)
+ # TODO disabled timestamp tests until time in test is not human readable
+ #self.assertAlmostEqual(1250195650.7, item.tagsModificationTime, 1)
+ #self.assertAlmostEqual(1250195650.7, item.tagsCreationTime, 1)
+ self.assertTrue(item.tagged)
+
+class TestItemAccess(unittest.TestCase):
+ """Test the tagfs.ItemAccess class.
+ """
+
+ def setUp(self):
+ unittest.TestCase.setUp(self)
+
+ self.itemAccess = createTestItemAccess()
+
+ def testItems(self):
+ """Test the items property of ItemAccess.
+ """
+ items = self.itemAccess.items
+
+ expectedItems = set(['2008-03-29 - holiday south korea',
+ '2008-12-25 - holiday india',
+ '2009-07-29 - no tags',
+ '2008-11-11 - airport underground railway'])
+ self.assertEqual(expectedItems,
+ set(items))
+
+ def testTags(self):
+ """Test the tag property of ItemAccess
+ """
+
+ tags = self.itemAccess.tags
+
+ expectedTags = set([item_access.Tag('airport'),
+ item_access.Tag('holiday'),
+ item_access.Tag('india'),
+ item_access.Tag('korea'),
+ item_access.Tag('Markus Pielmeier', context = 'creator'),
+ item_access.Tag('Tama Yuri', context = 'creator'),
+ item_access.Tag('flickr', context = 'source'),
+ item_access.Tag('tube', context = 'object')])
+ self.assertEqual(expectedTags,
+ set(tags))
+
+ def testTaggedItems(self):
+ """Test the items property of ItemAccess.
+ """
+ items = self.itemAccess.taggedItems
+
+ expectedItems = set(['2008-03-29 - holiday south korea',
+ '2008-12-25 - holiday india',
+ '2008-11-11 - airport underground railway'])
+ self.assertEqual(expectedItems,
+ set([item.name for item in items]))
+
+ def testUntaggedItems(self):
+ """Test the untaggedItems property of ItemAccess
+ """
+
+ untaggedItems = self.itemAccess.untaggedItems
+
+ self.assertEqual(set(['2009-07-29 - no tags']),
+ set([item.name for item in untaggedItems]))
+
+ def _testFilter(self, filters, expectedResultItems):
+ resultItems = self.itemAccess.filterItems(filters)
+
+ self.assertEqual(set(expectedResultItems),
+ set([item.name for item in resultItems]))
+
+ def testFilterSingle(self):
+ """Tests TagValueFilter filter argument.
+
+ @see: tagfs.TagValueFilter
+ """
+
+ self._testFilter(item_access.TagValueFilter('korea'),
+ ['2008-03-29 - holiday south korea'])
+
+ def testFilterMultiple(self):
+ """Tests AndFilter filter arguments at once.
+
+ @see: tagfs.AndFilter
+ """
+
+ self._testFilter(item_access.AndFilter([item_access.TagValueFilter('korea'),
+ item_access.TagValueFilter('airport')]),
+ ['2008-03-29 - holiday south korea'])
+
+class AbstractNodeTest(unittest.TestCase):
+ """This abstract TestCase checks the Node interface definitions.
+ """
+
+ def setUp(self):
+ unittest.TestCase.setUp(self)
+
+ self.itemAccess = createTestItemAccess()
+
+ def _testNodeInterface(self, node):
+ """This method tests wether the node implements the node interface contract.
+ """
+
+ self.assertNotEqual(None, node)
+
+ for subNode in node.subNodes:
+ self._testNodeInterface(subNode)
+
+ self.assertTrue('getSubNode' in set(dir(node)))
+
+ # TODO supply correct path to node.getattr
+ attr = node.getattr('/path')
+ self.assertNotEqual(None, attr.st_mode)
+
+ direntry = node.direntry
+ if direntry is not None:
+ self.assertNotEqual(None, direntry.name)
+ self.assertNotEqual(None, direntry.type)
+
+class TestItemNode(AbstractNodeTest):
+ """This test case tests the ItemNode class.
+ """
+
+ def testItemNodeInterface(self):
+ import stat
+
+ item = item_access.Item('test', self.itemAccess)
+
+ n = node.ItemNode(item, self.itemAccess)
+
+ self._testNodeInterface(n)
+
+ direntry = n.direntry
+ self.assertEqual('test', direntry.name)
+ self.assertEqual(stat.S_IFLNK, direntry.type)
+
+ # TODO supply correct path to n.readlink
+ self.assertNotEqual(None, n.readlink('/path'))
+
+class TestUntaggedItemsNode(AbstractNodeTest):
+ """This test case tests the UntaggedItemsNode.
+ """
+
+ def testUntaggedItemsNodeInterface(self):
+ n = node.UntaggedItemsNode('.untagged', self.itemAccess)
+
+ self._testNodeInterface(n)
+
+ direntry = n.direntry
+ self.assertEqual('.untagged', direntry.name)
+
+class TestTagNode(AbstractNodeTest):
+ """This test case tests the TagNode class.
+ """
+
+ def testTagNode(self):
+ c = mock_config.Config('.tag', False, False)
+
+ parentNode = node.RootNode(self.itemAccess, c)
+
+ n = node.TagNode(parentNode, item_access.Tag('holiday'), self.itemAccess, c)
+
+ self._testNodeInterface(n)
+
+class TestRootNode(AbstractNodeTest):
+ """This test case tests the RootNode class.
+ """
+
+ def testRootNode(self):
+ c = mock_config.Config('.tag', False, False)
+
+ n = node.RootNode(self.itemAccess, c)
+
+ self._testNodeInterface(n)
+
+class TestNodeRecurse(AbstractNodeTest):
+ """This test recurses through a RootNode and it's children.
+
+ This test case tries to call many of the node functions to get a overall
+ performance measure.
+ """
+
+ def setUp(self):
+ unittest.TestCase.setUp(self)
+
+ self.itemAccess = createTestItemAccess()
+
+ def __recurseNode(self, n):
+ self._testNodeInterface(n)
+
+ nDir = set(dir(n))
+
+ if 'required' in nDir:
+ n.required([])
+
+ if 'filter' in nDir:
+ self.assertNotEqual(None, n.filter)
+
+ for sn in n.subNodes:
+ self.__recurseNode(sn)
+
+ def testRecurse(self):
+ c = mock_config.Config('.tag', False, False)
+
+ n = node.RootNode(self.itemAccess, c)
+
+ self.__recurseNode(n)
+
+
+if __name__ == "__main__":
+ setupenv()
+ import tagfs
+
+ if 'PROFILE' in os.environ:
+ import cProfile
+
+ profileFile = os.path.join(os.getcwd(), 'tagfs_test.profile')
+
+ cProfile.run('unittest.main()', profileFile)
+ else:
+ unittest.main()
diff --git a/src/test/test_config.py b/src/test/test_config.py
new file mode 100644
index 0000000..0aa97cd
--- /dev/null
+++ b/src/test/test_config.py
@@ -0,0 +1,44 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import unittest
+import logging
+import env
+
+from tagfs import config
+
+import setup
+
+class TestConfig(unittest.TestCase):
+
+ def configs(self):
+ for dir in setup.validItemDirectories:
+ yield config.Config(dir)
+
+ def testConfig(self):
+ for c in self.configs():
+ logging.debug('Testing config %s' % c)
+
+ self.assertTrue(isinstance(c.tagFileName, str))
+ self.assertTrue(isinstance(c.enableValueFilters, bool))
+ self.assertTrue(isinstance(c.enableRootItemLinks, bool))
+
+ c.enableValueFilters = True
+ self.assertTrue(isinstance(c.enableValueFilters, bool))
+ self.assertEqual(True, c.enableValueFilters)
diff --git a/src/test/test_transient_dict.py b/src/test/test_transient_dict.py
new file mode 100644
index 0000000..d90ff0a
--- /dev/null
+++ b/src/test/test_transient_dict.py
@@ -0,0 +1,58 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import unittest
+from tagfs.transient_dict import TransientDict
+
+class TestTransientDict(unittest.TestCase):
+
+ def testGetSetIn(self):
+ """Test some simple get, set an in calls.
+ """
+
+ d = TransientDict(10)
+
+ self.assertTrue('1' not in d)
+
+ d['1'] = 'a'
+ d['2'] = 'b'
+
+ self.assertTrue(d['1'] == 'a')
+ self.assertTrue(d['2'] == 'b')
+
+ self.assertTrue('1' in d)
+ self.assertTrue('3' not in d)
+ self.assertTrue('a' not in d)
+
+ def testForgettFeature(self):
+ """Test the forgett feature
+ """
+
+ d = TransientDict(2)
+
+ d['1'] = 'a'
+ d['2'] = 'b'
+ d['1'] = 'a'
+ d['3'] = 'c'
+ d['1'] = 'a'
+ d['4'] = 'c'
+
+ self.assertTrue('1' in d)
+ self.assertTrue('2' not in d)
+ self.assertTrue('4' in d)
diff --git a/src/test/test_view.py b/src/test/test_view.py
new file mode 100644
index 0000000..0ed2675
--- /dev/null
+++ b/src/test/test_view.py
@@ -0,0 +1,138 @@
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import errno
+import logging
+import os
+import stat
+import unittest
+import tagfs
+import env
+import traceback
+from tagfs import item_access
+from tagfs import view
+import mock_config
+import setup
+
+class TestView(unittest.TestCase):
+
+ def assertEqual(self, expected, actual, msg = None):
+ if not msg is None:
+ msg = '%s (%s != %s)' % (msg, expected, actual);
+
+ super(TestView, self).assertEqual(expected, actual, msg)
+
+ def validateDirectoryPath(self, view, path):
+ # path is a directory
+
+ # test rox filer default access
+ for file in ['AppRun', '.DirIcon']:
+ attr = view.getattr(os.path.join(path, file))
+
+ # TODO implement propper offset handling
+ for entry in view.readdir(path, 0):
+ self.assertTrue(entry != None)
+
+ # TODO put '.' and '..' in set
+ if entry.name == '.':
+ continue
+
+ if entry.name == '..':
+ continue
+
+ self.validateView(view, path + '/' + entry.name)
+
+ def validateLinkPath(self, view, path):
+ l = view.readlink(path)
+
+ self.assertNotEquals(-errno.ENOENT, l)
+
+ self.assertTrue(len(l) > 0)
+
+ def validateRegularFilePath(self, view, path):
+ attr = view.getattr(path)
+
+ self.assertTrue(attr.st_size >= 0)
+
+ self.assertEqual(None,
+ view.open(path, 32768),
+ 'Failure with path %s' % path)
+
+ content = view.read(path, 4096, 0)
+
+ self.assertNotEquals(-errno.ENOSYS, content)
+ self.assertNotEquals(-errno.ENOENT, content)
+
+ self.assertTrue(content != None)
+
+ logging.debug('Content: ' + str(content))
+
+ # TODO validate file close
+
+ # TODO validate block file
+
+ def validateView(self, view, path):
+ attr = view.getattr(path)
+
+ # TODO assert every file belongs to 'me'
+ # right now this is the default behaviour
+ #self.assertEquals(os.getuid(), attr.st_uid)
+ #self.assertEquals(os.getgid(), attr.st_gid)
+
+ self.assertNotEquals(-errno.ENOSYS, attr,
+ msg = 'Expected attributes for path ' + path + ' but was ' + str(attr))
+ self.assertNotEquals(-errno.ENOENT, attr,
+ msg = 'Expected attributes for path ' + path + ' but was ' + str(attr))
+
+ if (attr.st_mode & stat.S_IFDIR == stat.S_IFDIR):
+ self.validateDirectoryPath(view, path)
+ elif (attr.st_mode & stat.S_IFLNK == stat.S_IFLNK):
+ self.validateLinkPath(view, path)
+ elif (attr.st_mode & stat.S_IFREG == stat.S_IFREG):
+ self.validateRegularFilePath(view, path)
+ else:
+ self.fail('Unknown attributes ' + str(attr))
+
+ @property
+ def configs(self):
+ for enableValueFilters in (True, False):
+ for enableRootItemLinks in (True, False):
+ yield mock_config.Config('.tag', enableValueFilters, enableRootItemLinks)
+
+ @property
+ def itemAccesses(self):
+ for dir in setup.validItemDirectories:
+ yield item_access.ItemAccess(dir, '.tag')
+
+ def testView(self):
+ """Testing view interface
+ """
+
+ for conf in self.configs:
+ for itemAccess in self.itemAccesses:
+ try:
+ v = view.View(itemAccess, conf)
+
+ self.validateView(v, '/')
+ except Exception as e:
+ raise Exception('Can\' test view for conf %s and itemAccess %s.\nCaused by: %s' % (conf, itemAccess, traceback.format_exc()))
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/util/trace_logfiles.py b/util/trace_logfiles.py
new file mode 100755
index 0000000..cea3b23
--- /dev/null
+++ b/util/trace_logfiles.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Markus Pielmeier
+#
+# This file is part of tagfs.
+#
+# tagfs is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# tagfs is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import logging
+import re
+
+class TraceLogEntry(object):
+
+ def __init__(self, context, path):
+ self.context = context
+ self.path = path
+
+class TraceLog(object):
+
+ LINE_BUFFER_SIZE = 100000
+
+ TRACE_PATTERN = re.compile('[0-9\-,: ]+DEBUG (readlink|getattr|readdir) (.*)$')
+
+ def __init__(self):
+ self.entries = []
+
+ def _readLogLine(self, line):
+ m = TraceLog.TRACE_PATTERN.match(line)
+
+ if not m:
+ return
+
+ context = m.group(1)
+ path = m.group(2)
+
+ self.entries.append(TraceLogEntry(context, path))
+
+ def readLogFile(self, fileName):
+ logging.info('Reading logfile ' + fileName)
+
+ f = open(fileName)
+
+ while True:
+ lines = f.readlines(TraceLog.LINE_BUFFER_SIZE)
+ if not lines:
+ break;
+
+ for line in lines:
+ self._readLogLine(line)
+
+class TraceResult(object):
+
+ def __init__(self):
+ self.contextHistogram = {}
+ self.contextPathHistogram = {}
+
+ def _analyzeContextHistogram(self, traceLog):
+ for e in traceLog.entries:
+ if not e.context in self.contextHistogram:
+ self.contextHistogram[e.context] = 0
+
+ self.contextHistogram[e.context] += 1
+
+ def _analyzeContextPathHistogram(self, traceLog):
+ for e in traceLog.entries:
+ if not e.context in self.contextPathHistogram:
+ self.contextPathHistogram[e.context] = {}
+
+ ph = self.contextPathHistogram[e.context]
+
+ if not e.path in ph:
+ ph[e.path] = 0
+
+ ph[e.path] += 1
+
+
+ def _analyzeTraceLog(self, traceLog):
+ self._analyzeContextHistogram(traceLog)
+ self._analyzeContextPathHistogram(traceLog)
+
+ def analyzeLogFile(self, fileName):
+ tl = TraceLog()
+ tl.readLogFile(fileName)
+
+ self._analyzeTraceLog(tl)
+
+def usage():
+ # TODO print usage
+
+ pass
+
+def writeCSV(fileName, pathHistogram):
+ import csv
+
+ w = csv.writer(open(fileName, 'w'))
+
+ for path, histogram in pathHistogram.iteritems():
+ w.writerow([path, histogram])
+
+if __name__ == '__main__':
+ logging.basicConfig(level = logging.DEBUG)
+
+ import getopt
+ import sys
+
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "", [])
+ except getopt.GetoptError:
+ usage()
+
+ sys.exit(1)
+
+ for opt, arg in opts:
+ if opt in ("-h", "--help"):
+ usage()
+ sys.exit()
+
+ tr = TraceResult()
+
+ for fileName in args:
+ tr.analyzeLogFile(fileName)
+
+ print "Context Histogram"
+ for context, calls in tr.contextHistogram.iteritems():
+ print ' %s: %s' % (context, calls)
+
+ for context, pathHistogram in tr.contextPathHistogram.iteritems():
+ writeCSV('pathHistogram_' + context + '.csv', pathHistogram)
|
jwilger/jack-the-ripper
|
7e77175b7f575f8f616dcb96d7925c4f4c52af4a
|
Switch to SqsGen2, as Amazon appears to have deprecated API versions prior to 2008-01-01 as of yesterday.
|
diff --git a/bin/jack_the_ripper b/bin/jack_the_ripper
index 8fd6191..474fa16 100644
--- a/bin/jack_the_ripper
+++ b/bin/jack_the_ripper
@@ -1,11 +1,11 @@
#!/usr/bin/env ruby
LOAD_PATH = File.expand_path( File.dirname( __FILE__ ) + '/../lib' )
require 'rubygems'
-gem 'daemons', '= 1.0.9'
+gem 'daemons', '>= 1.0.9'
require 'daemons'
Daemons.run( LOAD_PATH + '/jack_the_ripper_server.rb', {
:app_name => 'JackTheRIPper',
:dir_mode => :normal,
:dir => '/tmp',
:log_output => true
} )
\ No newline at end of file
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 69eb3a9..a361ea2 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,53 +1,53 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'rubygems'
gem 'right_aws'
gem 'right_http_connection'
require 'yaml'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.4.3'
+ VERSION = '1.4.4'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
- RightAws::Sqs.new( access_key_id, secret_access_key ).
+ RightAws::SqsGen2.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index 47698df..e7e826b 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -1,79 +1,79 @@
require 'rubygems'
gem 'mocha', '=0.5.6'
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPper < Test::Unit::TestCase
def test_should_allow_logger_to_be_set_and_retrieved
logger = stub
assert_nil JackTheRIPper.logger
assert_nothing_raised { JackTheRIPper.logger = logger }
assert_same logger, JackTheRIPper.logger
end
def test_should_process_one_message_from_the_queue_then_delete_the_message_and_return_true
queue = mock
message = mock
body = YAML::dump( { :foo => 'bar' } )
queue.expects( :receive ).returns( message )
message.expects( :delete )
message.stubs( :body ).returns( body )
processor = mock
JackTheRIPper::Processor.expects( :new ).with( { :foo => 'bar' } ).
returns( processor )
processor.expects( :process )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
def test_should_not_delete_message_from_queue_if_conversion_fails_due_to_remote_error
logger = stub_everything
JackTheRIPper.logger = logger
message = mock
queue = stub_everything( :receive => message )
message.stubs( :body ).returns( 'foo' )
message.expects( :delete ).never
processor = stub
JackTheRIPper::Processor.stubs( :new ).returns( processor )
processor.stubs( :process ).raises( JackTheRIPper::RemoteError.new( 'blah' ) )
logger.expects( :warn ).with( 'Remote Error: blah' )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
def test_should_delete_message_from_queue_if_conversion_fails_due_to_processor_error
logger = stub_everything
JackTheRIPper.logger = logger
message = mock
queue = stub_everything( :receive => message )
message.stubs( :body ).returns( 'foo' )
message.expects( :delete )
processor = stub
JackTheRIPper::Processor.stubs( :new ).returns( processor )
processor.stubs( :process ).raises( JackTheRIPper::ProcessorError.new( 'blah' ) )
logger.expects( :error ).with( 'Processor Error: blah' )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
def test_should_return_false_if_there_are_no_messages_retrieved
queue = mock
queue.expects( :receive ).returns( nil )
assert_equal false, JackTheRIPper.process_next_message( queue )
end
def test_should_instantiate_queue_and_return_it
sqs = mock
queue = stub
sqs.expects( :queue ).with( 'myqueue', true, 240 ).returns( queue )
- RightAws::Sqs.expects( :new ).with( 'myaccesskeyid', 'mysecretaccesskey' ).
+ RightAws::SqsGen2.expects( :new ).with( 'myaccesskeyid', 'mysecretaccesskey' ).
returns( sqs )
assert_same queue, JackTheRIPper.get_queue( 'myaccesskeyid',
'mysecretaccesskey', 'myqueue' )
end
def test_should_have_tmp_path_attribute
assert_equal '/tmp', JackTheRIPper.tmp_path
assert_nothing_raised { JackTheRIPper.tmp_path = '/foo/bar' }
assert_equal '/foo/bar', JackTheRIPper.tmp_path
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
02cf1d48cc48579486d9c0ccc085a87b2b8f159b
|
Check for PostScript file using case-insensitive matching
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 50841de..69eb3a9 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,53 +1,53 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'rubygems'
gem 'right_aws'
gem 'right_http_connection'
require 'yaml'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.4.2'
+ VERSION = '1.4.3'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index 6a8a7d1..187897b 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,51 +1,51 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
JackTheRIPper.logger.debug "Processing message"
source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
JackTheRIPper.logger.debug "Source file retrieved."
file_type_info = `file #{source_file.path}`
- if /\bPostscript\b/ =~ file_type_info
+ if /\bpostscript\b/i =~ file_type_info
`pstopdf #{source_file.path} -o #{source_file.path}.pdf`
new_source_file = HTTPFile.new('', "#{source_file.path}.pdf")
source_file.delete
source_file = new_source_file
end
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = JackTheRIPper.tmp_path + '/result' + result_ext
cmd = "sips #{sips_args} #{source_file.path} --out #{result_path}"
output = `#{cmd}`
JackTheRIPper.logger.debug "Ran command #{cmd}"
raise ProcessorError, output unless File.exist?( result_path )
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
ensure
source_file.delete unless source_file.nil?
result_file.delete unless result_file.nil?
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index 0ffd092..e9e0be2 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,247 +1,287 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../lib' ) )
require 'rubygems'
gem 'mocha', '=0.5.6'
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def setup
JackTheRIPper.stubs( :logger ).returns( stub_everything )
end
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects(:`).with("file #{working_dir_path}/source.pdf").returns('')
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_convert_eps_source_to_pdf_before_converting_to_final_format
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.eps' )
pdf_file = mock
pdf_file.expects(:delete)
pdf_file.stubs(:path).returns(working_dir_path + '/source.eps.pdf')
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects(:new).with('', working_dir_path + '/source.eps.pdf').
returns(pdf_file)
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects(:`).with("file #{working_dir_path}/source.eps").
returns('DOS EPS Binary File Postscript starts at byte 32 length ' +
'4570373 TIFF starts at byte 4570405 length 575329\n')
processor.expects(:`).with("pstopdf #{working_dir_path}/source.eps -o #{working_dir_path}/source.eps.pdf")
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.eps.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
+ def test_should_detect_alternate_postscript_tag_in_file_output
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :format => :jpg
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.eps' )
+ pdf_file = mock
+ pdf_file.expects(:delete)
+ pdf_file.stubs(:path).returns(working_dir_path + '/source.eps.pdf')
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects(:new).with('', working_dir_path + '/source.eps.pdf').
+ returns(pdf_file)
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects(:`).with("file #{working_dir_path}/source.eps").
+ returns('PostScript document text conforming at level 3.1 - type EPS')
+ processor.expects(:`).with("pstopdf #{working_dir_path}/source.eps -o #{working_dir_path}/source.eps.pdf")
+ processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.eps.pdf --out #{working_dir_path}/result.jpg" )
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
+
+ processor.process
+ end
+
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects(:`).with("file #{working_dir_path}/source.pdf").returns('')
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
File.expects( :exist? ).with( working_dir_path + '/result.png' ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects(:`).with("file #{working_dir_path}/source.jpg").returns('')
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects(:`).with("file #{working_dir_path}/source.jpg").returns('')
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects(:`).with("file #{working_dir_path}/source.pdf").returns('')
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_raise_processor_error_if_sips_process_does_not_write_result_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = stub_everything( :path => '/foo/bar.jpg' )
JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.stubs( :` ).returns( 'blah blah blah' )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( false )
begin
processor.process
fail "Expected ProcessorError to be raised."
rescue JackTheRIPper::ProcessorError => e
assert_equal 'blah blah blah', e.message
end
end
end
|
jwilger/jack-the-ripper
|
06844b75a4cddcc233b05f610a622464f9b2b164
|
Not sure WTF happened to the vendor directory, and not sure why it was there in the first place. Using external gems instead.
|
diff --git a/Manifest.txt b/Manifest.txt
index d49b46e..594e933 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,13 +1,13 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
lib/jack_the_ripper/uri_fix.rb
lib/jack_the_ripper_server.rb
test/jack_the_ripper/test_http_file.rb
test/jack_the_ripper/test_processor.rb
-test/test_jack_the_ripper.rb
\ No newline at end of file
+test/test_jack_the_ripper.rb
diff --git a/Rakefile b/Rakefile
index 72dea7b..6a23f20 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,17 +1,17 @@
require 'rubygems'
gem 'hoe', '=1.5.1'
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
- p.extra_deps = %w( mime-types daemons right_aws )
-end
\ No newline at end of file
+ p.extra_deps = %w( mime-types daemons right_aws right_http_connection)
+end
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 97bb868..50841de 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,53 +1,53 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
-VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/../vendor' )
-$:.unshift( VENDOR_DIR + '/right_aws-1.5.0/lib' )
-$:.unshift( VENDOR_DIR + '/right_http_connection-1.2.1/lib' )
+require 'rubygems'
+gem 'right_aws'
+gem 'right_http_connection'
require 'yaml'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.4.1'
+ VERSION = '1.4.2'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 6a5bcbc..8f6af59 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,96 +1,96 @@
-$:.unshift( VENDOR_DIR + '/mime-types-1.15/lib' )
+gem 'mime-types'
+require 'mime/types'
require 'uri'
require 'jack_the_ripper/uri_fix'
require 'net/http'
require 'net/https'
-require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def logger
self.class.logger
end
def delete
logger.debug "Deleting file #{@path}"
File.unlink( @path ) if File.exist?( @path )
end
def put( uri = nil, redirection_limit = 10 )
if redirection_limit == 0
raise RemoteError, "Too many redirects for PUT: #{uri}"
end
logger.info "PUTing file: #{@uri}"
content_type = MIME::Types.type_for( @path ).first.content_type
result = HTTPFile.send_request( uri || @uri, :put, { 'Content-Type' => content_type }, Base64.encode64( File.read( @path ) ) )
case result
when Net::HTTPSuccess
# ok
logger.info "File PUT successful"
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
put( result[ 'location' ], redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
def logger
JackTheRIPper.logger || Proc.new{ l = Logger.new( STDERR ); l.level = Logger::ERROR; l }.call
end
def get( uri, directory, basename, redirection_limit = 10 )
logger.info "GETing file: #{uri}"
if redirection_limit == 0
raise RemoteError, "Too many redirects for GET: #{uri}"
end
result = send_request( uri, :get )
case result
when Net::HTTPSuccess
logger.info "File GET successful"
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
logger.debug "File stored at #{file_path}"
new( nil, file_path )
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
def send_request( uri, method, headers = {}, body = nil )
uri = URI.parse( uri )
http = Net::HTTP.new( uri.host, uri.port )
http.use_ssl = true if uri.scheme == 'https'
http.start do |h|
logger.debug "HTTP#{ uri.scheme == 'https' ? 'S' : '' } connection started."
h.send_request( method.to_s.upcase, uri.request_uri, body, headers )
end
rescue URI::InvalidURIError => e
raise JackTheRIPper::ProcessorError, "Invalid URI for #{method}: #{uri}"
end
end
end
-end
\ No newline at end of file
+end
|
jwilger/jack-the-ripper
|
3b10aa5cfc050fa504cb6f3205c278556928654a
|
Convert EPS to PDF
|
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index fa55599..6a8a7d1 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,44 +1,51 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
JackTheRIPper.logger.debug "Processing message"
source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
JackTheRIPper.logger.debug "Source file retrieved."
+ file_type_info = `file #{source_file.path}`
+ if /\bPostscript\b/ =~ file_type_info
+ `pstopdf #{source_file.path} -o #{source_file.path}.pdf`
+ new_source_file = HTTPFile.new('', "#{source_file.path}.pdf")
+ source_file.delete
+ source_file = new_source_file
+ end
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = JackTheRIPper.tmp_path + '/result' + result_ext
cmd = "sips #{sips_args} #{source_file.path} --out #{result_path}"
output = `#{cmd}`
JackTheRIPper.logger.debug "Ran command #{cmd}"
raise ProcessorError, output unless File.exist?( result_path )
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
ensure
source_file.delete unless source_file.nil?
result_file.delete unless result_file.nil?
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
-end
\ No newline at end of file
+end
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index de3e0be..0ffd092 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,201 +1,247 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../lib' ) )
require 'rubygems'
gem 'mocha', '=0.5.6'
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def setup
JackTheRIPper.stubs( :logger ).returns( stub_everything )
end
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
+ processor.expects(:`).with("file #{working_dir_path}/source.pdf").returns('')
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
+ def test_should_convert_eps_source_to_pdf_before_converting_to_final_format
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :format => :jpg
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.eps' )
+ pdf_file = mock
+ pdf_file.expects(:delete)
+ pdf_file.stubs(:path).returns(working_dir_path + '/source.eps.pdf')
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects(:new).with('', working_dir_path + '/source.eps.pdf').
+ returns(pdf_file)
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects(:`).with("file #{working_dir_path}/source.eps").
+ returns('DOS EPS Binary File Postscript starts at byte 32 length ' +
+ '4570373 TIFF starts at byte 4570405 length 575329\n')
+ processor.expects(:`).with("pstopdf #{working_dir_path}/source.eps -o #{working_dir_path}/source.eps.pdf")
+ processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.eps.pdf --out #{working_dir_path}/result.jpg" )
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
+
+ processor.process
+ end
+
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
+ processor.expects(:`).with("file #{working_dir_path}/source.pdf").returns('')
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
File.expects( :exist? ).with( working_dir_path + '/result.png' ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
+ processor.expects(:`).with("file #{working_dir_path}/source.jpg").returns('')
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
+ processor.expects(:`).with("file #{working_dir_path}/source.jpg").returns('')
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
+ processor.expects(:`).with("file #{working_dir_path}/source.pdf").returns('')
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_raise_processor_error_if_sips_process_does_not_write_result_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = stub_everything( :path => '/foo/bar.jpg' )
JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.stubs( :` ).returns( 'blah blah blah' )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( false )
begin
processor.process
fail "Expected ProcessorError to be raised."
rescue JackTheRIPper::ProcessorError => e
assert_equal 'blah blah blah', e.message
end
end
-end
\ No newline at end of file
+end
|
jwilger/jack-the-ripper
|
dd351771d1b699dae4cb41f0b5251c971de9b65f
|
Added .gitignore
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0c6117d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+pkg
\ No newline at end of file
|
jwilger/jack-the-ripper
|
a957fefdf88d87c8fe708bcfcd306964525e4009
|
Stub Logger in Processor Tests
|
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index 6801ea7..de3e0be 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,197 +1,201 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../lib' ) )
require 'rubygems'
gem 'mocha', '=0.5.6'
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
+ def setup
+ JackTheRIPper.stubs( :logger ).returns( stub_everything )
+ end
+
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
File.expects( :exist? ).with( working_dir_path + '/result.png' ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_raise_processor_error_if_sips_process_does_not_write_result_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = stub_everything( :path => '/foo/bar.jpg' )
JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.stubs( :` ).returns( 'blah blah blah' )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( false )
begin
processor.process
fail "Expected ProcessorError to be raised."
rescue JackTheRIPper::ProcessorError => e
assert_equal 'blah blah blah', e.message
end
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
63e4f49b70590c3aff79d0665d493455fdebba2e
|
Make sure tests can access necessary libraries when run individually.
|
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 3b68140..12ad28d 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,159 +1,160 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../lib' ) )
require 'rubygems'
gem 'mocha', '=0.5.6'
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
f = mock
File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/redirect_me', :get ).
returns( redirect )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/result.jpg', :put, headers, Base64.encode64( data ) ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_404
http_result = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_invalid_uri
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'not a url',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_other_client_error
http_result = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_uncaught_exception
JackTheRIPper::HTTPFile.stubs( :send_request ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_404
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_invalid_uri
f = JackTheRIPper::HTTPFile.new( 'not a url',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_other_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_uncaught_exception
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
Net::HTTP.stubs( :start ).raises( Exception.new )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index cd3bbe2..6801ea7 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,195 +1,197 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../lib' ) )
+
require 'rubygems'
gem 'mocha', '=0.5.6'
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
File.expects( :exist? ).with( working_dir_path + '/result.png' ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_raise_processor_error_if_sips_process_does_not_write_result_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = stub_everything( :path => '/foo/bar.jpg' )
JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.stubs( :` ).returns( 'blah blah blah' )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( false )
begin
processor.process
fail "Expected ProcessorError to be raised."
rescue JackTheRIPper::ProcessorError => e
assert_equal 'blah blah blah', e.message
end
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
3431920c812d1d65f1c0ef699a1bfe8464a547ba
|
Use Rubygems to load Hoe in Rakefile
|
diff --git a/Rakefile b/Rakefile
index cef1019..72dea7b 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,17 +1,17 @@
-VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/vendor' )
-$:.unshift( VENDOR_DIR + '/hoe-1.5.0/lib' )
-$:.unshift( VENDOR_DIR + '/rubyforge-0.4.4/lib' )
+require 'rubygems'
+gem 'hoe', '=1.5.1'
+
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
p.extra_deps = %w( mime-types daemons right_aws )
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
c7f13125c2002ca32c1618246854b9eb90a2f894
|
Added right_aws to list of gem dependencies
|
diff --git a/Rakefile b/Rakefile
index b37dd19..cef1019 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,17 +1,17 @@
VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/vendor' )
$:.unshift( VENDOR_DIR + '/hoe-1.5.0/lib' )
$:.unshift( VENDOR_DIR + '/rubyforge-0.4.4/lib' )
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
- p.extra_deps = %w( mime-types daemons )
+ p.extra_deps = %w( mime-types daemons right_aws )
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
7ec7977c29f0ea7e86fc9cf9eb606b0c707fe43f
|
incremented version string for dev version to 1.3.1.1
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 16e834c..1c59760 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,53 +1,53 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/../vendor' )
$:.unshift( VENDOR_DIR + '/right_aws-1.5.0/lib' )
$:.unshift( VENDOR_DIR + '/right_http_connection-1.2.1/lib' )
require 'yaml'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.3.1'
+ VERSION = '1.3.1.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
b8a1aa24dc7225ca3a74a1d376f66441a131077d
|
Removed CTI-specific CruiseControl.rb configuration
|
diff --git a/Rakefile b/Rakefile
index f05673b..b37dd19 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,20 +1,17 @@
VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/vendor' )
$:.unshift( VENDOR_DIR + '/hoe-1.5.0/lib' )
$:.unshift( VENDOR_DIR + '/rubyforge-0.4.4/lib' )
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
p.extra_deps = %w( mime-types daemons )
-end
-
-desc "Run the cruise control build"
-task :cruise => [ :test, :check_manifest, :gem ]
\ No newline at end of file
+end
\ No newline at end of file
diff --git a/cruise_config.rb b/cruise_config.rb
deleted file mode 100644
index 203051e..0000000
--- a/cruise_config.rb
+++ /dev/null
@@ -1,6 +0,0 @@
-Project.configure do |project|
- # Send email notifications about broken and fixed builds to [email protected], [email protected] (default: send to nobody)
- project.email_notifier.emails = [ '[email protected]' ]
-
- project.rake_task = 'cruise'
-end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
b9d708768b29cadb8b487a795f0dbcd8977d41e1
|
added cruise control config
|
diff --git a/Manifest.txt b/Manifest.txt
index f313bdc..3f1911c 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,120 +1,121 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
+cruise_config.rb
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
lib/jack_the_ripper/uri_fix.rb
lib/jack_the_ripper_server.rb
test/jack_the_ripper/test_http_file.rb
test/jack_the_ripper/test_processor.rb
test/test_jack_the_ripper.rb
vendor/hoe-1.5.0/History.txt
vendor/hoe-1.5.0/Manifest.txt
vendor/hoe-1.5.0/README.txt
vendor/hoe-1.5.0/Rakefile
vendor/hoe-1.5.0/bin/sow
vendor/hoe-1.5.0/lib/hoe.rb
vendor/hoe-1.5.0/test/test_hoe.rb
vendor/mime-types-1.15/ChangeLog
vendor/mime-types-1.15/Install
vendor/mime-types-1.15/LICENCE
vendor/mime-types-1.15/README
vendor/mime-types-1.15/Rakefile
vendor/mime-types-1.15/lib/mime/types.rb
vendor/mime-types-1.15/pre-setup.rb
vendor/mime-types-1.15/setup.rb
vendor/mime-types-1.15/tests/tc_mime_type.rb
vendor/mime-types-1.15/tests/tc_mime_types.rb
vendor/mime-types-1.15/tests/testall.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
vendor/right_aws-1.5.0/History.txt
vendor/right_aws-1.5.0/Manifest.txt
vendor/right_aws-1.5.0/README.txt
vendor/right_aws-1.5.0/Rakefile
vendor/right_aws-1.5.0/lib/awsbase/benchmark_fix.rb
vendor/right_aws-1.5.0/lib/awsbase/right_awsbase.rb
vendor/right_aws-1.5.0/lib/awsbase/support.rb
vendor/right_aws-1.5.0/lib/ec2/right_ec2.rb
vendor/right_aws-1.5.0/lib/right_aws.rb
vendor/right_aws-1.5.0/lib/s3/right_s3.rb
vendor/right_aws-1.5.0/lib/s3/right_s3_interface.rb
vendor/right_aws-1.5.0/lib/sdb/right_sdb_interface.rb
vendor/right_aws-1.5.0/lib/sqs/right_sqs.rb
vendor/right_aws-1.5.0/lib/sqs/right_sqs_interface.rb
vendor/right_aws-1.5.0/test/ec2/test_helper.rb
vendor/right_aws-1.5.0/test/ec2/test_right_ec2.rb
vendor/right_aws-1.5.0/test/http_connection.rb
vendor/right_aws-1.5.0/test/s3/test_helper.rb
vendor/right_aws-1.5.0/test/s3/test_right_s3.rb
vendor/right_aws-1.5.0/test/s3/test_right_s3_stubbed.rb
vendor/right_aws-1.5.0/test/sdb/test_helper.rb
vendor/right_aws-1.5.0/test/sdb/test_right_sdb.rb
vendor/right_aws-1.5.0/test/sqs/test_helper.rb
vendor/right_aws-1.5.0/test/sqs/test_right_sqs.rb
vendor/right_aws-1.5.0/test/test_credentials.rb
vendor/right_aws-1.5.0/test/ts_right_aws.rb
vendor/right_http_connection-1.2.1/History.txt
vendor/right_http_connection-1.2.1/Manifest.txt
vendor/right_http_connection-1.2.1/README.txt
vendor/right_http_connection-1.2.1/Rakefile
vendor/right_http_connection-1.2.1/lib/net_fix.rb
vendor/right_http_connection-1.2.1/lib/right_http_connection.rb
vendor/right_http_connection-1.2.1/setup.rb
vendor/rubyforge-0.4.4/History.txt
vendor/rubyforge-0.4.4/Manifest.txt
vendor/rubyforge-0.4.4/README.txt
vendor/rubyforge-0.4.4/Rakefile
vendor/rubyforge-0.4.4/bin/rubyforge
vendor/rubyforge-0.4.4/lib/http-access2.rb
vendor/rubyforge-0.4.4/lib/http-access2/cookie.rb
vendor/rubyforge-0.4.4/lib/http-access2/http.rb
vendor/rubyforge-0.4.4/lib/rubyforge.rb
vendor/rubyforge-0.4.4/test/test_rubyforge.rb
diff --git a/Rakefile b/Rakefile
index b37dd19..f05673b 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,17 +1,20 @@
VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/vendor' )
$:.unshift( VENDOR_DIR + '/hoe-1.5.0/lib' )
$:.unshift( VENDOR_DIR + '/rubyforge-0.4.4/lib' )
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
p.extra_deps = %w( mime-types daemons )
-end
\ No newline at end of file
+end
+
+desc "Run the cruise control build"
+task :cruise => [ :test, :check_manifest, :gem ]
\ No newline at end of file
diff --git a/cruise_config.rb b/cruise_config.rb
new file mode 100644
index 0000000..203051e
--- /dev/null
+++ b/cruise_config.rb
@@ -0,0 +1,6 @@
+Project.configure do |project|
+ # Send email notifications about broken and fixed builds to [email protected], [email protected] (default: send to nobody)
+ project.email_notifier.emails = [ '[email protected]' ]
+
+ project.rake_task = 'cruise'
+end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
fd7ca5cf90fab0f8e7436ae9504c91a99a72521d
|
added mime-types-1.15 library to vendor
|
diff --git a/Manifest.txt b/Manifest.txt
index 060994f..f313bdc 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,66 +1,120 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
lib/jack_the_ripper/uri_fix.rb
lib/jack_the_ripper_server.rb
test/jack_the_ripper/test_http_file.rb
test/jack_the_ripper/test_processor.rb
test/test_jack_the_ripper.rb
vendor/hoe-1.5.0/History.txt
vendor/hoe-1.5.0/Manifest.txt
vendor/hoe-1.5.0/README.txt
vendor/hoe-1.5.0/Rakefile
vendor/hoe-1.5.0/bin/sow
vendor/hoe-1.5.0/lib/hoe.rb
vendor/hoe-1.5.0/test/test_hoe.rb
+vendor/mime-types-1.15/ChangeLog
+vendor/mime-types-1.15/Install
+vendor/mime-types-1.15/LICENCE
+vendor/mime-types-1.15/README
+vendor/mime-types-1.15/Rakefile
+vendor/mime-types-1.15/lib/mime/types.rb
+vendor/mime-types-1.15/pre-setup.rb
+vendor/mime-types-1.15/setup.rb
+vendor/mime-types-1.15/tests/tc_mime_type.rb
+vendor/mime-types-1.15/tests/tc_mime_types.rb
+vendor/mime-types-1.15/tests/testall.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
+vendor/right_aws-1.5.0/History.txt
+vendor/right_aws-1.5.0/Manifest.txt
+vendor/right_aws-1.5.0/README.txt
+vendor/right_aws-1.5.0/Rakefile
+vendor/right_aws-1.5.0/lib/awsbase/benchmark_fix.rb
+vendor/right_aws-1.5.0/lib/awsbase/right_awsbase.rb
+vendor/right_aws-1.5.0/lib/awsbase/support.rb
+vendor/right_aws-1.5.0/lib/ec2/right_ec2.rb
+vendor/right_aws-1.5.0/lib/right_aws.rb
+vendor/right_aws-1.5.0/lib/s3/right_s3.rb
+vendor/right_aws-1.5.0/lib/s3/right_s3_interface.rb
+vendor/right_aws-1.5.0/lib/sdb/right_sdb_interface.rb
+vendor/right_aws-1.5.0/lib/sqs/right_sqs.rb
+vendor/right_aws-1.5.0/lib/sqs/right_sqs_interface.rb
+vendor/right_aws-1.5.0/test/ec2/test_helper.rb
+vendor/right_aws-1.5.0/test/ec2/test_right_ec2.rb
+vendor/right_aws-1.5.0/test/http_connection.rb
+vendor/right_aws-1.5.0/test/s3/test_helper.rb
+vendor/right_aws-1.5.0/test/s3/test_right_s3.rb
+vendor/right_aws-1.5.0/test/s3/test_right_s3_stubbed.rb
+vendor/right_aws-1.5.0/test/sdb/test_helper.rb
+vendor/right_aws-1.5.0/test/sdb/test_right_sdb.rb
+vendor/right_aws-1.5.0/test/sqs/test_helper.rb
+vendor/right_aws-1.5.0/test/sqs/test_right_sqs.rb
+vendor/right_aws-1.5.0/test/test_credentials.rb
+vendor/right_aws-1.5.0/test/ts_right_aws.rb
+vendor/right_http_connection-1.2.1/History.txt
+vendor/right_http_connection-1.2.1/Manifest.txt
+vendor/right_http_connection-1.2.1/README.txt
+vendor/right_http_connection-1.2.1/Rakefile
+vendor/right_http_connection-1.2.1/lib/net_fix.rb
+vendor/right_http_connection-1.2.1/lib/right_http_connection.rb
+vendor/right_http_connection-1.2.1/setup.rb
+vendor/rubyforge-0.4.4/History.txt
+vendor/rubyforge-0.4.4/Manifest.txt
+vendor/rubyforge-0.4.4/README.txt
+vendor/rubyforge-0.4.4/Rakefile
+vendor/rubyforge-0.4.4/bin/rubyforge
+vendor/rubyforge-0.4.4/lib/http-access2.rb
+vendor/rubyforge-0.4.4/lib/http-access2/cookie.rb
+vendor/rubyforge-0.4.4/lib/http-access2/http.rb
+vendor/rubyforge-0.4.4/lib/rubyforge.rb
+vendor/rubyforge-0.4.4/test/test_rubyforge.rb
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 13a5f1d..6a5bcbc 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,97 +1,96 @@
+$:.unshift( VENDOR_DIR + '/mime-types-1.15/lib' )
require 'uri'
require 'jack_the_ripper/uri_fix'
require 'net/http'
require 'net/https'
-require 'rubygems'
-gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def logger
self.class.logger
end
def delete
logger.debug "Deleting file #{@path}"
File.unlink( @path ) if File.exist?( @path )
end
def put( uri = nil, redirection_limit = 10 )
if redirection_limit == 0
raise RemoteError, "Too many redirects for PUT: #{uri}"
end
logger.info "PUTing file: #{@uri}"
content_type = MIME::Types.type_for( @path ).first.content_type
result = HTTPFile.send_request( uri || @uri, :put, { 'Content-Type' => content_type }, Base64.encode64( File.read( @path ) ) )
case result
when Net::HTTPSuccess
# ok
logger.info "File PUT successful"
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
put( result[ 'location' ], redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
def logger
JackTheRIPper.logger || Proc.new{ l = Logger.new( STDERR ); l.level = Logger::ERROR; l }.call
end
def get( uri, directory, basename, redirection_limit = 10 )
logger.info "GETing file: #{uri}"
if redirection_limit == 0
raise RemoteError, "Too many redirects for GET: #{uri}"
end
result = send_request( uri, :get )
case result
when Net::HTTPSuccess
logger.info "File GET successful"
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
logger.debug "File stored at #{file_path}"
new( nil, file_path )
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
def send_request( uri, method, headers = {}, body = nil )
uri = URI.parse( uri )
http = Net::HTTP.new( uri.host, uri.port )
http.use_ssl = true if uri.scheme == 'https'
http.start do |h|
logger.debug "HTTP#{ uri.scheme == 'https' ? 'S' : '' } connection started."
h.send_request( method.to_s.upcase, uri.request_uri, body, headers )
end
rescue URI::InvalidURIError => e
raise JackTheRIPper::ProcessorError, "Invalid URI for #{method}: #{uri}"
end
end
end
end
\ No newline at end of file
diff --git a/vendor/mime-types-1.15/ChangeLog b/vendor/mime-types-1.15/ChangeLog
new file mode 100644
index 0000000..9544a47
--- /dev/null
+++ b/vendor/mime-types-1.15/ChangeLog
@@ -0,0 +1,101 @@
+= MIME::Types Change Log
+
+Unless explicitly stated differently are all changes produced by Austin
+Ziegler <[email protected]>.
+
+== MIME::Types 1.15
+* Removed lib/mime/type.rb to form a single MIME::Types database source. It
+ is unlikely that one will ever need MIME::Type without MIME::Types.
+* Re-synchronized the MIME type list with the sources, focusing primarily on
+ the IANA list.
+* Added more detailed source information for MIME::Type objects.
+* Changed MIME::Types from a module to a class with a default instance. There
+ should be no difference in usage.
+* Removed MIME::Types::DATA_VERSION; it is now an attribute on the
+ MIME::Types instance.
+* NOTE: Synchronization with the Perl version of MIME::Types is no longer a
+ priority as of this release. The data format and information has changed.
+* Removed MIME::Types.by_suffix and MIME::Types.by_mediatype.
+
+== MIME::Types 1.13.1
+* Fixed a problem with the installer running tests. This now works.
+* Improved the implementation of MIME::Type.signature?
+* Moved code around to use the class << self idiom instead of always
+ prepending the module/class name.
+* Added two new best-guess implementations of functions found in Perl's
+ MIME::Types implementation (1.13). Do not rely on these until the purpose
+ and implementation is stabilised.
+* Updated the MIME list to reflect changes noted by
+ Ville Skyttä <[email protected]>.
+* Added a new constant to MIME::Types, DATA_VERSION. This will allow the Ruby
+ version number to be updated separately from the Perl version while keeping
+ the MIME Type list version in sync.
+
+== MIME::Types 1.13
+ ! WARNING: This version changes the API of MIME::Types !
+ ! WARNING: This version is compatible with Ruby 1.8 and higher ONLY !
+* Removed dependency on InstallPackage; offering 1.13 as either .tar.gz or
+ .gem.
+* Split into two files, mime/type.rb and mime/types.rb. This will make
+ maintaining the list of changes easier.
+* Changed the MIME::Type construction API. Accepts only a single String
+ argument (but does no named type-checking) and yields self.
+* Removed private methods #init_extensions, #init_encoding, and #init_system
+ and replaced with #extensions=, #encoding=, and #system=.
+* Added #default_encoding to return 'quoted-printable' or 'base64' depending
+ on the media type of the MIME type.
+* Added #raw_media_type and #raw_sub_type to provide the non-simplified
+ versions of the media type and subtype.
+* Alternative constructors MIME::Type.from_array, MIME::Type.from_hash, and
+ MIME::Type.from_mime_type added to compensate for the removal of named type
+ checking in the original constructor.
+* Added #to_str, #to_a, and #to_hash methods. The latter two will provide
+ output suitable for use in #from_array and #from_hash.
+* Removed "binary" encoding and enforced the use of a valid encoding string.
+* Added #system? returning true if the MIME::Type is an OS-specific
+ MIME::Type.
+* Added #platform? returning true if the MIME::Type is an OS-specific
+ MIME::Type for the current RUBY_PLATFORM.
+* Added #like? returning true if the simplified type matches the other value
+ provided. #<'application/x-excel'>.like?('application/excel') is true.
+* Added #complete? returning true if the MIME::Type specifies an extension
+ list.
+* Updated the MIME type list to reflect additions by Mark Overmeer for Perl's
+ MIME::Types 1.12 and the official IANA list as of 2004.04.06. A number of
+ formerly "registered" MIME types are now no longer registered (e.g.,
+ application/excel is now application/x-excel). This ensures that the
+ simplified type still works with applications, but does not report an
+ unregistered type as registered.
+* Restored MIME type list to Mark Overmeer's format to facilitate easy
+ exchange between the two projects.
+* Added additional unit tests from Mark Overmeer's 1.12 version.
+
+== MIME::Types 1.07
+* Changed version numbering to match Perl MIME::Types 1.07.
+* Re-synchronized with Mark Overmeer's list in Perl PMIME::Types 1.07.
+ - [NN Poster] updated the attributes for the PGP types.
+
+== MIME::Types 1.005
+* Changed to Phil Thomson's InstallPackage.
+* Added several types from Perl MIME::Types 1.005.
+* Cleaned up data format; some data formats will show up with proper data now.
+
+== MIME::Types 1.004
+* Updated to match Perl MIME::Types 1.004, links credited to Dan Puro. Adds
+ new reference list to http://www.indiana.edu/cgi-bin-local/mimetypes
+* Removed InvalidType and replaced with TypeError.
+* Changed instances of #type to #class.
+* Cleaned up how simplified versions are created.
+
+== MIME::Types 1.003
+* Initial release based on Perl MIME::Types 1.003.
+
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2006 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: ChangeLog,v 1.3 2006/02/12 21:27:21 austin Exp $
+#++
+# vim: sts=2 sw=2 ts=4 et ai tw=77
diff --git a/vendor/mime-types-1.15/Install b/vendor/mime-types-1.15/Install
new file mode 100644
index 0000000..690edb8
--- /dev/null
+++ b/vendor/mime-types-1.15/Install
@@ -0,0 +1,16 @@
+Installing this package is as simple as:
+
+ % ruby setup.rb
+
+Alternatively, you can use the RubyGem version of MIME::Types available as
+mime-types-1.15.gem from the usual sources.
+
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2006 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: Install,v 1.3 2006/02/12 21:27:21 austin Exp $
+#++
+# vim: sts=2 sw=2 ts=4 et ai tw=77
diff --git a/vendor/mime-types-1.15/LICENCE b/vendor/mime-types-1.15/LICENCE
new file mode 100644
index 0000000..fcda4a6
--- /dev/null
+++ b/vendor/mime-types-1.15/LICENCE
@@ -0,0 +1,18 @@
+= MIME::Types for Ruby
+Homepage:: http://rubyforge.org/projects/mime-types/
+Copyright:: Copyright (c) 2003 - 2006 Austin Ziegler.
+Summary:: Ruby's licence, Perl Aristic Licence,
+ GNU GPL version 2 (or later)
+
+The text of the Ruby licence can be found at:
+http://www.ruby-lang.org/en/LICENSE.txt
+
+The text of the Perl Artistic Licence can be found at:
+http://www.perl.com/pub/a/language/misc/Artistic.html
+
+The text of the GNU GPL can be found at: http://www.gnu.org/copyleft/gpl.html
+
+If you do not accept one of these licences, you may not use this software.
+
+$Id: LICENCE,v 1.1 2006/02/12 21:27:21 austin Exp $
+# vim: sts=2 sw=2 ts=4 et ai tw=77
diff --git a/vendor/mime-types-1.15/README b/vendor/mime-types-1.15/README
new file mode 100644
index 0000000..f940817
--- /dev/null
+++ b/vendor/mime-types-1.15/README
@@ -0,0 +1,30 @@
+= README: Mime::Types for Ruby
+This library allows for the identification of a file's likely MIME content
+type. This is release 1.15. The identification of MIME content type is based
+on a file's filename extensions.
+
+MIME::Types for Ruby originally based on and synchronized with MIME::Types
+for Perl by Mark Overmeer, copyright 2001 - 2005. As of version 1.15, the
+data format for the MIME::Type list has changed and the synchronization will
+no longer happen.
+
+Homepage:: http://mime-types.rubyforge.org/
+Copyright:: 2002 - 2006, Austin Ziegler
+ Based on prior work copyright Mark Overmeer
+
+== Licensing
+MIME::Types is available under three disjunctive licences, as detailed in the
+LICENCE file.
+
+== Requirements
+MIME::Types requires Ruby 1.8.2 or better.
+
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2005 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: Readme,v 1.3 2006/02/12 21:27:21 austin Exp $
+#++
+# vim: sts=2 sw=2 ts=4 et ai tw=77
diff --git a/vendor/mime-types-1.15/Rakefile b/vendor/mime-types-1.15/Rakefile
new file mode 100644
index 0000000..da024c2
--- /dev/null
+++ b/vendor/mime-types-1.15/Rakefile
@@ -0,0 +1,208 @@
+#! /usr/bin/env rake
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2005 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: Rakefile,v 1.8 2006/02/12 22:05:20 austin Exp $
+#++
+require 'meta_project'
+require 'rake/gempackagetask'
+require 'rake/contrib/xforge'
+require 'rake/clean'
+
+require 'gmailer'
+
+require 'archive/tar/minitar'
+require 'zlib'
+
+$LOAD_PATH.unshift('lib')
+
+require 'mime/types'
+
+$version = MIME::Types::VERSION
+$name = MIME::Types.to_s
+$project = MetaProject::Project::XForge::RubyForge.new('mime-types')
+$distdir = "mime-types-#$version"
+$tardist = "../#$distdir.tar.gz"
+
+$release_date = nil
+$release_date = Time.parse(ENV['RELEASE_DATE']) if ENV['RELEASE_DATE']
+
+desc "Run the tests for #$name."
+task :test do |t|
+ require 'test/unit/testsuite'
+ require 'test/unit/ui/console/testrunner'
+
+ runner = Test::Unit::UI::Console::TestRunner
+
+ $LOAD_PATH.unshift('tests')
+ $stderr.puts "Checking for test cases:" if t.verbose
+ Dir['tests/tc_*.rb'].each do |testcase|
+ $stderr.puts "\t#{testcase}" if t.verbose
+ load testcase
+ end
+
+ suite = Test::Unit::TestSuite.new($name)
+
+ ObjectSpace.each_object(Class) do |testcase|
+ suite << testcase.suite if testcase < Test::Unit::TestCase
+ end
+
+ runner.run(suite)
+end
+
+desc "Look for TODO and FIXME tags in the code"
+task :todo do
+ rx = %r{#.*(FIXME|TODO|TBD|DEPRECATED)}
+ Pathname.new(File.dirname(__FILE__)).egrep(rx) do |match|
+ puts match
+ end
+end
+
+spec = eval(File.read("mime-types.gemspec"))
+spec.version = $version
+desc "Build the RubyGem for #$name."
+task :gem => [ :test ]
+Rake::GemPackageTask.new(spec) do |g|
+ g.need_tar = false
+ g.need_zip = false
+ g.package_dir = ".."
+end
+
+desc "Build a #$name .tar.gz distribution."
+task :tar => [ $tardist ]
+file $tardist => [ :test ] do |t|
+ current = File.basename(Dir.pwd)
+ Dir.chdir("..") do
+ begin
+ files = %W(bin/**/* lib/**/* tests/**/* ChangeLog README LICENCE
+ Rakefile mime-types.gemspec setup.rb pre-setup.rb)
+ files = FileList[files.map { |file| File.join(current, file) }].to_a
+ files.map! do |dd|
+ ddnew = dd.gsub(/^#{current}/, $distdir)
+ mtime = $release_date || File.stat(dd).mtime
+ if File.directory?(dd)
+ { :name => ddnew, :mode => 0755, :dir => true, :mtime => mtime }
+ else
+ if dd =~ %r{bin/}
+ mode = 0755
+ else
+ mode = 0644
+ end
+ data = File.open(dd, "rb") { |ff| ff.read }
+ { :name => ddnew, :mode => mode, :data => data, :size =>
+ data.size, :mtime => mtime }
+ end
+ end
+
+ ff = File.open(t.name.gsub(%r{^\.\./}o, ''), "wb")
+ gz = Zlib::GzipWriter.new(ff)
+ tw = Archive::Tar::Minitar::Writer.new(gz)
+
+ files.each do |entry|
+ if entry[:dir]
+ tw.mkdir(entry[:name], entry)
+ else
+ tw.add_file_simple(entry[:name], entry) { |os| os.write(entry[:data]) }
+ end
+ end
+ ensure
+ tw.close if tw
+ gz.finish if gz
+ ff.close
+ end
+ end
+end
+task $tardist => [ :test ]
+
+desc "Build the RDoc documentation for #$name."
+task :docs do
+ require 'rdoc/rdoc'
+ rdoc_options = %W(--title #$name --main README --line-numbers)
+ files = FileList[*%w(README LICENCE ChangeLog bin/**/*.rb lib/**/*.rb)]
+ rdoc_options += files.to_a
+ RDoc::RDoc.new.document(rdoc_options)
+end
+
+task :verify_rubyforge do
+ raise "RUBYFORGE_USER environment variable not set!" unless ENV['RUBYFORGE_USER']
+ raise "RUBYFORGE_PASSWORD environment variable not set!" unless ENV['RUBYFORGE_PASSWORD']
+end
+
+task :verify_gmail do
+ raise "GMAIL_USER environment variable not set!" unless ENV['GMAIL_USER']
+ raise "GMAIL_PASSWORD environment variable not set!" unless ENV['GMAIL_PASSWORD']
+end
+
+desc "Release files on RubyForge."
+task :release_files => [ :verify_rubyforge, :tar, :gem ] do
+ release_files = FileList[$tardist, "../#$distdir.gem"]
+ Rake::XForge::Release.new($project) do |release|
+ release.user_name = ENV['RUBYFORGE_USER']
+ release.password = ENV['RUBYFORGE_PASSWORD']
+ release.files = release_files.to_a
+ release.release_name = "#$name #$version"
+ release.package_name = "mime-types"
+
+ notes = []
+ File.open("README") do |file|
+ file.each do |line|
+ line.chomp!
+ line.gsub!(/^#.*$/, '') and next
+ notes << line
+ end
+ end
+ release.release_notes = notes.join("\n")
+
+ changes = []
+ File.open("ChangeLog") do |file|
+ current = true
+
+ file.each do |line|
+ line.chomp!
+ current = false if current and line =~ /^==/
+ break if line.empty? and not current
+ changes << line
+ end
+ end
+ release.release_changes = changes.join("\n")
+ end
+end
+
+desc "Publish news on RubyForge"
+task :publish_news => [ :verify_rubyforge, :tar, :gem ] do
+ Rake::XForge::NewsPublisher.new($project) do |news|
+ news.user_name = ENV['RUBYFORGE_USER']
+ news.password = ENV['RUBYFORGE_PASSWORD']
+ news.subject = "#$name #$version Released"
+ news.changes_file = nil
+
+ details = []
+ File.open("Release-Announcement") do |file|
+ file.each do |line|
+ line.chomp!
+ break if line =~ /^=/
+ details << line
+ end
+ end
+ news.details = details.join("\n")
+ end
+end
+
+desc "Post a release announcement via GMail."
+task :email_announcement => [ :verify_gmail ] do
+ GMailer.connect(ENV["GMAIL_USER"], ENV["GMAIL_PASSWORD"]) do |gmail|
+ gmail.send :to => "[email protected]",
+ :subject => "[ANN] #$name #$version",
+ :body => File.read("Release-Announcement")
+ end
+end
+
+desc "Release the latest version."
+task :release => [ :verify_rubyforge, :verify_gmail, :release_files,
+ :publish_news, :email_announcement, :docs ]
+
+desc "Build everything."
+task :default => [ :tar, :gem ]
diff --git a/vendor/mime-types-1.15/lib/mime/types.rb b/vendor/mime-types-1.15/lib/mime/types.rb
new file mode 100644
index 0000000..28a8599
--- /dev/null
+++ b/vendor/mime-types-1.15/lib/mime/types.rb
@@ -0,0 +1,1558 @@
+#--
+# MIME::Types for Ruby
+# Version 1.15
+#
+# Copyright (c) 2002 - 2004 Austin Ziegler
+#
+# $Id: types.rb,v 1.4 2006/02/12 21:27:22 austin Exp $
+#
+# The ChangeLog contains all details on revisions.
+#++
+
+# The namespace for MIME applications, tools, and libraries.
+module MIME
+ # Reflects a MIME Content-Type which is in invalid format (e.g., it isn't
+ # in the form of type/subtype).
+ class InvalidContentType < RuntimeError; end
+
+ # The definition of one MIME content-type.
+ #
+ # == Usage
+ # require 'mime/types'
+ #
+ # plaintext = MIME::Types['text/plain']
+ # print plaintext.media_type # => 'text'
+ # print plaintext.sub_type # => 'plain'
+ #
+ # puts plaintext.extensions.join(" ") # => 'asc txt c cc h hh cpp'
+ #
+ # puts plaintext.encoding # => 8bit
+ # puts plaintext.binary? # => false
+ # puts plaintext.ascii? # => true
+ # puts plaintext == 'text/plain' # => true
+ # puts MIME::Type.simplified('x-appl/x-zip') # => 'appl/zip'
+ #
+ class Type
+ VERSION = '1.15'
+
+ include Comparable
+
+ MEDIA_TYPE_RE = %r{([-\w.+]+)/([-\w.+]*)}o #:nodoc:
+ UNREG_RE = %r{[Xx]-}o #:nodoc:
+ ENCODING_RE = %r{(?:base64|7bit|8bit|quoted\-printable)}o #:nodoc:
+ PLATFORM_RE = %r|#{RUBY_PLATFORM}|o #:nodoc:
+
+ SIGNATURES = %w(application/pgp-keys application/pgp
+ application/pgp-signature application/pkcs10
+ application/pkcs7-mime application/pkcs7-signature
+ text/vcard) #:nodoc:
+
+ IANA_URL = "http://www.iana.org/assignments/media-types/%s/%s"
+ RFC_URL = "http://rfc-editor.org/rfc/rfc%s.txt"
+ DRAFT_URL = "http://datatracker.ietf.org/public/idindex.cgi?command=id_details&filename=%s"
+ LTSW_URL = "http://www.ltsw.se/knbase/internet/%s.htp"
+ CONTACT_URL = "http://www.iana.org/assignments/contact-people.htm#%s"
+
+ # Returns +true+ if the simplified type matches the current
+ def like?(other)
+ if other.respond_to?(:simplified)
+ @simplified == other.simplified
+ else
+ @simplified == Type.simplified(other)
+ end
+ end
+
+ # Compares the MIME::Type against the exact content type or the
+ # simplified type (the simplified type will be used if comparing against
+ # something that can be treated as a String with #to_s).
+ def <=>(other) #:nodoc:
+ if other.respond_to?(:content_type)
+ @content_type.downcase <=> other.content_type.downcase
+ elsif other.respond_to?(:to_s)
+ @simplified <=> Type.simplified(other.to_s)
+ else
+ @content_type.downcase <=> other.downcase
+ end
+ end
+
+ # Returns +true+ if the other object is a MIME::Type and the content
+ # types match.
+ def eql?(other) #:nodoc:
+ other.kind_of?(MIME::Type) and self == other
+ end
+
+ # Returns the whole MIME content-type string.
+ #
+ # text/plain => text/plain
+ # x-chemical/x-pdb => x-chemical/x-pdb
+ attr_reader :content_type
+ # Returns the media type of the simplified MIME type.
+ #
+ # text/plain => text
+ # x-chemical/x-pdb => chemical
+ attr_reader :media_type
+ # Returns the media type of the unmodified MIME type.
+ #
+ # text/plain => text
+ # x-chemical/x-pdb => x-chemical
+ attr_reader :raw_media_type
+ # Returns the sub-type of the simplified MIME type.
+ #
+ # text/plain => plain
+ # x-chemical/x-pdb => pdb
+ attr_reader :sub_type
+ # Returns the media type of the unmodified MIME type.
+ #
+ # text/plain => plain
+ # x-chemical/x-pdb => x-pdb
+ attr_reader :raw_sub_type
+ # The MIME types main- and sub-label can both start with <tt>x-</tt>,
+ # which indicates that it is a non-registered name. Of course, after
+ # registration this flag can disappear, adds to the confusing
+ # proliferation of MIME types. The simplified string has the <tt>x-</tt>
+ # removed and are translated to lowercase.
+ #
+ # text/plain => text/plain
+ # x-chemical/x-pdb => chemical/pdb
+ attr_reader :simplified
+ # The list of extensions which are known to be used for this MIME::Type.
+ # Non-array values will be coerced into an array with #to_a. Array
+ # values will be flattened and +nil+ values removed.
+ attr_accessor :extensions
+ remove_method :extensions= ;
+ def extensions=(ext) #:nodoc:
+ @extensions = ext.to_a.flatten.compact
+ end
+
+ # The encoding (7bit, 8bit, quoted-printable, or base64) required to
+ # transport the data of this content type safely across a network, which
+ # roughly corresponds to Content-Transfer-Encoding. A value of +nil+ or
+ # <tt>:default</tt> will reset the #encoding to the #default_encoding
+ # for the MIME::Type. Raises ArgumentError if the encoding provided is
+ # invalid.
+ #
+ # If the encoding is not provided on construction, this will be either
+ # 'quoted-printable' (for text/* media types) and 'base64' for eveything
+ # else.
+ attr_accessor :encoding
+ remove_method :encoding= ;
+ def encoding=(enc) #:nodoc:
+ if enc.nil? or enc == :default
+ @encoding = self.default_encoding
+ elsif enc =~ ENCODING_RE
+ @encoding = enc
+ else
+ raise ArgumentError, "The encoding must be nil, :default, base64, 7bit, 8bit, or quoted-printable."
+ end
+ end
+
+ # The regexp for the operating system that this MIME::Type is specific
+ # to.
+ attr_accessor :system
+ remove_method :system= ;
+ def system=(os) #:nodoc:
+ if os.nil? or os.kind_of?(Regexp)
+ @system = os
+ else
+ @system = %r|#{os}|
+ end
+ end
+ # Returns the default encoding for the MIME::Type based on the media
+ # type.
+ attr_reader :default_encoding
+ remove_method :default_encoding
+ def default_encoding
+ (@media_type == 'text') ? 'quoted-printable' : 'base64'
+ end
+
+ # Returns the media type or types that should be used instead of this
+ # media type, if it is obsolete. If there is no replacement media type,
+ # or it is not obsolete, +nil+ will be returned.
+ attr_reader :use_instead
+ remove_method :use_instead
+ def use_instead
+ return nil unless @obsolete
+ @use_instead
+ end
+
+ # Returns +true+ if the media type is obsolete.
+ def obsolete?
+ @obsolete ? true : false
+ end
+ # Sets the obsolescence indicator for this media type.
+ attr_writer :obsolete
+
+ # The documentation for this MIME::Type. Documentation about media
+ # types will be found on a media type definition as a comment.
+ # Documentation will be found through #docs.
+ attr_accessor :docs
+ remove_method :docs= ;
+ def docs=(d)
+ if d
+ a = d.scan(%r{use-instead:#{MEDIA_TYPE_RE}})
+
+ if a.empty?
+ @use_instead = nil
+ else
+ @use_instead = a.map { |el| "#{el[0]}/#{el[1]}" }
+ end
+ end
+ end
+
+ # The encoded URL list for this MIME::Type. See #urls for
+ attr_accessor :url
+ # The decoded URL list for this MIME::Type.
+ # The special URL value IANA will be translated into:
+ # http://www.iana.org/assignments/media-types/<mediatype>/<subtype>
+ #
+ # The special URL value RFC### will be translated into:
+ # http://www.rfc-editor.org/rfc/rfc###.txt
+ #
+ # The special URL value DRAFT:name will be translated into:
+ # https://datatracker.ietf.org/public/idindex.cgi?
+ # command=id_detail&filename=<name>
+ #
+ # The special URL value LTSW will be translated into:
+ # http://www.ltsw.se/knbase/internet/<mediatype>.htp
+ #
+ # The special URL value [token] will be translated into:
+ # http://www.iana.org/assignments/contact-people.htm#<token>
+ #
+ # These values will be accessible through #url, which always returns an
+ # array.
+ def urls
+ @url.map { |el|
+ case el
+ when %r{^IANA$}
+ IANA_URL % [ @media_type, @sub_type ]
+ when %r{^RFC(\d+)$}
+ RFC_URL % $1
+ when %r{^DRAFT:(.+)$}
+ DRAFT_URL % $1
+ when %r{^LTSW$}
+ LTSW_URL % @media_type
+ when %r{^\[([^\]]+)\]}
+ CONTACT_URL % $1
+ else
+ el
+ end
+ }
+ end
+
+ class << self
+ # The MIME types main- and sub-label can both start with <tt>x-</tt>,
+ # which indicates that it is a non-registered name. Of course, after
+ # registration this flag can disappear, adds to the confusing
+ # proliferation of MIME types. The simplified string has the
+ # <tt>x-</tt> removed and are translated to lowercase.
+ def simplified(content_type)
+ matchdata = MEDIA_TYPE_RE.match(content_type)
+
+ if matchdata.nil?
+ simplified = nil
+ else
+ media_type = matchdata.captures[0].downcase.gsub(UNREG_RE, '')
+ subtype = matchdata.captures[1].downcase.gsub(UNREG_RE, '')
+ simplified = "#{media_type}/#{subtype}"
+ end
+ simplified
+ end
+
+ # Creates a MIME::Type from an array in the form of:
+ # [type-name, [extensions], encoding, system]
+ #
+ # +extensions+, +encoding+, and +system+ are optional.
+ #
+ # MIME::Type.from_array("application/x-ruby", ['rb'], '8bit')
+ # MIME::Type.from_array(["application/x-ruby", ['rb'], '8bit'])
+ #
+ # These are equivalent to:
+ #
+ # MIME::Type.new('application/x-ruby') do |t|
+ # t.extensions = %w(rb)
+ # t.encoding = '8bit'
+ # end
+ def from_array(*args) #:yields MIME::Type.new:
+ # Dereferences the array one level, if necessary.
+ args = args[0] if args[0].kind_of?(Array)
+
+ if args.size.between?(1, 8)
+ m = MIME::Type.new(args[0]) do |t|
+ t.extensions = args[1] if args.size > 1
+ t.encoding = args[2] if args.size > 2
+ t.system = args[3] if args.size > 3
+ t.obsolete = args[4] if args.size > 4
+ t.docs = args[5] if args.size > 5
+ t.url = args[6] if args.size > 6
+ t.registered = args[7] if args.size > 7
+ end
+ yield m if block_given?
+ else
+ raise ArgumentError, "Array provided must contain between one and eight elements."
+ end
+ m
+ end
+
+ # Creates a MIME::Type from a hash. Keys are case-insensitive,
+ # dashes may be replaced with underscores, and the internal Symbol
+ # of the lowercase-underscore version can be used as well. That is,
+ # Content-Type can be provided as content-type, Content_Type,
+ # content_type, or :content_type.
+ #
+ # Known keys are <tt>Content-Type</tt>,
+ # <tt>Content-Transfer-Encoding</tt>, <tt>Extensions</tt>, and
+ # <tt>System</tt>.
+ #
+ # MIME::Type.from_hash('Content-Type' => 'text/x-yaml',
+ # 'Content-Transfer-Encoding' => '8bit',
+ # 'System' => 'linux',
+ # 'Extensions' => ['yaml', 'yml'])
+ #
+ # This is equivalent to:
+ #
+ # MIME::Type.new('text/x-yaml') do |t|
+ # t.encoding = '8bit'
+ # t.system = 'linux'
+ # t.extensions = ['yaml', 'yml']
+ # end
+ def from_hash(hash) #:yields MIME::Type.new:
+ type = {}
+ hash.each_pair do |k, v|
+ type[k.to_s.tr('-A-Z', '_a-z').to_sym] = v
+ end
+
+ m = MIME::Type.new(type[:content_type]) do |t|
+ t.extensions = type[:extensions]
+ t.encoding = type[:content_transfer_encoding]
+ t.system = type[:system]
+ t.obsolete = type[:obsolete]
+ t.docs = type[:docs]
+ t.url = type[:url]
+ t.registered = type[:registered]
+ end
+
+ yield m if block_given?
+ m
+ end
+
+ # Essentially a copy constructor.
+ #
+ # MIME::Type.from_mime_type(plaintext)
+ #
+ # is equivalent to:
+ #
+ # MIME::Type.new(plaintext.content_type.dup) do |t|
+ # t.extensions = plaintext.extensions.dup
+ # t.system = plaintext.system.dup
+ # t.encoding = plaintext.encoding.dup
+ # end
+ def from_mime_type(mime_type) #:yields the new MIME::Type:
+ m = MIME::Type.new(mime_type.content_type.dup) do |t|
+ t.extensions = mime_type.extensions.dup
+ t.system = mime_type.system.dup
+ t.encoding = mime_type.encoding.dup
+ end
+
+ yield m if block_given?
+ end
+ end
+
+ # Builds a MIME::Type object from the provided MIME Content Type value
+ # (e.g., 'text/plain' or 'applicaton/x-eruby'). The constructed object
+ # is yielded to an optional block for additional configuration, such as
+ # associating extensions and encoding information.
+ def initialize(content_type) #:yields self:
+ matchdata = MEDIA_TYPE_RE.match(content_type)
+
+ if matchdata.nil?
+ raise InvalidContentType, "Invalid Content-Type provided ('#{content_type}')"
+ end
+
+ @content_type = content_type
+ @raw_media_type = matchdata.captures[0]
+ @raw_sub_type = matchdata.captures[1]
+
+ @simplified = MIME::Type.simplified(@content_type)
+ matchdata = MEDIA_TYPE_RE.match(@simplified)
+ @media_type = matchdata.captures[0]
+ @sub_type = matchdata.captures[1]
+
+ self.extensions = nil
+ self.encoding = :default
+ self.system = nil
+ self.registered = true
+
+ yield self if block_given?
+ end
+
+ # MIME content-types which are not regestered by IANA nor defined in
+ # RFCs are required to start with <tt>x-</tt>. This counts as well for
+ # a new media type as well as a new sub-type of an existing media
+ # type. If either the media-type or the content-type begins with
+ # <tt>x-</tt>, this method will return +false+.
+ def registered?
+ if (@raw_media_type =~ UNREG_RE) || (@raw_sub_type =~ UNREG_RE)
+ false
+ else
+ @registered
+ end
+ end
+ attr_writer :registered #:nodoc:
+
+ # MIME types can be specified to be sent across a network in particular
+ # formats. This method returns +true+ when the MIME type encoding is set
+ # to <tt>base64</tt>.
+ def binary?
+ @encoding == 'base64'
+ end
+
+ # MIME types can be specified to be sent across a network in particular
+ # formats. This method returns +false+ when the MIME type encoding is
+ # set to <tt>base64</tt>.
+ def ascii?
+ not binary?
+ end
+
+ # Returns +true+ when the simplified MIME type is in the list of known
+ # digital signatures.
+ def signature?
+ SIGNATURES.include?(@simplified.downcase)
+ end
+
+ # Returns +true+ if the MIME::Type is specific to an operating system.
+ def system?
+ not @system.nil?
+ end
+
+ # Returns +true+ if the MIME::Type is specific to the current operating
+ # system as represented by RUBY_PLATFORM.
+ def platform?
+ system? and (RUBY_PLATFORM =~ @system)
+ end
+
+ # Returns +true+ if the MIME::Type specifies an extension list,
+ # indicating that it is a complete MIME::Type.
+ def complete?
+ not @extensions.empty?
+ end
+
+ # Returns the MIME type as a string.
+ def to_s
+ @content_type
+ end
+
+ # Returns the MIME type as a string for implicit conversions.
+ def to_str
+ @content_type
+ end
+
+ # Returns the MIME type as an array suitable for use with
+ # MIME::Type.from_array.
+ def to_a
+ [ @content_type, @extensions, @encoding, @system, @obsolete, @docs,
+ @url, registered? ]
+ end
+
+ # Returns the MIME type as an array suitable for use with
+ # MIME::Type.from_hash.
+ def to_hash
+ { 'Content-Type' => @content_type,
+ 'Content-Transfer-Encoding' => @encoding,
+ 'Extensions' => @extensions,
+ 'System' => @system,
+ 'Obsolete' => @obsolete,
+ 'Docs' => @docs,
+ 'URL' => @url,
+ 'Registered' => registered?,
+ }
+ end
+ end
+
+ # = MIME::Types
+ # MIME types are used in MIME-compliant communications, as in e-mail or
+ # HTTP traffic, to indicate the type of content which is transmitted.
+ # MIME::Types provides the ability for detailed information about MIME
+ # entities (provided as a set of MIME::Type objects) to be determined and
+ # used programmatically. There are many types defined by RFCs and vendors,
+ # so the list is long but not complete; don't hesitate to ask to add
+ # additional information. This library follows the IANA collection of MIME
+ # types (see below for reference).
+ #
+ # == Description
+ # MIME types are used in MIME entities, as in email or HTTP traffic. It is
+ # useful at times to have information available about MIME types (or,
+ # inversely, about files). A MIME::Type stores the known information about
+ # one MIME type.
+ #
+ # == Usage
+ # require 'mime/types'
+ #
+ # plaintext = MIME::Types['text/plain']
+ # print plaintext.media_type # => 'text'
+ # print plaintext.sub_type # => 'plain'
+ #
+ # puts plaintext.extensions.join(" ") # => 'asc txt c cc h hh cpp'
+ #
+ # puts plaintext.encoding # => 8bit
+ # puts plaintext.binary? # => false
+ # puts plaintext.ascii? # => true
+ # puts plaintext.obsolete? # => false
+ # puts plaintext.registered? # => true
+ # puts plaintext == 'text/plain' # => true
+ # puts MIME::Type.simplified('x-appl/x-zip') # => 'appl/zip'
+ #
+ # This module is built to conform to the MIME types of RFCs 2045 and 2231.
+ # It follows the official IANA registry at
+ # http://www.iana.org/assignments/media-types/ and
+ # ftp://ftp.iana.org/assignments/media-types with some unofficial types
+ # added from the the collection at
+ # http://www.ltsw.se/knbase/internet/mime.htp
+ #
+ # This is originally based on Perl MIME::Types by Mark Overmeer.
+ #
+ # = Author
+ # Copyright:: Copyright (c) 2002 - 2006 by Austin Ziegler
+ # <[email protected]>
+ # Version:: 1.15
+ # Based On:: Perl
+ # MIME::Types[http://search.cpan.org/author/MARKOV/MIME-Types-1.15/MIME/Types.pm],
+ # Copyright (c) 2001 - 2005 by Mark Overmeer
+ # <[email protected]>.
+ # Licence:: Ruby's, Perl Artistic, or GPL version 2 (or later)
+ # See Also:: http://www.iana.org/assignments/media-types/
+ # http://www.ltsw.se/knbase/internet/mime.htp
+ #
+ class Types
+ # The released version of Ruby MIME::Types
+ VERSION = '1.15'
+
+ # The data version.
+ attr_reader :data_version
+
+ def initialize(data_version = nil)
+ @type_variants = Hash.new { |h, k| h[k] = [] }
+ @extension_index = Hash.new { |h, k| h[k] = [] }
+ end
+
+ def add_type_variant(mime_type) #:nodoc:
+ @type_variants[mime_type.simplified] << mime_type
+ end
+
+ def index_extensions(mime_type) #:nodoc:
+ mime_type.extensions.each { |ext| @extension_index[ext] << mime_type }
+ end
+
+ @__types__ = self.new(VERSION)
+
+ # Returns a list of MIME::Type objects, which may be empty. The optional
+ # flag parameters are :complete (finds only complete MIME::Type objects)
+ # and :platform (finds only MIME::Types for the current platform). It is
+ # possible for multiple matches to be returned for either type (in the
+ # example below, 'text/plain' returns two values -- one for the general
+ # case, and one for VMS systems.
+ #
+ # puts "\nMIME::Types['text/plain']"
+ # MIME::Types['text/plain'].each { |t| puts t.to_a.join(", ") }
+ #
+ # puts "\nMIME::Types[/^image/, :complete => true]"
+ # MIME::Types[/^image/, :complete => true].each do |t|
+ # puts t.to_a.join(", ")
+ # end
+ def [](type_id, flags = {})
+ if type_id.kind_of?(Regexp)
+ matches = []
+ @type_variants.each_key do |k|
+ matches << @type_variants[k] if k =~ type_id
+ end
+ matches.flatten!
+ elsif type_id.kind_of?(MIME::Type)
+ matches = [type_id]
+ else
+ matches = @type_variants[MIME::Type.simplified(type_id)]
+ end
+
+ matches.delete_if { |e| not e.complete? } if flags[:complete]
+ matches.delete_if { |e| not e.platform? } if flags[:platform]
+ matches
+ end
+
+ # Return the list of MIME::Types which belongs to the file based on its
+ # filename extension. If +platform+ is +true+, then only file types that
+ # are specific to the current platform will be returned.
+ #
+ # puts "MIME::Types.type_for('citydesk.xml')
+ # => "#{MIME::Types.type_for('citydesk.xml')}"
+ # puts "MIME::Types.type_for('citydesk.gif')
+ # => "#{MIME::Types.type_for('citydesk.gif')}"
+ def type_for(filename, platform = false)
+ ext = filename.chomp.downcase.gsub(/.*\./o, '')
+ list = @extension_index[ext]
+ list.delete_if { |e| not e.platform? } if platform
+ list
+ end
+
+ # A synonym for MIME::Types.type_for
+ def of(filename, platform = false)
+ type_for(filename, platform)
+ end
+
+ # Add one or more MIME::Type objects to the set of known types. Each
+ # type should be experimental (e.g., 'application/x-ruby'). If the type
+ # is already known, a warning will be displayed.
+ #
+ # <b>Please inform the maintainer of this module when registered types
+ # are missing.</b>
+ def add(*types)
+ types.each do |mime_type|
+ if @type_variants.include?(mime_type.simplified)
+ if @type_variants[mime_type.simplified].include?(mime_type)
+ warn "Type #{mime_type} already registered as a variant of #{mime_type.simplified}."
+ end
+ end
+ add_type_variant(mime_type)
+ index_extensions(mime_type)
+ end
+ end
+
+ class <<self
+ def add_type_variant(mime_type) #:nodoc:
+ @__types__.add_type_variant(mime_type)
+ end
+
+ def index_extensions(mime_type) #:nodoc:
+ @__types__.index_extensions(mime_type)
+ end
+
+ # Returns a list of MIME::Type objects, which may be empty. The
+ # optional flag parameters are :complete (finds only complete
+ # MIME::Type objects) and :platform (finds only MIME::Types for the
+ # current platform). It is possible for multiple matches to be
+ # returned for either type (in the example below, 'text/plain' returns
+      # two values -- one for the general case, and one for VMS systems).
+ #
+ # puts "\nMIME::Types['text/plain']"
+ # MIME::Types['text/plain'].each { |t| puts t.to_a.join(", ") }
+ #
+ # puts "\nMIME::Types[/^image/, :complete => true]"
+ # MIME::Types[/^image/, :complete => true].each do |t|
+ # puts t.to_a.join(", ")
+ # end
+ def [](type_id, flags = {})
+ @__types__[type_id, flags]
+ end
+
+ # Return the list of MIME::Types which belongs to the file based on
+ # its filename extension. If +platform+ is +true+, then only file
+ # types that are specific to the current platform will be returned.
+ #
+ # puts "MIME::Types.type_for('citydesk.xml')
+ # => "#{MIME::Types.type_for('citydesk.xml')}"
+ # puts "MIME::Types.type_for('citydesk.gif')
+ # => "#{MIME::Types.type_for('citydesk.gif')}"
+ def type_for(filename, platform = false)
+ @__types__.type_for(filename, platform)
+ end
+
+ # A synonym for MIME::Types.type_for
+ def of(filename, platform = false)
+ @__types__.type_for(filename, platform)
+ end
+
+ # Add one or more MIME::Type objects to the set of known types. Each
+ # type should be experimental (e.g., 'application/x-ruby'). If the
+ # type is already known, a warning will be displayed.
+ #
+ # <b>Please inform the maintainer of this module when registered types
+ # are missing.</b>
+ def add(*types)
+ @__types__.add(*types)
+ end
+ end
+ end
+end
+
+# Build the type list
+data_mime_type = <<MIME_TYPES
+# What follows is the compiled list of known media types, IANA-registered
+# ones first, one per line.
+#
+# [*][!][os:]mt/st[<ws>@ext][<ws>:enc][<ws>'url-list][<ws>=docs]
+#
+# == *
+# An unofficial MIME type. This should be used if and only if the MIME type
+# is not properly specified.
+#
+# == !
+# An obsolete MIME type.
+#
+# == os:
+# Platform-specific MIME type definition.
+#
+# == mt
+# The media type.
+#
+# == st
+# The media subtype.
+#
+# == <ws>@ext
+# The list of comma-separated extensions.
+#
+# == <ws>:enc
+# The encoding.
+#
+# == <ws>'url-list
+# The list of comma-separated URLs.
+#
+# == <ws>=docs
+# The documentation string.
+#
+# That is, everything except the media type and the subtype is optional.
+#
+# -- Austin Ziegler, 2006.02.12
+
+ # Registered: application/*
+!application/xhtml-voice+xml 'DRAFT:draft-mccobb-xplusv-media-type
+application/CSTAdata+xml 'IANA,[Ecma International Helpdesk]
+application/EDI-Consent 'RFC1767
+application/EDI-X12 'RFC1767
+application/EDIFACT 'RFC1767
+application/activemessage 'IANA,[Shapiro]
+application/andrew-inset 'IANA,[Borenstein]
+application/applefile :base64 'IANA,[Faltstrom]
+application/atom+xml 'RFC4287
+application/atomicmail 'IANA,[Borenstein]
+application/batch-SMTP 'RFC2442
+application/beep+xml 'RFC3080
+application/cals-1840 'RFC1895
+application/ccxml+xml 'DRAFT:draft-froumentin-voice-mediatypes
+application/cnrp+xml 'RFCCNRP
+application/commonground 'IANA,[Glazer]
+application/conference-info+xml 'DRAFT:draft-ietf-sipping-conference-package
+application/cpl+xml 'RFC3880
+application/csta+xml 'IANA,[Ecma International Helpdesk]
+application/cybercash 'IANA,[Eastlake]
+application/dca-rft 'IANA,[Campbell]
+application/dec-dx 'IANA,[Campbell]
+application/dialog-info+xml 'DRAFT:draft-ietf-sipping-dialog-package
+application/dicom 'RFC3240
+application/dns 'RFC4027
+application/dvcs 'RFC3029
+application/ecmascript 'DRAFT:draft-hoehrmann-script-types
+application/epp+xml 'RFC3730
+application/eshop 'IANA,[Katz]
+application/fastinfoset 'IANA,[ITU-T ASN.1 Rapporteur]
+application/fastsoap 'IANA,[ITU-T ASN.1 Rapporteur]
+application/fits 'RFC4047
+application/font-tdpfr @pfr 'RFC3073
+application/http 'RFC2616
+application/hyperstudio @stk 'IANA,[Domino]
+application/iges 'IANA,[Parks]
+application/im-iscomposing+xml 'RFC3994
+application/index 'RFC2652
+application/index.cmd 'RFC2652
+application/index.obj 'RFC2652
+application/index.response 'RFC2652
+application/index.vnd 'RFC2652
+application/iotp 'RFC2935
+application/ipp 'RFC2910
+application/isup 'RFC3204
+application/javascript 'DRAFT:draft-hoehrmann-script-types
+application/kpml-request+xml 'DRAFT:draft-ietf-sipping-kpml
+application/kpml-response+xml 'DRAFT:draft-ietf-sipping-kpml
+application/mac-binhex40 @hqx :8bit 'IANA,[Faltstrom]
+application/macwriteii 'IANA,[Lindner]
+application/marc 'RFC2220
+application/mathematica 'IANA,[Van Nostern]
+application/mbox 'DRAFT:draft-hall-mime-app-mbox
+application/mikey 'RFC3830
+application/mp4 'DRAFT:draft-lim-mpeg4-mime
+application/mpeg4-generic 'RFC3640
+application/mpeg4-iod 'DRAFT:draft-lim-mpeg4-mime
+application/mpeg4-iod-xmt 'DRAFT:draft-lim-mpeg4-mime
+application/msword @doc,dot :base64 'IANA,[Lindner]
+application/news-message-id 'RFC1036,[Spencer]
+application/news-transmission 'RFC1036,[Spencer]
+application/nss 'IANA,[Hammer]
+application/ocsp-request 'RFC2560
+application/ocsp-response 'RFC2560
+application/octet-stream @bin,dms,lha,lzh,exe,class,ani,pgp :base64 'RFC2045,RFC2046
+application/oda @oda 'RFC2045,RFC2046
+application/ogg @ogg 'RFC3534
+application/parityfec 'RFC3009
+application/pdf @pdf :base64 'RFC3778
+application/pgp-encrypted :7bit 'RFC3156
+application/pgp-keys :7bit 'RFC3156
+application/pgp-signature @sig :base64 'RFC3156
+application/pidf+xml 'IANA,RFC3863
+application/pkcs10 @p10 'RFC2311
+application/pkcs7-mime @p7m,p7c 'RFC2311
+application/pkcs7-signature @p7s 'RFC2311
+application/pkix-cert @cer 'RFC2585
+application/pkix-crl @crl 'RFC2585
+application/pkix-pkipath @pkipath 'DRAFT:draft-ietf-tls-rfc3546bis
+application/pkixcmp @pki 'RFC2510
+application/pls+xml 'DRAFT:draft-froumentin-voice-mediatypes
+application/poc-settings+xml 'DRAFT:draft-garcia-sipping-poc-isb-am
+application/postscript @ai,eps,ps :8bit 'RFC2045,RFC2046
+application/prs.alvestrand.titrax-sheet 'IANA,[Alvestrand]
+application/prs.cww @cw,cww 'IANA,[Rungchavalnont]
+application/prs.nprend @rnd,rct 'IANA,[Doggett]
+application/prs.plucker 'IANA,[Janssen]
+application/qsig 'RFC3204
+application/rdf+xml @rdf 'RFC3870
+application/reginfo+xml 'RFC3680
+application/remote-printing 'IANA,RFC1486,[Rose]
+application/resource-lists+xml 'DRAFT:draft-ietf-simple-xcap-list-usage
+application/riscos 'IANA,[Smith]
+application/rlmi+xml 'DRAFT:draft-ietf-simple-event-list
+application/rls-services+xml 'DRAFT:draft-ietf-simple-xcap-list-usage
+application/rtf @rtf 'IANA,[Lindner]
+application/rtx 'DRAFT:draft-ietf-avt-rtp-retransmission
+application/samlassertion+xml 'IANA,[OASIS Security Services Technical Committee (SSTC)]
+application/samlmetadata+xml 'IANA,[OASIS Security Services Technical Committee (SSTC)]
+application/sbml+xml 'RFC3823
+application/sdp 'RFC2327
+application/set-payment 'IANA,[Korver]
+application/set-payment-initiation 'IANA,[Korver]
+application/set-registration 'IANA,[Korver]
+application/set-registration-initiation 'IANA,[Korver]
+application/sgml @sgml 'RFC1874
+application/sgml-open-catalog @soc 'IANA,[Grosso]
+application/shf+xml 'RFC4194
+application/sieve @siv 'RFC3028
+application/simple-filter+xml 'DRAFT:draft-ietf-simple-filter-format
+application/simple-message-summary 'RFC3842
+application/slate 'IANA,[Crowley]
+application/soap+fastinfoset 'IANA,[ITU-T ASN.1 Rapporteur]
+application/soap+xml 'RFC3902
+application/spirits-event+xml 'RFC3910
+application/srgs 'DRAFT:draft-froumentin-voice-mediatypes
+application/srgs+xml 'DRAFT:draft-froumentin-voice-mediatypes
+application/ssml+xml 'DRAFT:draft-froumentin-voice-mediatypes
+application/timestamp-query 'RFC3161
+application/timestamp-reply 'RFC3161
+application/tve-trigger 'IANA,[Welsh]
+application/vemmi 'RFC2122
+application/vnd.3M.Post-it-Notes 'IANA,[O'Brien]
+application/vnd.3gpp.pic-bw-large @plb 'IANA,[Meredith]
+application/vnd.3gpp.pic-bw-small @psb 'IANA,[Meredith]
+application/vnd.3gpp.pic-bw-var @pvb 'IANA,[Meredith]
+application/vnd.3gpp.sms @sms 'IANA,[Meredith]
+application/vnd.FloGraphIt 'IANA,[Floersch]
+application/vnd.Kinar @kne,knp,sdf 'IANA,[Thakkar]
+application/vnd.Mobius.DAF 'IANA,[Kabayama]
+application/vnd.Mobius.DIS 'IANA,[Kabayama]
+application/vnd.Mobius.MBK 'IANA,[Devasia]
+application/vnd.Mobius.MQY 'IANA,[Devasia]
+application/vnd.Mobius.MSL 'IANA,[Kabayama]
+application/vnd.Mobius.PLC 'IANA,[Kabayama]
+application/vnd.Mobius.TXF 'IANA,[Kabayama]
+application/vnd.Quark.QuarkXPress @qxd,qxt,qwd,qwt,qxl,qxb :8bit 'IANA,[Scheidler]
+application/vnd.RenLearn.rlprint 'IANA,[Wick]
+application/vnd.accpac.simply.aso 'IANA,[Leow]
+application/vnd.accpac.simply.imp 'IANA,[Leow]
+application/vnd.acucobol 'IANA,[Lubin]
+application/vnd.acucorp @atc,acutc :7bit 'IANA,[Lubin]
+application/vnd.adobe.xfdf @xfdf 'IANA,[Perelman]
+application/vnd.aether.imp 'IANA,[Moskowitz]
+application/vnd.amiga.ami @ami 'IANA,[Blumberg]
+application/vnd.apple.installer+xml 'IANA,[Bierman]
+application/vnd.audiograph 'IANA,[Slusanschi]
+application/vnd.autopackage 'IANA,[Hearn]
+application/vnd.blueice.multipass @mpm 'IANA,[Holmstrom]
+application/vnd.bmi 'IANA,[Gotoh]
+application/vnd.businessobjects 'IANA,[Imoucha]
+application/vnd.cinderella @cdy 'IANA,[Kortenkamp]
+application/vnd.claymore 'IANA,[Simpson]
+application/vnd.commerce-battelle 'IANA,[Applebaum]
+application/vnd.commonspace 'IANA,[Chandhok]
+application/vnd.contact.cmsg 'IANA,[Patz]
+application/vnd.cosmocaller @cmc 'IANA,[Dellutri]
+application/vnd.criticaltools.wbs+xml @wbs 'IANA,[Spiller]
+application/vnd.ctc-posml 'IANA,[Kohlhepp]
+application/vnd.cups-postscript 'IANA,[Sweet]
+application/vnd.cups-raster 'IANA,[Sweet]
+application/vnd.cups-raw 'IANA,[Sweet]
+application/vnd.curl @curl 'IANA,[Byrnes]
+application/vnd.cybank 'IANA,[Helmee]
+application/vnd.data-vision.rdz @rdz 'IANA,[Fields]
+application/vnd.dna 'IANA,[Searcy]
+application/vnd.dpgraph 'IANA,[Parker]
+application/vnd.dreamfactory @dfac 'IANA,[Appleton]
+application/vnd.dxr 'IANA,[Duffy]
+application/vnd.ecdis-update 'IANA,[Buettgenbach]
+application/vnd.ecowin.chart 'IANA,[Olsson]
+application/vnd.ecowin.filerequest 'IANA,[Olsson]
+application/vnd.ecowin.fileupdate 'IANA,[Olsson]
+application/vnd.ecowin.series 'IANA,[Olsson]
+application/vnd.ecowin.seriesrequest 'IANA,[Olsson]
+application/vnd.ecowin.seriesupdate 'IANA,[Olsson]
+application/vnd.enliven 'IANA,[Santinelli]
+application/vnd.epson.esf 'IANA,[Hoshina]
+application/vnd.epson.msf 'IANA,[Hoshina]
+application/vnd.epson.quickanime 'IANA,[Gu]
+application/vnd.epson.salt 'IANA,[Nagatomo]
+application/vnd.epson.ssf 'IANA,[Hoshina]
+application/vnd.ericsson.quickcall 'IANA,[Tidwell]
+application/vnd.eudora.data 'IANA,[Resnick]
+application/vnd.fdf 'IANA,[Zilles]
+application/vnd.ffsns 'IANA,[Holstage]
+application/vnd.fints 'IANA,[Hammann]
+application/vnd.fluxtime.clip 'IANA,[Winter]
+application/vnd.framemaker 'IANA,[Wexler]
+application/vnd.fsc.weblaunch @fsc :7bit 'IANA,[D.Smith]
+application/vnd.fujitsu.oasys 'IANA,[Togashi]
+application/vnd.fujitsu.oasys2 'IANA,[Togashi]
+application/vnd.fujitsu.oasys3 'IANA,[Okudaira]
+application/vnd.fujitsu.oasysgp 'IANA,[Sugimoto]
+application/vnd.fujitsu.oasysprs 'IANA,[Ogita]
+application/vnd.fujixerox.ddd 'IANA,[Onda]
+application/vnd.fujixerox.docuworks 'IANA,[Taguchi]
+application/vnd.fujixerox.docuworks.binder 'IANA,[Matsumoto]
+application/vnd.fut-misnet 'IANA,[Pruulmann]
+application/vnd.genomatix.tuxedo @txd 'IANA,[Frey]
+application/vnd.grafeq 'IANA,[Tupper]
+application/vnd.groove-account 'IANA,[Joseph]
+application/vnd.groove-help 'IANA,[Joseph]
+application/vnd.groove-identity-message 'IANA,[Joseph]
+application/vnd.groove-injector 'IANA,[Joseph]
+application/vnd.groove-tool-message 'IANA,[Joseph]
+application/vnd.groove-tool-template 'IANA,[Joseph]
+application/vnd.groove-vcard 'IANA,[Joseph]
+application/vnd.hbci @hbci,hbc,kom,upa,pkd,bpd 'IANA,[Hammann]
+application/vnd.hcl-bireports 'IANA,[Serres]
+application/vnd.hhe.lesson-player @les 'IANA,[Jones]
+application/vnd.hp-HPGL @plt,hpgl 'IANA,[Pentecost]
+application/vnd.hp-PCL 'IANA,[Pentecost]
+application/vnd.hp-PCLXL 'IANA,[Pentecost]
+application/vnd.hp-hpid 'IANA,[Gupta]
+application/vnd.hp-hps 'IANA,[Aubrey]
+application/vnd.httphone 'IANA,[Lefevre]
+application/vnd.hzn-3d-crossword 'IANA,[Minnis]
+application/vnd.ibm.MiniPay 'IANA,[Herzberg]
+application/vnd.ibm.afplinedata 'IANA,[Buis]
+application/vnd.ibm.electronic-media @emm 'IANA,[Tantlinger]
+application/vnd.ibm.modcap 'IANA,[Hohensee]
+application/vnd.ibm.rights-management @irm 'IANA,[Tantlinger]
+application/vnd.ibm.secure-container @sc 'IANA,[Tantlinger]
+application/vnd.informix-visionary 'IANA,[Gales]
+application/vnd.intercon.formnet 'IANA,[Gurak]
+application/vnd.intertrust.digibox 'IANA,[Tomasello]
+application/vnd.intertrust.nncp 'IANA,[Tomasello]
+application/vnd.intu.qbo 'IANA,[Scratchley]
+application/vnd.intu.qfx 'IANA,[Scratchley]
+application/vnd.ipunplugged.rcprofile @rcprofile 'IANA,[Ersson]
+application/vnd.irepository.package+xml @irp 'IANA,[Knowles]
+application/vnd.is-xpr 'IANA,[Natarajan]
+application/vnd.japannet-directory-service 'IANA,[Fujii]
+application/vnd.japannet-jpnstore-wakeup 'IANA,[Yoshitake]
+application/vnd.japannet-payment-wakeup 'IANA,[Fujii]
+application/vnd.japannet-registration 'IANA,[Yoshitake]
+application/vnd.japannet-registration-wakeup 'IANA,[Fujii]
+application/vnd.japannet-setstore-wakeup 'IANA,[Yoshitake]
+application/vnd.japannet-verification 'IANA,[Yoshitake]
+application/vnd.japannet-verification-wakeup 'IANA,[Fujii]
+application/vnd.jisp @jisp 'IANA,[Deckers]
+application/vnd.kahootz 'IANA,[Macdonald]
+application/vnd.kde.karbon @karbon 'IANA,[Faure]
+application/vnd.kde.kchart @chrt 'IANA,[Faure]
+application/vnd.kde.kformula @kfo 'IANA,[Faure]
+application/vnd.kde.kivio @flw 'IANA,[Faure]
+application/vnd.kde.kontour @kon 'IANA,[Faure]
+application/vnd.kde.kpresenter @kpr,kpt 'IANA,[Faure]
+application/vnd.kde.kspread @ksp 'IANA,[Faure]
+application/vnd.kde.kword @kwd,kwt 'IANA,[Faure]
+application/vnd.kenameaapp @htke 'IANA,[DiGiorgio-Haag]
+application/vnd.kidspiration @kia 'IANA,[Bennett]
+application/vnd.koan 'IANA,[Cole]
+application/vnd.liberty-request+xml 'IANA,[McDowell]
+application/vnd.llamagraphics.life-balance.desktop @lbd 'IANA,[White]
+application/vnd.llamagraphics.life-balance.exchange+xml @lbe 'IANA,[White]
+application/vnd.lotus-1-2-3 @wks,123 'IANA,[Wattenberger]
+application/vnd.lotus-approach 'IANA,[Wattenberger]
+application/vnd.lotus-freelance 'IANA,[Wattenberger]
+application/vnd.lotus-notes 'IANA,[Laramie]
+application/vnd.lotus-organizer 'IANA,[Wattenberger]
+application/vnd.lotus-screencam 'IANA,[Wattenberger]
+application/vnd.lotus-wordpro 'IANA,[Wattenberger]
+application/vnd.marlin.drm.mdcf 'IANA,[Ellison]
+application/vnd.mcd @mcd 'IANA,[Gotoh]
+application/vnd.mediastation.cdkey 'IANA,[Flurry]
+application/vnd.meridian-slingshot 'IANA,[Wedel]
+application/vnd.mfmp @mfm 'IANA,[Ikeda]
+application/vnd.micrografx.flo @flo 'IANA,[Prevo]
+application/vnd.micrografx.igx @igx 'IANA,[Prevo]
+application/vnd.mif @mif 'IANA,[Wexler]
+application/vnd.minisoft-hp3000-save 'IANA,[Bartram]
+application/vnd.mitsubishi.misty-guard.trustweb 'IANA,[Tanaka]
+application/vnd.mophun.application @mpn 'IANA,[Wennerstrom]
+application/vnd.mophun.certificate @mpc 'IANA,[Wennerstrom]
+application/vnd.motorola.flexsuite 'IANA,[Patton]
+application/vnd.motorola.flexsuite.adsi 'IANA,[Patton]
+application/vnd.motorola.flexsuite.fis 'IANA,[Patton]
+application/vnd.motorola.flexsuite.gotap 'IANA,[Patton]
+application/vnd.motorola.flexsuite.kmr 'IANA,[Patton]
+application/vnd.motorola.flexsuite.ttc 'IANA,[Patton]
+application/vnd.motorola.flexsuite.wem 'IANA,[Patton]
+application/vnd.mozilla.xul+xml @xul 'IANA,[McDaniel]
+application/vnd.ms-artgalry @cil 'IANA,[Slawson]
+application/vnd.ms-asf @asf 'IANA,[Fleischman]
+application/vnd.ms-cab-compressed @cab 'IANA,[Scarborough]
+application/vnd.ms-excel @xls,xlt :base64 'IANA,[Gill]
+application/vnd.ms-fontobject 'IANA,[Scarborough]
+application/vnd.ms-ims 'IANA,[Ledoux]
+application/vnd.ms-lrm @lrm 'IANA,[Ledoux]
+application/vnd.ms-powerpoint @ppt,pps,pot :base64 'IANA,[Gill]
+application/vnd.ms-project @mpp :base64 'IANA,[Gill]
+application/vnd.ms-tnef :base64 'IANA,[Gill]
+application/vnd.ms-works :base64 'IANA,[Gill]
+application/vnd.ms-wpl @wpl :base64 'IANA,[Plastina]
+application/vnd.mseq @mseq 'IANA,[Le Bodic]
+application/vnd.msign 'IANA,[Borcherding]
+application/vnd.music-niff 'IANA,[Butler]
+application/vnd.musician 'IANA,[Adams]
+application/vnd.nervana @ent,entity,req,request,bkm,kcm 'IANA,[Judkins]
+application/vnd.netfpx 'IANA,[Mutz]
+application/vnd.noblenet-directory 'IANA,[Solomon]
+application/vnd.noblenet-sealer 'IANA,[Solomon]
+application/vnd.noblenet-web 'IANA,[Solomon]
+application/vnd.nokia.landmark+wbxml 'IANA,[Nokia]
+application/vnd.nokia.landmark+xml 'IANA,[Nokia]
+application/vnd.nokia.landmarkcollection+xml 'IANA,[Nokia]
+application/vnd.nokia.radio-preset @rpst 'IANA,[Nokia]
+application/vnd.nokia.radio-presets @rpss 'IANA,[Nokia]
+application/vnd.novadigm.EDM 'IANA,[Swenson]
+application/vnd.novadigm.EDX 'IANA,[Swenson]
+application/vnd.novadigm.EXT 'IANA,[Swenson]
+application/vnd.obn 'IANA,[Hessling]
+application/vnd.omads-email+xml 'IANA,[OMA Data Synchronization Working Group]
+application/vnd.omads-file+xml 'IANA,[OMA Data Synchronization Working Group]
+application/vnd.omads-folder+xml 'IANA,[OMA Data Synchronization Working Group]
+application/vnd.osa.netdeploy 'IANA,[Klos]
+application/vnd.osgi.dp 'IANA,[Kriens]
+application/vnd.palm @prc,pdb,pqa,oprc :base64 'IANA,[Peacock]
+application/vnd.paos.xml 'IANA,[Kemp]
+application/vnd.pg.format 'IANA,[Gandert]
+application/vnd.pg.osasli 'IANA,[Gandert]
+application/vnd.piaccess.application-licence 'IANA,[Maneos]
+application/vnd.picsel @efif 'IANA,[Naccarato]
+application/vnd.powerbuilder6 'IANA,[Guy]
+application/vnd.powerbuilder6-s 'IANA,[Guy]
+application/vnd.powerbuilder7 'IANA,[Shilts]
+application/vnd.powerbuilder7-s 'IANA,[Shilts]
+application/vnd.powerbuilder75 'IANA,[Shilts]
+application/vnd.powerbuilder75-s 'IANA,[Shilts]
+application/vnd.preminet 'IANA,[Tenhunen]
+application/vnd.previewsystems.box 'IANA,[Smolgovsky]
+application/vnd.proteus.magazine 'IANA,[Hoch]
+application/vnd.publishare-delta-tree 'IANA,[Ben-Kiki]
+application/vnd.pvi.ptid1 @pti,ptid 'IANA,[Lamb]
+application/vnd.pwg-multiplexed 'RFC3391
+application/vnd.pwg-xhtml-print+xml 'IANA,[Wright]
+application/vnd.rapid 'IANA,[Szekely]
+application/vnd.ruckus.download 'IANA,[Harris]
+application/vnd.s3sms 'IANA,[Tarkkala]
+application/vnd.sealed.doc @sdoc,sdo,s1w 'IANA,[Petersen]
+application/vnd.sealed.eml @seml,sem 'IANA,[Petersen]
+application/vnd.sealed.mht @smht,smh 'IANA,[Petersen]
+application/vnd.sealed.net 'IANA,[Lambert]
+application/vnd.sealed.ppt @sppt,spp,s1p 'IANA,[Petersen]
+application/vnd.sealed.xls @sxls,sxl,s1e 'IANA,[Petersen]
+application/vnd.sealedmedia.softseal.html @stml,stm,s1h 'IANA,[Petersen]
+application/vnd.sealedmedia.softseal.pdf @spdf,spd,s1a 'IANA,[Petersen]
+application/vnd.seemail @see 'IANA,[Webb]
+application/vnd.sema 'IANA,[Hansson]
+application/vnd.shana.informed.formdata 'IANA,[Selzler]
+application/vnd.shana.informed.formtemplate 'IANA,[Selzler]
+application/vnd.shana.informed.interchange 'IANA,[Selzler]
+application/vnd.shana.informed.package 'IANA,[Selzler]
+application/vnd.smaf @mmf 'IANA,[Takahashi]
+application/vnd.sss-cod 'IANA,[Dani]
+application/vnd.sss-dtf 'IANA,[Bruno]
+application/vnd.sss-ntf 'IANA,[Bruno]
+application/vnd.street-stream 'IANA,[Levitt]
+application/vnd.sus-calendar @sus,susp 'IANA,[Niedfeldt]
+application/vnd.svd 'IANA,[Becker]
+application/vnd.swiftview-ics 'IANA,[Widener]
+application/vnd.syncml.+xml 'IANA,[OMA Data Synchronization Working Group]
+application/vnd.syncml.ds.notification 'IANA,[OMA Data Synchronization Working Group]
+application/vnd.triscape.mxs 'IANA,[Simonoff]
+application/vnd.trueapp 'IANA,[Hepler]
+application/vnd.truedoc 'IANA,[Chase]
+application/vnd.ufdl 'IANA,[Manning]
+application/vnd.uiq.theme 'IANA,[Ocock]
+application/vnd.uplanet.alert 'IANA,[Martin]
+application/vnd.uplanet.alert-wbxml 'IANA,[Martin]
+application/vnd.uplanet.bearer-choice 'IANA,[Martin]
+application/vnd.uplanet.bearer-choice-wbxml 'IANA,[Martin]
+application/vnd.uplanet.cacheop 'IANA,[Martin]
+application/vnd.uplanet.cacheop-wbxml 'IANA,[Martin]
+application/vnd.uplanet.channel 'IANA,[Martin]
+application/vnd.uplanet.channel-wbxml 'IANA,[Martin]
+application/vnd.uplanet.list 'IANA,[Martin]
+application/vnd.uplanet.list-wbxml 'IANA,[Martin]
+application/vnd.uplanet.listcmd 'IANA,[Martin]
+application/vnd.uplanet.listcmd-wbxml 'IANA,[Martin]
+application/vnd.uplanet.signal 'IANA,[Martin]
+application/vnd.vcx 'IANA,[T.Sugimoto]
+application/vnd.vectorworks 'IANA,[Pharr]
+application/vnd.vidsoft.vidconference @vsc :8bit 'IANA,[Hess]
+application/vnd.visio @vsd,vst,vsw,vss 'IANA,[Sandal]
+application/vnd.visionary @vis 'IANA,[Aravindakumar]
+application/vnd.vividence.scriptfile 'IANA,[Risher]
+application/vnd.vsf 'IANA,[Rowe]
+application/vnd.wap.sic @sic 'IANA,[WAP-Forum]
+application/vnd.wap.slc @slc 'IANA,[WAP-Forum]
+application/vnd.wap.wbxml @wbxml 'IANA,[Stark]
+application/vnd.wap.wmlc @wmlc 'IANA,[Stark]
+application/vnd.wap.wmlscriptc @wmlsc 'IANA,[Stark]
+application/vnd.webturbo @wtb 'IANA,[Rehem]
+application/vnd.wordperfect @wpd 'IANA,[Scarborough]
+application/vnd.wqd @wqd 'IANA,[Bostrom]
+application/vnd.wrq-hp3000-labelled 'IANA,[Bartram]
+application/vnd.wt.stf 'IANA,[Wohler]
+application/vnd.wv.csp+wbxml @wv 'IANA,[Salmi]
+application/vnd.wv.csp+xml :8bit 'IANA,[Ingimundarson]
+application/vnd.wv.ssp+xml :8bit 'IANA,[Ingimundarson]
+application/vnd.xara 'IANA,[Matthewman]
+application/vnd.xfdl 'IANA,[Manning]
+application/vnd.yamaha.hv-dic @hvd 'IANA,[Yamamoto]
+application/vnd.yamaha.hv-script @hvs 'IANA,[Yamamoto]
+application/vnd.yamaha.hv-voice @hvp 'IANA,[Yamamoto]
+application/vnd.yamaha.smaf-audio @saf 'IANA,[Shinoda]
+application/vnd.yamaha.smaf-phrase @spf 'IANA,[Shinoda]
+application/vnd.yellowriver-custom-menu 'IANA,[Yellow]
+application/vnd.zzazz.deck+xml 'IANA,[Hewett]
+application/voicexml+xml 'DRAFT:draft-froumentin-voice-mediatypes
+application/watcherinfo+xml @wif 'RFC3858
+application/whoispp-query 'RFC2957
+application/whoispp-response 'RFC2958
+application/wita 'IANA,[Campbell]
+application/wordperfect5.1 @wp5,wp 'IANA,[Lindner]
+application/x400-bp 'RFC1494
+application/xcap-att+xml 'DRAFT:draft-ietf-simple-xcap
+application/xcap-caps+xml 'DRAFT:draft-ietf-simple-xcap
+application/xcap-el+xml 'DRAFT:draft-ietf-simple-xcap
+application/xcap-error+xml 'DRAFT:draft-ietf-simple-xcap
+application/xhtml+xml @xhtml :8bit 'RFC3236
+application/xml @xml :8bit 'RFC3023
+application/xml-dtd :8bit 'RFC3023
+application/xml-external-parsed-entity 'RFC3023
+application/xmpp+xml 'RFC3923
+application/xop+xml 'IANA,[Nottingham]
+application/xv+xml 'DRAFT:draft-mccobb-xv-media-type
+application/zip @zip :base64 'IANA,[Lindner]
+
+ # Registered: audio/*
+!audio/vnd.qcelp 'IANA,RFC3625 =use-instead:audio/QCELP
+audio/32kadpcm 'RFC2421,RFC2422
+audio/3gpp @3gpp 'RFC3839,DRAFT:draft-gellens-bucket
+audio/3gpp2 'DRAFT:draft-garudadri-avt-3gpp2-mime
+audio/AMR @amr :base64 'RFC3267
+audio/AMR-WB @awb :base64 'RFC3267
+audio/BV16 'RFC4298
+audio/BV32 'RFC4298
+audio/CN 'RFC3389
+audio/DAT12 'RFC3190
+audio/DVI4 'RFC3555
+audio/EVRC @evc 'RFC3558
+audio/EVRC-QCP 'RFC3625
+audio/EVRC0 'RFC3558
+audio/G722 'RFC3555
+audio/G7221 'RFC3047
+audio/G723 'RFC3555
+audio/G726-16 'RFC3555
+audio/G726-24 'RFC3555
+audio/G726-32 'RFC3555
+audio/G726-40 'RFC3555
+audio/G728 'RFC3555
+audio/G729 'RFC3555
+audio/G729D 'RFC3555
+audio/G729E 'RFC3555
+audio/GSM 'RFC3555
+audio/GSM-EFR 'RFC3555
+audio/L16 @l16 'RFC3555
+audio/L20 'RFC3190
+audio/L24 'RFC3190
+audio/L8 'RFC3555
+audio/LPC 'RFC3555
+audio/MP4A-LATM 'RFC3016
+audio/MPA 'RFC3555
+audio/PCMA 'RFC3555
+audio/PCMU 'RFC3555
+audio/QCELP @qcp 'RFC3555,RFC3625
+audio/RED 'RFC3555
+audio/SMV @smv 'RFC3558
+audio/SMV-QCP 'RFC3625
+audio/SMV0 'RFC3558
+audio/VDVI 'RFC3555
+audio/VMR-WB 'DRAFT:draft-ietf-avt-rtp-vmr-wb,DRAFT:draft-ietf-avt-rtp-vmr-wb-extension
+audio/ac3 'RFC4184
+audio/amr-wb+ 'DRAFT:draft-ietf-avt-rtp-amrwbplus
+audio/basic @au,snd :base64 'RFC2045,RFC2046
+audio/clearmode 'RFC4040
+audio/dsr-es201108 'RFC3557
+audio/dsr-es202050 'RFC4060
+audio/dsr-es202211 'RFC4060
+audio/dsr-es202212 'RFC4060
+audio/iLBC 'RFC3952
+audio/mp4 'DRAFT:draft-lim-mpeg4-mime
+audio/mpa-robust 'RFC3119
+audio/mpeg @mpga,mp2,mp3 :base64 'RFC3003
+audio/mpeg4-generic 'RFC3640
+audio/parityfec 'RFC3009
+audio/prs.sid @sid,psid 'IANA,[Walleij]
+audio/rtx 'DRAFT:draft-ietf-avt-rtp-retransmission
+audio/t140c 'DRAFT:draft-ietf-avt-audio-t140c
+audio/telephone-event 'RFC2833
+audio/tone 'RFC2833
+audio/vnd.3gpp.iufp 'IANA,[Belling]
+audio/vnd.audiokoz 'IANA,[DeBarros]
+audio/vnd.cisco.nse 'IANA,[Kumar]
+audio/vnd.cmles.radio-events 'IANA,[Goulet]
+audio/vnd.cns.anp1 'IANA,[McLaughlin]
+audio/vnd.cns.inf1 'IANA,[McLaughlin]
+audio/vnd.digital-winds @eol :7bit 'IANA,[Strazds]
+audio/vnd.dlna.adts 'IANA,[Heredia]
+audio/vnd.everad.plj @plj 'IANA,[Cicelsky]
+audio/vnd.lucent.voice @lvp 'IANA,[Vaudreuil]
+audio/vnd.nokia.mobile-xmf @mxmf 'IANA,[Nokia Corporation]
+audio/vnd.nortel.vbk @vbk 'IANA,[Parsons]
+audio/vnd.nuera.ecelp4800 @ecelp4800 'IANA,[Fox]
+audio/vnd.nuera.ecelp7470 @ecelp7470 'IANA,[Fox]
+audio/vnd.nuera.ecelp9600 @ecelp9600 'IANA,[Fox]
+audio/vnd.octel.sbc 'IANA,[Vaudreuil]
+audio/vnd.rhetorex.32kadpcm 'IANA,[Vaudreuil]
+audio/vnd.sealedmedia.softseal.mpeg @smp3,smp,s1m 'IANA,[Petersen]
+audio/vnd.vmx.cvsd 'IANA,[Vaudreuil]
+
+ # Registered: image/*
+image/cgm 'IANA =Computer Graphics Metafile [Francis]
+image/fits 'RFC4047
+image/g3fax 'RFC1494
+image/gif @gif :base64 'RFC2045,RFC2046
+image/ief @ief :base64 'RFC1314 =Image Exchange Format
+image/jp2 @jp2 :base64 'IANA,RFC3745
+image/jpeg @jpeg,jpg,jpe :base64 'RFC2045,RFC2046
+image/jpm @jpm :base64 'IANA,RFC3745
+image/jpx @jpx :base64 'IANA,RFC3745
+image/naplps 'IANA,[Ferber]
+image/png @png :base64 'IANA,[Randers-Pehrson]
+image/prs.btif 'IANA,[Simon]
+image/prs.pti 'IANA,[Laun]
+image/t38 'RFC3362
+image/tiff @tiff,tif :base64 'RFC3302 =Tag Image File Format
+image/tiff-fx 'RFC3950 =Tag Image File Format Fax eXtended
+image/vnd.adobe.photoshop 'IANA,[Scarborough]
+image/vnd.cns.inf2 'IANA,[McLaughlin]
+image/vnd.djvu @djvu,djv 'IANA,[Bottou]
+image/vnd.dwg @dwg 'IANA,[Moline]
+image/vnd.dxf 'IANA,[Moline]
+image/vnd.fastbidsheet 'IANA,[Becker]
+image/vnd.fpx 'IANA,[Spencer]
+image/vnd.fst 'IANA,[Fuldseth]
+image/vnd.fujixerox.edmics-mmr 'IANA,[Onda]
+image/vnd.fujixerox.edmics-rlc 'IANA,[Onda]
+image/vnd.globalgraphics.pgb @pgb 'IANA,[Bailey]
+image/vnd.microsoft.icon @ico 'IANA,[Butcher]
+image/vnd.mix 'IANA,[Reddy]
+image/vnd.ms-modi @mdi 'IANA,[Vaughan]
+image/vnd.net-fpx 'IANA,[Spencer]
+image/vnd.sealed.png @spng,spn,s1n 'IANA,[Petersen]
+image/vnd.sealedmedia.softseal.gif @sgif,sgi,s1g 'IANA,[Petersen]
+image/vnd.sealedmedia.softseal.jpg @sjpg,sjp,s1j 'IANA,[Petersen]
+image/vnd.svf 'IANA,[Moline]
+image/vnd.wap.wbmp @wbmp 'IANA,[Stark]
+image/vnd.xiff 'IANA,[S.Martin]
+
+ # Registered: message/*
+message/CPIM 'RFC3862
+message/delivery-status 'RFC1894
+message/disposition-notification 'RFC2298
+message/external-body :8bit 'RFC2046
+message/http 'RFC2616
+message/news :8bit 'RFC1036,[H.Spencer]
+message/partial :8bit 'RFC2046
+message/rfc822 :8bit 'RFC2046
+message/s-http 'RFC2660
+message/sip 'RFC3261
+message/sipfrag 'RFC3420
+message/tracking-status 'RFC3886
+
+ # Registered: model/*
+model/iges @igs,iges 'IANA,[Parks]
+model/mesh @msh,mesh,silo 'RFC2077
+model/vnd.dwf 'IANA,[Pratt]
+model/vnd.flatland.3dml 'IANA,[Powers]
+model/vnd.gdl 'IANA,[Babits]
+model/vnd.gs-gdl 'IANA,[Babits]
+model/vnd.gtw 'IANA,[Ozaki]
+model/vnd.mts 'IANA,[Rabinovitch]
+model/vnd.parasolid.transmit.binary @x_b,xmt_bin 'IANA,[Parasolid]
+model/vnd.parasolid.transmit.text @x_t,xmt_txt :quoted-printable 'IANA,[Parasolid]
+model/vnd.vtu 'IANA,[Rabinovitch]
+model/vrml @wrl,vrml 'RFC2077
+
+ # Registered: multipart/*
+multipart/alternative :8bit 'RFC2045,RFC2046
+multipart/appledouble :8bit 'IANA,[Faltstrom]
+multipart/byteranges 'RFC2068
+multipart/digest :8bit 'RFC2045,RFC2046
+multipart/encrypted 'RFC1847
+multipart/form-data 'RFC2388
+multipart/header-set 'IANA,[Crocker]
+multipart/mixed :8bit 'RFC2045,RFC2046
+multipart/parallel :8bit 'RFC2045,RFC2046
+multipart/related 'RFC2387
+multipart/report 'RFC1892
+multipart/signed 'RFC1847
+multipart/voice-message 'RFC2421,RFC2423
+
+ # Registered: text/*
+!text/ecmascript 'DRAFT:draft-hoehrmann-script-types
+!text/javascript 'DRAFT:draft-hoehrmann-script-types
+text/calendar 'RFC2445
+text/css @css :8bit 'RFC2318
+text/csv @csv :8bit 'RFC4180
+text/directory 'RFC2425
+text/dns 'RFC4027
+text/enriched 'RFC1896
+text/html @html,htm,htmlx,shtml,htx :8bit 'RFC2854
+text/parityfec 'RFC3009
+text/plain @txt,asc,c,cc,h,hh,cpp,hpp,dat,hlp 'RFC2046,RFC3676
+text/prs.fallenstein.rst @rst 'IANA,[Fallenstein]
+text/prs.lines.tag 'IANA,[Lines]
+text/RED 'RFC4102
+text/rfc822-headers 'RFC1892
+text/richtext @rtx :8bit 'RFC2045,RFC2046
+text/rtf @rtf :8bit 'IANA,[Lindner]
+text/rtx 'DRAFT:draft-ietf-avt-rtp-retransmission
+text/sgml @sgml,sgm 'RFC1874
+text/t140 'RFC4103
+text/tab-separated-values @tsv 'IANA,[Lindner]
+text/troff @t,tr,roff,troff :8bit 'DRAFT:draft-lilly-text-troff
+text/uri-list 'RFC2483
+text/vnd.abc 'IANA,[Allen]
+text/vnd.curl 'IANA,[Byrnes]
+text/vnd.DMClientScript 'IANA,[Bradley]
+text/vnd.esmertec.theme-descriptor 'IANA,[Eilemann]
+text/vnd.fly 'IANA,[Gurney]
+text/vnd.fmi.flexstor 'IANA,[Hurtta]
+text/vnd.in3d.3dml 'IANA,[Powers]
+text/vnd.in3d.spot 'IANA,[Powers]
+text/vnd.IPTC.NewsML '[IPTC]
+text/vnd.IPTC.NITF '[IPTC]
+text/vnd.latex-z 'IANA,[Lubos]
+text/vnd.motorola.reflex 'IANA,[Patton]
+text/vnd.ms-mediapackage 'IANA,[Nelson]
+text/vnd.net2phone.commcenter.command @ccc 'IANA,[Xie]
+text/vnd.sun.j2me.app-descriptor @jad :8bit 'IANA,[G.Adams]
+text/vnd.wap.si @si 'IANA,[WAP-Forum]
+text/vnd.wap.sl @sl 'IANA,[WAP-Forum]
+text/vnd.wap.wml @wml 'IANA,[Stark]
+text/vnd.wap.wmlscript @wmls 'IANA,[Stark]
+text/xml @xml,dtd :8bit 'RFC3023
+text/xml-external-parsed-entity 'RFC3023
+vms:text/plain @doc :8bit
+
+ # Registered: video/*
+video/3gpp @3gp,3gpp 'RFC3839,DRAFT:draft-gellens-mime-bucket
+video/3gpp-tt 'DRAFT:draft-ietf-avt-rtp-3gpp-timed-text
+video/3gpp2 'DRAFT:draft-garudadri-avt-3gpp2-mime
+video/BMPEG 'RFC3555
+video/BT656 'RFC3555
+video/CelB 'RFC3555
+video/DV 'RFC3189
+video/H261 'RFC3555
+video/H263 'RFC3555
+video/H263-1998 'RFC3555
+video/H263-2000 'RFC3555
+video/H264 'RFC3984
+video/JPEG 'RFC3555
+video/MJ2 @mj2,mjp2 'RFC3745
+video/MP1S 'RFC3555
+video/MP2P 'RFC3555
+video/MP2T 'RFC3555
+video/mp4 'DRAFT:draft-lim-mpeg4-mime
+video/MP4V-ES 'RFC3016
+video/mpeg @mp2,mpe,mp3g,mpg :base64 'RFC2045,RFC2046
+video/mpeg4-generic 'RFC3640
+video/MPV 'RFC3555
+video/nv 'RFC3555
+video/parityfec 'RFC3009
+video/pointer 'RFC2862
+video/quicktime @qt,mov :base64 'IANA,[Lindner]
+video/raw 'RFC4175
+video/rtx 'DRAFT:draft-ietf-avt-rtp-retransmission
+video/SMPTE292M 'RFC3497
+video/vnd.dlna.mpeg-tts 'IANA,[Heredia]
+video/vnd.fvt 'IANA,[Fuldseth]
+video/vnd.motorola.video 'IANA,[McGinty]
+video/vnd.motorola.videop 'IANA,[McGinty]
+video/vnd.mpegurl @mxu,m4u :8bit 'IANA,[Recktenwald]
+video/vnd.nokia.interleaved-multimedia @nim 'IANA,[Kangaslampi]
+video/vnd.objectvideo @mp4 'IANA,[Clark]
+video/vnd.sealed.mpeg1 @s11 'IANA,[Petersen]
+video/vnd.sealed.mpeg4 @smpg,s14 'IANA,[Petersen]
+video/vnd.sealed.swf @sswf,ssw 'IANA,[Petersen]
+video/vnd.sealedmedia.softseal.mov @smov,smo,s1q 'IANA,[Petersen]
+video/vnd.vivo @viv,vivo 'IANA,[Wolfe]
+
+ # Unregistered: application/*
+!application/x-troff 'LTSW =use-instead:text/troff
+application/x-bcpio @bcpio 'LTSW
+application/x-compressed @z,Z :base64 'LTSW
+application/x-cpio @cpio :base64 'LTSW
+application/x-csh @csh :8bit 'LTSW
+application/x-dvi @dvi :base64 'LTSW
+application/x-gtar @gtar,tgz,tbz2,tbz :base64 'LTSW
+application/x-gzip @gz :base64 'LTSW
+application/x-hdf @hdf 'LTSW
+application/x-java-archive @jar 'LTSW
+application/x-java-jnlp-file @jnlp 'LTSW
+application/x-java-serialized-object @ser 'LTSW
+application/x-java-vm @class 'LTSW
+application/x-latex @ltx,latex :8bit 'LTSW
+application/x-mif @mif 'LTSW
+application/x-rtf 'LTSW =use-instead:application/rtf
+application/x-sh @sh 'LTSW
+application/x-shar @shar 'LTSW
+application/x-stuffit @sit :base64 'LTSW
+application/x-sv4cpio @sv4cpio :base64 'LTSW
+application/x-sv4crc @sv4crc :base64 'LTSW
+application/x-tar @tar :base64 'LTSW
+application/x-tcl @tcl :8bit 'LTSW
+application/x-tex @tex :8bit
+application/x-texinfo @texinfo,texi :8bit
+application/x-troff-man @man :8bit 'LTSW
+application/x-troff-me @me 'LTSW
+application/x-troff-ms @ms 'LTSW
+application/x-ustar @ustar :base64 'LTSW
+application/x-wais-source @src 'LTSW
+mac:application/x-mac @bin :base64
+*!application/cals1840 'LTSW =use-instead:application/cals-1840
+*!application/remote_printing 'LTSW =use-instead:application/remote-printing
+*!application/x-u-star 'LTSW =use-instead:application/x-ustar
+*!application/x400.bp 'LTSW =use-instead:application/x400-bp
+*application/acad 'LTSW
+*application/clariscad 'LTSW
+*application/drafting 'LTSW
+*application/dxf 'LTSW
+*application/excel @xls,xlt 'LTSW
+*application/fractals 'LTSW
+*application/i-deas 'LTSW
+*application/macbinary 'LTSW
+*application/netcdf @nc,cdf 'LTSW
+*application/powerpoint @ppt,pps,pot :base64 'LTSW
+*application/pro_eng 'LTSW
+*application/set 'LTSW
+*application/SLA 'LTSW
+*application/solids 'LTSW
+*application/STEP 'LTSW
+*application/vda 'LTSW
+*application/word @doc,dot 'LTSW
+
+ # Unregistered: audio/*
+audio/x-aiff @aif,aifc,aiff :base64
+audio/x-midi @mid,midi,kar :base64
+audio/x-pn-realaudio @rm,ram :base64
+audio/x-pn-realaudio-plugin @rpm
+audio/x-realaudio @ra :base64
+audio/x-wav @wav :base64
+
+ # Unregistered: image/*
+*image/vnd.dgn @dgn =use-instead:image/x-vnd.dgn
+image/x-bmp @bmp
+image/x-cmu-raster @ras
+image/x-paintshoppro @psp,pspimage :base64
+image/x-pict
+image/x-portable-anymap @pnm :base64
+image/x-portable-bitmap @pbm :base64
+image/x-portable-graymap @pgm :base64
+image/x-portable-pixmap @ppm :base64
+image/x-rgb @rgb :base64
+image/x-targa @tga
+image/x-vnd.dgn @dgn
+image/x-win-bmp
+image/x-xbitmap @xbm :7bit
+image/x-xbm @xbm :7bit
+image/x-xpixmap @xpm :8bit
+image/x-xwindowdump @xwd :base64
+*!image/cmu-raster =use-instead:image/x-cmu-raster
+*!image/vnd.net.fpx =use-instead:image/vnd.net-fpx
+*image/bmp @bmp
+*image/targa @tga
+
+ # Unregistered: multipart/*
+multipart/x-gzip
+multipart/x-mixed-replace
+multipart/x-tar
+multipart/x-ustar
+multipart/x-www-form-urlencoded
+multipart/x-zip
+*!multipart/parallel =use-instead:multipart/parallel
+
+ # Unregistered: text/*
+*text/comma-separated-values @csv :8bit
+*text/vnd.flatland.3dml =use-instead:model/vnd.flatland.3dml
+text/x-vnd.flatland.3dml =use-instead:model/vnd.flatland.3dml
+text/x-setext @etx
+text/x-vcalendar @vcs :8bit
+text/x-vcard @vcf :8bit
+text/x-yaml @yaml,yml :8bit
+
+ # Unregistered: video/*
+*video/dl @dl :base64
+*video/gl @gl :base64
+video/x-msvideo @avi :base64
+video/x-sgi-movie @movie :base64
+
+ # Unregistered: other/*
+x-chemical/x-pdb @pdb
+x-chemical/x-xyz @xyz
+x-conference/x-cooltalk @ice
+x-drawing/dwf @dwf
+x-world/x-vrml @wrl,vrml
+MIME_TYPES
+
+_re = %r{
+ ^
+ ([*])? # 0: Unregistered?
+ (!)? # 1: Obsolete?
+ (?:(\w+):)? # 2: Platform marker
+ #{MIME::Type::MEDIA_TYPE_RE} # 3,4: Media type
+ (?:\s@([^\s]+))? # 5: Extensions
+ (?:\s:(#{MIME::Type::ENCODING_RE}))? # 6: Encoding
+ (?:\s'(.+))? # 7: URL list
+ (?:\s=(.+))? # 8: Documentation
+ $
+}x
+
+data_mime_type.each do |i|
+ item = i.chomp.strip.gsub(%r{#.*}o, '')
+ next if item.empty?
+
+ m = _re.match(item).captures
+
+ unregistered, obsolete, platform, mediatype, subtype, extensions,
+ encoding, urls, docs = *m
+
+ extensions &&= extensions.split(/,/)
+ urls &&= urls.split(/,/)
+
+ mime_type = MIME::Type.new("#{mediatype}/#{subtype}") do |t|
+ t.extensions = extensions
+ t.encoding = encoding
+ t.system = platform
+ t.obsolete = obsolete
+ t.registered = false if unregistered
+ t.docs = docs
+ t.url = urls
+ end
+
+ MIME::Types.add_type_variant(mime_type)
+ MIME::Types.index_extensions(mime_type)
+end
+
+_re = nil
+data_mime_type = nil
diff --git a/vendor/mime-types-1.15/pre-setup.rb b/vendor/mime-types-1.15/pre-setup.rb
new file mode 100644
index 0000000..88f6938
--- /dev/null
+++ b/vendor/mime-types-1.15/pre-setup.rb
@@ -0,0 +1,46 @@
+require 'rdoc/rdoc'
+##
+# Build the rdoc documentation. Also, try to build the RI documentation.
+#
+def build_rdoc(options)
+ RDoc::RDoc.new.document(options)
+rescue RDoc::RDocError => e
+ $stderr.puts e.message
+rescue Exception => e
+ $stderr.puts "Couldn't build RDoc documentation\n#{e.message}"
+end
+
+def build_ri(files)
+ RDoc::RDoc.new(["--ri-site", "--merge"] + files)
+rescue RDoc::RDocError => e
+ $stderr.puts e.message
+rescue Exception => e
+ $stderr.puts "Couldn't build Ri documentation\n#{e.message}"
+end
+
+def run_tests(test_list)
+ return if test_list.empty?
+
+ require 'test/unit/ui/console/testrunner'
+ $:.unshift "lib"
+ test_list.each do |test|
+ next if File.directory?(test)
+ require test
+ end
+
+ tests = []
+ ObjectSpace.each_object { |o| tests << o if o.kind_of?(Class) }
+ tests.delete_if { |o| !o.ancestors.include?(Test::Unit::TestCase) }
+ tests.delete_if { |o| o == Test::Unit::TestCase }
+
+ tests.each { |test| Test::Unit::UI::Console::TestRunner.run(test) }
+ $:.shift
+end
+
+rdoc = %w(--main README --line-numbers
+ --title MIME::Types)
+ri = %w(--ri-site --merge)
+dox = %w(README ChangeLog lib)
+build_rdoc rdoc + dox
+build_ri ri + dox
+run_tests Dir["tests/**/*"]
diff --git a/vendor/mime-types-1.15/setup.rb b/vendor/mime-types-1.15/setup.rb
new file mode 100644
index 0000000..0673386
--- /dev/null
+++ b/vendor/mime-types-1.15/setup.rb
@@ -0,0 +1,1366 @@
+#
+# setup.rb
+#
+# Copyright (c) 2000-2004 Minero Aoki
+#
+# This program is free software.
+# You can distribute/modify this program under the terms of
+# the GNU LGPL, Lesser General Public License version 2.1.
+#
+
+unless Enumerable.method_defined?(:map) # Ruby 1.4.6
+ module Enumerable
+ alias map collect
+ end
+end
+
+unless File.respond_to?(:read) # Ruby 1.6
+ def File.read(fname)
+ open(fname) {|f|
+ return f.read
+ }
+ end
+end
+
+def File.binread(fname)
+ open(fname, 'rb') {|f|
+ return f.read
+ }
+end
+
+# for corrupted windows stat(2)
+def File.dir?(path)
+ File.directory?((path[-1,1] == '/') ? path : path + '/')
+end
+
+
+class SetupError < StandardError; end
+
+def setup_rb_error(msg)
+ raise SetupError, msg
+end
+
+#
+# Config
+#
+
+if arg = ARGV.detect {|arg| /\A--rbconfig=/ =~ arg }
+ ARGV.delete(arg)
+ require arg.split(/=/, 2)[1]
+ $".push 'rbconfig.rb'
+else
+ require 'rbconfig'
+end
+
+def multipackage_install?
+ FileTest.directory?(File.dirname($0) + '/packages')
+end
+
+
+class ConfigItem
+ def initialize(name, template, default, desc)
+ @name = name.freeze
+ @template = template
+ @value = default
+ @default = default.dup.freeze
+ @description = desc
+ end
+
+ attr_reader :name
+ attr_reader :description
+
+ attr_accessor :default
+ alias help_default default
+
+ def help_opt
+ "--#{@name}=#{@template}"
+ end
+
+ def value
+ @value
+ end
+
+ def eval(table)
+ @value.gsub(%r<\$([^/]+)>) { table[$1] }
+ end
+
+ def set(val)
+ @value = check(val)
+ end
+
+ private
+
+ def check(val)
+ setup_rb_error "config: --#{name} requires argument" unless val
+ val
+ end
+end
+
+class BoolItem < ConfigItem
+ def config_type
+ 'bool'
+ end
+
+ def help_opt
+ "--#{@name}"
+ end
+
+ private
+
+ def check(val)
+ return 'yes' unless val
+ unless /\A(y(es)?|n(o)?|t(rue)?|f(alse))\z/i =~ val
+ setup_rb_error "config: --#{@name} accepts only yes/no for argument"
+ end
+ (/\Ay(es)?|\At(rue)/i =~ value) ? 'yes' : 'no'
+ end
+end
+
+class PathItem < ConfigItem
+ def config_type
+ 'path'
+ end
+
+ private
+
+ def check(path)
+ setup_rb_error "config: --#{@name} requires argument" unless path
+ path[0,1] == '$' ? path : File.expand_path(path)
+ end
+end
+
+class ProgramItem < ConfigItem
+ def config_type
+ 'program'
+ end
+end
+
+class SelectItem < ConfigItem
+ def initialize(name, template, default, desc)
+ super
+ @ok = template.split('/')
+ end
+
+ def config_type
+ 'select'
+ end
+
+ private
+
+ def check(val)
+ unless @ok.include?(val.strip)
+ setup_rb_error "config: use --#{@name}=#{@template} (#{val})"
+ end
+ val.strip
+ end
+end
+
+class PackageSelectionItem < ConfigItem
+ def initialize(name, template, default, help_default, desc)
+ super name, template, default, desc
+ @help_default = help_default
+ end
+
+ attr_reader :help_default
+
+ def config_type
+ 'package'
+ end
+
+ private
+
+ def check(val)
+ unless File.dir?("packages/#{val}")
+ setup_rb_error "config: no such package: #{val}"
+ end
+ val
+ end
+end
+
+class ConfigTable_class
+
+ def initialize(items)
+ @items = items
+ @table = {}
+ items.each do |i|
+ @table[i.name] = i
+ end
+ ALIASES.each do |ali, name|
+ @table[ali] = @table[name]
+ end
+ @script_extensions = ['rb']
+ end
+
+ attr_accessor :script_extensions
+
+ include Enumerable
+
+ def each(&block)
+ @items.each(&block)
+ end
+
+ def key?(name)
+ @table.key?(name)
+ end
+
+ def lookup(name)
+ @table[name] or raise ArgumentError, "no such config item: #{name}"
+ end
+
+ def add(item)
+ @items.push item
+ @table[item.name] = item
+ end
+
+ def remove(name)
+ item = lookup(name)
+ @items.delete_if {|i| i.name == name }
+ @table.delete_if {|name, i| i.name == name }
+ item
+ end
+
+ def new
+ dup()
+ end
+
+ def savefile
+ '.config'
+ end
+
+ def load
+ begin
+ t = dup()
+ File.foreach(savefile()) do |line|
+ k, v = *line.split(/=/, 2)
+ t[k] = v.strip
+ end
+ t
+ rescue Errno::ENOENT
+ setup_rb_error $!.message + "#{File.basename($0)} config first"
+ end
+ end
+
+ def save
+ @items.each {|i| i.value }
+ File.open(savefile(), 'w') {|f|
+ @items.each do |i|
+ f.printf "%s=%s\n", i.name, i.value if i.value
+ end
+ }
+ end
+
+ def [](key)
+ lookup(key).eval(self)
+ end
+
+ def []=(key, val)
+ lookup(key).set val
+ end
+
+end
+
+c = ::Config::CONFIG
+
+rubypath = c['bindir'] + '/' + c['ruby_install_name']
+
+major = c['MAJOR'].to_i
+minor = c['MINOR'].to_i
+teeny = c['TEENY'].to_i
+version = "#{major}.#{minor}"
+
+# ruby ver. >= 1.4.4?
+newpath_p = ((major >= 2) or
+ ((major == 1) and
+ ((minor >= 5) or
+ ((minor == 4) and (teeny >= 4)))))
+
+if c['rubylibdir']
+ # V < 1.6.3
+ _stdruby = c['rubylibdir']
+ _siteruby = c['sitedir']
+ _siterubyver = c['sitelibdir']
+ _siterubyverarch = c['sitearchdir']
+elsif newpath_p
+ # 1.4.4 <= V <= 1.6.3
+ _stdruby = "$prefix/lib/ruby/#{version}"
+ _siteruby = c['sitedir']
+ _siterubyver = "$siteruby/#{version}"
+ _siterubyverarch = "$siterubyver/#{c['arch']}"
+else
+ # V < 1.4.4
+ _stdruby = "$prefix/lib/ruby/#{version}"
+ _siteruby = "$prefix/lib/ruby/#{version}/site_ruby"
+ _siterubyver = _siteruby
+ _siterubyverarch = "$siterubyver/#{c['arch']}"
+end
+libdir = '-* dummy libdir *-'
+stdruby = '-* dummy rubylibdir *-'
+siteruby = '-* dummy site_ruby *-'
+siterubyver = '-* dummy site_ruby version *-'
+parameterize = lambda {|path|
+ path.sub(/\A#{Regexp.quote(c['prefix'])}/, '$prefix')\
+ .sub(/\A#{Regexp.quote(libdir)}/, '$libdir')\
+ .sub(/\A#{Regexp.quote(stdruby)}/, '$stdruby')\
+ .sub(/\A#{Regexp.quote(siteruby)}/, '$siteruby')\
+ .sub(/\A#{Regexp.quote(siterubyver)}/, '$siterubyver')
+}
+libdir = parameterize.call(c['libdir'])
+stdruby = parameterize.call(_stdruby)
+siteruby = parameterize.call(_siteruby)
+siterubyver = parameterize.call(_siterubyver)
+siterubyverarch = parameterize.call(_siterubyverarch)
+
+if arg = c['configure_args'].split.detect {|arg| /--with-make-prog=/ =~ arg }
+ makeprog = arg.sub(/'/, '').split(/=/, 2)[1]
+else
+ makeprog = 'make'
+end
+
+common_conf = [
+ PathItem.new('prefix', 'path', c['prefix'],
+ 'path prefix of target environment'),
+ PathItem.new('bindir', 'path', parameterize.call(c['bindir']),
+ 'the directory for commands'),
+ PathItem.new('libdir', 'path', libdir,
+ 'the directory for libraries'),
+ PathItem.new('datadir', 'path', parameterize.call(c['datadir']),
+ 'the directory for shared data'),
+ PathItem.new('mandir', 'path', parameterize.call(c['mandir']),
+ 'the directory for man pages'),
+ PathItem.new('sysconfdir', 'path', parameterize.call(c['sysconfdir']),
+ 'the directory for man pages'),
+ PathItem.new('stdruby', 'path', stdruby,
+ 'the directory for standard ruby libraries'),
+ PathItem.new('siteruby', 'path', siteruby,
+ 'the directory for version-independent aux ruby libraries'),
+ PathItem.new('siterubyver', 'path', siterubyver,
+ 'the directory for aux ruby libraries'),
+ PathItem.new('siterubyverarch', 'path', siterubyverarch,
+ 'the directory for aux ruby binaries'),
+ PathItem.new('rbdir', 'path', '$siterubyver',
+ 'the directory for ruby scripts'),
+ PathItem.new('sodir', 'path', '$siterubyverarch',
+ 'the directory for ruby extentions'),
+ PathItem.new('rubypath', 'path', rubypath,
+ 'the path to set to #! line'),
+ ProgramItem.new('rubyprog', 'name', rubypath,
+ 'the ruby program using for installation'),
+ ProgramItem.new('makeprog', 'name', makeprog,
+ 'the make program to compile ruby extentions'),
+ SelectItem.new('shebang', 'all/ruby/never', 'ruby',
+ 'shebang line (#!) editing mode'),
+ BoolItem.new('without-ext', 'yes/no', 'no',
+ 'does not compile/install ruby extentions')
+]
+class ConfigTable_class # open again
+ ALIASES = {
+ 'std-ruby' => 'stdruby',
+ 'site-ruby-common' => 'siteruby', # For backward compatibility
+ 'site-ruby' => 'siterubyver', # For backward compatibility
+ 'bin-dir' => 'bindir',
+ 'bin-dir' => 'bindir',
+ 'rb-dir' => 'rbdir',
+ 'so-dir' => 'sodir',
+ 'data-dir' => 'datadir',
+ 'ruby-path' => 'rubypath',
+ 'ruby-prog' => 'rubyprog',
+ 'ruby' => 'rubyprog',
+ 'make-prog' => 'makeprog',
+ 'make' => 'makeprog'
+ }
+end
+multipackage_conf = [
+ PackageSelectionItem.new('with', 'name,name...', '', 'ALL',
+ 'package names that you want to install'),
+ PackageSelectionItem.new('without', 'name,name...', '', 'NONE',
+ 'package names that you do not want to install')
+]
+if multipackage_install?
+ ConfigTable = ConfigTable_class.new(common_conf + multipackage_conf)
+else
+ ConfigTable = ConfigTable_class.new(common_conf)
+end
+
+
+module MetaConfigAPI
+
+ def eval_file_ifexist(fname)
+ instance_eval File.read(fname), fname, 1 if File.file?(fname)
+ end
+
+ def config_names
+ ConfigTable.map {|i| i.name }
+ end
+
+ def config?(name)
+ ConfigTable.key?(name)
+ end
+
+ def bool_config?(name)
+ ConfigTable.lookup(name).config_type == 'bool'
+ end
+
+ def path_config?(name)
+ ConfigTable.lookup(name).config_type == 'path'
+ end
+
+ def value_config?(name)
+ case ConfigTable.lookup(name).config_type
+ when 'bool', 'path'
+ true
+ else
+ false
+ end
+ end
+
+ def add_config(item)
+ ConfigTable.add item
+ end
+
+ def add_bool_config(name, default, desc)
+ ConfigTable.add BoolItem.new(name, 'yes/no', default ? 'yes' : 'no', desc)
+ end
+
+ def add_path_config(name, default, desc)
+ ConfigTable.add PathItem.new(name, 'path', default, desc)
+ end
+
+ def set_config_default(name, default)
+ ConfigTable.lookup(name).default = default
+ end
+
+ def remove_config(name)
+ ConfigTable.remove(name)
+ end
+
+ def add_script_extension(ext)
+ ConfigTable.script_extensions << ext
+ end
+end
+
+
+#
+# File Operations
+#
+
+module FileOperations
+
+ def mkdir_p(dirname, prefix = nil)
+ dirname = prefix + File.expand_path(dirname) if prefix
+ $stderr.puts "mkdir -p #{dirname}" if verbose?
+ return if no_harm?
+
+ # does not check '/'... it's too abnormal case
+ dirs = File.expand_path(dirname).split(%r<(?=/)>)
+ if /\A[a-z]:\z/i =~ dirs[0]
+ disk = dirs.shift
+ dirs[0] = disk + dirs[0]
+ end
+ dirs.each_index do |idx|
+ path = dirs[0..idx].join('')
+ Dir.mkdir path unless File.dir?(path)
+ end
+ end
+
+ def rm_f(fname)
+ $stderr.puts "rm -f #{fname}" if verbose?
+ return if no_harm?
+
+ if File.exist?(fname) or File.symlink?(fname)
+ File.chmod 0777, fname
+ File.unlink fname
+ end
+ end
+
+ def rm_rf(dn)
+ $stderr.puts "rm -rf #{dn}" if verbose?
+ return if no_harm?
+
+ Dir.chdir dn
+ Dir.foreach('.') do |fn|
+ next if fn == '.'
+ next if fn == '..'
+ if File.dir?(fn)
+ verbose_off {
+ rm_rf fn
+ }
+ else
+ verbose_off {
+ rm_f fn
+ }
+ end
+ end
+ Dir.chdir '..'
+ Dir.rmdir dn
+ end
+
+ def move_file(src, dest)
+ File.unlink dest if File.exist?(dest)
+ begin
+ File.rename src, dest
+ rescue
+ File.open(dest, 'wb') {|f| f.write File.binread(src) }
+ File.chmod File.stat(src).mode, dest
+ File.unlink src
+ end
+ end
+
+ def install(from, dest, mode, prefix = nil)
+ $stderr.puts "install #{from} #{dest}" if verbose?
+ return if no_harm?
+
+ realdest = prefix ? prefix + File.expand_path(dest) : dest
+ realdest = File.join(realdest, File.basename(from)) if File.dir?(realdest)
+ str = File.binread(from)
+ if diff?(str, realdest)
+ verbose_off {
+ rm_f realdest if File.exist?(realdest)
+ }
+ File.open(realdest, 'wb') {|f|
+ f.write str
+ }
+ File.chmod mode, realdest
+
+ File.open("#{objdir_root()}/InstalledFiles", 'a') {|f|
+ if prefix
+ f.puts realdest.sub(prefix, '')
+ else
+ f.puts realdest
+ end
+ }
+ end
+ end
+
+ def diff?(new_content, path)
+ return true unless File.exist?(path)
+ new_content != File.binread(path)
+ end
+
+ def command(str)
+ $stderr.puts str if verbose?
+ system str or raise RuntimeError, "'system #{str}' failed"
+ end
+
+ def ruby(str)
+ command config('rubyprog') + ' ' + str
+ end
+
+ def make(task = '')
+ command config('makeprog') + ' ' + task
+ end
+
+ def extdir?(dir)
+ File.exist?(dir + '/MANIFEST')
+ end
+
+ def all_files_in(dirname)
+ Dir.open(dirname) {|d|
+ return d.select {|ent| File.file?("#{dirname}/#{ent}") }
+ }
+ end
+
+ REJECT_DIRS = %w(
+ CVS SCCS RCS CVS.adm .svn
+ )
+
+ def all_dirs_in(dirname)
+ Dir.open(dirname) {|d|
+ return d.select {|n| File.dir?("#{dirname}/#{n}") } - %w(. ..) - REJECT_DIRS
+ }
+ end
+
+end
+
+
+#
+# Main Installer
+#
+
+module HookUtils
+
+ def run_hook(name)
+ try_run_hook "#{curr_srcdir()}/#{name}" or
+ try_run_hook "#{curr_srcdir()}/#{name}.rb"
+ end
+
+ def try_run_hook(fname)
+ return false unless File.file?(fname)
+ begin
+ instance_eval File.read(fname), fname, 1
+ rescue
+ setup_rb_error "hook #{fname} failed:\n" + $!.message
+ end
+ true
+ end
+
+end
+
+
+module HookScriptAPI
+
+ def get_config(key)
+ @config[key]
+ end
+
+ alias config get_config
+
+ def set_config(key, val)
+ @config[key] = val
+ end
+
+ #
+ # srcdir/objdir (works only in the package directory)
+ #
+
+ #abstract srcdir_root
+ #abstract objdir_root
+ #abstract relpath
+
+ def curr_srcdir
+ "#{srcdir_root()}/#{relpath()}"
+ end
+
+ def curr_objdir
+ "#{objdir_root()}/#{relpath()}"
+ end
+
+ def srcfile(path)
+ "#{curr_srcdir()}/#{path}"
+ end
+
+ def srcexist?(path)
+ File.exist?(srcfile(path))
+ end
+
+ def srcdirectory?(path)
+ File.dir?(srcfile(path))
+ end
+
+ def srcfile?(path)
+ File.file? srcfile(path)
+ end
+
+ def srcentries(path = '.')
+ Dir.open("#{curr_srcdir()}/#{path}") {|d|
+ return d.to_a - %w(. ..)
+ }
+ end
+
+ def srcfiles(path = '.')
+ srcentries(path).select {|fname|
+ File.file?(File.join(curr_srcdir(), path, fname))
+ }
+ end
+
+ def srcdirectories(path = '.')
+ srcentries(path).select {|fname|
+ File.dir?(File.join(curr_srcdir(), path, fname))
+ }
+ end
+
+end
+
+
+class ToplevelInstaller
+
+ Version = '3.3.1'
+ Copyright = 'Copyright (c) 2000-2004 Minero Aoki'
+
+ TASKS = [
+ [ 'all', 'do config, setup, then install' ],
+ [ 'config', 'saves your configurations' ],
+ [ 'show', 'shows current configuration' ],
+ [ 'setup', 'compiles ruby extentions and others' ],
+ [ 'install', 'installs files' ],
+ [ 'clean', "does `make clean' for each extention" ],
+ [ 'distclean',"does `make distclean' for each extention" ]
+ ]
+
+ def ToplevelInstaller.invoke
+ instance().invoke
+ end
+
+ @singleton = nil
+
+ def ToplevelInstaller.instance
+ @singleton ||= new(File.dirname($0))
+ @singleton
+ end
+
+ include MetaConfigAPI
+
+ def initialize(ardir_root)
+ @config = nil
+ @options = { 'verbose' => true }
+ @ardir = File.expand_path(ardir_root)
+ end
+
+ def inspect
+ "#<#{self.class} #{__id__()}>"
+ end
+
+ def invoke
+ run_metaconfigs
+ case task = parsearg_global()
+ when nil, 'all'
+ @config = load_config('config')
+ parsearg_config
+ init_installers
+ exec_config
+ exec_setup
+ exec_install
+ else
+ @config = load_config(task)
+ __send__ "parsearg_#{task}"
+ init_installers
+ __send__ "exec_#{task}"
+ end
+ end
+
+ def run_metaconfigs
+ eval_file_ifexist "#{@ardir}/metaconfig"
+ end
+
+ def load_config(task)
+ case task
+ when 'config'
+ ConfigTable.new
+ when 'clean', 'distclean'
+ if File.exist?(ConfigTable.savefile)
+ then ConfigTable.load
+ else ConfigTable.new
+ end
+ else
+ ConfigTable.load
+ end
+ end
+
+ def init_installers
+ @installer = Installer.new(@config, @options, @ardir, File.expand_path('.'))
+ end
+
+ #
+ # Hook Script API bases
+ #
+
+ def srcdir_root
+ @ardir
+ end
+
+ def objdir_root
+ '.'
+ end
+
+ def relpath
+ '.'
+ end
+
+ #
+ # Option Parsing
+ #
+
+ def parsearg_global
+ valid_task = /\A(?:#{TASKS.map {|task,desc| task }.join '|'})\z/
+
+ while arg = ARGV.shift
+ case arg
+ when /\A\w+\z/
+ setup_rb_error "invalid task: #{arg}" unless valid_task =~ arg
+ return arg
+
+ when '-q', '--quiet'
+ @options['verbose'] = false
+
+ when '--verbose'
+ @options['verbose'] = true
+
+ when '-h', '--help'
+ print_usage $stdout
+ exit 0
+
+ when '-v', '--version'
+ puts "#{File.basename($0)} version #{Version}"
+ exit 0
+
+ when '--copyright'
+ puts Copyright
+ exit 0
+
+ else
+ setup_rb_error "unknown global option '#{arg}'"
+ end
+ end
+
+ nil
+ end
+
+
+ def parsearg_no_options
+ unless ARGV.empty?
+ setup_rb_error "#{task}: unknown options: #{ARGV.join ' '}"
+ end
+ end
+
+ alias parsearg_show parsearg_no_options
+ alias parsearg_setup parsearg_no_options
+ alias parsearg_clean parsearg_no_options
+ alias parsearg_distclean parsearg_no_options
+
+ def parsearg_config
+ re = /\A--(#{ConfigTable.map {|i| i.name }.join('|')})(?:=(.*))?\z/
+ @options['config-opt'] = []
+
+ while i = ARGV.shift
+ if /\A--?\z/ =~ i
+ @options['config-opt'] = ARGV.dup
+ break
+ end
+ m = re.match(i) or setup_rb_error "config: unknown option #{i}"
+ name, value = *m.to_a[1,2]
+ @config[name] = value
+ end
+ end
+
+ def parsearg_install
+ @options['no-harm'] = false
+ @options['install-prefix'] = ''
+ while a = ARGV.shift
+ case a
+ when /\A--no-harm\z/
+ @options['no-harm'] = true
+ when /\A--prefix=(.*)\z/
+ path = $1
+ path = File.expand_path(path) unless path[0,1] == '/'
+ @options['install-prefix'] = path
+ else
+ setup_rb_error "install: unknown option #{a}"
+ end
+ end
+ end
+
+ def print_usage(out)
+ out.puts 'Typical Installation Procedure:'
+ out.puts " $ ruby #{File.basename $0} config"
+ out.puts " $ ruby #{File.basename $0} setup"
+ out.puts " # ruby #{File.basename $0} install (may require root privilege)"
+ out.puts
+ out.puts 'Detailed Usage:'
+ out.puts " ruby #{File.basename $0} <global option>"
+ out.puts " ruby #{File.basename $0} [<global options>] <task> [<task options>]"
+
+ fmt = " %-24s %s\n"
+ out.puts
+ out.puts 'Global options:'
+ out.printf fmt, '-q,--quiet', 'suppress message outputs'
+ out.printf fmt, ' --verbose', 'output messages verbosely'
+ out.printf fmt, '-h,--help', 'print this message'
+ out.printf fmt, '-v,--version', 'print version and quit'
+ out.printf fmt, ' --copyright', 'print copyright and quit'
+ out.puts
+ out.puts 'Tasks:'
+ TASKS.each do |name, desc|
+ out.printf fmt, name, desc
+ end
+
+ fmt = " %-24s %s [%s]\n"
+ out.puts
+ out.puts 'Options for CONFIG or ALL:'
+ ConfigTable.each do |item|
+ out.printf fmt, item.help_opt, item.description, item.help_default
+ end
+ out.printf fmt, '--rbconfig=path', 'rbconfig.rb to load',"running ruby's"
+ out.puts
+ out.puts 'Options for INSTALL:'
+ out.printf fmt, '--no-harm', 'only display what to do if given', 'off'
+ out.printf fmt, '--prefix=path', 'install path prefix', '$prefix'
+ out.puts
+ end
+
+ #
+ # Task Handlers
+ #
+
+ def exec_config
+ @installer.exec_config
+ @config.save # must be final
+ end
+
+ def exec_setup
+ @installer.exec_setup
+ end
+
+ def exec_install
+ @installer.exec_install
+ end
+
+ def exec_show
+ ConfigTable.each do |i|
+ printf "%-20s %s\n", i.name, i.value
+ end
+ end
+
+ def exec_clean
+ @installer.exec_clean
+ end
+
+ def exec_distclean
+ @installer.exec_distclean
+ end
+
+end
+
+
+class ToplevelInstallerMulti < ToplevelInstaller
+
+ include HookUtils
+ include HookScriptAPI
+ include FileOperations
+
+ def initialize(ardir)
+ super
+ @packages = all_dirs_in("#{@ardir}/packages")
+ raise 'no package exists' if @packages.empty?
+ end
+
+ def run_metaconfigs
+ eval_file_ifexist "#{@ardir}/metaconfig"
+ @packages.each do |name|
+ eval_file_ifexist "#{@ardir}/packages/#{name}/metaconfig"
+ end
+ end
+
+ def init_installers
+ @installers = {}
+ @packages.each do |pack|
+ @installers[pack] = Installer.new(@config, @options,
+ "#{@ardir}/packages/#{pack}",
+ "packages/#{pack}")
+ end
+
+ with = extract_selection(config('with'))
+ without = extract_selection(config('without'))
+ @selected = @installers.keys.select {|name|
+ (with.empty? or with.include?(name)) \
+ and not without.include?(name)
+ }
+ end
+
+ def extract_selection(list)
+ a = list.split(/,/)
+ a.each do |name|
+ setup_rb_error "no such package: #{name}" unless @installers.key?(name)
+ end
+ a
+ end
+
+ def print_usage(f)
+ super
+ f.puts 'Inluded packages:'
+ f.puts ' ' + @packages.sort.join(' ')
+ f.puts
+ end
+
+ #
+ # multi-package metaconfig API
+ #
+
+ attr_reader :packages
+
+ def declare_packages(list)
+ raise 'package list is empty' if list.empty?
+ list.each do |name|
+ raise "directory packages/#{name} does not exist"\
+ unless File.dir?("#{@ardir}/packages/#{name}")
+ end
+ @packages = list
+ end
+
+ #
+ # Task Handlers
+ #
+
+ def exec_config
+ run_hook 'pre-config'
+ each_selected_installers {|inst| inst.exec_config }
+ run_hook 'post-config'
+ @config.save # must be final
+ end
+
+ def exec_setup
+ run_hook 'pre-setup'
+ each_selected_installers {|inst| inst.exec_setup }
+ run_hook 'post-setup'
+ end
+
+ def exec_install
+ run_hook 'pre-install'
+ each_selected_installers {|inst| inst.exec_install }
+ run_hook 'post-install'
+ end
+
+ def exec_clean
+ rm_f ConfigTable.savefile
+ run_hook 'pre-clean'
+ each_selected_installers {|inst| inst.exec_clean }
+ run_hook 'post-clean'
+ end
+
+ def exec_distclean
+ rm_f ConfigTable.savefile
+ run_hook 'pre-distclean'
+ each_selected_installers {|inst| inst.exec_distclean }
+ run_hook 'post-distclean'
+ end
+
+ #
+ # lib
+ #
+
+ def each_selected_installers
+ Dir.mkdir 'packages' unless File.dir?('packages')
+ @selected.each do |pack|
+ $stderr.puts "Processing the package `#{pack}' ..." if @options['verbose']
+ Dir.mkdir "packages/#{pack}" unless File.dir?("packages/#{pack}")
+ Dir.chdir "packages/#{pack}"
+ yield @installers[pack]
+ Dir.chdir '../..'
+ end
+ end
+
+ def verbose?
+ @options['verbose']
+ end
+
+ def no_harm?
+ @options['no-harm']
+ end
+
+end
+
+
+class Installer
+
+ FILETYPES = %w( bin lib ext data )
+
+ include HookScriptAPI
+ include HookUtils
+ include FileOperations
+
+ def initialize(config, opt, srcroot, objroot)
+ @config = config
+ @options = opt
+ @srcdir = File.expand_path(srcroot)
+ @objdir = File.expand_path(objroot)
+ @currdir = '.'
+ end
+
+ def inspect
+ "#<#{self.class} #{File.basename(@srcdir)}>"
+ end
+
+ #
+ # Hook Script API base methods
+ #
+
+ def srcdir_root
+ @srcdir
+ end
+
+ def objdir_root
+ @objdir
+ end
+
+ def relpath
+ @currdir
+ end
+
+ #
+ # configs/options
+ #
+
+ def no_harm?
+ @options['no-harm']
+ end
+
+ def verbose?
+ @options['verbose']
+ end
+
+ def verbose_off
+ begin
+ save, @options['verbose'] = @options['verbose'], false
+ yield
+ ensure
+ @options['verbose'] = save
+ end
+ end
+
+ #
+ # TASK config
+ #
+
+ def exec_config
+ exec_task_traverse 'config'
+ end
+
+ def config_dir_bin(rel)
+ end
+
+ def config_dir_lib(rel)
+ end
+
+ def config_dir_ext(rel)
+ extconf if extdir?(curr_srcdir())
+ end
+
+ def extconf
+ opt = @options['config-opt'].join(' ')
+ command "#{config('rubyprog')} #{curr_srcdir()}/extconf.rb #{opt}"
+ end
+
+ def config_dir_data(rel)
+ end
+
+ #
+ # TASK setup
+ #
+
+ def exec_setup
+ exec_task_traverse 'setup'
+ end
+
+ def setup_dir_bin(rel)
+ all_files_in(curr_srcdir()).each do |fname|
+ adjust_shebang "#{curr_srcdir()}/#{fname}"
+ end
+ end
+
+ def adjust_shebang(path)
+ return if no_harm?
+ tmpfile = File.basename(path) + '.tmp'
+ begin
+ File.open(path, 'rb') {|r|
+ first = r.gets
+ return unless File.basename(config('rubypath')) == 'ruby'
+ return unless File.basename(first.sub(/\A\#!/, '').split[0]) == 'ruby'
+ $stderr.puts "adjusting shebang: #{File.basename(path)}" if verbose?
+ File.open(tmpfile, 'wb') {|w|
+ w.print first.sub(/\A\#!\s*\S+/, '#! ' + config('rubypath'))
+ w.write r.read
+ }
+ move_file tmpfile, File.basename(path)
+ }
+ ensure
+ File.unlink tmpfile if File.exist?(tmpfile)
+ end
+ end
+
+ def setup_dir_lib(rel)
+ end
+
+ def setup_dir_ext(rel)
+ make if extdir?(curr_srcdir())
+ end
+
+ def setup_dir_data(rel)
+ end
+
+ #
+ # TASK install
+ #
+
+ def exec_install
+ rm_f 'InstalledFiles'
+ exec_task_traverse 'install'
+ end
+
+ def install_dir_bin(rel)
+ install_files collect_filenames_auto(), "#{config('bindir')}/#{rel}", 0755
+ end
+
+ def install_dir_lib(rel)
+ install_files ruby_scripts(), "#{config('rbdir')}/#{rel}", 0644
+ end
+
+ def install_dir_ext(rel)
+ return unless extdir?(curr_srcdir())
+ install_files ruby_extentions('.'),
+ "#{config('sodir')}/#{File.dirname(rel)}",
+ 0555
+ end
+
+ def install_dir_data(rel)
+ install_files collect_filenames_auto(), "#{config('datadir')}/#{rel}", 0644
+ end
+
+ def install_files(list, dest, mode)
+ mkdir_p dest, @options['install-prefix']
+ list.each do |fname|
+ install fname, dest, mode, @options['install-prefix']
+ end
+ end
+
+ def ruby_scripts
+ collect_filenames_auto().select {|n| /\.(#{ConfigTable.script_extensions.join('|')})\z/ =~ n }
+ end
+
+ # picked up many entries from cvs-1.11.1/src/ignore.c
+ reject_patterns = %w(
+ core RCSLOG tags TAGS .make.state
+ .nse_depinfo #* .#* cvslog.* ,* .del-* *.olb
+ *~ *.old *.bak *.BAK *.orig *.rej _$* *$
+
+ *.org *.in .*
+ )
+ mapping = {
+ '.' => '\.',
+ '$' => '\$',
+ '#' => '\#',
+ '*' => '.*'
+ }
+ REJECT_PATTERNS = Regexp.new('\A(?:' +
+ reject_patterns.map {|pat|
+ pat.gsub(/[\.\$\#\*]/) {|ch| mapping[ch] }
+ }.join('|') +
+ ')\z')
+
+ def collect_filenames_auto
+ mapdir((existfiles() - hookfiles()).reject {|fname|
+ REJECT_PATTERNS =~ fname
+ })
+ end
+
+ def existfiles
+ all_files_in(curr_srcdir()) | all_files_in('.')
+ end
+
+ def hookfiles
+ %w( pre-%s post-%s pre-%s.rb post-%s.rb ).map {|fmt|
+ %w( config setup install clean ).map {|t| sprintf(fmt, t) }
+ }.flatten
+ end
+
+ def mapdir(filelist)
+ filelist.map {|fname|
+ if File.exist?(fname) # objdir
+ fname
+ else # srcdir
+ File.join(curr_srcdir(), fname)
+ end
+ }
+ end
+
+ def ruby_extentions(dir)
+ Dir.open(dir) {|d|
+ ents = d.select {|fname| /\.#{::Config::CONFIG['DLEXT']}\z/ =~ fname }
+ if ents.empty?
+ setup_rb_error "no ruby extention exists: 'ruby #{$0} setup' first"
+ end
+ return ents
+ }
+ end
+
+ #
+ # TASK clean
+ #
+
+ def exec_clean
+ exec_task_traverse 'clean'
+ rm_f ConfigTable.savefile
+ rm_f 'InstalledFiles'
+ end
+
+ def clean_dir_bin(rel)
+ end
+
+ def clean_dir_lib(rel)
+ end
+
+ def clean_dir_ext(rel)
+ return unless extdir?(curr_srcdir())
+ make 'clean' if File.file?('Makefile')
+ end
+
+ def clean_dir_data(rel)
+ end
+
+ #
+ # TASK distclean
+ #
+
+ def exec_distclean
+ exec_task_traverse 'distclean'
+ rm_f ConfigTable.savefile
+ rm_f 'InstalledFiles'
+ end
+
+ def distclean_dir_bin(rel)
+ end
+
+ def distclean_dir_lib(rel)
+ end
+
+ def distclean_dir_ext(rel)
+ return unless extdir?(curr_srcdir())
+ make 'distclean' if File.file?('Makefile')
+ end
+
+ #
+ # lib
+ #
+
+ def exec_task_traverse(task)
+ run_hook "pre-#{task}"
+ FILETYPES.each do |type|
+ if config('without-ext') == 'yes' and type == 'ext'
+ $stderr.puts 'skipping ext/* by user option' if verbose?
+ next
+ end
+ traverse task, type, "#{task}_dir_#{type}"
+ end
+ run_hook "post-#{task}"
+ end
+
+ def traverse(task, rel, mid)
+ dive_into(rel) {
+ run_hook "pre-#{task}"
+ __send__ mid, rel.sub(%r[\A.*?(?:/|\z)], '')
+ all_dirs_in(curr_srcdir()).each do |d|
+ traverse task, "#{rel}/#{d}", mid
+ end
+ run_hook "post-#{task}"
+ }
+ end
+
+ def dive_into(rel)
+ return unless File.dir?("#{@srcdir}/#{rel}")
+
+ dir = File.basename(rel)
+ Dir.mkdir dir unless File.dir?(dir)
+ prevdir = Dir.pwd
+ Dir.chdir dir
+ $stderr.puts '---> ' + rel if verbose?
+ @currdir = rel
+ yield
+ Dir.chdir prevdir
+ $stderr.puts '<--- ' + rel if verbose?
+ @currdir = File.dirname(rel)
+ end
+
+end
+
+
+if $0 == __FILE__
+ begin
+ if multipackage_install?
+ ToplevelInstallerMulti.invoke
+ else
+ ToplevelInstaller.invoke
+ end
+ rescue SetupError
+ raise if $DEBUG
+ $stderr.puts $!.message
+ $stderr.puts "Try 'ruby #{$0} --help' for detailed usage."
+ exit 1
+ end
+end
diff --git a/vendor/mime-types-1.15/tests/tc_mime_type.rb b/vendor/mime-types-1.15/tests/tc_mime_type.rb
new file mode 100644
index 0000000..4121edd
--- /dev/null
+++ b/vendor/mime-types-1.15/tests/tc_mime_type.rb
@@ -0,0 +1,275 @@
+#! /usr/bin/env ruby
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2005 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: tc_mime_type.rb,v 1.2 2006/02/12 21:27:22 austin Exp $
+#++
+$LOAD_PATH.unshift("#{File.dirname(__FILE__)}/../lib") if __FILE__ == $0
+
+require 'mime/types'
+require 'test/unit'
+
+class Test_MIME__Type < Test::Unit::TestCase #:nodoc:
+ def setup
+ @zip = MIME::Type.new('x-appl/x-zip') { |t| t.extensions = ['zip', 'zp'] }
+ end
+
+ def test_CMP # '<=>'
+ assert(MIME::Type.new('text/plain') == MIME::Type.new('text/plain'))
+ assert(MIME::Type.new('text/plain') != MIME::Type.new('image/jpeg'))
+ assert(MIME::Type.new('text/plain') == 'text/plain')
+ assert(MIME::Type.new('text/plain') != 'image/jpeg')
+ assert(MIME::Type.new('text/plain') > MIME::Type.new('text/html'))
+ assert(MIME::Type.new('text/plain') > 'text/html')
+ assert(MIME::Type.new('text/html') < MIME::Type.new('text/plain'))
+ assert(MIME::Type.new('text/html') < 'text/plain')
+ assert('text/html' == MIME::Type.new('text/html'))
+ assert('text/html' < MIME::Type.new('text/plain'))
+ assert('text/plain' > MIME::Type.new('text/html'))
+ end
+
+ def test_ascii?
+ assert(MIME::Type.new('text/plain').ascii?)
+ assert(!MIME::Type.new('image/jpeg').ascii?)
+ assert(!MIME::Type.new('application/x-msword').ascii?)
+ assert(MIME::Type.new('text/vCard').ascii?)
+ assert(!MIME::Type.new('application/pkcs7-mime').ascii?)
+ assert([email protected]?)
+ end
+
+ def test_binary?
+ assert(!MIME::Type.new('text/plain').binary?)
+ assert(MIME::Type.new('image/jpeg').binary?)
+ assert(MIME::Type.new('application/x-msword').binary?)
+ assert(!MIME::Type.new('text/vCard').binary?)
+ assert(MIME::Type.new('application/pkcs7-mime').binary?)
+ assert(@zip.binary?)
+ end
+
+ def test_complete?
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert(@yaml.complete?)
+ assert_nothing_raised { @yaml.extensions = nil }
+ assert([email protected]?)
+ end
+
+ def test_content_type
+ assert_equal(MIME::Type.new('text/plain').content_type, 'text/plain')
+ assert_equal(MIME::Type.new('image/jpeg').content_type, 'image/jpeg')
+ assert_equal(MIME::Type.new('application/x-msword').content_type, 'application/x-msword')
+ assert_equal(MIME::Type.new('text/vCard').content_type, 'text/vCard')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').content_type, 'application/pkcs7-mime')
+ assert_equal(@zip.content_type, 'x-appl/x-zip');
+ end
+
+ def test_encoding
+ assert_equal(MIME::Type.new('text/plain').encoding, 'quoted-printable')
+ assert_equal(MIME::Type.new('image/jpeg').encoding, 'base64')
+ assert_equal(MIME::Type.new('application/x-msword').encoding, 'base64')
+ assert_equal(MIME::Type.new('text/vCard').encoding, 'quoted-printable')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').encoding, 'base64')
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert_equal(@yaml.encoding, '8bit')
+ assert_nothing_raised { @yaml.encoding = 'base64' }
+ assert_equal(@yaml.encoding, 'base64')
+ assert_nothing_raised { @yaml.encoding = :default }
+ assert_equal(@yaml.encoding, 'quoted-printable')
+ assert_raises(ArgumentError) { @yaml.encoding = 'binary' }
+ assert_equal(@zip.encoding, 'base64')
+ end
+
+ def test_eql?
+ assert(MIME::Type.new('text/plain').eql?(MIME::Type.new('text/plain')))
+ assert(!MIME::Type.new('text/plain').eql?(MIME::Type.new('image/jpeg')))
+ assert(!MIME::Type.new('text/plain').eql?('text/plain'))
+ assert(!MIME::Type.new('text/plain').eql?('image/jpeg'))
+ end
+
+ def test_extensions
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert_equal(@yaml.extensions, ['yaml', 'yml'])
+ assert_nothing_raised { @yaml.extensions = 'yaml' }
+ assert_equal(@yaml.extensions, ['yaml'])
+ assert_equal(@zip.extensions.size, 2)
+ assert_equal(@zip.extensions, ['zip', 'zp'])
+ end
+
+ def test_like?
+ assert(MIME::Type.new('text/plain').like?(MIME::Type.new('text/plain')))
+ assert(MIME::Type.new('text/plain').like?(MIME::Type.new('text/x-plain')))
+ assert(!MIME::Type.new('text/plain').like?(MIME::Type.new('image/jpeg')))
+ assert(MIME::Type.new('text/plain').like?('text/plain'))
+ assert(MIME::Type.new('text/plain').like?('text/x-plain'))
+ assert(!MIME::Type.new('text/plain').like?('image/jpeg'))
+ end
+
+ def test_media_type
+ assert_equal(MIME::Type.new('text/plain').media_type, 'text')
+ assert_equal(MIME::Type.new('image/jpeg').media_type, 'image')
+ assert_equal(MIME::Type.new('application/x-msword').media_type, 'application')
+ assert_equal(MIME::Type.new('text/vCard').media_type, 'text')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').media_type, 'application')
+ assert_equal(MIME::Type.new('x-chemical/x-pdb').media_type, 'chemical')
+ assert_equal(@zip.media_type, 'appl')
+ end
+
+ def test_platform?
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'oddbox')
+ end
+ assert([email protected]?)
+ assert_nothing_raised { @yaml.system = nil }
+ assert([email protected]?)
+ assert_nothing_raised { @yaml.system = /#{RUBY_PLATFORM}/ }
+ assert(@yaml.platform?)
+ end
+
+ def test_raw_media_type
+ assert_equal(MIME::Type.new('text/plain').raw_media_type, 'text')
+ assert_equal(MIME::Type.new('image/jpeg').raw_media_type, 'image')
+ assert_equal(MIME::Type.new('application/x-msword').raw_media_type, 'application')
+ assert_equal(MIME::Type.new('text/vCard').raw_media_type, 'text')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').raw_media_type, 'application')
+
+ assert_equal(MIME::Type.new('x-chemical/x-pdb').raw_media_type, 'x-chemical')
+ assert_equal(@zip.raw_media_type, 'x-appl')
+ end
+
+ def test_raw_sub_type
+ assert_equal(MIME::Type.new('text/plain').raw_sub_type, 'plain')
+ assert_equal(MIME::Type.new('image/jpeg').raw_sub_type, 'jpeg')
+ assert_equal(MIME::Type.new('application/x-msword').raw_sub_type, 'x-msword')
+ assert_equal(MIME::Type.new('text/vCard').raw_sub_type, 'vCard')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').raw_sub_type, 'pkcs7-mime')
+ assert_equal(@zip.raw_sub_type, 'x-zip')
+ end
+
+ def test_registered?
+ assert(MIME::Type.new('text/plain').registered?)
+ assert(MIME::Type.new('image/jpeg').registered?)
+ assert(!MIME::Type.new('application/x-msword').registered?)
+ assert(MIME::Type.new('text/vCard').registered?)
+ assert(MIME::Type.new('application/pkcs7-mime').registered?)
+ assert([email protected]?)
+ end
+
+ def test_signature?
+ assert(!MIME::Type.new('text/plain').signature?)
+ assert(!MIME::Type.new('image/jpeg').signature?)
+ assert(!MIME::Type.new('application/x-msword').signature?)
+ assert(MIME::Type.new('text/vCard').signature?)
+ assert(MIME::Type.new('application/pkcs7-mime').signature?)
+ end
+
+ def test_simplified
+ assert_equal(MIME::Type.new('text/plain').simplified, 'text/plain')
+ assert_equal(MIME::Type.new('image/jpeg').simplified, 'image/jpeg')
+ assert_equal(MIME::Type.new('application/x-msword').simplified, 'application/msword')
+ assert_equal(MIME::Type.new('text/vCard').simplified, 'text/vcard')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').simplified, 'application/pkcs7-mime')
+ assert_equal(MIME::Type.new('x-chemical/x-pdb').simplified, 'chemical/pdb')
+ end
+
+ def test_sub_type
+ assert_equal(MIME::Type.new('text/plain').sub_type, 'plain')
+ assert_equal(MIME::Type.new('image/jpeg').sub_type, 'jpeg')
+ assert_equal(MIME::Type.new('application/x-msword').sub_type, 'msword')
+ assert_equal(MIME::Type.new('text/vCard').sub_type, 'vcard')
+ assert_equal(MIME::Type.new('application/pkcs7-mime').sub_type, 'pkcs7-mime')
+ assert_equal(@zip.sub_type, 'zip')
+ end
+
+ def test_system
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert_equal(@yaml.system, %r{linux})
+ assert_nothing_raised { @yaml.system = /win32/ }
+ assert_equal(@yaml.system, %r{win32})
+ assert_nothing_raised { @yaml.system = nil }
+ assert_nil(@yaml.system)
+ end
+
+ def test_system?
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert(@yaml.system?)
+ assert_nothing_raised { @yaml.system = nil }
+ assert([email protected]?)
+ end
+
+ def test_to_a
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert_equal(@yaml.to_a, ['text/x-yaml', ['yaml', 'yml'], '8bit',
+ /linux/, nil, nil, nil, false])
+ end
+
+ def test_to_hash
+ assert_nothing_raised do
+ @yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ end
+ assert_equal(@yaml.to_hash,
+ { 'Content-Type' => 'text/x-yaml',
+ 'Content-Transfer-Encoding' => '8bit',
+ 'Extensions' => ['yaml', 'yml'],
+ 'System' => /linux/,
+ 'Registered' => false,
+ 'URL' => nil,
+ 'Obsolete' => nil,
+ 'Docs' => nil })
+ end
+
+ def test_to_s
+ assert_equal("#{MIME::Type.new('text/plain')}", 'text/plain')
+ end
+
+ def test_s_constructors
+ assert_not_nil(@zip)
+ yaml = MIME::Type.from_array('text/x-yaml', ['yaml', 'yml'], '8bit',
+ 'linux')
+ assert_instance_of(MIME::Type, yaml)
+ yaml = MIME::Type.from_hash('Content-Type' => 'text/x-yaml',
+ 'Content-Transfer-Encoding' => '8bit',
+ 'System' => 'linux',
+ 'Extensions' => ['yaml', 'yml'])
+ assert_instance_of(MIME::Type, yaml)
+ yaml = MIME::Type.new('text/x-yaml') do |y|
+ y.extensions = ['yaml', 'yml']
+ y.encoding = '8bit'
+ y.system = 'linux'
+ end
+ assert_instance_of(MIME::Type, yaml)
+ assert_raises(MIME::InvalidContentType) { MIME::Type.new('apps') }
+ assert_raises(MIME::InvalidContentType) { MIME::Type.new(nil) }
+ end
+
+ def test_s_simplified
+ assert_equal(MIME::Type.simplified('text/plain'), 'text/plain')
+ assert_equal(MIME::Type.simplified('image/jpeg'), 'image/jpeg')
+ assert_equal(MIME::Type.simplified('application/x-msword'), 'application/msword')
+ assert_equal(MIME::Type.simplified('text/vCard'), 'text/vcard')
+ assert_equal(MIME::Type.simplified('application/pkcs7-mime'), 'application/pkcs7-mime')
+ assert_equal(@zip.simplified, 'appl/zip')
+ assert_equal(MIME::Type.simplified('x-xyz/abc'), 'xyz/abc')
+ end
+end
diff --git a/vendor/mime-types-1.15/tests/tc_mime_types.rb b/vendor/mime-types-1.15/tests/tc_mime_types.rb
new file mode 100644
index 0000000..e48f9e2
--- /dev/null
+++ b/vendor/mime-types-1.15/tests/tc_mime_types.rb
@@ -0,0 +1,77 @@
+#! /usr/bin/env ruby
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2005 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: tc_mime_types.rb,v 1.2 2006/02/12 21:27:22 austin Exp $
+#++
+$LOAD_PATH.unshift("#{File.dirname(__FILE__)}/../lib") if __FILE__ == $0
+
+require 'mime/types'
+require 'test/unit'
+
+class TestMIME__Types < Test::Unit::TestCase #:nodoc:
+ def test_s_AREF # singleton method '[]'
+ text_plain = MIME::Type.new('text/plain') do |t|
+ t.encoding = '8bit'
+ t.extensions = ['asc', 'txt', 'c', 'cc', 'h', 'hh', 'cpp', 'hpp',
+ 'dat', 'hlp']
+ end
+ text_plain_vms = MIME::Type.new('text/plain') do |t|
+ t.encoding = '8bit'
+ t.extensions = ['doc']
+ t.system = 'vms'
+ end
+ text_vnd_fly = MIME::Type.new('text/vnd.fly')
+ assert_equal(MIME::Types['text/plain'].sort,
+ [text_plain, text_plain_vms].sort)
+
+ tst_bmp = MIME::Types["image/x-bmp"] +
+ MIME::Types["image/vnd.wap.wbmp"] + MIME::Types["image/x-win-bmp"]
+
+ assert_equal(tst_bmp.sort, MIME::Types[/bmp$/].sort)
+ assert_nothing_raised {
+ MIME::Types['image/bmp'][0].system = RUBY_PLATFORM
+ }
+ assert_equal([MIME::Type.from_array('image/x-bmp', ['bmp'])],
+ MIME::Types[/bmp$/, { :platform => true }])
+
+ assert(MIME::Types['text/vnd.fly', { :complete => true }].empty?)
+ assert(!MIME::Types['text/plain', { :complete => true} ].empty?)
+ end
+
+ def test_s_add
+ assert_nothing_raised do
+ @eruby = MIME::Type.new("application/x-eruby") do |t|
+ t.extensions = "rhtml"
+ t.encoding = "8bit"
+ end
+
+ MIME::Types.add(@eruby)
+ end
+
+ assert_equal(MIME::Types['application/x-eruby'], [@eruby])
+ end
+
+ def test_s_type_for
+ assert_equal(MIME::Types.type_for('xml').sort, [ MIME::Types['text/xml'], MIME::Types['application/xml'] ].sort)
+ assert_equal(MIME::Types.type_for('gif'), MIME::Types['image/gif'])
+ assert_nothing_raised do
+ MIME::Types['image/gif'][0].system = RUBY_PLATFORM
+ end
+ assert_equal(MIME::Types.type_for('gif', true), MIME::Types['image/gif'])
+ assert(MIME::Types.type_for('zzz').empty?)
+ end
+
+ def test_s_of
+ assert_equal(MIME::Types.of('xml').sort, [ MIME::Types['text/xml'], MIME::Types['application/xml'] ].sort)
+ assert_equal(MIME::Types.of('gif'), MIME::Types['image/gif'])
+ assert_nothing_raised do
+ MIME::Types['image/gif'][0].system = RUBY_PLATFORM
+ end
+ assert_equal(MIME::Types.of('gif', true), MIME::Types['image/gif'])
+ assert(MIME::Types.of('zzz').empty?)
+ end
+end
diff --git a/vendor/mime-types-1.15/tests/testall.rb b/vendor/mime-types-1.15/tests/testall.rb
new file mode 100644
index 0000000..bdf311f
--- /dev/null
+++ b/vendor/mime-types-1.15/tests/testall.rb
@@ -0,0 +1,18 @@
+#! /usr/bin/env ruby
+#--
+# MIME::Types for Ruby
+# http://rubyforge.org/projects/mime-types/
+# Copyright 2003 - 2005 Austin Ziegler.
+# Licensed under a MIT-style licence.
+#
+# $Id: testall.rb,v 1.1 2005/07/08 11:58:06 austin Exp $
+#++
+
+$LOAD_PATH.unshift("#{File.dirname(__FILE__)}/../lib") if __FILE__ == $0
+
+puts "Checking for test cases:"
+Dir['tc_*.rb'].each do |testcase|
+ puts "\t#{testcase}"
+ require testcase
+end
+puts
|
jwilger/jack-the-ripper
|
dc3c3dbeb51626a3adb5ac27a2b04f2a17b517ec
|
added rubyforge-0.4.4 to vendor
|
diff --git a/Rakefile b/Rakefile
index 625e602..91dcaef 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,15 +1,17 @@
-$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/vendor/hoe-1.5.0/lib' ) )
+VENDOR_DIR = File.expand_path( File.dirname( __FILE__ ) + '/vendor' )
+$:.unshift( VENDOR_DIR + '/hoe-1.5.0/lib' )
+$:.unshift( VENDOR_DIR + '/rubyforge-0.4.4/lib' )
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
p.extra_deps = %w( right_aws mime-types daemons )
end
\ No newline at end of file
diff --git a/vendor/rubyforge-0.4.4/History.txt b/vendor/rubyforge-0.4.4/History.txt
new file mode 100644
index 0000000..b4b8059
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/History.txt
@@ -0,0 +1,66 @@
+== Version History:
+
+=== 0.4.4 / 2007-08-13:
+
+* New type_id values will merge with extant data. (self-repairing data is Good)
+* Scrape processor_ids, merging in with extant data.
+* Default to "Other" if a file's type is unrecognized.
+
+=== 0.4.3 / 2007-07-23:
+
+* Set mode on .rubyforge directory to 700.
+* Fix fetching of user id when user has no releases.
+
+=== 0.4.2 / 2007-05-21:
+
+* Fix for windoze users (spaces in path).
+* Added check for extant release.
+* Added default hash for first-time releases.
+
+=== 0.4.1 / 2007-03-08:
+
+* Verify that login succeeded and warn against if not (prolly should raise).
+* Print a friendly error if you have the wrong package id.
+* Handle upload error in add_release a bit better.
+
+=== 0.4.0 / 2007-01-09:
+
+* config.yml split and moved to user-config.yml (up to the user to do).
+* auto-config.yml now generated via config command.
+* @config renamed to @userconfig.
+* @config["rubyforge"] moved to @autoconfig.
+* Added save_autoconfig.
+* Pulled scrape_project from scrape_config.
+* scrape_config no longer takes a user param. Use opts to specify.
+* scrape_project, add_project, add/remove_release now save automatically.
+
+=== 0.3.2 / 2006-11-29:
+
+* Fixed file uploads for windows.
+* Correctly scrape releases with funky characters.
+
+=== 0.3.1 / 2006-10-24:
+
+* Added SSL login.
+* Added yet more debugging output if $DEBUG.
+
+=== 0.3.0 / 2006-09-30:
+
+* Added more debugging output if $DEBUG
+* Added news posting.
+* Added multiple file release to add_release (uses add_file for extras).
+* add_release now returns release_id
+* Fixed config scraper to include '-' in names.
+
+=== 0.2.1 / 2006-09-14:
+
+* Gemspec was too loose about packaging. Now using manifest.
+
+=== 0.2.0 / 2006-09-13:
+
+* Split original script into script and library.
+* Added tests for library.
+* Refactored heavily.
+* Added "config" command to scrape group/project/release ids from rubyforge.
+* Added "names" command to help pick groups and projects.
+* Added "add_file" command to add a file to an existing release.
diff --git a/vendor/rubyforge-0.4.4/Manifest.txt b/vendor/rubyforge-0.4.4/Manifest.txt
new file mode 100644
index 0000000..b9b758b
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/Manifest.txt
@@ -0,0 +1,10 @@
+History.txt
+Manifest.txt
+README.txt
+Rakefile
+bin/rubyforge
+lib/http-access2.rb
+lib/http-access2/cookie.rb
+lib/http-access2/http.rb
+lib/rubyforge.rb
+test/test_rubyforge.rb
diff --git a/vendor/rubyforge-0.4.4/README.txt b/vendor/rubyforge-0.4.4/README.txt
new file mode 100644
index 0000000..2217844
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/README.txt
@@ -0,0 +1,24 @@
+= Rubyforge
+
+* http://codeforpeople.rubyforge.org/rubyforge/
+* http://rubyforge.org/projects/codeforpeople/
+
+== Description
+
+A script which automates a limited set of rubyforge operations.
+
+* Run 'rubyforge help' for complete usage.
+* Setup: For first time users AND upgrades to 0.4.0:
+ * rubyforge setup (deletes your username and password, so run sparingly!)
+ * edit ~/.rubyforge/user-config.yml
+ * rubyforge config
+* For all rubyforge upgrades, run 'rubyforge config' to ensure you have latest.
+* Don't forget to login! logging in will store a cookie in your
+ .rubyforge directory which expires after a time. always run the
+ login command before any operation that requires authentication,
+ such as uploading a package.
+
+== Synopsis
+
+ rubyforge [options]* mode [mode_args]*
+
diff --git a/vendor/rubyforge-0.4.4/Rakefile b/vendor/rubyforge-0.4.4/Rakefile
new file mode 100644
index 0000000..68967af
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/Rakefile
@@ -0,0 +1,28 @@
+# -*- ruby -*-
+
+begin
+ require 'hoe'
+rescue LoadError
+ abort "ERROR: This Rakefile is only useful with hoe installed.
+ If you're trying to install the rubyforge library,
+ please install it via rubygems."
+end
+
+Object.send :remove_const, :RubyForge if defined? RubyForge
+require './lib/rubyforge.rb'
+
+Hoe.new("rubyforge", RubyForge::VERSION) do |p|
+ p.rubyforge_name = "codeforpeople"
+ p.url = "http://rubyforge.org/projects/codeforpeople"
+ p.author = ['Ara T Howard', 'Ryan Davis', 'Eric Hodel']
+ p.need_tar = false
+
+ changes = p.paragraphs_of("History.txt", 1..2).join("\n\n")
+ summary, *description = p.paragraphs_of("README.txt", 3, 3..4)
+
+ p.changes = changes
+ p.summary = summary
+ p.description = description.join("\n\n")
+end
+
+# vim:syntax=ruby
diff --git a/vendor/rubyforge-0.4.4/bin/rubyforge b/vendor/rubyforge-0.4.4/bin/rubyforge
new file mode 100644
index 0000000..b9fcc00
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/bin/rubyforge
@@ -0,0 +1,223 @@
+#! /usr/bin/env ruby
+
+$VERBOSE = true
+
+$:.unshift(File::join(File::dirname(File::dirname(__FILE__)), "lib"))
+
+require 'getoptlong'
+require 'rubyforge'
+
+PROGRAM = File::basename $0
+
+USAGE = <<-EOL
+SYNOPSIS
+
+ #{ PROGRAM } [options]* mode [mode_args]*
+
+DESCRIPTION
+
+ simplistic script which automates a limited set of rubyforge operations
+
+MODES
+
+ setup()
+ initializes your .rubyforge directory. you need to run this first before
+ doing anything else.
+
+ example :
+ #{ PROGRAM } setup
+
+ config([project])
+ Helps you populate your auto-config.yml file by scraping rubyforge and
+ getting your groups, projects, and releases.
+
+ example :
+ #{ PROGRAM } config
+ #{ PROGRAM } config myproject
+
+ names()
+ Prints out the names of your configured groups and projects.
+
+ example :
+ #{ PROGRAM } names
+
+ login()
+ sends username and password from config.yml (or --username/--password
+ options) and stores login cookie in cookie.dat. this is required for
+ subsquent operations work.
+
+ example :
+ #{ PROGRAM } login
+ #{ PROGRAM } login --username zaphod --password 42
+
+ create_package(group_id, package_name)
+ creates the named package under the specified group.
+
+ example :
+ #{ PROGRAM } create_package 1024 traits
+ #{ PROGRAM } login && #{ PROGRAM } create_package codeforpeople.com traits
+
+ add_release(group_id, package_id, release_name, userfile)
+ release a file as release_name under the specified group_id and
+ package_id.
+
+ example :
+ #{ PROGRAM } add_release codeforpeople.com traits 0.8.0 traits-0.8.0.gem
+ #{ PROGRAM } add_release codeforpeople.com traits 0.8.0 traits-0.8.0.tgz
+ #{ PROGRAM } add_release 1024 1242 0.8.0 traits-0.8.0.gem
+ #{ PROGRAM } login && #{ PROGRAM } add_release 1024 1242 0.8.0 traits-0.8.0.gem
+
+ add_file(group_id, package_id, release_id, userfile)
+ add a file to an existing release under the specified group_id,
+ package_id, and release_id
+
+ example :
+ #{ PROGRAM } add_file codeforpeople.com traits 0.8.0 traits-0.8.0.gem
+ #{ PROGRAM } add_file codeforpeople.com traits 0.8.0 traits-0.8.0.tgz
+ #{ PROGRAM } add_file 1024 1242 0.8.0 traits-0.8.0.gem
+
+ delete_package(group_id, package_name)
+ deletes a package and all its files.
+
+ example :
+ #{ PROGRAM } delete_package codeforpeople.com traits
+ #{ PROGRAM } delete_package 1024 traits
+
+NOTES
+
+ - In order to use group_id, package_id, or release_id by name,
+ rather than number, you must edit the rubyforge[group_ids] and
+ rubyforge[package_ids] translation tables in your config.yml. See
+ the config command for more information and help.
+
+ - don\'t forget to login! logging in will store a cookie in your
+ .rubyforge directory which expires after a time. always run the login
+ command before any operation that requires authentication, such as
+ uploading a package.
+
+TODO
+
+ - add error checking. this requires screen scraping to see of an operation
+ succeeded since 200 is returned from rubyforge even for failed operations
+ and only the html text reveals the status.
+
+OPTIONS
+
+ global :
+ --help , -h
+ this message
+ --config , -c
+ specify a config file (default #{ RubyForge::CONFIG_F })
+ --username , -u
+ specify username, taken from config otherwise
+ --password , -p
+ specify password, taken from config otherwise
+ --cookie_jar , -C
+ specify cookie storage file (default #{ RubyForge::COOKIE_F })
+
+ add_release :
+ --is_private , -P
+ if true, release is not public
+ --release_date , -r
+ specify time of release (default 'now')
+ --type_id , -t
+ specify filetype code (default determined by ext)
+ --processor_id , -o
+ specify processor (default 'Any')
+ --release_notes , -n
+ specify release notes as string or file
+ --release_changes , -a
+ specify release changes as string or file
+ --preformatted , -f
+ specify whether release_notes/changes are preformatted
+
+EOL
+
+mode = ARGV.shift
+
+opts = GetoptLong::new(
+ [ "--help" , "-h" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--config" , "-c" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--username" , "-u" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--password" , "-p" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--cookie_jar" , "-C" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--is_private" , "-P" , GetoptLong::REQUIRED_ARGUMENT ],
+ [ "--release_date" , "-r" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--type_id" , "-t" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--processor_id" , "-o" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--release_notes" , "-n" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--release_changes" , "-a" , GetoptLong::REQUIRED_ARGUMENT ] ,
+ [ "--preformatted" , "-f" , GetoptLong::NO_ARGUMENT ]
+ ).enum_for.inject({}){|h,kv| h.update kv.first.delete('-') => kv.last}
+
+rubyforge = RubyForge.new(opts["config"] || RubyForge::CONFIG_F, opts)
+
+mode = "help" if opts["help"]
+
+case mode
+when %r/help/
+ USAGE.display
+when %r/setup/
+ rubyforge.setup
+when %r/config/
+ project = ARGV.shift
+ if project then
+ rubyforge.scrape_project(project)
+ else
+ rubyforge.scrape_config
+ end
+when %r/names/
+ rf = rubyforge.autoconfig
+ puts "groups : #{rf["group_ids"].keys.sort.join(", ")}"
+ puts "packages: #{rf["package_ids"].keys.sort.join(", ")}"
+when %r/login/
+ rubyforge.login
+when %r/create_package/
+ page, msg = "/frs/admin/index.php", "post_content"
+
+ group_id, package_name = ARGV
+
+ abort "no <group_id>" unless group_id
+ abort "no <package_name>" unless package_name
+
+ group_id = Integer(group_id) rescue group_id
+
+ rubyforge.create_package group_id, package_name
+when %r/delete_package/
+ group_id, package_id = ARGV
+
+ abort "no <group_id>" unless group_id
+ abort "no <package_id>" unless package_id
+
+ group_id = Integer(group_id) rescue group_id
+ package_id = Integer(package_id) rescue package_id
+
+ rubyforge.delete_package group_id, package_id
+when %r/add_release/
+ group_id, package_id, release_name, userfile = ARGV
+
+ abort "no <group_id>" unless group_id
+ abort "no <package_id>" unless package_id
+ abort "no <release_name>" unless release_name
+ abort "no <userfile>" unless userfile
+
+ group_id = Integer(group_id) rescue group_id
+ package_id = Integer(package_id) rescue package_id
+
+ rubyforge.add_release group_id, package_id, release_name, userfile
+when %r/add_file/
+ group_id, package_id, release_id, userfile = ARGV
+
+ abort "no <group_id>" unless group_id
+ abort "no <package_id>" unless package_id
+ abort "no <release_id>" unless release_id
+ abort "no <userfile>" unless userfile
+
+ group_id = Integer(group_id) rescue group_id
+ package_id = Integer(package_id) rescue package_id
+ release_id = Integer(release_id) rescue release_id
+
+ rubyforge.add_file group_id, package_id, release_id, userfile
+else
+ abort USAGE
+end
diff --git a/vendor/rubyforge-0.4.4/lib/http-access2.rb b/vendor/rubyforge-0.4.4/lib/http-access2.rb
new file mode 100644
index 0000000..bcb0153
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/lib/http-access2.rb
@@ -0,0 +1,1588 @@
+# HTTPAccess2 - HTTP accessing library.
+# Copyright (C) 2000-2005 NAKAMURA, Hiroshi <[email protected]>.
+
+# This program is copyrighted free software by NAKAMURA, Hiroshi. You can
+# redistribute it and/or modify it under the same terms of Ruby's license;
+# either the dual license version in 2003, or any later version.
+
+# http-access2.rb is based on http-access.rb in http-access/0.0.4. Some part
+# of code in http-access.rb was recycled in http-access2.rb. Those part is
+# copyrighted by Maehashi-san.
+
+
+# Ruby standard library
+require 'timeout'
+require 'uri'
+require 'socket'
+require 'thread'
+
+# Extra library
+require 'http-access2/http'
+require 'http-access2/cookie'
+
+
+module HTTPAccess2
+ VERSION = '2.0.6'
+ RUBY_VERSION_STRING = "ruby #{RUBY_VERSION} (#{RUBY_RELEASE_DATE}) [#{RUBY_PLATFORM}]"
+ s = %w$Id: http-access2.rb,v 1.1.1.1 2006/09/13 04:28:30 zenspider Exp $
+ RCS_FILE, RCS_REVISION = s[1][/.*(?=,v$)/], s[2]
+
+ SSLEnabled = begin
+ require 'openssl'
+ true
+ rescue LoadError
+ false
+ end
+
+ DEBUG_SSL = true
+
+
+# DESCRIPTION
+# HTTPAccess2::Client -- Client to retrieve web resources via HTTP.
+#
+# How to create your client.
+# 1. Create simple client.
+# clnt = HTTPAccess2::Client.new
+#
+# 2. Accessing resources through HTTP proxy.
+# clnt = HTTPAccess2::Client.new("http://myproxy:8080")
+#
+# 3. Set User-Agent and From in HTTP request header.(nil means "No proxy")
+# clnt = HTTPAccess2::Client.new(nil, "MyAgent", "[email protected]")
+#
+# How to retrieve web resources.
+# 1. Get content of specified URL.
+# puts clnt.get_content("http://www.ruby-lang.org/en/")
+#
+# 2. Do HEAD request.
+# res = clnt.head(uri)
+#
+# 3. Do GET request with query.
+# res = clnt.get(uri)
+#
+# 4. Do POST request.
+# res = clnt.post(uri)
+# res = clnt.get|post|head(uri, proxy)
+#
+class Client
+ attr_reader :agent_name
+ attr_reader :from
+ attr_reader :ssl_config
+ attr_accessor :cookie_manager
+ attr_reader :test_loopback_response
+
+ class << self
+ %w(get_content head get post put delete options trace).each do |name|
+ eval <<-EOD
+ def #{name}(*arg)
+ new.#{name}(*arg)
+ end
+ EOD
+ end
+ end
+
+ # SYNOPSIS
+ # Client.new(proxy = nil, agent_name = nil, from = nil)
+ #
+ # ARGS
+ # proxy A String of HTTP proxy URL. ex. "http://proxy:8080".
+ # agent_name A String for "User-Agent" HTTP request header.
+ # from A String for "From" HTTP request header.
+ #
+ # DESCRIPTION
+ # Create an instance.
+ # SSLConfig cannot be re-initialized. Create new client.
+ #
+ def initialize(proxy = nil, agent_name = nil, from = nil)
+ @proxy = nil # assigned later.
+ @no_proxy = nil
+ @agent_name = agent_name
+ @from = from
+ @basic_auth = BasicAuth.new(self)
+ @debug_dev = nil
+ @ssl_config = SSLConfig.new(self)
+ @redirect_uri_callback = method(:default_redirect_uri_callback)
+ @test_loopback_response = []
+ @session_manager = SessionManager.new
+ @session_manager.agent_name = @agent_name
+ @session_manager.from = @from
+ @session_manager.ssl_config = @ssl_config
+ @cookie_manager = WebAgent::CookieManager.new
+ self.proxy = proxy
+ end
+
+ def debug_dev
+ @debug_dev
+ end
+
+ def debug_dev=(dev)
+ @debug_dev = dev
+ reset_all
+ @session_manager.debug_dev = dev
+ end
+
+ def protocol_version
+ @session_manager.protocol_version
+ end
+
+ def protocol_version=(protocol_version)
+ reset_all
+ @session_manager.protocol_version = protocol_version
+ end
+
+ def connect_timeout
+ @session_manager.connect_timeout
+ end
+
+ def connect_timeout=(connect_timeout)
+ reset_all
+ @session_manager.connect_timeout = connect_timeout
+ end
+
+ def send_timeout
+ @session_manager.send_timeout
+ end
+
+ def send_timeout=(send_timeout)
+ reset_all
+ @session_manager.send_timeout = send_timeout
+ end
+
+ def receive_timeout
+ @session_manager.receive_timeout
+ end
+
+ def receive_timeout=(receive_timeout)
+ reset_all
+ @session_manager.receive_timeout = receive_timeout
+ end
+
+ def proxy
+ @proxy
+ end
+
+ def proxy=(proxy)
+ if proxy.nil?
+ @proxy = nil
+ else
+ if proxy.is_a?(URI)
+ @proxy = proxy
+ else
+ @proxy = URI.parse(proxy)
+ end
+ if @proxy.scheme == nil or @proxy.scheme.downcase != 'http' or
+ @proxy.host == nil or @proxy.port == nil
+ raise ArgumentError.new("unsupported proxy `#{proxy}'")
+ end
+ end
+ reset_all
+ @proxy
+ end
+
+ def no_proxy
+ @no_proxy
+ end
+
+ def no_proxy=(no_proxy)
+ @no_proxy = no_proxy
+ reset_all
+ end
+
+ # if your ruby is older than 2005-09-06, do not set socket_sync = false to
+ # avoid an SSL socket blocking bug in openssl/buffering.rb.
+ def socket_sync=(socket_sync)
+ @session_manager.socket_sync = socket_sync
+ end
+
+ def set_basic_auth(uri, user_id, passwd)
+ unless uri.is_a?(URI)
+ uri = URI.parse(uri)
+ end
+ @basic_auth.set(uri, user_id, passwd)
+ end
+
+ def set_cookie_store(filename)
+ if @cookie_manager.cookies_file
+ raise RuntimeError.new("overriding cookie file location")
+ end
+ @cookie_manager.cookies_file = filename
+ @cookie_manager.load_cookies if filename
+ end
+
+ def save_cookie_store
+ @cookie_manager.save_cookies
+ end
+
+ def redirect_uri_callback=(redirect_uri_callback)
+ @redirect_uri_callback = redirect_uri_callback
+ end
+
+ # SYNOPSIS
+ # Client#get_content(uri, query = nil, extheader = {}, &block = nil)
+ #
+ # ARGS
+ # uri an_URI or a_string of uri to connect.
+ # query a_hash or an_array of query part. e.g. { "a" => "b" }.
+ # Give an array to pass multiple value like
+ # [["a" => "b"], ["a" => "c"]].
+ # extheader
+ # a_hash of extra headers like { "SOAPAction" => "urn:foo" }.
+ # &block Give a block to get chunked message-body of response like
+ # get_content(uri) { |chunked_body| ... }
+ # Size of each chunk may not be the same.
+ #
+ # DESCRIPTION
+ #   Get a_string of message-body of response.
+ #
+ def get_content(uri, query = nil, extheader = {}, &block)
+ retry_connect(uri, query) do |uri, query|
+ get(uri, query, extheader, &block)
+ end
+ end
+
+ def post_content(uri, body = nil, extheader = {}, &block)
+ retry_connect(uri, nil) do |uri, query|
+ post(uri, body, extheader, &block)
+ end
+ end
+
+ def default_redirect_uri_callback(res)
+ uri = res.header['location'][0]
+ puts "Redirect to: #{uri}" if $DEBUG
+ uri
+ end
+
+ def head(uri, query = nil, extheader = {})
+ request('HEAD', uri, query, nil, extheader)
+ end
+
+ def get(uri, query = nil, extheader = {}, &block)
+ request('GET', uri, query, nil, extheader, &block)
+ end
+
+ def post(uri, body = nil, extheader = {}, &block)
+ request('POST', uri, nil, body, extheader, &block)
+ end
+
+ def put(uri, body = nil, extheader = {}, &block)
+ request('PUT', uri, nil, body, extheader, &block)
+ end
+
+ def delete(uri, extheader = {}, &block)
+ request('DELETE', uri, nil, nil, extheader, &block)
+ end
+
+ def options(uri, extheader = {}, &block)
+ request('OPTIONS', uri, nil, nil, extheader, &block)
+ end
+
+ def trace(uri, query = nil, body = nil, extheader = {}, &block)
+ request('TRACE', uri, query, body, extheader, &block)
+ end
+
+ def request(method, uri, query = nil, body = nil, extheader = {}, &block)
+ conn = Connection.new
+ conn_request(conn, method, uri, query, body, extheader, &block)
+ conn.pop
+ end
+
+ # Async interface.
+
+ def head_async(uri, query = nil, extheader = {})
+ request_async('HEAD', uri, query, nil, extheader)
+ end
+
+ def get_async(uri, query = nil, extheader = {})
+ request_async('GET', uri, query, nil, extheader)
+ end
+
+ def post_async(uri, body = nil, extheader = {})
+ request_async('POST', uri, nil, body, extheader)
+ end
+
+ def put_async(uri, body = nil, extheader = {})
+ request_async('PUT', uri, nil, body, extheader)
+ end
+
+ def delete_async(uri, extheader = {})
+ request_async('DELETE', uri, nil, nil, extheader)
+ end
+
+ def options_async(uri, extheader = {})
+ request_async('OPTIONS', uri, nil, nil, extheader)
+ end
+
+ def trace_async(uri, query = nil, body = nil, extheader = {})
+ request_async('TRACE', uri, query, body, extheader)
+ end
+
+ def request_async(method, uri, query = nil, body = nil, extheader = {})
+ conn = Connection.new
+ t = Thread.new(conn) { |tconn|
+ conn_request(tconn, method, uri, query, body, extheader)
+ }
+ conn.async_thread = t
+ conn
+ end
+
+ ##
+ # Multiple call interface.
+
+ # ???
+
+ ##
+ # Management interface.
+
+ def reset(uri)
+ @session_manager.reset(uri)
+ end
+
+ def reset_all
+ @session_manager.reset_all
+ end
+
+private
+
+ def retry_connect(uri, query = nil)
+ retry_number = 0
+ while retry_number < 10
+ res = yield(uri, query)
+ if res.status == HTTP::Status::OK
+ return res.content
+ elsif HTTP::Status.redirect?(res.status)
+ uri = @redirect_uri_callback.call(res)
+ query = nil
+ retry_number += 1
+ else
+ raise RuntimeError.new("Unexpected response: #{res.header.inspect}")
+ end
+ end
+ raise RuntimeError.new("Retry count exceeded.")
+ end
+
+ def conn_request(conn, method, uri, query, body, extheader, &block)
+ unless uri.is_a?(URI)
+ uri = URI.parse(uri)
+ end
+ proxy = no_proxy?(uri) ? nil : @proxy
+ begin
+ req = create_request(method, uri, query, body, extheader, !proxy.nil?)
+ do_get_block(req, proxy, conn, &block)
+ rescue Session::KeepAliveDisconnected
+ req = create_request(method, uri, query, body, extheader, !proxy.nil?)
+ do_get_block(req, proxy, conn, &block)
+ end
+ end
+
+ def create_request(method, uri, query, body, extheader, proxy)
+ if extheader.is_a?(Hash)
+ extheader = extheader.to_a
+ end
+ cred = @basic_auth.get(uri)
+ if cred
+ extheader << ['Authorization', "Basic " << cred]
+ end
+ if cookies = @cookie_manager.find(uri)
+ extheader << ['Cookie', cookies]
+ end
+ boundary = nil
+ content_type = extheader.find { |key, value|
+ key.downcase == 'content-type'
+ }
+ if content_type && content_type[1] =~ /boundary=(.+)\z/
+ boundary = $1
+ end
+ req = HTTP::Message.new_request(method, uri, query, body, proxy, boundary)
+ extheader.each do |key, value|
+ req.header.set(key, value)
+ end
+ if content_type.nil? and !body.nil?
+ req.header.set('content-type', 'application/x-www-form-urlencoded')
+ end
+ req
+ end
+
+ NO_PROXY_HOSTS = ['localhost']
+
+ def no_proxy?(uri)
+ if !@proxy or NO_PROXY_HOSTS.include?(uri.host)
+ return true
+ end
+ unless @no_proxy
+ return false
+ end
+ @no_proxy.scan(/([^:,]+)(?::(\d+))?/) do |host, port|
+ if /(\A|\.)#{Regexp.quote(host)}\z/i =~ uri.host &&
+ (!port || uri.port == port.to_i)
+ return true
+ end
+ end
+ false
+ end
+
+ # !! CAUTION !!
+  # Method 'do_get*' runs under MT condition. Be careful to change.
+ def do_get_block(req, proxy, conn, &block)
+ if str = @test_loopback_response.shift
+ dump_dummy_request_response(req.body.dump, str) if @debug_dev
+ conn.push(HTTP::Message.new_response(str))
+ return
+ end
+ content = ''
+ res = HTTP::Message.new_response(content)
+ @debug_dev << "= Request\n\n" if @debug_dev
+ sess = @session_manager.query(req, proxy)
+ @debug_dev << "\n\n= Response\n\n" if @debug_dev
+ do_get_header(req, res, sess)
+ conn.push(res)
+ sess.get_data() do |str|
+ block.call(str) if block
+ content << str
+ end
+ @session_manager.keep(sess) unless sess.closed?
+ end
+
+ def do_get_stream(req, proxy, conn)
+ if str = @test_loopback_response.shift
+ dump_dummy_request_response(req.body.dump, str) if @debug_dev
+ conn.push(HTTP::Message.new_response(str))
+ return
+ end
+ piper, pipew = IO.pipe
+ res = HTTP::Message.new_response(piper)
+ @debug_dev << "= Request\n\n" if @debug_dev
+ sess = @session_manager.query(req, proxy)
+ @debug_dev << "\n\n= Response\n\n" if @debug_dev
+ do_get_header(req, res, sess)
+ conn.push(res)
+ sess.get_data() do |str|
+ pipew.syswrite(str)
+ end
+ pipew.close
+ @session_manager.keep(sess) unless sess.closed?
+ end
+
+ def do_get_header(req, res, sess)
+ res.version, res.status, res.reason = sess.get_status
+ sess.get_header().each do |line|
+ unless /^([^:]+)\s*:\s*(.*)$/ =~ line
+ raise RuntimeError.new("Unparsable header: '#{line}'.") if $DEBUG
+ end
+ res.header.set($1, $2)
+ end
+ if res.header['set-cookie']
+ res.header['set-cookie'].each do |cookie|
+ @cookie_manager.parse(cookie, req.header.request_uri)
+ end
+ end
+ end
+
+ def dump_dummy_request_response(req, res)
+ @debug_dev << "= Dummy Request\n\n"
+ @debug_dev << req
+ @debug_dev << "\n\n= Dummy Response\n\n"
+ @debug_dev << res
+ end
+end
+
+
+# HTTPAccess2::SSLConfig -- SSL configuration of a client.
+#
+class SSLConfig # :nodoc:
+ attr_reader :client_cert
+ attr_reader :client_key
+ attr_reader :client_ca
+
+ attr_reader :verify_mode
+ attr_reader :verify_depth
+ attr_reader :verify_callback
+
+ attr_reader :timeout
+ attr_reader :options
+ attr_reader :ciphers
+
+ attr_reader :cert_store # don't use if you don't know what it is.
+
+ def initialize(client)
+ return unless SSLEnabled
+ @client = client
+ @cert_store = OpenSSL::X509::Store.new
+ @client_cert = @client_key = @client_ca = nil
+ @verify_mode = OpenSSL::SSL::VERIFY_PEER |
+ OpenSSL::SSL::VERIFY_FAIL_IF_NO_PEER_CERT
+ @verify_depth = nil
+ @verify_callback = nil
+ @dest = nil
+ @timeout = nil
+ @options = defined?(OpenSSL::SSL::OP_ALL) ?
+ OpenSSL::SSL::OP_ALL | OpenSSL::SSL::OP_NO_SSLv2 : nil
+ @ciphers = "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH"
+ end
+
+ def set_client_cert_file(cert_file, key_file)
+ @client_cert = OpenSSL::X509::Certificate.new(File.open(cert_file).read)
+ @client_key = OpenSSL::PKey::RSA.new(File.open(key_file).read)
+ change_notify
+ end
+
+ def set_trust_ca(trust_ca_file_or_hashed_dir)
+ if FileTest.directory?(trust_ca_file_or_hashed_dir)
+ @cert_store.add_path(trust_ca_file_or_hashed_dir)
+ else
+ @cert_store.add_file(trust_ca_file_or_hashed_dir)
+ end
+ change_notify
+ end
+
+ def set_crl(crl_file)
+ crl = OpenSSL::X509::CRL.new(File.open(crl_file).read)
+ @cert_store.add_crl(crl)
+ @cert_store.flags = OpenSSL::X509::V_FLAG_CRL_CHECK | OpenSSL::X509::V_FLAG_CRL_CHECK_ALL
+ change_notify
+ end
+
+ def client_cert=(client_cert)
+ @client_cert = client_cert
+ change_notify
+ end
+
+ def client_key=(client_key)
+ @client_key = client_key
+ change_notify
+ end
+
+ def client_ca=(client_ca)
+ @client_ca = client_ca
+ change_notify
+ end
+
+ def verify_mode=(verify_mode)
+ @verify_mode = verify_mode
+ change_notify
+ end
+
+ def verify_depth=(verify_depth)
+ @verify_depth = verify_depth
+ change_notify
+ end
+
+ def verify_callback=(verify_callback)
+ @verify_callback = verify_callback
+ change_notify
+ end
+
+ def timeout=(timeout)
+ @timeout = timeout
+ change_notify
+ end
+
+ def options=(options)
+ @options = options
+ change_notify
+ end
+
+ def ciphers=(ciphers)
+ @ciphers = ciphers
+ change_notify
+ end
+
+ # don't use if you don't know what it is.
+ def cert_store=(cert_store)
+ @cert_store = cert_store
+ change_notify
+ end
+
+ # interfaces for SSLSocketWrap.
+
+ def set_context(ctx)
+ # Verification: Use Store#verify_callback instead of SSLContext#verify*?
+ ctx.cert_store = @cert_store
+ ctx.verify_mode = @verify_mode
+ ctx.verify_depth = @verify_depth if @verify_depth
+ ctx.verify_callback = @verify_callback || method(:default_verify_callback)
+ # SSL config
+ ctx.cert = @client_cert
+ ctx.key = @client_key
+ ctx.client_ca = @client_ca
+ ctx.timeout = @timeout
+ ctx.options = @options
+ ctx.ciphers = @ciphers
+ end
+
+ # this definition must match with the one in ext/openssl/lib/openssl/ssl.rb
+ def post_connection_check(peer_cert, hostname)
+ check_common_name = true
+ cert = peer_cert
+ cert.extensions.each{|ext|
+ next if ext.oid != "subjectAltName"
+ ext.value.split(/,\s+/).each{|general_name|
+ if /\ADNS:(.*)/ =~ general_name
+ check_common_name = false
+ reg = Regexp.escape($1).gsub(/\\\*/, "[^.]+")
+ return true if /\A#{reg}\z/i =~ hostname
+ elsif /\AIP Address:(.*)/ =~ general_name
+ check_common_name = false
+ return true if $1 == hostname
+ end
+ }
+ }
+ if check_common_name
+ cert.subject.to_a.each{|oid, value|
+ if oid == "CN" && value.casecmp(hostname) == 0
+ return true
+ end
+ }
+ end
+ raise OpenSSL::SSL::SSLError, "hostname not match"
+ end
+
+ # Default callback for verification: only dumps error.
+ def default_verify_callback(is_ok, ctx)
+ if $DEBUG
+ puts "#{ is_ok ? 'ok' : 'ng' }: #{ctx.current_cert.subject}"
+ end
+ if !is_ok
+ depth = ctx.error_depth
+ code = ctx.error
+ msg = ctx.error_string
+ STDERR.puts "at depth #{depth} - #{code}: #{msg}"
+ end
+ is_ok
+ end
+
+ # Sample callback method: CAUTION: does not check CRL/ARL.
+ def sample_verify_callback(is_ok, ctx)
+ unless is_ok
+ depth = ctx.error_depth
+ code = ctx.error
+ msg = ctx.error_string
+ STDERR.puts "at depth #{depth} - #{code}: #{msg}" if $DEBUG
+ return false
+ end
+
+ cert = ctx.current_cert
+ self_signed = false
+ ca = false
+ pathlen = nil
+ server_auth = true
+ self_signed = (cert.subject.cmp(cert.issuer) == 0)
+
+ # Check extensions whatever its criticality is. (sample)
+ cert.extensions.each do |ex|
+ case ex.oid
+ when 'basicConstraints'
+ /CA:(TRUE|FALSE), pathlen:(\d+)/ =~ ex.value
+ ca = ($1 == 'TRUE')
+ pathlen = $2.to_i
+ when 'keyUsage'
+ usage = ex.value.split(/\s*,\s*/)
+ ca = usage.include?('Certificate Sign')
+ server_auth = usage.include?('Key Encipherment')
+ when 'extendedKeyUsage'
+ usage = ex.value.split(/\s*,\s*/)
+ server_auth = usage.include?('Netscape Server Gated Crypto')
+ when 'nsCertType'
+ usage = ex.value.split(/\s*,\s*/)
+ ca = usage.include?('SSL CA')
+ server_auth = usage.include?('SSL Server')
+ end
+ end
+
+ if self_signed
+ STDERR.puts 'self signing CA' if $DEBUG
+ return true
+ elsif ca
+ STDERR.puts 'middle level CA' if $DEBUG
+ return true
+ elsif server_auth
+ STDERR.puts 'for server authentication' if $DEBUG
+ return true
+ end
+
+ return false
+ end
+
+private
+
+ def change_notify
+ @client.reset_all
+ end
+end
+
+
+# HTTPAccess2::BasicAuth -- BasicAuth repository.
+#
+class BasicAuth # :nodoc:
+ def initialize(client)
+ @client = client
+ @auth = {}
+ end
+
+ def set(uri, user_id, passwd)
+ uri = uri.clone
+ uri.path = uri.path.sub(/\/[^\/]*$/, '/')
+ @auth[uri] = ["#{user_id}:#{passwd}"].pack('m').strip
+ @client.reset_all
+ end
+
+ def get(uri)
+ @auth.each do |realm_uri, cred|
+ if ((realm_uri.host == uri.host) and
+ (realm_uri.scheme == uri.scheme) and
+ (realm_uri.port == uri.port) and
+ uri.path.upcase.index(realm_uri.path.upcase) == 0)
+ return cred
+ end
+ end
+ nil
+ end
+end
+
+
+# HTTPAccess2::Site -- manage a site(host and port)
+#
+class Site # :nodoc:
+ attr_accessor :scheme
+ attr_accessor :host
+ attr_reader :port
+
+ def initialize(uri = nil)
+ if uri
+ @scheme = uri.scheme
+ @host = uri.host
+ @port = uri.port.to_i
+ else
+ @scheme = 'tcp'
+ @host = '0.0.0.0'
+ @port = 0
+ end
+ end
+
+ def addr
+ "#{@scheme}://#{@host}:#{@port.to_s}"
+ end
+
+ def port=(port)
+ @port = port.to_i
+ end
+
+ def ==(rhs)
+ if rhs.is_a?(Site)
+ ((@scheme == rhs.scheme) and (@host == rhs.host) and (@port == rhs.port))
+ else
+ false
+ end
+ end
+
+ def to_s
+ addr
+ end
+
+ def inspect
+ sprintf("#<%s:0x%x %s>", self.class.name, __id__, addr)
+ end
+end
+
+
+# HTTPAccess2::Connection -- manage a connection (one request and response to it).
+#
+class Connection # :nodoc:
+ attr_accessor :async_thread
+
+ def initialize(header_queue = [], body_queue = [])
+ @headers = header_queue
+ @body = body_queue
+ @async_thread = nil
+ @queue = Queue.new
+ end
+
+ def finished?
+ if !@async_thread
+ # Not in async mode.
+ true
+ elsif @async_thread.alive?
+ # Working...
+ false
+ else
+ # Async thread have been finished.
+ @async_thread.join
+ true
+ end
+ end
+
+ def pop
+ @queue.pop
+ end
+
+ def push(result)
+ @queue.push(result)
+ end
+
+ def join
+ unless @async_thread
+ false
+ else
+ @async_thread.join
+ end
+ end
+end
+
+
+# HTTPAccess2::SessionManager -- manage several sessions.
+#
+class SessionManager # :nodoc:
+ attr_accessor :agent_name # Name of this client.
+ attr_accessor :from # Owner of this client.
+
+ attr_accessor :protocol_version # Requested protocol version
+ attr_accessor :chunk_size # Chunk size for chunked request
+ attr_accessor :debug_dev # Device for dumping log for debugging
+ attr_accessor :socket_sync # Boolean value for Socket#sync
+
+ # These parameters are not used now...
+ attr_accessor :connect_timeout
+ attr_accessor :connect_retry # Maximum retry count. 0 for infinite.
+ attr_accessor :send_timeout
+ attr_accessor :receive_timeout
+ attr_accessor :read_block_size
+
+ attr_accessor :ssl_config
+
+ def initialize
+ @proxy = nil
+
+ @agent_name = nil
+ @from = nil
+
+ @protocol_version = nil
+ @debug_dev = nil
+ @socket_sync = true
+ @chunk_size = 4096
+
+ @connect_timeout = 60
+ @connect_retry = 1
+ @send_timeout = 120
+ @receive_timeout = 60 # For each read_block_size bytes
+ @read_block_size = 8192
+
+ @ssl_config = nil
+
+ @sess_pool = []
+ @sess_pool_mutex = Mutex.new
+ end
+
+ def proxy=(proxy)
+ if proxy.nil?
+ @proxy = nil
+ else
+ @proxy = Site.new(proxy)
+ end
+ end
+
+ def query(req, proxy)
+ req.body.chunk_size = @chunk_size
+ dest_site = Site.new(req.header.request_uri)
+ proxy_site = if proxy
+ Site.new(proxy)
+ else
+ @proxy
+ end
+ sess = open(dest_site, proxy_site)
+ begin
+ sess.query(req)
+ rescue
+ sess.close
+ raise
+ end
+ sess
+ end
+
+ def reset(uri)
+ unless uri.is_a?(URI)
+ uri = URI.parse(uri.to_s)
+ end
+ site = Site.new(uri)
+ close(site)
+ end
+
+ def reset_all
+ close_all
+ end
+
+ def keep(sess)
+ add_cached_session(sess)
+ end
+
+private
+
+ def open(dest, proxy = nil)
+ sess = nil
+ if cached = get_cached_session(dest)
+ sess = cached
+ else
+ sess = Session.new(dest, @agent_name, @from)
+ sess.proxy = proxy
+ sess.socket_sync = @socket_sync
+ sess.requested_version = @protocol_version if @protocol_version
+ sess.connect_timeout = @connect_timeout
+ sess.connect_retry = @connect_retry
+ sess.send_timeout = @send_timeout
+ sess.receive_timeout = @receive_timeout
+ sess.read_block_size = @read_block_size
+ sess.ssl_config = @ssl_config
+ sess.debug_dev = @debug_dev
+ end
+ sess
+ end
+
+ def close_all
+ each_sess do |sess|
+ sess.close
+ end
+ @sess_pool.clear
+ end
+
+ def close(dest)
+ if cached = get_cached_session(dest)
+ cached.close
+ true
+ else
+ false
+ end
+ end
+
+ def get_cached_session(dest)
+ cached = nil
+ @sess_pool_mutex.synchronize do
+ new_pool = []
+ @sess_pool.each do |s|
+ if s.dest == dest
+ cached = s
+ else
+ new_pool << s
+ end
+ end
+ @sess_pool = new_pool
+ end
+ cached
+ end
+
+ def add_cached_session(sess)
+ @sess_pool_mutex.synchronize do
+ @sess_pool << sess
+ end
+ end
+
+ def each_sess
+ @sess_pool_mutex.synchronize do
+ @sess_pool.each do |sess|
+ yield(sess)
+ end
+ end
+ end
+end
+
+
+# HTTPAccess2::SSLSocketWrap
+#
+class SSLSocketWrap
+ def initialize(socket, context, debug_dev = nil)
+ unless SSLEnabled
+ raise RuntimeError.new(
+ "Ruby/OpenSSL module is required for https access.")
+ end
+ @context = context
+ @socket = socket
+ @ssl_socket = create_ssl_socket(@socket)
+ @debug_dev = debug_dev
+ end
+
+ def ssl_connect
+ @ssl_socket.connect
+ end
+
+ def post_connection_check(host)
+ verify_mode = @context.verify_mode || OpenSSL::SSL::VERIFY_NONE
+ if verify_mode == OpenSSL::SSL::VERIFY_NONE
+ return
+ elsif @ssl_socket.peer_cert.nil? and
+ check_mask(verify_mode, OpenSSL::SSL::VERIFY_FAIL_IF_NO_PEER_CERT)
+ raise OpenSSL::SSL::SSLError, "no peer cert"
+ end
+ hostname = host.host
+ if @ssl_socket.respond_to?(:post_connection_check)
+ @ssl_socket.post_connection_check(hostname)
+ end
+ @context.post_connection_check(@ssl_socket.peer_cert, hostname)
+ end
+
+ def peer_cert
+ @ssl_socket.peer_cert
+ end
+
+ def addr
+ @socket.addr
+ end
+
+ def close
+ @ssl_socket.close
+ @socket.close
+ end
+
+ def closed?
+ @socket.closed?
+ end
+
+ def eof?
+ @ssl_socket.eof?
+ end
+
+ def gets(*args)
+ str = @ssl_socket.gets(*args)
+ @debug_dev << str if @debug_dev
+ str
+ end
+
+ def read(*args)
+ str = @ssl_socket.read(*args)
+ @debug_dev << str if @debug_dev
+ str
+ end
+
+ def <<(str)
+ rv = @ssl_socket.write(str)
+ @debug_dev << str if @debug_dev
+ rv
+ end
+
+ def flush
+ @ssl_socket.flush
+ end
+
+ def sync
+ @ssl_socket.sync
+ end
+
+ def sync=(sync)
+ @ssl_socket.sync = sync
+ end
+
+private
+
+ def check_mask(value, mask)
+ value & mask == mask
+ end
+
+ def create_ssl_socket(socket)
+ ssl_socket = nil
+ if OpenSSL::SSL.const_defined?("SSLContext")
+ ctx = OpenSSL::SSL::SSLContext.new
+ @context.set_context(ctx)
+ ssl_socket = OpenSSL::SSL::SSLSocket.new(socket, ctx)
+ else
+ ssl_socket = OpenSSL::SSL::SSLSocket.new(socket)
+ @context.set_context(ssl_socket)
+ end
+ ssl_socket
+ end
+end
+
+
+# HTTPAccess2::DebugSocket -- debugging support
+#
+class DebugSocket < TCPSocket
+ attr_accessor :debug_dev # Device for logging.
+
+ class << self
+ def create_socket(host, port, debug_dev)
+ debug_dev << "! CONNECT TO #{host}:#{port}\n"
+ socket = new(host, port)
+ socket.debug_dev = debug_dev
+ socket.log_connect
+ socket
+ end
+
+ private :new
+ end
+
+ def initialize(*args)
+ super
+ @debug_dev = nil
+ end
+
+ def log_connect
+ @debug_dev << '! CONNECTION ESTABLISHED' << "\n"
+ end
+
+ def close
+ super
+ @debug_dev << '! CONNECTION CLOSED' << "\n"
+ end
+
+ def gets(*args)
+ str = super
+ @debug_dev << str if str
+ str
+ end
+
+ def read(*args)
+ str = super
+ @debug_dev << str if str
+ str
+ end
+
+ def <<(str)
+ super
+ @debug_dev << str
+ end
+end
+
+
+# HTTPAccess2::Session -- manage http session with one site.
+# One or more TCP sessions with the site may be created.
+# Only 1 TCP session is live at the same time.
+#
+class Session # :nodoc:
+
+ class Error < StandardError # :nodoc:
+ end
+
+ class InvalidState < Error # :nodoc:
+ end
+
+ class BadResponse < Error # :nodoc:
+ end
+
+ class KeepAliveDisconnected < Error # :nodoc:
+ end
+
+ attr_reader :dest # Destination site
+ attr_reader :src # Source site
+ attr_accessor :proxy # Proxy site
+ attr_accessor :socket_sync # Boolean value for Socket#sync
+
+ attr_accessor :requested_version # Requested protocol version
+
+ attr_accessor :debug_dev # Device for dumping log for debugging
+
+ # These session parameters are not used now...
+ attr_accessor :connect_timeout
+ attr_accessor :connect_retry
+ attr_accessor :send_timeout
+ attr_accessor :receive_timeout
+ attr_accessor :read_block_size
+
+ attr_accessor :ssl_config
+
+ def initialize(dest, user_agent, from)
+ @dest = dest
+ @src = Site.new
+ @proxy = nil
+ @socket_sync = true
+ @requested_version = nil
+
+ @debug_dev = nil
+
+ @connect_timeout = nil
+ @connect_retry = 1
+ @send_timeout = nil
+ @receive_timeout = nil
+ @read_block_size = nil
+
+ @ssl_config = nil
+
+ @user_agent = user_agent
+ @from = from
+ @state = :INIT
+
+ @requests = []
+
+ @status = nil
+ @reason = nil
+ @headers = []
+
+ @socket = nil
+ end
+
+ # Send a request to the server
+ def query(req)
+ connect() if @state == :INIT
+ begin
+ timeout(@send_timeout) do
+ set_header(req)
+ req.dump(@socket)
+ # flush the IO stream as IO::sync mode is false
+ @socket.flush unless @socket_sync
+ end
+ rescue Errno::ECONNABORTED
+ close
+ raise KeepAliveDisconnected.new
+ rescue
+ if SSLEnabled and $!.is_a?(OpenSSL::SSL::SSLError)
+ raise KeepAliveDisconnected.new
+ elsif $!.is_a?(TimeoutError)
+ close
+ raise
+ else
+ raise
+ end
+ end
+
+ @state = :META if @state == :WAIT
+ @next_connection = nil
+ @requests.push(req)
+ end
+
+ def close
+ unless @socket.nil?
+ @socket.flush
+ @socket.close unless @socket.closed?
+ end
+ @state = :INIT
+ end
+
+ def closed?
+ @state == :INIT
+ end
+
+ def get_status
+ version = status = reason = nil
+ begin
+ if @state != :META
+ raise RuntimeError.new("get_status must be called at the beginning of a session.")
+ end
+ version, status, reason = read_header()
+ rescue
+ close
+ raise
+ end
+ return version, status, reason
+ end
+
+ def get_header(&block)
+ begin
+ read_header() if @state == :META
+ rescue
+ close
+ raise
+ end
+ if block
+ @headers.each do |line|
+ block.call(line)
+ end
+ else
+ @headers
+ end
+ end
+
+ def eof?
+ if @content_length == 0
+ true
+ elsif @readbuf.length > 0
+ false
+ else
+ @socket.closed? or @socket.eof?
+ end
+ end
+
+ def get_data(&block)
+ begin
+ read_header() if @state == :META
+ return nil if @state != :DATA
+ unless @state == :DATA
+ raise InvalidState.new('state != DATA')
+ end
+ data = nil
+ if block
+ until eof?
+ begin
+ timeout(@receive_timeout) do
+ data = read_body()
+ end
+ rescue TimeoutError
+ raise
+ end
+ block.call(data) if data
+ end
+ data = nil # Calling with block returns nil.
+ else
+ begin
+ timeout(@receive_timeout) do
+ data = read_body()
+ end
+ rescue TimeoutError
+ raise
+ end
+ end
+ rescue
+ close
+ raise
+ end
+ if eof?
+ if @next_connection
+ @state = :WAIT
+ else
+ close
+ end
+ end
+ data
+ end
+
+private
+
+ LibNames = "(#{RCS_FILE}/#{RCS_REVISION}, #{RUBY_VERSION_STRING})"
+
+ def set_header(req)
+ req.version = @requested_version if @requested_version
+ if @user_agent
+ req.header.set('User-Agent', "#{@user_agent} #{LibNames}")
+ end
+ if @from
+ req.header.set('From', @from)
+ end
+ req.header.set('Date', Time.now)
+ end
+
+ # Connect to the server
+ def connect
+ site = @proxy || @dest
+ begin
+ retry_number = 0
+ timeout(@connect_timeout) do
+ @socket = create_socket(site)
+ begin
+ @src.host = @socket.addr[3]
+ @src.port = @socket.addr[1]
+ rescue SocketError
+ # to avoid IPSocket#addr problem on Mac OS X 10.3 + ruby-1.8.1.
+ # cf. [ruby-talk:84909], [ruby-talk:95827]
+ end
+ if @dest.scheme == 'https'
+ @socket = create_ssl_socket(@socket)
+ connect_ssl_proxy(@socket) if @proxy
+ @socket.ssl_connect
+ @socket.post_connection_check(@dest)
+ end
+        # Use Ruby internal buffering instead of passing data immediately
+        # to the underlying layer
+        # => we need to call flush explicitly on the socket
+ @socket.sync = @socket_sync
+ end
+ rescue TimeoutError
+ if @connect_retry == 0
+ retry
+ else
+ retry_number += 1
+ retry if retry_number < @connect_retry
+ end
+ close
+ raise
+ end
+
+ @state = :WAIT
+ @readbuf = ''
+ end
+
+ def create_socket(site)
+ begin
+ if @debug_dev
+ DebugSocket.create_socket(site.host, site.port, @debug_dev)
+ else
+ TCPSocket.new(site.host, site.port)
+ end
+ rescue SystemCallError => e
+ e.message << " (#{site.host}, ##{site.port})"
+ raise
+ end
+ end
+
+ # wrap socket with OpenSSL.
+ def create_ssl_socket(raw_socket)
+ SSLSocketWrap.new(raw_socket, @ssl_config, (DEBUG_SSL ? @debug_dev : nil))
+ end
+
+ def connect_ssl_proxy(socket)
+ socket << sprintf("CONNECT %s:%s HTTP/1.1\r\n\r\n", @dest.host, @dest.port)
+ parse_header(socket)
+ unless @status == 200
+ raise BadResponse.new(
+ "connect to ssl proxy failed with status #{@status} #{@reason}")
+ end
+ end
+
+ # Read status block.
+ def read_header
+ if @state == :DATA
+ get_data {}
+ check_state()
+ end
+ unless @state == :META
+ raise InvalidState, 'state != :META'
+ end
+ parse_header(@socket)
+ @content_length = nil
+ @chunked = false
+ @headers.each do |line|
+ case line
+ when /^Content-Length:\s+(\d+)/i
+ @content_length = $1.to_i
+ when /^Transfer-Encoding:\s+chunked/i
+ @chunked = true
+ @content_length = true # how?
+ @chunk_length = 0
+ when /^Connection:\s+([\-\w]+)/i, /^Proxy-Connection:\s+([\-\w]+)/i
+ case $1
+ when /^Keep-Alive$/i
+ @next_connection = true
+ when /^close$/i
+ @next_connection = false
+ end
+ else
+ # Nothing to parse.
+ end
+ end
+
+ # Head of the request has been parsed.
+ @state = :DATA
+ req = @requests.shift
+
+ if req.header.request_method == 'HEAD'
+ @content_length = 0
+ if @next_connection
+ @state = :WAIT
+ else
+ close
+ end
+ end
+ @next_connection = false unless @content_length
+ return [@version, @status, @reason]
+ end
+
+ StatusParseRegexp = %r(\AHTTP/(\d+\.\d+)\s+(\d+)(?:\s+(.*))?\r?\n\z)
+ def parse_header(socket)
+ begin
+ timeout(@receive_timeout) do
+ begin
+ initial_line = socket.gets("\n")
+ if initial_line.nil?
+ raise KeepAliveDisconnected.new
+ end
+ if StatusParseRegexp =~ initial_line
+ @version, @status, @reason = $1, $2.to_i, $3
+ @next_connection = HTTP.keep_alive_enabled?(@version)
+ else
+ @version = '0.9'
+ @status = nil
+ @reason = nil
+ @next_connection = false
+ @readbuf = initial_line
+ break
+ end
+ @headers = []
+ while true
+ line = socket.gets("\n")
+ unless line
+ raise BadResponse.new('Unexpected EOF.')
+ end
+ line.sub!(/\r?\n\z/, '')
+ break if line.empty?
+ if line.sub!(/^\t/, '')
+ @headers[-1] << line
+ else
+ @headers.push(line)
+ end
+ end
+ end while (@version == '1.1' && @status == 100)
+ end
+ rescue TimeoutError
+ raise
+ end
+ end
+
+ def read_body
+ if @chunked
+ return read_body_chunked()
+ elsif @content_length == 0
+ return nil
+ elsif @content_length
+ return read_body_length()
+ else
+ if @readbuf.length > 0
+ data = @readbuf
+ @readbuf = ''
+ return data
+ else
+ data = @socket.read(@read_block_size)
+ data = nil if data.empty? # Absorbing interface mismatch.
+ return data
+ end
+ end
+ end
+
+ def read_body_length
+ maxbytes = @read_block_size
+ if @readbuf.length > 0
+ data = @readbuf[0, @content_length]
+ @readbuf[0, @content_length] = ''
+ @content_length -= data.length
+ return data
+ end
+ maxbytes = @content_length if maxbytes > @content_length
+ data = @socket.read(maxbytes)
+ if data
+ @content_length -= data.length
+ else
+ @content_length = 0
+ end
+ return data
+ end
+
+ RS = "\r\n"
+ ChunkDelimiter = "0#{RS}"
+ ChunkTrailer = "0#{RS}#{RS}"
+ def read_body_chunked
+ if @chunk_length == 0
+ until (i = @readbuf.index(RS))
+ @readbuf << @socket.gets(RS)
+ end
+ i += 2
+ if @readbuf[0, i] == ChunkDelimiter
+ @content_length = 0
+ unless @readbuf[0, 5] == ChunkTrailer
+ @readbuf << @socket.gets(RS)
+ end
+ @readbuf[0, 5] = ''
+ return nil
+ end
+ @chunk_length = @readbuf[0, i].hex
+ @readbuf[0, i] = ''
+ end
+ while @readbuf.length < @chunk_length + 2
+ @readbuf << @socket.read(@chunk_length + 2 - @readbuf.length)
+ end
+ data = @readbuf[0, @chunk_length]
+ @readbuf[0, @chunk_length + 2] = ''
+ @chunk_length = 0
+ return data
+ end
+
+ def check_state
+ if @state == :DATA
+ if eof?
+ if @next_connection
+ if @requests.empty?
+ @state = :WAIT
+ else
+ @state = :META
+ end
+ end
+ end
+ end
+ end
+end
+
+
+end
+
+
+HTTPClient = HTTPAccess2::Client
diff --git a/vendor/rubyforge-0.4.4/lib/http-access2/cookie.rb b/vendor/rubyforge-0.4.4/lib/http-access2/cookie.rb
new file mode 100644
index 0000000..664aef6
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/lib/http-access2/cookie.rb
@@ -0,0 +1,538 @@
+# cookie.rb is redistributed file which is originally included in Webagent
+# version 0.6.2 by TAKAHASHI `Maki' Masayoshi. And it contains some bug fixes.
+# You can download the entire package of Webagent from
+# http://www.rubycolor.org/arc/.
+
+
+# Cookie class
+#
+# I refered to w3m's source to make these classes. Some comments
+# are quoted from it. I'm thanksful for author(s) of it.
+#
+# w3m homepage: http://ei5nazha.yz.yamagata-u.ac.jp/~aito/w3m/eng/
+
+require 'uri'
+
+class WebAgent
+
+ module CookieUtils
+
+ def head_match?(str1, str2)
+ str1 == str2[0, str1.length]
+ end
+
+ def tail_match?(str1, str2)
+ if str1.length > 0
+ str1 == str2[-str1.length..-1].to_s
+ else
+ true
+ end
+ end
+
+ def domain_match(host, domain)
+ case domain
+ when /\d+\.\d+\.\d+\.\d+/
+ return (host == domain)
+ when '.'
+ return true
+ when /^\./
+ return tail_match?(domain, host)
+ else
+ return (host == domain)
+ end
+ end
+
+ def total_dot_num(string)
+ string.scan(/\./).length()
+ end
+
+ end
+
+ class Cookie
+ include CookieUtils
+
+ require 'parsedate'
+ include ParseDate
+
+ attr_accessor :name, :value
+ attr_accessor :domain, :path
+ attr_accessor :expires ## for Netscape Cookie
+ attr_accessor :url
+ attr_writer :use, :secure, :discard, :domain_orig, :path_orig, :override
+
+ USE = 1
+ SECURE = 2
+ DOMAIN = 4
+ PATH = 8
+ DISCARD = 16
+ OVERRIDE = 32
+ OVERRIDE_OK = 32
+
+ def initialize()
+ @discard = @use = @secure = @domain_orig = @path_orig = @override = nil
+ end
+
+ def discard?
+ @discard
+ end
+
+ def use?
+ @use
+ end
+
+ def secure?
+ @secure
+ end
+
+ def domain_orig?
+ @domain_orig
+ end
+
+ def path_orig?
+ @path_orig
+ end
+
+ def override?
+ @override
+ end
+
+ def flag
+ flg = 0
+ flg += USE if @use
+ flg += SECURE if @secure
+ flg += DOMAIN if @domain_orig
+ flg += PATH if @path_orig
+ flg += DISCARD if @discard
+ flg += OVERRIDE if @override
+ flg
+ end
+
+ def set_flag(flag)
+ flag = flag.to_i
+ @use = true if flag & USE > 0
+ @secure = true if flag & SECURE > 0
+ @domain_orig = true if flag & DOMAIN > 0
+ @path_orig = true if flag & PATH > 0
+ @discard = true if flag & DISCARD > 0
+ @override = true if flag & OVERRIDE > 0
+ end
+
+ def match?(url)
+ domainname = url.host
+ if (!domainname ||
+ !domain_match(domainname, @domain) ||
+ (@path && !head_match?(@path, url.path)) ||
+ (@secure && (url.scheme != 'https')) )
+ return false
+ else
+ return true
+ end
+ end
+
+ def join_quotedstr(array, sep)
+ ret = Array.new()
+ old_elem = nil
+ array.each{|elem|
+ if (elem.scan(/"/).length % 2) == 0
+ if old_elem
+ old_elem << sep << elem
+ else
+ ret << elem
+ old_elem = nil
+ end
+ else
+ if old_elem
+ old_elem << sep << elem
+ ret << old_elem
+ old_elem = nil
+ else
+ old_elem = elem.dup
+ end
+ end
+ }
+ ret
+ end
+
+ def parse(str, url)
+ @url = url
+ cookie_elem = str.split(/;/)
+ cookie_elem = join_quotedstr(cookie_elem, ';')
+ first_elem = cookie_elem.shift
+ if first_elem !~ /([^=]*)(\=(.*))?/
+ return
+ ## raise ArgumentError 'invalid cookie value'
+ end
+ @name = $1.strip
+ @value = $3
+ if @value
+ if @value =~ /^\s*"(.*)"\s*$/
+ @value = $1
+ else
+ @value.dup.strip!
+ end
+ end
+ cookie_elem.each{|pair|
+ key, value = pair.split(/=/) ## value may nil
+ key.strip!
+ if value
+ value = value.strip.sub(/\A"(.*)"\z/) { $1 }
+ end
+ case key.downcase
+ when 'domain'
+ @domain = value
+ when 'expires'
+ begin
+ @expires = Time.gm(*parsedate(value)[0,6])
+ rescue ArgumentError
+ @expires = nil
+ end
+ when 'path'
+ @path = value
+ when 'secure'
+ @secure = true ## value may nil, but must 'true'.
+ else
+ ## ignore
+ end
+ }
+ end
+
+ end
+
+ class CookieManager
+ include CookieUtils
+
+ ### errors
+ class Error < StandardError; end
+ class ErrorOverrideOK < Error; end
+ class SpecialError < Error; end
+ class NoDotError < ErrorOverrideOK; end
+
+ SPECIAL_DOMAIN = [".com",".edu",".gov",".mil",".net",".org",".int"]
+
+ attr_accessor :cookies
+ attr_accessor :cookies_file
+ attr_accessor :accept_domains, :reject_domains
+ attr_accessor :netscape_rule
+
+ def initialize(file=nil)
+ @cookies = Array.new()
+ @cookies_file = file
+ @is_saved = true
+ @reject_domains = Array.new()
+ @accept_domains = Array.new()
+ # for conformance to http://wp.netscape.com/newsref/std/cookie_spec.html
+ @netscape_rule = false
+ end
+
+ def save_all_cookies(force = nil, save_unused = true, save_discarded = true)
+ if @is_saved and !force
+ return
+ end
+ File.open(@cookies_file, 'w') do |f|
+ @cookies.each do |cookie|
+ if (cookie.use? or save_unused) and
+ (!cookie.discard? or save_discarded)
+ f.print(cookie.url.to_s,"\t",
+ cookie.name,"\t",
+ cookie.value,"\t",
+ cookie.expires.to_i,"\t",
+ cookie.domain,"\t",
+ cookie.path,"\t",
+ cookie.flag,"\n")
+ end
+ end
+ end
+ end
+
+ def save_cookies(force = nil)
+ save_all_cookies(force, false, false)
+ end
+
+ def check_expired_cookies()
+ @cookies.reject!{|cookie|
+ is_expired = (cookie.expires && (cookie.expires < Time.now.gmtime))
+ if is_expired && !cookie.discard?
+ @is_saved = false
+ end
+ is_expired
+ }
+ end
+
+ def parse(str, url)
+ cookie = WebAgent::Cookie.new()
+ cookie.parse(str, url)
+ add(cookie)
+ end
+
+ def make_cookie_str(cookie_list)
+ if cookie_list.empty?
+ return nil
+ end
+
+ ret = ''
+ c = cookie_list.shift
+ ret += "#{c.name}=#{c.value}"
+ cookie_list.each{|cookie|
+ ret += "; #{cookie.name}=#{cookie.value}"
+ }
+ return ret
+ end
+ private :make_cookie_str
+
+
+ def find(url)
+
+ check_expired_cookies()
+
+ cookie_list = Array.new()
+
+ @cookies.each{|cookie|
+ if cookie.use? && cookie.match?(url)
+ if cookie_list.select{|c1| c1.name == cookie.name}.empty?
+ cookie_list << cookie
+ end
+ end
+ }
+ return make_cookie_str(cookie_list)
+ end
+
+ def find_cookie_info(domain, path, name)
+ @cookies.find{|c|
+ c.domain == domain && c.path == path && c.name == name
+ }
+ end
+ private :find_cookie_info
+
+ def cookie_error(err, override)
+ if err.kind_of?(ErrorOverrideOK) || !override
+ raise err
+ end
+ end
+ private :cookie_error
+
+ def add(cookie)
+ url = cookie.url
+ name, value = cookie.name, cookie.value
+ expires, domain, path =
+ cookie.expires, cookie.domain, cookie.path
+ secure, domain_orig, path_orig =
+ cookie.secure?, cookie.domain_orig?, cookie.path_orig?
+ discard, override =
+ cookie.discard?, cookie.override?
+
+ domainname = url.host
+ domain_orig, path_orig = domain, path
+ use_security = override
+
+ if !domainname
+ cookie_error(NodotError.new(), override)
+ end
+
+ if domain
+
+ # [DRAFT 12] s. 4.2.2 (does not apply in the case that
+ # host name is the same as domain attribute for version 0
+ # cookie)
+ # I think that this rule has almost the same effect as the
+ # tail match of [NETSCAPE].
+ if domain !~ /^\./ && domainname != domain
+ domain = '.'+domain
+ end
+
+ n = total_dot_num(domain)
+ if n < 2
+ cookie_error(SpecialError.new(), override)
+ elsif @netscape_rule and n == 2
+ ## [NETSCAPE] rule
+ ok = SPECIAL_DOMAIN.select{|sdomain|
+ sdomain == domain[-(sdomain.length)..-1]
+ }
+ if ok.empty?
+ cookie_error(SpecialError.new(), override)
+ end
+ end
+
+ end
+
+ path ||= url.path.sub!(%r|/[^/]*|, '')
+ domain ||= domainname
+ cookie = find_cookie_info(domain, path, name)
+
+ if !cookie
+ cookie = WebAgent::Cookie.new()
+ cookie.use = true
+ @cookies << cookie
+ end
+
+ cookie.url = url
+ cookie.name = name
+ cookie.value = value
+ cookie.expires = expires
+ cookie.domain = domain
+ cookie.path = path
+
+ ## for flag
+ cookie.secure = secure
+ cookie.domain_orig = domain_orig
+ cookie.path_orig = path_orig
+ if discard || cookie.expires == nil
+ cookie.discard = true
+ else
+ cookie.discard = false
+ @is_saved = false
+ end
+
+ check_expired_cookies()
+ return false
+ end
+
+ def load_cookies()
+ return if !File.readable?(@cookies_file)
+ File.open(@cookies_file,'r'){|f|
+ while line = f.gets
+ cookie = WebAgent::Cookie.new()
+ @cookies << cookie
+ col = line.chomp.split(/\t/)
+ cookie.url = URI.parse(col[0])
+ cookie.name = col[1]
+ cookie.value = col[2]
+ cookie.expires = Time.at(col[3].to_i)
+ cookie.domain = col[4]
+ cookie.path = col[5]
+ cookie.set_flag(col[6])
+ end
+ }
+ end
+
+ def check_cookie_accept_domain(domain)
+ unless domain
+ return false
+ end
+ @accept_domains.each{|dom|
+ if domain_match(domain, dom)
+ return true
+ end
+ }
+ @reject_domains.each{|dom|
+ if domain_match(domain, dom)
+ return false
+ end
+ }
+ return true
+ end
+ end
+end
+
+__END__
+
+=begin
+
+== WebAgent::CookieManager Class
+
+Load, save, parse and send cookies.
+
+=== Usage
+
+ ## initialize
+ cm = WebAgent::CookieManager.new("/home/foo/bar/cookie")
+
+ ## load cookie data
+ cm.load_cookies()
+
+ ## parse cookie from string (maybe "Set-Cookie:" header)
+ cm.parse(str)
+
+ ## send cookie data to url
+ f.write(cm.find(url))
+
+ ## save cookie to cookiefile
+ cm.save_cookies()
+
+
+=== Class Methods
+
+ -- CookieManager::new(file=nil)
+
+ create new CookieManager. If a file is provided,
+ use it as cookies' file.
+
+=== Methods
+
+ -- CookieManager#save_cookies(force = nil)
+
+ save cookies' data into file. if argument is true,
+ save data although data is not modified.
+
+ -- CookieManager#parse(str, url)
+
+ parse string and store cookie (to parse HTTP response header).
+
+ -- CookieManager#find(url)
+
+ get cookies and make into string (to send as HTTP request header).
+
+ -- CookieManager#add(cookie)
+
+ add new cookie.
+
+ -- CookieManager#load_cookies()
+
+ load cookies' data from file.
+
+
+== WebAgent::CookieUtils Module
+
+ -- CookieUtils::head_match?(str1, str2)
+ -- CookieUtils::tail_match?(str1, str2)
+ -- CookieUtils::domain_match(host, domain)
+ -- CookieUtils::total_dot_num(str)
+
+
+== WebAgent::Cookie Class
+
+=== Class Methods
+
+ -- Cookie::new()
+
+ create new cookie.
+
+=== Methods
+
+ -- Cookie#match?(url)
+
+ match cookie by url. if match, return true. otherwise,
+ return false.
+
+ -- Cookie#name
+ -- Cookie#name=(name)
+ -- Cookie#value
+ -- Cookie#value=(value)
+ -- Cookie#domain
+ -- Cookie#domain=(domain)
+ -- Cookie#path
+ -- Cookie#path=(path)
+ -- Cookie#expires
+ -- Cookie#expires=(expires)
+ -- Cookie#url
+ -- Cookie#url=(url)
+
+ accessor methods for cookie's items.
+
+ -- Cookie#discard?
+ -- Cookie#discard=(discard)
+ -- Cookie#use?
+ -- Cookie#use=(use)
+ -- Cookie#secure?
+ -- Cookie#secure=(secure)
+ -- Cookie#domain_orig?
+ -- Cookie#domain_orig=(domain_orig)
+ -- Cookie#path_orig?
+ -- Cookie#path_orig=(path_orig)
+ -- Cookie#override?
+ -- Cookie#override=(override)
+ -- Cookie#flag
+ -- Cookie#set_flag(flag_num)
+
+ accessor methods for flags.
+
+=end
diff --git a/vendor/rubyforge-0.4.4/lib/http-access2/http.rb b/vendor/rubyforge-0.4.4/lib/http-access2/http.rb
new file mode 100644
index 0000000..c33c4f7
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/lib/http-access2/http.rb
@@ -0,0 +1,542 @@
+# HTTP - HTTP container.
+# Copyright (C) 2001, 2002, 2003, 2005 NAKAMURA, Hiroshi.
+#
+# This module is copyrighted free software by NAKAMURA, Hiroshi.
+# You can redistribute it and/or modify it under the same term as Ruby.
+
+require 'uri'
+require 'time'
+
+module HTTP
+
+
+module Status
+ OK = 200
+ MOVED_PERMANENTLY = 301
+ FOUND = 302
+ SEE_OTHER = 303
+ TEMPORARY_REDIRECT = MOVED_TEMPORARILY = 307
+ BAD_REQUEST = 400
+ INTERNAL = 500
+
+ def self.redirect?(status)
+ [
+ MOVED_PERMANENTLY, FOUND, SEE_OTHER,
+ TEMPORARY_REDIRECT, MOVED_TEMPORARILY
+ ].include?(status)
+ end
+end
+
+
+class Error < StandardError; end
+class BadResponseError < Error; end
+
+ class << self
+ def http_date(a_time)
+ a_time.gmtime.strftime("%a, %d %b %Y %H:%M:%S GMT")
+ end
+
+ ProtocolVersionRegexp = Regexp.new('^(?:HTTP/|)(\d+)\.(\d+)$')
+ def keep_alive_enabled?(version)
+ ProtocolVersionRegexp =~ version
+ if !($1 and $2)
+ false
+ elsif $1.to_i > 1
+ true
+ elsif $1.to_i == 1 and $2.to_i >= 1
+ true
+ else
+ false
+ end
+ end
+ end
+
+
+# HTTP::Message -- HTTP message.
+#
+# DESCRIPTION
+# A class that describes 1 HTTP request / response message.
+#
+class Message
+ CRLF = "\r\n"
+
+ # HTTP::Message::Headers -- HTTP message header.
+ #
+ # DESCRIPTION
+ # A class that describes header part of HTTP message.
+ #
+ class Headers
+ # HTTP version string in a HTTP header.
+ attr_accessor :http_version
+ # Content-type.
+ attr_accessor :body_type
+ # Charset.
+ attr_accessor :body_charset
+ # Size of body.
+ attr_reader :body_size
+ # A milestone of body.
+ attr_accessor :body_date
+ # Chunked or not.
+ attr_reader :chunked
+ # Request method.
+ attr_reader :request_method
+ # Requested URI.
+ attr_reader :request_uri
+ # HTTP status reason phrase.
+ attr_accessor :reason_phrase
+
+ StatusCodeMap = {
+ Status::OK => 'OK',
+ Status::MOVED_PERMANENTLY => 'Moved Permanently',
+ Status::FOUND => 'Found',
+ Status::SEE_OTHER => 'See Other',
+ Status::TEMPORARY_REDIRECT => 'Temporary Redirect',
+ Status::MOVED_TEMPORARILY => 'Temporary Redirect',
+ Status::BAD_REQUEST => 'Bad Request',
+ Status::INTERNAL => 'Internal Server Error',
+ }
+
+ CharsetMap = {
+ 'NONE' => 'us-ascii',
+ 'EUC' => 'euc-jp',
+ 'SJIS' => 'shift_jis',
+ 'UTF8' => 'utf-8',
+ }
+
+ # SYNOPSIS
+ # HTTP::Message.new
+ #
+ # ARGS
+ # N/A
+ #
+ # DESCRIPTION
+ # Create a instance of HTTP request or HTTP response. Specify
+ # status_code for HTTP response.
+ #
+ def initialize
+ @is_request = nil # true, false and nil
+ @http_version = 'HTTP/1.1'
+ @body_type = nil
+ @body_charset = nil
+ @body_size = nil
+ @body_date = nil
+ @header_item = []
+ @chunked = false
+ @response_status_code = nil
+ @reason_phrase = nil
+ @request_method = nil
+ @request_uri = nil
+ @request_query = nil
+ @request_via_proxy = nil
+ end
+
+ def init_request(method, uri, query = nil, via_proxy = nil)
+ @is_request = true
+ @request_method = method
+ @request_uri = if uri.is_a?(URI)
+ uri
+ else
+ URI.parse(uri.to_s)
+ end
+ @request_query = create_query_uri(@request_uri, query)
+ @request_via_proxy = via_proxy
+ end
+
+ def init_response(status_code)
+ @is_request = false
+ self.response_status_code = status_code
+ end
+
+ attr_accessor :request_via_proxy
+
+ attr_reader :response_status_code
+ def response_status_code=(status_code)
+ @response_status_code = status_code
+ @reason_phrase = StatusCodeMap[@response_status_code]
+ end
+
+ def contenttype
+ self['content-type'][0]
+ end
+
+ def contenttype=(contenttype)
+ self['content-type'] = contenttype
+ end
+
+ # body_size == nil means that the body is_a? IO
+ def body_size=(body_size)
+ @body_size = body_size
+ if @body_size
+ @chunked = false
+ else
+ @chunked = true
+ end
+ end
+
+ def dump(dev = '')
+ set_header
+ if @is_request
+ dev << request_line
+ else
+ dev << response_status_line
+ end
+ dev << @header_item.collect { |key, value|
+ dump_line("#{ key }: #{ value }")
+ }.join
+ dev
+ end
+
+ def set(key, value)
+ @header_item.push([key, value])
+ end
+
+ def get(key = nil)
+ if !key
+ @header_item
+ else
+ @header_item.find_all { |pair| pair[0].upcase == key.upcase }
+ end
+ end
+
+ def []=(key, value)
+ set(key, value)
+ end
+
+ def [](key)
+ get(key).collect { |item| item[1] }
+ end
+
+ private
+
+ def request_line
+ path = if @request_via_proxy
+ if @request_uri.port
+ "#{ @request_uri.scheme }://#{ @request_uri.host }:#{ @request_uri.port }#{ @request_query }"
+ else
+ "#{ @request_uri.scheme }://#{ @request_uri.host }#{ @request_query }"
+ end
+ else
+ @request_query
+ end
+ dump_line("#{ @request_method } #{ path } #{ @http_version }")
+ end
+
+ def response_status_line
+ if defined?(Apache)
+ dump_line("#{ @http_version } #{ response_status_code } #{ @reason_phrase }")
+ else
+ dump_line("Status: #{ response_status_code } #{ @reason_phrase }")
+ end
+ end
+
+ def set_header
+ if defined?(Apache)
+ set('Date', HTTP.http_date(Time.now))
+ end
+
+ keep_alive = HTTP.keep_alive_enabled?(@http_version)
+ set('Connection', 'close') unless keep_alive
+
+ if @chunked
+ set('Transfer-Encoding', 'chunked')
+ else
+ if keep_alive or @body_size != 0
+ set('Content-Length', @body_size.to_s)
+ end
+ end
+
+ if @body_date
+ set('Last-Modified', HTTP.http_date(@body_date))
+ end
+
+ if @is_request == true
+ if @http_version >= 'HTTP/1.1'
+ if @request_uri.port == @request_uri.default_port
+ set('Host', "#{@request_uri.host}")
+ else
+ set('Host', "#{@request_uri.host}:#{@request_uri.port}")
+ end
+ end
+ elsif @is_request == false
+ set('Content-Type', "#{ @body_type || 'text/html' }; charset=#{ CharsetMap[@body_charset || $KCODE] }")
+ end
+ end
+
+ def dump_line(str)
+ str + CRLF
+ end
+
+ def create_query_uri(uri, query)
+ path = uri.path.dup
+ path = '/' if path.empty?
+ query_str = nil
+ if uri.query
+ query_str = uri.query
+ end
+ if query
+ if query_str
+ query_str << '&' << Message.create_query_part_str(query)
+ else
+ query_str = Message.create_query_part_str(query)
+ end
+ end
+ if query_str
+ path << '?' << query_str
+ end
+ path
+ end
+ end
+
+ class Body
+ attr_accessor :type, :charset, :date, :chunk_size
+
+ def initialize(body = nil, date = nil, type = nil, charset = nil,
+ boundary = nil)
+ @body = nil
+ @boundary = boundary
+ set_content(body || '', boundary)
+ @type = type
+ @charset = charset
+ @date = date
+ @chunk_size = 4096
+ end
+
+ def size
+ if @body.respond_to?(:read)
+ nil
+ else
+ @body.size
+ end
+ end
+
+ def dump(dev = '')
+ if @body.respond_to?(:read)
+ begin
+ while true
+ chunk = @body.read(@chunk_size)
+ break if chunk.nil?
+ dev << dump_chunk(chunk)
+ end
+ rescue EOFError
+ end
+ dev << (dump_last_chunk + CRLF)
+ else
+ dev << @body
+ end
+ dev
+ end
+
+ def content
+ @body
+ end
+
+ def set_content(body, boundary = nil)
+ if body.respond_to?(:read)
+ @body = body
+ elsif boundary
+ @body = Message.create_query_multipart_str(body, boundary)
+ else
+ @body = Message.create_query_part_str(body)
+ end
+ end
+
+ private
+
+ def dump_chunk(str)
+ dump_chunk_size(str.size) << (str + CRLF)
+ end
+
+ def dump_last_chunk
+ dump_chunk_size(0)
+ end
+
+ def dump_chunk_size(size)
+ sprintf("%x", size) << CRLF
+ end
+ end
+
+ def initialize
+ @body = @header = nil
+ end
+
+ class << self
+ alias __new new
+ undef new
+ end
+
+ def self.new_request(method, uri, query = nil, body = nil, proxy = nil,
+ boundary = nil)
+ m = self.__new
+ m.header = Headers.new
+ m.header.init_request(method, uri, query, proxy)
+ m.body = Body.new(body, nil, nil, nil, boundary)
+ m
+ end
+
+ def self.new_response(body = '')
+ m = self.__new
+ m.header = Headers.new
+ m.header.init_response(Status::OK)
+ m.body = Body.new(body)
+ m
+ end
+
+ def dump(dev = '')
+ sync_header
+ dev = header.dump(dev)
+ dev << CRLF
+ dev = body.dump(dev) if body
+ dev
+ end
+
+ def load(str)
+ buf = str.dup
+ unless self.header.load(buf)
+ self.body.load(buf)
+ end
+ end
+
+ def header
+ @header
+ end
+
+ def header=(header)
+ @header = header
+ sync_body
+ end
+
+ def content
+ @body.content
+ end
+
+ def body
+ @body
+ end
+
+ def body=(body)
+ @body = body
+ sync_header
+ end
+
+ def status
+ @header.response_status_code
+ end
+
+ def status=(status)
+ @header.response_status_code = status
+ end
+
+ def version
+ @header.http_version
+ end
+
+ def version=(version)
+ @header.http_version = version
+ end
+
+ def reason
+ @header.reason_phrase
+ end
+
+ def reason=(reason)
+ @header.reason_phrase = reason
+ end
+
+ def contenttype
+ @header.contenttype
+ end
+
+ def contenttype=(contenttype)
+ @header.contenttype = contenttype
+ end
+
+ class << self
+ def create_query_part_str(query)
+ if multiparam_query?(query)
+ escape_query(query)
+ else
+ query.to_s
+ end
+ end
+
+ def create_query_multipart_str(query, boundary)
+ if multiparam_query?(query)
+ query.collect { |attr, value|
+ value ||= ''
+ if value.is_a? File
+ params = {
+ 'filename' => value.path,
+ # Creation time is not available from File::Stat
+ # 'creation-date' => value.ctime.rfc822,
+ 'modification-date' => value.mtime.rfc822,
+ 'read-date' => value.atime.rfc822,
+ }
+ param_str = params.to_a.collect { |k, v|
+ "#{k}=\"#{v}\""
+ }.join("; ")
+ "--#{boundary}\n" +
+ %{Content-Disposition: form-data; name="#{attr.to_s}"; #{param_str}\n} +
+ "Content-Type: #{mime_type(value.path)}\n\n#{value.read}\n"
+ else
+ "--#{boundary}\n" +
+ %{Content-Disposition: form-data; name="#{attr.to_s}"\n} +
+ "\n#{value.to_s}\n"
+ end
+ }.join('') + "--#{boundary}--\n"
+ else
+ query.to_s
+ end
+ end
+
+ def multiparam_query?(query)
+ query.is_a?(Array) or query.is_a?(Hash)
+ end
+
+ def escape_query(query)
+ query.collect { |attr, value|
+ escape(attr.to_s) << '=' << escape(value.to_s)
+ }.join('&')
+ end
+
+ # from CGI.escape
+ def escape(str)
+ str.gsub(/([^ a-zA-Z0-9_.-]+)/n) {
+ '%' + $1.unpack('H2' * $1.size).join('%').upcase
+ }.tr(' ', '+')
+ end
+
+ def mime_type(path)
+ case path
+ when /.(htm|html)$/
+ 'text/html'
+ when /.doc$/
+ 'application/msword'
+ else
+ 'text/plain'
+ end
+ end
+ end
+
+private
+
+ def sync_header
+ if @header and @body
+ @header.body_type = @body.type
+ @header.body_charset = @body.charset
+ @header.body_size = @body.size
+ @header.body_date = @body.date
+ end
+ end
+
+ def sync_body
+ if @header and @body
+ @body.type = @header.body_type
+ @body.charset = @header.body_charset
+ @body.size = @header.body_size
+ @body.date = @header.body_date
+ end
+ end
+end
+
+
+end
diff --git a/vendor/rubyforge-0.4.4/lib/rubyforge.rb b/vendor/rubyforge-0.4.4/lib/rubyforge.rb
new file mode 100644
index 0000000..43d0d4b
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/lib/rubyforge.rb
@@ -0,0 +1,451 @@
+#! /usr/bin/env ruby -w
+
+require 'enumerator'
+require 'fileutils'
+require 'http-access2'
+require 'yaml'
+require 'open-uri'
+
+$TESTING = false unless defined? $TESTING
+
+# HACK to fix http-access2 cookie selection bug
+class WebAgent # :nodoc: all
+ module CookieUtils
+ alias :old_domain_match :domain_match
+ def domain_match(host, domain)
+ case domain
+ when /^\./
+ return tail_match?(host, domain) # was (domain, host)
+ else
+ return old_domain_match(host, domain)
+ end
+ end
+ end
+end
+
+class RubyForge
+
+ # :stopdoc:
+ VERSION = '0.4.4'
+ HOME = ENV["HOME"] || ENV["HOMEPATH"] || File::expand_path("~")
+ RUBYFORGE_D = File::join HOME, ".rubyforge"
+ CONFIG_F = File::join RUBYFORGE_D, "user-config.yml"
+ COOKIE_F = File::join RUBYFORGE_D, "cookie.dat"
+
+ # We must use __FILE__ instead of DATA because this is now a library
+ # and DATA is relative to $0, not __FILE__.
+ CONFIG = File.read(__FILE__).split(/__END__/).last.gsub(/#\{(.*)\}/) { eval $1 }
+
+ attr_reader :client if $TESTING
+ # :startdoc:
+
+ attr_reader :userconfig, :autoconfig
+
+ def initialize(userconfig=CONFIG_F, opts={})
+ @userconfig = test(?e, userconfig) ? IO::read(userconfig) : CONFIG
+ @userconfig = YAML.load(@userconfig).merge(opts)
+ dir, file = File.split(userconfig)
+ @autoconfig_path = File.join(dir, file.sub(/^user/, 'auto'))
+ @autoconfig = test(?e, @autoconfig_path) ? YAML.load_file(@autoconfig_path) : YAML.load(CONFIG)["rubyforge"]
+
+ @uri = URI.parse @userconfig['uri']
+
+ raise "no <username>" unless @userconfig["username"]
+ raise "no <password>" unless @userconfig["password"]
+ raise "no <cookie_jar>" unless @userconfig["cookie_jar"]
+ end
+
+ def setup
+ FileUtils::mkdir_p RUBYFORGE_D, :mode => 0700 unless test ?d, RUBYFORGE_D
+ test ?e, CONFIG_F and FileUtils::mv CONFIG_F, "#{CONFIG_F}.bak"
+ config = CONFIG[/\A.*(?=^\# AUTOCONFIG)/m]
+ open(CONFIG_F, "w") { |f| f.write config }
+ FileUtils::touch COOKIE_F
+ edit = (ENV["EDITOR"] || ENV["EDIT"] || "vi") + " '#{CONFIG_F}'"
+ system edit or puts "edit '#{CONFIG_F}'"
+ end
+
+ def save_autoconfig
+ File.open(@autoconfig_path, "w") do |file|
+ YAML.dump @autoconfig, file
+ end
+ end
+
+ def scrape_config
+ username = @userconfig['username']
+
+ %w(group package release).each do |type|
+ @autoconfig["#{type}_ids"].clear
+ end
+
+ puts "Getting #{username}"
+ html = URI.parse("http://rubyforge.org/users/#{username}/index.html").read
+ projects = html.scan(%r%/projects/([^/]+)/%).flatten
+ puts "Fetching #{projects.size} projects"
+ projects.each do |project|
+ next if project == "support"
+ scrape_project(project)
+ end
+ end
+
+ def scrape_project(project)
+ data = {
+ "group_ids" => {},
+ "package_ids" => {},
+ "release_ids" => Hash.new { |h,k| h[k] = {} },
+ }
+
+ puts "Updating #{project}"
+
+ unless data["group_ids"].has_key? project then
+ html = URI.parse("http://rubyforge.org/projects/#{project}/index.html").read
+ group_id = html[/(frs|tracker|mail)\/\?group_id=\d+/][/\d+/].to_i
+ data["group_ids"][project] = group_id
+ end
+
+ group_id = data["group_ids"][project]
+
+ html = URI.parse("http://rubyforge.org/frs/?group_id=#{group_id}").read
+
+ package = nil
+ html.scan(/<h3>[^<]+|release_id=\d+">[^>]+|filemodule_id=\d+/).each do |s|
+ case s
+ when /<h3>([^<]+)/ then
+ package = $1.strip
+ when /release_id=(\d+)">([^<]+)/ then
+ data["release_ids"][package][$2] = $1.to_i
+ when /filemodule_id=(\d+)/ then
+ data["package_ids"][package] = $1.to_i
+ end
+ end
+
+ data.each do |key, val|
+ @autoconfig[key].merge! val
+ end
+
+ save_autoconfig
+ end
+
+ def login
+ page = @uri + "/account/login.php"
+ page.scheme = 'https'
+ page = URI.parse page.to_s # set SSL port correctly
+
+ username = @userconfig["username"]
+ password = @userconfig["password"]
+
+ form = {
+ "return_to" => "",
+ "form_loginname" => username,
+ "form_pw" => password,
+ "login" => "Login"
+ }
+
+ response = run page, form
+
+ re = %r/personal\s+page\s+for:\s+#{ Regexp.escape username }/iom
+ unless response =~ re
+ warn("%s:%d: warning: potentially failed login using %s:%s" %
+ [__FILE__,__LINE__,username,password]) unless $TESTING
+ end
+
+ response
+ end
+
+ def create_package(group_id, package_name)
+ page = "/frs/admin/index.php"
+
+ group_id = lookup "group", group_id
+ is_private = @userconfig["is_private"]
+ is_public = is_private ? 0 : 1
+
+ form = {
+ "func" => "add_package",
+ "group_id" => group_id,
+ "package_name" => package_name,
+ "is_public" => is_public,
+ "submit" => "Create This Package",
+ }
+
+ run page, form
+
+ group_name = @autoconfig["group_ids"].invert[group_id]
+ scrape_project(group_name)
+ end
+
+ ##
+ # Posts news item to +group_id+ (can be name) with +subject+ and +body+
+
+ def post_news(group_id, subject, body)
+ page = "/news/submit.php"
+ group_id = lookup "group", group_id
+
+ form = {
+ "group_id" => group_id,
+ "post_changes" => "y",
+ "summary" => subject,
+ "details" => body,
+ "submit" => "Submit",
+ }
+
+ run page, form
+ end
+
+ def delete_package(group_id, package_id)
+ page = "/frs/admin/index.php"
+
+ group_id = lookup "group", group_id
+ package_id = lookup "package", package_id
+
+ form = {
+ "func" => "delete_package",
+ "group_id" => group_id,
+ "package_id" => package_id,
+ "sure" => "1",
+ "really_sure" => "1",
+ "submit" => "Delete",
+ }
+
+ package_name = @autoconfig["package_ids"].invert[package_id]
+ @autoconfig["package_ids"].delete package_name
+ @autoconfig["release_ids"].delete package_name
+ save_autoconfig
+
+ run page, form
+ end
+
+ def add_release(group_id, package_id, release_name, *files)
+ userfile = files.shift
+ page = "/frs/admin/qrs.php"
+
+ group_id = lookup "group", group_id
+ package_id = lookup "package", package_id
+ userfile = open userfile, 'rb'
+ release_date = @userconfig["release_date"]
+ type_id = @userconfig["type_id"]
+ processor_id = @userconfig["processor_id"]
+ release_notes = @userconfig["release_notes"]
+ release_changes = @userconfig["release_changes"]
+ preformatted = @userconfig["preformatted"]
+
+ release_date ||= Time::now.strftime("%Y-%m-%d %H:%M")
+
+ type_id ||= userfile.path[%r|\.[^\./]+$|]
+ type_id = (lookup "type", type_id rescue lookup "type", ".oth")
+
+ processor_id ||= "Any"
+ processor_id = lookup "processor", processor_id
+
+ release_notes = IO::read(release_notes) if release_notes and test(?e, release_notes)
+
+ release_changes = IO::read(release_changes) if release_changes and test(?e, release_changes)
+
+ preformatted = preformatted ? 1 : 0
+
+ form = {
+ "group_id" => group_id,
+ "package_id" => package_id,
+ "release_name" => release_name,
+ "release_date" => release_date,
+ "type_id" => type_id,
+ "processor_id" => processor_id,
+ "release_notes" => release_notes,
+ "release_changes" => release_changes,
+ "preformatted" => preformatted,
+ "userfile" => userfile,
+ "submit" => "Release File"
+ }
+
+ boundary = Array::new(8){ "%2.2d" % rand(42) }.join('__')
+ boundary = "multipart/form-data; boundary=___#{ boundary }___"
+
+ html = run(page, form, 'content-type' => boundary)
+ raise "Invalid package_id #{package_id}" if html[/Invalid package_id/]
+ raise "You have already released this version." if html[/That filename already exists in this project/]
+
+ release_id = html[/release_id=\d+/][/\d+/].to_i rescue nil
+
+ unless release_id then
+ puts html if $DEBUG
+ raise "Couldn't get release_id, upload failed\?"
+ end
+
+ puts "RELEASE ID = #{release_id}" if $DEBUG
+
+ files.each do |file|
+ add_file(group_id, package_id, release_id, file)
+ end
+
+ package_name = @autoconfig["package_ids"].invert[package_id]
+ raise "unknown package name for #{package_id}" if package_name.nil?
+ @autoconfig["release_ids"][package_name] ||= {}
+ @autoconfig["release_ids"][package_name][release_name] = release_id
+ save_autoconfig
+
+ release_id
+ end
+
+ ##
+ # add a file to an existing release under the specified group_id,
+ # package_id, and release_id
+ #
+ # example :
+ # add_file("codeforpeople.com", "traits", "0.8.0", "traits-0.8.0.gem")
+ # add_file("codeforpeople.com", "traits", "0.8.0", "traits-0.8.0.tgz")
+ # add_file(1024, 1242, "0.8.0", "traits-0.8.0.gem")
+
+ def add_file(group_name, package_name, release_name, userfile)
+ page = '/frs/admin/editrelease.php'
+ type_id = @userconfig["type_id"]
+ group_id = lookup "group", group_name
+ package_id = lookup "package", package_name
+ release_id = (Integer === release_name) ? release_name : lookup("release", package_name)[release_name]
+ processor_id = @userconfig["processor_id"]
+
+ page = "/frs/admin/editrelease.php?group_id=#{group_id}&release_id=#{release_id}&package_id=#{package_id}"
+
+ userfile = open userfile, 'rb'
+
+ type_id ||= userfile.path[%r|\.[^\./]+$|]
+ type_id = (lookup "type", type_id rescue lookup "type", ".oth")
+
+ processor_id ||= "Any"
+ processor_id = lookup "processor", processor_id
+
+ form = {
+ "step2" => 1,
+ "type_id" => type_id,
+ "processor_id" => processor_id,
+ "userfile" => userfile,
+ "submit" => "Add This File"
+ }
+
+ boundary = Array::new(8){ "%2.2d" % rand(42) }.join('__')
+ boundary = "multipart/form-data; boundary=___#{ boundary }___"
+
+ run page, form, 'content-type' => boundary
+ end
+
+ def run(page, form, extheader={}) # :nodoc:
+ client = HTTPAccess2::Client::new ENV["HTTP_PROXY"]
+ client.debug_dev = STDERR if ENV["RUBYFORGE_DEBUG"] || ENV["DEBUG"] || $DEBUG
+ client.set_cookie_store @userconfig["cookie_jar"]
+ client.ssl_config.verify_mode = OpenSSL::SSL::VERIFY_NONE
+
+ # HACK to fix http-access2 redirect bug/feature
+ client.redirect_uri_callback = lambda do |res|
+ page = res.header['location'].first
+ page =~ %r/http/ ? page : @uri + page
+ end
+
+ uri = @uri + page
+ if $DEBUG then
+ puts "client.post_content #{uri.inspect}, #{form.inspect}, #{extheader.inspect}"
+ end
+
+ response = client.post_content uri, form, extheader
+
+ @client = client if $TESTING
+
+ client.save_cookie_store
+
+ if $DEBUG then
+ response.sub!(/\A.*end tabGenerator -->/m, '')
+ response.gsub!(/\t/, ' ')
+ response.gsub!(/\n{3,}/, "\n\n")
+ puts response
+ end
+
+ return response
+ end
+
+ def lookup(type, val) # :nodoc:
+ unless Fixnum === val then
+ key = val.to_s
+ val = @autoconfig["#{type}_ids"][key]
+ raise "no <#{type}_id> configured for <#{ key }>" unless val
+ end
+ val
+ end
+end
+
+__END__
+#
+# base rubyforge uri - store in #{ CONFIG_F }
+#
+ uri : http://rubyforge.org
+#
+# this must be your username
+#
+ username : username
+#
+# this must be your password
+#
+ password : password
+#
+# defaults for some values
+#
+ cookie_jar : #{ COOKIE_F }
+ is_private : false
+# AUTOCONFIG:
+ rubyforge :
+ #
+ # map your group names to their rubyforge ids
+ #
+ group_ids :
+ codeforpeople.com : 1024
+ #
+ # map your package names to their rubyforge ids
+ #
+ package_ids :
+ traits : 1241
+ #
+ # map your package names to their rubyforge ids
+ #
+ release_ids :
+ traits :
+ 1.2.3 : 666
+ #
+ # mapping file exts to rubyforge ids
+ #
+ type_ids :
+ .deb : 1000
+ .rpm : 2000
+ .zip : 3000
+ .bz2 : 3100
+ .gz : 3110
+ .src.zip : 5000
+ .src.bz2 : 5010
+ .src.tar.bz2 : 5010
+ .src.gz : 5020
+ .src.tar.gz : 5020
+ .src.rpm : 5100
+ .src : 5900
+ .jpg : 8000
+ .txt : 8100
+ .text : 8100
+ .htm : 8200
+ .html : 8200
+ .pdf : 8300
+ .oth : 9999
+ .ebuild : 1300
+ .exe : 1100
+ .dmg : 1200
+ .tar.gz : 5000
+ .tgz : 5000
+ .gem : 1400
+ .pgp : 8150
+ .sig : 8150
+ #
+ # map processor names to rubyforge ids
+ #
+ processor_ids :
+ i386 : 1000
+ IA64 : 6000
+ Alpha : 7000
+ Any : 8000
+ PPC : 2000
+ MIPS : 3000
+ Sparc : 4000
+ UltraSparc : 5000
+ Other : 9999
diff --git a/vendor/rubyforge-0.4.4/test/test_rubyforge.rb b/vendor/rubyforge-0.4.4/test/test_rubyforge.rb
new file mode 100644
index 0000000..702d583
--- /dev/null
+++ b/vendor/rubyforge-0.4.4/test/test_rubyforge.rb
@@ -0,0 +1,296 @@
+require 'test/unit' unless defined? $ZENTEST and $ZENTEST
+
+$TESTING = true
+require 'rubyforge'
+
+class FakeRubyForge < RubyForge
+ HTML = "blah blah <form action=\"/frs/admin/editrelease.php?group_id=440&release_id=6948&package_id=491\" method=\"post\"> blah blah"
+
+ attr_accessor :page, :form, :extheader, :requests, :scrape
+ def run(page, form, extheader={})
+ @page, @form, @extheader = page, form, extheader
+ @requests ||= []
+ @requests << { :url => page, :form => form, :headers => extheader }
+ HTML
+ end
+
+ def scrape_project(proj)
+ @scrape ||= []
+ @scrape << proj
+ end
+end
+
+class HTTPAccess2::Client
+ attr_accessor :url, :form, :headers
+ alias :old_post_content :post_content
+ def post_content(url, form, headers)
+ @url, @form, @headers = url, form, headers
+ FakeRubyForge::HTML
+ end
+end
+
+class TestRubyForge < Test::Unit::TestCase
+
+ def setup
+ srand(0)
+ util_new FakeRubyForge
+ end
+
+ def teardown
+ @rubyforge.autoconfig.replace @old_autoconfig
+ @rubyforge.save_autoconfig
+ end
+
+ def test_initialize_bad
+ assert_raise RuntimeError do
+ RubyForge.new(RubyForge::CONFIG_F, "username" => nil)
+ end
+ assert_raise RuntimeError do
+ RubyForge.new(RubyForge::CONFIG_F, "password" => nil)
+ end
+ assert_raise RuntimeError do
+ RubyForge.new(RubyForge::CONFIG_F, "cookie_jar" => nil)
+ end
+ end
+
+ def test_setup
+ # TODO raise NotImplementedError, 'Need to write test_setup'
+ end
+
+ def test_login
+ u, p = 'fooby', 's3kr3t'
+ @rubyforge.userconfig['username'] = u
+ @rubyforge.userconfig['password'] = p
+ @rubyforge.login
+
+ util_run('https://rubyforge.org/account/login.php',
+ 'form_pw' => p,
+ 'form_loginname' => u,
+ 'return_to' => '',
+ 'login' => 'Login')
+ end
+
+ def test_create_package
+ @rubyforge.create_package(42, 'woot_pkg')
+
+ util_run('/frs/admin/index.php',
+ "submit" => "Create This Package",
+ "group_id" => 42,
+ "is_public" => 1,
+ "package_name" => "woot_pkg",
+ "func" => "add_package")
+ end
+
+ def test_delete_package
+ @rubyforge.delete_package(42, 666)
+ util_delete_package
+ end
+
+ def test_delete_package_package_name
+ @rubyforge.delete_package(42, "woot_pkg")
+ util_delete_package
+ end
+
+ def test_delete_package_undefined_package_name
+ assert_raise RuntimeError do
+ @rubyforge.delete_package(42, "blah")
+ end
+ end
+
+ def test_delete_package_group_name
+ @rubyforge.delete_package("seattlerb", 666)
+ util_delete_package
+ end
+
+ def test_delete_package_undefined_group_name
+ assert_raise RuntimeError do
+ @rubyforge.delete_package("blah", 666)
+ end
+ end
+
+ def test_add_file
+ @rubyforge.autoconfig["package_ids"]["ringy_dingy"] = 314
+ @rubyforge.autoconfig["release_ids"]["ringy_dingy"] ||= {}
+ @rubyforge.autoconfig["release_ids"]["ringy_dingy"]["1.2.3"] = 43
+
+ @rubyforge.add_file('seattlerb', 'ringy_dingy', '1.2.3', __FILE__)
+
+ util_run('/frs/admin/editrelease.php?group_id=42&release_id=43&package_id=314',
+ { "step2" => 1,
+ "type_id" => 9999,
+ "processor_id" => 8000,
+ "submit" => "Add This File"},
+ {"content-type"=> "multipart/form-data; boundary=___00__03__03__39__09__19__21__36___"})
+ end
+
+ def test_add_release
+ @rubyforge.add_release(42, 666, '1.2.3', __FILE__)
+ util_add_release
+ end
+
+ def test_add_release_multiple
+ @rubyforge.add_release(42, 666, '1.2.3', __FILE__, __FILE__) # dunno if that'll work
+ add_release = ({ :url=>"/frs/admin/qrs.php",
+ :form=>{"processor_id"=>8000,
+ "submit"=>"Release File",
+ "preformatted"=>0,
+ "release_changes"=>nil,
+ "type_id"=>9999,
+ "group_id"=>42,
+ "release_name"=>"1.2.3",
+ "release_notes"=>nil,
+ "package_id"=>666,
+ "release_date"=>"today"},
+ :headers=> {"content-type" => "multipart/form-data; boundary=___00__03__03__39__09__19__21__36___"}})
+ add_file = ({ :url => '/frs/admin/editrelease.php?group_id=42&release_id=6948&package_id=666',
+ :form => { "step2" => 1,
+ "type_id" => 9999,
+ "processor_id" => 8000,
+ "submit" => "Add This File"},
+ :headers => {"content-type"=> "multipart/form-data; boundary=___23__06__24__24__12__01__38__39___"}})
+ expected = [add_release, add_file]
+
+ result = @rubyforge.requests
+ result.each do |r|
+ r[:form].delete "userfile"
+ end
+
+ assert_equal expected, result
+ end
+
+ def test_post_news
+ @rubyforge.post_news("seattlerb", "my summary", "my news")
+
+ util_run("/news/submit.php",
+ "group_id" => 42,
+ "post_changes" => "y",
+ "details" => "my news",
+ "summary" => "my summary",
+ "submit" => "Submit")
+ end
+
+ def test_add_release_package_name
+ @rubyforge.add_release(42, "woot_pkg", '1.2.3', __FILE__)
+ util_add_release
+ end
+
+ def test_add_release_undefined_package_name
+ assert_raise RuntimeError do
+ @rubyforge.add_release(42, "blah", '1.2.3', __FILE__)
+ end
+ end
+
+ def test_add_release_group_name
+ @rubyforge.add_release("seattlerb", 666, '1.2.3', __FILE__)
+ util_add_release
+ end
+
+ def test_add_release_undefined_group_name
+ assert_raise RuntimeError do
+ @rubyforge.add_release("blah", 666, '1.2.3', __FILE__)
+ end
+ end
+
+ def test_lookup_id
+ assert_equal 43, @rubyforge.lookup("package", 43)
+ end
+
+ def test_lookup_string_number
+ assert_raise RuntimeError do
+ @rubyforge.lookup("package", "43")
+ end
+ end
+
+ def test_lookup_name
+ @rubyforge.autoconfig["package_ids"]["ringy_dingy"] = 314
+ assert_equal 314, @rubyforge.lookup("package", "ringy_dingy")
+ end
+
+ def test_lookup_undefined
+ assert_raise RuntimeError do
+ @rubyforge.lookup("package", "blah")
+ end
+ end
+
+ def test_run
+ util_new RubyForge
+ result = @rubyforge.add_release(42, 666, '1.2.3', __FILE__)
+
+ assert_equal 6948, result
+ extheader = {"content-type"=> "multipart/form-data; boundary=___00__03__03__39__09__19__21__36___"}
+
+ form = {
+ "processor_id" => 8000,
+ "submit" => "Release File",
+ "preformatted" => 0,
+ "release_changes" => nil,
+ "type_id" => 9999,
+ "group_id" => 42,
+ "release_name" => "1.2.3",
+ "release_notes" => nil,
+ "package_id" => 666,
+ "release_date" => "today"
+ }
+
+ client = @rubyforge.client
+ assert client.form.delete("userfile")
+
+ assert_equal 'http://rubyforge.org/frs/admin/qrs.php', client.url.to_s
+ assert_equal form, client.form
+ assert_equal extheader, client.headers
+ end
+
+ def util_new(klass)
+ @rubyforge = klass.new
+ @old_autoconfig = @rubyforge.autoconfig.dup
+
+ data = { # REFACTOR
+ "group_ids" => {},
+ "package_ids" => {},
+ "release_ids" => Hash.new { |h,k| h[k] = {} },
+ "type_ids" => {},
+ "processor_ids" => {"Any"=>8000},
+ }
+
+ @rubyforge.autoconfig.replace data
+
+ @rubyforge.userconfig["release_date"] = "today"
+ @rubyforge.autoconfig["type_ids"][".rb"] = 9999
+ @rubyforge.autoconfig["group_ids"]["seattlerb"] = 42
+ @rubyforge.autoconfig["package_ids"]["woot_pkg"] = 666
+ end
+
+ def util_run(page, form={}, extheader={})
+ form_result = @rubyforge.form
+ assert form_result.delete("userfile") unless extheader.empty?
+
+ assert_equal page, @rubyforge.page.to_s
+ assert_equal form, form_result
+ assert_equal extheader, @rubyforge.extheader
+ end
+
+ def util_add_release
+ util_run('/frs/admin/qrs.php',
+ {"processor_id" => 8000,
+ "submit" => "Release File",
+ "preformatted" => 0,
+ "release_changes" => nil,
+ "type_id" => 9999,
+ "group_id" => 42,
+ "release_name" => "1.2.3",
+ "release_notes" => nil,
+ "package_id" => 666,
+ "release_date" => "today"},
+ {"content-type"=> "multipart/form-data; boundary=___00__03__03__39__09__19__21__36___"})
+ end
+
+ def util_delete_package
+ util_run('/frs/admin/index.php',
+ "submit" => "Delete",
+ "really_sure" => "1",
+ "group_id" => 42,
+ "func" => "delete_package",
+ "package_id" => 666,
+ "sure" => "1")
+ end
+end
|
jwilger/jack-the-ripper
|
a2c93cfbb1fbbc374c8f1b60e3a216bdb49a393b
|
added hoe library to vendor
|
diff --git a/Manifest.txt b/Manifest.txt
index 22d005b..060994f 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,59 +1,66 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
lib/jack_the_ripper/uri_fix.rb
lib/jack_the_ripper_server.rb
test/jack_the_ripper/test_http_file.rb
test/jack_the_ripper/test_processor.rb
test/test_jack_the_ripper.rb
+vendor/hoe-1.5.0/History.txt
+vendor/hoe-1.5.0/Manifest.txt
+vendor/hoe-1.5.0/README.txt
+vendor/hoe-1.5.0/Rakefile
+vendor/hoe-1.5.0/bin/sow
+vendor/hoe-1.5.0/lib/hoe.rb
+vendor/hoe-1.5.0/test/test_hoe.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
diff --git a/Rakefile b/Rakefile
index b604f64..625e602 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,19 +1,15 @@
-# -*- ruby -*-
-
-require 'rubygems'
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/vendor/hoe-1.5.0/lib' ) )
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
p.extra_deps = %w( right_aws mime-types daemons )
-end
-
-# vim: syntax=Ruby
+end
\ No newline at end of file
diff --git a/vendor/hoe-1.5.0/History.txt b/vendor/hoe-1.5.0/History.txt
new file mode 100644
index 0000000..f36c0a1
--- /dev/null
+++ b/vendor/hoe-1.5.0/History.txt
@@ -0,0 +1,208 @@
+=== 1.5.0 / 2008-01-30
+
+* 9 Minor Enhancements:
+
+ * Added autopopulation of changes from History.txt.
+ * Added autopopulation of urls from History.txt.
+ * Added autopopulation of description from History.txt
+ * Added autopopulation of summary from description.
+ * Added description_sections to declare what sections of readme to use.
+ * Added summary_sentences to declare how many sentences you want in summary.
+ * Added developer(name, email) to cleanly populate both author/email arrays.
+ * author and email now default to "doofus".
+ * author and email warn that they'll blow up on 2008-04-01.
+
+=== 1.4.0 / 2007-12-20
+
+* 1 Major Enhancement:
+
+ * rake package now supports INLINE=1 and FORCE_PLATFORM=whatever.
+ * Supports ruby_inline extensions.
+ * Contributed by Luis Lavena. Thanks Luis!
+
+=== 1.3.0 / 2007-08-13
+
+* 1 Major Enhancement:
+
+ * Hoe now builds signed gems automatically. Run the generate_key task to
+ automatically create a signing key.
+
+* 4 Minor Enhancements:
+
+ * Extended rdoc pattern to include ext dirs.
+ * Fixed dependency adding for versionless dependencies.
+ * Added NODOT env var to disable RDoc diagram generation.
+ * The config_hoe task automatically merges in new config entries.
+
+=== 1.2.2 / 2007-07-23
+
+* 2 Minor Enhancements:
+
+ * Added exclude parameter for check_manifest filtering to .hoerc.
+ * Documented .hoerc stuffs.
+
+* 1 Bug Fix:
+
+ * Various (untested) fixes for windows compatibility.
+
+=== 1.2.1 / 2007-05-21
+
+* 8 Minor Enhancements:
+
+ * Allow for spaces in filenames in manifest. Thanks to Aaron Patterson.
+ * Allow rsync flags to be set.
+ * Allow rdoc destination directory to be set.
+ * Deal with bad line-endings. Stupid windoze users... :(
+ * Added WINDOZE check for diff.exe and look for gdiff first.
+ * Use gdiff if available, diff otherwise. Allows to work on borked Solaris.
+ * Move RDoc to attr* from big 'ol chunk at the top of the class.
+ * Basic conversion of history/urls from rdoc to markdown.
+
+* 1 Bug Fix:
+
+ * Fixed executables regexp to /^bin/.
+
+=== 1.2.0 / 2007-02-13
+
+* 4 Minor Enhancements:
+
+ * Added more support for ext dirs.
+ * Added a simple config file (yaml). Use 'rake config_hoe' to edit.
+ * Added post_blog task (thanks Aaron!), configured via config_hoe.
+ * Announce task now posts to your blogs and/or publishes API
+ depending on config.
+
+=== 1.1.7 / 2007-01-10
+
+* 5 Minor Enhancements:
+
+ * extra_deps is now self-healing, and ensures no (direct) cycles.
+ * cleans check_manifest for CVS projects.
+ * rubyforge changes for config.
+ * Now uses rsync for publish_docs. YAY for fast!
+ * Bug #7193 fix spelling of 'synopsys'. Submitted by Jacob Atzen.
+
+=== 1.1.6 / 2006-11-29
+
+* 1 Bug Fix:
+
+ * Fix release to work correctly with need_zip and need_tar.
+
+=== 1.1.5 / 2006-11-29
+
+* 2 Minor Enhancements:
+
+ * Reduced check_manifest dependencies to just diff for windows.
+ * Don't use default author in summary, description or changes.
+
+=== 1.1.4 / 2006-11-12
+
+* 3 Minor Enhancements:
+
+ * Added need_tar and need_zip to customize package requirements. Stupid windoze.
+ * Extended spec_extras to take procs as values. Passes in named parameter.
+ * Removed test from require_paths. I thought I already parameterized this. :/
+
+=== 1.1.3 / 2006-11-09
+
+* 6 Minor Enhancements:
+
+ * Added test_deps, now you can automatically discover test dependency ommisions.
+ * Added ext support! Build C extensions with hoe!
+ * Gemspec uses test_all.rb or result of test_globs. Tweak those tests.
+ * Now uses https to login to rubyforge. Rubyforge crackers beware!
+ * Fixed doco and automated updating of it.
+ * Added rdoc_pattern. Go doco go!
+
+=== 1.1.2 / 2006-10-22
+
+* 4 Minor Enhancements:
+
+ * Added -d and -t flags to sow to make dev or trunk subdirs for p4
+ and svn projects.
+ * Added install_gem to further test gem builds.
+ * Added test_globs to customize your test file list.
+ * Removed demo.rb from clean_globs. I'm torn on this one.
+
+* 1 Bug Fix:
+
+ * Fixed bug in install rule.
+
+=== 1.1.1 / 2006-10-11
+
+* 2 Bug Fixes:
+
+ * Fixed minor problem with subject of email.
+ * Fixed problem in test.
+
+=== 1.1.0 / 2006-10-04
+
+* 1 Major Enhancement:
+
+ * Added sow, a command-line tool for quickly creating new projects.
+
+* 1 Minor Enhancement:
+
+ * Added check_manifest task
+
+=== 1.0.5 / 2006-10-03
+
+* 8 Minor Enhancements:
+
+ * Doco cleanup.
+ * Removed Manifest.txt from rdoc and added title.
+ * Added changeset support.
+ * Added spec_extras for easy gemspec attribute setting.
+ * Added release_notes, changeset setting for releases.
+ * Added paragraphs_of utility method.
+ * Added email and rubyforge news announcement tasks.
+ * Url attribute may now be an array of urls.
+
+=== 1.0.4 / 2006-09-23
+
+* 1 Bug Fix:
+
+ * Damnit... I messed up. There is no rubygems gem to be dependent upon. Duh.
+
+=== 1.0.3 / 2006-09-23
+
+* 9 Minor Enhancements:
+
+ * Added debug_gem rule.
+ * Added lots of doco.
+ * Added proper deps to hoe for other's gems, and
+ rake/rubyforge/rubygems for hoe.
+ * Added ridocs to generate ri locally for testing.
+ * Added support for multiple authors.
+ * Rdoc now includes any top level .txt files.
+ * Renamed deploy to release.
+ * Renamed upload to publish_docs.
+ * publish_docs is now smart about subprojects and missing subdirectories.
+
+* 1 Bug Fix:
+
+ * Fixed include paths.
+
+=== 1.0.2 / 2006-09-20
+
+* 2 Minor Enhancements:
+
+ * Wee little tests.
+ * Fixed up gemspec's require_paths.
+
+=== 1.0.1 / 2006-09-20
+
+* 5 Minor Enhancements:
+
+ * Finally got deployment straightened out. Maybe. Some might be on
+ rubyforge.org.
+ * Added default description and summary.
+ * Added dependency mechanism.
+ * Improved gemspec debugging.
+ * Swapped gem with tgz in deploy... we'd rather screw up on tgz
+
+=== 1.0.0 / 2006-09-19
+
+* 1 Major Enhancement:
+
+ * Birthday!
diff --git a/vendor/hoe-1.5.0/Manifest.txt b/vendor/hoe-1.5.0/Manifest.txt
new file mode 100644
index 0000000..5893d70
--- /dev/null
+++ b/vendor/hoe-1.5.0/Manifest.txt
@@ -0,0 +1,7 @@
+History.txt
+Manifest.txt
+README.txt
+Rakefile
+bin/sow
+lib/hoe.rb
+test/test_hoe.rb
diff --git a/vendor/hoe-1.5.0/README.txt b/vendor/hoe-1.5.0/README.txt
new file mode 100644
index 0000000..bcd6a55
--- /dev/null
+++ b/vendor/hoe-1.5.0/README.txt
@@ -0,0 +1,92 @@
+= Hoe
+
+* http://rubyforge.org/projects/seattlerb/
+* http://seattlerb.rubyforge.org/hoe/
+* mailto:[email protected]
+
+== DESCRIPTION:
+
+Hoe is a simple rake/rubygems helper for project Rakefiles. It
+generates all the usual tasks for projects including rdoc generation,
+testing, packaging, and deployment.
+
+Tasks Provided:
+
+* announce - Generate email announcement file and post to rubyforge.
+* audit - Run ZenTest against the package
+* check_manifest - Verify the manifest
+* clean - Clean up all the extras
+* config_hoe - Create a fresh ~/.hoerc file
+* debug_gem - Show information about the gem.
+* default - Run the default tasks
+* docs - Build the docs HTML Files
+* email - Generate email announcement file.
+* gem - Build the gem file only.
+* install - Install the package. Uses PREFIX and RUBYLIB
+* install_gem - Install the package as a gem
+* multi - Run the test suite using multiruby
+* package - Build all the packages
+* post_blog - Post announcement to blog.
+* post_news - Post announcement to rubyforge.
+* publish_docs - Publish RDoc to RubyForge
+* release - Package and upload the release to rubyforge.
+* ridocs - Generate ri locally for testing
+* test - Run the test suite. Use FILTER to add to the command line.
+* test_deps - Show which test files fail when run alone.
+* uninstall - Uninstall the package.
+
+See class rdoc for help. Hint: ri Hoe
+
+== FEATURES/PROBLEMS:
+
+* Provides 'sow' for quick project directory creation.
+* Make making and maintaining Rakefiles fun and easy.
+
+== SYNOPSIS:
+
+ % sow [group] project
+
+or
+
+ require 'hoe'
+
+ Hoe.new(projectname, version) do |p|
+ # ... project specific data ...
+ end
+
+ # ... project specific tasks ...
+
+== REQUIREMENTS:
+
+* rake
+* rubyforge
+* rubygems
+
+== INSTALL:
+
+* sudo gem install hoe
+
+== LICENSE:
+
+(The MIT License)
+
+Copyright (c) 2006 Ryan Davis, Zen Spider Software
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/hoe-1.5.0/Rakefile b/vendor/hoe-1.5.0/Rakefile
new file mode 100644
index 0000000..6db6deb
--- /dev/null
+++ b/vendor/hoe-1.5.0/Rakefile
@@ -0,0 +1,10 @@
+# -*- ruby -*-
+
+require './lib/hoe.rb'
+
+Hoe.new("hoe", Hoe::VERSION) do |hoe|
+ hoe.rubyforge_name = "seattlerb"
+ hoe.developer("Ryan Davis", "[email protected]")
+end
+
+# vim: syntax=Ruby
diff --git a/vendor/hoe-1.5.0/bin/sow b/vendor/hoe-1.5.0/bin/sow
new file mode 100644
index 0000000..5f060db
--- /dev/null
+++ b/vendor/hoe-1.5.0/bin/sow
@@ -0,0 +1,78 @@
+#!/usr/bin/env ruby -ws
+
+$t ||= false
+$d ||= false
+
+if defined? $h then
+ puts "usage: #{File.dirname($0)} [-d|-t] [group] project"
+ puts " -t = add project to subdir under 'trunk'"
+ puts " -d = add project to subdir under 'dev'"
+end
+
+abort "You must specify only one of -t or -d" if $t and $d
+
+group = ARGV.shift
+project = ARGV.shift
+
+project ||= group
+
+# prevents false positives on my tag reporter
+X = 'FI' + 'X'
+
+abort "You must supply a project name on the commandline" unless project
+abort "Project #{project} seems to exist" if test ?d, project
+puts "creating project #{project}"
+
+case project
+when /_/ then
+ file_name = project
+ project = project.capitalize.gsub(/_([a-z])/) {$1.upcase}
+ klass = project
+else
+ file_name = project.gsub(/([A-Z])/, '_\1').downcase.sub(/^_/, '')
+ klass = project.capitalize.gsub(/_([a-z])/) {$1.upcase}
+end
+
+Dir.mkdir project
+Dir.chdir project do
+
+ if $d then
+ Dir.mkdir "dev"
+ Dir.chdir "dev"
+ elsif $t then
+ Dir.mkdir "trunk"
+ Dir.chdir "trunk"
+ end
+
+ %w(bin lib test).each do |path|
+ Dir.mkdir path
+ end
+
+ files = {
+ "History.txt" => "=== 1.0.0 / #{Time.new.strftime("%Y-%m-%d")}\n\n* 1 major enhancement\n * Birthday!\n\n",
+ "README.txt" => "= #{project}\n\n* #{X} (url)\n\n== DESCRIPTION:\n\n#{X} (describe your package)\n\n== FEATURES/PROBLEMS:\n\n* #{X} (list of features or problems)\n\n== SYNOPSIS:\n\n #{X} (code sample of usage)\n\n== REQUIREMENTS:\n\n* #{X} (list of requirements)\n\n== INSTALL:\n\n* #{X} (sudo gem install, anything else)\n\n== LICENSE:\n\n(The MIT License)\n\nCopyright (c) #{Time.new.strftime("%Y")} #{X}\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n",
+ "Manifest.txt" => "",
+ "bin/#{file_name}" => "",
+ "lib/#{file_name}.rb" => "class #{klass}\n VERSION = '1.0.0'\nend",
+ "test/test_#{file_name}.rb" => "",
+ "Rakefile" => "# -*- ruby -*-\n\nrequire 'rubygems'\nrequire 'hoe'\nrequire './lib/#{file_name}.rb'\n\nHoe.new('#{project}', #{klass}::VERSION) do |p|\n p.rubyforge_name = \'#{group}\'\n # p.author = '#{X}'\n # p.email = '#{X}'\n # p.summary = '#{X}'\n # p.description = p.paragraphs_of('README.txt', 2..5).join(\"\\n\\n\")\n # p.url = p.paragraphs_of('README.txt', 0).first.split(/\\n/)[1..-1]\n p.changes = p.paragraphs_of('History.txt', 0..1).join(\"\\n\\n\")\nend\n\n# vim: syntax=Ruby\n"
+ }
+
+ files["Manifest.txt"] = files.keys.sort.join("\n")
+
+ files.each do |file, content|
+ File.open(file, "w") do |f|
+ f.write content
+ end
+ end
+end
+
+WINDOZE = /win32/ =~ RUBY_PLATFORM
+
+puts "... done, now go fix all occurrences of '#{X}'"
+if WINDOZE then
+ puts `findstr /N /S /C:#{X} #{project}\\*`
+else
+ puts `find #{project} -type f | xargs grep -n #{X}`.gsub(/\A|\n/, "\n ")
+end
+
diff --git a/vendor/hoe-1.5.0/lib/hoe.rb b/vendor/hoe-1.5.0/lib/hoe.rb
new file mode 100644
index 0000000..d1e1e67
--- /dev/null
+++ b/vendor/hoe-1.5.0/lib/hoe.rb
@@ -0,0 +1,828 @@
+# -*- ruby -*-
+
+require 'rubygems'
+require 'rake'
+require 'rake/contrib/sshpublisher'
+require 'rake/gempackagetask'
+require 'rake/rdoctask'
+require 'rake/testtask'
+require 'rbconfig'
+require 'rubyforge'
+require 'yaml'
+
+##
+# hoe - a tool to help rake
+#
+# Hoe is a simple rake/rubygems helper for project Rakefiles. It
+# generates all the usual tasks for projects including rdoc generation,
+# testing, packaging, and deployment.
+#
+# == Using Hoe
+#
+# === Basics
+#
+# Use this as a minimal starting point:
+#
+# require 'hoe'
+#
+# Hoe.new("project_name", '1.0.0') do |p|
+# p.rubyforge_name = "rf_project"
+# # add other details here
+# end
+#
+# # add other tasks here
+#
+# === Tasks Provided:
+#
+# announce:: Generate email announcement file and post to rubyforge.
+# audit:: Run ZenTest against the package
+# check_manifest:: Verify the manifest
+# clean:: Clean up all the extras
+# config_hoe:: Create a fresh ~/.hoerc file
+# debug_gem:: Show information about the gem.
+# default:: Run the default tasks
+# docs:: Build the docs HTML Files
+# email:: Generate email announcement file.
+# gem:: Build the gem file only.
+# install:: Install the package. Uses PREFIX and RUBYLIB
+# install_gem:: Install the package as a gem
+# multi:: Run the test suite using multiruby
+# package:: Build all the packages
+# post_blog:: Post announcement to blog.
+# post_news:: Post announcement to rubyforge.
+# publish_docs:: Publish RDoc to RubyForge
+# release:: Package and upload the release to rubyforge.
+# ridocs:: Generate ri locally for testing
+# test:: Run the test suite. Use FILTER to add to the command line.
+# test_deps:: Show which test files fail when run alone.
+# uninstall:: Uninstall the package.
+#
+# === Extra Configuration Options:
+#
+# Run +config_hoe+ to generate a new ~/.hoerc file. The file is a
+# YAML formatted config file with the following settings:
+#
+# exclude:: A regular expression of files to exclude from
+# +check_manifest+.
+# publish_on_announce:: Run +publish_docs+ when you run +release+.
+# signing_key_file:: Signs your gems with this private key.
+# signing_cert_file:: Signs your gem with this certificate.
+# blogs:: An array of hashes of blog settings.
+#
+# Run +config_hoe+ and see ~/.hoerc for examples.
+#
+# === Signing Gems:
+#
+# Run the 'generate_key' task. This will:
+#
+# 1. Configure your ~/.hoerc.
+# 2. Generate a signing key and certificate.
+# 3. Install the private key and public certificate files into ~/.gem.
+# 4. Upload the certificate to RubyForge.
+#
+# Hoe will now generate signed gems when the package task is run. If you have
+# multiple machines you build gems on, be sure to install your key and
+# certificate on each machine.
+#
+# Keep your private key secret! Keep your private key safe!
+#
+# To make sure your gems are signed run:
+#
+# rake package; tar tf pkg/yourproject-1.2.3.gem
+#
+# If your gem is signed you will see:
+#
+# data.tar.gz
+# data.tar.gz.sig
+# metadata.gz
+# metadata.gz.sig
+#
+# === Platform awareness
+#
+# Hoe allows bundling of pre-compiled extensions in the +package+ task.
+#
+# To create a package for your current platform:
+#
+# rake package INLINE=1
+#
+# This will force Hoe analize your +Inline+ already compiled
+# extensions and include them in your gem.
+#
+# If somehow you need to force a specific platform:
+#
+# rake package INLINE=1 FORCE_PLATFORM=mswin32
+#
+# This will set the +Gem::Specification+ platform to the one indicated in
+# +FORCE_PLATFORM+ (instead of default Gem::Platform::CURRENT)
+#
+
+class Hoe
+ VERSION = '1.5.0'
+
+ ruby_prefix = Config::CONFIG['prefix']
+ sitelibdir = Config::CONFIG['sitelibdir']
+
+ ##
+ # Used to specify a custom install location (for rake install).
+
+ PREFIX = ENV['PREFIX'] || ruby_prefix
+
+ ##
+ # Used to add extra flags to RUBY_FLAGS.
+
+ RUBY_DEBUG = ENV['RUBY_DEBUG']
+
+ default_ruby_flags = "-w -I#{%w(lib ext bin test).join(File::PATH_SEPARATOR)}" +
+ (RUBY_DEBUG ? " #{RUBY_DEBUG}" : '')
+
+ ##
+ # Used to specify flags to ruby [has smart default].
+
+ RUBY_FLAGS = ENV['RUBY_FLAGS'] || default_ruby_flags
+
+ ##
+ # Used to add flags to test_unit (e.g., -n test_borked).
+
+ FILTER = ENV['FILTER'] # for tests (eg FILTER="-n test_blah")
+
+ # :stopdoc:
+
+ RUBYLIB = if PREFIX == ruby_prefix then
+ sitelibdir
+ else
+ File.join(PREFIX, sitelibdir[ruby_prefix.size..-1])
+ end
+
+ DLEXT = Config::CONFIG['DLEXT']
+
+ WINDOZE = /djgpp|(cyg|ms|bcc)win|mingw/ =~ RUBY_PLATFORM unless defined? WINDOZE
+
+ DIFF = if WINDOZE
+ 'diff.exe'
+ else
+ if system("gdiff", __FILE__, __FILE__)
+ 'gdiff' # solaris and kin suck
+ else
+ 'diff'
+ end
+ end unless defined? DIFF
+
+ # :startdoc:
+
+ ##
+ # *Recommended*: The author(s) of the package. (can be array)
+ # Really. Set this or we'll tease you.
+
+ attr_accessor :author
+
+ ##
+ # Populated automatically from the manifest. List of executables.
+
+ attr_accessor :bin_files # :nodoc:
+
+ ##
+ # Optional: A description of the release's latest changes. Auto-populates.
+
+ attr_accessor :changes
+
+ ##
+ # Optional: An array of file patterns to delete on clean.
+
+ attr_accessor :clean_globs
+
+ ##
+ # Optional: A description of the project. Auto-populates.
+
+ attr_accessor :description
+
+ ##
+ # Optional: What sections from the readme to use for auto-description. Defaults to %w(description).
+
+ attr_accessor :description_sections
+
+ ##
+ # *Recommended*: The author's email address(es). (can be array)
+
+ attr_accessor :email
+
+ ##
+ # Optional: An array of rubygem dependencies.
+
+ attr_accessor :extra_deps
+
+ ##
+ # Populated automatically from the manifest. List of library files.
+
+ attr_accessor :lib_files # :nodoc:
+
+ ##
+ # *MANDATORY*: The name of the release.
+
+ attr_accessor :name
+
+ ##
+ # Optional: Should package create a tarball? [default: true]
+
+ attr_accessor :need_tar
+
+ ##
+ # Optional: Should package create a zipfile? [default: false]
+
+ attr_accessor :need_zip
+
+ ##
+ # Optional: A regexp to match documentation files against the manifest.
+
+ attr_accessor :rdoc_pattern
+
+ ##
+ # Optional: Name of RDoc destination directory on Rubyforge. [default: +name+]
+
+ attr_accessor :remote_rdoc_dir
+
+ ##
+ # Optional: Flags for RDoc rsync. [default: "-av --delete"]
+
+ attr_accessor :rsync_args
+
+ ##
+ # Optional: The name of the rubyforge project. [default: name.downcase]
+
+ attr_accessor :rubyforge_name
+
+ ##
+ # The Gem::Specification.
+
+ attr_accessor :spec # :nodoc:
+
+ ##
+ # Optional: A hash of extra values to set in the gemspec. Value may be a proc.
+
+ attr_accessor :spec_extras
+
+ ##
+ # Optional: A short summary of the project. Auto-populates.
+
+ attr_accessor :summary
+
+ ##
+ # Optional: Number of sentences from description for summary. Defaults to 1.
+
+ attr_accessor :summary_sentences
+
+ ##
+ # Populated automatically from the manifest. List of tests.
+
+ attr_accessor :test_files # :nodoc:
+
+ ##
+ # Optional: An array of test file patterns [default: test/**/test_*.rb]
+
+ attr_accessor :test_globs
+
+ ##
+ # Optional: The url(s) of the project. (can be array). Auto-populates.
+
+ attr_accessor :url
+
+ ##
+ # *MANDATORY*: The version. Don't hardcode! use a constant in the project.
+
+ attr_accessor :version
+
+ def initialize(name, version) # :nodoc:
+ self.name = name
+ self.version = version
+
+ # Defaults
+ self.author = []
+ self.clean_globs = %w(diff diff.txt email.txt ri *.gem **/*~)
+ self.description_sections = %w(description)
+ self.email = []
+ self.extra_deps = []
+ self.need_tar = true
+ self.need_zip = false
+ self.rdoc_pattern = /^(lib|bin|ext)|txt$/
+ self.remote_rdoc_dir = name
+ self.rsync_args = '-av --delete'
+ self.rubyforge_name = name.downcase
+ self.spec_extras = {}
+ self.summary_sentences = 1
+ self.test_globs = ['test/**/test_*.rb']
+
+ yield self if block_given?
+
+ # Intuit values:
+
+ history = File.read("History.txt").split(/^(===.*)/)
+ readme = File.read("README.txt").split(/^(=+ .*)$/)[1..-1]
+ sections = readme.map { |s| s =~ /^=/ ? s.strip.downcase.chomp(':').split.last : s.strip }
+ sections = Hash[*sections]
+ desc = sections.values_at(*description_sections).join("\n\n")
+ summ = desc.split(/\.\s+/).first(summary_sentences).join(". ")
+
+ self.description ||= desc
+ self.changes ||= history[0..2].join.strip
+ self.summary ||= summ
+ self.url ||= readme[1].gsub(/^\* /, '').split(/\n/).grep(/\S+/)
+
+ %w(email author).each do |field|
+ value = self.send(field)
+ if value.nil? or value.empty? then
+ if Time.now < Time.local(2008, 4, 1) then
+ warn "Hoe #{field} value not set - Fix by 2008-04-01!"
+ self.send "#{field}=", "doofus"
+ else
+ abort "Hoe #{field} value not set"
+ end
+ end
+ end
+
+ hoe_deps = {
+ 'rake' => ">= #{RAKEVERSION}",
+ 'rubyforge' => ">= #{::RubyForge::VERSION}",
+ }
+
+ self.extra_deps = Array(extra_deps).map { |o| String === o ? [o] : o }
+
+ if name == 'hoe' then
+ hoe_deps.each do |pkg, version|
+ extra_deps << [pkg, version]
+ end
+ else
+ extra_deps << ['hoe', ">= #{VERSION}"] unless hoe_deps.has_key? name
+ end
+
+ define_tasks
+ end
+
+ def developer name, email
+ self.author << name
+ self.email << email
+ end
+
+ def define_tasks # :nodoc:
+ def with_config # :nodoc:
+ rc = File.expand_path("~/.hoerc")
+ exists = File.exist? rc
+ config = exists ? YAML.load_file(rc) : {}
+ yield(config, rc)
+ end
+
+ desc 'Run the default tasks'
+ task :default => :test
+
+ desc 'Run the test suite. Use FILTER to add to the command line.'
+ task :test do
+ run_tests
+ end
+
+ desc 'Show which test files fail when run alone.'
+ task :test_deps do
+ tests = Dir["test/**/test_*.rb"] + Dir["test/**/*_test.rb"]
+
+ tests.each do |test|
+ if not system "ruby -Ibin:lib:test #{test} &> /dev/null" then
+ puts "Dependency Issues: #{test}"
+ end
+ end
+ end
+
+ desc 'Run the test suite using multiruby'
+ task :multi do
+ run_tests :multi
+ end
+
+ ############################################################
+ # Packaging and Installing
+
+ signing_key = nil
+ cert_chain = []
+
+ with_config do |config, path|
+ break unless config['signing_key_file'] and config['signing_cert_file']
+ key_file = File.expand_path config['signing_key_file'].to_s
+ signing_key = key_file if File.exist? key_file
+
+ cert_file = File.expand_path config['signing_cert_file'].to_s
+ cert_chain << cert_file if File.exist? cert_file
+ end
+
+ self.spec = Gem::Specification.new do |s|
+ s.name = name
+ s.version = version
+ s.summary = summary
+ case author
+ when Array
+ s.authors = author
+ else
+ s.author = author
+ end
+ s.email = email
+ s.homepage = Array(url).first
+ s.rubyforge_project = rubyforge_name
+
+ s.description = description
+
+ extra_deps.each do |dep|
+ s.add_dependency(*dep)
+ end
+
+ s.files = File.read("Manifest.txt").delete("\r").split(/\n/)
+ s.executables = s.files.grep(/^bin/) { |f| File.basename(f) }
+
+ s.bindir = "bin"
+ dirs = Dir['{lib,ext}']
+ s.require_paths = dirs unless dirs.empty?
+
+ s.rdoc_options = ['--main', 'README.txt']
+ s.extra_rdoc_files = s.files.grep(/txt$/)
+ s.has_rdoc = true
+
+ if test ?f, "test/test_all.rb" then
+ s.test_file = "test/test_all.rb"
+ else
+ s.test_files = Dir[*test_globs]
+ end
+
+ if signing_key and cert_chain then
+ s.signing_key = signing_key
+ s.cert_chain = cert_chain
+ end
+
+ ############################################################
+ # Allow automatic inclusion of compiled extensions
+ if ENV['INLINE'] then
+ s.platform = ENV['FORCE_PLATFORM'] || Gem::Platform::CURRENT
+ # name of the extension is CamelCase
+ if name =~ /[A-Z]/
+ # ClassName => class_name
+ alternate_name = name.reverse.scan(%r/[A-Z]+|[^A-Z]*[A-Z]+?/).reverse.map { |word| word.reverse.downcase }.join('_')
+ elsif name =~ /_/
+ # class_name = ClassName
+ alternate_name = name.strip.split(/\s*_+\s*/).map! { |w| w.downcase.sub(/^./) { |c| c.upcase } }.join
+ end
+
+ # Try collecting Inline extensions for +name+
+ if defined?(Inline) then
+ directory 'lib/inline'
+
+ extensions = Dir.chdir(Inline::directory) {
+ Dir["Inline_{#{name},#{alternate_name}}_*.#{DLEXT}"]
+ }
+ extensions.each do |ext|
+ # add the inlined extension to the spec files
+ s.files += ["lib/inline/#{ext}"]
+
+ # include the file in the tasks
+ file "lib/inline/#{ext}" => ["lib/inline"] do
+ cp File.join(Inline::directory, ext), "lib/inline"
+ end
+ end
+ end
+ end
+
+ # Do any extra stuff the user wants
+ spec_extras.each do |msg, val|
+ case val
+ when Proc
+ val.call(s.send(msg))
+ else
+ s.send "#{msg}=", val
+ end
+ end
+ end
+
+ desc 'Show information about the gem.'
+ task :debug_gem do
+ puts spec.to_ruby
+ end
+
+ self.lib_files = spec.files.grep(/^(lib|ext)/)
+ self.bin_files = spec.files.grep(/^bin/)
+ self.test_files = spec.files.grep(/^test/)
+
+ Rake::GemPackageTask.new spec do |pkg|
+ pkg.need_tar = @need_tar
+ pkg.need_zip = @need_zip
+ end
+
+ desc 'Install the package. Uses PREFIX and RUBYLIB'
+ task :install do
+ [
+ [lib_files + test_files, RUBYLIB, 0444],
+ [bin_files, File.join(PREFIX, 'bin'), 0555]
+ ].each do |files, dest, mode|
+ FileUtils.mkdir_p dest unless test ?d, dest
+ files.each do |file|
+ install file, dest, :mode => mode
+ end
+ end
+ end
+
+ desc 'Install the package as a gem'
+ task :install_gem => [:clean, :package] do
+ sh "#{'sudo ' unless WINDOZE}gem install --local pkg/*.gem"
+ end
+
+ desc 'Uninstall the package.'
+ task :uninstall do
+ Dir.chdir RUBYLIB do
+ rm_f((lib_files + test_files).map { |f| File.basename f })
+ end
+ Dir.chdir File.join(PREFIX, 'bin') do
+ rm_f bin_files.map { |f| File.basename f }
+ end
+ end
+
+ desc 'Package and upload the release to rubyforge.'
+ task :release => [:clean, :package] do |t|
+ v = ENV["VERSION"] or abort "Must supply VERSION=x.y.z"
+ abort "Versions don't match #{v} vs #{version}" if v != version
+ pkg = "pkg/#{name}-#{version}"
+
+ if $DEBUG then
+ puts "release_id = rf.add_release #{rubyforge_name.inspect}, #{name.inspect}, #{version.inspect}, \"#{pkg}.tgz\""
+ puts "rf.add_file #{rubyforge_name.inspect}, #{name.inspect}, release_id, \"#{pkg}.gem\""
+ end
+
+ rf = RubyForge.new
+ puts "Logging in"
+ rf.login
+
+ c = rf.userconfig
+ c["release_notes"] = description if description
+ c["release_changes"] = changes if changes
+ c["preformatted"] = true
+
+ files = [(@need_tar ? "#{pkg}.tgz" : nil),
+ (@need_zip ? "#{pkg}.zip" : nil),
+ "#{pkg}.gem"].compact
+
+ puts "Releasing #{name} v. #{version}"
+ rf.add_release rubyforge_name, name, version, *files
+ end
+
+ ############################################################
+ # Doco
+
+ Rake::RDocTask.new(:docs) do |rd|
+ rd.main = "README.txt"
+ rd.options << '-d' if RUBY_PLATFORM !~ /win32/ and `which dot` =~ /\/dot/ and not ENV['NODOT']
+ rd.rdoc_dir = 'doc'
+ files = spec.files.grep(rdoc_pattern)
+ files -= ['Manifest.txt']
+ rd.rdoc_files.push(*files)
+
+ title = "#{name}-#{version} Documentation"
+ title = "#{rubyforge_name}'s " + title if rubyforge_name != title
+
+ rd.options << "-t #{title}"
+ end
+
+ desc "Generate ri locally for testing"
+ task :ridocs => :clean do
+ sh %q{ rdoc --ri -o ri . }
+ end
+
+ desc "Publish RDoc to RubyForge"
+ task :publish_docs => [:clean, :docs] do
+ config = YAML.load(File.read(File.expand_path("~/.rubyforge/user-config.yml")))
+ host = "#{config["username"]}@rubyforge.org"
+
+ remote_dir = "/var/www/gforge-projects/#{rubyforge_name}/#{remote_rdoc_dir}"
+ local_dir = 'doc'
+
+ sh %{rsync #{rsync_args} #{local_dir}/ #{host}:#{remote_dir}}
+ end
+
+ # no doco for this one
+ task :publish_on_announce do
+ with_config do |config, _|
+ Rake::Task['publish_docs'].invoke if config["publish_on_announce"]
+ end
+ end
+
+ ############################################################
+ # Misc/Maintenance:
+
+ desc 'Run ZenTest against the package'
+ task :audit do
+ libs = %w(lib test ext).join(File::PATH_SEPARATOR)
+ sh "zentest -I=#{libs} #{spec.files.grep(/^(lib|test)/).join(' ')}"
+ end
+
+ desc 'Clean up all the extras'
+ task :clean => [ :clobber_docs, :clobber_package ] do
+ clean_globs.each do |pattern|
+ files = Dir[pattern]
+ rm_rf files unless files.empty?
+ end
+ end
+
+ desc 'Create a fresh ~/.hoerc file'
+ task :config_hoe do
+ with_config do |config, path|
+ default_config = {
+ "exclude" => /tmp$|CVS|\.svn/,
+ "publish_on_announce" => false,
+ "signing_key_file" => "~/.gem/gem-private_key.pem",
+ "signing_cert_file" => "~/.gem/gem-public_cert.pem",
+ "blogs" => [ {
+ "user" => "user",
+ "url" => "url",
+ "extra_headers" => {
+ "mt_convert_breaks" => "markdown"
+ },
+ "blog_id" => "blog_id",
+ "password"=>"password",
+ } ],
+ }
+ File.open(path, "w") do |f|
+ YAML.dump(default_config.merge(config), f)
+ end
+
+ editor = ENV['EDITOR'] || 'vi'
+ system "#{editor} #{path}" if ENV['SHOW_EDITOR'] != 'no'
+ end
+ end
+
+ desc 'Generate email announcement file.'
+ task :email do
+ require 'rubyforge'
+ subject, title, body, urls = announcement
+
+ File.open("email.txt", "w") do |mail|
+ mail.puts "Subject: [ANN] #{subject}"
+ mail.puts
+ mail.puts title
+ mail.puts
+ mail.puts urls
+ mail.puts
+ mail.puts body
+ mail.puts
+ mail.puts urls
+ end
+ puts "Created email.txt"
+ end
+
+ desc 'Post announcement to blog.'
+ task :post_blog do
+ require 'xmlrpc/client'
+
+ with_config do |config, path|
+ break unless config['blogs']
+
+ subject, title, body, urls = announcement
+ body += "\n\n#{urls}"
+
+ config['blogs'].each do |site|
+ server = XMLRPC::Client.new2(site['url'])
+ content = site['extra_headers'].merge(:title => title,
+ :description => body)
+ result = server.call('metaWeblog.newPost',
+ site['blog_id'],
+ site['user'],
+ site['password'],
+ content,
+ true)
+ end
+ end
+ end
+
+ desc 'Post announcement to rubyforge.'
+ task :post_news do
+ require 'rubyforge'
+ subject, title, body, urls = announcement
+
+ rf = RubyForge.new
+ rf.login
+ rf.post_news(rubyforge_name, subject, "#{title}\n\n#{body}")
+ puts "Posted to rubyforge"
+ end
+
+ desc 'Generate email announcement file and post to rubyforge.'
+ task :announce => [:email, :post_news, :post_blog, :publish_on_announce ]
+
+ desc "Verify the manifest"
+ task :check_manifest => :clean do
+ f = "Manifest.tmp"
+ require 'find'
+ files = []
+ with_config do |config, _|
+ exclusions = config["exclude"] || /tmp$|CVS|\.svn/
+ Find.find '.' do |path|
+ next unless File.file? path
+ next if path =~ exclusions
+ files << path[2..-1]
+ end
+ files = files.sort.join "\n"
+ File.open f, 'w' do |fp| fp.puts files end
+ system "#{DIFF} -du Manifest.txt #{f}"
+ rm f
+ end
+ end
+
+ desc 'Generate a key for signing your gems.'
+ task :generate_key do
+ email = spec.email
+ abort "No email in your gemspec" if email.nil? or email.empty?
+
+ key_file = with_config { |config, _| config['signing_key_file'] }
+ cert_file = with_config { |config, _| config['signing_cert_file'] }
+
+ if key_file.nil? or cert_file.nil? then
+ ENV['SHOW_EDITOR'] ||= 'no'
+ Rake::Task['config_hoe'].invoke
+
+ key_file = with_config { |config, _| config['signing_key_file'] }
+ cert_file = with_config { |config, _| config['signing_cert_file'] }
+ end
+
+ key_file = File.expand_path key_file
+ cert_file = File.expand_path cert_file
+
+ unless File.exist? key_file or File.exist? cert_file then
+ sh "gem cert --build #{email}"
+ mv "gem-private_key.pem", key_file, :verbose => true
+ mv "gem-public_cert.pem", cert_file, :verbose => true
+
+ puts "Installed key and certificate."
+
+ rf = RubyForge.new
+ rf.login
+
+ cert_package = "#{rubyforge_name}-certificates"
+
+ begin
+ rf.lookup 'package', cert_package
+ rescue
+ rf.create_package rubyforge_name, cert_package
+ end
+
+ begin
+ rf.lookup('release', cert_package)['certificates']
+ rf.add_file rubyforge_name, cert_package, 'certificates', cert_file
+ rescue
+ rf.add_release rubyforge_name, cert_package, 'certificates', cert_file
+ end
+
+ puts "Uploaded certificate to release \"certificates\" in package #{cert_package}"
+ else
+ puts "Keys already exist."
+ end
+ end
+
+ end # end define
+
+ def announcement # :nodoc:
+ changes = self.changes.rdoc_to_markdown
+
+ subject = "#{name} #{version} Released"
+ title = "#{name} version #{version} has been released!"
+ body = "#{description}\n\nChanges:\n\n#{changes}".rdoc_to_markdown
+ urls = Array(url).map { |s| "* <#{s.strip.rdoc_to_markdown}>" }.join("\n")
+
+ return subject, title, body, urls
+ end
+
+ def run_tests(multi=false) # :nodoc:
+ msg = multi ? :sh : :ruby
+ cmd = if test ?f, 'test/test_all.rb' then
+ "#{RUBY_FLAGS} test/test_all.rb #{FILTER}"
+ else
+ tests = test_globs.map { |g| Dir.glob(g) }.flatten << 'test/unit'
+ tests.map! {|f| %Q(require "#{f}")}
+ "#{RUBY_FLAGS} -e '#{tests.join("; ")}' #{FILTER}"
+ end
+ cmd = "multiruby #{cmd}" if multi
+ send msg, cmd
+ end
+
+ ##
+ # Reads a file at +path+ and spits out an array of the +paragraphs+ specified.
+ #
+ # changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
+ # summary, *description = p.paragraphs_of('README.txt', 3, 3..8)
+
+ def paragraphs_of(path, *paragraphs)
+ File.read(path).delete("\r").split(/\n\n+/).values_at(*paragraphs)
+ end
+end
+
+# :enddoc:
+
+class ::Rake::SshDirPublisher # :nodoc:
+ attr_reader :host, :remote_dir, :local_dir
+end
+
+class String
+ def rdoc_to_markdown
+ self.gsub(/^mailto:/, '').gsub(/^(=+)/) { "#" * $1.size }
+ end
+end
+
+if $0 == __FILE__ then
+ out = `rake -T | egrep -v "redocs|repackage|clobber|trunk"`
+ puts out.gsub(/\#/, '-').gsub(/^rake /, '# * ')
+end
diff --git a/vendor/hoe-1.5.0/test/test_hoe.rb b/vendor/hoe-1.5.0/test/test_hoe.rb
new file mode 100644
index 0000000..5aa611a
--- /dev/null
+++ b/vendor/hoe-1.5.0/test/test_hoe.rb
@@ -0,0 +1,54 @@
+
+require 'test/unit/testcase'
+require 'hoe'
+
+$rakefile = nil # shuts up a warning in rdoctask.rb
+
+class TestHoe < Test::Unit::TestCase
+ def setup
+ Rake.application.clear
+ end
+
+ ##
+ # Yes, these tests suck, but it is damn hard to test this since
+ # everything is forked out.
+
+ def test_basics
+ boring = %w(clobber_docs clobber_package gem redocs repackage)
+ expected = %w(audit
+ announce
+ check_manifest
+ clean
+ config_hoe
+ debug_gem
+ default
+ docs
+ email
+ generate_key
+ install
+ install_gem
+ multi
+ package
+ post_blog
+ post_news
+ publish_docs
+ release
+ ridocs
+ test
+ test_deps
+ uninstall)
+ expected += boring
+
+ spec = Hoe.new('blah', '1.0.0') do |h|
+ h.developer("name", "email")
+ end
+
+ assert_equal ["name"], spec.author
+ assert_equal ["email"], spec.email
+
+ tasks = Rake.application.tasks
+ public_tasks = tasks.reject { |t| t.comment.nil? }.map { |t| t.name }.sort
+
+ assert_equal expected.sort, public_tasks
+ end
+end
|
jwilger/jack-the-ripper
|
aa37c0a23ea367ff2646614e3b4c1968d438b686
|
tagging 1.3.0
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index c6332d1..9d56bf5 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,52 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.2.1'
+ VERSION = '1.3.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
08d907965c73ae1dd62e226c52cf77347eb7bc64
|
Fixed the URI module so that URI.parse does not raise an exception on URIs with square brackets ("[" and "]") in them.
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 634389d..13a5f1d 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,96 +1,97 @@
require 'uri'
+require 'jack_the_ripper/uri_fix'
require 'net/http'
require 'net/https'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def logger
self.class.logger
end
def delete
logger.debug "Deleting file #{@path}"
File.unlink( @path ) if File.exist?( @path )
end
def put( uri = nil, redirection_limit = 10 )
if redirection_limit == 0
raise RemoteError, "Too many redirects for PUT: #{uri}"
end
logger.info "PUTing file: #{@uri}"
content_type = MIME::Types.type_for( @path ).first.content_type
result = HTTPFile.send_request( uri || @uri, :put, { 'Content-Type' => content_type }, Base64.encode64( File.read( @path ) ) )
case result
when Net::HTTPSuccess
# ok
logger.info "File PUT successful"
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
put( result[ 'location' ], redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
def logger
JackTheRIPper.logger || Proc.new{ l = Logger.new( STDERR ); l.level = Logger::ERROR; l }.call
end
def get( uri, directory, basename, redirection_limit = 10 )
logger.info "GETing file: #{uri}"
if redirection_limit == 0
raise RemoteError, "Too many redirects for GET: #{uri}"
end
result = send_request( uri, :get )
case result
when Net::HTTPSuccess
logger.info "File GET successful"
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
logger.debug "File stored at #{file_path}"
new( nil, file_path )
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
def send_request( uri, method, headers = {}, body = nil )
uri = URI.parse( uri )
http = Net::HTTP.new( uri.host, uri.port )
http.use_ssl = true if uri.scheme == 'https'
http.start do |h|
logger.debug "HTTP#{ uri.scheme == 'https' ? 'S' : '' } connection started."
h.send_request( method.to_s.upcase, uri.request_uri, body, headers )
end
rescue URI::InvalidURIError => e
raise JackTheRIPper::ProcessorError, "Invalid URI for #{method}: #{uri}"
end
end
end
end
\ No newline at end of file
diff --git a/lib/jack_the_ripper/uri_fix.rb b/lib/jack_the_ripper/uri_fix.rb
new file mode 100644
index 0000000..f3cd62a
--- /dev/null
+++ b/lib/jack_the_ripper/uri_fix.rb
@@ -0,0 +1,220 @@
+# The URI::REGEXP module is being replaced, because the original
+# implementation raises a URI::InvalidURIError for URIs that have square
+# brackets in the path (i.e. http://example.com/file[1].pdf).
+#
+# The only real change is to the REGEXP::PATTERN::UNRESERVED and
+# REGEXP::PATTERN::RESERVED, but a lot of other stuff builds on these, so
+# the whole thing needs to be replaced. This is, of course, why constants
+# are evil.
+
+require 'uri'
+require 'uri/common'
+module URI
+ remove_const( 'REGEXP' )
+ module REGEXP
+ #
+ # Patterns used to parse URI's
+ #
+ module PATTERN
+ # :stopdoc:
+
+ # RFC 2396 (URI Generic Syntax)
+ # RFC 2732 (IPv6 Literal Addresses in URL's)
+ # RFC 2373 (IPv6 Addressing Architecture)
+
+ # alpha = lowalpha | upalpha
+ ALPHA = "a-zA-Z"
+ # alphanum = alpha | digit
+ ALNUM = "#{ALPHA}\\d"
+
+ # hex = digit | "A" | "B" | "C" | "D" | "E" | "F" |
+ # "a" | "b" | "c" | "d" | "e" | "f"
+ HEX = "a-fA-F\\d"
+ # escaped = "%" hex hex
+ ESCAPED = "%[#{HEX}]{2}"
+ # mark = "-" | "_" | "." | "!" | "~" | "*" | "'" |
+ # "(" | ")" | "[" | "]"
+ # unreserved = alphanum | mark
+ UNRESERVED = "-_.!~*'()#{ALNUM}\\[\\]"
+ # reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
+ # "$" | ","
+ # reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
+ # "$" | "," (RFC 2732)
+ RESERVED = ";/?:@&=+$,"
+
+ # uric = reserved | unreserved | escaped
+ URIC = "(?:[#{UNRESERVED}#{RESERVED}]|#{ESCAPED})"
+ # uric_no_slash = unreserved | escaped | ";" | "?" | ":" | "@" |
+ # "&" | "=" | "+" | "$" | ","
+ URIC_NO_SLASH = "(?:[#{UNRESERVED};?:@&=+$,]|#{ESCAPED})"
+ # query = *uric
+ QUERY = "#{URIC}*"
+ # fragment = *uric
+ FRAGMENT = "#{URIC}*"
+
+ # domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
+ DOMLABEL = "(?:[#{ALNUM}](?:[-#{ALNUM}]*[#{ALNUM}])?)"
+ # toplabel = alpha | alpha *( alphanum | "-" ) alphanum
+ TOPLABEL = "(?:[#{ALPHA}](?:[-#{ALNUM}]*[#{ALNUM}])?)"
+ # hostname = *( domainlabel "." ) toplabel [ "." ]
+ HOSTNAME = "(?:#{DOMLABEL}\\.)*#{TOPLABEL}\\.?"
+
+ # RFC 2373, APPENDIX B:
+ # IPv6address = hexpart [ ":" IPv4address ]
+ # IPv4address = 1*3DIGIT "." 1*3DIGIT "." 1*3DIGIT "." 1*3DIGIT
+ # hexpart = hexseq | hexseq "::" [ hexseq ] | "::" [ hexseq ]
+ # hexseq = hex4 *( ":" hex4)
+ # hex4 = 1*4HEXDIG
+ #
+ # XXX: This definition has a flaw. "::" + IPv4address must be
+ # allowed too. Here is a replacement.
+ #
+ # IPv4address = 1*3DIGIT "." 1*3DIGIT "." 1*3DIGIT "." 1*3DIGIT
+ IPV4ADDR = "\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"
+ # hex4 = 1*4HEXDIG
+ HEX4 = "[#{HEX}]{1,4}"
+ # lastpart = hex4 | IPv4address
+ LASTPART = "(?:#{HEX4}|#{IPV4ADDR})"
+ # hexseq1 = *( hex4 ":" ) hex4
+ HEXSEQ1 = "(?:#{HEX4}:)*#{HEX4}"
+ # hexseq2 = *( hex4 ":" ) lastpart
+ HEXSEQ2 = "(?:#{HEX4}:)*#{LASTPART}"
+ # IPv6address = hexseq2 | [ hexseq1 ] "::" [ hexseq2 ]
+ IPV6ADDR = "(?:#{HEXSEQ2}|(?:#{HEXSEQ1})?::(?:#{HEXSEQ2})?)"
+
+ # IPv6prefix = ( hexseq1 | [ hexseq1 ] "::" [ hexseq1 ] ) "/" 1*2DIGIT
+ # unused
+
+ # ipv6reference = "[" IPv6address "]" (RFC 2732)
+ IPV6REF = "\\[#{IPV6ADDR}\\]"
+
+ # host = hostname | IPv4address
+ # host = hostname | IPv4address | IPv6reference (RFC 2732)
+ HOST = "(?:#{HOSTNAME}|#{IPV4ADDR}|#{IPV6REF})"
+ # port = *digit
+ PORT = '\d*'
+ # hostport = host [ ":" port ]
+ HOSTPORT = "#{HOST}(?::#{PORT})?"
+
+ # userinfo = *( unreserved | escaped |
+ # ";" | ":" | "&" | "=" | "+" | "$" | "," )
+ USERINFO = "(?:[#{UNRESERVED};:&=+$,]|#{ESCAPED})*"
+
+ # pchar = unreserved | escaped |
+ # ":" | "@" | "&" | "=" | "+" | "$" | ","
+ PCHAR = "(?:[#{UNRESERVED}:@&=+$,]|#{ESCAPED})"
+ # param = *pchar
+ PARAM = "#{PCHAR}*"
+ # segment = *pchar *( ";" param )
+ SEGMENT = "#{PCHAR}*(?:;#{PARAM})*"
+ # path_segments = segment *( "/" segment )
+ PATH_SEGMENTS = "#{SEGMENT}(?:/#{SEGMENT})*"
+
+ # server = [ [ userinfo "@" ] hostport ]
+ SERVER = "(?:#{USERINFO}@)?#{HOSTPORT}"
+ # reg_name = 1*( unreserved | escaped | "$" | "," |
+ # ";" | ":" | "@" | "&" | "=" | "+" )
+ REG_NAME = "(?:[#{UNRESERVED}$,;+@&=+]|#{ESCAPED})+"
+ # authority = server | reg_name
+ AUTHORITY = "(?:#{SERVER}|#{REG_NAME})"
+
+ # rel_segment = 1*( unreserved | escaped |
+ # ";" | "@" | "&" | "=" | "+" | "$" | "," )
+ REL_SEGMENT = "(?:[#{UNRESERVED};@&=+$,]|#{ESCAPED})+"
+
+ # scheme = alpha *( alpha | digit | "+" | "-" | "." )
+ SCHEME = "[#{ALPHA}][-+.#{ALPHA}\\d]*"
+
+ # abs_path = "/" path_segments
+ ABS_PATH = "/#{PATH_SEGMENTS}"
+ # rel_path = rel_segment [ abs_path ]
+ REL_PATH = "#{REL_SEGMENT}(?:#{ABS_PATH})?"
+ # net_path = "//" authority [ abs_path ]
+ NET_PATH = "//#{AUTHORITY}(?:#{ABS_PATH})?"
+
+ # hier_part = ( net_path | abs_path ) [ "?" query ]
+ HIER_PART = "(?:#{NET_PATH}|#{ABS_PATH})(?:\\?(?:#{QUERY}))?"
+ # opaque_part = uric_no_slash *uric
+ OPAQUE_PART = "#{URIC_NO_SLASH}#{URIC}*"
+
+ # absoluteURI = scheme ":" ( hier_part | opaque_part )
+ ABS_URI = "#{SCHEME}:(?:#{HIER_PART}|#{OPAQUE_PART})"
+ # relativeURI = ( net_path | abs_path | rel_path ) [ "?" query ]
+ REL_URI = "(?:#{NET_PATH}|#{ABS_PATH}|#{REL_PATH})(?:\\?#{QUERY})?"
+
+ # URI-reference = [ absoluteURI | relativeURI ] [ "#" fragment ]
+ URI_REF = "(?:#{ABS_URI}|#{REL_URI})?(?:##{FRAGMENT})?"
+
+ # XXX:
+ X_ABS_URI = "
+ (#{PATTERN::SCHEME}): (?# 1: scheme)
+ (?:
+ (#{PATTERN::OPAQUE_PART}) (?# 2: opaque)
+ |
+ (?:(?:
+ //(?:
+ (?:(?:(#{PATTERN::USERINFO})@)? (?# 3: userinfo)
+ (?:(#{PATTERN::HOST})(?::(\\d*))?))?(?# 4: host, 5: port)
+ |
+ (#{PATTERN::REG_NAME}) (?# 6: registry)
+ )
+ |
+ (?!//)) (?# XXX: '//' is the mark for hostport)
+ (#{PATTERN::ABS_PATH})? (?# 7: path)
+ )(?:\\?(#{PATTERN::QUERY}))? (?# 8: query)
+ )
+ (?:\\#(#{PATTERN::FRAGMENT}))? (?# 9: fragment)
+ "
+ X_REL_URI = "
+ (?:
+ (?:
+ //
+ (?:
+ (?:(#{PATTERN::USERINFO})@)? (?# 1: userinfo)
+ (#{PATTERN::HOST})?(?::(\\d*))? (?# 2: host, 3: port)
+ |
+ (#{PATTERN::REG_NAME}) (?# 4: registry)
+ )
+ )
+ |
+ (#{PATTERN::REL_SEGMENT}) (?# 5: rel_segment)
+ )?
+ (#{PATTERN::ABS_PATH})? (?# 6: abs_path)
+ (?:\\?(#{PATTERN::QUERY}))? (?# 7: query)
+ (?:\\#(#{PATTERN::FRAGMENT}))? (?# 8: fragment)
+ "
+ # :startdoc:
+ end # PATTERN
+
+ # :stopdoc:
+
+ # for URI::split
+ ABS_URI = Regexp.new('^' + PATTERN::X_ABS_URI + '$', #'
+ Regexp::EXTENDED, 'N').freeze
+ REL_URI = Regexp.new('^' + PATTERN::X_REL_URI + '$', #'
+ Regexp::EXTENDED, 'N').freeze
+
+ # for URI::extract
+ URI_REF = Regexp.new(PATTERN::URI_REF, false, 'N').freeze
+ ABS_URI_REF = Regexp.new(PATTERN::X_ABS_URI, Regexp::EXTENDED, 'N').freeze
+ REL_URI_REF = Regexp.new(PATTERN::X_REL_URI, Regexp::EXTENDED, 'N').freeze
+
+ # for URI::escape/unescape
+ ESCAPED = Regexp.new(PATTERN::ESCAPED, false, 'N').freeze
+ UNSAFE = Regexp.new("[^#{PATTERN::UNRESERVED}#{PATTERN::RESERVED}]",
+ false, 'N').freeze
+
+ # for Generic#initialize
+ SCHEME = Regexp.new("^#{PATTERN::SCHEME}$", false, 'N').freeze #"
+ USERINFO = Regexp.new("^#{PATTERN::USERINFO}$", false, 'N').freeze #"
+ HOST = Regexp.new("^#{PATTERN::HOST}$", false, 'N').freeze #"
+ PORT = Regexp.new("^#{PATTERN::PORT}$", false, 'N').freeze #"
+ OPAQUE = Regexp.new("^#{PATTERN::OPAQUE_PART}$", false, 'N').freeze #"
+ REGISTRY = Regexp.new("^#{PATTERN::REG_NAME}$", false, 'N').freeze #"
+ ABS_PATH = Regexp.new("^#{PATTERN::ABS_PATH}$", false, 'N').freeze #"
+ REL_PATH = Regexp.new("^#{PATTERN::REL_PATH}$", false, 'N').freeze #"
+ QUERY = Regexp.new("^#{PATTERN::QUERY}$", false, 'N').freeze #"
+ FRAGMENT = Regexp.new("^#{PATTERN::FRAGMENT}$", false, 'N').freeze #"
+ # :startdoc:
+ end # REGEXP
+end
\ No newline at end of file
diff --git a/lib/jack_the_ripper_server.rb b/lib/jack_the_ripper_server.rb
index b1fe8e2..4a67ac3 100755
--- a/lib/jack_the_ripper_server.rb
+++ b/lib/jack_the_ripper_server.rb
@@ -1,72 +1,77 @@
#!/usr/bin/env ruby
unless ENV[ 'AWS_ACCESS_KEY_ID' ] && ENV[ 'AWS_SECRET_ACCESS_KEY' ]
raise "Must set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY first!"
end
Signal.trap( "INT" ) { shutdown() }
Signal.trap( "TERM" ) { shutdown() }
@keep_running = true
def shutdown
@logger.info "Starting System Shutdown"
@keep_running = false
end
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../lib' ) )
require 'jack_the_ripper'
require 'optparse'
require 'ostruct'
options = OpenStruct.new
options.access_key_id = ENV[ 'AWS_ACCESS_KEY_ID' ]
options.secret_access_key = ENV[ 'AWS_SECRET_ACCESS_KEY' ]
options.queue_name = ''
options.tmp_path = '/tmp'
options.log_file = '/var/log/jack_the_ripper.log'
+options.debug = false
opts = OptionParser.new do |opts|
opts.banner = "Usage: jack_the_ripper_server [options]"
opts.separator ""
opts.separator "Specific options:"
opts.on( '-q', '--queue SQS_QUEUE_NAME', 'REQUIRED' ) do |queue_name|
options.queue_name = queue_name
end
opts.on( '-t', '--tmpdir [PATH]', 'Path to save temporary image files. Defaults to "/tmp"' ) do |tmp_path|
options.tmp_path = tmp_path
end
opts.on( '-l', '--log [PATH]', 'Path to the log file. Defaults to "/var/log/jack_The_ripper.log"' ) do |log_file|
options.log_file = log_file
end
+ opts.on( '--debug', 'Set debug-level logging.' ) do |debug|
+ options.debug = true
+ end
+
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
end
opts.parse!( ARGV )
@logger = Logger.new( options.log_file )
[email protected] = Logger::WARN
[email protected] = Logger::WARN unless options.debug
JackTheRIPper.tmp_path = options.tmp_path
JackTheRIPper.logger = @logger
queue = JackTheRIPper.get_queue( options.access_key_id,
options.secret_access_key, options.queue_name )
@logger.info "Connected to SQS Queue #{queue.name}"
begin
while @keep_running do
if JackTheRIPper.process_next_message( queue ) == false
@logger.debug "No messages in queue. Sleeping for 60 seconds"
60.times { sleep( 1 ) if @keep_running }
end
end
exit 0
rescue SystemExit
@logger.info "Shutdown Complete"
exit 0
rescue Exception => e
JackTheRIPper.logger.fatal e.class.to_s + ': ' + e.message
JackTheRIPper.logger.fatal e.backtrace.join( "\n" )
exit 1
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 9b68b34..09cb86b 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,158 +1,158 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
f = mock
File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/redirect_me', :get ).
returns( redirect )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/result.jpg', :put, headers, Base64.encode64( data ) ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_404
http_result = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_invalid_uri
assert_raises( JackTheRIPper::ProcessorError ) do
- JackTheRIPper::HTTPFile.get( 'http://example.com/file[invalid].pdf',
+ JackTheRIPper::HTTPFile.get( 'not a url',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_other_client_error
http_result = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_uncaught_exception
JackTheRIPper::HTTPFile.stubs( :send_request ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_404
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_invalid_uri
- f = JackTheRIPper::HTTPFile.new( 'http://example.com/result[invalid].jpg',
+ f = JackTheRIPper::HTTPFile.new( 'not a url',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_other_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_uncaught_exception
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
Net::HTTP.stubs( :start ).raises( Exception.new )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
33497875569d4338834608aa170a4a53f8efea97
|
tagging 1.2.0 release
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 05d6c51..c6332d1 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,52 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.1.1'
+ VERSION = '1.2.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
93ea0584e9e829b7b92af01ce4d3788a55195551
|
Remove message from queue if conversion fails because of invalid URI for GET or PUT
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 2f69239..634389d 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,94 +1,96 @@
require 'uri'
require 'net/http'
require 'net/https'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def logger
self.class.logger
end
def delete
logger.debug "Deleting file #{@path}"
File.unlink( @path ) if File.exist?( @path )
end
def put( uri = nil, redirection_limit = 10 )
if redirection_limit == 0
raise RemoteError, "Too many redirects for PUT: #{uri}"
end
logger.info "PUTing file: #{@uri}"
content_type = MIME::Types.type_for( @path ).first.content_type
result = HTTPFile.send_request( uri || @uri, :put, { 'Content-Type' => content_type }, Base64.encode64( File.read( @path ) ) )
case result
when Net::HTTPSuccess
# ok
logger.info "File PUT successful"
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
put( result[ 'location' ], redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
def logger
JackTheRIPper.logger || Proc.new{ l = Logger.new( STDERR ); l.level = Logger::ERROR; l }.call
end
def get( uri, directory, basename, redirection_limit = 10 )
logger.info "GETing file: #{uri}"
if redirection_limit == 0
raise RemoteError, "Too many redirects for GET: #{uri}"
end
result = send_request( uri, :get )
case result
when Net::HTTPSuccess
logger.info "File GET successful"
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
logger.debug "File stored at #{file_path}"
new( nil, file_path )
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
def send_request( uri, method, headers = {}, body = nil )
uri = URI.parse( uri )
http = Net::HTTP.new( uri.host, uri.port )
http.use_ssl = true if uri.scheme == 'https'
http.start do |h|
logger.debug "HTTP#{ uri.scheme == 'https' ? 'S' : '' } connection started."
h.send_request( method.to_s.upcase, uri.request_uri, body, headers )
end
+ rescue URI::InvalidURIError => e
+ raise JackTheRIPper::ProcessorError, "Invalid URI for #{method}: #{uri}"
end
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index fcdfa7b..9b68b34 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,144 +1,158 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
f = mock
File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/redirect_me', :get ).
returns( redirect )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/result.jpg', :put, headers, Base64.encode64( data ) ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_404
http_result = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
+ def test_should_raise_processor_error_if_get_fails_due_to_invalid_uri
+ assert_raises( JackTheRIPper::ProcessorError ) do
+ JackTheRIPper::HTTPFile.get( 'http://example.com/file[invalid].pdf',
+ '/tmp', 'source' )
+ end
+ end
+
def test_should_raise_remote_error_if_get_fails_due_to_other_client_error
http_result = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
-
def test_should_raise_remote_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_uncaught_exception
JackTheRIPper::HTTPFile.stubs( :send_request ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_404
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
+ def test_should_raise_processor_error_if_put_fails_due_to_invalid_uri
+ f = JackTheRIPper::HTTPFile.new( 'http://example.com/result[invalid].jpg',
+ '/tmp/result.jpg' )
+ File.stubs( :read ).returns( ' ' )
+ assert_raises( JackTheRIPper::ProcessorError ) { f.put }
+ end
+
def test_should_raise_remote_error_if_put_fails_due_to_other_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_uncaught_exception
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
- Net::HTTP.stubs( :start ).raises( Exception )
+ File.stubs( :read ).returns( ' ' )
+ Net::HTTP.stubs( :start ).raises( Exception.new )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
c2223e1f8608c3d433896b2574921af0b4d46b90
|
tagging 1.1.0
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 848e617..05d6c51 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,52 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '1.0.1'
+ VERSION = '1.1.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
9684e1475e8254334384852303a5751b21d22382
|
set logging level to WARN
|
diff --git a/lib/jack_the_ripper_server.rb b/lib/jack_the_ripper_server.rb
index 40e9eaf..b1fe8e2 100755
--- a/lib/jack_the_ripper_server.rb
+++ b/lib/jack_the_ripper_server.rb
@@ -1,71 +1,72 @@
#!/usr/bin/env ruby
unless ENV[ 'AWS_ACCESS_KEY_ID' ] && ENV[ 'AWS_SECRET_ACCESS_KEY' ]
raise "Must set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY first!"
end
Signal.trap( "INT" ) { shutdown() }
Signal.trap( "TERM" ) { shutdown() }
@keep_running = true
def shutdown
@logger.info "Starting System Shutdown"
@keep_running = false
end
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../lib' ) )
require 'jack_the_ripper'
require 'optparse'
require 'ostruct'
options = OpenStruct.new
options.access_key_id = ENV[ 'AWS_ACCESS_KEY_ID' ]
options.secret_access_key = ENV[ 'AWS_SECRET_ACCESS_KEY' ]
options.queue_name = ''
options.tmp_path = '/tmp'
options.log_file = '/var/log/jack_the_ripper.log'
opts = OptionParser.new do |opts|
opts.banner = "Usage: jack_the_ripper_server [options]"
opts.separator ""
opts.separator "Specific options:"
opts.on( '-q', '--queue SQS_QUEUE_NAME', 'REQUIRED' ) do |queue_name|
options.queue_name = queue_name
end
opts.on( '-t', '--tmpdir [PATH]', 'Path to save temporary image files. Defaults to "/tmp"' ) do |tmp_path|
options.tmp_path = tmp_path
end
opts.on( '-l', '--log [PATH]', 'Path to the log file. Defaults to "/var/log/jack_The_ripper.log"' ) do |log_file|
options.log_file = log_file
end
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
end
opts.parse!( ARGV )
@logger = Logger.new( options.log_file )
[email protected] = Logger::WARN
JackTheRIPper.tmp_path = options.tmp_path
JackTheRIPper.logger = @logger
queue = JackTheRIPper.get_queue( options.access_key_id,
options.secret_access_key, options.queue_name )
@logger.info "Connected to SQS Queue #{queue.name}"
begin
while @keep_running do
if JackTheRIPper.process_next_message( queue ) == false
@logger.debug "No messages in queue. Sleeping for 60 seconds"
60.times { sleep( 1 ) if @keep_running }
end
end
exit 0
rescue SystemExit
@logger.info "Shutdown Complete"
exit 0
rescue Exception => e
JackTheRIPper.logger.fatal e.class.to_s + ': ' + e.message
JackTheRIPper.logger.fatal e.backtrace.join( "\n" )
exit 1
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
04715ef841c04ed061a1cb2f36cf9d873039d662
|
tagged 1.0.0
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 68f7cc8..848e617 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,52 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.2.2'
+ VERSION = '1.0.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
6ea87b8b9c566385132b2af7ae32fa2a0cd48a3e
|
removed speaking part - it's getting a bit annoying, plus it slows things down quite a bit
|
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index 53e772b..fa55599 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,45 +1,44 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
JackTheRIPper.logger.debug "Processing message"
source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
JackTheRIPper.logger.debug "Source file retrieved."
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = JackTheRIPper.tmp_path + '/result' + result_ext
cmd = "sips #{sips_args} #{source_file.path} --out #{result_path}"
output = `#{cmd}`
JackTheRIPper.logger.debug "Ran command #{cmd}"
raise ProcessorError, output unless File.exist?( result_path )
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
- `say -v "Bad News" "Image converted money in the ba-ank."`
ensure
source_file.delete unless source_file.nil?
result_file.delete unless result_file.nil?
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index 9d37ae4..e8d93e5 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,198 +1,193 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
- processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
File.expects( :exist? ).with( working_dir_path + '/result.png' ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_raise_processor_error_if_sips_process_does_not_write_result_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = stub_everything( :path => '/foo/bar.jpg' )
JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.stubs( :` ).returns( 'blah blah blah' )
File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( false )
begin
processor.process
fail "Expected ProcessorError to be raised."
rescue JackTheRIPper::ProcessorError => e
assert_equal 'blah blah blah', e.message
end
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
f1fc9e4e8602ddbd13eecbf2c1a4101a59dd5431
|
tagging 0.2.1
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 073b8c8..68f7cc8 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,52 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.2.1'
+ VERSION = '0.2.2'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
58fa5bdc75ada770e184d9d217db76febeae7269
|
fixed mistake in code where order of arguments was reversed
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index cce00da..2f69239 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,94 +1,94 @@
require 'uri'
require 'net/http'
require 'net/https'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def logger
self.class.logger
end
def delete
logger.debug "Deleting file #{@path}"
File.unlink( @path ) if File.exist?( @path )
end
def put( uri = nil, redirection_limit = 10 )
if redirection_limit == 0
raise RemoteError, "Too many redirects for PUT: #{uri}"
end
logger.info "PUTing file: #{@uri}"
content_type = MIME::Types.type_for( @path ).first.content_type
- result = HTTPFile.send_request( uri || @uri, :put, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
+ result = HTTPFile.send_request( uri || @uri, :put, { 'Content-Type' => content_type }, Base64.encode64( File.read( @path ) ) )
case result
when Net::HTTPSuccess
# ok
logger.info "File PUT successful"
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
put( result[ 'location' ], redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
def logger
JackTheRIPper.logger || Proc.new{ l = Logger.new( STDERR ); l.level = Logger::ERROR; l }.call
end
def get( uri, directory, basename, redirection_limit = 10 )
logger.info "GETing file: #{uri}"
if redirection_limit == 0
raise RemoteError, "Too many redirects for GET: #{uri}"
end
result = send_request( uri, :get )
case result
when Net::HTTPSuccess
logger.info "File GET successful"
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
logger.debug "File stored at #{file_path}"
new( nil, file_path )
when Net::HTTPRedirection
logger.info "Got redirected to #{result[ 'location' ]}"
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
def send_request( uri, method, headers = {}, body = nil )
uri = URI.parse( uri )
http = Net::HTTP.new( uri.host, uri.port )
http.use_ssl = true if uri.scheme == 'https'
http.start do |h|
logger.debug "HTTP#{ uri.scheme == 'https' ? 'S' : '' } connection started."
h.send_request( method.to_s.upcase, uri.request_uri, body, headers )
end
end
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index ec504ea..fcdfa7b 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,144 +1,144 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
f = mock
File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/redirect_me', :get ).
returns( redirect )
JackTheRIPper::HTTPFile.expects( :send_request ).
with( 'http://example.com/file.pdf', :get ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
JackTheRIPper::HTTPFile.expects( :send_request ).
- with( 'http://example.com/result.jpg', :put, Base64.encode64( data ), headers ).
+ with( 'http://example.com/result.jpg', :put, headers, Base64.encode64( data ) ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_404
http_result = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_other_client_error
http_result = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_uncaught_exception
JackTheRIPper::HTTPFile.stubs( :send_request ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPServerError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_404
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPNotFound.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_other_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPClientError.allocate
JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_uncaught_exception
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
Net::HTTP.stubs( :start ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
9b9c8385becd3e3de6842ef44d312fdd20a3110d
|
tagged release 0.2.0
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index e075482..073b8c8 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,52 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.1.1'
+ VERSION = '0.2.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
logger.debug "Message found:"
logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
820977e7f8dc2f5b04547703625f1310097a9592
|
HTTPFile now handles SSL URIs, follows up to 10 redirects on PUT.
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 4544074..e075482 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,47 +1,52 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
VERSION = '0.1.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
+ logger.debug "Checking queue for message."
message = queue.receive
return false if message.nil?
+ logger.debug "Message found:"
+ logger.debug message.body
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
+ logger.debug "Message deleted."
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
+ logger.debug "Message deleted."
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index a9a7607..cce00da 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,65 +1,94 @@
require 'uri'
require 'net/http'
+require 'net/https'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
+ def logger
+ self.class.logger
+ end
+
def delete
+ logger.debug "Deleting file #{@path}"
File.unlink( @path ) if File.exist?( @path )
end
- def put
- uri = URI.parse( @uri )
+ def put( uri = nil, redirection_limit = 10 )
+ if redirection_limit == 0
+ raise RemoteError, "Too many redirects for PUT: #{uri}"
+ end
+ logger.info "PUTing file: #{@uri}"
content_type = MIME::Types.type_for( @path ).first.content_type
- Net::HTTP.start( uri.host, uri.port ) do |http|
- result = http.send_request( 'PUT', uri.request_uri, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
- case result
- when Net::HTTPSuccess
- # ok
- when Net::HTTPNotFound
- raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
- else
- raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
- end
+ result = HTTPFile.send_request( uri || @uri, :put, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
+ case result
+ when Net::HTTPSuccess
+ # ok
+ logger.info "File PUT successful"
+ when Net::HTTPRedirection
+ logger.info "Got redirected to #{result[ 'location' ]}"
+ put( result[ 'location' ], redirection_limit - 1 )
+ when Net::HTTPNotFound
+ raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
+ else
+ raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
+ def logger
+ JackTheRIPper.logger || Proc.new{ l = Logger.new( STDERR ); l.level = Logger::ERROR; l }.call
+ end
+
def get( uri, directory, basename, redirection_limit = 10 )
+ logger.info "GETing file: #{uri}"
if redirection_limit == 0
raise RemoteError, "Too many redirects for GET: #{uri}"
end
- result = Net::HTTP.get_response( URI.parse( uri ) )
+ result = send_request( uri, :get )
case result
when Net::HTTPSuccess
+ logger.info "File GET successful"
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
+ logger.debug "File stored at #{file_path}"
new( nil, file_path )
when Net::HTTPRedirection
+ logger.info "Got redirected to #{result[ 'location' ]}"
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue ProcessorError, RemoteError => e
raise e
rescue Exception => e
raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
+
+ def send_request( uri, method, headers = {}, body = nil )
+ uri = URI.parse( uri )
+ http = Net::HTTP.new( uri.host, uri.port )
+ http.use_ssl = true if uri.scheme == 'https'
+ http.start do |h|
+ logger.debug "HTTP#{ uri.scheme == 'https' ? 'S' : '' } connection started."
+ h.send_request( method.to_s.upcase, uri.request_uri, body, headers )
+ end
+ end
end
end
end
\ No newline at end of file
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index 651e6eb..53e772b 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,41 +1,45 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
+ JackTheRIPper.logger.debug "Processing message"
source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
+ JackTheRIPper.logger.debug "Source file retrieved."
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = JackTheRIPper.tmp_path + '/result' + result_ext
- output = `sips #{sips_args} #{source_file.path} --out #{result_path}`
+ cmd = "sips #{sips_args} #{source_file.path} --out #{result_path}"
+ output = `#{cmd}`
+ JackTheRIPper.logger.debug "Ran command #{cmd}"
raise ProcessorError, output unless File.exist?( result_path )
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
`say -v "Bad News" "Image converted money in the ba-ank."`
ensure
source_file.delete unless source_file.nil?
result_file.delete unless result_file.nil?
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
\ No newline at end of file
diff --git a/lib/jack_the_ripper_server.rb b/lib/jack_the_ripper_server.rb
index af5a6d6..40e9eaf 100755
--- a/lib/jack_the_ripper_server.rb
+++ b/lib/jack_the_ripper_server.rb
@@ -1,64 +1,71 @@
#!/usr/bin/env ruby
unless ENV[ 'AWS_ACCESS_KEY_ID' ] && ENV[ 'AWS_SECRET_ACCESS_KEY' ]
raise "Must set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY first!"
end
Signal.trap( "INT" ) { shutdown() }
Signal.trap( "TERM" ) { shutdown() }
@keep_running = true
def shutdown
+ @logger.info "Starting System Shutdown"
@keep_running = false
end
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../lib' ) )
require 'jack_the_ripper'
require 'optparse'
require 'ostruct'
options = OpenStruct.new
options.access_key_id = ENV[ 'AWS_ACCESS_KEY_ID' ]
options.secret_access_key = ENV[ 'AWS_SECRET_ACCESS_KEY' ]
options.queue_name = ''
options.tmp_path = '/tmp'
options.log_file = '/var/log/jack_the_ripper.log'
opts = OptionParser.new do |opts|
opts.banner = "Usage: jack_the_ripper_server [options]"
opts.separator ""
opts.separator "Specific options:"
opts.on( '-q', '--queue SQS_QUEUE_NAME', 'REQUIRED' ) do |queue_name|
options.queue_name = queue_name
end
opts.on( '-t', '--tmpdir [PATH]', 'Path to save temporary image files. Defaults to "/tmp"' ) do |tmp_path|
options.tmp_path = tmp_path
end
opts.on( '-l', '--log [PATH]', 'Path to the log file. Defaults to "/var/log/jack_The_ripper.log"' ) do |log_file|
options.log_file = log_file
end
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
end
opts.parse!( ARGV )
+@logger = Logger.new( options.log_file )
JackTheRIPper.tmp_path = options.tmp_path
-JackTheRIPper.logger = Logger.new( options.log_file )
+JackTheRIPper.logger = @logger
queue = JackTheRIPper.get_queue( options.access_key_id,
options.secret_access_key, options.queue_name )
[email protected] "Connected to SQS Queue #{queue.name}"
begin
while @keep_running do
if JackTheRIPper.process_next_message( queue ) == false
+ @logger.debug "No messages in queue. Sleeping for 60 seconds"
60.times { sleep( 1 ) if @keep_running }
end
end
exit 0
+rescue SystemExit
+ @logger.info "Shutdown Complete"
+ exit 0
rescue Exception => e
JackTheRIPper.logger.fatal e.class.to_s + ': ' + e.message
JackTheRIPper.logger.fatal e.backtrace.join( "\n" )
exit 1
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 201ea3c..ec504ea 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,178 +1,144 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
- Net::HTTP.expects( :get_response ).
- with( URI.parse( 'http://example.com/file.pdf' ) ).
- returns( http_result )
f = mock
File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
+ JackTheRIPper::HTTPFile.expects( :send_request ).
+ with( 'http://example.com/file.pdf', :get ).
+ returns( http_result )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
- Net::HTTP.expects( :get_response ).
- with( URI.parse( 'http://example.com/redirect_me' ) ).
+ JackTheRIPper::HTTPFile.expects( :send_request ).
+ with( 'http://example.com/redirect_me', :get ).
returns( redirect )
- Net::HTTP.expects( :get_response ).
- with( URI.parse( 'http://example.com/file.pdf' ) ).
+ JackTheRIPper::HTTPFile.expects( :send_request ).
+ with( 'http://example.com/file.pdf', :get ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
- uri = URI.parse( 'http://example.com/result.jpg' )
- http_conn = mock
- Net::HTTP.expects( :start ).
- with( uri.host, uri.port ).
- yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
- http_conn.expects( :send_request ).
- with( 'PUT', uri.request_uri, Base64.encode64( data ), headers ).
+ JackTheRIPper::HTTPFile.expects( :send_request ).
+ with( 'http://example.com/result.jpg', :put, Base64.encode64( data ), headers ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
- Net::HTTP.expects( :get_response ).
- with( URI.parse( 'http://example.com/file.pdf' ) ).
- returns( http_result )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_404
http_result = Net::HTTPNotFound.allocate
- Net::HTTP.expects( :get_response ).
- with( URI.parse( 'http://example.com/file.pdf' ) ).
- returns( http_result )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_other_client_error
http_result = Net::HTTPClientError.allocate
- Net::HTTP.expects( :get_response ).
- with( URI.parse( 'http://example.com/file.pdf' ) ).
- returns( http_result )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
- Net::HTTP.expects( :get_response ).times( 10 ).
- with( URI.parse( 'http://example.com/file.pdf' ) ).
- returns( http_result )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
def test_should_raise_remote_error_if_get_fails_due_to_uncaught_exception
- Net::HTTP.stubs( :get_response ).raises( Exception )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
end
end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
- uri = URI.parse( 'http://example.com/result.jpg' )
- http_conn = mock
- Net::HTTP.expects( :start ).
- with( uri.host, uri.port ).
- yields( http_conn )
- headers = { 'Content-Type' => 'image/jpeg' }
- data = 'file contents'
- File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPServerError.allocate
- http_conn.stubs( :send_request ).returns( http_response )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_404
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
- uri = URI.parse( 'http://example.com/result.jpg' )
- http_conn = mock
- Net::HTTP.expects( :start ).
- with( uri.host, uri.port ).
- yields( http_conn )
- headers = { 'Content-Type' => 'image/jpeg' }
- data = 'file contents'
- File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPNotFound.allocate
- http_conn.stubs( :send_request ).returns( http_response )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_other_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
- uri = URI.parse( 'http://example.com/result.jpg' )
- http_conn = mock
- Net::HTTP.expects( :start ).
- with( uri.host, uri.port ).
- yields( http_conn )
- headers = { 'Content-Type' => 'image/jpeg' }
- data = 'file contents'
- File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ File.stubs( :read ).returns( ' ' )
http_response = Net::HTTPClientError.allocate
- http_conn.stubs( :send_request ).returns( http_response )
+ JackTheRIPper::HTTPFile.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_remote_error_if_put_fails_due_to_uncaught_exception
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
Net::HTTP.stubs( :start ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
end
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index bd13475..6357fa4 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -1,77 +1,77 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPper < Test::Unit::TestCase
def test_should_allow_logger_to_be_set_and_retrieved
logger = stub
assert_nil JackTheRIPper.logger
assert_nothing_raised { JackTheRIPper.logger = logger }
assert_same logger, JackTheRIPper.logger
end
def test_should_process_one_message_from_the_queue_then_delete_the_message_and_return_true
queue = mock
message = mock
body = YAML::dump( { :foo => 'bar' } )
queue.expects( :receive ).returns( message )
message.expects( :delete )
message.stubs( :body ).returns( body )
processor = mock
JackTheRIPper::Processor.expects( :new ).with( { :foo => 'bar' } ).
returns( processor )
processor.expects( :process )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
def test_should_not_delete_message_from_queue_if_conversion_fails_due_to_remote_error
- logger = mock
+ logger = stub_everything
JackTheRIPper.logger = logger
message = mock
queue = stub_everything( :receive => message )
message.stubs( :body ).returns( 'foo' )
message.expects( :delete ).never
processor = stub
JackTheRIPper::Processor.stubs( :new ).returns( processor )
processor.stubs( :process ).raises( JackTheRIPper::RemoteError.new( 'blah' ) )
logger.expects( :warn ).with( 'Remote Error: blah' )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
def test_should_delete_message_from_queue_if_conversion_fails_due_to_processor_error
- logger = mock
+ logger = stub_everything
JackTheRIPper.logger = logger
message = mock
queue = stub_everything( :receive => message )
message.stubs( :body ).returns( 'foo' )
message.expects( :delete )
processor = stub
JackTheRIPper::Processor.stubs( :new ).returns( processor )
processor.stubs( :process ).raises( JackTheRIPper::ProcessorError.new( 'blah' ) )
logger.expects( :error ).with( 'Processor Error: blah' )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
def test_should_return_false_if_there_are_no_messages_retrieved
queue = mock
queue.expects( :receive ).returns( nil )
assert_equal false, JackTheRIPper.process_next_message( queue )
end
def test_should_instantiate_queue_and_return_it
sqs = mock
queue = stub
sqs.expects( :queue ).with( 'myqueue', true, 240 ).returns( queue )
RightAws::Sqs.expects( :new ).with( 'myaccesskeyid', 'mysecretaccesskey' ).
returns( sqs )
assert_same queue, JackTheRIPper.get_queue( 'myaccesskeyid',
'mysecretaccesskey', 'myqueue' )
end
def test_should_have_tmp_path_attribute
assert_equal '/tmp', JackTheRIPper.tmp_path
assert_nothing_raised { JackTheRIPper.tmp_path = '/foo/bar' }
assert_equal '/foo/bar', JackTheRIPper.tmp_path
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
f87a8f9d4a774a92415f678b332fee17f37ecfa8
|
tagging release 0.1.0
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 0d35a2e..4544074 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,47 +1,47 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.1.0'
+ VERSION = '0.1.1'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
9326863559746a91fef57621d7150bbfeff184e1
|
Tweaked error handling. Only a 404 on source GET, 404 on result PUT or failure of the system conversion command should cause the message to be deleted from the queue.
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 8151b33..a9a7607 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,65 +1,65 @@
require 'uri'
require 'net/http'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def delete
File.unlink( @path ) if File.exist?( @path )
end
def put
uri = URI.parse( @uri )
content_type = MIME::Types.type_for( @path ).first.content_type
Net::HTTP.start( uri.host, uri.port ) do |http|
result = http.send_request( 'PUT', uri.request_uri, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
case result
when Net::HTTPSuccess
# ok
- when Net::HTTPClientError
+ when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
end
- rescue Timeout::Error => e
- raise RemoteError, "Got Timeout Error for PUT: #{@uri}"
- rescue Errno::ECONNREFUSED => e
- raise RemoteError, "Connection Refused during PUT: #{@uri}"
+ rescue ProcessorError, RemoteError => e
+ raise e
+ rescue Exception => e
+ raise RemoteError, "Exception during GET: #{@uri} - #{e.class}: #{e.message}"
end
class << self
def get( uri, directory, basename, redirection_limit = 10 )
if redirection_limit == 0
- raise ProcessorError, "Too many redirects for GET: #{uri}"
+ raise RemoteError, "Too many redirects for GET: #{uri}"
end
result = Net::HTTP.get_response( URI.parse( uri ) )
case result
when Net::HTTPSuccess
file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
new( nil, file_path )
when Net::HTTPRedirection
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
- when Net::HTTPClientError
+ when Net::HTTPNotFound
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
- rescue Timeout::Error => e
- raise RemoteError, "Got Timeout Error for GET: #{uri}"
- rescue Errno::ECONNREFUSED => e
- raise RemoteError, "Connection Refused during GET: #{uri}"
+ rescue ProcessorError, RemoteError => e
+ raise e
+ rescue Exception => e
+ raise RemoteError, "Exception during GET: #{uri} - #{e.class}: #{e.message}"
end
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 0c29fab..201ea3c 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,150 +1,178 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
f = mock
File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/redirect_me' ) ).
returns( redirect )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
http_conn.expects( :send_request ).
with( 'PUT', uri.request_uri, Base64.encode64( data ), headers ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
- def test_should_raise_processor_error_if_get_fails_due_to_client_error
- http_result = Net::HTTPClientError.allocate
+ def test_should_raise_processor_error_if_get_fails_due_to_404
+ http_result = Net::HTTPNotFound.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
- def test_should_raise_processor_error_if_get_redirects_too_many_times
+ def test_should_raise_remote_error_if_get_fails_due_to_other_client_error
+ http_result = Net::HTTPClientError.allocate
+ Net::HTTP.expects( :get_response ).
+ with( URI.parse( 'http://example.com/file.pdf' ) ).
+ returns( http_result )
+ assert_raises( JackTheRIPper::RemoteError ) do
+ JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
+ '/tmp', 'source' )
+ end
+ end
+
+
+ def test_should_raise_remote_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
Net::HTTP.expects( :get_response ).times( 10 ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
- assert_raises( JackTheRIPper::ProcessorError ) do
+ assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
- def test_should_raise_remote_error_if_get_fails_due_to_connection_refused
- Net::HTTP.stubs( :get_response ).raises( Errno::ECONNREFUSED )
+ def test_should_raise_remote_error_if_get_fails_due_to_uncaught_exception
+ Net::HTTP.stubs( :get_response ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
end
end
-
+
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPServerError.allocate
http_conn.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
- def test_should_raise_processor_error_if_put_fails_due_to_client_error
+ def test_should_raise_processor_error_if_put_fails_due_to_404
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
- http_response = Net::HTTPClientError.allocate
+ http_response = Net::HTTPNotFound.allocate
http_conn.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
- def test_should_raise_remote_error_if_connection_refused_during_put
+ def test_should_raise_remote_error_if_put_fails_due_to_other_client_error
+ f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
+ '/tmp/result.jpg' )
+ uri = URI.parse( 'http://example.com/result.jpg' )
+ http_conn = mock
+ Net::HTTP.expects( :start ).
+ with( uri.host, uri.port ).
+ yields( http_conn )
+ headers = { 'Content-Type' => 'image/jpeg' }
+ data = 'file contents'
+ File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ http_response = Net::HTTPClientError.allocate
+ http_conn.stubs( :send_request ).returns( http_response )
+ assert_raises( JackTheRIPper::RemoteError ) { f.put }
+ end
+
+ def test_should_raise_remote_error_if_put_fails_due_to_uncaught_exception
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
- Net::HTTP.stubs( :start ).raises( Errno::ECONNREFUSED )
+ Net::HTTP.stubs( :start ).raises( Exception )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
8b8893dae476fb67c44826ff77f350eec1d19d2d
|
Don't die if we get a connection refused when downloading source file or putting result.
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index ef3ca9f..8151b33 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,62 +1,65 @@
require 'uri'
require 'net/http'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def delete
File.unlink( @path ) if File.exist?( @path )
end
def put
uri = URI.parse( @uri )
content_type = MIME::Types.type_for( @path ).first.content_type
Net::HTTP.start( uri.host, uri.port ) do |http|
result = http.send_request( 'PUT', uri.request_uri, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
case result
when Net::HTTPSuccess
# ok
when Net::HTTPClientError
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
end
rescue Timeout::Error => e
- raise RemoteError, "Got Timeout Error for PUT: #{uri}"
+ raise RemoteError, "Got Timeout Error for PUT: #{@uri}"
+ rescue Errno::ECONNREFUSED => e
+ raise RemoteError, "Connection Refused during PUT: #{@uri}"
end
class << self
def get( uri, directory, basename, redirection_limit = 10 )
if redirection_limit == 0
raise ProcessorError, "Too many redirects for GET: #{uri}"
end
result = Net::HTTP.get_response( URI.parse( uri ) )
case result
when Net::HTTPSuccess
- ext = MIME::Types[ result.content_type ].first.extensions.first
- file_path = directory + '/' + basename + '.' + ext
+ file_path = directory + '/' + basename
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
new( nil, file_path )
when Net::HTTPRedirection
get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPClientError
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue Timeout::Error => e
raise RemoteError, "Got Timeout Error for GET: #{uri}"
+ rescue Errno::ECONNREFUSED => e
+ raise RemoteError, "Connection Refused during GET: #{uri}"
end
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 79f3481..0c29fab 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,136 +1,150 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
f = mock
- File.expects( :open ).with( '/tmp/source.pdf', 'w' ).yields( f )
+ File.expects( :open ).with( '/tmp/source', 'w' ).yields( f )
f.expects( :write ).with( 'file contents' )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
- assert_equal '/tmp/source.pdf', file.path
+ assert_equal '/tmp/source', file.path
end
def test_should_get_file_via_redirect
redirect = Net::HTTPRedirection.allocate
redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/redirect_me' ) ).
returns( redirect )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
f = stub_everything
File.stubs( :open ).yields( f )
JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
http_conn.expects( :send_request ).
with( 'PUT', uri.request_uri, Base64.encode64( data ), headers ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_client_error
http_result = Net::HTTPClientError.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_redirects_too_many_times
http_result = Net::HTTPRedirection.allocate
http_result.expects( :[] ).at_least_once.
with( 'location' ).returns( 'http://example.com/file.pdf' )
Net::HTTP.expects( :get_response ).times( 10 ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source', 10 )
end
end
+
+ def test_should_raise_remote_error_if_get_fails_due_to_connection_refused
+ Net::HTTP.stubs( :get_response ).raises( Errno::ECONNREFUSED )
+ assert_raises( JackTheRIPper::RemoteError ) do
+ JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf', '/tmp', 'source' )
+ end
+ end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPServerError.allocate
http_conn.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPClientError.allocate
http_conn.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
+
+ def test_should_raise_remote_error_if_connection_refused_during_put
+ f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
+ '/tmp/result.jpg' )
+ Net::HTTP.stubs( :start ).raises( Errno::ECONNREFUSED )
+ assert_raises( JackTheRIPper::RemoteError ) { f.put }
+ end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
65346c4dd501d2e66c2623d55bdd7cf07a8070bb
|
Follow redirection up to 10 times in HTTPFile.get
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index fbba643..ef3ca9f 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,57 +1,62 @@
require 'uri'
require 'net/http'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def delete
File.unlink( @path ) if File.exist?( @path )
end
def put
uri = URI.parse( @uri )
content_type = MIME::Types.type_for( @path ).first.content_type
Net::HTTP.start( uri.host, uri.port ) do |http|
result = http.send_request( 'PUT', uri.request_uri, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
case result
when Net::HTTPSuccess
# ok
when Net::HTTPClientError
raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
end
end
rescue Timeout::Error => e
raise RemoteError, "Got Timeout Error for PUT: #{uri}"
end
class << self
- def get( uri, directory, basename )
+ def get( uri, directory, basename, redirection_limit = 10 )
+ if redirection_limit == 0
+ raise ProcessorError, "Too many redirects for GET: #{uri}"
+ end
result = Net::HTTP.get_response( URI.parse( uri ) )
case result
when Net::HTTPSuccess
ext = MIME::Types[ result.content_type ].first.extensions.first
file_path = directory + '/' + basename + '.' + ext
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
new( nil, file_path )
+ when Net::HTTPRedirection
+ get( result[ 'location' ], directory, basename, redirection_limit - 1 )
when Net::HTTPClientError
raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
rescue Timeout::Error => e
raise RemoteError, "Got Timeout Error for GET: #{uri}"
end
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 525d3ee..79f3481 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,106 +1,136 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
- file = mock
- File.expects( :open ).with( '/tmp/source.pdf', 'w' ).yields( file )
- file.expects( :write ).with( 'file contents' )
+ f = mock
+ File.expects( :open ).with( '/tmp/source.pdf', 'w' ).yields( f )
+ f.expects( :write ).with( 'file contents' )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source.pdf', file.path
end
+ def test_should_get_file_via_redirect
+ redirect = Net::HTTPRedirection.allocate
+ redirect.stubs( :[] ).with( 'location' ).returns( 'http://example.com/file.pdf' )
+ http_result = Net::HTTPSuccess.allocate
+ http_result.stubs( :content_type ).returns( 'application/pdf' )
+ http_result.stubs( :read_body ).returns( 'file contents' )
+ Net::HTTP.expects( :get_response ).
+ with( URI.parse( 'http://example.com/redirect_me' ) ).
+ returns( redirect )
+ Net::HTTP.expects( :get_response ).
+ with( URI.parse( 'http://example.com/file.pdf' ) ).
+ returns( http_result )
+ f = stub_everything
+ File.stubs( :open ).yields( f )
+ JackTheRIPper::HTTPFile.get( 'http://example.com/redirect_me', '/tmp', 'source' )
+ end
+
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_not_raise_exception_on_delete_if_file_does_not_exist
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
assert_nothing_raised { f.delete }
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
http_conn.expects( :send_request ).
with( 'PUT', uri.request_uri, Base64.encode64( data ), headers ).
returns( http_response )
f.put
end
def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
def test_should_raise_processor_error_if_get_fails_due_to_client_error
http_result = Net::HTTPClientError.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
assert_raises( JackTheRIPper::ProcessorError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
+
+ def test_should_raise_processor_error_if_get_redirects_too_many_times
+ http_result = Net::HTTPRedirection.allocate
+ http_result.expects( :[] ).at_least_once.
+ with( 'location' ).returns( 'http://example.com/file.pdf' )
+ Net::HTTP.expects( :get_response ).times( 10 ).
+ with( URI.parse( 'http://example.com/file.pdf' ) ).
+ returns( http_result )
+ assert_raises( JackTheRIPper::ProcessorError ) do
+ JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
+ '/tmp', 'source', 10 )
+ end
+ end
def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPServerError.allocate
http_conn.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::RemoteError ) { f.put }
end
def test_should_raise_processor_error_if_put_fails_due_to_client_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPClientError.allocate
http_conn.stubs( :send_request ).returns( http_response )
assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
f06b606748c09f74a42eb9286ffdb629a8b0e02f
|
removed existing release tags, should not have made them yet
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 2b9be67..0d35a2e 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,47 +1,47 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.1.2'
+ VERSION = '0.1.0'
class RemoteError < StandardError; end
class ProcessorError < StandardError; end
class << self
attr_accessor :logger
def tmp_path
@tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
rescue RemoteError => e
logger.warn( 'Remote Error: ' + e.message )
true
rescue ProcessorError => e
logger.error( 'Processor Error: ' + e.message )
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
c13c8ad60ab283f4ed64e7841057d3b698745cf6
|
added a bunch of error handling
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 3559c0a..2b9be67 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,34 +1,47 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
VERSION = '0.1.2'
+
+ class RemoteError < StandardError; end
+ class ProcessorError < StandardError; end
+
class << self
+ attr_accessor :logger
+
def tmp_path
- @tmp_path || '/tmp'
+ @tmp_path ||= '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
+ rescue RemoteError => e
+ logger.warn( 'Remote Error: ' + e.message )
+ true
+ rescue ProcessorError => e
+ logger.error( 'Processor Error: ' + e.message )
+ message.delete
+ true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 0aa2be6..fbba643 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,43 +1,57 @@
require 'uri'
require 'net/http'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def delete
- File.unlink( @path )
+ File.unlink( @path ) if File.exist?( @path )
end
def put
uri = URI.parse( @uri )
content_type = MIME::Types.type_for( @path ).first.content_type
Net::HTTP.start( uri.host, uri.port ) do |http|
- result = http.send_request( 'PUT', uri.request_uri, File.read( @path ), { 'Content-Type' => content_type } )
- result.error! unless result.kind_of?( Net::HTTPSuccess )
+ result = http.send_request( 'PUT', uri.request_uri, Base64.encode64( File.read( @path ) ), { 'Content-Type' => content_type } )
+ case result
+ when Net::HTTPSuccess
+ # ok
+ when Net::HTTPClientError
+ raise ProcessorError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
+ else
+ raise RemoteError, "Got #{result.code} #{result.message} for PUT: #{@uri}"
+ end
end
+ rescue Timeout::Error => e
+ raise RemoteError, "Got Timeout Error for PUT: #{uri}"
end
class << self
def get( uri, directory, basename )
result = Net::HTTP.get_response( URI.parse( uri ) )
- if result.kind_of?( Net::HTTPSuccess )
+ case result
+ when Net::HTTPSuccess
ext = MIME::Types[ result.content_type ].first.extensions.first
file_path = directory + '/' + basename + '.' + ext
File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
new( nil, file_path )
+ when Net::HTTPClientError
+ raise ProcessorError, "Got #{result.code} #{result.message} for GET: #{uri}"
else
- result.error!
+ raise RemoteError, "Got #{result.code} #{result.message} for GET: #{uri}"
end
+ rescue Timeout::Error => e
+ raise RemoteError, "Got Timeout Error for GET: #{uri}"
end
end
end
end
\ No newline at end of file
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index a51a5d2..651e6eb 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,42 +1,41 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
- class ConversionFailed < StandardError; end
-
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = JackTheRIPper.tmp_path + '/result' + result_ext
output = `sips #{sips_args} #{source_file.path} --out #{result_path}`
- raise ConversionFailed, output unless $?.success?
- source_file.delete
+ raise ProcessorError, output unless File.exist?( result_path )
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
- result_file.delete
- `say "Image converted. Money in the bank!"`
+ `say -v "Bad News" "Image converted money in the ba-ank."`
+ ensure
+ source_file.delete unless source_file.nil?
+ result_file.delete unless result_file.nil?
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
\ No newline at end of file
diff --git a/lib/jack_the_ripper_server.rb b/lib/jack_the_ripper_server.rb
index 552e05f..af5a6d6 100755
--- a/lib/jack_the_ripper_server.rb
+++ b/lib/jack_the_ripper_server.rb
@@ -1,65 +1,64 @@
#!/usr/bin/env ruby
+unless ENV[ 'AWS_ACCESS_KEY_ID' ] && ENV[ 'AWS_SECRET_ACCESS_KEY' ]
+ raise "Must set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY first!"
+end
Signal.trap( "INT" ) { shutdown() }
Signal.trap( "TERM" ) { shutdown() }
@keep_running = true
def shutdown
@keep_running = false
end
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../lib' ) )
require 'jack_the_ripper'
require 'optparse'
require 'ostruct'
options = OpenStruct.new
-options.access_key_id = ''
-options.secret_access_key = ''
+options.access_key_id = ENV[ 'AWS_ACCESS_KEY_ID' ]
+options.secret_access_key = ENV[ 'AWS_SECRET_ACCESS_KEY' ]
options.queue_name = ''
options.tmp_path = '/tmp'
+options.log_file = '/var/log/jack_the_ripper.log'
opts = OptionParser.new do |opts|
opts.banner = "Usage: jack_the_ripper_server [options]"
opts.separator ""
opts.separator "Specific options:"
-
- opts.on( '-a', '--access_key_id AWS_ACCESS_KEY_ID', 'REQUIRED' ) do |access_key_id|
- options.access_key_id = access_key_id
- end
-
- opts.on( '-s', '--secret_access_key AWS_SECRET_ACCESS_KEY', 'REQUIRED' ) do |secret_access_key|
- options.secret_access_key = secret_access_key
- end
-
+
opts.on( '-q', '--queue SQS_QUEUE_NAME', 'REQUIRED' ) do |queue_name|
options.queue_name = queue_name
end
- opts.on( '-t', '--tmpdir [TMPDIR]', 'Path to save temporary image files. Defaults to "/tmp"' ) do |tmp_path|
+ opts.on( '-t', '--tmpdir [PATH]', 'Path to save temporary image files. Defaults to "/tmp"' ) do |tmp_path|
options.tmp_path = tmp_path
end
+ opts.on( '-l', '--log [PATH]', 'Path to the log file. Defaults to "/var/log/jack_The_ripper.log"' ) do |log_file|
+ options.log_file = log_file
+ end
+
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
end
opts.parse!( ARGV )
JackTheRIPper.tmp_path = options.tmp_path
+JackTheRIPper.logger = Logger.new( options.log_file )
queue = JackTheRIPper.get_queue( options.access_key_id,
options.secret_access_key, options.queue_name )
-
-while @keep_running do
- begin
+begin
+ while @keep_running do
if JackTheRIPper.process_next_message( queue ) == false
60.times { sleep( 1 ) if @keep_running }
end
- rescue Exception => e
- STDERR.puts( "An Exception Occured!" )
- STDERR.puts( e.to_s )
- STDERR.puts( e.message )
- STDERR.puts( e.backtrace.join( "\n" ) )
- STDERR.puts( "\n\n" )
end
+ exit 0
+rescue Exception => e
+ JackTheRIPper.logger.fatal e.class.to_s + ': ' + e.message
+ JackTheRIPper.logger.fatal e.backtrace.join( "\n" )
+ exit 1
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index f5a8278..525d3ee 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,76 +1,106 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
-require 'jack_the_ripper/http_file'
+require 'jack_the_ripper'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
http_result = Net::HTTPSuccess.allocate
http_result.stubs( :content_type ).returns( 'application/pdf' )
http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
file = mock
File.expects( :open ).with( '/tmp/source.pdf', 'w' ).yields( file )
file.expects( :write ).with( 'file contents' )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source.pdf', file.path
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
+ File.expects( :exist? ).with( '/tmp/some_file' ).returns( true )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
+ def test_should_not_raise_exception_on_delete_if_file_does_not_exist
+ f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
+ File.expects( :exist? ).with( '/tmp/some_file' ).returns( false )
+ assert_nothing_raised { f.delete }
+ end
+
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
http_conn.expects( :send_request ).
- with( 'PUT', uri.request_uri, data, headers ).
+ with( 'PUT', uri.request_uri, Base64.encode64( data ), headers ).
returns( http_response )
f.put
end
- def test_should_raise_exception_if_get_fails
+ def test_should_raise_remote_error_if_get_fails_due_to_server_error
http_result = Net::HTTPServerError.allocate
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
- http_result.expects( :error! ).raises( Net::HTTPServerException.allocate )
- assert_raises( Net::HTTPServerException ) do
+ assert_raises( JackTheRIPper::RemoteError ) do
JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
end
end
- def test_should_raise_exception_if_put_fails
+ def test_should_raise_processor_error_if_get_fails_due_to_client_error
+ http_result = Net::HTTPClientError.allocate
+ Net::HTTP.expects( :get_response ).
+ with( URI.parse( 'http://example.com/file.pdf' ) ).
+ returns( http_result )
+ assert_raises( JackTheRIPper::ProcessorError ) do
+ JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
+ '/tmp', 'source' )
+ end
+ end
+
+ def test_should_raise_remote_error_if_put_fails_due_to_server_error
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPServerError.allocate
- http_conn.expects( :send_request ).
- with( 'PUT', uri.request_uri, data, headers ).
- returns( http_response )
- http_response.expects( :error! ).raises( Net::HTTPServerException.allocate )
- assert_raises( Net::HTTPServerException ) { f.put }
+ http_conn.stubs( :send_request ).returns( http_response )
+ assert_raises( JackTheRIPper::RemoteError ) { f.put }
+ end
+
+ def test_should_raise_processor_error_if_put_fails_due_to_client_error
+ f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
+ '/tmp/result.jpg' )
+ uri = URI.parse( 'http://example.com/result.jpg' )
+ http_conn = mock
+ Net::HTTP.expects( :start ).
+ with( uri.host, uri.port ).
+ yields( http_conn )
+ headers = { 'Content-Type' => 'image/jpeg' }
+ data = 'file contents'
+ File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ http_response = Net::HTTPClientError.allocate
+ http_conn.stubs( :send_request ).returns( http_response )
+ assert_raises( JackTheRIPper::ProcessorError ) { f.put }
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index 934991d..9d37ae4 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,196 +1,198 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
-require 'jack_the_ripper/processor'
+require 'jack_the_ripper'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
- $?.stubs( :success? ).returns( true )
+ processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
- processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
- $?.stubs( :success? ).returns( true )
+ processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
+ File.expects( :exist? ).with( working_dir_path + '/result.png' ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
- $?.stubs( :success? ).returns( true )
+ processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
- $?.stubs( :success? ).returns( true )
+ processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
- processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
- $?.stubs( :success? ).returns( true )
+ processor.expects( :` ).with( "say -v \"Bad News\" \"Image converted money in the ba-ank.\"" )
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( true )
processor.process
end
- def test_should_raise_conversion_failed_exception_if_sips_process_exits_uncleanly
+ def test_should_raise_processor_error_if_sips_process_does_not_write_result_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = stub_everything( :path => '/foo/bar.jpg' )
- result_file = stub_everything
-
JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
- JackTheRIPper::HTTPFile.stubs( :new ).returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.stubs( :` ).returns( 'blah blah blah' )
- $?.stubs( :success? ).returns( false )
-
- assert_raises( JackTheRIPper::Processor::ConversionFailed ) { processor.process }
+ File.expects( :exist? ).with( working_dir_path + '/result.jpg' ).returns( false )
+
+ begin
+ processor.process
+ fail "Expected ProcessorError to be raised."
+ rescue JackTheRIPper::ProcessorError => e
+ assert_equal 'blah blah blah', e.message
+ end
end
end
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index 7c56e41..bd13475 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -1,42 +1,77 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPper < Test::Unit::TestCase
+ def test_should_allow_logger_to_be_set_and_retrieved
+ logger = stub
+ assert_nil JackTheRIPper.logger
+ assert_nothing_raised { JackTheRIPper.logger = logger }
+ assert_same logger, JackTheRIPper.logger
+ end
+
def test_should_process_one_message_from_the_queue_then_delete_the_message_and_return_true
queue = mock
message = mock
body = YAML::dump( { :foo => 'bar' } )
queue.expects( :receive ).returns( message )
message.expects( :delete )
message.stubs( :body ).returns( body )
processor = mock
JackTheRIPper::Processor.expects( :new ).with( { :foo => 'bar' } ).
returns( processor )
processor.expects( :process )
assert_equal true, JackTheRIPper.process_next_message( queue )
end
+ def test_should_not_delete_message_from_queue_if_conversion_fails_due_to_remote_error
+ logger = mock
+ JackTheRIPper.logger = logger
+ message = mock
+ queue = stub_everything( :receive => message )
+ message.stubs( :body ).returns( 'foo' )
+ message.expects( :delete ).never
+ processor = stub
+ JackTheRIPper::Processor.stubs( :new ).returns( processor )
+ processor.stubs( :process ).raises( JackTheRIPper::RemoteError.new( 'blah' ) )
+ logger.expects( :warn ).with( 'Remote Error: blah' )
+ assert_equal true, JackTheRIPper.process_next_message( queue )
+ end
+
+ def test_should_delete_message_from_queue_if_conversion_fails_due_to_processor_error
+ logger = mock
+ JackTheRIPper.logger = logger
+ message = mock
+ queue = stub_everything( :receive => message )
+ message.stubs( :body ).returns( 'foo' )
+ message.expects( :delete )
+ processor = stub
+ JackTheRIPper::Processor.stubs( :new ).returns( processor )
+ processor.stubs( :process ).raises( JackTheRIPper::ProcessorError.new( 'blah' ) )
+ logger.expects( :error ).with( 'Processor Error: blah' )
+ assert_equal true, JackTheRIPper.process_next_message( queue )
+ end
+
def test_should_return_false_if_there_are_no_messages_retrieved
queue = mock
queue.expects( :receive ).returns( nil )
assert_equal false, JackTheRIPper.process_next_message( queue )
end
def test_should_instantiate_queue_and_return_it
sqs = mock
queue = stub
sqs.expects( :queue ).with( 'myqueue', true, 240 ).returns( queue )
RightAws::Sqs.expects( :new ).with( 'myaccesskeyid', 'mysecretaccesskey' ).
returns( sqs )
assert_same queue, JackTheRIPper.get_queue( 'myaccesskeyid',
'mysecretaccesskey', 'myqueue' )
end
def test_should_have_tmp_path_attribute
assert_equal '/tmp', JackTheRIPper.tmp_path
assert_nothing_raised { JackTheRIPper.tmp_path = '/foo/bar' }
assert_equal '/foo/bar', JackTheRIPper.tmp_path
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
29c5dd4510ec6bd92b2e147791363753ad8b0ed1
|
set next version # to 0.1.2
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 5c3e80d..3559c0a 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,34 +1,34 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.1.1'
+ VERSION = '0.1.2'
class << self
def tmp_path
@tmp_path || '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
61a523fd21a105a9fde1cd1b28a748e34e84afb1
|
set next version number to 0.1.1
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 23b6dc0..5c3e80d 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,34 +1,34 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.2.0'
+ VERSION = '0.1.1'
class << self
def tmp_path
@tmp_path || '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
fbabe2ed46298516b450a7b736b08cf496f5dea1
|
HTTPFile will raise exceptions if get or put fails.
|
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 72052d4..0aa2be6 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,38 +1,43 @@
require 'uri'
require 'net/http'
require 'rubygems'
gem 'mime-types'
require 'mime/types'
module JackTheRIPper
class HTTPFile
attr_reader :path
def initialize( uri, path )
@uri = uri
@path = path
end
def delete
File.unlink( @path )
end
def put
uri = URI.parse( @uri )
content_type = MIME::Types.type_for( @path ).first.content_type
Net::HTTP.start( uri.host, uri.port ) do |http|
- http.send_request( 'PUT', uri.request_uri, File.read( @path ), { 'Content-Type' => content_type } )
+ result = http.send_request( 'PUT', uri.request_uri, File.read( @path ), { 'Content-Type' => content_type } )
+ result.error! unless result.kind_of?( Net::HTTPSuccess )
end
end
class << self
def get( uri, directory, basename )
result = Net::HTTP.get_response( URI.parse( uri ) )
- ext = MIME::Types[ result.content_type ].first.extensions.first
- file_path = directory + '/' + basename + '.' + ext
- File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
- new( nil, file_path )
+ if result.kind_of?( Net::HTTPSuccess )
+ ext = MIME::Types[ result.content_type ].first.extensions.first
+ file_path = directory + '/' + basename + '.' + ext
+ File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
+ new( nil, file_path )
+ else
+ result.error!
+ end
end
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
index 54ddce1..f5a8278 100644
--- a/test/jack_the_ripper/test_http_file.rb
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -1,44 +1,76 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper/http_file'
class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
- http_result = stub( :content_type => 'application/pdf',
- :read_body => 'file contents' )
+ http_result = Net::HTTPSuccess.allocate
+ http_result.stubs( :content_type ).returns( 'application/pdf' )
+ http_result.stubs( :read_body ).returns( 'file contents' )
Net::HTTP.expects( :get_response ).
with( URI.parse( 'http://example.com/file.pdf' ) ).
returns( http_result )
file = mock
File.expects( :open ).with( '/tmp/source.pdf', 'w' ).yields( file )
file.expects( :write ).with( 'file contents' )
file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
'/tmp', 'source' )
assert_equal '/tmp/source.pdf', file.path
end
def test_should_delete_file_from_path
f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
File.expects( :unlink ).with( '/tmp/some_file' )
f.delete
end
def test_should_upload_file_to_specified_uri_via_put
f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
'/tmp/result.jpg' )
uri = URI.parse( 'http://example.com/result.jpg' )
http_conn = mock
Net::HTTP.expects( :start ).
with( uri.host, uri.port ).
yields( http_conn )
headers = { 'Content-Type' => 'image/jpeg' }
data = 'file contents'
File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
http_response = Net::HTTPSuccess.allocate
http_conn.expects( :send_request ).
with( 'PUT', uri.request_uri, data, headers ).
returns( http_response )
f.put
end
+
+ def test_should_raise_exception_if_get_fails
+ http_result = Net::HTTPServerError.allocate
+ Net::HTTP.expects( :get_response ).
+ with( URI.parse( 'http://example.com/file.pdf' ) ).
+ returns( http_result )
+ http_result.expects( :error! ).raises( Net::HTTPServerException.allocate )
+ assert_raises( Net::HTTPServerException ) do
+ JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
+ '/tmp', 'source' )
+ end
+ end
+
+ def test_should_raise_exception_if_put_fails
+ f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
+ '/tmp/result.jpg' )
+ uri = URI.parse( 'http://example.com/result.jpg' )
+ http_conn = mock
+ Net::HTTP.expects( :start ).
+ with( uri.host, uri.port ).
+ yields( http_conn )
+ headers = { 'Content-Type' => 'image/jpeg' }
+ data = 'file contents'
+ File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ http_response = Net::HTTPServerError.allocate
+ http_conn.expects( :send_request ).
+ with( 'PUT', uri.request_uri, data, headers ).
+ returns( http_response )
+ http_response.expects( :error! ).raises( Net::HTTPServerException.allocate )
+ assert_raises( Net::HTTPServerException ) { f.put }
+ end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
edfc7a9e5ec328331831244d38de0a5e02521c45
|
Fixed that Processor silently ignored failed conversion.
|
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index d8960fe..a51a5d2 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,39 +1,42 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
+ class ConversionFailed < StandardError; end
+
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = JackTheRIPper.tmp_path + '/result' + result_ext
- `sips #{sips_args} #{source_file.path} --out #{result_path}`
+ output = `sips #{sips_args} #{source_file.path} --out #{result_path}`
+ raise ConversionFailed, output unless $?.success?
source_file.delete
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
result_file.delete
`say "Image converted. Money in the bank!"`
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index 1669c17..934991d 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,173 +1,196 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper/processor'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
+
+ def test_should_raise_conversion_failed_exception_if_sips_process_exits_uncleanly
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file'
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = stub_everything( :path => '/foo/bar.jpg' )
+ result_file = stub_everything
+
+ JackTheRIPper::HTTPFile.stubs( :get ).returns( source_file )
+ JackTheRIPper::HTTPFile.stubs( :new ).returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.stubs( :` ).returns( 'blah blah blah' )
+ $?.stubs( :success? ).returns( false )
+
+ assert_raises( JackTheRIPper::Processor::ConversionFailed ) { processor.process }
+ end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
dfe52dccc6522132a2c44e3e74e3e122b7da6527
|
fixed version #
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index e60b4f8..23b6dc0 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,34 +1,34 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.2.0dev'
+ VERSION = '0.2.0'
class << self
def tmp_path
@tmp_path || '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
34bfffc01b72fe76d79fae3b119e50f49a655a62
|
tagging release 0.1.0
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 8fb4999..e60b4f8 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,34 +1,34 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
- VERSION = '0.1.0'
+ VERSION = '0.2.0dev'
class << self
def tmp_path
@tmp_path || '/tmp'
end
def tmp_path=( path )
@tmp_path = path
end
def process_next_message( queue )
message = queue.receive
return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
|
jwilger/jack-the-ripper
|
283eea11857d1a9e3e2a7ce8333cc072c7b538de
|
set up daemonization
|
diff --git a/Manifest.txt b/Manifest.txt
index fa2b7c7..8b157e0 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,57 +1,58 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
+lib/jack_the_ripper_server.rb
test/jack_the_ripper/test_http_file.rb
test/jack_the_ripper/test_processor.rb
test/test_jack_the_ripper.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
diff --git a/Rakefile b/Rakefile
index e84f92d..b604f64 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,19 +1,19 @@
# -*- ruby -*-
require 'rubygems'
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
- p.extra_deps = %w( right_aws mime-types )
+ p.extra_deps = %w( right_aws mime-types daemons )
end
# vim: syntax=Ruby
diff --git a/bin/jack_the_ripper b/bin/jack_the_ripper
index 22581a3..8fd6191 100644
--- a/bin/jack_the_ripper
+++ b/bin/jack_the_ripper
@@ -1,2 +1,11 @@
#!/usr/bin/env ruby
-puts "Hello World!"
\ No newline at end of file
+LOAD_PATH = File.expand_path( File.dirname( __FILE__ ) + '/../lib' )
+require 'rubygems'
+gem 'daemons', '= 1.0.9'
+require 'daemons'
+Daemons.run( LOAD_PATH + '/jack_the_ripper_server.rb', {
+ :app_name => 'JackTheRIPper',
+ :dir_mode => :normal,
+ :dir => '/tmp',
+ :log_output => true
+} )
\ No newline at end of file
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 5c53277..8fb4999 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,25 +1,34 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
require 'rubygems'
gem 'right_aws', '= 1.5.0'
require 'right_aws'
module JackTheRIPper
VERSION = '0.1.0'
-
class << self
+ def tmp_path
+ @tmp_path || '/tmp'
+ end
+
+ def tmp_path=( path )
+ @tmp_path = path
+ end
+
def process_next_message( queue )
message = queue.receive
+ return false if message.nil?
processor = Processor.new( YAML::load( message.body ) )
processor.process
message.delete
+ true
end
def get_queue( access_key_id, secret_access_key, queue_name )
RightAws::Sqs.new( access_key_id, secret_access_key ).
queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index 95be008..d8960fe 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,41 +1,39 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
- WORKING_PATH = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
-
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
- source_file = HTTPFile.get( @source_uri, WORKING_PATH, 'source' )
+ source_file = HTTPFile.get( @source_uri, JackTheRIPper.tmp_path, 'source' )
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
- result_path = WORKING_PATH + '/result' + result_ext
+ result_path = JackTheRIPper.tmp_path + '/result' + result_ext
`sips #{sips_args} #{source_file.path} --out #{result_path}`
source_file.delete
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
result_file.delete
`say "Image converted. Money in the bank!"`
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
\ No newline at end of file
diff --git a/lib/jack_the_ripper_server.rb b/lib/jack_the_ripper_server.rb
new file mode 100755
index 0000000..552e05f
--- /dev/null
+++ b/lib/jack_the_ripper_server.rb
@@ -0,0 +1,65 @@
+#!/usr/bin/env ruby
+Signal.trap( "INT" ) { shutdown() }
+Signal.trap( "TERM" ) { shutdown() }
+
+@keep_running = true
+def shutdown
+ @keep_running = false
+end
+
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../lib' ) )
+require 'jack_the_ripper'
+require 'optparse'
+require 'ostruct'
+
+options = OpenStruct.new
+options.access_key_id = ''
+options.secret_access_key = ''
+options.queue_name = ''
+options.tmp_path = '/tmp'
+
+opts = OptionParser.new do |opts|
+ opts.banner = "Usage: jack_the_ripper_server [options]"
+ opts.separator ""
+ opts.separator "Specific options:"
+
+ opts.on( '-a', '--access_key_id AWS_ACCESS_KEY_ID', 'REQUIRED' ) do |access_key_id|
+ options.access_key_id = access_key_id
+ end
+
+ opts.on( '-s', '--secret_access_key AWS_SECRET_ACCESS_KEY', 'REQUIRED' ) do |secret_access_key|
+ options.secret_access_key = secret_access_key
+ end
+
+ opts.on( '-q', '--queue SQS_QUEUE_NAME', 'REQUIRED' ) do |queue_name|
+ options.queue_name = queue_name
+ end
+
+ opts.on( '-t', '--tmpdir [TMPDIR]', 'Path to save temporary image files. Defaults to "/tmp"' ) do |tmp_path|
+ options.tmp_path = tmp_path
+ end
+
+ opts.on_tail("-h", "--help", "Show this message") do
+ puts opts
+ exit
+ end
+end
+opts.parse!( ARGV )
+
+JackTheRIPper.tmp_path = options.tmp_path
+queue = JackTheRIPper.get_queue( options.access_key_id,
+ options.secret_access_key, options.queue_name )
+
+while @keep_running do
+ begin
+ if JackTheRIPper.process_next_message( queue ) == false
+ 60.times { sleep( 1 ) if @keep_running }
+ end
+ rescue Exception => e
+ STDERR.puts( "An Exception Occured!" )
+ STDERR.puts( e.to_s )
+ STDERR.puts( e.message )
+ STDERR.puts( e.backtrace.join( "\n" ) )
+ STDERR.puts( "\n\n" )
+ end
+end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index 96e7600..1669c17 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,168 +1,173 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper/processor'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ JackTheRIPper.stubs( :tmp_path ).returns( working_dir_path )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
end
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index be1b93a..7c56e41 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -1,30 +1,42 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPper < Test::Unit::TestCase
- def test_should_process_one_message_from_the_queue_then_delete_the_message
+ def test_should_process_one_message_from_the_queue_then_delete_the_message_and_return_true
queue = mock
message = mock
body = YAML::dump( { :foo => 'bar' } )
queue.expects( :receive ).returns( message )
message.expects( :delete )
message.stubs( :body ).returns( body )
processor = mock
JackTheRIPper::Processor.expects( :new ).with( { :foo => 'bar' } ).
returns( processor )
processor.expects( :process )
- JackTheRIPper.process_next_message( queue )
+ assert_equal true, JackTheRIPper.process_next_message( queue )
+ end
+
+ def test_should_return_false_if_there_are_no_messages_retrieved
+ queue = mock
+ queue.expects( :receive ).returns( nil )
+ assert_equal false, JackTheRIPper.process_next_message( queue )
end
def test_should_instantiate_queue_and_return_it
sqs = mock
queue = stub
sqs.expects( :queue ).with( 'myqueue', true, 240 ).returns( queue )
RightAws::Sqs.expects( :new ).with( 'myaccesskeyid', 'mysecretaccesskey' ).
returns( sqs )
assert_same queue, JackTheRIPper.get_queue( 'myaccesskeyid',
'mysecretaccesskey', 'myqueue' )
end
+
+ def test_should_have_tmp_path_attribute
+ assert_equal '/tmp', JackTheRIPper.tmp_path
+ assert_nothing_raised { JackTheRIPper.tmp_path = '/foo/bar' }
+ assert_equal '/foo/bar', JackTheRIPper.tmp_path
+ end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
32010dfdb1e21cf691d37a91569c7b21d8448539
|
fixed Manifest
|
diff --git a/Manifest.txt b/Manifest.txt
index bfd2f20..fa2b7c7 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,56 +1,57 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
+test/jack_the_ripper/test_http_file.rb
test/jack_the_ripper/test_processor.rb
test/test_jack_the_ripper.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
|
jwilger/jack-the-ripper
|
3e826271800e33d77e0990405b10322104650823
|
Added implementation of JackTheRIPper::HTTPFile
|
diff --git a/Rakefile b/Rakefile
index e120d8d..e84f92d 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,19 +1,19 @@
# -*- ruby -*-
require 'rubygems'
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
- p.extra_deps = %w( right_aws )
+ p.extra_deps = %w( right_aws mime-types )
end
# vim: syntax=Ruby
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
index 7e0f3a8..72052d4 100644
--- a/lib/jack_the_ripper/http_file.rb
+++ b/lib/jack_the_ripper/http_file.rb
@@ -1,4 +1,38 @@
+require 'uri'
+require 'net/http'
+require 'rubygems'
+gem 'mime-types'
+require 'mime/types'
+
module JackTheRIPper
class HTTPFile
+ attr_reader :path
+
+ def initialize( uri, path )
+ @uri = uri
+ @path = path
+ end
+
+ def delete
+ File.unlink( @path )
+ end
+
+ def put
+ uri = URI.parse( @uri )
+ content_type = MIME::Types.type_for( @path ).first.content_type
+ Net::HTTP.start( uri.host, uri.port ) do |http|
+ http.send_request( 'PUT', uri.request_uri, File.read( @path ), { 'Content-Type' => content_type } )
+ end
+ end
+
+ class << self
+ def get( uri, directory, basename )
+ result = Net::HTTP.get_response( URI.parse( uri ) )
+ ext = MIME::Types[ result.content_type ].first.extensions.first
+ file_path = directory + '/' + basename + '.' + ext
+ File.open( file_path, 'w' ) { |f| f.write( result.read_body ) }
+ new( nil, file_path )
+ end
+ end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_http_file.rb b/test/jack_the_ripper/test_http_file.rb
new file mode 100644
index 0000000..54ddce1
--- /dev/null
+++ b/test/jack_the_ripper/test_http_file.rb
@@ -0,0 +1,44 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
+require 'test/unit'
+require 'mocha'
+require 'jack_the_ripper/http_file'
+
+class TestJackTheRIPperHTTPFile < Test::Unit::TestCase
+ def test_should_get_file_and_store_it_at_specified_path_and_return_http_file_instance
+ http_result = stub( :content_type => 'application/pdf',
+ :read_body => 'file contents' )
+ Net::HTTP.expects( :get_response ).
+ with( URI.parse( 'http://example.com/file.pdf' ) ).
+ returns( http_result )
+ file = mock
+ File.expects( :open ).with( '/tmp/source.pdf', 'w' ).yields( file )
+ file.expects( :write ).with( 'file contents' )
+ file = JackTheRIPper::HTTPFile.get( 'http://example.com/file.pdf',
+ '/tmp', 'source' )
+ assert_equal '/tmp/source.pdf', file.path
+ end
+
+ def test_should_delete_file_from_path
+ f = JackTheRIPper::HTTPFile.new( nil, '/tmp/some_file' )
+ File.expects( :unlink ).with( '/tmp/some_file' )
+ f.delete
+ end
+
+ def test_should_upload_file_to_specified_uri_via_put
+ f = JackTheRIPper::HTTPFile.new( 'http://example.com/result.jpg',
+ '/tmp/result.jpg' )
+ uri = URI.parse( 'http://example.com/result.jpg' )
+ http_conn = mock
+ Net::HTTP.expects( :start ).
+ with( uri.host, uri.port ).
+ yields( http_conn )
+ headers = { 'Content-Type' => 'image/jpeg' }
+ data = 'file contents'
+ File.expects( :read ).with( '/tmp/result.jpg' ).returns( data )
+ http_response = Net::HTTPSuccess.allocate
+ http_conn.expects( :send_request ).
+ with( 'PUT', uri.request_uri, data, headers ).
+ returns( http_response )
+ f.put
+ end
+end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
574fb1509b613a2b4cbb7f13bdae9be2757b02bc
|
Converted to using the RightAws library for communication with SQS
|
diff --git a/Manifest.txt b/Manifest.txt
index 0767703..bfd2f20 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,62 +1,56 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
-lib/sqs/queue.rb
-lib/sqs/signature.rb
-lib/sqs/signed_request.rb
test/jack_the_ripper/test_processor.rb
-test/sqs/test_queue.rb
-test/sqs/test_signature.rb
-test/sqs/test_signed_request.rb
test/test_jack_the_ripper.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
diff --git a/Rakefile b/Rakefile
index 8865623..e120d8d 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,19 +1,19 @@
# -*- ruby -*-
require 'rubygems'
require 'hoe'
require './lib/jack_the_ripper.rb'
Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
- p.extra_deps = %w( )
+ p.extra_deps = %w( right_aws )
end
# vim: syntax=Ruby
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 6f37d04..5c53277 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,16 +1,25 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
require 'yaml'
+require 'rubygems'
+gem 'right_aws', '= 1.5.0'
+require 'right_aws'
+
module JackTheRIPper
VERSION = '0.1.0'
class << self
def process_next_message( queue )
- receipt, message = queue.next_message
- processor = Processor.new( YAML::load( message ) )
+ message = queue.receive
+ processor = Processor.new( YAML::load( message.body ) )
processor.process
- queue.delete_message( receipt )
+ message.delete
+ end
+
+ def get_queue( access_key_id, secret_access_key, queue_name )
+ RightAws::Sqs.new( access_key_id, secret_access_key ).
+ queue( queue_name, true, 240 )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index 7200215..95be008 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,40 +1,41 @@
require 'jack_the_ripper/http_file'
module JackTheRIPper
class Processor
WORKING_PATH = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
def initialize( instructions )
@source_uri = instructions[ :source_uri ]
@result_uri = instructions[ :result_uri ]
@format = instructions[ :format ]
@scale = instructions[ :scale ]
@pad = instructions[ :pad ]
end
def process
source_file = HTTPFile.get( @source_uri, WORKING_PATH, 'source' )
result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
result_path = WORKING_PATH + '/result' + result_ext
`sips #{sips_args} #{source_file.path} --out #{result_path}`
source_file.delete
result_file = HTTPFile.new( @result_uri, result_path )
result_file.put
result_file.delete
+ `say "Image converted. Money in the bank!"`
end
private
def sips_args
args = []
args << "-s format #{@format} -s formatOptions best" if @format
args << "-Z #{@scale}" if @scale
if @pad
dimensions, color = @pad.split( /\s/, 2 )
args << "-p #{dimensions.sub( 'x', ' ' )}"
args << "--padColor #{color}" if color
end
args.join( ' ' )
end
end
end
\ No newline at end of file
diff --git a/lib/sqs/queue.rb b/lib/sqs/queue.rb
deleted file mode 100644
index a3b5626..0000000
--- a/lib/sqs/queue.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-require 'rexml/document'
-require 'sqs/signed_request'
-
-module SQS
- class Queue
- attr_accessor :name, :access_key_id, :secret_access_key
-
- def initialize( access_key_id, secret_access_key, name )
- @name = name
- @access_key_id = access_key_id
- @secret_access_key = secret_access_key
- end
-
- def next_message
- response = SignedRequest.send( access_key_id, secret_access_key, name,
- 'ReceiveMessage' )
- response_xml = REXML::Document.new( response ).root
- message = response_xml.get_elements( '//Message' ).first
- receipt = message.get_elements( '//ReceiptHandle' ).first.text
- body = message.get_elements( '//Body' ).first.text
- return receipt, body
- end
-
- def delete_message( receipt )
- SignedRequest.send( access_key_id, secret_access_key, name,
- 'DeleteMessage', 'ReceiptHandle' => receipt )
- end
-
- class << self
- def create!( access_key_id, secret_access_key, name )
- SignedRequest.send( access_key_id, secret_access_key, nil,
- 'CreateQueue', 'QueueName' => name )
- new( access_key_id, secret_access_key, name )
- end
- end
- end
-end
\ No newline at end of file
diff --git a/lib/sqs/signature.rb b/lib/sqs/signature.rb
deleted file mode 100644
index 9232094..0000000
--- a/lib/sqs/signature.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-require 'base64'
-require 'digest/sha1'
-require 'cgi'
-
-module SQS
- class Signature
- def initialize( secret, params )
- @params = params
- @secret = secret
- end
-
- def to_s
- p_str = param_keys.inject( '' ) { |s,k| s + k + @params[ k ] }
- CGI.escape( Base64.encode64( Digest::SHA1.hexdigest( p_str + @secret ) ) )
- end
-
- private
-
- def param_keys
- @params.keys.sort_by { |k| k.downcase }
- end
- end
-end
\ No newline at end of file
diff --git a/lib/sqs/signed_request.rb b/lib/sqs/signed_request.rb
deleted file mode 100644
index 3303afa..0000000
--- a/lib/sqs/signed_request.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-require 'time'
-require 'uri'
-require 'net/http'
-require 'sqs/signature'
-
-module SQS
- module SignedRequest
- class << self
- def send( access_key_id, secret_access_key, queue, action, params = {} )
- params = params.merge( default_params( access_key_id, action ) )
- params.merge!( 'Signature' => Signature.new( secret_access_key, params ).to_s )
- uri = URI.parse( "http://queue.amazonaws.com/#{queue}" )
- response = Net::HTTP.post_form( uri, params )
- case response
- when Net::HTTPSuccess
- response.read_body
- else
- response.error!
- end
- end
-
- private
-
- def default_params( access_key_id, action )
- {
- 'Action' => action,
- 'AWSAccessKeyId' => access_key_id,
- 'SignatureVersion' => '1',
- 'Timestamp' => Time.now.utc.iso8601,
- 'Version' => '2008-01-01'
- }
- end
- end
- end
-end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
index b0c9401..96e7600 100644
--- a/test/jack_the_ripper/test_processor.rb
+++ b/test/jack_the_ripper/test_processor.rb
@@ -1,163 +1,168 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper/processor'
class TestJackTheRIPperProcessor < Test::Unit::TestCase
def test_should_convert_image_format_to_jpeg_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
+ processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_convert_image_format_to_png_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :png
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
+ processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:scale => 75
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
+ processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
+ processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
def test_should_combine_options
working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
instruction = {
:source_uri => 'http://example.com/source_file',
:result_uri => 'http://example.com/result_file',
:format => :jpg,
:scale => 75,
:pad => '75x100 FFFFFF'
}
processor = JackTheRIPper::Processor.new( instruction )
source_file = mock
source_file.expects( :delete )
source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
result_file = mock
result_file.expects( :put )
result_file.expects( :delete )
JackTheRIPper::HTTPFile.expects( :get ).
with( instruction[ :source_uri ], working_dir_path, 'source' ).
returns( source_file )
JackTheRIPper::HTTPFile.expects( :new ).
with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
returns( result_file )
processor = JackTheRIPper::Processor.new( instruction )
processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
+ processor.expects( :` ).with( "say \"Image converted. Money in the bank!\"" )
$?.stubs( :success? ).returns( true )
processor.process
end
end
\ No newline at end of file
diff --git a/test/sqs/test_queue.rb b/test/sqs/test_queue.rb
deleted file mode 100644
index 0d30bf2..0000000
--- a/test/sqs/test_queue.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
-require 'test/unit'
-require 'mocha'
-require 'sqs/queue'
-
-class TestSQSQueue < Test::Unit::TestCase
- def test_should_initialize_queue_object
- queue_name = 'myqueue'
- access_key_id = 'myaccesskey'
- secret_access_key = 'mysecretkey'
- queue = SQS::Queue.new( access_key_id, secret_access_key, queue_name )
- assert_equal queue_name, queue.name
- assert_equal access_key_id, queue.access_key_id
- assert_equal secret_access_key, queue.secret_access_key
- end
-
- def test_should_create_and_return_queue_with_specified_name
- queue_name = 'myqueue'
- access_key_id = 'myaccesskey'
- secret_access_key = 'mysecretkey'
- SQS::SignedRequest.expects( :send ).with( access_key_id, secret_access_key,
- nil, 'CreateQueue', 'QueueName' => queue_name )
- queue = SQS::Queue.create!( access_key_id, secret_access_key, queue_name )
- assert_equal queue_name, queue.name
- assert_equal access_key_id, queue.access_key_id
- assert_equal secret_access_key, queue.secret_access_key
- end
-
- def test_should_pull_a_message_from_the_queue_and_return_the_receipt_handle_and_message_body
- queue_name = 'myqueue'
- access_key_id = 'myaccesskey'
- secret_access_key = 'mysecretkey'
- expected_receipt = 'Z2hlcm1hbi5kZXNrdG9wLmFtYXpvbi5jb20=:AAABFoNJa/AAAAAAAAAANwAAAAAAAAAAAAAAAAAAAAQAAAEXAMPLE'
- expected_body = 'foo'
- sqs_response = <<-EOF
- <ReceiveMessageResponse>
- <ReceiveMessageResult>
- <Message>
- <MessageId>11YEJMCHE2DM483NGN40|3H4AA8J7EJKM0DQZR7E1|PT6DRTB278S4MNY77NJ0</MessageId>
- <ReceiptHandle>#{expected_receipt}</ReceiptHandle>
- <MD5OfBody>#{Digest::MD5.hexdigest( expected_body )}</MD5OfBody>
- <Body>#{expected_body}</Body>
- </Message>
- </ReceiveMessageResult>
- <ResponseMetadata>
- <RequestId>b5bf2332-e983-4d3e-941a-f64c0d21f00f</RequestId>
- </ResponseMetadata>
- </ReceiveMessageResponse>
- EOF
- SQS::SignedRequest.expects( :send ).with( access_key_id, secret_access_key,
- queue_name, 'ReceiveMessage' ).returns( sqs_response )
-
- queue = SQS::Queue.new( access_key_id, secret_access_key, queue_name )
- receipt, body = queue.next_message
- assert_equal expected_receipt, receipt
- assert_equal expected_body, body
- end
-
- def test_should_delete_a_message_from_the_queue
- queue_name = 'myqueue'
- access_key_id = 'myaccesskey'
- secret_access_key = 'mysecretkey'
- receipt = 'Z2hlcm1hbi5kZXNrdG9wLmFtYXpvbi5jb20=:AAABFoNJa/AAAAAAAAAANwAAAAAAAAAAAAAAAAAAAAQAAAEXAMPLE'
- SQS::SignedRequest.expects( :send ).with( access_key_id, secret_access_key,
- queue_name, 'DeleteMessage', 'ReceiptHandle' => receipt )
-
- queue = SQS::Queue.new( access_key_id, secret_access_key, queue_name )
- queue.delete_message( receipt )
- end
-end
\ No newline at end of file
diff --git a/test/sqs/test_signature.rb b/test/sqs/test_signature.rb
deleted file mode 100644
index 4ad3e6e..0000000
--- a/test/sqs/test_signature.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-require 'test/unit'
-require 'sqs/signature'
-
-class TestSQSSignature < Test::Unit::TestCase
- def test_should_generate_expected_signature_from_parameters
- params = {
- 'Action' => 'CreateQueue',
- 'QueueName' => 'queue2',
- 'AWSAccessKeyId' => '0A8BDF2G9KCB3ZNKFA82',
- 'SignatureVersion' => '1',
- 'Timestamp' => '2007-01-12T12:00:00Z',
- 'Version' => '2006-04-01'
- }
- sig = SQS::Signature.new( 'abc123', params )
- expected = CGI.escape( Base64.encode64( Digest::SHA1.hexdigest(
- 'Action' + 'CreateQueue' +
- 'AWSAccessKeyId' + '0A8BDF2G9KCB3ZNKFA82' +
- 'QueueName' + 'queue2' +
- 'SignatureVersion' + '1' +
- 'Timestamp' + '2007-01-12T12:00:00Z' +
- 'Version' + '2006-04-01' +
- 'abc123'
- ) ) )
- assert_equal expected, sig.to_s
- end
-end
\ No newline at end of file
diff --git a/test/sqs/test_signed_request.rb b/test/sqs/test_signed_request.rb
deleted file mode 100644
index 9e9ace3..0000000
--- a/test/sqs/test_signed_request.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
-require 'test/unit'
-require 'mocha'
-require 'sqs/signed_request'
-
-class TestSQSSignedRequest < Test::Unit::TestCase
- def test_should_submit_signed_request_to_sqs_and_return_result_body
- time = Time.now
- Time.stubs( :now ).returns( time )
- req_params = {
- 'Action' => 'ReceiveMessage',
- 'AWSAccessKeyId' => 'mykey',
- 'SignatureVersion' => '1',
- 'Timestamp' => time.utc.iso8601,
- 'Version' => '2008-01-01',
- 'MaxNumberOfMessages' => '1'
- }
- signature = stub( :to_s => 'aws_signature' )
- SQS::Signature.expects( :new ).with( 'mysecret', req_params ).
- returns( signature )
- signed_params = req_params.merge( 'Signature' => 'aws_signature' )
- uri = URI.parse( 'http://queue.amazonaws.com/myqueue' )
- expected_body = 'message response'
- response = Net::HTTPSuccess.allocate
- response.stubs( :read_body ).returns( expected_body )
- Net::HTTP.expects( :post_form ).with( uri, signed_params ).returns( response )
-
- result = SQS::SignedRequest.send( 'mykey', 'mysecret', 'myqueue',
- 'ReceiveMessage', { 'MaxNumberOfMessages' => '1' } )
- assert_equal expected_body, result
- end
-
- def test_should_raise_exception_if_sqs_response_is_not_in_2xx_range
- response = Net::HTTPServerError.allocate
- response.expects( :error! ).raises( Net::HTTPServerException.allocate )
- Net::HTTP.stubs( :post_form ).returns( response )
- assert_raises( Net::HTTPServerException ) do
- SQS::SignedRequest.send( 'mykey', 'mysecret', 'myqueue',
- 'ReceiveMessage', { 'MaxNumberOfMessages' => '1' } )
- end
- end
-end
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index e9c8c63..be1b93a 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -1,18 +1,30 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPper < Test::Unit::TestCase
def test_should_process_one_message_from_the_queue_then_delete_the_message
queue = mock
+ message = mock
body = YAML::dump( { :foo => 'bar' } )
- queue.expects( :next_message ).returns( [ 'receipt_handle', body ] )
- queue.expects( :delete_message ).with( 'receipt_handle' )
+ queue.expects( :receive ).returns( message )
+ message.expects( :delete )
+ message.stubs( :body ).returns( body )
processor = mock
JackTheRIPper::Processor.expects( :new ).with( { :foo => 'bar' } ).
returns( processor )
processor.expects( :process )
JackTheRIPper.process_next_message( queue )
end
+
+ def test_should_instantiate_queue_and_return_it
+ sqs = mock
+ queue = stub
+ sqs.expects( :queue ).with( 'myqueue', true, 240 ).returns( queue )
+ RightAws::Sqs.expects( :new ).with( 'myaccesskeyid', 'mysecretaccesskey' ).
+ returns( sqs )
+ assert_same queue, JackTheRIPper.get_queue( 'myaccesskeyid',
+ 'mysecretaccesskey', 'myqueue' )
+ end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
c3f39cb4367f95a4586d62212e0b985cd904b527
|
Run YAML::load on message body received from queue before passing it to the Processor
|
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index b6f0bc7..6f37d04 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,15 +1,16 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
+require 'yaml'
module JackTheRIPper
VERSION = '0.1.0'
class << self
def process_next_message( queue )
receipt, message = queue.next_message
- processor = Processor.new( message )
+ processor = Processor.new( YAML::load( message ) )
processor.process
queue.delete_message( receipt )
end
end
end
require 'jack_the_ripper/processor'
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index d519e70..e9c8c63 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -1,17 +1,18 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'jack_the_ripper'
class TestJackTheRIPper < Test::Unit::TestCase
def test_should_process_one_message_from_the_queue_then_delete_the_message
queue = mock
- queue.expects( :next_message ).returns( [ 'receipt_handle', 'message_body' ] )
+ body = YAML::dump( { :foo => 'bar' } )
+ queue.expects( :next_message ).returns( [ 'receipt_handle', body ] )
queue.expects( :delete_message ).with( 'receipt_handle' )
processor = mock
- JackTheRIPper::Processor.expects( :new ).with( 'message_body' ).
+ JackTheRIPper::Processor.expects( :new ).with( { :foo => 'bar' } ).
returns( processor )
processor.expects( :process )
JackTheRIPper.process_next_message( queue )
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
c520aa3a8d34f86dda24872ee6ad050a837683d3
|
Added functionality to JackTheRIPper::Processor to scale, pad and convert format of files
|
diff --git a/Manifest.txt b/Manifest.txt
index fb25d86..0767703 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,60 +1,62 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
+lib/jack_the_ripper/http_file.rb
lib/jack_the_ripper/processor.rb
lib/sqs/queue.rb
lib/sqs/signature.rb
lib/sqs/signed_request.rb
+test/jack_the_ripper/test_processor.rb
test/sqs/test_queue.rb
test/sqs/test_signature.rb
test/sqs/test_signed_request.rb
test/test_jack_the_ripper.rb
vendor/mocha/COPYING
vendor/mocha/MIT-LICENSE
vendor/mocha/README
vendor/mocha/RELEASE
vendor/mocha/TODO
vendor/mocha/lib/mocha.rb
vendor/mocha/lib/mocha/any_instance_method.rb
vendor/mocha/lib/mocha/auto_verify.rb
vendor/mocha/lib/mocha/central.rb
vendor/mocha/lib/mocha/class_method.rb
vendor/mocha/lib/mocha/deprecation.rb
vendor/mocha/lib/mocha/exception_raiser.rb
vendor/mocha/lib/mocha/expectation.rb
vendor/mocha/lib/mocha/expectation_error.rb
vendor/mocha/lib/mocha/expectation_list.rb
vendor/mocha/lib/mocha/infinite_range.rb
vendor/mocha/lib/mocha/inspect.rb
vendor/mocha/lib/mocha/instance_method.rb
vendor/mocha/lib/mocha/is_a.rb
vendor/mocha/lib/mocha/metaclass.rb
vendor/mocha/lib/mocha/missing_expectation.rb
vendor/mocha/lib/mocha/mock.rb
vendor/mocha/lib/mocha/multiple_yields.rb
vendor/mocha/lib/mocha/no_yields.rb
vendor/mocha/lib/mocha/object.rb
vendor/mocha/lib/mocha/parameter_matchers.rb
vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
vendor/mocha/lib/mocha/parameter_matchers/anything.rb
vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
vendor/mocha/lib/mocha/parameter_matchers/includes.rb
vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
vendor/mocha/lib/mocha/pretty_parameters.rb
vendor/mocha/lib/mocha/return_values.rb
vendor/mocha/lib/mocha/setup_and_teardown.rb
vendor/mocha/lib/mocha/single_return_value.rb
vendor/mocha/lib/mocha/single_yield.rb
vendor/mocha/lib/mocha/standalone.rb
vendor/mocha/lib/mocha/stub.rb
vendor/mocha/lib/mocha/test_case_adapter.rb
vendor/mocha/lib/mocha/yield_parameters.rb
vendor/mocha/lib/mocha_standalone.rb
vendor/mocha/lib/stubba.rb
diff --git a/lib/jack_the_ripper/http_file.rb b/lib/jack_the_ripper/http_file.rb
new file mode 100644
index 0000000..7e0f3a8
--- /dev/null
+++ b/lib/jack_the_ripper/http_file.rb
@@ -0,0 +1,4 @@
+module JackTheRIPper
+ class HTTPFile
+ end
+end
\ No newline at end of file
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
index ed406dc..7200215 100644
--- a/lib/jack_the_ripper/processor.rb
+++ b/lib/jack_the_ripper/processor.rb
@@ -1,4 +1,40 @@
+require 'jack_the_ripper/http_file'
+
module JackTheRIPper
class Processor
+ WORKING_PATH = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+
+ def initialize( instructions )
+ @source_uri = instructions[ :source_uri ]
+ @result_uri = instructions[ :result_uri ]
+ @format = instructions[ :format ]
+ @scale = instructions[ :scale ]
+ @pad = instructions[ :pad ]
+ end
+
+ def process
+ source_file = HTTPFile.get( @source_uri, WORKING_PATH, 'source' )
+ result_ext = @format.nil? ? File.extname( source_file.path ) : ".#{@format}"
+ result_path = WORKING_PATH + '/result' + result_ext
+ `sips #{sips_args} #{source_file.path} --out #{result_path}`
+ source_file.delete
+ result_file = HTTPFile.new( @result_uri, result_path )
+ result_file.put
+ result_file.delete
+ end
+
+ private
+
+ def sips_args
+ args = []
+ args << "-s format #{@format} -s formatOptions best" if @format
+ args << "-Z #{@scale}" if @scale
+ if @pad
+ dimensions, color = @pad.split( /\s/, 2 )
+ args << "-p #{dimensions.sub( 'x', ' ' )}"
+ args << "--padColor #{color}" if color
+ end
+ args.join( ' ' )
+ end
end
end
\ No newline at end of file
diff --git a/test/jack_the_ripper/test_processor.rb b/test/jack_the_ripper/test_processor.rb
new file mode 100644
index 0000000..b0c9401
--- /dev/null
+++ b/test/jack_the_ripper/test_processor.rb
@@ -0,0 +1,163 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
+require 'test/unit'
+require 'mocha'
+require 'jack_the_ripper/processor'
+
+class TestJackTheRIPperProcessor < Test::Unit::TestCase
+ def test_should_convert_image_format_to_jpeg_and_put_resulting_file
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :format => :jpg
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects( :` ).with( "sips -s format jpg -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
+ $?.stubs( :success? ).returns( true )
+
+ processor.process
+ end
+
+ def test_should_convert_image_format_to_png_and_put_resulting_file
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :format => :png
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.png' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects( :` ).with( "sips -s format png -s formatOptions best #{working_dir_path}/source.pdf --out #{working_dir_path}/result.png" )
+ $?.stubs( :success? ).returns( true )
+
+ processor.process
+ end
+
+ def test_should_scale_image_to_specified_max_dimension_and_put_resulting_file
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :scale => 75
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects( :` ).with( "sips -Z 75 #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
+ $?.stubs( :success? ).returns( true )
+
+ processor.process
+ end
+
+ def test_should_pad_image_to_specified_width_and_height_and_put_resulting_file
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :pad => '75x100 FFFFFF'
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.jpg' )
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects( :` ).with( "sips -p 75 100 --padColor FFFFFF #{working_dir_path}/source.jpg --out #{working_dir_path}/result.jpg" )
+ $?.stubs( :success? ).returns( true )
+
+ processor.process
+ end
+
+ def test_should_combine_options
+ working_dir_path = File.expand_path( File.dirname( __FILE__ ) + '/../../tmp' )
+ instruction = {
+ :source_uri => 'http://example.com/source_file',
+ :result_uri => 'http://example.com/result_file',
+ :format => :jpg,
+ :scale => 75,
+ :pad => '75x100 FFFFFF'
+ }
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ source_file = mock
+ source_file.expects( :delete )
+ source_file.stubs( :path ).returns( working_dir_path + '/source.pdf' )
+ result_file = mock
+ result_file.expects( :put )
+ result_file.expects( :delete )
+
+ JackTheRIPper::HTTPFile.expects( :get ).
+ with( instruction[ :source_uri ], working_dir_path, 'source' ).
+ returns( source_file )
+ JackTheRIPper::HTTPFile.expects( :new ).
+ with( instruction[ :result_uri ], working_dir_path + '/result.jpg' ).
+ returns( result_file )
+
+ processor = JackTheRIPper::Processor.new( instruction )
+
+ processor.expects( :` ).with( "sips -s format jpg -s formatOptions best -Z 75 -p 75 100 --padColor FFFFFF #{working_dir_path}/source.pdf --out #{working_dir_path}/result.jpg" )
+ $?.stubs( :success? ).returns( true )
+
+ processor.process
+ end
+end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
6570fba8d17cd059d25ca9bd00b5678fa0262aa5
|
added JackTheRIPper.process_next_message
|
diff --git a/Manifest.txt b/Manifest.txt
index 5d83507..fb25d86 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,9 +1,60 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
+lib/jack_the_ripper/processor.rb
+lib/sqs/queue.rb
lib/sqs/signature.rb
+lib/sqs/signed_request.rb
+test/sqs/test_queue.rb
test/sqs/test_signature.rb
+test/sqs/test_signed_request.rb
test/test_jack_the_ripper.rb
+vendor/mocha/COPYING
+vendor/mocha/MIT-LICENSE
+vendor/mocha/README
+vendor/mocha/RELEASE
+vendor/mocha/TODO
+vendor/mocha/lib/mocha.rb
+vendor/mocha/lib/mocha/any_instance_method.rb
+vendor/mocha/lib/mocha/auto_verify.rb
+vendor/mocha/lib/mocha/central.rb
+vendor/mocha/lib/mocha/class_method.rb
+vendor/mocha/lib/mocha/deprecation.rb
+vendor/mocha/lib/mocha/exception_raiser.rb
+vendor/mocha/lib/mocha/expectation.rb
+vendor/mocha/lib/mocha/expectation_error.rb
+vendor/mocha/lib/mocha/expectation_list.rb
+vendor/mocha/lib/mocha/infinite_range.rb
+vendor/mocha/lib/mocha/inspect.rb
+vendor/mocha/lib/mocha/instance_method.rb
+vendor/mocha/lib/mocha/is_a.rb
+vendor/mocha/lib/mocha/metaclass.rb
+vendor/mocha/lib/mocha/missing_expectation.rb
+vendor/mocha/lib/mocha/mock.rb
+vendor/mocha/lib/mocha/multiple_yields.rb
+vendor/mocha/lib/mocha/no_yields.rb
+vendor/mocha/lib/mocha/object.rb
+vendor/mocha/lib/mocha/parameter_matchers.rb
+vendor/mocha/lib/mocha/parameter_matchers/all_of.rb
+vendor/mocha/lib/mocha/parameter_matchers/any_of.rb
+vendor/mocha/lib/mocha/parameter_matchers/anything.rb
+vendor/mocha/lib/mocha/parameter_matchers/has_entry.rb
+vendor/mocha/lib/mocha/parameter_matchers/has_key.rb
+vendor/mocha/lib/mocha/parameter_matchers/has_value.rb
+vendor/mocha/lib/mocha/parameter_matchers/includes.rb
+vendor/mocha/lib/mocha/parameter_matchers/instance_of.rb
+vendor/mocha/lib/mocha/parameter_matchers/kind_of.rb
+vendor/mocha/lib/mocha/pretty_parameters.rb
+vendor/mocha/lib/mocha/return_values.rb
+vendor/mocha/lib/mocha/setup_and_teardown.rb
+vendor/mocha/lib/mocha/single_return_value.rb
+vendor/mocha/lib/mocha/single_yield.rb
+vendor/mocha/lib/mocha/standalone.rb
+vendor/mocha/lib/mocha/stub.rb
+vendor/mocha/lib/mocha/test_case_adapter.rb
+vendor/mocha/lib/mocha/yield_parameters.rb
+vendor/mocha/lib/mocha_standalone.rb
+vendor/mocha/lib/stubba.rb
diff --git a/Rakefile b/Rakefile
index e5dbd83..8865623 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,19 +1,19 @@
# -*- ruby -*-
require 'rubygems'
require 'hoe'
require './lib/jack_the_ripper.rb'
-Hoe.new('JackTheRIPper', JackTheRipper::VERSION) do |p|
+Hoe.new('JackTheRIPper', JackTheRIPper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
p.author = 'John Wilger'
p.email = '[email protected]'
p.summary = 'RIP Postscript documents and transform images based on ' +
'instructions pulled from Amazon SQS'
p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
p.extra_deps = %w( )
end
# vim: syntax=Ruby
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 041f181..b6f0bc7 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,3 +1,15 @@
-class JackTheRipper
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) ) )
+module JackTheRIPper
VERSION = '0.1.0'
-end
\ No newline at end of file
+
+ class << self
+ def process_next_message( queue )
+ receipt, message = queue.next_message
+ processor = Processor.new( message )
+ processor.process
+ queue.delete_message( receipt )
+ end
+ end
+end
+
+require 'jack_the_ripper/processor'
\ No newline at end of file
diff --git a/lib/jack_the_ripper/processor.rb b/lib/jack_the_ripper/processor.rb
new file mode 100644
index 0000000..ed406dc
--- /dev/null
+++ b/lib/jack_the_ripper/processor.rb
@@ -0,0 +1,4 @@
+module JackTheRIPper
+ class Processor
+ end
+end
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
index e69de29..d519e70 100644
--- a/test/test_jack_the_ripper.rb
+++ b/test/test_jack_the_ripper.rb
@@ -0,0 +1,17 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../vendor/mocha/lib' ) )
+require 'test/unit'
+require 'mocha'
+require 'jack_the_ripper'
+
+class TestJackTheRIPper < Test::Unit::TestCase
+ def test_should_process_one_message_from_the_queue_then_delete_the_message
+ queue = mock
+ queue.expects( :next_message ).returns( [ 'receipt_handle', 'message_body' ] )
+ queue.expects( :delete_message ).with( 'receipt_handle' )
+ processor = mock
+ JackTheRIPper::Processor.expects( :new ).with( 'message_body' ).
+ returns( processor )
+ processor.expects( :process )
+ JackTheRIPper.process_next_message( queue )
+ end
+end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
b7fecbd72f6ff814283e12251f4bb5ed3afcd20e
|
added SQS::Queue with ability to create queues, receive messages and delete messages. Also changed SQS::SignedRequest to raise an exception if the response from SQS is not in the 2xx range
|
diff --git a/lib/sqs/queue.rb b/lib/sqs/queue.rb
new file mode 100644
index 0000000..a3b5626
--- /dev/null
+++ b/lib/sqs/queue.rb
@@ -0,0 +1,37 @@
+require 'rexml/document'
+require 'sqs/signed_request'
+
+module SQS
+ class Queue
+ attr_accessor :name, :access_key_id, :secret_access_key
+
+ def initialize( access_key_id, secret_access_key, name )
+ @name = name
+ @access_key_id = access_key_id
+ @secret_access_key = secret_access_key
+ end
+
+ def next_message
+ response = SignedRequest.send( access_key_id, secret_access_key, name,
+ 'ReceiveMessage' )
+ response_xml = REXML::Document.new( response ).root
+ message = response_xml.get_elements( '//Message' ).first
+ receipt = message.get_elements( '//ReceiptHandle' ).first.text
+ body = message.get_elements( '//Body' ).first.text
+ return receipt, body
+ end
+
+ def delete_message( receipt )
+ SignedRequest.send( access_key_id, secret_access_key, name,
+ 'DeleteMessage', 'ReceiptHandle' => receipt )
+ end
+
+ class << self
+ def create!( access_key_id, secret_access_key, name )
+ SignedRequest.send( access_key_id, secret_access_key, nil,
+ 'CreateQueue', 'QueueName' => name )
+ new( access_key_id, secret_access_key, name )
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/sqs/signed_request.rb b/lib/sqs/signed_request.rb
index d78a81e..3303afa 100644
--- a/lib/sqs/signed_request.rb
+++ b/lib/sqs/signed_request.rb
@@ -1,30 +1,35 @@
require 'time'
require 'uri'
require 'net/http'
require 'sqs/signature'
module SQS
module SignedRequest
class << self
def send( access_key_id, secret_access_key, queue, action, params = {} )
params = params.merge( default_params( access_key_id, action ) )
params.merge!( 'Signature' => Signature.new( secret_access_key, params ).to_s )
uri = URI.parse( "http://queue.amazonaws.com/#{queue}" )
response = Net::HTTP.post_form( uri, params )
- response.read_body
+ case response
+ when Net::HTTPSuccess
+ response.read_body
+ else
+ response.error!
+ end
end
private
def default_params( access_key_id, action )
{
'Action' => action,
'AWSAccessKeyId' => access_key_id,
'SignatureVersion' => '1',
'Timestamp' => Time.now.utc.iso8601,
'Version' => '2008-01-01'
}
end
end
end
end
\ No newline at end of file
diff --git a/test/sqs/test_queue.rb b/test/sqs/test_queue.rb
new file mode 100644
index 0000000..0d30bf2
--- /dev/null
+++ b/test/sqs/test_queue.rb
@@ -0,0 +1,70 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
+require 'test/unit'
+require 'mocha'
+require 'sqs/queue'
+
+class TestSQSQueue < Test::Unit::TestCase
+ def test_should_initialize_queue_object
+ queue_name = 'myqueue'
+ access_key_id = 'myaccesskey'
+ secret_access_key = 'mysecretkey'
+ queue = SQS::Queue.new( access_key_id, secret_access_key, queue_name )
+ assert_equal queue_name, queue.name
+ assert_equal access_key_id, queue.access_key_id
+ assert_equal secret_access_key, queue.secret_access_key
+ end
+
+ def test_should_create_and_return_queue_with_specified_name
+ queue_name = 'myqueue'
+ access_key_id = 'myaccesskey'
+ secret_access_key = 'mysecretkey'
+ SQS::SignedRequest.expects( :send ).with( access_key_id, secret_access_key,
+ nil, 'CreateQueue', 'QueueName' => queue_name )
+ queue = SQS::Queue.create!( access_key_id, secret_access_key, queue_name )
+ assert_equal queue_name, queue.name
+ assert_equal access_key_id, queue.access_key_id
+ assert_equal secret_access_key, queue.secret_access_key
+ end
+
+ def test_should_pull_a_message_from_the_queue_and_return_the_receipt_handle_and_message_body
+ queue_name = 'myqueue'
+ access_key_id = 'myaccesskey'
+ secret_access_key = 'mysecretkey'
+ expected_receipt = 'Z2hlcm1hbi5kZXNrdG9wLmFtYXpvbi5jb20=:AAABFoNJa/AAAAAAAAAANwAAAAAAAAAAAAAAAAAAAAQAAAEXAMPLE'
+ expected_body = 'foo'
+ sqs_response = <<-EOF
+ <ReceiveMessageResponse>
+ <ReceiveMessageResult>
+ <Message>
+ <MessageId>11YEJMCHE2DM483NGN40|3H4AA8J7EJKM0DQZR7E1|PT6DRTB278S4MNY77NJ0</MessageId>
+ <ReceiptHandle>#{expected_receipt}</ReceiptHandle>
+ <MD5OfBody>#{Digest::MD5.hexdigest( expected_body )}</MD5OfBody>
+ <Body>#{expected_body}</Body>
+ </Message>
+ </ReceiveMessageResult>
+ <ResponseMetadata>
+ <RequestId>b5bf2332-e983-4d3e-941a-f64c0d21f00f</RequestId>
+ </ResponseMetadata>
+ </ReceiveMessageResponse>
+ EOF
+ SQS::SignedRequest.expects( :send ).with( access_key_id, secret_access_key,
+ queue_name, 'ReceiveMessage' ).returns( sqs_response )
+
+ queue = SQS::Queue.new( access_key_id, secret_access_key, queue_name )
+ receipt, body = queue.next_message
+ assert_equal expected_receipt, receipt
+ assert_equal expected_body, body
+ end
+
+ def test_should_delete_a_message_from_the_queue
+ queue_name = 'myqueue'
+ access_key_id = 'myaccesskey'
+ secret_access_key = 'mysecretkey'
+ receipt = 'Z2hlcm1hbi5kZXNrdG9wLmFtYXpvbi5jb20=:AAABFoNJa/AAAAAAAAAANwAAAAAAAAAAAAAAAAAAAAQAAAEXAMPLE'
+ SQS::SignedRequest.expects( :send ).with( access_key_id, secret_access_key,
+ queue_name, 'DeleteMessage', 'ReceiptHandle' => receipt )
+
+ queue = SQS::Queue.new( access_key_id, secret_access_key, queue_name )
+ queue.delete_message( receipt )
+ end
+end
\ No newline at end of file
diff --git a/test/sqs/test_signed_request.rb b/test/sqs/test_signed_request.rb
index 9c5e119..9e9ace3 100644
--- a/test/sqs/test_signed_request.rb
+++ b/test/sqs/test_signed_request.rb
@@ -1,32 +1,42 @@
$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
require 'test/unit'
require 'mocha'
require 'sqs/signed_request'
class TestSQSSignedRequest < Test::Unit::TestCase
def test_should_submit_signed_request_to_sqs_and_return_result_body
time = Time.now
Time.stubs( :now ).returns( time )
req_params = {
'Action' => 'ReceiveMessage',
'AWSAccessKeyId' => 'mykey',
'SignatureVersion' => '1',
'Timestamp' => time.utc.iso8601,
'Version' => '2008-01-01',
'MaxNumberOfMessages' => '1'
}
signature = stub( :to_s => 'aws_signature' )
SQS::Signature.expects( :new ).with( 'mysecret', req_params ).
returns( signature )
signed_params = req_params.merge( 'Signature' => 'aws_signature' )
uri = URI.parse( 'http://queue.amazonaws.com/myqueue' )
expected_body = 'message response'
response = Net::HTTPSuccess.allocate
response.stubs( :read_body ).returns( expected_body )
Net::HTTP.expects( :post_form ).with( uri, signed_params ).returns( response )
result = SQS::SignedRequest.send( 'mykey', 'mysecret', 'myqueue',
'ReceiveMessage', { 'MaxNumberOfMessages' => '1' } )
assert_equal expected_body, result
end
+
+ def test_should_raise_exception_if_sqs_response_is_not_in_2xx_range
+ response = Net::HTTPServerError.allocate
+ response.expects( :error! ).raises( Net::HTTPServerException.allocate )
+ Net::HTTP.stubs( :post_form ).returns( response )
+ assert_raises( Net::HTTPServerException ) do
+ SQS::SignedRequest.send( 'mykey', 'mysecret', 'myqueue',
+ 'ReceiveMessage', { 'MaxNumberOfMessages' => '1' } )
+ end
+ end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
26c7982ea1a8ba49bc351a4a9b8704771549398b
|
changed TestSQSSignature to indicate that we use Timestamp parameter rather than Expires parameter
|
diff --git a/test/sqs/test_signature.rb b/test/sqs/test_signature.rb
index 11778e9..4ad3e6e 100644
--- a/test/sqs/test_signature.rb
+++ b/test/sqs/test_signature.rb
@@ -1,26 +1,26 @@
require 'test/unit'
require 'sqs/signature'
class TestSQSSignature < Test::Unit::TestCase
def test_should_generate_expected_signature_from_parameters
params = {
'Action' => 'CreateQueue',
'QueueName' => 'queue2',
'AWSAccessKeyId' => '0A8BDF2G9KCB3ZNKFA82',
'SignatureVersion' => '1',
- 'Expires' => '2007-01-12T12:00:00Z',
+ 'Timestamp' => '2007-01-12T12:00:00Z',
'Version' => '2006-04-01'
}
sig = SQS::Signature.new( 'abc123', params )
expected = CGI.escape( Base64.encode64( Digest::SHA1.hexdigest(
'Action' + 'CreateQueue' +
'AWSAccessKeyId' + '0A8BDF2G9KCB3ZNKFA82' +
- 'Expires' + '2007-01-12T12:00:00Z' +
'QueueName' + 'queue2' +
'SignatureVersion' + '1' +
+ 'Timestamp' + '2007-01-12T12:00:00Z' +
'Version' + '2006-04-01' +
'abc123'
) ) )
assert_equal expected, sig.to_s
end
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
ee03f36b6b81c942c4b4d4091beda0601ceb770a
|
added SQS::SignedRequest module
|
diff --git a/lib/sqs/signature.rb b/lib/sqs/signature.rb
index 55b586f..9232094 100644
--- a/lib/sqs/signature.rb
+++ b/lib/sqs/signature.rb
@@ -1,23 +1,23 @@
require 'base64'
require 'digest/sha1'
require 'cgi'
module SQS
class Signature
- def initialize( args = {} )
- @params = args[ :params ]
- @secret = args[ :secret ]
+ def initialize( secret, params )
+ @params = params
+ @secret = secret
end
def to_s
p_str = param_keys.inject( '' ) { |s,k| s + k + @params[ k ] }
CGI.escape( Base64.encode64( Digest::SHA1.hexdigest( p_str + @secret ) ) )
end
private
def param_keys
@params.keys.sort_by { |k| k.downcase }
end
end
end
\ No newline at end of file
diff --git a/lib/sqs/signed_request.rb b/lib/sqs/signed_request.rb
new file mode 100644
index 0000000..d78a81e
--- /dev/null
+++ b/lib/sqs/signed_request.rb
@@ -0,0 +1,30 @@
+require 'time'
+require 'uri'
+require 'net/http'
+require 'sqs/signature'
+
+module SQS
+ module SignedRequest
+ class << self
+ def send( access_key_id, secret_access_key, queue, action, params = {} )
+ params = params.merge( default_params( access_key_id, action ) )
+ params.merge!( 'Signature' => Signature.new( secret_access_key, params ).to_s )
+ uri = URI.parse( "http://queue.amazonaws.com/#{queue}" )
+ response = Net::HTTP.post_form( uri, params )
+ response.read_body
+ end
+
+ private
+
+ def default_params( access_key_id, action )
+ {
+ 'Action' => action,
+ 'AWSAccessKeyId' => access_key_id,
+ 'SignatureVersion' => '1',
+ 'Timestamp' => Time.now.utc.iso8601,
+ 'Version' => '2008-01-01'
+ }
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/test/sqs/test_signature.rb b/test/sqs/test_signature.rb
index 147a40b..11778e9 100644
--- a/test/sqs/test_signature.rb
+++ b/test/sqs/test_signature.rb
@@ -1,26 +1,26 @@
require 'test/unit'
require 'sqs/signature'
class TestSQSSignature < Test::Unit::TestCase
def test_should_generate_expected_signature_from_parameters
params = {
'Action' => 'CreateQueue',
'QueueName' => 'queue2',
'AWSAccessKeyId' => '0A8BDF2G9KCB3ZNKFA82',
'SignatureVersion' => '1',
'Expires' => '2007-01-12T12:00:00Z',
'Version' => '2006-04-01'
}
- sig = SQS::Signature.new( :secret => 'abc123', :params => params )
+ sig = SQS::Signature.new( 'abc123', params )
expected = CGI.escape( Base64.encode64( Digest::SHA1.hexdigest(
'Action' + 'CreateQueue' +
'AWSAccessKeyId' + '0A8BDF2G9KCB3ZNKFA82' +
'Expires' + '2007-01-12T12:00:00Z' +
'QueueName' + 'queue2' +
'SignatureVersion' + '1' +
'Version' + '2006-04-01' +
'abc123'
) ) )
assert_equal expected, sig.to_s
end
end
\ No newline at end of file
diff --git a/test/sqs/test_signed_request.rb b/test/sqs/test_signed_request.rb
new file mode 100644
index 0000000..9c5e119
--- /dev/null
+++ b/test/sqs/test_signed_request.rb
@@ -0,0 +1,32 @@
+$:.unshift( File.expand_path( File.dirname( __FILE__ ) + '/../../vendor/mocha/lib' ) )
+require 'test/unit'
+require 'mocha'
+require 'sqs/signed_request'
+
+class TestSQSSignedRequest < Test::Unit::TestCase
+ def test_should_submit_signed_request_to_sqs_and_return_result_body
+ time = Time.now
+ Time.stubs( :now ).returns( time )
+ req_params = {
+ 'Action' => 'ReceiveMessage',
+ 'AWSAccessKeyId' => 'mykey',
+ 'SignatureVersion' => '1',
+ 'Timestamp' => time.utc.iso8601,
+ 'Version' => '2008-01-01',
+ 'MaxNumberOfMessages' => '1'
+ }
+ signature = stub( :to_s => 'aws_signature' )
+ SQS::Signature.expects( :new ).with( 'mysecret', req_params ).
+ returns( signature )
+ signed_params = req_params.merge( 'Signature' => 'aws_signature' )
+ uri = URI.parse( 'http://queue.amazonaws.com/myqueue' )
+ expected_body = 'message response'
+ response = Net::HTTPSuccess.allocate
+ response.stubs( :read_body ).returns( expected_body )
+ Net::HTTP.expects( :post_form ).with( uri, signed_params ).returns( response )
+
+ result = SQS::SignedRequest.send( 'mykey', 'mysecret', 'myqueue',
+ 'ReceiveMessage', { 'MaxNumberOfMessages' => '1' } )
+ assert_equal expected_body, result
+ end
+end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
bc17d7e6062e6fe151f88a2a9683d0344914c226
|
Added SQS::Signature class to calculate AWS SQS request signatures from specified params and secret key
|
diff --git a/Manifest.txt b/Manifest.txt
index 4d6625f..5d83507 100644
--- a/Manifest.txt
+++ b/Manifest.txt
@@ -1,7 +1,9 @@
History.txt
Manifest.txt
README.txt
Rakefile
bin/jack_the_ripper
lib/jack_the_ripper.rb
-test/test_jack_the_ripper.rb
\ No newline at end of file
+lib/sqs/signature.rb
+test/sqs/test_signature.rb
+test/test_jack_the_ripper.rb
diff --git a/lib/sqs/signature.rb b/lib/sqs/signature.rb
new file mode 100644
index 0000000..55b586f
--- /dev/null
+++ b/lib/sqs/signature.rb
@@ -0,0 +1,23 @@
+require 'base64'
+require 'digest/sha1'
+require 'cgi'
+
+module SQS
+ class Signature
+ def initialize( args = {} )
+ @params = args[ :params ]
+ @secret = args[ :secret ]
+ end
+
+ def to_s
+ p_str = param_keys.inject( '' ) { |s,k| s + k + @params[ k ] }
+ CGI.escape( Base64.encode64( Digest::SHA1.hexdigest( p_str + @secret ) ) )
+ end
+
+ private
+
+ def param_keys
+ @params.keys.sort_by { |k| k.downcase }
+ end
+ end
+end
\ No newline at end of file
diff --git a/test/sqs/test_signature.rb b/test/sqs/test_signature.rb
new file mode 100644
index 0000000..147a40b
--- /dev/null
+++ b/test/sqs/test_signature.rb
@@ -0,0 +1,26 @@
+require 'test/unit'
+require 'sqs/signature'
+
+class TestSQSSignature < Test::Unit::TestCase
+ def test_should_generate_expected_signature_from_parameters
+ params = {
+ 'Action' => 'CreateQueue',
+ 'QueueName' => 'queue2',
+ 'AWSAccessKeyId' => '0A8BDF2G9KCB3ZNKFA82',
+ 'SignatureVersion' => '1',
+ 'Expires' => '2007-01-12T12:00:00Z',
+ 'Version' => '2006-04-01'
+ }
+ sig = SQS::Signature.new( :secret => 'abc123', :params => params )
+ expected = CGI.escape( Base64.encode64( Digest::SHA1.hexdigest(
+ 'Action' + 'CreateQueue' +
+ 'AWSAccessKeyId' + '0A8BDF2G9KCB3ZNKFA82' +
+ 'Expires' + '2007-01-12T12:00:00Z' +
+ 'QueueName' + 'queue2' +
+ 'SignatureVersion' + '1' +
+ 'Version' + '2006-04-01' +
+ 'abc123'
+ ) ) )
+ assert_equal expected, sig.to_s
+ end
+end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
926b42c8d256dea966e93a3f0101a9f6808972ef
|
some project setup
|
diff --git a/README.txt b/README.txt
index 9b0dd55..7d2057b 100644
--- a/README.txt
+++ b/README.txt
@@ -1,48 +1,72 @@
-= JackTheRipper
+= JackTheRIPper
-* FIX (url)
+http://johnwilger.com/search?q=JackTheRIPper
== DESCRIPTION:
-FIX (describe your package)
+RIPs Postscript documents (PDF, AI, EPS, etc) and performs transformations on
+raster images such as scaling and padding based on instructions pulled from
+Amazon SQS. Source files are pulled from a URI when the instruction is
+processed, and result files are published to a URI specified by the client.
+
+Currently, only RIPing and transforming on OSX machines is supported; support
+for other platforms is planned for subsequent releases. On OSX, all RIP and
+transformation operations are performed using the SIPS utility.
== FEATURES/PROBLEMS:
-* FIX (list of features or problems)
+* Receives instructions from Amazon SQS
+* Retrieves source images from a URI specified in the instruction
+* Rasterizes Postscript documents (only the first page of multi-page documents)
+* Scales and optionally pads raster images
+* Publishes resulting image to a URI specified in the transformation instruction
== SYNOPSIS:
- FIX (code sample of usage)
+Just run `jack_the_ripper start` to start the service as a daemon.
+
+Run `jack_the_ripper stop` to stop the service. Any currently processing
+instructions will complete before the system exits. Pass the --kill switch
+to stop immediately.
+
+By default, the system will store its pidfile in /var/run/jack_the_ripper.pid
+and its log file in /var/log/jack_the_ripper.log. You can override these
+locations when starting the system by passing the --pid and --log switches,
+respectively. Note that if you change the location of the pidfile, you will
+also need to pass the --pid option when stopping the system.
== REQUIREMENTS:
-* FIX (list of requirements)
+* OSX 10.5.x
+* Ruby 1.8.6
+* An Amazon AWS account
+* An SQS queue
== INSTALL:
-* FIX (sudo gem install, anything else)
+ sudo gem install JackTheRIPper
== LICENSE:
(The MIT License)
-Copyright (c) 2008 FIX
+Copyright (c) 2008 John Wilger
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Rakefile b/Rakefile
index 433fa34..e5dbd83 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,17 +1,19 @@
# -*- ruby -*-
require 'rubygems'
require 'hoe'
require './lib/jack_the_ripper.rb'
-Hoe.new('JackTheRipper', JackTheRipper::VERSION) do |p|
+Hoe.new('JackTheRIPper', JackTheRipper::VERSION) do |p|
p.rubyforge_name = 'jack_the_ripper'
- # p.author = 'FIX'
- # p.email = 'FIX'
- # p.summary = 'FIX'
- # p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
- # p.url = p.paragraphs_of('README.txt', 0).first.split(/\n/)[1..-1]
+ p.author = 'John Wilger'
+ p.email = '[email protected]'
+ p.summary = 'RIP Postscript documents and transform images based on ' +
+ 'instructions pulled from Amazon SQS'
+ p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
+ p.url = 'http://johnwilger.com/search?q=JackTheRIPper'
p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
+ p.extra_deps = %w( )
end
# vim: syntax=Ruby
diff --git a/bin/jack_the_ripper b/bin/jack_the_ripper
index e69de29..22581a3 100644
--- a/bin/jack_the_ripper
+++ b/bin/jack_the_ripper
@@ -0,0 +1,2 @@
+#!/usr/bin/env ruby
+puts "Hello World!"
\ No newline at end of file
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
index 5f844a7..041f181 100644
--- a/lib/jack_the_ripper.rb
+++ b/lib/jack_the_ripper.rb
@@ -1,3 +1,3 @@
class JackTheRipper
- VERSION = '1.0.0'
+ VERSION = '0.1.0'
end
\ No newline at end of file
|
jwilger/jack-the-ripper
|
9d264b745684ea62235cd67ca5744eda661c0f8c
|
initial import
|
diff --git a/History.txt b/History.txt
new file mode 100644
index 0000000..d656c5f
--- /dev/null
+++ b/History.txt
@@ -0,0 +1,5 @@
+=== 1.0.0 / 2008-02-10
+
+* 1 major enhancement
+ * Birthday!
+
diff --git a/Manifest.txt b/Manifest.txt
new file mode 100644
index 0000000..4d6625f
--- /dev/null
+++ b/Manifest.txt
@@ -0,0 +1,7 @@
+History.txt
+Manifest.txt
+README.txt
+Rakefile
+bin/jack_the_ripper
+lib/jack_the_ripper.rb
+test/test_jack_the_ripper.rb
\ No newline at end of file
diff --git a/README.txt b/README.txt
new file mode 100644
index 0000000..9b0dd55
--- /dev/null
+++ b/README.txt
@@ -0,0 +1,48 @@
+= JackTheRipper
+
+* FIX (url)
+
+== DESCRIPTION:
+
+FIX (describe your package)
+
+== FEATURES/PROBLEMS:
+
+* FIX (list of features or problems)
+
+== SYNOPSIS:
+
+ FIX (code sample of usage)
+
+== REQUIREMENTS:
+
+* FIX (list of requirements)
+
+== INSTALL:
+
+* FIX (sudo gem install, anything else)
+
+== LICENSE:
+
+(The MIT License)
+
+Copyright (c) 2008 FIX
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Rakefile b/Rakefile
new file mode 100644
index 0000000..433fa34
--- /dev/null
+++ b/Rakefile
@@ -0,0 +1,17 @@
+# -*- ruby -*-
+
+require 'rubygems'
+require 'hoe'
+require './lib/jack_the_ripper.rb'
+
+Hoe.new('JackTheRipper', JackTheRipper::VERSION) do |p|
+ p.rubyforge_name = 'jack_the_ripper'
+ # p.author = 'FIX'
+ # p.email = 'FIX'
+ # p.summary = 'FIX'
+ # p.description = p.paragraphs_of('README.txt', 2..5).join("\n\n")
+ # p.url = p.paragraphs_of('README.txt', 0).first.split(/\n/)[1..-1]
+ p.changes = p.paragraphs_of('History.txt', 0..1).join("\n\n")
+end
+
+# vim: syntax=Ruby
diff --git a/bin/jack_the_ripper b/bin/jack_the_ripper
new file mode 100644
index 0000000..e69de29
diff --git a/lib/jack_the_ripper.rb b/lib/jack_the_ripper.rb
new file mode 100644
index 0000000..5f844a7
--- /dev/null
+++ b/lib/jack_the_ripper.rb
@@ -0,0 +1,3 @@
+class JackTheRipper
+ VERSION = '1.0.0'
+end
\ No newline at end of file
diff --git a/test/test_jack_the_ripper.rb b/test/test_jack_the_ripper.rb
new file mode 100644
index 0000000..e69de29
|
straup/gae-flickrforbusypeople
|
34dcc5e065f3e823a21c7f12dca1bd45c957ebcc
|
favicon and touch icon
|
diff --git a/app.yaml b/app.yaml
index 2b4e6f8..f0ddf3b 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,35 +1,43 @@
application: flickrforbusypeople
version: 11
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
+- url: /favicon.ico
+ static_files: images/favicon.ico
+ upload: images/favicon.ico
+
+- url: /apple-touch-icon.png
+ static_files: images/apple-touch-icon.png
+ upload: images/apple-touch-icon.png
+
- url: /images
static_dir: images
- url: /admin/.*
script: admin.py
login: admin
- url: .*
script: main.py
diff --git a/images/apple-touch-icon.png b/images/apple-touch-icon.png
new file mode 100644
index 0000000..9e1d9cd
Binary files /dev/null and b/images/apple-touch-icon.png differ
diff --git a/images/favicon.ico b/images/favicon.ico
new file mode 100644
index 0000000..34edad9
Binary files /dev/null and b/images/favicon.ico differ
|
straup/gae-flickrforbusypeople
|
7fa57284494fefd470abf72242bf854244425554
|
add ffbp_deferred_loading to sample config
|
diff --git a/config.py.sample b/config.py.sample
index 5e5e9b1..12e35ff 100644
--- a/config.py.sample
+++ b/config.py.sample
@@ -1,6 +1,7 @@
config = {
'flickr_apikey' : 'YER_FLICKR_APIKEY',
'flickr_apisecret' : 'YER_FLICKR_APISECRET',
'flickr_minperms' : 'read',
'javascript_logging' : 0,
+ 'ffbp_deferred_loading' : True,
};
|
straup/gae-flickrforbusypeople
|
10c64912b06c894041bd9cb387bd80be2ece8538
|
deferred loading (did I never commit this?) and sad face notice inre: API fail
|
diff --git a/app.yaml b/app.yaml
index 3ba5f2f..2b4e6f8 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,35 +1,35 @@
application: flickrforbusypeople
-version: 10
+version: 11
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
- url: /admin/.*
script: admin.py
login: admin
- url: .*
script: main.py
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index 545172c..9ff5ae9 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,160 +1,160 @@
from config import config
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
- contacts_crumb = self.generate_crumb(self.user, 'method=contacts')
+ contacts_crumb = self.generate_crumb(self.user, 'method=contacts')
self.assign('search_crumb', search_crumb)
self.assign('contacts_crumb', contacts_crumb)
-
+
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
- contacts_8h = self.get_contacts('8h', contacts_4h['filter'])
-
+ contacts_8h = self.get_contacts('8h', contacts_4h['filter'])
+
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
- slices.append(contacts_8h)
+ slices.append(contacts_8h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
elif duration == '8h' :
- hours = 8
+ hours = 8
else :
duration = 1
hours = 1
-
+
offset = 60 * 60 * hours
dt = int(time.time() - offset)
- if config['ffbp_deferred_loading'] :
+ if config['ffbp_deferred_loading'] :
return {'contacts' : [], 'filter' : filter, 'error' : None, 'defer' : 1, 'offset' : dt, 'duration' : duration, 'count' : 0 }
contacts_filter = self.user.settings.search_in_contacts_filter
# TO DO: Backet times, so 30 minutes becomes 0-30 minutes
# and 2hr becomes 30-120 minutes and so on. This requires
# changes in the Flickr API itself.
-
+
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
-
+
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
-
+
contacts = []
new_filter = filter
-
+
if not rsp or rsp['stat'] != 'ok' :
error = 'Hrm. Something went wrong calling the Flickr API...'
if rsp :
error = rsp['message']
-
+
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
-
+
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
-
+
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
-
+
icon = self.flickr_get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
-
+
contacts.append(user)
new_filter.append(c['nsid'])
-
+
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
#
filter = self.request.get('filter')
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
#
embiggen = self.request.get('embiggen')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
if embiggen == 'yes' :
ffbp.Settings.embiggen_photos(self.user.nsid, True)
else :
ffbp.Settings.embiggen_photos(self.user.nsid, False)
#
self.redirect('/')
diff --git a/ffbp/Auth/__init__.pyc b/ffbp/Auth/__init__.pyc
index 8944b22..0d1c544 100644
Binary files a/ffbp/Auth/__init__.pyc and b/ffbp/Auth/__init__.pyc differ
diff --git a/ffbp/Settings/__init__.pyc b/ffbp/Settings/__init__.pyc
index 68342c9..eb9db0c 100644
Binary files a/ffbp/Settings/__init__.pyc and b/ffbp/Settings/__init__.pyc differ
diff --git a/ffbp/Tables/__init__.pyc b/ffbp/Tables/__init__.pyc
index 72b9c6f..e8c0b40 100644
Binary files a/ffbp/Tables/__init__.pyc and b/ffbp/Tables/__init__.pyc differ
diff --git a/ffbp/__init__.py b/ffbp/__init__.py
index 93a41d7..55a5cda 100644
--- a/ffbp/__init__.py
+++ b/ffbp/__init__.py
@@ -1,19 +1,18 @@
from FlickrApp.Handlers import FlickrAppRequest
from config import config
import ffbp.Settings
class Request (FlickrAppRequest) :
def __init__ (self) :
FlickrAppRequest.__init__(self, config)
-
+
def check_logged_in (self, min_perms) :
if not FlickrAppRequest.check_logged_in(self, min_perms) :
return False
settings = ffbp.Settings.get_settings_for_user(self.user.nsid)
self.user.settings = settings
-
+
return True
-
diff --git a/ffbp/__init__.pyc b/ffbp/__init__.pyc
index f225f27..5ba5762 100644
Binary files a/ffbp/__init__.pyc and b/ffbp/__init__.pyc differ
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index 1487d0e..fc96741 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,367 +1,367 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
this.contacts_seen = {};
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
if ((id == uid) && (el.css('display') == 'none')){
el.show();
continue;
}
el.hide();
}
}
if ($("#" + uid).length){
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['search_crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
var embiggen = rsp.embiggen;
var mobile = rsp.mobile;
var count = parseInt(rsp.photos.total);
if ((embiggen) && (count > 20)){
embiggen = 0;
html += '<div class="donot_embiggen">Embiggen-ing has been disabled for ' + rsp['photos']['photo'][0]['ownername'] + '\' photos, this time, because there are way too many of them to display at once. It\'s probably a job best handled by <a href="http://www.flickr.com/photos/' + rsp['photos']['photo'][0]['owner'] + '" target="_flickr">their Flickr photostream</a>.</div>';
}
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
if (embiggen){
src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret']
if (mobile){
src += '_m';
}
src += '.jpg';
}
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
if (embiggen){
img += '<img src="' + src + '" style="border:4px solid #' + short_hex + ';" />';
}
else {
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
}
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
if (embiggen){
html += '<div class="slice_thumb_title">' + ph['title'] + '...</div>';
}
else {
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
}
html += '</div>';
}
html += '</div>';
$(container).append(html);
if (duration == '8h'){
window.location.href = "#slice_" + duration;
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
info.aaronland.ffbp.Photos.prototype.fetch_contacts = function(offset){
var _self = this;
var doThisOnSuccess = function(rsp){
var count = parseInt(rsp['count']);
if (! count){
$("#slice_noone_" + offset).html("Nothing new...");
return;
}
var html = '';
var contacts = 0;
for (var i=0; i < count; i++){
var contact = rsp['contacts'][i];
var nsid = contact['nsid'];
if (_self['contacts_seen'][nsid]){
continue;
}
_self['contacts_seen'][nsid] = rsp['offset'];
contacts += 1;
html += '<div id="photos_' + contact['nsid_hex'] + '" class="photos_hex">';
html += '<a href="#" onclick="window.ffbp.show_photos(\'' + contact['nsid'] + '\', \'' + rsp['offset'] + '\', \'' + rsp['duration'] + '\');return false;" title="yay! new photos from ' + contact['username'] + '">';
html += '<img id="buddy_' + contact['nsid_hex'] + '" src="' + contact['buddyicon'] + '" height="48" width="48" class="buddy_hex" style="border:3px solid #' + contact['nsid_short_hex'] + ';" alt="' + contact['username'] + '" />';
html += '</a>';
html += '<div id="count_thumbs_' + contact['nsid_hex'] + '" class="count_thumbs_hex">';
html += '<a href="http://www.flickr.com/photos/' + contact['nsid'] + '" target="' + contact['nsid_hex'] + '">';
if (parseInt(contact['count']) == 1){
html += '<strong>1</strong> photo';
}
else {
html += '<strong>' + contact['count'] + '</strong> photos';
}
html += '</a>';
html += '</div>';
html += '</div>';
}
if (! contacts){
$("#slice_noone_" + offset).html("Nothing new...");
return;
}
html += '<br clear="all" />';
html += '<div class="status" id="status_' + rsp['duration'] + '"></div>';
html += '<div class="slice_thumbs" id="slice_thumbs_' + rsp['duration'] + '"></div>';
html += '<br clear="all" />';
$("#slice_" + offset).html(html);
};
var doThisIfNot = function(rsp){
var html = '';
html += '<span style="font-size:small;">';
html += 'I give up! The magic future-world we keep dreaming of says: <em>' + rsp['error']['message'] + '</em>';
html += '</span>';
$("#slice_noone_" + offset).html(html);
return;
};
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'offset' : offset,
'format' : 'json',
'crumb' : this.args['contacts_crumb'],
};
- // Note: We are calling the ffbp API rather than the Flickr API
+ // Note: We are calling the ffbp API rather than the Flickr API
// directly. This may need to be revisited in light of token/sig
// stuff. I suppose on possibility would be to have an endpoint
- // that simply generated a sig when passed a bunch of API args
+ // that simply generated a sig when passed a bunch of API args
// and a (very) time-sensitive crumb. That might work for queries
// that are implicity scoped by time but I haven't thought it all
// through yet... (20091107/asc)
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('contacts', search_args, doThisOnSuccess, doThisIfNot);
};
info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "photos_" + hex;
var status_id = "status_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=slice_thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var ctx = $("#photos_" + hex);
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 48;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
ctx.after(html);
ctx = $("#thumb_" + ph['id']);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
\ No newline at end of file
diff --git a/templates/inc_head.html b/templates/inc_head.html
index 4fe896c..8c91863 100644
--- a/templates/inc_head.html
+++ b/templates/inc_head.html
@@ -1,47 +1,47 @@
<html>
<head>
<title>flickr for busy people{% if page_title %}{{ page_title|escape }}{% endif %}</title>
{% if browser.mobile %}
<link rel="stylesheet" href="/css/mobile.css" type="text/css" />
{% else %}
<link rel="stylesheet" href="/css/main.css" type="text/css" />
{% endif %}
{% if user.username %}
<script type="text/javascript" src="/javascript/jquery.js"></script>
<script type="text/javascript" src="/javascript/md5.js"></script>
<script type="text/javascript" src="/javascript/flickrapp-api.js"></script>
<meta http-equiv="refresh" content="1800" />
{% endif %}
{% if browser.iphone %}
- <meta name="viewport" content="width=340" />
+ <meta name="viewport" content="width=device-width" />
{% endif %}
</head>
<body>
<div id="whoami">
{% if browser.mobile %}
<strong>ffbp</strong>{% if user.username %} / {{ user.username|escape }} / <a href="/signout">signout</a>{% endif %}
{% else %}
{% if user.username %}
You are logged in with your <a href="http://www.flickr.com/photos/{% ifequal user.path_alias "" %}{{ user.nsid|escape }}{% else %}{{ user.path_alias|escape }}{% endifequal %}/" class="fl_username">{{ user.username|escape }}</a> Flickr account
<form method="POST" action="/signout" id="signout">
<input type="hidden" name="crumb" value="{{ logout_crumb|escape }}" />
<input type="submit" value="sign out" />
</form>
{% else %}
<form method="GET" action="/signin" id="signin">
<input type="hidden" name="crumb" value="{{ login_crumb|escape }}" />
<input type="submit" value="sign in" />
</form>
{% endif %}
{% endif %}
</div>
<br clear="all" />
<div id="main">
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index 662d00e..afab726 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,100 +1,94 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
{% ifequal slice.duration '8h' %}8 hours{% endifequal %}
{% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
<div class="slice_contacts">
<a name="slice_{{ slice.duration|escape }}"></a>
<div id="slice_{{ slice.duration|escape }}">
{% ifequal slice.count 0 %}
<div class="slice_noone" id="slice_noone_{{ slice.duration|escape }}">
{% ifequal slice.defer 1 %}
Fetching contacts...
<script type="text/javascript">
- var offset = 200 * parseInt('{{ slice.duration|escape }}'.substring(0,1));
- var delay = Math.floor(Math.random() * offset);
-
- setTimeout(function(){
+ $(document).ready(function(){
window.ffbp.fetch_contacts('{{ slice.duration|escape }}');
- }, delay);
+ });
</script>
{% else %}
{% if slice.error %}
<span style="font-size:small;">{{ slice.error|escape }} — trying again...</span>
<script type="text/javascript">
- var offset = 200 * parseInt('{{ slice.duration|escape }}'.substring(0,1));
- var delay = Math.floor(Math.random() * offset);
-
- setTimeout(function(){
+ $(document).ready(function(){
window.ffbp.fetch_contacts('{{ slice.duration|escape }}');
- }, delay);
+ });
</script>
{% else %}
Nothing new...
-
{% endif %}
+
{% endifequal %}
</div>
{% else %}
{% for c in slice.contacts %}
<div id="photos_{{ c.nsid_hex|escape }}" class="photos_hex">
<a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;" title="yay! new photos from {{ c.username|escape }}">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" alt="{{ c.username|escape }}" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<script type="text/javascript">
{% for c in slice.contacts %}
window.ffbp.contacts_seen[ '{{ c.nsid|escape }}' ] = '{{ slice.duration|escape }}';
{% endfor %}
</script>
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index a6e31ea..8932757 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,26 +1,28 @@
{% include "inc_head.html" %}
<script type="text/javascript" src="/javascript/ffbp.js"></script>
<script type="text/javascript">
var ffbp_args = {
'host' : '{{ host_url }}',
'search_crumb' : '{{ search_crumb }}',
'contacts_crumb' : '{{ contacts_crumb }}',
};
window.ffbp = new info.aaronland.ffbp.Photos(ffbp_args);
</script>
{% ifequal browser.mobile 0 %}
<br /><br />
{% endifequal %}
{% for slice in slices %}
{% include "inc_main_contacts.html" %}
{% endfor %}
+<p style="font-size:small;">Note: (December 29-ish 2010) It appears as though the <a href="http://www.flickr.com/services/api/flickr.contacts.getListRecentlyUploaded.htm">flickr.contacts.getListRecentlyUploaded</a> API method is having some trouble lately. If you're not seeing any recent uploads from <em>any</em> of your contacts, that's probably why. Sad face...</p>
+
{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
d1a7f7a38b4b2fd62dda5da7042b57c7a30e6933
|
if mobile and embiggen, show 240 versions not 500; fussing with whitespace
|
diff --git a/app.yaml b/app.yaml
index 61d82df..3ba5f2f 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,35 +1,35 @@
application: flickrforbusypeople
-version: 8
+version: 10
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
- url: /admin/.*
script: admin.py
login: admin
- url: .*
script: main.py
diff --git a/ffbp/API/__init__.py b/ffbp/API/__init__.py
index 491e4d6..b85ab29 100644
--- a/ffbp/API/__init__.py
+++ b/ffbp/API/__init__.py
@@ -1,160 +1,166 @@
from APIApp import APIApp
import ffbp
import time
import md5
import logging
class Dispatch (ffbp.Request, APIApp) :
-
+
def __init__ (self):
ffbp.Request.__init__(self)
APIApp.__init__(self)
def post (self) :
-
+
if not self.check_logged_in(self.min_perms) :
self.api_error(403)
return
method = self.request.get('method')
format = self.request.get('format')
if format and not format in self.valid_formats :
self.api_error(999, 'Not a valid format')
return
if format :
self.format = format
-
+
if method == 'search' :
return self.__search()
elif method == 'contacts' :
return self.__contacts()
else :
self.api_error(404, 'Invalid method')
return
-
+
def ensure_crumb (self, path) :
if not self.validate_crumb(self.user, path, self.request.get('crumb')) :
self.api_error(400, 'Invalid crumb')
return False
return True
def __search (self) :
required = ('crumb', 'user_id', 'min_upload_date')
-
+
if not self.ensure_args(required) :
- return
+ return
if not self.ensure_crumb('method=search') :
return
method = 'flickr.photos.search'
-
+
args = {
'auth_token' : self.user.token,
'user_id' : self.request.get('user_id'),
'min_upload_date' : self.request.get('min_upload_date'),
'extras' : 'owner_name',
}
rsp = self.api_call(method, args)
if not rsp :
return self.api_error()
if rsp['stat'] != 'ok' :
return self.api_error()
- embiggen = 0;
+ self.check_useragent()
+
+ embiggen = 0
+ mobile = 0
+
+ if self.user.settings.embiggen_photos:
+ embiggen = 1
+
+ if self.browser['mobile']:
+ mobile = 1
- if self.user.settings.embiggen_photos :
- embiggen = 1;
-
- return self.api_ok({'photos' : rsp['photos'], 'embiggen' : embiggen})
+ return self.api_ok({'photos' : rsp['photos'], 'embiggen' : embiggen, 'mobile': mobile})
def __contacts (self) :
required = ('crumb', 'offset')
-
+
if not self.ensure_args(required) :
- return
+ return
if not self.ensure_crumb('method=contacts') :
return
duration = self.request.get('offset')
-
+
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
elif duration == '8h' :
- hours = 8
+ hours = 8
else :
duration = 1
hours = 1
-
+
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
-
+
contacts = []
foo = None
-
+
if not rsp or rsp['stat'] != 'ok' :
code = 999
error = 'Hrm. Something went wrong calling the Flickr API...'
if rsp :
code = rsp['code']
error = rsp['message']
self.api_error(code, error)
return
-
+
elif rsp['contacts']['total'] == 0 :
foo = {'contacts' : contacts, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
else :
for c in rsp['contacts']['contact'] :
icon = self.flickr_get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
-
+
contacts.append(user)
-
+
foo = {'contacts' : contacts, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
return self.api_ok(foo)
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index 6a1bc60..1487d0e 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,360 +1,367 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
this.contacts_seen = {};
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
if ((id == uid) && (el.css('display') == 'none')){
el.show();
continue;
}
el.hide();
}
}
if ($("#" + uid).length){
- return;
+ return;
}
-
+
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['search_crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
var embiggen = rsp.embiggen;
-
+ var mobile = rsp.mobile;
+
var count = parseInt(rsp.photos.total);
if ((embiggen) && (count > 20)){
embiggen = 0;
html += '<div class="donot_embiggen">Embiggen-ing has been disabled for ' + rsp['photos']['photo'][0]['ownername'] + '\' photos, this time, because there are way too many of them to display at once. It\'s probably a job best handled by <a href="http://www.flickr.com/photos/' + rsp['photos']['photo'][0]['owner'] + '" target="_flickr">their Flickr photostream</a>.</div>';
}
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
if (embiggen){
- src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '.jpg';
+ src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret']
+
+ if (mobile){
+ src += '_m';
+ }
+
+ src += '.jpg';
}
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
if (embiggen){
img += '<img src="' + src + '" style="border:4px solid #' + short_hex + ';" />';
}
else {
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
}
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
if (embiggen){
html += '<div class="slice_thumb_title">' + ph['title'] + '...</div>';
}
else {
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
}
html += '</div>';
}
html += '</div>';
$(container).append(html);
if (duration == '8h'){
window.location.href = "#slice_" + duration;
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
-
+
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
info.aaronland.ffbp.Photos.prototype.fetch_contacts = function(offset){
var _self = this;
var doThisOnSuccess = function(rsp){
var count = parseInt(rsp['count']);
if (! count){
$("#slice_noone_" + offset).html("Nothing new...");
return;
}
var html = '';
var contacts = 0;
for (var i=0; i < count; i++){
var contact = rsp['contacts'][i];
var nsid = contact['nsid'];
if (_self['contacts_seen'][nsid]){
continue;
}
_self['contacts_seen'][nsid] = rsp['offset'];
contacts += 1;
html += '<div id="photos_' + contact['nsid_hex'] + '" class="photos_hex">';
html += '<a href="#" onclick="window.ffbp.show_photos(\'' + contact['nsid'] + '\', \'' + rsp['offset'] + '\', \'' + rsp['duration'] + '\');return false;" title="yay! new photos from ' + contact['username'] + '">';
html += '<img id="buddy_' + contact['nsid_hex'] + '" src="' + contact['buddyicon'] + '" height="48" width="48" class="buddy_hex" style="border:3px solid #' + contact['nsid_short_hex'] + ';" alt="' + contact['username'] + '" />';
html += '</a>';
html += '<div id="count_thumbs_' + contact['nsid_hex'] + '" class="count_thumbs_hex">';
html += '<a href="http://www.flickr.com/photos/' + contact['nsid'] + '" target="' + contact['nsid_hex'] + '">';
if (parseInt(contact['count']) == 1){
html += '<strong>1</strong> photo';
}
else {
html += '<strong>' + contact['count'] + '</strong> photos';
}
html += '</a>';
html += '</div>';
html += '</div>';
}
if (! contacts){
$("#slice_noone_" + offset).html("Nothing new...");
return;
}
html += '<br clear="all" />';
html += '<div class="status" id="status_' + rsp['duration'] + '"></div>';
html += '<div class="slice_thumbs" id="slice_thumbs_' + rsp['duration'] + '"></div>';
html += '<br clear="all" />';
$("#slice_" + offset).html(html);
};
var doThisIfNot = function(rsp){
var html = '';
- html += '<span style="font-size:.75em;">';
+ html += '<span style="font-size:small;">';
html += 'I give up! The magic future-world we keep dreaming of says: <em>' + rsp['error']['message'] + '</em>';
html += '</span>';
$("#slice_noone_" + offset).html(html);
return;
};
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'offset' : offset,
'format' : 'json',
'crumb' : this.args['contacts_crumb'],
};
// Note: We are calling the ffbp API rather than the Flickr API
// directly. This may need to be revisited in light of token/sig
// stuff. I suppose on possibility would be to have an endpoint
// that simply generated a sig when passed a bunch of API args
// and a (very) time-sensitive crumb. That might work for queries
// that are implicity scoped by time but I haven't thought it all
// through yet... (20091107/asc)
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('contacts', search_args, doThisOnSuccess, doThisIfNot);
};
info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "photos_" + hex;
var status_id = "status_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=slice_thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var ctx = $("#photos_" + hex);
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 48;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
ctx.after(html);
ctx = $("#thumb_" + ph['id']);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
\ No newline at end of file
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index 4b4d3c3..662d00e 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,100 +1,100 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
{% ifequal slice.duration '8h' %}8 hours{% endifequal %}
{% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
<div class="slice_contacts">
<a name="slice_{{ slice.duration|escape }}"></a>
<div id="slice_{{ slice.duration|escape }}">
{% ifequal slice.count 0 %}
<div class="slice_noone" id="slice_noone_{{ slice.duration|escape }}">
{% ifequal slice.defer 1 %}
Fetching contacts...
<script type="text/javascript">
var offset = 200 * parseInt('{{ slice.duration|escape }}'.substring(0,1));
var delay = Math.floor(Math.random() * offset);
-
+
setTimeout(function(){
window.ffbp.fetch_contacts('{{ slice.duration|escape }}');
}, delay);
</script>
{% else %}
{% if slice.error %}
- <span style="font-size:.75em;">{{ slice.error|escape }} — trying again...</span>
+ <span style="font-size:small;">{{ slice.error|escape }} — trying again...</span>
<script type="text/javascript">
var offset = 200 * parseInt('{{ slice.duration|escape }}'.substring(0,1));
var delay = Math.floor(Math.random() * offset);
setTimeout(function(){
window.ffbp.fetch_contacts('{{ slice.duration|escape }}');
}, delay);
</script>
{% else %}
Nothing new...
{% endif %}
{% endifequal %}
</div>
{% else %}
{% for c in slice.contacts %}
-
+
<div id="photos_{{ c.nsid_hex|escape }}" class="photos_hex">
<a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;" title="yay! new photos from {{ c.username|escape }}">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" alt="{{ c.username|escape }}" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<script type="text/javascript">
{% for c in slice.contacts %}
window.ffbp.contacts_seen[ '{{ c.nsid|escape }}' ] = '{{ slice.duration|escape }}';
{% endfor %}
</script>
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index b8e6af7..a6e31ea 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,26 +1,26 @@
{% include "inc_head.html" %}
<script type="text/javascript" src="/javascript/ffbp.js"></script>
<script type="text/javascript">
var ffbp_args = {
'host' : '{{ host_url }}',
'search_crumb' : '{{ search_crumb }}',
'contacts_crumb' : '{{ contacts_crumb }}',
};
window.ffbp = new info.aaronland.ffbp.Photos(ffbp_args);
-
+
</script>
{% ifequal browser.mobile 0 %}
<br /><br />
{% endifequal %}
{% for slice in slices %}
{% include "inc_main_contacts.html" %}
{% endfor %}
{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
69a0da11b69af3c8186f665300c6083b4b89eed3
|
support for deffered contacts loading
|
diff --git a/app.yaml b/app.yaml
index a048ac1..61d82df 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,31 +1,35 @@
application: flickrforbusypeople
-version: 6
+version: 8
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
+- url: /admin/.*
+ script: admin.py
+ login: admin
+
- url: .*
script: main.py
diff --git a/ffbp/API/__init__.py b/ffbp/API/__init__.py
index 9a1be93..491e4d6 100644
--- a/ffbp/API/__init__.py
+++ b/ffbp/API/__init__.py
@@ -1,69 +1,160 @@
from APIApp import APIApp
import ffbp
+import time
+import md5
+
+import logging
+
class Dispatch (ffbp.Request, APIApp) :
def __init__ (self):
ffbp.Request.__init__(self)
APIApp.__init__(self)
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.api_error(403)
return
method = self.request.get('method')
format = self.request.get('format')
if format and not format in self.valid_formats :
self.api_error(999, 'Not a valid format')
return
if format :
self.format = format
if method == 'search' :
return self.__search()
+ elif method == 'contacts' :
+ return self.__contacts()
+
+ else :
+ self.api_error(404, 'Invalid method')
+ return
+
def ensure_crumb (self, path) :
if not self.validate_crumb(self.user, path, self.request.get('crumb')) :
self.api_error(400, 'Invalid crumb')
return False
return True
def __search (self) :
required = ('crumb', 'user_id', 'min_upload_date')
if not self.ensure_args(required) :
return
if not self.ensure_crumb('method=search') :
return
method = 'flickr.photos.search'
args = {
'auth_token' : self.user.token,
'user_id' : self.request.get('user_id'),
'min_upload_date' : self.request.get('min_upload_date'),
'extras' : 'owner_name',
}
rsp = self.api_call(method, args)
if not rsp :
return self.api_error()
if rsp['stat'] != 'ok' :
return self.api_error()
embiggen = 0;
if self.user.settings.embiggen_photos :
embiggen = 1;
return self.api_ok({'photos' : rsp['photos'], 'embiggen' : embiggen})
+
+ def __contacts (self) :
+
+ required = ('crumb', 'offset')
+
+ if not self.ensure_args(required) :
+ return
+
+ if not self.ensure_crumb('method=contacts') :
+ return
+
+ duration = self.request.get('offset')
+
+ if duration == '30m' :
+ hours = .5
+ elif duration == '2h' :
+ hours = 2
+ elif duration == '4h' :
+ hours = 4
+ elif duration == '8h' :
+ hours = 8
+ else :
+ duration = 1
+ hours = 1
+
+ offset = 60 * 60 * hours
+ dt = int(time.time() - offset)
+
+ contacts_filter = self.user.settings.search_in_contacts_filter
+
+ args = {
+ 'auth_token' : self.user.token,
+ 'date_lastupload' : dt,
+ 'filter' : contacts_filter,
+ }
+
+ rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
+
+ contacts = []
+
+ foo = None
+
+ if not rsp or rsp['stat'] != 'ok' :
+
+ code = 999
+ error = 'Hrm. Something went wrong calling the Flickr API...'
+
+ if rsp :
+ code = rsp['code']
+ error = rsp['message']
+
+ self.api_error(code, error)
+ return
+
+ elif rsp['contacts']['total'] == 0 :
+ foo = {'contacts' : contacts, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
+
+ else :
+ for c in rsp['contacts']['contact'] :
+
+ icon = self.flickr_get_buddyicon(c['nsid'])
+
+ hex = md5.new(c['nsid']).hexdigest()
+ short_hex = hex[0:6]
+
+ user = {
+ 'username' : c['username'],
+ 'nsid' : c['nsid'],
+ 'nsid_hex' : hex,
+ 'nsid_short_hex' : short_hex,
+ 'count' : c['photos_uploaded'],
+ 'buddyicon' : icon,
+ }
+
+ contacts.append(user)
+
+ foo = {'contacts' : contacts, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
+
+ return self.api_ok(foo)
diff --git a/ffbp/API/__init__.pyc b/ffbp/API/__init__.pyc
index 4ec4faa..2e652fc 100644
Binary files a/ffbp/API/__init__.pyc and b/ffbp/API/__init__.pyc differ
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index 9327247..545172c 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,153 +1,160 @@
+from config import config
+
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
+ contacts_crumb = self.generate_crumb(self.user, 'method=contacts')
self.assign('search_crumb', search_crumb)
+ self.assign('contacts_crumb', contacts_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
contacts_8h = self.get_contacts('8h', contacts_4h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
slices.append(contacts_8h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
elif duration == '8h' :
hours = 8
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
+ if config['ffbp_deferred_loading'] :
+ return {'contacts' : [], 'filter' : filter, 'error' : None, 'defer' : 1, 'offset' : dt, 'duration' : duration, 'count' : 0 }
+
contacts_filter = self.user.settings.search_in_contacts_filter
# TO DO: Backet times, so 30 minutes becomes 0-30 minutes
# and 2hr becomes 30-120 minutes and so on. This requires
# changes in the Flickr API itself.
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
-
+
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
- error = 'INVISIBLE ERRORCAT HISSES AT YOU'
+ error = 'Hrm. Something went wrong calling the Flickr API...'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
icon = self.flickr_get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
#
filter = self.request.get('filter')
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
#
embiggen = self.request.get('embiggen')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
if embiggen == 'yes' :
ffbp.Settings.embiggen_photos(self.user.nsid, True)
else :
ffbp.Settings.embiggen_photos(self.user.nsid, False)
#
self.redirect('/')
diff --git a/ffbp/App/__init__.pyc b/ffbp/App/__init__.pyc
index 3fdadf8..dc6cd16 100644
Binary files a/ffbp/App/__init__.pyc and b/ffbp/App/__init__.pyc differ
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index c188f25..6a1bc60 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,260 +1,360 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
+
+ this.contacts_seen = {};
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
if ((id == uid) && (el.css('display') == 'none')){
el.show();
continue;
}
el.hide();
}
}
if ($("#" + uid).length){
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
- 'crumb' : this.args['crumb'],
+ 'crumb' : this.args['search_crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
var embiggen = rsp.embiggen;
var count = parseInt(rsp.photos.total);
if ((embiggen) && (count > 20)){
embiggen = 0;
html += '<div class="donot_embiggen">Embiggen-ing has been disabled for ' + rsp['photos']['photo'][0]['ownername'] + '\' photos, this time, because there are way too many of them to display at once. It\'s probably a job best handled by <a href="http://www.flickr.com/photos/' + rsp['photos']['photo'][0]['owner'] + '" target="_flickr">their Flickr photostream</a>.</div>';
}
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
if (embiggen){
src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '.jpg';
}
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
if (embiggen){
img += '<img src="' + src + '" style="border:4px solid #' + short_hex + ';" />';
}
else {
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
}
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
if (embiggen){
html += '<div class="slice_thumb_title">' + ph['title'] + '...</div>';
}
else {
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
}
html += '</div>';
}
html += '</div>';
$(container).append(html);
if (duration == '8h'){
window.location.href = "#slice_" + duration;
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
+info.aaronland.ffbp.Photos.prototype.fetch_contacts = function(offset){
+
+ var _self = this;
+
+ var doThisOnSuccess = function(rsp){
+
+ var count = parseInt(rsp['count']);
+
+ if (! count){
+ $("#slice_noone_" + offset).html("Nothing new...");
+ return;
+ }
+
+ var html = '';
+ var contacts = 0;
+
+ for (var i=0; i < count; i++){
+
+ var contact = rsp['contacts'][i];
+ var nsid = contact['nsid'];
+
+ if (_self['contacts_seen'][nsid]){
+ continue;
+ }
+
+ _self['contacts_seen'][nsid] = rsp['offset'];
+ contacts += 1;
+
+ html += '<div id="photos_' + contact['nsid_hex'] + '" class="photos_hex">';
+
+ html += '<a href="#" onclick="window.ffbp.show_photos(\'' + contact['nsid'] + '\', \'' + rsp['offset'] + '\', \'' + rsp['duration'] + '\');return false;" title="yay! new photos from ' + contact['username'] + '">';
+ html += '<img id="buddy_' + contact['nsid_hex'] + '" src="' + contact['buddyicon'] + '" height="48" width="48" class="buddy_hex" style="border:3px solid #' + contact['nsid_short_hex'] + ';" alt="' + contact['username'] + '" />';
+ html += '</a>';
+
+ html += '<div id="count_thumbs_' + contact['nsid_hex'] + '" class="count_thumbs_hex">';
+ html += '<a href="http://www.flickr.com/photos/' + contact['nsid'] + '" target="' + contact['nsid_hex'] + '">';
+
+ if (parseInt(contact['count']) == 1){
+ html += '<strong>1</strong> photo';
+ }
+
+ else {
+ html += '<strong>' + contact['count'] + '</strong> photos';
+ }
+
+ html += '</a>';
+ html += '</div>';
+ html += '</div>';
+ }
+
+ if (! contacts){
+ $("#slice_noone_" + offset).html("Nothing new...");
+ return;
+ }
+
+ html += '<br clear="all" />';
+
+ html += '<div class="status" id="status_' + rsp['duration'] + '"></div>';
+ html += '<div class="slice_thumbs" id="slice_thumbs_' + rsp['duration'] + '"></div>';
+ html += '<br clear="all" />';
+
+ $("#slice_" + offset).html(html);
+ };
+
+ var doThisIfNot = function(rsp){
+
+ var html = '';
+
+ html += '<span style="font-size:.75em;">';
+ html += 'I give up! The magic future-world we keep dreaming of says: <em>' + rsp['error']['message'] + '</em>';
+ html += '</span>';
+
+ $("#slice_noone_" + offset).html(html);
+ return;
+ };
+
+ var api_args = {
+ 'host' : this.args['host'],
+ };
+
+ var search_args = {
+ 'offset' : offset,
+ 'format' : 'json',
+ 'crumb' : this.args['contacts_crumb'],
+ };
+
+ // Note: We are calling the ffbp API rather than the Flickr API
+ // directly. This may need to be revisited in light of token/sig
+ // stuff. I suppose on possibility would be to have an endpoint
+ // that simply generated a sig when passed a bunch of API args
+ // and a (very) time-sensitive crumb. That might work for queries
+ // that are implicity scoped by time but I haven't thought it all
+ // through yet... (20091107/asc)
+
+ var api = new info.aaronland.flickrapp.API(api_args)
+ api.api_call('contacts', search_args, doThisOnSuccess, doThisIfNot);
+};
+
info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "photos_" + hex;
var status_id = "status_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=slice_thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var ctx = $("#photos_" + hex);
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 48;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
ctx.after(html);
ctx = $("#thumb_" + ph['id']);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
-};
+};
\ No newline at end of file
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index bbca40b..4b4d3c3 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,51 +1,100 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
{% ifequal slice.duration '8h' %}8 hours{% endifequal %}
{% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
<div class="slice_contacts">
+ <a name="slice_{{ slice.duration|escape }}"></a>
+
+ <div id="slice_{{ slice.duration|escape }}">
+
{% ifequal slice.count 0 %}
- <div class="slice_noone">Nothing new...</div>
+ <div class="slice_noone" id="slice_noone_{{ slice.duration|escape }}">
+
+ {% ifequal slice.defer 1 %}
+
+ Fetching contacts...
+
+ <script type="text/javascript">
+
+ var offset = 200 * parseInt('{{ slice.duration|escape }}'.substring(0,1));
+ var delay = Math.floor(Math.random() * offset);
+
+ setTimeout(function(){
+ window.ffbp.fetch_contacts('{{ slice.duration|escape }}');
+ }, delay);
+
+ </script>
+
+ {% else %}
+
+ {% if slice.error %}
+
+ <span style="font-size:.75em;">{{ slice.error|escape }} — trying again...</span>
+
+ <script type="text/javascript">
+
+ var offset = 200 * parseInt('{{ slice.duration|escape }}'.substring(0,1));
+ var delay = Math.floor(Math.random() * offset);
+
+ setTimeout(function(){
+ window.ffbp.fetch_contacts('{{ slice.duration|escape }}');
+ }, delay);
+
+ </script>
+
+ {% else %}
+ Nothing new...
+
+ {% endif %}
+ {% endifequal %}
+
+ </div>
{% else %}
{% for c in slice.contacts %}
-
- <a name="slice_{{ slice.duration|escape }}"></a>
<div id="photos_{{ c.nsid_hex|escape }}" class="photos_hex">
<a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;" title="yay! new photos from {{ c.username|escape }}">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" alt="{{ c.username|escape }}" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
+<script type="text/javascript">
+{% for c in slice.contacts %}
+ window.ffbp.contacts_seen[ '{{ c.nsid|escape }}' ] = '{{ slice.duration|escape }}';
+{% endfor %}
+</script>
+
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
+</div>
</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index 907d299..b8e6af7 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,25 +1,26 @@
{% include "inc_head.html" %}
<script type="text/javascript" src="/javascript/ffbp.js"></script>
<script type="text/javascript">
var ffbp_args = {
'host' : '{{ host_url }}',
- 'crumb' : '{{ search_crumb }}',
+ 'search_crumb' : '{{ search_crumb }}',
+ 'contacts_crumb' : '{{ contacts_crumb }}',
};
window.ffbp = new info.aaronland.ffbp.Photos(ffbp_args);
</script>
{% ifequal browser.mobile 0 %}
<br /><br />
{% endifequal %}
{% for slice in slices %}
{% include "inc_main_contacts.html" %}
{% endfor %}
{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
ba4cae978241a8970109a7b51778be35a703c91f
|
do not embiggen if photo count for a user is > 20
|
diff --git a/css/main.css b/css/main.css
index 6e0cee8..b5285ef 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,264 +1,270 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#ff0084;
font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:25px;
}
#example_image img {
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:2em;
margin-bottom:50px;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:15%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
text-align:right;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#settings {
margin-left:12%;
}
#settings p {
font-size:small;
}
#settings_dothis {
text-align:right;
margin-right:5%;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
+.donot_embiggen {
+ font-size: 11px;
+ font-style: italic;
+ margin-bottom: 20px;
+}
+
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin15 {
margin-left:15%;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
//text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
.submit {
border:3px solid #ccc;
background-color:#336699;
padding:10px;
font-size:large;
font-weight:700;
color:white;
margin-top:20px;
}
\ No newline at end of file
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index 6909d19..c188f25 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,252 +1,260 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
if ((id == uid) && (el.css('display') == 'none')){
el.show();
continue;
}
el.hide();
}
}
if ($("#" + uid).length){
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
var embiggen = rsp.embiggen;
+
+ var count = parseInt(rsp.photos.total);
+
+ if ((embiggen) && (count > 20)){
+ embiggen = 0;
+
+ html += '<div class="donot_embiggen">Embiggen-ing has been disabled for ' + rsp['photos']['photo'][0]['ownername'] + '\' photos, this time, because there are way too many of them to display at once. It\'s probably a job best handled by <a href="http://www.flickr.com/photos/' + rsp['photos']['photo'][0]['owner'] + '" target="_flickr">their Flickr photostream</a>.</div>';
+ }
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
if (embiggen){
src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '.jpg';
}
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
if (embiggen){
img += '<img src="' + src + '" style="border:4px solid #' + short_hex + ';" />';
}
else {
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
}
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
if (embiggen){
html += '<div class="slice_thumb_title">' + ph['title'] + '...</div>';
}
else {
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
}
html += '</div>';
}
html += '</div>';
$(container).append(html);
if (duration == '8h'){
window.location.href = "#slice_" + duration;
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "photos_" + hex;
var status_id = "status_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=slice_thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var ctx = $("#photos_" + hex);
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 48;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
ctx.after(html);
ctx = $("#thumb_" + ph['id']);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index 25f233f..bbca40b 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,51 +1,51 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
{% ifequal slice.duration '8h' %}8 hours{% endifequal %}
{% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
<div class="slice_contacts">
{% ifequal slice.count 0 %}
<div class="slice_noone">Nothing new...</div>
{% else %}
{% for c in slice.contacts %}
<a name="slice_{{ slice.duration|escape }}"></a>
<div id="photos_{{ c.nsid_hex|escape }}" class="photos_hex">
- <a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
- <img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" />
+ <a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;" title="yay! new photos from {{ c.username|escape }}">
+ <img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" alt="{{ c.username|escape }}" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
diff --git a/templates/settings.html b/templates/settings.html
index 3f167af..3441659 100644
--- a/templates/settings.html
+++ b/templates/settings.html
@@ -1,66 +1,66 @@
{% include "inc_head.html" %}
<div id="settings">
{% if error %}
<p class="error">
{% ifequal error 'invalid_crumb' %}
Hrm. Your session seems to have expired.
{% endifequal %}
{% ifequal error 'invalid_filter' %}
Hey! That's an invalid filter.
{% endifequal %}
</p>
<p><a href="/settings">Would you like to try again?</a></p>
{% else %}
<form method="POST" action="/settings" id="update_settings">
<input type="hidden" name="crumb" value="{{ settings_crumb|escape }}" />
<h2>Whose photos do you want to see?</h2>
<p>This setting will filter which contacts with recent uploads you will see updates for.</p>
<br />
<input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked{% endifequal %} /> <label for="all">All of my contacts</label>
<br style="margin-bottom:15px;" />
<input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked{% endifequal %} /> <label for="ff">Only friends and family</label>
<h2>How big should those photos be?</h2>
- <p>This setting will determine whether to display small thumbnails or large (500 pixels) versions of your contacts photos.</p>
+ <p>This setting will determine whether to display small thumbnails or large — 500 pixels — versions of your contacts photos.</p>
- <p style="font-style:italic;">You may regret embiggen-izing photos when one of your contacts posts 300 wedding pictures in one go but that's your business.</p>
+ <p style="font-style:italic;font-size:11px;">(If a contact has uploaded more than 20 photos in a given time slice (30 minutes, 4 hours, etc.) then embiggen-ing will be automatically disabled until there aren't quite so many photos to show at once.)</p>
<br />
{% comment %} I HATE DJANGO TEMPLATES, YES I DO {% endcomment %}
{% ifequal user.settings.embiggen_photos None %}
<input type="radio" name="embiggen" id="small" value="no" checked /> <label for="small">Small is beautiful</label>
<br style="margin-bottom:15px;" />
<input type="radio" name="embiggen" id="big" value="yes" /> <label for="big">Embiggen the photos, please</label>
{% else %}
<input type="radio" name="embiggen" id="small" value="no" {% if user.settings.embiggen_photos %}{% else %}checked{% endif %} /> <label for="small">Small is beautiful</label>
<br style="margin-bottom:15px;" />
<input type="radio" name="embiggen" id="big" value="yes" {% if user.settings.embiggen_photos %}checked{% endif %} /> <label for="big">Embiggen-ized photos</label>
{% endifequal %}
<br /><br />
<div id="settings_dothis">
<input type="submit" value="UPDATE" class="submit" />
<br /><br />
<p>Or just go back to your <a href="/">recent uploads page.</p>
</div>
</form>
{% endif %}
</div>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
2bfe4b7b0d89ad4c8a6562b7637a944efeadf47d
|
embiggenizing - make it so
|
diff --git a/app.yaml b/app.yaml
index 0879066..a048ac1 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,31 +1,31 @@
application: flickrforbusypeople
-version: 5
+version: 6
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
- url: .*
script: main.py
diff --git a/css/main.css b/css/main.css
index d923a6d..6e0cee8 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,246 +1,264 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#ff0084;
font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:25px;
}
#example_image img {
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:2em;
margin-bottom:50px;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:15%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
+ text-align:right;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
+#settings {
+ margin-left:12%;
+}
+
+#settings p {
+ font-size:small;
+}
+
+#settings_dothis {
+ text-align:right;
+ margin-right:5%;
+}
+
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
+.margin15 {
+ margin-left:15%;
+}
+
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
//text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
.submit {
border:3px solid #ccc;
background-color:#336699;
padding:10px;
font-size:large;
font-weight:700;
color:white;
margin-top:20px;
}
\ No newline at end of file
diff --git a/ffbp/API/__init__.py b/ffbp/API/__init__.py
index 4be93ae..9a1be93 100644
--- a/ffbp/API/__init__.py
+++ b/ffbp/API/__init__.py
@@ -1,70 +1,69 @@
from APIApp import APIApp
import ffbp
class Dispatch (ffbp.Request, APIApp) :
def __init__ (self):
ffbp.Request.__init__(self)
APIApp.__init__(self)
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.api_error(403)
return
method = self.request.get('method')
format = self.request.get('format')
if format and not format in self.valid_formats :
self.api_error(999, 'Not a valid format')
return
if format :
self.format = format
if method == 'search' :
return self.__search()
def ensure_crumb (self, path) :
if not self.validate_crumb(self.user, path, self.request.get('crumb')) :
self.api_error(400, 'Invalid crumb')
return False
return True
def __search (self) :
required = ('crumb', 'user_id', 'min_upload_date')
if not self.ensure_args(required) :
return
if not self.ensure_crumb('method=search') :
return
method = 'flickr.photos.search'
args = {
'auth_token' : self.user.token,
'user_id' : self.request.get('user_id'),
'min_upload_date' : self.request.get('min_upload_date'),
'extras' : 'owner_name',
}
rsp = self.api_call(method, args)
if not rsp :
return self.api_error()
if rsp['stat'] != 'ok' :
return self.api_error()
embiggen = 0;
if self.user.settings.embiggen_photos :
embiggen = 1;
return self.api_ok({'photos' : rsp['photos'], 'embiggen' : embiggen})
-
diff --git a/ffbp/API/__init__.pyc b/ffbp/API/__init__.pyc
index b4ccee0..4ec4faa 100644
Binary files a/ffbp/API/__init__.pyc and b/ffbp/API/__init__.pyc differ
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index 044d79f..9327247 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,150 +1,153 @@
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
self.assign('search_crumb', search_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
contacts_8h = self.get_contacts('8h', contacts_4h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
slices.append(contacts_8h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
elif duration == '8h' :
hours = 8
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
# TO DO: Backet times, so 30 minutes becomes 0-30 minutes
# and 2hr becomes 30-120 minutes and so on. This requires
# changes in the Flickr API itself.
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
error = 'INVISIBLE ERRORCAT HISSES AT YOU'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
- icon = self.get_buddyicon(c['nsid'])
+ icon = self.flickr_get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
- # deprecated
-
- def get_user (self, nsid) :
- return self.flickr_get_user_info(nsid)
-
- # deprecated
-
- def get_buddyicon (self, nsid) :
- return self.flickr_get_buddyicon(nsid)
-
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
+ #
+
filter = self.request.get('filter')
+ ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
+
+ #
+
+ embiggen = self.request.get('embiggen')
+
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
-
- ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
+
+ if embiggen == 'yes' :
+ ffbp.Settings.embiggen_photos(self.user.nsid, True)
+ else :
+ ffbp.Settings.embiggen_photos(self.user.nsid, False)
+
+ #
self.redirect('/')
diff --git a/ffbp/App/__init__.pyc b/ffbp/App/__init__.pyc
index 4b20718..3fdadf8 100644
Binary files a/ffbp/App/__init__.pyc and b/ffbp/App/__init__.pyc differ
diff --git a/ffbp/Settings/__init__.py b/ffbp/Settings/__init__.py
index 7e8e138..568571a 100644
--- a/ffbp/Settings/__init__.py
+++ b/ffbp/Settings/__init__.py
@@ -1,33 +1,38 @@
from google.appengine.ext import db
from ffbp.Tables import dbSettings
def get_settings_for_user (nsid, auto_create=True) :
gql = "SELECT * FROM dbSettings WHERE nsid = :1"
res = db.GqlQuery(gql, nsid)
settings = res.get()
if settings :
return settings
if auto_create :
return create_settings_for_user(nsid)
return False
def create_settings_for_user (nsid) :
settings = dbSettings()
settings.nsid = nsid
settings.search_in_contacts_filter = 'all'
settings.put()
return settings
def search_in_contacts_filter (nsid, context='all') :
settings = get_settings_for_user(nsid)
settings.search_in_contacts_filter = context
settings.put()
-
+
+def embiggen_photos (nsid, bool=True) :
+
+ settings = get_settings_for_user(nsid)
+ settings.embiggen_photos = bool
+ settings.put()
diff --git a/ffbp/Settings/__init__.pyc b/ffbp/Settings/__init__.pyc
index 2380094..68342c9 100644
Binary files a/ffbp/Settings/__init__.pyc and b/ffbp/Settings/__init__.pyc differ
diff --git a/ffbp/__init__.pyc b/ffbp/__init__.pyc
index e3639e7..f225f27 100644
Binary files a/ffbp/__init__.pyc and b/ffbp/__init__.pyc differ
diff --git a/templates/inc_main_settings.html b/templates/inc_main_settings.html
index 049457a..c9dfe61 100644
--- a/templates/inc_main_settings.html
+++ b/templates/inc_main_settings.html
@@ -1,20 +1,15 @@
<div id="main_settings">
-{% ifequal user.settings.search_in_contacts_filter 'all' %}
-
{% if browser.mobile %}
-<a href="/settings">Switch to friends and family only</a>.
-
+<a href="/settings">settings</a>.
{% else %}
-You are currently checking for recent uploads from all your contacts. <a href="/settings">Switch to friends and family only</a>.
-{% endif %}
+{% ifequal user.settings.search_in_contacts_filter 'all' %}
+You are currently checking for recent uploads ({% if user.settings.embiggen_photos %}embiggen-ed{% else %}in thumbnail format{% endif %}) from all your contacts
{% else %}
+You are currently checking for recent uploads ({% if user.settings.embiggen_photos %}embiggen-ed{% else %}in thumbnail format{% endif %}) from friends and family only
+{% endifequal %}
-{% if browser.mobile %}
-<a href="/settings">Switch to all your contacts</a>.
-{% else %}
-You are currently checking for recent uploads from friends and family only. <a href="/settings">Switch to all your contacts</a>.
-{% endif %}
+[ <a href="/settings">update your settings</a> ]
-{% endifequal %}
+{% endif %}
</div>
diff --git a/templates/settings.html b/templates/settings.html
index 2a22fc5..3f167af 100644
--- a/templates/settings.html
+++ b/templates/settings.html
@@ -1,38 +1,66 @@
{% include "inc_head.html" %}
-<div class="margin20">
+<div id="settings">
{% if error %}
<p class="error">
{% ifequal error 'invalid_crumb' %}
Hrm. Your session seems to have expired.
{% endifequal %}
{% ifequal error 'invalid_filter' %}
Hey! That's an invalid filter.
{% endifequal %}
</p>
<p><a href="/settings">Would you like to try again?</a></p>
{% else %}
-<h2>Update which contacts you want to get busy for</h2>
-
-<p>This setting will filter which contacts with recent uploads you will see updates for.</p>
-
<form method="POST" action="/settings" id="update_settings">
<input type="hidden" name="crumb" value="{{ settings_crumb|escape }}" />
- <input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked"{% endifequal %} /> <label for="all">All your contacts</label>
+
+ <h2>Whose photos do you want to see?</h2>
+
+ <p>This setting will filter which contacts with recent uploads you will see updates for.</p>
+
+ <br />
+
+ <input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked{% endifequal %} /> <label for="all">All of my contacts</label>
<br style="margin-bottom:15px;" />
- <input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked"{% endifequal %} /> <label for="ff">Only friends and family</label>
- <br /><br />
- <input type="submit" value="UPDATE" class="submit" />
+ <input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked{% endifequal %} /> <label for="ff">Only friends and family</label>
+
+ <h2>How big should those photos be?</h2>
+
+ <p>This setting will determine whether to display small thumbnails or large (500 pixels) versions of your contacts photos.</p>
+
+ <p style="font-style:italic;">You may regret embiggen-izing photos when one of your contacts posts 300 wedding pictures in one go but that's your business.</p>
+
+ <br />
+
+ {% comment %} I HATE DJANGO TEMPLATES, YES I DO {% endcomment %}
+
+ {% ifequal user.settings.embiggen_photos None %}
+ <input type="radio" name="embiggen" id="small" value="no" checked /> <label for="small">Small is beautiful</label>
+ <br style="margin-bottom:15px;" />
+ <input type="radio" name="embiggen" id="big" value="yes" /> <label for="big">Embiggen the photos, please</label>
+ {% else %}
+ <input type="radio" name="embiggen" id="small" value="no" {% if user.settings.embiggen_photos %}{% else %}checked{% endif %} /> <label for="small">Small is beautiful</label>
+ <br style="margin-bottom:15px;" />
+ <input type="radio" name="embiggen" id="big" value="yes" {% if user.settings.embiggen_photos %}checked{% endif %} /> <label for="big">Embiggen-ized photos</label>
+ {% endifequal %}
+
<br /><br />
+
+ <div id="settings_dothis">
+ <input type="submit" value="UPDATE" class="submit" />
+ <br /><br />
+
+ <p>Or just go back to your <a href="/">recent uploads page.</p>
+ </div>
</form>
{% endif %}
-<p>Or just go back to your <a href="/">recent uploads page.</p>
</div>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
9976ae248f79f1728a795a2336427c5112516a45
|
start on work to allow embiggening of photos
|
diff --git a/app.yaml b/app.yaml
index 7fd1d04..0879066 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,31 +1,31 @@
application: flickrforbusypeople
-version: 3
+version: 5
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
- url: .*
script: main.py
diff --git a/css/main.css b/css/main.css
index 3c1b200..d923a6d 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,246 +1,246 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#ff0084;
font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:25px;
}
#example_image img {
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:2em;
margin-bottom:50px;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:15%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
- text-align:center;
+ //text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
.submit {
border:3px solid #ccc;
background-color:#336699;
padding:10px;
font-size:large;
font-weight:700;
color:white;
margin-top:20px;
}
\ No newline at end of file
diff --git a/ffbp/API/__init__.py b/ffbp/API/__init__.py
index 3d98032..4be93ae 100644
--- a/ffbp/API/__init__.py
+++ b/ffbp/API/__init__.py
@@ -1,65 +1,70 @@
from APIApp import APIApp
import ffbp
class Dispatch (ffbp.Request, APIApp) :
def __init__ (self):
ffbp.Request.__init__(self)
APIApp.__init__(self)
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.api_error(403)
return
method = self.request.get('method')
format = self.request.get('format')
if format and not format in self.valid_formats :
self.api_error(999, 'Not a valid format')
return
if format :
self.format = format
if method == 'search' :
return self.__search()
def ensure_crumb (self, path) :
if not self.validate_crumb(self.user, path, self.request.get('crumb')) :
self.api_error(400, 'Invalid crumb')
return False
return True
def __search (self) :
required = ('crumb', 'user_id', 'min_upload_date')
if not self.ensure_args(required) :
return
if not self.ensure_crumb('method=search') :
return
method = 'flickr.photos.search'
args = {
'auth_token' : self.user.token,
'user_id' : self.request.get('user_id'),
'min_upload_date' : self.request.get('min_upload_date'),
'extras' : 'owner_name',
}
rsp = self.api_call(method, args)
if not rsp :
return self.api_error()
if rsp['stat'] != 'ok' :
return self.api_error()
-
- return self.api_ok({'photos' : rsp['photos']})
+
+ embiggen = 0;
+
+ if self.user.settings.embiggen_photos :
+ embiggen = 1;
+
+ return self.api_ok({'photos' : rsp['photos'], 'embiggen' : embiggen})
diff --git a/ffbp/API/__init__.pyc b/ffbp/API/__init__.pyc
index d0e9529..b4ccee0 100644
Binary files a/ffbp/API/__init__.pyc and b/ffbp/API/__init__.pyc differ
diff --git a/ffbp/Tables/__init__.py b/ffbp/Tables/__init__.py
index 16540b1..380ebe5 100644
--- a/ffbp/Tables/__init__.py
+++ b/ffbp/Tables/__init__.py
@@ -1,6 +1,7 @@
from google.appengine.ext import db
class dbSettings (db.Model) :
nsid = db.StringProperty()
search_in_contacts_filter = db.StringProperty()
+ embiggen_photos = db.BooleanProperty()
diff --git a/ffbp/Tables/__init__.pyc b/ffbp/Tables/__init__.pyc
index c1911b1..72b9c6f 100644
Binary files a/ffbp/Tables/__init__.pyc and b/ffbp/Tables/__init__.pyc differ
diff --git a/ffbp/__init__.py b/ffbp/__init__.py
index a8d34b7..93a41d7 100644
--- a/ffbp/__init__.py
+++ b/ffbp/__init__.py
@@ -1,19 +1,19 @@
from FlickrApp.Handlers import FlickrAppRequest
from config import config
import ffbp.Settings
class Request (FlickrAppRequest) :
def __init__ (self) :
FlickrAppRequest.__init__(self, config)
def check_logged_in (self, min_perms) :
if not FlickrAppRequest.check_logged_in(self, min_perms) :
return False
settings = ffbp.Settings.get_settings_for_user(self.user.nsid)
self.user.settings = settings
-
+
return True
diff --git a/ffbp/__init__.pyc b/ffbp/__init__.pyc
index 36cfc7b..e3639e7 100644
Binary files a/ffbp/__init__.pyc and b/ffbp/__init__.pyc differ
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index 3a0ee26..6909d19 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,230 +1,252 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
if ((id == uid) && (el.css('display') == 'none')){
el.show();
continue;
}
el.hide();
}
}
if ($("#" + uid).length){
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
-
+
+ var embiggen = rsp.embiggen;
+
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
+ if (embiggen){
+ src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '.jpg';
+ }
+
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
- img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
+
+ if (embiggen){
+ img += '<img src="' + src + '" style="border:4px solid #' + short_hex + ';" />';
+ }
+
+ else {
+ img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
+ }
+
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
- html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
+
+ if (embiggen){
+ html += '<div class="slice_thumb_title">' + ph['title'] + '...</div>';
+ }
+
+ else {
+ html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
+ }
+
html += '</div>';
}
html += '</div>';
$(container).append(html);
if (duration == '8h'){
window.location.href = "#slice_" + duration;
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "photos_" + hex;
var status_id = "status_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=slice_thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var ctx = $("#photos_" + hex);
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 48;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
ctx.after(html);
ctx = $("#thumb_" + ph['id']);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
|
straup/gae-flickrforbusypeople
|
b73dec38f5b8e986258a056830927ac3c8e68ef4
|
sigh...app engine doesn't support minor numbers for versioning (which is weird)
|
diff --git a/app.yaml b/app.yaml
index 3e1d1c7..7fd1d04 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,31 +1,31 @@
application: flickrforbusypeople
-version: 2.1
+version: 3
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
- url: .*
script: main.py
|
straup/gae-flickrforbusypeople
|
efe393302e221ef47f826d8e07f1d4c9ba01e088
|
more fussing
|
diff --git a/README b/README
index e1f5467..cfe0731 100644
--- a/README
+++ b/README
@@ -1,54 +1,55 @@
This is the source code that runs Flickr For Busy People (ffbp). ffbp is a
Flickr API (web) application that runs on the Google AppEngine (GAE) platform:
http://flickrforbusypeople.appspot.com
It asks the Flickr API for the list of your contacts who have uploaded photos in
the last 30 minutes, two hours, four hours and eight hours. If you click on the
buddyicon for a contact, the application will display thumbnails of those
photos.
You can choose to display all your contacts or only friends and family but
otherwise that's all it does.
I find it tremendously useful for keeping up with my contacts uploads,
especially those people who are awake and using Flickr while I am asleep. Your
mileage may vary.
A NOTE ABOUT MOBILE DEVICES
-ffbp works in, and can be formatted for, the iPhone and iPod Touch browser. It
-could probably be could probably be made to play nicely with the S60 browser but
-since debugging JavaScript for those devices is such a nuisance I more or less
-gave up trying. I would welcome suggestions or patches.
+ffbp was designed for desktop browser but does works in, and is formatted for,
+the iPhone and iPod Touch browser. It could probably be could probably be made
+to play nicely with the S60 browser but since debugging JavaScript for those
+devices is such a nuisance I more or less gave up trying. I would welcome
+suggestions or patches.
REQUIREMENTS
If you want to run your own version of ffbp, you will need:
* A valid Flickr API key, registered as a web application. Both the API key and
the application secret will need to be added to the config.py file.
If you don't have a Flickr API key, you can get one here:
http://www.flickr.com/services/api/keys/apply/
* A valid GAE account and a recent version of the GAE SDK for Python (>= 1.2.3)
which can be downloaded from:
http://code.google.com/appengine/downloads.html
There are two other dependencies which are actually included as part of the
package itself. They are:
* gae-FlickrApp
http://github.com/straup/gae-flickrapp/tree/master
* gae-APIApp
http://github.com/straup/gae-APIApp/tree/master
I mention that just because they are not specific to ffbp but are bundled with
the code for sake of keeping things simple. You should be aware that the
versions here may not be the same as those under active development.
|
straup/gae-flickrforbusypeople
|
9ae69d4bcbf800d261bb44bf62935658e997ca73
|
fussing
|
diff --git a/README b/README
index 45f1e02..e1f5467 100644
--- a/README
+++ b/README
@@ -1,54 +1,54 @@
This is the source code that runs Flickr For Busy People (ffbp). ffbp is a
Flickr API (web) application that runs on the Google AppEngine (GAE) platform:
http://flickrforbusypeople.appspot.com
It asks the Flickr API for the list of your contacts who have uploaded photos in
the last 30 minutes, two hours, four hours and eight hours. If you click on the
buddyicon for a contact, the application will display thumbnails of those
photos.
You can choose to display all your contacts or only friends and family but
otherwise that's all it does.
I find it tremendously useful for keeping up with my contacts uploads,
especially those people who are awake and using Flickr while I am asleep. Your
mileage may vary.
A NOTE ABOUT MOBILE DEVICES
-ffbp works in, and is formatted for, the iPhone and iPod Touch browser. It could
-probably be could probably be made to play nicely with the S60 browser but since
-debugging JavaScript for those devices is such a nuisance I more or less gave up
-trying. I would welcome suggestions or patches.
+ffbp works in, and can be formatted for, the iPhone and iPod Touch browser. It
+could probably be could probably be made to play nicely with the S60 browser but
+since debugging JavaScript for those devices is such a nuisance I more or less
+gave up trying. I would welcome suggestions or patches.
REQUIREMENTS
If you want to run your own version of ffbp, you will need:
* A valid Flickr API key, registered as a web application. Both the API key and
the application secret will need to be added to the config.py file.
If you don't have a Flickr API key, you can get one here:
http://www.flickr.com/services/api/keys/apply/
* A valid GAE account and a recent version of the GAE SDK for Python (>= 1.2.3)
which can be downloaded from:
http://code.google.com/appengine/downloads.html
There are two other dependencies which are actually included as part of the
package itself. They are:
* gae-FlickrApp
http://github.com/straup/gae-flickrapp/tree/master
* gae-APIApp
http://github.com/straup/gae-APIApp/tree/master
I mention that just because they are not specific to ffbp but are bundled with
the code for sake of keeping things simple. You should be aware that the
versions here may not be the same as those under active development.
|
straup/gae-flickrforbusypeople
|
8de3eee55a04439506dcb9b9ea70fce5c51172f2
|
notes about mobile
|
diff --git a/README b/README
index 16c91d1..45f1e02 100644
--- a/README
+++ b/README
@@ -1,47 +1,54 @@
This is the source code that runs Flickr For Busy People (ffbp). ffbp is a
Flickr API (web) application that runs on the Google AppEngine (GAE) platform:
http://flickrforbusypeople.appspot.com
It asks the Flickr API for the list of your contacts who have uploaded photos in
the last 30 minutes, two hours, four hours and eight hours. If you click on the
buddyicon for a contact, the application will display thumbnails of those
photos.
You can choose to display all your contacts or only friends and family but
otherwise that's all it does.
I find it tremendously useful for keeping up with my contacts uploads,
especially those people who are awake and using Flickr while I am asleep. Your
mileage may vary.
+A NOTE ABOUT MOBILE DEVICES
+
+ffbp works in, and is formatted for, the iPhone and iPod Touch browser. It could
+probably be could probably be made to play nicely with the S60 browser but since
+debugging JavaScript for those devices is such a nuisance I more or less gave up
+trying. I would welcome suggestions or patches.
+
REQUIREMENTS
If you want to run your own version of ffbp, you will need:
* A valid Flickr API key, registered as a web application. Both the API key and
the application secret will need to be added to the config.py file.
If you don't have a Flickr API key, you can get one here:
http://www.flickr.com/services/api/keys/apply/
* A valid GAE account and a recent version of the GAE SDK for Python (>= 1.2.3)
which can be downloaded from:
http://code.google.com/appengine/downloads.html
There are two other dependencies which are actually included as part of the
package itself. They are:
* gae-FlickrApp
http://github.com/straup/gae-flickrapp/tree/master
* gae-APIApp
http://github.com/straup/gae-APIApp/tree/master
I mention that just because they are not specific to ffbp but are bundled with
the code for sake of keeping things simple. You should be aware that the
versions here may not be the same as those under active development.
|
straup/gae-flickrforbusypeople
|
3fa6d8174665996e4160717d1a27c02913c4fc0d
|
remove some unused code; comments
|
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index d34acc5..79ac0d2 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,175 +1,170 @@
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
self.assign('search_crumb', search_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
contacts_8h = self.get_contacts('8h', contacts_4h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
slices.append(contacts_8h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
elif duration == '8h' :
hours = 8
- elif duration == '12h' :
- hours = 12
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
+
+ # TO DO: Backet times, so 30 minutes becomes 0-30 minutes
+ # and 2hr becomes 30-120 minutes and so on. This requires
+ # changes in the Flickr API itself.
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
error = 'INVISIBLE ERRORCAT HISSES AT YOU'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
- args = {
- 'user_id' : c['nsid'],
- 'method' : 'flickr.photos.search',
- 'auth_token' : self.user.token,
- 'min_upload_date' : dt,
- }
-
icon = self.get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
def get_user (self, nsid) :
memkey = "people_getinfo_%s" % nsid
cache = memcache.get(memkey)
if cache :
return cache
rsp = self.api_call('flickr.people.getInfo', {'user_id' : nsid, 'auth_token' : self.user.token})
if not rsp or rsp['stat'] != 'ok' :
return
ttl = 60 * 60 * 24 * 14
memcache.add(memkey, rsp['person'], ttl)
return rsp['person']
def get_buddyicon (self, nsid) :
user = self.get_user(nsid)
if not user :
return 'http://www.flickr.com/images/buddyicon.jpg'
if int(user['iconserver']) == 0 :
return 'http://www.flickr.com/images/buddyicon.jpg'
return "http://farm%s.static.flickr.com/%s/buddyicons/%s.jpg" % (user['iconfarm'], user['iconserver'], nsid)
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
filter = self.request.get('filter')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
self.redirect('/')
|
straup/gae-flickrforbusypeople
|
f8ff7f212cd8a548c99256d5056f0a3efb0f0544
|
stuff about bundled gae-*App packages
|
diff --git a/README b/README
index 5cbc719..16c91d1 100644
--- a/README
+++ b/README
@@ -1,42 +1,47 @@
-This is the source code that runs Flickr For Busy Peopl (ffbp):
-
-ffbp is a Flickr API (web) application that runs on the Google AppEngine (GAE)
-platform:
+This is the source code that runs Flickr For Busy People (ffbp). ffbp is a
+Flickr API (web) application that runs on the Google AppEngine (GAE) platform:
http://flickrforbusypeople.appspot.com
It asks the Flickr API for the list of your contacts who have uploaded photos in
the last 30 minutes, two hours, four hours and eight hours. If you click on the
buddyicon for a contact, the application will display thumbnails of those
photos.
You can choose to display all your contacts or only friends and family but
otherwise that's all it does.
I find it tremendously useful for keeping up with my contacts uploads,
especially those people who are awake and using Flickr while I am asleep. Your
mileage may vary.
REQUIREMENTS
If you want to run your own version of ffbp, you will need:
* A valid Flickr API key, registered as a web application. Both the API key and
the application secret will need to be added to the config.py file.
If you don't have a Flickr API key, you can get one here:
http://www.flickr.com/services/api/keys/apply/
* A valid GAE account and a recent version of the GAE SDK for Python (>= 1.2.3)
which can be downloaded from:
http://code.google.com/appengine/downloads.html
-* The gae-FlickrApp package
+There are two other dependencies which are actually included as part of the
+package itself. They are:
+
+* gae-FlickrApp
http://github.com/straup/gae-flickrapp/tree/master
-* The gae-APIApp package
+* gae-APIApp
http://github.com/straup/gae-APIApp/tree/master
+
+I mention that just because they are not specific to ffbp but are bundled with
+the code for sake of keeping things simple. You should be aware that the
+versions here may not be the same as those under active development.
|
straup/gae-flickrforbusypeople
|
e61f9d7c10aaba18b41dd6489b4e0f9f3bd5df01
|
fussing
|
diff --git a/README b/README
index 51e49e1..5cbc719 100644
--- a/README
+++ b/README
@@ -1,30 +1,42 @@
-This is the source code that runs flickrforbusypeople.appspot.com (ffbp).
+This is the source code that runs Flickr For Busy Peopl (ffbp):
-ffbp is a Flickr API application that runs on the Google AppEngine (GAE)
-platform. It asks the Flickr API for the list of your contacts who have
-uploaded photos in the last 30 minutes, two hours, four hours and eight
-hours. If you click on the buddyicon for a contact, the application will display
-thumbnails of those photos.
+ffbp is a Flickr API (web) application that runs on the Google AppEngine (GAE)
+platform:
+
+http://flickrforbusypeople.appspot.com
+
+It asks the Flickr API for the list of your contacts who have uploaded photos in
+the last 30 minutes, two hours, four hours and eight hours. If you click on the
+buddyicon for a contact, the application will display thumbnails of those
+photos.
You can choose to display all your contacts or only friends and family but
otherwise that's all it does.
I find it tremendously useful for keeping up with my contacts uploads,
especially those people who are awake and using Flickr while I am asleep. Your
mileage may vary.
REQUIREMENTS
If you want to run your own version of ffbp, you will need:
+* A valid Flickr API key, registered as a web application. Both the API key and
+ the application secret will need to be added to the config.py file.
+
+ If you don't have a Flickr API key, you can get one here:
+
+ http://www.flickr.com/services/api/keys/apply/
+
* A valid GAE account and a recent version of the GAE SDK for Python (>= 1.2.3)
which can be downloaded from:
http://code.google.com/appengine/downloads.html
-* A valid Flickr API key, registered as a web application. Both the API key and
- the application secret will need to be added to the config.py file.
+* The gae-FlickrApp package
- If you don't have a Flickr API key, you can get one here:
+ http://github.com/straup/gae-flickrapp/tree/master
- http://www.flickr.com/services/api/keys/apply/
+* The gae-APIApp package
+
+ http://github.com/straup/gae-APIApp/tree/master
|
straup/gae-flickrforbusypeople
|
e1216bbe31abd98ad7421ca4a6bd89a10cc72975
|
margin-left: 15%
|
diff --git a/css/main.css b/css/main.css
index 411b15e..3c1b200 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,246 +1,246 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#ff0084;
font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:25px;
}
#example_image img {
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:2em;
margin-bottom:50px;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
- padding-left:10%;
+ padding-left:15%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
.submit {
border:3px solid #ccc;
background-color:#336699;
padding:10px;
font-size:large;
font-weight:700;
color:white;
margin-top:20px;
}
\ No newline at end of file
|
straup/gae-flickrforbusypeople
|
42f6766761bffb867d3b5fbf3f09bef306974ba2
|
DTRT if user is null when fetching buddyicons
|
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index db2156c..d34acc5 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,172 +1,175 @@
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
self.assign('search_crumb', search_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
contacts_8h = self.get_contacts('8h', contacts_4h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
slices.append(contacts_8h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
elif duration == '8h' :
hours = 8
elif duration == '12h' :
hours = 12
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
error = 'INVISIBLE ERRORCAT HISSES AT YOU'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
args = {
'user_id' : c['nsid'],
'method' : 'flickr.photos.search',
'auth_token' : self.user.token,
'min_upload_date' : dt,
}
icon = self.get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
def get_user (self, nsid) :
memkey = "people_getinfo_%s" % nsid
cache = memcache.get(memkey)
if cache :
return cache
rsp = self.api_call('flickr.people.getInfo', {'user_id' : nsid, 'auth_token' : self.user.token})
if not rsp or rsp['stat'] != 'ok' :
return
ttl = 60 * 60 * 24 * 14
memcache.add(memkey, rsp['person'], ttl)
return rsp['person']
def get_buddyicon (self, nsid) :
user = self.get_user(nsid)
+
+ if not user :
+ return 'http://www.flickr.com/images/buddyicon.jpg'
if int(user['iconserver']) == 0 :
return 'http://www.flickr.com/images/buddyicon.jpg'
return "http://farm%s.static.flickr.com/%s/buddyicons/%s.jpg" % (user['iconfarm'], user['iconserver'], nsid)
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
filter = self.request.get('filter')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
self.redirect('/')
|
straup/gae-flickrforbusypeople
|
301bc43fdac420856f5b9aa2d70f5d153f3b86f2
|
css for submit buttons
|
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index 5166440..3a0ee26 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,228 +1,230 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
if ((id == uid) && (el.css('display') == 'none')){
el.show();
continue;
}
el.hide();
}
}
if ($("#" + uid).length){
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
}
html += '</div>';
$(container).append(html);
- window.location.href = "#slice_" + duration;
+ if (duration == '8h'){
+ window.location.href = "#slice_" + duration;
+ }
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "photos_" + hex;
var status_id = "status_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=slice_thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var ctx = $("#photos_" + hex);
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 48;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
ctx.after(html);
ctx = $("#thumb_" + ph['id']);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
diff --git a/templates/settings.html b/templates/settings.html
index 9f5f681..2a22fc5 100644
--- a/templates/settings.html
+++ b/templates/settings.html
@@ -1,37 +1,38 @@
{% include "inc_head.html" %}
<div class="margin20">
{% if error %}
<p class="error">
{% ifequal error 'invalid_crumb' %}
Hrm. Your session seems to have expired.
{% endifequal %}
{% ifequal error 'invalid_filter' %}
Hey! That's an invalid filter.
{% endifequal %}
</p>
<p><a href="/settings">Would you like to try again?</a></p>
{% else %}
<h2>Update which contacts you want to get busy for</h2>
<p>This setting will filter which contacts with recent uploads you will see updates for.</p>
<form method="POST" action="/settings" id="update_settings">
<input type="hidden" name="crumb" value="{{ settings_crumb|escape }}" />
<input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked"{% endifequal %} /> <label for="all">All your contacts</label>
<br style="margin-bottom:15px;" />
<input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked"{% endifequal %} /> <label for="ff">Only friends and family</label>
<br /><br />
- <input type="submit" value="UPDATE" />
+ <input type="submit" value="UPDATE" class="submit" />
+ <br /><br />
</form>
{% endif %}
<p>Or just go back to your <a href="/">recent uploads page.</p>
</div>
{% include "inc_foot.html" %}
diff --git a/templates/signout.html b/templates/signout.html
index 4ffa802..8a797a8 100644
--- a/templates/signout.html
+++ b/templates/signout.html
@@ -1,14 +1,14 @@
{% include "inc_head.html" %}
<h2>Are you sure you want to sign out of flickr for busy people?</h2>
-<div style="text-align:center;margin-bottom:40px;margin-top:30px;">
+<div id="signout">
<form method="POST" action="/signout" style="display:inline;">
<input type="hidden" name="crumb" value="{{ logout_crumb|escape }}" />
- <input type="submit" value="yes please, sign me out" style="border:1px dotted white;background-color:darkslategray;color:white;font-size:14px;padding:5px;" />
+ <input type="submit" value="yes please, sign me out" class="submit" />
</form>
</div>
<p>Or just go back to your <a href="/">recent uploads page.</p>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
30dd11bdcbf4f6cb0068f66492c59171b700584d
|
css for submit buttons
|
diff --git a/css/main.css b/css/main.css
index 08a3a0d..411b15e 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,236 +1,246 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#ff0084;
font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:25px;
}
#example_image img {
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:2em;
margin-bottom:50px;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:10%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
+}
+
+.submit {
+ border:3px solid #ccc;
+ background-color:#336699;
+ padding:10px;
+ font-size:large;
+ font-weight:700;
+ color:white;
+ margin-top:20px;
}
\ No newline at end of file
diff --git a/css/mobile.css b/css/mobile.css
index fbb75e8..4e3c3f4 100644
--- a/css/mobile.css
+++ b/css/mobile.css
@@ -1,222 +1,232 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
//max-width:340px;
}
h2 {
color:#ff0084;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:6%;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:1.5em;
text-align:center;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
}
#main_settings {
font-size:11px;
margin-top:30px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin_input {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
- display:inline;
- margin-left:15px;
+ margin-bottom:40px;
+ margin-top:30px;
}
#login_input {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:small;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
margin-right:50px;
}
.slice_inthelast {
display:none;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
display:block;
margin-bottom:10px;
}
.slice_contacts { }
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
+}
+
+.submit {
+ border:3px solid #ccc;
+ background-color:#336699;
+ padding:10px;
+ font-size:large;
+ font-weight:700;
+ color:white;
+ margin-top:20px;
}
\ No newline at end of file
|
straup/gae-flickrforbusypeople
|
a38b49b1ac0901399e6e57b588f171933909d031
|
big sloppy commit before I go to work
|
diff --git a/css/main.css b/css/main.css
index 3dca955..08a3a0d 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,243 +1,236 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#ff0084;
font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
- margin-left:75px;
- margin-bottom:30px;
+ margin-left:25px;
}
#example_image img {
- //border:1px dotted #ccc;
- padding:5px;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
-#foot_help {
- margin-top:4px;
- display:none;
-}
-
#getstarted {
font-size:2em;
+ margin-bottom:50px;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:10%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
\ No newline at end of file
diff --git a/css/mobile.css b/css/mobile.css
index 6bcccdf..fbb75e8 100644
--- a/css/mobile.css
+++ b/css/mobile.css
@@ -1,222 +1,222 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
- max-width:400px;
+ //max-width:340px;
}
h2 {
color:#ff0084;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:6%;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:1.5em;
text-align:center;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
}
#main_settings {
font-size:11px;
margin-top:30px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin_input {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#login_input {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
- font-size:11px;
+ font-size:small;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
margin-right:50px;
}
.slice_inthelast {
display:none;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
display:block;
margin-bottom:10px;
}
.slice_contacts { }
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
\ No newline at end of file
diff --git a/images/example2.jpg b/images/example2.jpg
index 7c65b2c..9f94e5a 100644
Binary files a/images/example2.jpg and b/images/example2.jpg differ
diff --git a/templates/inc_foot.html b/templates/inc_foot.html
index 478be53..18b4e3a 100644
--- a/templates/inc_foot.html
+++ b/templates/inc_foot.html
@@ -1,15 +1,12 @@
</div>
<div id="foot">
{% if browser.mobile %}
<strong>ffbp</strong> is a thing made by <a href="http://www.aaronland.info/">aaron</a>.
{% else %}
<q><a href="/" style="font-weight:700;color:darkslategray;text-decoration:none">flickr for busy people</a></q>
is a thing made by <a href="http://www.aaronland.info/">aaron straup cope</a>
- <div id="foot_help">
- / with help from <a href="http://www.flickr.com/photos/george/3762439133/">george</a> and <a href="http://www.flickr.com/photos/george/3762439133/#comment72157621929889120">cal</a>
- </div>
{% endif %}
</div>
</body>
</html>
diff --git a/templates/inc_head.html b/templates/inc_head.html
index 5ac9036..4fe896c 100644
--- a/templates/inc_head.html
+++ b/templates/inc_head.html
@@ -1,47 +1,47 @@
<html>
<head>
<title>flickr for busy people{% if page_title %}{{ page_title|escape }}{% endif %}</title>
{% if browser.mobile %}
<link rel="stylesheet" href="/css/mobile.css" type="text/css" />
{% else %}
<link rel="stylesheet" href="/css/main.css" type="text/css" />
{% endif %}
{% if user.username %}
<script type="text/javascript" src="/javascript/jquery.js"></script>
<script type="text/javascript" src="/javascript/md5.js"></script>
<script type="text/javascript" src="/javascript/flickrapp-api.js"></script>
<meta http-equiv="refresh" content="1800" />
{% endif %}
{% if browser.iphone %}
- <meta name="viewport" content="width=400" />
+ <meta name="viewport" content="width=340" />
{% endif %}
</head>
<body>
<div id="whoami">
{% if browser.mobile %}
<strong>ffbp</strong>{% if user.username %} / {{ user.username|escape }} / <a href="/signout">signout</a>{% endif %}
{% else %}
{% if user.username %}
You are logged in with your <a href="http://www.flickr.com/photos/{% ifequal user.path_alias "" %}{{ user.nsid|escape }}{% else %}{{ user.path_alias|escape }}{% endifequal %}/" class="fl_username">{{ user.username|escape }}</a> Flickr account
<form method="POST" action="/signout" id="signout">
<input type="hidden" name="crumb" value="{{ logout_crumb|escape }}" />
<input type="submit" value="sign out" />
</form>
{% else %}
<form method="GET" action="/signin" id="signin">
<input type="hidden" name="crumb" value="{{ login_crumb|escape }}" />
<input type="submit" value="sign in" />
</form>
{% endif %}
{% endif %}
</div>
<br clear="all" />
<div id="main">
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index 8e1a4d6..25f233f 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,51 +1,51 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
{% ifequal slice.duration '8h' %}8 hours{% endifequal %}
{% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
<div class="slice_contacts">
{% ifequal slice.count 0 %}
<div class="slice_noone">Nothing new...</div>
{% else %}
{% for c in slice.contacts %}
<a name="slice_{{ slice.duration|escape }}"></a>
<div id="photos_{{ c.nsid_hex|escape }}" class="photos_hex">
- <a href="#" onclick="window.ffbp.show_photos{% if browser.mobile %}_inline{% endif %}('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
+ <a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index 6e553ab..907d299 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,23 +1,25 @@
{% include "inc_head.html" %}
<script type="text/javascript" src="/javascript/ffbp.js"></script>
<script type="text/javascript">
var ffbp_args = {
'host' : '{{ host_url }}',
'crumb' : '{{ search_crumb }}',
};
window.ffbp = new info.aaronland.ffbp.Photos(ffbp_args);
</script>
+{% ifequal browser.mobile 0 %}
<br /><br />
+{% endifequal %}
{% for slice in slices %}
{% include "inc_main_contacts.html" %}
{% endfor %}
{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
diff --git a/templates/main_logged_out.html b/templates/main_logged_out.html
index 88bd7c6..17cfa5b 100644
--- a/templates/main_logged_out.html
+++ b/templates/main_logged_out.html
@@ -1,33 +1,33 @@
{% include "inc_head.html" %}
<div class="margin20">
<h2>This is Flickr for busy people.</h2>
{% if browser.mobile %}
<p>It is a simple web application that displays a list of your Flickr contacts who have
- uploaded photos in the last 30 minutes, two hours and four hours.</p>
+ uploaded photos in the last 30 minutes, two hours, four hours and eight hours.</p>
{% else %}
<p>It is a simple web application that displays a list of your Flickr contacts who have
- uploaded photos in the last 30 minutes, two hours and four hours.</p>
+ uploaded photos in the last 30 minutes, two hours, four hours and eight hours</p>
-<p>If you click on a buddyicon, the application will display thumbnails of the
+If you click on a buddyicon, the application will display thumbnails of the
photos that user has uploaded recently. Like this:</p>
<div id="example_image">
-<img src="images/example2.jpg" width="582" height="205" alt="flickr for busy people screenshot"/>
+<img src="images/example2.jpg" width="675" height="254" alt="flickr for busy people screenshot"/>
</div>
{% endif %}
<p id="getstarted"><strong><a href="/signin">→ Sign in to get started</a></strong></p>
<p style="font-size:small;font-style:italic;">The first time you sign in you
will be taken to Flickr and asked authorize the <q>flickrforbusypeople</q>
application. You should only need to do this once.</p>
</div>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
cc5b2c3ef175bbe357fedc0a47759f8f0dd7591d
|
throw it to the wind, see what happens
|
diff --git a/css/main.css b/css/main.css
index 1ba0f5d..3dca955 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,237 +1,243 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
- color:#cc0099;
-
+ color:#ff0084;
+ font-size:2em;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
- margin-left:6%;
+ margin-left:75px;
+ margin-bottom:30px;
+}
+
+#example_image img {
+ //border:1px dotted #ccc;
+ padding:5px;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#foot_help {
margin-top:4px;
display:none;
}
#getstarted {
font-size:2em;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:10%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
.slice_contacts {
margin-left:275px;
}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
\ No newline at end of file
diff --git a/css/mobile.css b/css/mobile.css
index ccb4ae9..6bcccdf 100644
--- a/css/mobile.css
+++ b/css/mobile.css
@@ -1,222 +1,222 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
max-width:400px;
}
h2 {
- color:#cc0099;
-
+ color:#ff0084;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:6%;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
- font-size:2em;
+ font-size:1.5em;
+ text-align:center;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
}
#main_settings {
font-size:11px;
margin-top:30px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin_input {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#login_input {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
- margin-left:20%;
+
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
margin-right:50px;
}
.slice_inthelast {
display:none;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
display:block;
margin-bottom:10px;
}
.slice_contacts { }
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
\ No newline at end of file
diff --git a/ffbp/App/__init__.pyc b/ffbp/App/__init__.pyc
index f57f1d6..97c8f8b 100644
Binary files a/ffbp/App/__init__.pyc and b/ffbp/App/__init__.pyc differ
diff --git a/images/example2.jpg b/images/example2.jpg
index 513f56a..7c65b2c 100644
Binary files a/images/example2.jpg and b/images/example2.jpg differ
diff --git a/templates/main_logged_out.html b/templates/main_logged_out.html
index 6c7759b..88bd7c6 100644
--- a/templates/main_logged_out.html
+++ b/templates/main_logged_out.html
@@ -1,33 +1,33 @@
{% include "inc_head.html" %}
<div class="margin20">
<h2>This is Flickr for busy people.</h2>
{% if browser.mobile %}
<p>It is a simple web application that displays a list of your Flickr contacts who have
uploaded photos in the last 30 minutes, two hours and four hours.</p>
{% else %}
<p>It is a simple web application that displays a list of your Flickr contacts who have
uploaded photos in the last 30 minutes, two hours and four hours.</p>
<p>If you click on a buddyicon, the application will display thumbnails of the
photos that user has uploaded recently. Like this:</p>
<div id="example_image">
-<img src="images/example2.jpg" alt="flickr for busy people screenshot"/>
+<img src="images/example2.jpg" width="582" height="205" alt="flickr for busy people screenshot"/>
</div>
{% endif %}
-<p id="getstarted"><strong><a href="/signin">→ Sign in to get started.</a></strong></p>
+<p id="getstarted"><strong><a href="/signin">→ Sign in to get started</a></strong></p>
<p style="font-size:small;font-style:italic;">The first time you sign in you
will be taken to Flickr and asked authorize the <q>flickrforbusypeople</q>
application. You should only need to do this once.</p>
</div>
{% include "inc_foot.html" %}
diff --git a/templates/signout.html b/templates/signout.html
index 8271604..4ffa802 100644
--- a/templates/signout.html
+++ b/templates/signout.html
@@ -1,12 +1,14 @@
{% include "inc_head.html" %}
<h2>Are you sure you want to sign out of flickr for busy people?</h2>
- <form method="POST" action="/signout" style="display:inline;margin-left:15px;">
+<div style="text-align:center;margin-bottom:40px;margin-top:30px;">
+ <form method="POST" action="/signout" style="display:inline;">
<input type="hidden" name="crumb" value="{{ logout_crumb|escape }}" />
<input type="submit" value="yes please, sign me out" style="border:1px dotted white;background-color:darkslategray;color:white;font-size:14px;padding:5px;" />
</form>
+</div>
<p>Or just go back to your <a href="/">recent uploads page.</p>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
2e7368ce025b5485c9cbfb2acb66482828b671ed
|
inline stuff works, basically
|
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
index 1cb32b6..5166440 100644
--- a/javascript/ffbp.js
+++ b/javascript/ffbp.js
@@ -1,133 +1,228 @@
if (! info){
var info = {};
}
if (! info.aaronland){
info.aaronland = {};
}
if (! info.aaronland.ffbp){
info.aaronland.ffbp = {};
}
info.aaronland.ffbp.Photos = function(args){
this.args = args;
};
// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var short_hex = hex.substring(0, 6);
var uid = "thumbs_" + hex;
var status_id = "status_" + duration;
var container = "#slice_thumbs_" + duration;
var buddy_icon = $("#buddy_" + hex)[0].src;
var kids = $(container).children();
if (kids.length){
for (var i=0; i < kids.length; i++){
var child = kids[i];
var id = child.getAttribute("id");
var el = $("#" + id);
- // console.log('uid: ' + uid + ' id: ' + id + ' css: ' + el.css('display'));
-
if ((id == uid) && (el.css('display') == 'none')){
el.show();
-
- // $("#count_" + uid).css('border', '2px solid #' + short_hex);
continue;
}
- // $("#count_" + id).css('border', 'none');
el.hide();
}
}
if ($("#" + uid).length){
return;
}
var api_args = {
'host' : this.args['host'],
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : this.args['crumb'],
};
// see above inre: inheritance...
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#" + status_id).html();
$("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
$("#" + status_id).show();
return;
}
var html = '<div id="' + uid + '">';
html += '<div class="slice_thumbs_from">';
html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
html += '</div>';
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
html += img;
html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
}
html += '</div>';
$(container).append(html);
- // $("#count_" + uid).css('border', '2px solid #' + short_hex);
+ window.location.href = "#slice_" + duration;
+
+ }, function (rsp) {
+
+ $("#buddy_" + hex)[0].src = buddy_icon;
+
+ $("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
+ $("#" + status_id).show();
+ return;
+ });
+
+ $("#buddy_" + hex)[0].src = "/images/loading.gif";
+
+ $("#" + status_id).html("Retrieving photos...");
+ $("#" + status_id).show();
+};
+
+info.aaronland.ffbp.Photos.prototype.show_photos_inline = function(nsid, offset, duration){
+
+ $("#ffbp_status").html();
+ $("#ffbp_status").hide();
+
+ var hex = hex_md5(nsid);
+ var short_hex = hex.substring(0, 6);
+
+ var uid = "photos_" + hex;
+
+ var status_id = "status_" + duration;
+ var buddy_icon = $("#buddy_" + hex)[0].src;
+
+ thumbs = $('[class=slice_thumb_' + hex + ']');
+
+ if (thumbs.length){
+
+ for (i=0; i < thumbs.length; i++){
+ var id = thumbs[i].getAttribute('id');
+ var el = $("#" + id);
+
+ if (el.css('display') == 'block'){
+ el.hide();
+ continue;
+ }
+
+ el.show();
+ }
+
+ return;
+ }
+
+ var api_args = {
+ 'host' : this.args['host'],
+ };
+
+ var search_args = {
+ 'user_id' : nsid,
+ 'min_upload_date' : offset,
+ 'format' : 'json',
+ 'crumb' : this.args['crumb'],
+ };
+
+ // see above inre: inheritance...
+
+ var api = new info.aaronland.flickrapp.API(api_args)
+
+ api.api_call('search', search_args, function (rsp){
+
+ $("#" + status_id).html();
+ $("#" + status_id).hide();
+
+ $("#buddy_" + hex)[0].src = buddy_icon;
+
+ var short_hex = hex.substring(0, 6);
+
+ if (rsp.photos.photo.length == 0){
+ $("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
+ $("#" + status_id).show();
+ return;
+ }
+
+ var ctx = $("#photos_" + hex);
+
+ for (i in rsp.photos.photo){
+ var ph = rsp.photos.photo[i];
+ var sz = 48;
+
+ var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
+ var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
+
+ var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
+ img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
+ img += '</a>';
+
+ var html = '<div id="thumb_' + ph['id'] + '" class="slice_thumb_' + hex + '" style="float:left;margin-right:10px;margin-bottom:10px;">';
+ html += img;
+ html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
+ html += '</div>';
+
+ ctx.after(html);
+ ctx = $("#thumb_" + ph['id']);
+ }
+
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#" + status_id).html("Retrieving photos...");
$("#" + status_id).show();
};
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index c0080f0..a0235a1 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,48 +1,50 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
{% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
<div class="slice_contacts">
{% ifequal slice.count 0 %}
<div class="slice_noone">Nothing new...</div>
{% else %}
{% for c in slice.contacts %}
+
+ <a name="slice_{{ slice.duration|escape }}"></a>
- <div id="photos_{{ c.nsid_hex }}" class="photos_hex">
+ <div id="photos_{{ c.nsid_hex|escape }}" class="photos_hex">
- <a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
+ <a href="#" onclick="window.ffbp.show_photos{% if browser.mobile %}_inline{% endif %}('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
|
straup/gae-flickrforbusypeople
|
7886414184e92ed7b29eea18d7a4c5006c0e353e
|
start work on mobile
|
diff --git a/app.yaml b/app.yaml
index c3453a8..3dddbeb 100644
--- a/app.yaml
+++ b/app.yaml
@@ -1,31 +1,31 @@
application: flickrforbusypeople
-version: 1
+version: 2
runtime: python
api_version: 1
skip_files:
- ^(.*/)?app\.yaml
- ^(.*/)?app\.yml
- ^(.*/)?index\.yaml
- ^(.*/)?index\.yml
- ^(.*/)?#.*#
- ^(.*/)?.*~
- ^(.*/)?.*\.py[co]
- ^(.*/)?.*/RCS/.*
- ^(.*/)?.*/CVS/.*
- ^(.*/)?\..*
- ^(.*/)?.*\.bak$
handlers:
- url: /css
static_dir: css
- url: /javascript
static_dir: javascript
- url: /images
static_dir: images
- url: .*
script: main.py
diff --git a/css/main.css b/css/main.css
index 6cf0126..2f1790d 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,237 +1,238 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#cc0099;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:6%;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#foot_help {
margin-top:4px;
display:none;
}
#getstarted {
font-size:2em;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:10%;
padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin input[type='submit'] {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#signout input[type='submit'] {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
float:left;
width:225px;
text-align:right;
margin-right:50px;
}
.slice_inthelast {
color:#ccc;
font-weight:700;
margin-bottom:5px;
display:block;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
font-size:2em;
display:block;
}
-.slice_contacts { }
+.slice_contacts {
+ margin-left:275px;
+}
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
- margin-left:275px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
margin-left: 275px;
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
\ No newline at end of file
diff --git a/css/mobile.css b/css/mobile.css
index 5e8fa2a..ccb4ae9 100644
--- a/css/mobile.css
+++ b/css/mobile.css
@@ -1,232 +1,222 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
+ max-width:400px;
}
h2 {
color:#cc0099;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
/* ids */
#example_image {
margin-left:6%;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#getstarted {
font-size:2em;
}
#getstarted a {
color:#0099cc;
text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
- padding-left:10%;
- padding-right:10%;
}
#main_settings {
font-size:11px;
margin-top:30px;
- margin-left:275px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#problem {
color:red;
font-weight:700;
}
#problem a {
text-decoration:none;
}
#signin {
display:inline;
}
#signin_input {
border:1px dotted yellow;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#signout {
display:inline;
margin-left:15px;
}
#login_input {
border:1px dotted white;
background-color:#ff0066;
color:white;
font-size:14px;
padding:5px;
}
#update_settings {
font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.count_thumbs_hex {
text-align:center;
font-size:10px;
color:auto;
}
.count_thumbs_hex a {
text-decoration:none;
color:#666;
}
.count_thumbs_hex a strong{
font-size: 12px;
}
.error {
font-weight:700;
color:red;
}
.buddy_hex {
margin-bottom:5px;
}
.margin20 {
margin-left:20%;
}
.photos_hex {
float:left;
margin-right:10px;
margin-bottom:10px;
}
.slice {
min-height:90px;
}
.slice_label {
- float:left;
- width:225px;
- text-align:right;
margin-right:50px;
}
.slice_inthelast {
- color:#ccc;
- font-weight:700;
- margin-bottom:5px;
- display:block;
+ display:none;
}
.slice_amountoftime {
color:#ff0084;
font-weight:700;
- font-size:2em;
display:block;
+ margin-bottom:10px;
}
.slice_contacts { }
.slice_noone {
font-weight:500;
font-size:1.5em;
color:#d0d0d0;
padding-top:10px;
}
.slice_thumbs {
margin-top:10px;
- margin-left:275px;
}
.slice_thumbs_from {
margin-bottom:6px;
font-size:12px;
}
.slice_thumb_hex {
margin-right:10px;
margin-bottom:20px;
float:left;
}
.slice_thumb_title {
margin-top:12px;
font-style:italic;
text-align:center;
font-size:10px;
color:auto;
}
.status {
- margin-left: 275px;
margin-top: 10px;
font-style:italic;
color:#ff0084;
}
\ No newline at end of file
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index f5b8b85..b8dd44e 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,166 +1,170 @@
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
self.assign('search_crumb', search_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
+ contacts_12h = self.get_contacts('12h', contacts_4h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
+ slices.append(contacts_12h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
+ elif duration == '12h' :
+ hours = 12
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
error = 'INVISIBLE ERRORCAT HISSES AT YOU'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
args = {
'user_id' : c['nsid'],
'method' : 'flickr.photos.search',
'auth_token' : self.user.token,
'min_upload_date' : dt,
}
icon = self.get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
def get_user (self, nsid) :
memkey = "people_getinfo_%s" % nsid
cache = memcache.get(memkey)
if cache :
return cache
rsp = self.api_call('flickr.people.getInfo', {'user_id' : nsid, 'auth_token' : self.user.token})
if not rsp or rsp['stat'] != 'ok' :
return
ttl = 60 * 60 * 24 * 14
memcache.add(memkey, rsp['person'], ttl)
return rsp['person']
def get_buddyicon (self, nsid) :
user = self.get_user(nsid)
if int(user['iconserver']) == 0 :
return 'http://www.flickr.com/images/buddyicon.jpg'
return "http://farm%s.static.flickr.com/%s/buddyicons/%s.jpg" % (user['iconfarm'], user['iconserver'], nsid)
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
filter = self.request.get('filter')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
self.redirect('/')
diff --git a/ffbp/App/__init__.pyc b/ffbp/App/__init__.pyc
index f91cae8..f57f1d6 100644
Binary files a/ffbp/App/__init__.pyc and b/ffbp/App/__init__.pyc differ
diff --git a/templates/inc_head.html b/templates/inc_head.html
index 952d244..5ac9036 100644
--- a/templates/inc_head.html
+++ b/templates/inc_head.html
@@ -1,47 +1,47 @@
<html>
<head>
<title>flickr for busy people{% if page_title %}{{ page_title|escape }}{% endif %}</title>
{% if browser.mobile %}
-
+ <link rel="stylesheet" href="/css/mobile.css" type="text/css" />
{% else %}
<link rel="stylesheet" href="/css/main.css" type="text/css" />
{% endif %}
{% if user.username %}
<script type="text/javascript" src="/javascript/jquery.js"></script>
<script type="text/javascript" src="/javascript/md5.js"></script>
<script type="text/javascript" src="/javascript/flickrapp-api.js"></script>
<meta http-equiv="refresh" content="1800" />
{% endif %}
{% if browser.iphone %}
<meta name="viewport" content="width=400" />
{% endif %}
</head>
<body>
<div id="whoami">
{% if browser.mobile %}
<strong>ffbp</strong>{% if user.username %} / {{ user.username|escape }} / <a href="/signout">signout</a>{% endif %}
{% else %}
{% if user.username %}
You are logged in with your <a href="http://www.flickr.com/photos/{% ifequal user.path_alias "" %}{{ user.nsid|escape }}{% else %}{{ user.path_alias|escape }}{% endifequal %}/" class="fl_username">{{ user.username|escape }}</a> Flickr account
<form method="POST" action="/signout" id="signout">
<input type="hidden" name="crumb" value="{{ logout_crumb|escape }}" />
<input type="submit" value="sign out" />
</form>
{% else %}
<form method="GET" action="/signin" id="signin">
<input type="hidden" name="crumb" value="{{ login_crumb|escape }}" />
<input type="submit" value="sign in" />
</form>
{% endif %}
{% endif %}
</div>
<br clear="all" />
<div id="main">
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index f569c3b..c0080f0 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,48 +1,48 @@
<div class="slice">
<div class="slice_label">
<span class="slice_inthelast">in the last</span>
<span class="slice_amountoftime">
-  
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
+ {% ifequal slice.duration '12h' %}12 hours{% endifequal %}
</span>
</div>
-<div id="slice_contacts">
+<div class="slice_contacts">
{% ifequal slice.count 0 %}
<div class="slice_noone">Nothing new...</div>
{% else %}
{% for c in slice.contacts %}
<div id="photos_{{ c.nsid_hex }}" class="photos_hex">
<a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" />
</a>
<div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
<strong>1</strong> photo
{% else %}
<strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<br clear="all" />
<div class="status" id="status_{{ slice.duration|escape }}"></div>
<div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
|
straup/gae-flickrforbusypeople
|
9669e4f5b00c51d7cef9b95833fd2db4612e4253
|
cast iconserver
|
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index 093f543..f5b8b85 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,166 +1,166 @@
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
self.assign('search_crumb', search_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
error = 'INVISIBLE ERRORCAT HISSES AT YOU'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
args = {
'user_id' : c['nsid'],
'method' : 'flickr.photos.search',
'auth_token' : self.user.token,
'min_upload_date' : dt,
}
icon = self.get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
def get_user (self, nsid) :
memkey = "people_getinfo_%s" % nsid
cache = memcache.get(memkey)
if cache :
return cache
rsp = self.api_call('flickr.people.getInfo', {'user_id' : nsid, 'auth_token' : self.user.token})
if not rsp or rsp['stat'] != 'ok' :
return
ttl = 60 * 60 * 24 * 14
memcache.add(memkey, rsp['person'], ttl)
return rsp['person']
def get_buddyicon (self, nsid) :
user = self.get_user(nsid)
- if user['iconserver'] == 0 :
+ if int(user['iconserver']) == 0 :
return 'http://www.flickr.com/images/buddyicon.jpg'
return "http://farm%s.static.flickr.com/%s/buddyicons/%s.jpg" % (user['iconfarm'], user['iconserver'], nsid)
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
filter = self.request.get('filter')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
self.redirect('/')
|
straup/gae-flickrforbusypeople
|
a9fa5e534ea1d861c9ff7c0872d5772f3311efda
|
tweaks and bug fixes
|
diff --git a/css/main.css b/css/main.css
index 70582c2..6cf0126 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,556 +1,237 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#cc0099;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
-.nophotos {
- font-size:.8em;
- color:pink;
- margin-bottom:5px;
-}
-
/* ids */
-#chooser {
- margin-bottom:15px;
- position:relative;
-}
-
-#ffbp_status {
- display:none;
- color:darkslategray;
- /*border:1px dotted #6699ff;
- background-image: url('/images/dot.gif');
- padding:7px;
- padding-left:none;
- */
- font-weight:700;
- width:100%;
- text-align:right;
- float:right;
+#example_image {
+ margin-left:6%;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
-#greeting {
-
-}
-
-#iamheremap_review {
- filter:alpha(opacity=100);
- moz-opacity: 1.0 !important;
- opacity: 1.0;
- z-index:30;
- position:absolute;
- top:200px;
- text-align:center;
-}
-
-#iamheremap_reviewcontainer {
- height:350px;
- width:600px;
- z-index:20;
- position:absolute;
- top:150px;
- text-align:center;
+#foot_help {
+ margin-top:4px;
+ display:none;
}
-#iamheremap_reviewbackground{
- background-color:#000;
- filter:alpha(opacity=60);
- moz-opacity: 0.6;
- opacity: 0.6;
- text-align:center;
- height:100%;
+#getstarted {
+ font-size:2em;
}
-#iamheremap_reviewclose{
- text-align:right;
- background-color:white;
- font-weight:700;
- padding:10px;
-}
-
-#loading {
- font-size:24pt;
- font-weight:700;
+#getstarted a {
+ color:#0099cc;
+ text-decoration:none;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
padding-left:10%;
padding-right:10%;
}
-#map {
- margin-bottom:10px;
+#main_settings {
+ font-size:11px;
+ margin-top:30px;
+ margin-left:275px;
}
#navi {
- list-style-type: none;
- margin: 0px;
- padding:0px;
+ list-style-type: none;
+ margin: 0px;
+ padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
- font-size:11px;
+ font-size:11px;
font-weight:700;
max-width:585px;
-}
+ }
#navi li a {
text-decoration:none;
color:darkslategray;
}
-#options {
- max-width:585px;
- width:585px;
-}
-
-#options_random {
- font-size:14pt;
- font-weight:700;
-}
-
-#options_random li {
- float:right;
- margin:20px;
- list-style-type: none;
- border:2px solid #666;
- padding:10px;
- background-color:orange;
-}
-
-#options_random li a {
- text-decoration:none;
- color:white;
-}
-
-#options_random_skip {
- background-color:#ff9966 !important;
-}
-
-#options_random_noidea {
- background-color:#cc3366 !important;
-}
-
#problem {
color:red;
font-weight:700;
- font-size:14pt;
- max-width:585px;
}
#problem a {
text-decoration:none;
}
-#status {
- font-size:small;
- color:#cc0066;
- margin-bottom:10px;
-}
-
-#token_change_main {
- font-size:12pt;
- margin-top:15px;
- margin-bottom:20px;
-}
-
-#token_change_review_link {
- font-size:12pt;
- margin-top:15px;
- margin-bottom:20px;
-}
-
-#token_skip {
- font-size:14pt;
- font-weight:700;
+#signin {
+ display:inline;
}
-#whatisthis {
- width:580px;
- max-width:580px;
- display:none;
- text-align:right;
- font-size:11px;
+#signin input[type='submit'] {
+ border:1px dotted yellow;
+ background-color:#ff0066;
+ color:white;
+ font-size:14px;
+ padding:5px;
}
-#whatisthis_icon {
- float:right;
+#signout {
+ display:inline;
+ margin-left:15px;
}
-#whatisthis_icon img {
- border: 1px dotted #ccc;
- margin-left:15px;
+#signout input[type='submit'] {
+ border:1px dotted white;
+ background-color:#ff0066;
+ color:white;
+ font-size:14px;
+ padding:5px;
}
-#whatisthis_text {
- margin-bottom:5px;
- font-weight:600;
+#update_settings {
+ font-size:large;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
-#whoami_mobile {
- float:right;
- margin-left:20px;
- margin-bottom:25px;
- margin-right:20px;
-}
-
/* classes */
-.about li {
- line-height:25px;
-}
-
-.alt_options {
- list-style-type:none;
-}
-
-.alt_options li {
- margin-bottom:10px;
-}
-
-.alt_options li a {
- text-decoration:none;
- color:#333366;
- font-weight:600;
-}
-
-.approve_ok {
- font-size:18pt;
- font-weight:700;
- color:green;
-}
-
-.approve_fail {
- font-size:18pt;
- font-weight:700;
- color:#cc0066;
-}
-
-.block_ok {
- font-size:18pt;
- font-weight:700;
- color:green;
-}
-
-.block_fail {
- font-size:18pt;
- font-weight:700;
- color:#cc0066;
-}
-
-.block_user {
- float:left;
- margin:10px;
- padding:10px;
- font-weight:700;
- background-color:#333;
- border:2px solid #ccc;
- text-decoration:none;
- color:white;
-}
-
-.block_user a {
- text-decoration:none;
- color:white;
-}
-
-.blocked_users {
- list-style-type: none;
-}
-
-.choose_user {
- border:3px solid #ccc;
- color:#336699;
- padding:10px;
- font-size:large;
- font-weight:700;
- margin-top:20px;
-}
-
-.confirm_token_change {
- float:left;
- margin:10px;
- padding:10px;
- font-weight:700;
- background-color:green;
- border:2px solid #ccc;
- text-decoration:none;
- color:white;
-}
-
-.confirm_geocode {
- float:left;
- margin:10px;
- padding:10px;
- font-weight:700;
- background-color:green;
- border:2px solid #ccc;
- text-decoration:none;
- color:white;
-}
-
-.current_photo{
- border:solid thin #ccc;
- padding:5px;
-}
-
-.do_suggest_link{
- font-weight:700;
- color:#ff0066;
- display:block;
- font-size:18pt;
- text-align:center;
- border:1px solid pink;
- margin-top:20px;
- padding:10px;
- text-decoration:none;
-}
-
-.fl_username {
- font-weight:700;
- color:#cc0066;
-}
-
-.new_email_ok {
- font-size:18pt;
- font-weight:700;
- color:green;
-}
-
-.new_email_fail {
- font-size:18pt;
- font-weight:700;
- color:#cc0066;
-}
-
-.options {
- font-size:30pt;
- font-weight:700;
- margin:0px;
- margin-top:30pt;
- margin-bottom:30pt;
- list-style-type:none;
+.count_thumbs_hex {
+ text-align:center;
+ font-size:10px;
+ color:auto;
}
-.options li {
- margin-bottom:30px;
- margin-left:25px;
+.count_thumbs_hex a {
+ text-decoration:none;
+ color:#666;
}
-.options li a {
- text-decoration:none;
- color:#ff0066;
+.count_thumbs_hex a strong{
+ font-size: 12px;
}
-.pending {
- margin-top:40px;
+.error {
+ font-weight:700;
+ color:red;
}
-.pending_buddyicon {
- float:left;
+.buddy_hex {
+ margin-bottom:5px;
}
-.pending_buddyicon img {
- border: 1px dotted #ccc;
- margin-right:10px;
+.margin20 {
+ margin-left:20%;
}
-.pending_photo {
- float:left;
- margin-right:60px;
- margin-bottom:50px;
+.photos_hex {
+ float:left;
+ margin-right:10px;
+ margin-bottom:10px;
}
-.reject_ok {
- font-size:18pt;
- font-weight:700;
- color:green;
+.slice {
+ min-height:90px;
}
-.reject_fail {
- font-size:18pt;
- font-weight:700;
- color:#cc0066;
+.slice_label {
+ float:left;
+ width:225px;
+ text-align:right;
+ margin-right:50px;
}
-.reject_token_change {
- float:left;
- margin:10px;
- padding:10px;
+.slice_inthelast {
+ color:#ccc;
font-weight:700;
- background-color:#cc0066;
- border:2px solid #ccc;
- text-decoration:none;
- color:white;
-}
-
-.submit {
- border:3px solid #ccc;
- background-color:#336699;
- padding:10px;
- font-size:large;
- font-weight:700;
- color:white;
- margin-top:20px;
-}
-
-.suggest_fail {
- font-size:18pt;
- font-weight:700;
- color:#cc0066;
-}
-
-.suggest_ok {
- font-size:18pt;
- font-weight:700;
- color:green;
+ margin-bottom:5px;
+ display:block;
}
-.suggestion_approve {
- float:left;
- margin:10px;
- margin-left:0;
- padding:10px;
- font-weight:700;
- background-color:green;
- border:2px solid #ccc;
+.slice_amountoftime {
+ color:#ff0084;
+ font-weight:700;
+ font-size:2em;
+ display:block;
}
-.suggestion_approve a {
- text-decoration:none;
- color:white;
-}
+.slice_contacts { }
-.suggestion_edit {
- font-size:small;
+.slice_noone {
+ font-weight:500;
+ font-size:1.5em;
+ color:#d0d0d0;
+ padding-top:10px;
}
-.suggestion_permalink {
- text-align:right;
- padding-top:20px;
- color:#666;
+.slice_thumbs {
+ margin-top:10px;
+ margin-left:275px;
}
-.suggestion_permalink a {
- color:#666;
+.slice_thumbs_from {
+ margin-bottom:6px;
+ font-size:12px;
}
-.suggestion_reject {
+.slice_thumb_hex {
+ margin-right:10px;
+ margin-bottom:20px;
float:left;
- margin:10px;
- padding:10px;
- font-weight:700;
- background-color:#cc0066;
- border:2px solid #ccc;
- text-decoration:none;
- color:white;
-}
-
-.suggestion_reject a {
- text-decoration:none;
- color:white;
-}
-
-.suggestion_showme {
- font-size:11px;
- margin-bottom:20px;
- margin-top:20px;
-}
-
-.suggestion_showme a {
- color:#ff0066;
- text-decoration:none;
-}
-
-.suggestion_tweak {
- font-size:11px;
- border:1px dotted #ccc;
- background-color:#b0e0e6;
- margin-left:300px;
- margin-bottom:20px;
- padding:10px;
- padding-left:50px;
- display:none;
- background-image: url('/images/dot.gif');
-}
-
-.suggestion_tweak .suggestion_perms {
-}
-
-.tag_placemaker_link {
- color:#333366;
- text-decoration:none;
- font-weight:700;
-}
-
-.unblock_user {
- margin-left:20px;
- padding:10px;
- font-weight:700;
- color:green;
- text-decoration: none;
-}
-
-.unblock_user_fail {
- font-weight:700;
- color:green;
}
-.unblock_user_ok {
- font-weight:700;
- color:green;
+.slice_thumb_title {
+ margin-top:12px;
+ font-style:italic;
+ text-align:center;
+ font-size:10px;
+ color:auto;
}
-.whirclick {
- color:#666;
- font-size:14pt;
- font-weight:600;
- font-style:italic;
+.status {
+ margin-left: 275px;
+ margin-top: 10px;
+ font-style:italic;
+ color:#ff0084;
}
\ No newline at end of file
diff --git a/ffbp/App/__init__.py b/ffbp/App/__init__.py
index 95f281e..093f543 100644
--- a/ffbp/App/__init__.py
+++ b/ffbp/App/__init__.py
@@ -1,162 +1,166 @@
import ffbp
import ffbp.Settings
from google.appengine.api import memcache
import time
import logging
import md5
class Main (ffbp.Request) :
def get (self):
if not self.check_logged_in(self.min_perms) :
self.display("main_logged_out.html")
return
search_crumb = self.generate_crumb(self.user, 'method=search')
self.assign('search_crumb', search_crumb)
contacts_30m = self.get_contacts('30m', [])
contacts_2h = self.get_contacts('2h', contacts_30m['filter'])
contacts_4h = self.get_contacts('4h', contacts_2h['filter'])
slices = []
slices.append(contacts_30m)
slices.append(contacts_2h)
slices.append(contacts_4h)
self.assign('slices', slices)
self.display("main_logged_in.html")
return
def get_contacts (self, duration=1, filter=[]) :
if duration == '30m' :
hours = .5
elif duration == '2h' :
hours = 2
elif duration == '4h' :
hours = 4
else :
duration = 1
hours = 1
offset = 60 * 60 * hours
dt = int(time.time() - offset)
contacts_filter = self.user.settings.search_in_contacts_filter
args = {
'auth_token' : self.user.token,
'date_lastupload' : dt,
'filter' : contacts_filter,
}
rsp = self.api_call('flickr.contacts.getListRecentlyUploaded', args)
contacts = []
new_filter = filter
if not rsp or rsp['stat'] != 'ok' :
error = 'INVISIBLE ERRORCAT HISSES AT YOU'
if rsp :
error = rsp['message']
return {'contacts' : contacts, 'filter' : new_filter, 'error' : error, 'offset' : dt, 'duration' : duration, 'count' : 0 }
if rsp['contacts']['total'] == 0 :
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : 0 }
for c in rsp['contacts']['contact'] :
if c['nsid'] in filter :
continue
args = {
'user_id' : c['nsid'],
'method' : 'flickr.photos.search',
'auth_token' : self.user.token,
'min_upload_date' : dt,
}
icon = self.get_buddyicon(c['nsid'])
hex = md5.new(c['nsid']).hexdigest()
short_hex = hex[0:6]
user = {
'username' : c['username'],
'nsid' : c['nsid'],
'nsid_hex' : hex,
'nsid_short_hex' : short_hex,
'count' : c['photos_uploaded'],
'buddyicon' : icon,
}
contacts.append(user)
new_filter.append(c['nsid'])
return {'contacts' : contacts, 'filter' : new_filter, 'error' : None, 'offset' : dt, 'duration' : duration, 'count' : len(contacts) }
def get_user (self, nsid) :
memkey = "people_getinfo_%s" % nsid
cache = memcache.get(memkey)
if cache :
return cache
rsp = self.api_call('flickr.people.getInfo', {'user_id' : nsid, 'auth_token' : self.user.token})
if not rsp or rsp['stat'] != 'ok' :
return
ttl = 60 * 60 * 24 * 14
memcache.add(memkey, rsp['person'], ttl)
return rsp['person']
def get_buddyicon (self, nsid) :
user = self.get_user(nsid)
+
+ if user['iconserver'] == 0 :
+ return 'http://www.flickr.com/images/buddyicon.jpg'
+
return "http://farm%s.static.flickr.com/%s/buddyicons/%s.jpg" % (user['iconfarm'], user['iconserver'], nsid)
class Settings (ffbp.Request) :
def get (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
settings_crumb = self.generate_crumb(self.user, 'method=settings')
self.assign('settings_crumb', settings_crumb)
self.display('settings.html')
return
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.do_flickr_auth(self.min_perms)
return
if not self.validate_crumb(self.user, 'method=settings', self.request.get('crumb')) :
self.assign('error', 'invalid_crumb')
self.display('settings.html')
return
filter = self.request.get('filter')
if not filter in ('all', 'ff') :
self.assign('error', 'invalid_filter')
self.display('settings.html')
return
ffbp.Settings.search_in_contacts_filter(self.user.nsid, filter)
self.redirect('/')
diff --git a/templates/inc_foot.html b/templates/inc_foot.html
index c76ce9b..478be53 100644
--- a/templates/inc_foot.html
+++ b/templates/inc_foot.html
@@ -1,11 +1,15 @@
</div>
<div id="foot">
{% if browser.mobile %}
<strong>ffbp</strong> is a thing made by <a href="http://www.aaronland.info/">aaron</a>.
{% else %}
- <q><a href="/" style="font-weight:700;color:darkslategray;text-decoration:none">flickr for busy people</a></q> is a thing made by <a href="http://www.aaronland.info/">aaron straup cope</a>.
+ <q><a href="/" style="font-weight:700;color:darkslategray;text-decoration:none">flickr for busy people</a></q>
+ is a thing made by <a href="http://www.aaronland.info/">aaron straup cope</a>
+ <div id="foot_help">
+ / with help from <a href="http://www.flickr.com/photos/george/3762439133/">george</a> and <a href="http://www.flickr.com/photos/george/3762439133/#comment72157621929889120">cal</a>
+ </div>
{% endif %}
</div>
</body>
</html>
diff --git a/templates/inc_head.html b/templates/inc_head.html
index 7c147d8..952d244 100644
--- a/templates/inc_head.html
+++ b/templates/inc_head.html
@@ -1,41 +1,47 @@
<html>
<head>
<title>flickr for busy people{% if page_title %}{{ page_title|escape }}{% endif %}</title>
+
+{% if browser.mobile %}
+
+{% else %}
<link rel="stylesheet" href="/css/main.css" type="text/css" />
+{% endif %}
{% if user.username %}
<script type="text/javascript" src="/javascript/jquery.js"></script>
<script type="text/javascript" src="/javascript/md5.js"></script>
<script type="text/javascript" src="/javascript/flickrapp-api.js"></script>
<meta http-equiv="refresh" content="1800" />
{% endif %}
{% if browser.iphone %}
<meta name="viewport" content="width=400" />
{% endif %}
+
</head>
<body>
- <div id="whoami{% if browser.mobile %}_mobile{% endif %}">
+ <div id="whoami">
{% if browser.mobile %}
-<strong>ffbp</strong>{% if user.username %} / {{ user.username|escape }} / <a href="/signout">signout</a>{% endif %}
+ <strong>ffbp</strong>{% if user.username %} / {{ user.username|escape }} / <a href="/signout">signout</a>{% endif %}
{% else %}
{% if user.username %}
You are logged in with your <a href="http://www.flickr.com/photos/{% ifequal user.path_alias "" %}{{ user.nsid|escape }}{% else %}{{ user.path_alias|escape }}{% endifequal %}/" class="fl_username">{{ user.username|escape }}</a> Flickr account
- <form method="POST" action="/signout" style="display:inline;margin-left:15px;">
+ <form method="POST" action="/signout" id="signout">
<input type="hidden" name="crumb" value="{{ logout_crumb|escape }}" />
- <input type="submit" value="sign out" style="border:1px dotted white;background-color:#ff0066;color:white;font-size:14px;padding:5px;" />
+ <input type="submit" value="sign out" />
</form>
{% else %}
- <form method="GET" action="/signin" style="display:inline;">
+ <form method="GET" action="/signin" id="signin">
<input type="hidden" name="crumb" value="{{ login_crumb|escape }}" />
- <input type="submit" value="sign in" style="border:1px dotted yellow;background-color:#ff0066;color:white;font-size:14px;padding:5px;" />
+ <input type="submit" value="sign in" />
</form>
{% endif %}
{% endif %}
</div>
<br clear="all" />
<div id="main">
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index 3a4bc2f..f569c3b 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,49 +1,48 @@
-<div class="slice" style="min-height:90px;">
- <div class="slice_label" style="float:left;width:225px;text-align:right;margin-right:50px;">
- <div class="slice_inthelast" style="color:#ccc;font-weight:700;margin-bottom:5px;">in the last</div>
- <div class="slice_amountoftime" style="color:#ff0084;font-weight:700;font-size:2em;">
+<div class="slice">
+ <div class="slice_label">
+ <span class="slice_inthelast">in the last</span>
+ <span class="slice_amountoftime">
+  
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
- </div>
+ </span>
</div>
-<div id="slice_contacts" style="">
+<div id="slice_contacts">
{% ifequal slice.count 0 %}
- <div class="slice_noone" style="font-weight:500;font-size:1.5em;color:#d0d0d0;padding-top:10px;">Nothing new...</div>
+ <div class="slice_noone">Nothing new...</div>
{% else %}
{% for c in slice.contacts %}
- <div id="photos_{{ c.nsid_hex }}" style="float:left; margin-right:10px;margin-bottom:10px;">
+ <div id="photos_{{ c.nsid_hex }}" class="photos_hex">
- <a href="#" onclick="show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
- <img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" style="border:3px solid #{{ c.nsid_short_hex}};margin-bottom:5px;" />
+ <a href="#" onclick="window.ffbp.show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
+ <img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" class="buddy_hex" style="border:3px solid #{{ c.nsid_short_hex}};" />
</a>
- <div id="count_thumbs_{{ c.nsid_hex }}" style="text-align:center;font-size:10px;color:auto;">
- <a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}" style="text-decoration:none;color:#666;">
+ <div id="count_thumbs_{{ c.nsid_hex }}" class="count_thumbs_hex">
+ <a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}">
{% ifequal c.count '1' %}
- <strong style="font-size:12px;">1</strong> photo
+ <strong>1</strong> photo
{% else %}
- <strong style="font-size:12px;">{{ c.count|escape }}</strong> photos
+ <strong>{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
</div>
{% endfor %}
<br clear="all" />
- <div style="margin-left:275px;">
- <div id="status_{{ slice.duration|escape }}" style="margin-top:5px;font-style:italic;"></div>
- <div id="slice_thumbs_{{ slice.duration|escape }}" style="margin-top:10px;"></div>
- </div>
+ <div class="status" id="status_{{ slice.duration|escape }}"></div>
+ <div class="slice_thumbs" id="slice_thumbs_{{ slice.duration|escape }}"></div>
<br clear="all" />
{% endifequal %}
</div>
</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index 8266428..6e553ab 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,136 +1,23 @@
{% include "inc_head.html" %}
-<script>
+<script type="text/javascript" src="/javascript/ffbp.js"></script>
- function show_photos(nsid, offset, duration){
-
- $("#ffbp_status").html();
- $("#ffbp_status").hide();
-
- var hex = hex_md5(nsid);
- var short_hex = hex.substring(0, 6);
-
- var uid = "thumbs_" + hex;
-
- var status_id = "status_" + duration;
-
- var container = "#slice_thumbs_" + duration;
-
- var buddy_icon = $("#buddy_" + hex)[0].src;
-
- var kids = $(container).children();
-
- if (kids.length){
- for (var i=0; i < kids.length; i++){
- var child = kids[i];
- var id = child.getAttribute("id");
- var el = $("#" + id);
-
- // console.log('uid: ' + uid + ' id: ' + id + ' css: ' + el.css('display'));
-
- if ((id == uid) && (el.css('display') == 'none')){
- el.show();
-
- $("#count_" + uid).css('border', '2px solid #' + short_hex);
- continue;
- }
-
- $("#count_" + id).css('border', 'none');
- el.hide();
- }
- }
-
- if ($("#" + uid).length){
- return;
- }
-
- var api_args = {
- 'host' : '{{ host_url }}',
- };
-
- var search_args = {
- 'user_id' : nsid,
- 'min_upload_date' : offset,
- 'format' : 'json',
- 'crumb' : '{{ search_crumb|escape }}',
- };
-
- var api = new info.aaronland.flickrapp.API(api_args)
-
- api.api_call('search', search_args, function (rsp){
-
- $("#" + status_id).html();
- $("#" + status_id).hide();
-
- $("#buddy_" + hex)[0].src = buddy_icon;
-
- var short_hex = hex.substring(0, 6);
- var html = '';
-
- if (rsp.photos.photo.length == 0){
- $("#ffbp_status").html("Foiled again! The Flickr API returns no photos for that user.");
- $("#ffbp_status").show();
- return;
- }
-
- var html = '<div id="' + uid + '">';
- //html += '<div style="margin-bottom:4px;font-size:small;">';
- //html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
- //html += '</div>';
-
- for (i in rsp.photos.photo){
- var ph = rsp.photos.photo[i];
-
- var sz = 75;
-
- var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
- var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
-
- var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
- img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
- img += '</a>';
-
- html += '<div id="thumb_' + ph['id'] + '" class="thumb_' + hex + '" style="margin-right:10px;margin-bottom:20px; float:left">';
- html += img;
- html += '<div style="margin-top:12px;font-style:italic;text-align:center;font-size:10px;color:auto;">' + ph['title'].substring(0, 6) + '...</div>';
- html += '</div>';
- }
-
- html += '</div>';
- $(container).append(html);
-
- $("#count_" + uid).css('border', '2px solid #' + short_hex);
-
- }, function (rsp) {
-
- $("#buddy_" + hex)[0].src = buddy_icon;
-
- $("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
- $("#" + status_id).show();
- return;
- });
-
- $("#buddy_" + hex)[0].src = "/images/loading.gif";
-
- $("#" + status_id).html("Retrieving photos...");
- $("#" + status_id).show();
- }
+<script type="text/javascript">
+ var ffbp_args = {
+ 'host' : '{{ host_url }}',
+ 'crumb' : '{{ search_crumb }}',
+ };
+ window.ffbp = new info.aaronland.ffbp.Photos(ffbp_args);
+
</script>
-{% if browser.mobile %}
-{% else %}
-<div style="height:20px;border:none;">
-<div id="ffbp_status" style="z-index:100;"></div>
-</div>
-{% endif %}
-
-<br />
+<br /><br />
{% for slice in slices %}
{% include "inc_main_contacts.html" %}
{% endfor %}
{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
diff --git a/templates/main_logged_out.html b/templates/main_logged_out.html
index 576a7c7..6c7759b 100644
--- a/templates/main_logged_out.html
+++ b/templates/main_logged_out.html
@@ -1,27 +1,33 @@
{% include "inc_head.html" %}
+<div class="margin20">
+
<h2>This is Flickr for busy people.</h2>
{% if browser.mobile %}
<p>It is a simple web application that displays a list of your Flickr contacts who have
uploaded photos in the last 30 minutes, two hours and four hours.</p>
{% else %}
<p>It is a simple web application that displays a list of your Flickr contacts who have
uploaded photos in the last 30 minutes, two hours and four hours.</p>
<p>If you click on a buddyicon, the application will display thumbnails of the
photos that user has uploaded recently. Like this:</p>
-<div style="text-align:center;"><img src="images/example.jpg" height="272" width="611" alt="flickr for busy people screenshot" /></div>
+<div id="example_image">
+<img src="images/example2.jpg" alt="flickr for busy people screenshot"/>
+</div>
{% endif %}
-<p style=""><strong><a href="/signin">Sign in to get started.</a></strong></p>
+<p id="getstarted"><strong><a href="/signin">→ Sign in to get started.</a></strong></p>
<p style="font-size:small;font-style:italic;">The first time you sign in you
will be taken to Flickr and asked authorize the <q>flickrforbusypeople</q>
application. You should only need to do this once.</p>
+</div>
+
{% include "inc_foot.html" %}
diff --git a/templates/settings.html b/templates/settings.html
index b187fc7..9f5f681 100644
--- a/templates/settings.html
+++ b/templates/settings.html
@@ -1,37 +1,37 @@
{% include "inc_head.html" %}
-<div style="margin-left:20%">
+<div class="margin20">
{% if error %}
-<p>
+<p class="error">
{% ifequal error 'invalid_crumb' %}
Hrm. Your session seems to have expired.
{% endifequal %}
{% ifequal error 'invalid_filter' %}
Hey! That's an invalid filter.
{% endifequal %}
</p>
<p><a href="/settings">Would you like to try again?</a></p>
{% else %}
<h2>Update which contacts you want to get busy for</h2>
<p>This setting will filter which contacts with recent uploads you will see updates for.</p>
-<form method="POST" action="/settings" style="font-size:large;">
+<form method="POST" action="/settings" id="update_settings">
<input type="hidden" name="crumb" value="{{ settings_crumb|escape }}" />
<input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked"{% endifequal %} /> <label for="all">All your contacts</label>
<br style="margin-bottom:15px;" />
<input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked"{% endifequal %} /> <label for="ff">Only friends and family</label>
<br /><br />
<input type="submit" value="UPDATE" />
</form>
{% endif %}
<p>Or just go back to your <a href="/">recent uploads page.</p>
</div>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
5dc4377feca8d5d1217f94b70c475bd351314be5
|
chunked out all the css; moved js around
|
diff --git a/css/mobile.css b/css/mobile.css
new file mode 100644
index 0000000..5e8fa2a
--- /dev/null
+++ b/css/mobile.css
@@ -0,0 +1,232 @@
+/* elements */
+
+body {
+ font-family:sans-serif;
+ font-size:11pt;
+ color:darkslategrey;
+ margin-top:25px;
+ margin-left:0;
+ margin-right:0;
+ margin-bottom:60px;
+ background-image: url('/images/bluetile.gif');
+}
+
+h2 {
+ color:#cc0099;
+
+}
+
+h3 {
+ color:#cc0099;
+ margin-bottom:15px;
+}
+
+/* ids */
+
+#example_image {
+ margin-left:6%;
+}
+
+#foot {
+ text-align:right;
+ font-size:11px;
+ margin-top:10px;
+ margin-right:20px;
+}
+
+#getstarted {
+ font-size:2em;
+}
+
+#getstarted a {
+ color:#0099cc;
+ text-decoration:none;
+}
+
+#main {
+ margin-top:0;
+ background-color:white;
+ border:1px dotted #ccc;
+ padding:30px;
+ padding-bottom:60px;
+ padding-left:10%;
+ padding-right:10%;
+}
+
+#main_settings {
+ font-size:11px;
+ margin-top:30px;
+ margin-left:275px;
+}
+
+#navi {
+ list-style-type: none;
+ margin: 0px;
+ padding:0px;
+ text-align:right;
+ margin-right:20px;
+}
+
+#navi li {
+ display: inline;
+ border:1px dotted #ccc;
+ border-bottom:none;
+ background-color:white;
+ padding:8px;
+ margin: 0px;
+ font-size:11px;
+ font-weight:700;
+ max-width:585px;
+ }
+
+#navi li a {
+ text-decoration:none;
+ color:darkslategray;
+}
+
+#problem {
+ color:red;
+ font-weight:700;
+}
+
+#problem a {
+ text-decoration:none;
+}
+
+#signin {
+ display:inline;
+}
+
+#signin_input {
+ border:1px dotted yellow;
+ background-color:#ff0066;
+ color:white;
+ font-size:14px;
+ padding:5px;
+}
+
+#signout {
+ display:inline;
+ margin-left:15px;
+}
+
+#login_input {
+ border:1px dotted white;
+ background-color:#ff0066;
+ color:white;
+ font-size:14px;
+ padding:5px;
+}
+
+#update_settings {
+ font-size:large;
+}
+
+#whoami {
+ float:right;
+ font-size:11px;
+ margin-left:20px;
+ margin-bottom:25px;
+ margin-right:20px;
+}
+
+/* classes */
+
+.count_thumbs_hex {
+ text-align:center;
+ font-size:10px;
+ color:auto;
+}
+
+.count_thumbs_hex a {
+ text-decoration:none;
+ color:#666;
+}
+
+.count_thumbs_hex a strong{
+ font-size: 12px;
+}
+
+.error {
+ font-weight:700;
+ color:red;
+}
+
+.buddy_hex {
+ margin-bottom:5px;
+}
+
+.margin20 {
+ margin-left:20%;
+}
+
+.photos_hex {
+ float:left;
+ margin-right:10px;
+ margin-bottom:10px;
+}
+
+.slice {
+ min-height:90px;
+}
+
+.slice_label {
+ float:left;
+ width:225px;
+ text-align:right;
+ margin-right:50px;
+}
+
+.slice_inthelast {
+ color:#ccc;
+ font-weight:700;
+ margin-bottom:5px;
+ display:block;
+}
+
+.slice_amountoftime {
+ color:#ff0084;
+ font-weight:700;
+ font-size:2em;
+ display:block;
+}
+
+.slice_contacts { }
+
+.slice_noone {
+ font-weight:500;
+ font-size:1.5em;
+ color:#d0d0d0;
+ padding-top:10px;
+}
+
+.slice_thumbs {
+ margin-top:10px;
+ margin-left:275px;
+}
+
+.slice_thumbs_from {
+ margin-bottom:6px;
+ font-size:12px;
+}
+
+.slice_thumb_hex {
+ margin-right:10px;
+ margin-bottom:20px;
+ float:left;
+}
+
+.slice_thumb_title {
+ margin-top:12px;
+ font-style:italic;
+ text-align:center;
+ font-size:10px;
+ color:auto;
+}
+
+.status {
+ margin-left: 275px;
+ margin-top: 10px;
+ font-style:italic;
+ color:#ff0084;
+}
\ No newline at end of file
diff --git a/images/example2.jpg b/images/example2.jpg
new file mode 100644
index 0000000..513f56a
Binary files /dev/null and b/images/example2.jpg differ
diff --git a/javascript/ffbp.js b/javascript/ffbp.js
new file mode 100644
index 0000000..1cb32b6
--- /dev/null
+++ b/javascript/ffbp.js
@@ -0,0 +1,133 @@
+if (! info){
+ var info = {};
+}
+
+if (! info.aaronland){
+ info.aaronland = {};
+}
+
+if (! info.aaronland.ffbp){
+ info.aaronland.ffbp = {};
+}
+
+info.aaronland.ffbp.Photos = function(args){
+ this.args = args;
+};
+
+// do the thing to inherit from info.aaronland.flickrapp.API here (see below) ...
+
+info.aaronland.ffbp.Photos.prototype.show_photos = function(nsid, offset, duration){
+
+ $("#ffbp_status").html();
+ $("#ffbp_status").hide();
+
+ var hex = hex_md5(nsid);
+ var short_hex = hex.substring(0, 6);
+
+ var uid = "thumbs_" + hex;
+
+ var status_id = "status_" + duration;
+
+ var container = "#slice_thumbs_" + duration;
+
+ var buddy_icon = $("#buddy_" + hex)[0].src;
+
+ var kids = $(container).children();
+
+ if (kids.length){
+ for (var i=0; i < kids.length; i++){
+ var child = kids[i];
+ var id = child.getAttribute("id");
+ var el = $("#" + id);
+
+ // console.log('uid: ' + uid + ' id: ' + id + ' css: ' + el.css('display'));
+
+ if ((id == uid) && (el.css('display') == 'none')){
+ el.show();
+
+ // $("#count_" + uid).css('border', '2px solid #' + short_hex);
+ continue;
+ }
+
+ // $("#count_" + id).css('border', 'none');
+ el.hide();
+ }
+ }
+
+ if ($("#" + uid).length){
+ return;
+ }
+
+ var api_args = {
+ 'host' : this.args['host'],
+ };
+
+ var search_args = {
+ 'user_id' : nsid,
+ 'min_upload_date' : offset,
+ 'format' : 'json',
+ 'crumb' : this.args['crumb'],
+ };
+
+ // see above inre: inheritance...
+
+ var api = new info.aaronland.flickrapp.API(api_args)
+
+ api.api_call('search', search_args, function (rsp){
+
+ $("#" + status_id).html();
+ $("#" + status_id).hide();
+
+ $("#buddy_" + hex)[0].src = buddy_icon;
+
+ var short_hex = hex.substring(0, 6);
+ var html = '';
+
+ if (rsp.photos.photo.length == 0){
+ $("#" + status_id).html("Foiled again! The Flickr API returns no photos for that user.");
+ $("#" + status_id).show();
+ return;
+ }
+
+ var html = '<div id="' + uid + '">';
+ html += '<div class="slice_thumbs_from">';
+ html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
+ html += '</div>';
+
+ for (i in rsp.photos.photo){
+ var ph = rsp.photos.photo[i];
+
+ var sz = 75;
+
+ var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
+ var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
+
+ var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
+ img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
+ img += '</a>';
+
+ html += '<div id="thumb_' + ph['id'] + '" class="slice_thumb_hex">';
+ html += img;
+ html += '<div class="slice_thumb_title">' + ph['title'].substring(0, 6) + '...</div>';
+ html += '</div>';
+ }
+
+ html += '</div>';
+ $(container).append(html);
+
+ // $("#count_" + uid).css('border', '2px solid #' + short_hex);
+
+ }, function (rsp) {
+
+ $("#buddy_" + hex)[0].src = buddy_icon;
+
+ $("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
+ $("#" + status_id).show();
+ return;
+ });
+
+ $("#buddy_" + hex)[0].src = "/images/loading.gif";
+
+ $("#" + status_id).html("Retrieving photos...");
+ $("#" + status_id).show();
+};
diff --git a/templates/inc_main_settings.html b/templates/inc_main_settings.html
index 0fdddf2..049457a 100644
--- a/templates/inc_main_settings.html
+++ b/templates/inc_main_settings.html
@@ -1,20 +1,20 @@
-<div style="font-size:small;margin-top:30px;font-style:italic;margin-left:275px;">
+<div id="main_settings">
{% ifequal user.settings.search_in_contacts_filter 'all' %}
{% if browser.mobile %}
<a href="/settings">Switch to friends and family only</a>.
{% else %}
You are currently checking for recent uploads from all your contacts. <a href="/settings">Switch to friends and family only</a>.
{% endif %}
{% else %}
{% if browser.mobile %}
<a href="/settings">Switch to all your contacts</a>.
{% else %}
You are currently checking for recent uploads from friends and family only. <a href="/settings">Switch to all your contacts</a>.
{% endif %}
{% endifequal %}
</div>
|
straup/gae-flickrforbusypeople
|
6598919a44d7bf6ce8892a34283041f9b1be0d57
|
big sloppy commit; implemented most of george's design
|
diff --git a/css/main.css b/css/main.css
index e4fbaab..70582c2 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,555 +1,556 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#cc0099;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
.nophotos {
font-size:.8em;
color:pink;
margin-bottom:5px;
}
/* ids */
#chooser {
margin-bottom:15px;
position:relative;
}
#ffbp_status {
display:none;
color:darkslategray;
- border:1px dotted #6699ff;
+ /*border:1px dotted #6699ff;
background-image: url('/images/dot.gif');
padding:7px;
padding-left:none;
+ */
font-weight:700;
width:100%;
text-align:right;
float:right;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#greeting {
}
#iamheremap_review {
filter:alpha(opacity=100);
moz-opacity: 1.0 !important;
opacity: 1.0;
z-index:30;
position:absolute;
top:200px;
text-align:center;
}
#iamheremap_reviewcontainer {
height:350px;
width:600px;
z-index:20;
position:absolute;
top:150px;
text-align:center;
}
#iamheremap_reviewbackground{
background-color:#000;
filter:alpha(opacity=60);
moz-opacity: 0.6;
opacity: 0.6;
text-align:center;
height:100%;
}
#iamheremap_reviewclose{
text-align:right;
background-color:white;
font-weight:700;
padding:10px;
}
#loading {
font-size:24pt;
font-weight:700;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
- padding-left:5%;
+ padding-left:10%;
padding-right:10%;
}
#map {
margin-bottom:10px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#options {
max-width:585px;
width:585px;
}
#options_random {
font-size:14pt;
font-weight:700;
}
#options_random li {
float:right;
margin:20px;
list-style-type: none;
border:2px solid #666;
padding:10px;
background-color:orange;
}
#options_random li a {
text-decoration:none;
color:white;
}
#options_random_skip {
background-color:#ff9966 !important;
}
#options_random_noidea {
background-color:#cc3366 !important;
}
#problem {
color:red;
font-weight:700;
font-size:14pt;
max-width:585px;
}
#problem a {
text-decoration:none;
}
#status {
font-size:small;
color:#cc0066;
margin-bottom:10px;
}
#token_change_main {
font-size:12pt;
margin-top:15px;
margin-bottom:20px;
}
#token_change_review_link {
font-size:12pt;
margin-top:15px;
margin-bottom:20px;
}
#token_skip {
font-size:14pt;
font-weight:700;
}
#whatisthis {
width:580px;
max-width:580px;
display:none;
text-align:right;
font-size:11px;
}
#whatisthis_icon {
float:right;
}
#whatisthis_icon img {
border: 1px dotted #ccc;
margin-left:15px;
}
#whatisthis_text {
margin-bottom:5px;
font-weight:600;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
#whoami_mobile {
float:right;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.about li {
line-height:25px;
}
.alt_options {
list-style-type:none;
}
.alt_options li {
margin-bottom:10px;
}
.alt_options li a {
text-decoration:none;
color:#333366;
font-weight:600;
}
.approve_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.approve_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.block_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.block_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.block_user {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:#333;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.block_user a {
text-decoration:none;
color:white;
}
.blocked_users {
list-style-type: none;
}
.choose_user {
border:3px solid #ccc;
color:#336699;
padding:10px;
font-size:large;
font-weight:700;
margin-top:20px;
}
.confirm_token_change {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:green;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.confirm_geocode {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:green;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.current_photo{
border:solid thin #ccc;
padding:5px;
}
.do_suggest_link{
font-weight:700;
color:#ff0066;
display:block;
font-size:18pt;
text-align:center;
border:1px solid pink;
margin-top:20px;
padding:10px;
text-decoration:none;
}
.fl_username {
font-weight:700;
color:#cc0066;
}
.new_email_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.new_email_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.options {
font-size:30pt;
font-weight:700;
margin:0px;
margin-top:30pt;
margin-bottom:30pt;
list-style-type:none;
}
.options li {
margin-bottom:30px;
margin-left:25px;
}
.options li a {
text-decoration:none;
color:#ff0066;
}
.pending {
margin-top:40px;
}
.pending_buddyicon {
float:left;
}
.pending_buddyicon img {
border: 1px dotted #ccc;
margin-right:10px;
}
.pending_photo {
float:left;
margin-right:60px;
margin-bottom:50px;
}
.reject_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.reject_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.reject_token_change {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:#cc0066;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.submit {
border:3px solid #ccc;
background-color:#336699;
padding:10px;
font-size:large;
font-weight:700;
color:white;
margin-top:20px;
}
.suggest_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.suggest_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.suggestion_approve {
float:left;
margin:10px;
margin-left:0;
padding:10px;
font-weight:700;
background-color:green;
border:2px solid #ccc;
}
.suggestion_approve a {
text-decoration:none;
color:white;
}
.suggestion_edit {
font-size:small;
}
.suggestion_permalink {
text-align:right;
padding-top:20px;
color:#666;
}
.suggestion_permalink a {
color:#666;
}
.suggestion_reject {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:#cc0066;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.suggestion_reject a {
text-decoration:none;
color:white;
}
.suggestion_showme {
font-size:11px;
margin-bottom:20px;
margin-top:20px;
}
.suggestion_showme a {
color:#ff0066;
text-decoration:none;
}
.suggestion_tweak {
font-size:11px;
border:1px dotted #ccc;
background-color:#b0e0e6;
margin-left:300px;
margin-bottom:20px;
padding:10px;
padding-left:50px;
display:none;
background-image: url('/images/dot.gif');
}
.suggestion_tweak .suggestion_perms {
}
.tag_placemaker_link {
color:#333366;
text-decoration:none;
font-weight:700;
}
.unblock_user {
margin-left:20px;
padding:10px;
font-weight:700;
color:green;
text-decoration: none;
}
.unblock_user_fail {
font-weight:700;
color:green;
}
.unblock_user_ok {
font-weight:700;
color:green;
}
.whirclick {
color:#666;
font-size:14pt;
font-weight:600;
font-style:italic;
}
\ No newline at end of file
diff --git a/ffbp/API/__init__.py b/ffbp/API/__init__.py
index 89ebb0c..3d98032 100644
--- a/ffbp/API/__init__.py
+++ b/ffbp/API/__init__.py
@@ -1,64 +1,65 @@
from APIApp import APIApp
import ffbp
class Dispatch (ffbp.Request, APIApp) :
def __init__ (self):
ffbp.Request.__init__(self)
APIApp.__init__(self)
def post (self) :
if not self.check_logged_in(self.min_perms) :
self.api_error(403)
return
method = self.request.get('method')
format = self.request.get('format')
if format and not format in self.valid_formats :
self.api_error(999, 'Not a valid format')
return
if format :
self.format = format
if method == 'search' :
return self.__search()
def ensure_crumb (self, path) :
if not self.validate_crumb(self.user, path, self.request.get('crumb')) :
self.api_error(400, 'Invalid crumb')
return False
return True
def __search (self) :
required = ('crumb', 'user_id', 'min_upload_date')
if not self.ensure_args(required) :
return
if not self.ensure_crumb('method=search') :
return
method = 'flickr.photos.search'
args = {
'auth_token' : self.user.token,
'user_id' : self.request.get('user_id'),
'min_upload_date' : self.request.get('min_upload_date'),
+ 'extras' : 'owner_name',
}
rsp = self.api_call(method, args)
if not rsp :
return self.api_error()
if rsp['stat'] != 'ok' :
return self.api_error()
return self.api_ok({'photos' : rsp['photos']})
diff --git a/ffbp/API/__init__.pyc b/ffbp/API/__init__.pyc
index 150d813..d0e9529 100644
Binary files a/ffbp/API/__init__.pyc and b/ffbp/API/__init__.pyc differ
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
index 9dc8ab6..3a4bc2f 100644
--- a/templates/inc_main_contacts.html
+++ b/templates/inc_main_contacts.html
@@ -1,40 +1,49 @@
-<div class="slice" style="margin-bottom:40px;">
- <div class="slice_label" style="float:left;width:225px;text-align:right;margin-right:25px;">
+<div class="slice" style="min-height:90px;">
+ <div class="slice_label" style="float:left;width:225px;text-align:right;margin-right:50px;">
<div class="slice_inthelast" style="color:#ccc;font-weight:700;margin-bottom:5px;">in the last</div>
<div class="slice_amountoftime" style="color:#ff0084;font-weight:700;font-size:2em;">
{% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
{% ifequal slice.duration '2h' %}2 hours{% endifequal %}
{% ifequal slice.duration '4h' %}4 hours{% endifequal %}
</div>
</div>
<div id="slice_contacts" style="">
{% ifequal slice.count 0 %}
- <div class="slice_noone" style="font-weight:500;font-size:1.5em;color:#0099cc;padding-top:15px;">Nothing new...</div>
+ <div class="slice_noone" style="font-weight:500;font-size:1.5em;color:#d0d0d0;padding-top:10px;">Nothing new...</div>
{% else %}
{% for c in slice.contacts %}
<div id="photos_{{ c.nsid_hex }}" style="float:left; margin-right:10px;margin-bottom:10px;">
-
- <a href="#" onclick="show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}');return false;">
+
+ <a href="#" onclick="show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}', '{{ slice.duration|escape }}');return false;">
<img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" style="border:3px solid #{{ c.nsid_short_hex}};margin-bottom:5px;" />
</a>
- <div style="text-align:center;font-size:10px;color:auto;">
+ <div id="count_thumbs_{{ c.nsid_hex }}" style="text-align:center;font-size:10px;color:auto;">
<a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}" style="text-decoration:none;color:#666;">
{% ifequal c.count '1' %}
<strong style="font-size:12px;">1</strong> photo
{% else %}
<strong style="font-size:12px;">{{ c.count|escape }}</strong> photos
{% endifequal %}
</a>
</div>
+
</div>
{% endfor %}
+
+ <br clear="all" />
+
+ <div style="margin-left:275px;">
+ <div id="status_{{ slice.duration|escape }}" style="margin-top:5px;font-style:italic;"></div>
+ <div id="slice_thumbs_{{ slice.duration|escape }}" style="margin-top:10px;"></div>
+ </div>
+
+ <br clear="all" />
{% endifequal %}
</div>
- <br clear="all" />
</div>
diff --git a/templates/inc_main_settings.html b/templates/inc_main_settings.html
index c597215..0fdddf2 100644
--- a/templates/inc_main_settings.html
+++ b/templates/inc_main_settings.html
@@ -1,20 +1,20 @@
-<div style="font-size:small;margin-top:30px;font-style:italic;">
+<div style="font-size:small;margin-top:30px;font-style:italic;margin-left:275px;">
{% ifequal user.settings.search_in_contacts_filter 'all' %}
{% if browser.mobile %}
<a href="/settings">Switch to friends and family only</a>.
{% else %}
You are currently checking for recent uploads from all your contacts. <a href="/settings">Switch to friends and family only</a>.
{% endif %}
{% else %}
{% if browser.mobile %}
<a href="/settings">Switch to all your contacts</a>.
{% else %}
You are currently checking for recent uploads from friends and family only. <a href="/settings">Switch to all your contacts</a>.
{% endif %}
{% endifequal %}
</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index f5c450e..8266428 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,113 +1,136 @@
{% include "inc_head.html" %}
<script>
- function show_photos(nsid, offset){
+ function show_photos(nsid, offset, duration){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
- var uid = "#photos_" + hex;
+ var short_hex = hex.substring(0, 6);
- var buddy_icon = $("#buddy_" + hex)[0].src;
+ var uid = "thumbs_" + hex;
+
+ var status_id = "status_" + duration;
- thumbs = $('[class=thumb_' + hex + ']');
+ var container = "#slice_thumbs_" + duration;
- if (thumbs.length){
+ var buddy_icon = $("#buddy_" + hex)[0].src;
- for (i=0; i < thumbs.length; i++){
- var id = thumbs[i].getAttribute('id');
- var el = $("#" + id);
+ var kids = $(container).children();
- if (el.css('display') == 'block'){
- el.hide();
- continue;
- }
+ if (kids.length){
+ for (var i=0; i < kids.length; i++){
+ var child = kids[i];
+ var id = child.getAttribute("id");
+ var el = $("#" + id);
- el.show();
- }
+ // console.log('uid: ' + uid + ' id: ' + id + ' css: ' + el.css('display'));
+
+ if ((id == uid) && (el.css('display') == 'none')){
+ el.show();
- return;
- }
+ $("#count_" + uid).css('border', '2px solid #' + short_hex);
+ continue;
+ }
+ $("#count_" + id).css('border', 'none');
+ el.hide();
+ }
+ }
+
+ if ($("#" + uid).length){
+ return;
+ }
+
var api_args = {
'host' : '{{ host_url }}',
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : '{{ search_crumb|escape }}',
};
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
- $("#ffbp_status").html();
- $("#ffbp_status").hide();
+ $("#" + status_id).html();
+ $("#" + status_id).hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#ffbp_status").html("Foiled again! The Flickr API returns no photos for that user.");
$("#ffbp_status").show();
return;
}
-
+ var html = '<div id="' + uid + '">';
+ //html += '<div style="margin-bottom:4px;font-size:small;">';
+ //html += 'From <a href="http://www.flickr.com/photos/' + nsid + '" target="_flickr">' + rsp['photos']['photo'][0]['ownername'] + '</a>:';
+ //html += '</div>';
+
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
- var html = '<div id="thumb_' + ph['id'] + '" class="thumb_' + hex + '" style="margin-right:10px;margin-bottom:20px; float:left">';
+ html += '<div id="thumb_' + ph['id'] + '" class="thumb_' + hex + '" style="margin-right:10px;margin-bottom:20px; float:left">';
html += img;
html += '<div style="margin-top:12px;font-style:italic;text-align:center;font-size:10px;color:auto;">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
-
- $(uid).append(html);
}
+ html += '</div>';
+ $(container).append(html);
+
+ $("#count_" + uid).css('border', '2px solid #' + short_hex);
+
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
- $("#ffbp_status").html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
- $("#ffbp_status").show();
+ $("#" + status_id).html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
+ $("#" + status_id).show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
- $("#ffbp_status").html("Retrieving photos...");
- $("#ffbp_status").show();
+ $("#" + status_id).html("Retrieving photos...");
+ $("#" + status_id).show();
}
+
</script>
{% if browser.mobile %}
{% else %}
<div style="height:20px;border:none;">
<div id="ffbp_status" style="z-index:100;"></div>
</div>
{% endif %}
+<br />
+
{% for slice in slices %}
{% include "inc_main_contacts.html" %}
{% endfor %}
{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
diff --git a/templates/settings.html b/templates/settings.html
index 9ff2d2d..b187fc7 100644
--- a/templates/settings.html
+++ b/templates/settings.html
@@ -1,35 +1,37 @@
{% include "inc_head.html" %}
+<div style="margin-left:20%">
{% if error %}
<p>
{% ifequal error 'invalid_crumb' %}
Hrm. Your session seems to have expired.
{% endifequal %}
{% ifequal error 'invalid_filter' %}
Hey! That's an invalid filter.
{% endifequal %}
</p>
<p><a href="/settings">Would you like to try again?</a></p>
{% else %}
<h2>Update which contacts you want to get busy for</h2>
<p>This setting will filter which contacts with recent uploads you will see updates for.</p>
<form method="POST" action="/settings" style="font-size:large;">
<input type="hidden" name="crumb" value="{{ settings_crumb|escape }}" />
<input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked"{% endifequal %} /> <label for="all">All your contacts</label>
<br style="margin-bottom:15px;" />
<input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked"{% endifequal %} /> <label for="ff">Only friends and family</label>
<br /><br />
<input type="submit" value="UPDATE" />
</form>
{% endif %}
<p>Or just go back to your <a href="/">recent uploads page.</p>
+</div>
{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
9ad5e5825ba40910c0e8844b7b1b1b66ff539746
|
start on side design
|
diff --git a/css/main.css b/css/main.css
index 4940038..e4fbaab 100644
--- a/css/main.css
+++ b/css/main.css
@@ -1,555 +1,555 @@
/* elements */
body {
font-family:sans-serif;
font-size:11pt;
color:darkslategrey;
margin-top:25px;
margin-left:0;
margin-right:0;
margin-bottom:60px;
background-image: url('/images/bluetile.gif');
}
h2 {
color:#cc0099;
}
h3 {
color:#cc0099;
margin-bottom:15px;
}
.nophotos {
font-size:.8em;
color:pink;
margin-bottom:5px;
}
/* ids */
#chooser {
margin-bottom:15px;
position:relative;
}
#ffbp_status {
display:none;
color:darkslategray;
border:1px dotted #6699ff;
background-image: url('/images/dot.gif');
padding:7px;
padding-left:none;
font-weight:700;
width:100%;
text-align:right;
float:right;
}
#foot {
text-align:right;
font-size:11px;
margin-top:10px;
margin-right:20px;
}
#greeting {
}
#iamheremap_review {
filter:alpha(opacity=100);
moz-opacity: 1.0 !important;
opacity: 1.0;
z-index:30;
position:absolute;
top:200px;
text-align:center;
}
#iamheremap_reviewcontainer {
height:350px;
width:600px;
z-index:20;
position:absolute;
top:150px;
text-align:center;
}
#iamheremap_reviewbackground{
background-color:#000;
filter:alpha(opacity=60);
moz-opacity: 0.6;
opacity: 0.6;
text-align:center;
height:100%;
}
#iamheremap_reviewclose{
text-align:right;
background-color:white;
font-weight:700;
padding:10px;
}
#loading {
font-size:24pt;
font-weight:700;
}
#main {
margin-top:0;
background-color:white;
border:1px dotted #ccc;
padding:30px;
padding-bottom:60px;
- padding-left:20%;
+ padding-left:5%;
padding-right:10%;
}
#map {
margin-bottom:10px;
}
#navi {
list-style-type: none;
margin: 0px;
padding:0px;
text-align:right;
margin-right:20px;
}
#navi li {
display: inline;
border:1px dotted #ccc;
border-bottom:none;
background-color:white;
padding:8px;
margin: 0px;
font-size:11px;
font-weight:700;
max-width:585px;
}
#navi li a {
text-decoration:none;
color:darkslategray;
}
#options {
max-width:585px;
width:585px;
}
#options_random {
font-size:14pt;
font-weight:700;
}
#options_random li {
float:right;
margin:20px;
list-style-type: none;
border:2px solid #666;
padding:10px;
background-color:orange;
}
#options_random li a {
text-decoration:none;
color:white;
}
#options_random_skip {
background-color:#ff9966 !important;
}
#options_random_noidea {
background-color:#cc3366 !important;
}
#problem {
color:red;
font-weight:700;
font-size:14pt;
max-width:585px;
}
#problem a {
text-decoration:none;
}
#status {
font-size:small;
color:#cc0066;
margin-bottom:10px;
}
#token_change_main {
font-size:12pt;
margin-top:15px;
margin-bottom:20px;
}
#token_change_review_link {
font-size:12pt;
margin-top:15px;
margin-bottom:20px;
}
#token_skip {
font-size:14pt;
font-weight:700;
}
#whatisthis {
width:580px;
max-width:580px;
display:none;
text-align:right;
font-size:11px;
}
#whatisthis_icon {
float:right;
}
#whatisthis_icon img {
border: 1px dotted #ccc;
margin-left:15px;
}
#whatisthis_text {
margin-bottom:5px;
font-weight:600;
}
#whoami {
float:right;
font-size:11px;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
#whoami_mobile {
float:right;
margin-left:20px;
margin-bottom:25px;
margin-right:20px;
}
/* classes */
.about li {
line-height:25px;
}
.alt_options {
list-style-type:none;
}
.alt_options li {
margin-bottom:10px;
}
.alt_options li a {
text-decoration:none;
color:#333366;
font-weight:600;
}
.approve_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.approve_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.block_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.block_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.block_user {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:#333;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.block_user a {
text-decoration:none;
color:white;
}
.blocked_users {
list-style-type: none;
}
.choose_user {
border:3px solid #ccc;
color:#336699;
padding:10px;
font-size:large;
font-weight:700;
margin-top:20px;
}
.confirm_token_change {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:green;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.confirm_geocode {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:green;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.current_photo{
border:solid thin #ccc;
padding:5px;
}
.do_suggest_link{
font-weight:700;
color:#ff0066;
display:block;
font-size:18pt;
text-align:center;
border:1px solid pink;
margin-top:20px;
padding:10px;
text-decoration:none;
}
.fl_username {
font-weight:700;
color:#cc0066;
}
.new_email_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.new_email_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.options {
font-size:30pt;
font-weight:700;
margin:0px;
margin-top:30pt;
margin-bottom:30pt;
list-style-type:none;
}
.options li {
margin-bottom:30px;
margin-left:25px;
}
.options li a {
text-decoration:none;
color:#ff0066;
}
.pending {
margin-top:40px;
}
.pending_buddyicon {
float:left;
}
.pending_buddyicon img {
border: 1px dotted #ccc;
margin-right:10px;
}
.pending_photo {
float:left;
margin-right:60px;
margin-bottom:50px;
}
.reject_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.reject_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.reject_token_change {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:#cc0066;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.submit {
border:3px solid #ccc;
background-color:#336699;
padding:10px;
font-size:large;
font-weight:700;
color:white;
margin-top:20px;
}
.suggest_fail {
font-size:18pt;
font-weight:700;
color:#cc0066;
}
.suggest_ok {
font-size:18pt;
font-weight:700;
color:green;
}
.suggestion_approve {
float:left;
margin:10px;
margin-left:0;
padding:10px;
font-weight:700;
background-color:green;
border:2px solid #ccc;
}
.suggestion_approve a {
text-decoration:none;
color:white;
}
.suggestion_edit {
font-size:small;
}
.suggestion_permalink {
text-align:right;
padding-top:20px;
color:#666;
}
.suggestion_permalink a {
color:#666;
}
.suggestion_reject {
float:left;
margin:10px;
padding:10px;
font-weight:700;
background-color:#cc0066;
border:2px solid #ccc;
text-decoration:none;
color:white;
}
.suggestion_reject a {
text-decoration:none;
color:white;
}
.suggestion_showme {
font-size:11px;
margin-bottom:20px;
margin-top:20px;
}
.suggestion_showme a {
color:#ff0066;
text-decoration:none;
}
.suggestion_tweak {
font-size:11px;
border:1px dotted #ccc;
background-color:#b0e0e6;
margin-left:300px;
margin-bottom:20px;
padding:10px;
padding-left:50px;
display:none;
background-image: url('/images/dot.gif');
}
.suggestion_tweak .suggestion_perms {
}
.tag_placemaker_link {
color:#333366;
text-decoration:none;
font-weight:700;
}
.unblock_user {
margin-left:20px;
padding:10px;
font-weight:700;
color:green;
text-decoration: none;
}
.unblock_user_fail {
font-weight:700;
color:green;
}
.unblock_user_ok {
font-weight:700;
color:green;
}
.whirclick {
color:#666;
font-size:14pt;
font-weight:600;
font-style:italic;
}
\ No newline at end of file
diff --git a/ffbp/App/__init__.pyc b/ffbp/App/__init__.pyc
index 930f9e3..f91cae8 100644
Binary files a/ffbp/App/__init__.pyc and b/ffbp/App/__init__.pyc differ
diff --git a/ffbp/Settings/__init__.py~ b/ffbp/Settings/__init__.py~
deleted file mode 100644
index 6fd5a69..0000000
--- a/ffbp/Settings/__init__.py~
+++ /dev/null
@@ -1 +0,0 @@
-import ffbp.Tables.dbSettings
diff --git a/templates/inc_main_contacts.html b/templates/inc_main_contacts.html
new file mode 100644
index 0000000..9dc8ab6
--- /dev/null
+++ b/templates/inc_main_contacts.html
@@ -0,0 +1,40 @@
+<div class="slice" style="margin-bottom:40px;">
+ <div class="slice_label" style="float:left;width:225px;text-align:right;margin-right:25px;">
+ <div class="slice_inthelast" style="color:#ccc;font-weight:700;margin-bottom:5px;">in the last</div>
+ <div class="slice_amountoftime" style="color:#ff0084;font-weight:700;font-size:2em;">
+ {% ifequal slice.duration '30m' %}30 minutes{% endifequal %}
+ {% ifequal slice.duration '2h' %}2 hours{% endifequal %}
+ {% ifequal slice.duration '4h' %}4 hours{% endifequal %}
+ </div>
+ </div>
+
+<div id="slice_contacts" style="">
+
+{% ifequal slice.count 0 %}
+ <div class="slice_noone" style="font-weight:500;font-size:1.5em;color:#0099cc;padding-top:15px;">Nothing new...</div>
+{% else %}
+{% for c in slice.contacts %}
+
+ <div id="photos_{{ c.nsid_hex }}" style="float:left; margin-right:10px;margin-bottom:10px;">
+
+ <a href="#" onclick="show_photos('{{ c.nsid|escape }}', '{{ slice.offset}}');return false;">
+ <img id="buddy_{{ c.nsid_hex}}" src="{{ c.buddyicon }}" height="48" width="48" style="border:3px solid #{{ c.nsid_short_hex}};margin-bottom:5px;" />
+ </a>
+
+ <div style="text-align:center;font-size:10px;color:auto;">
+ <a href="http://www.flickr.com/photos/{{ c.nsid|escape }}" target="fl_{{ c.nsid_hex|escape }}" style="text-decoration:none;color:#666;">
+ {% ifequal c.count '1' %}
+ <strong style="font-size:12px;">1</strong> photo
+ {% else %}
+ <strong style="font-size:12px;">{{ c.count|escape }}</strong> photos
+ {% endifequal %}
+ </a>
+ </div>
+ </div>
+
+{% endfor %}
+{% endifequal %}
+</div>
+
+ <br clear="all" />
+</div>
diff --git a/templates/inc_main_settings.html b/templates/inc_main_settings.html
new file mode 100644
index 0000000..c597215
--- /dev/null
+++ b/templates/inc_main_settings.html
@@ -0,0 +1,20 @@
+<div style="font-size:small;margin-top:30px;font-style:italic;">
+{% ifequal user.settings.search_in_contacts_filter 'all' %}
+
+{% if browser.mobile %}
+<a href="/settings">Switch to friends and family only</a>.
+
+{% else %}
+You are currently checking for recent uploads from all your contacts. <a href="/settings">Switch to friends and family only</a>.
+{% endif %}
+
+{% else %}
+
+{% if browser.mobile %}
+<a href="/settings">Switch to all your contacts</a>.
+{% else %}
+You are currently checking for recent uploads from friends and family only. <a href="/settings">Switch to all your contacts</a>.
+{% endif %}
+
+{% endifequal %}
+</div>
diff --git a/templates/main_logged_in.html b/templates/main_logged_in.html
index c026f86..f5c450e 100644
--- a/templates/main_logged_in.html
+++ b/templates/main_logged_in.html
@@ -1,136 +1,113 @@
{% include "inc_head.html" %}
<script>
function show_photos(nsid, offset){
$("#ffbp_status").html();
$("#ffbp_status").hide();
var hex = hex_md5(nsid);
var uid = "#photos_" + hex;
var buddy_icon = $("#buddy_" + hex)[0].src;
thumbs = $('[class=thumb_' + hex + ']');
if (thumbs.length){
for (i=0; i < thumbs.length; i++){
var id = thumbs[i].getAttribute('id');
var el = $("#" + id);
if (el.css('display') == 'block'){
el.hide();
continue;
}
el.show();
}
return;
}
var api_args = {
'host' : '{{ host_url }}',
};
var search_args = {
'user_id' : nsid,
'min_upload_date' : offset,
'format' : 'json',
'crumb' : '{{ search_crumb|escape }}',
};
var api = new info.aaronland.flickrapp.API(api_args)
api.api_call('search', search_args, function (rsp){
$("#ffbp_status").html();
$("#ffbp_status").hide();
$("#buddy_" + hex)[0].src = buddy_icon;
var short_hex = hex.substring(0, 6);
var html = '';
if (rsp.photos.photo.length == 0){
$("#ffbp_status").html("Foiled again! The Flickr API returns no photos for that user.");
$("#ffbp_status").show();
return;
}
for (i in rsp.photos.photo){
var ph = rsp.photos.photo[i];
var sz = 75;
var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
img += '</a>';
var html = '<div id="thumb_' + ph['id'] + '" class="thumb_' + hex + '" style="margin-right:10px;margin-bottom:20px; float:left">';
html += img;
html += '<div style="margin-top:12px;font-style:italic;text-align:center;font-size:10px;color:auto;">' + ph['title'].substring(0, 6) + '...</div>';
html += '</div>';
$(uid).append(html);
}
}, function (rsp) {
$("#buddy_" + hex)[0].src = buddy_icon;
$("#ffbp_status").html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
$("#ffbp_status").show();
return;
});
$("#buddy_" + hex)[0].src = "/images/loading.gif";
$("#ffbp_status").html("Retrieving photos...");
$("#ffbp_status").show();
}
</script>
{% if browser.mobile %}
{% else %}
<div style="height:20px;border:none;">
<div id="ffbp_status" style="z-index:100;"></div>
</div>
{% endif %}
{% for slice in slices %}
-{% ifnotequal slice.count 0 %}
-{% include "inc_show_contacts.html"%}
-{% else %}
-{% include "inc_no_contacts.html"%}
-{% endifnotequal %}
+{% include "inc_main_contacts.html" %}
{% endfor %}
-<div style="font-size:small;margin-top:30px;font-style:italic;">
-{% ifequal user.settings.search_in_contacts_filter 'all' %}
-
-{% if browser.mobile %}
-<a href="/settings">Switch to friends and family only</a>.
-
-{% else %}
-You are currently checking for recent uploads from all your contacts. <a href="/settings">Switch to friends and family only</a>.
-{% endif %}
-
-{% else %}
-
-{% if browser.mobile %}
-<a href="/settings">Switch to all your contacts</a>.
-{% else %}
-You are currently checking for recent uploads from friends and family only. <a href="/settings">Switch to all your contacts</a>.
-{% endif %}
-
-{% endifequal %}
-</div>
+{% include "inc_main_settings.html" %}
{% include "inc_foot.html" %}
diff --git a/templates/main_logged_in.html~ b/templates/main_logged_in.html~
deleted file mode 100644
index 0c57779..0000000
--- a/templates/main_logged_in.html~
+++ /dev/null
@@ -1,133 +0,0 @@
-{% include "inc_head.html" %}
-
-<script>
-
- function show_photos(nsid, offset){
-
- $("#ffbp_status").html();
- $("#ffbp_status").hide();
-
- var hex = hex_md5(nsid);
- var uid = "#photos_" + hex;
-
- var buddy_icon = $("#buddy_" + hex)[0].src;
-
- thumbs = $('[class=thumb_' + hex + ']');
-
- if (thumbs.length){
-
- for (i=0; i < thumbs.length; i++){
- var id = thumbs[i].getAttribute('id');
- var el = $("#" + id);
-
- if (el.css('display') == 'block'){
- el.hide();
- continue;
- }
-
- el.show();
- }
-
- return;
- }
-
- var api_args = {
- 'host' : '{{ host_url }}',
- };
-
- var search_args = {
- 'user_id' : nsid,
- 'min_upload_date' : offset,
- 'format' : 'json',
- 'crumb' : '{{ search_crumb|escape }}',
- };
-
- var api = new info.aaronland.flickrapp.API(api_args)
-
- api.api_call('search', search_args, function (rsp){
-
- $("#ffbp_status").html();
- $("#ffbp_status").hide();
-
- var short_hex = hex.substring(0, 6);
-
- var html = '';
-
- if (rsp.photos.photo.length == 0){
- $("#ffbp_status").html("Foiled again! The Flickr API returns no photos for that user.");
- $("#ffbp_status").show();
- return;
- }
-
- $("#buddy_" + hex)[0].src = buddy_icon;
-
- for (i in rsp.photos.photo){
- var ph = rsp.photos.photo[i];
-
- var sz = 75;
-
- var link = 'http://www.flickr.com/photos/' + ph['owner'] + '/' + ph['id'];
- var src = 'http://farm' + ph['farm'] + '.static.flickr.com/' + ph['server'] + '/' + ph['id'] + '_' + ph['secret'] + '_s.jpg';
-
- var img = '<a href="' + link + '" target="_fl' + ph['id'] + '">';
- img += '<img src="' + src + '" height="' + sz + '" width="' + sz + '" style="border:3px solid #' + short_hex + ';" />';
- img += '</a>';
-
- var html = '<div id="thumb_' + ph['id'] + '" class="thumb_' + hex + '" style="margin-right:10px;margin-bottom:20px; float:left">';
- html += img;
- html += '<div style="margin-top:12px;font-style:italic;text-align:center;font-size:10px;color:auto;">' + ph['title'].substring(0, 6) + '...</div>';
- html += '</div>';
-
- $(uid).append(html);
- }
-
- }, function (rsp) {
- $("#ffbp_status").html('Unable to retrieve any photos for that user. The attempt failed with the following message:<br /><br />' + rsp.error.message);
- $("#ffbp_status").show();
- return;
- });
-
- $("#buddy_" + hex)[0].src = "/images/loading.gif";
-
- $("#ffbp_status").html("Retrieving photos...");
- $("#ffbp_status").show();
- }
-
-</script>
-
-{% if browser.mobile %}
-{% else %}
-<div style="height:20px;border:none;">
-<div id="ffbp_status" style="z-index:100;"></div>
-</div>
-{% endif %}
-
-{% for slice in slices %}
-{% ifnotequal slice.count 0 %}
-{% include "inc_show_contacts.html"%}
-{% else %}
-{% include "inc_no_contacts.html"%}
-{% endifnotequal %}
-{% endfor %}
-
-<div style="font-size:small;margin-top:30px;font-style:italic;">
-{% ifequal user.settings.search_in_contacts_filter 'all' %}
-
-{% if browser.mobile %}
-<a href="/settings">Switch to friends and family only</a>.
-
-{% else %}
-You are currently checking for recent uploads from all your contacts. <a href="/settings">Switch to friends and family only</a>.
-{% endif %}
-
-{% else %}
-
-{% if browser.mobile %}
-<a href="/settings">Switch to all your contacts</a>.
-{% else %}
-You are currently checking for recent uploads from friends and family only. <a href="/settings">Switch to all your contacts</a>.
-{% endif %}
-
-{% endifequal %}
-</div>
-{% include "inc_foot.html" %}
|
straup/gae-flickrforbusypeople
|
70ee8f83be10a99a4d277f65f1b325f668c93d15
|
label elements
|
diff --git a/templates/settings.html b/templates/settings.html
index 7bbeda2..9ff2d2d 100644
--- a/templates/settings.html
+++ b/templates/settings.html
@@ -1,35 +1,35 @@
{% include "inc_head.html" %}
{% if error %}
<p>
{% ifequal error 'invalid_crumb' %}
Hrm. Your session seems to have expired.
{% endifequal %}
{% ifequal error 'invalid_filter' %}
Hey! That's an invalid filter.
{% endifequal %}
</p>
<p><a href="/settings">Would you like to try again?</a></p>
{% else %}
<h2>Update which contacts you want to get busy for</h2>
<p>This setting will filter which contacts with recent uploads you will see updates for.</p>
<form method="POST" action="/settings" style="font-size:large;">
<input type="hidden" name="crumb" value="{{ settings_crumb|escape }}" />
- <input type="radio" name="filter" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked"{% endifequal %} /> All your contacts
- <br />
- <input type="radio" name="filter" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked"{% endifequal %} /> Only friends and family
+ <input type="radio" name="filter" id="all" value="all" {% ifequal user.settings.search_in_contacts_filter 'all' %}checked"{% endifequal %} /> <label for="all">All your contacts</label>
+ <br style="margin-bottom:15px;" />
+ <input type="radio" name="filter" id="ff" value="ff" {% ifequal user.settings.search_in_contacts_filter 'ff' %}checked"{% endifequal %} /> <label for="ff">Only friends and family</label>
<br /><br />
<input type="submit" value="UPDATE" />
</form>
{% endif %}
<p>Or just go back to your <a href="/">recent uploads page.</p>
{% include "inc_foot.html" %}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.