repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, ⌀ = nullable)
---|---|---|---|---
emersonsoftware/ansiblefork | refs/heads/devel | lib/ansible/modules/system/cron.py | 25 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2012, Dane Summers <[email protected]>
# (c) 2013, Mike Grozak <[email protected]>
# (c) 2013, Patrick Callahan <[email protected]>
# (c) 2015, Evan Kaufman <[email protected]>
# (c) 2015, Luca Berruti <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Cron Plugin: The goal of this plugin is to provide an idempotent method for
# setting up cron jobs on a host. The script will play well with other manually
# entered crons. Each cron job entered will be preceded with a comment
# describing the job so that it can be found later, which is required to be
# present in order for this plugin to find/modify the job.
#
# This module is based on python-crontab by Martin Owens.
#
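# For example (illustrative), a job named "check dirs" managed by this module
# shows up in the target crontab as a comment/job pair:
#
#   #Ansible: check dirs
#   0 5,2 * * * ls -alh > /dev/null
#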
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = """
---
module: cron
short_description: Manage cron.d and crontab entries.
description:
- Use this module to manage crontab entries and environment variables. It allows
you to create, update, or delete environment variables and named crontab entries.
- 'When crontab jobs are managed: the module includes one line with the description of the
crontab entry C("#Ansible: <name>") corresponding to the "name" passed to the module,
which is used by future ansible/module calls to find/check the state. The "name"
parameter should be unique, and changing the "name" value will result in a new cron
task being created (or a different one being removed).'
- 'When environment variables are managed: no comment line is added, but, when the module
needs to find/check the state, it uses the "name" parameter to find the environment
variable definition line.'
- 'When using symbols such as %, they must be properly escaped.'
version_added: "0.9"
options:
name:
description:
- Description of a crontab entry or, if env is set, the name of environment variable.
Required if state=absent. Note that if name is not set and state=present, then a
new crontab entry will always be created, regardless of existing ones.
default: null
required: false
user:
description:
- The specific user whose crontab should be modified.
required: false
default: root
job:
description:
- The command to execute or, if env is set, the value of environment variable.
Required if state=present.
required: false
aliases: ['value']
default: null
state:
description:
- Whether to ensure the job or environment variable is present or absent.
required: false
default: present
choices: [ "present", "absent" ]
cron_file:
description:
- If specified, uses this file instead of an individual user's crontab.
If this is a relative path, it is interpreted with respect to
/etc/cron.d. (If it is absolute, it will typically be /etc/crontab).
To use the C(cron_file) parameter you must specify the C(user) as well.
required: false
default: null
backup:
description:
- If set, create a backup of the crontab before it is modified.
The location of the backup is returned in the C(backup_file) variable by this module.
required: false
choices: [ "yes", "no" ]
default: no
minute:
description:
- Minute when the job should run ( 0-59, *, */2, etc )
required: false
default: "*"
hour:
description:
- Hour when the job should run ( 0-23, *, */2, etc )
required: false
default: "*"
day:
description:
- Day of the month the job should run ( 1-31, *, */2, etc )
required: false
default: "*"
aliases: [ "dom" ]
month:
description:
- Month of the year the job should run ( 1-12, *, */2, etc )
required: false
default: "*"
weekday:
description:
- Day of the week that the job should run ( 0-6 for Sunday-Saturday, *, etc )
required: false
default: "*"
aliases: [ "dow" ]
reboot:
description:
- If the job should be run at reboot. This option is deprecated. Users should use special_time.
version_added: "1.0"
required: false
default: "no"
choices: [ "yes", "no" ]
special_time:
description:
- Special time specification nickname.
version_added: "1.3"
required: false
default: null
choices: [ "reboot", "yearly", "annually", "monthly", "weekly", "daily", "hourly" ]
disabled:
description:
- If the job should be disabled (commented out) in the crontab. Only has effect if state=present
version_added: "2.0"
required: false
default: false
env:
description:
- If set, manages a crontab's environment variable. New variables are added on top of crontab.
"name" and "value" parameters are the name and the value of environment variable.
version_added: "2.1"
required: false
default: "no"
choices: [ "yes", "no" ]
insertafter:
description:
- Used with C(state=present) and C(env). If specified, the environment variable will be
inserted after the declaration of specified environment variable.
version_added: "2.1"
required: false
default: null
insertbefore:
description:
- Used with C(state=present) and C(env). If specified, the environment variable will be
inserted before the declaration of specified environment variable.
version_added: "2.1"
required: false
default: null
requirements:
- cron
author:
- "Dane Summers (@dsummersl)"
- 'Mike Grozak'
- 'Patrick Callahan'
- 'Evan Kaufman (@EvanK)'
- 'Luca Berruti (@lberruti)'
"""
EXAMPLES = '''
# Ensure a job that runs at 2am and 5am exists.
# Creates an entry like "0 5,2 * * * ls -alh > /dev/null"
- cron:
name: "check dirs"
minute: "0"
hour: "5,2"
job: "ls -alh > /dev/null"
# Ensure an old job is no longer present. Removes any job that is prefixed
# by "#Ansible: an old job" from the crontab
- cron:
name: "an old job"
state: absent
# Creates an entry like "@reboot /some/job.sh"
- cron:
name: "a job for reboot"
special_time: reboot
job: "/some/job.sh"
# Creates an entry like "PATH=/opt/bin" on top of crontab
- cron:
name: PATH
env: yes
value: /opt/bin
# Creates an entry like "APP_HOME=/srv/app" and insert it after PATH
# declaration
- cron:
name: APP_HOME
env: yes
value: /srv/app
insertafter: PATH
# Creates a cron file under /etc/cron.d
- cron:
name: yum autoupdate
weekday: 2
minute: 0
hour: 12
user: root
job: "YUMINTERACTIVE: 0 /usr/sbin/yum-autoupdate"
cron_file: ansible_yum-autoupdate
# Removes a cron file from under /etc/cron.d
- cron:
name: "yum autoupdate"
cron_file: ansible_yum-autoupdate
state: absent
# Removes "APP_HOME" environment variable from crontab
- cron:
name: APP_HOME
env: yes
state: absent
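# An illustrative example: keep the "check dirs" job in the crontab but comment
# it out using the "disabled" option documented above (only takes effect with
# state=present)
- cron:
    name: "check dirs"
    minute: "0"
    hour: "5,2"
    job: "ls -alh > /dev/null"
    disabled: yes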
'''
import os
import pwd
import re
import sys
import tempfile
import platform
import pipes
CRONCMD = "/usr/bin/crontab"
class CronTabError(Exception):
pass
class CronTab(object):
"""
CronTab object to write time based crontab file
user - the user of the crontab (defaults to root)
cron_file - a cron file under /etc/cron.d, or an absolute path
"""
def __init__(self, module, user=None, cron_file=None):
self.module = module
self.user = user
self.root = (os.getuid() == 0)
self.lines = None
self.ansible = "#Ansible: "
self.existing = ''
if cron_file:
if os.path.isabs(cron_file):
self.cron_file = cron_file
else:
self.cron_file = os.path.join('/etc/cron.d', cron_file)
else:
self.cron_file = None
self.read()
def read(self):
# Read in the crontab from the system
self.lines = []
if self.cron_file:
# read the cronfile
try:
f = open(self.cron_file, 'r')
self.existing = f.read()
self.lines = self.existing.splitlines()
f.close()
except IOError:
# cron file does not exist
return
except:
raise CronTabError("Unexpected error:", sys.exc_info()[0])
else:
# using safely quoted shell for now, but this really should be two non-shell calls instead. FIXME
(rc, out, err) = self.module.run_command(self._read_user_execute(), use_unsafe_shell=True)
if rc != 0 and rc != 1: # 1 can mean that there are no jobs.
raise CronTabError("Unable to read crontab")
self.existing = out
lines = out.splitlines()
count = 0
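# The first few lines of `crontab -l` output on some systems are auto-generated
# header comments (matched by the patterns below); skip up to three of them so
# they are not carried over and duplicated when the crontab is written back.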
for l in lines:
if count > 2 or (not re.match( r'# DO NOT EDIT THIS FILE - edit the master and reinstall.', l) and
not re.match( r'# \(/tmp/.*installed on.*\)', l) and
not re.match( r'# \(.*version.*\)', l)):
self.lines.append(l)
else:
pattern = re.escape(l) + '[\r\n]?'
self.existing = re.sub(pattern, '', self.existing, 1)
count += 1
def is_empty(self):
if len(self.lines) == 0:
return True
else:
return False
def write(self, backup_file=None):
"""
Write the crontab to the system. Saves all information.
"""
if backup_file:
fileh = open(backup_file, 'w')
elif self.cron_file:
fileh = open(self.cron_file, 'w')
else:
filed, path = tempfile.mkstemp(prefix='crontab')
os.chmod(path, int('0644', 8))
fileh = os.fdopen(filed, 'w')
fileh.write(self.render())
fileh.close()
# return if making a backup
if backup_file:
return
# Add the entire crontab back to the user crontab
if not self.cron_file:
# quoting shell args for now but really this should be two non-shell calls. FIXME
(rc, out, err) = self.module.run_command(self._write_execute(path), use_unsafe_shell=True)
os.unlink(path)
if rc != 0:
self.module.fail_json(msg=err)
# set SELinux permissions
if self.module.selinux_enabled() and self.cron_file:
self.module.set_default_selinux_context(self.cron_file, False)
def do_comment(self, name):
return "%s%s" % (self.ansible, name)
def add_job(self, name, job):
# Add the comment
self.lines.append(self.do_comment(name))
# Add the job
self.lines.append("%s" % (job))
def update_job(self, name, job):
return self._update_job(name, job, self.do_add_job)
def do_add_job(self, lines, comment, job):
lines.append(comment)
lines.append("%s" % (job))
def remove_job(self, name):
return self._update_job(name, "", self.do_remove_job)
def do_remove_job(self, lines, comment, job):
return None
def add_env(self, decl, insertafter=None, insertbefore=None):
if not (insertafter or insertbefore):
self.lines.insert(0, decl)
return
if insertafter:
other_name = insertafter
elif insertbefore:
other_name = insertbefore
other_decl = self.find_env(other_name)
if len(other_decl) > 0:
if insertafter:
index = other_decl[0]+1
elif insertbefore:
index = other_decl[0]
self.lines.insert(index, decl)
return
self.module.fail_json(msg="Variable named '%s' not found." % other_name)
def update_env(self, name, decl):
return self._update_env(name, decl, self.do_add_env)
def do_add_env(self, lines, decl):
lines.append(decl)
def remove_env(self, name):
return self._update_env(name, '', self.do_remove_env)
def do_remove_env(self, lines, decl):
return None
def remove_job_file(self):
try:
os.unlink(self.cron_file)
return True
except OSError:
# cron file does not exist
return False
except:
raise CronTabError("Unexpected error:", sys.exc_info()[0])
def find_job(self, name, job=None):
# attempt to find job by 'Ansible:' header comment
comment = None
for l in self.lines:
if comment is not None:
if comment == name:
return [comment, l]
else:
comment = None
elif re.match( r'%s' % self.ansible, l):
comment = re.sub( r'%s' % self.ansible, '', l)
# failing that, attempt to find job by exact match
if job:
for i, l in enumerate(self.lines):
if l == job:
# if no leading ansible header, insert one
if not re.match( r'%s' % self.ansible, self.lines[i-1]):
self.lines.insert(i, self.do_comment(name))
return [self.lines[i], l, True]
# if a leading blank ansible header AND job has a name, update header
elif name and self.lines[i-1] == self.do_comment(None):
self.lines[i-1] = self.do_comment(name)
return [self.lines[i-1], l, True]
return []
def find_env(self, name):
for index, l in enumerate(self.lines):
if re.match( r'^%s=' % name, l):
return [index, l]
return []
def get_cron_job(self,minute,hour,day,month,weekday,job,special,disabled):
# normalize any leading/trailing newlines (ansible/ansible-modules-core#3791)
job = job.strip('\r\n')
if disabled:
disable_prefix = '#'
else:
disable_prefix = ''
if special:
if self.cron_file:
return "%s@%s %s %s" % (disable_prefix, special, self.user, job)
else:
return "%s@%s %s" % (disable_prefix, special, job)
else:
if self.cron_file:
return "%s%s %s %s %s %s %s %s" % (disable_prefix,minute,hour,day,month,weekday,self.user,job)
else:
return "%s%s %s %s %s %s %s" % (disable_prefix,minute,hour,day,month,weekday,job)
return None
def get_jobnames(self):
jobnames = []
for l in self.lines:
if re.match( r'%s' % self.ansible, l):
jobnames.append(re.sub( r'%s' % self.ansible, '', l))
return jobnames
def get_envnames(self):
envnames = []
for l in self.lines:
if re.match( r'^\S+=' , l):
envnames.append(l.split('=')[0])
return envnames
def _update_job(self, name, job, addlinesfunction):
ansiblename = self.do_comment(name)
newlines = []
comment = None
for l in self.lines:
if comment is not None:
addlinesfunction(newlines, comment, job)
comment = None
elif l == ansiblename:
comment = l
else:
newlines.append(l)
self.lines = newlines
if len(newlines) == 0:
return True
else:
return False # TODO add some more error testing
def _update_env(self, name, decl, addenvfunction):
newlines = []
for l in self.lines:
if re.match( r'^%s=' % name, l):
addenvfunction(newlines, decl)
else:
newlines.append(l)
self.lines = newlines
def render(self):
"""
Render this crontab as it would be in the crontab.
"""
crons = []
for cron in self.lines:
crons.append(cron)
result = '\n'.join(crons)
if result:
result = result.rstrip('\r\n') + '\n'
return result
def _read_user_execute(self):
"""
Returns the command line for reading a crontab
"""
user = ''
if self.user:
if platform.system() == 'SunOS':
return "su %s -c '%s -l'" % (pipes.quote(self.user), pipes.quote(CRONCMD))
elif platform.system() == 'AIX':
return "%s -l %s" % (pipes.quote(CRONCMD), pipes.quote(self.user))
elif platform.system() == 'HP-UX':
return "%s %s %s" % (CRONCMD , '-l', pipes.quote(self.user))
elif pwd.getpwuid(os.getuid())[0] != self.user:
user = '-u %s' % pipes.quote(self.user)
return "%s %s %s" % (CRONCMD , user, '-l')
def _write_execute(self, path):
"""
Return the command line for writing a crontab
"""
user = ''
if self.user:
if platform.system() in ['SunOS', 'HP-UX', 'AIX']:
return "chown %s %s ; su '%s' -c '%s %s'" % (pipes.quote(self.user), pipes.quote(path), pipes.quote(self.user), CRONCMD, pipes.quote(path))
elif pwd.getpwuid(os.getuid())[0] != self.user:
user = '-u %s' % pipes.quote(self.user)
return "%s %s %s" % (CRONCMD , user, pipes.quote(path))
#==================================================
def main():
# The following example playbooks:
#
# - cron: name="check dirs" hour="5,2" job="ls -alh > /dev/null"
#
# - name: do the job
# cron: name="do the job" hour="5,2" job="/some/dir/job.sh"
#
# - name: no job
# cron: name="an old job" state=absent
#
# - name: sets env
# cron: name="PATH" env=yes value="/bin:/usr/bin"
#
# Would produce:
# PATH=/bin:/usr/bin
# # Ansible: check dirs
# * * 5,2 * * ls -alh > /dev/null
# # Ansible: do the job
# * * 5,2 * * /some/dir/job.sh
module = AnsibleModule(
argument_spec = dict(
name=dict(required=False),
user=dict(required=False),
job=dict(required=False, aliases=['value']),
cron_file=dict(required=False),
state=dict(default='present', choices=['present', 'absent']),
backup=dict(default=False, type='bool'),
minute=dict(default='*'),
hour=dict(default='*'),
day=dict(aliases=['dom'], default='*'),
month=dict(default='*'),
weekday=dict(aliases=['dow'], default='*'),
reboot=dict(required=False, default=False, type='bool'),
special_time=dict(required=False,
default=None,
choices=["reboot", "yearly", "annually", "monthly", "weekly", "daily", "hourly"],
type='str'),
disabled=dict(default=False, type='bool'),
env=dict(required=False, type='bool'),
insertafter=dict(required=False),
insertbefore=dict(required=False),
),
supports_check_mode = True,
mutually_exclusive=[
['reboot', 'special_time'],
['insertafter', 'insertbefore'],
]
)
name = module.params['name']
user = module.params['user']
job = module.params['job']
cron_file = module.params['cron_file']
state = module.params['state']
backup = module.params['backup']
minute = module.params['minute']
hour = module.params['hour']
day = module.params['day']
month = module.params['month']
weekday = module.params['weekday']
reboot = module.params['reboot']
special_time = module.params['special_time']
disabled = module.params['disabled']
env = module.params['env']
insertafter = module.params['insertafter']
insertbefore = module.params['insertbefore']
do_install = state == 'present'
changed = False
res_args = dict()
# Ensure all files generated are only writable by the owning user. Primarily relevant for the cron_file option.
os.umask(int('022', 8))
crontab = CronTab(module, user, cron_file)
module.debug('cron instantiated - name: "%s"' % name)
if module._diff:
diff = dict()
diff['before'] = crontab.existing
if crontab.cron_file:
diff['before_header'] = crontab.cron_file
else:
if crontab.user:
diff['before_header'] = 'crontab for user "%s"' % crontab.user
else:
diff['before_header'] = 'crontab'
# --- user input validation ---
if (special_time or reboot) and \
(True in [(x != '*') for x in [minute, hour, day, month, weekday]]):
module.fail_json(msg="You must specify time and date fields or special time.")
if cron_file and do_install:
if not user:
module.fail_json(msg="To use cron_file=... parameter you must specify user=... as well")
if job is None and do_install:
module.fail_json(msg="You must specify 'job' to install a new cron job or variable")
if (insertafter or insertbefore) and not env and do_install:
module.fail_json(msg="Insertafter and insertbefore parameters are valid only with env=yes")
if reboot:
special_time = "reboot"
# if requested make a backup before making a change
if backup and not module.check_mode:
(backuph, backup_file) = tempfile.mkstemp(prefix='crontab')
crontab.write(backup_file)
if crontab.cron_file and not name and not do_install:
if module._diff:
diff['after'] = ''
diff['after_header'] = '/dev/null'
else:
diff = dict()
if module.check_mode:
changed = os.path.isfile(crontab.cron_file)
else:
changed = crontab.remove_job_file()
module.exit_json(changed=changed,cron_file=cron_file,state=state,diff=diff)
if env:
if ' ' in name:
module.fail_json(msg="Invalid name for environment variable")
decl = '%s="%s"' % (name, job)
old_decl = crontab.find_env(name)
if do_install:
if len(old_decl) == 0:
crontab.add_env(decl, insertafter, insertbefore)
changed = True
if len(old_decl) > 0 and old_decl[1] != decl:
crontab.update_env(name, decl)
changed = True
else:
if len(old_decl) > 0:
crontab.remove_env(name)
changed = True
else:
if do_install:
job = crontab.get_cron_job(minute, hour, day, month, weekday, job, special_time, disabled)
old_job = crontab.find_job(name, job)
if len(old_job) == 0:
crontab.add_job(name, job)
changed = True
if len(old_job) > 0 and old_job[1] != job:
crontab.update_job(name, job)
changed = True
if len(old_job) > 2:
crontab.update_job(name, job)
changed = True
else:
old_job = crontab.find_job(name)
if len(old_job) > 0:
crontab.remove_job(name)
changed = True
# no changes to env/job, but existing crontab needs a terminating newline
if not changed:
if not (crontab.existing.endswith('\r') or crontab.existing.endswith('\n')):
changed = True
res_args = dict(
jobs = crontab.get_jobnames(),
envs = crontab.get_envnames(),
changed = changed
)
if changed:
if not module.check_mode:
crontab.write()
if module._diff:
diff['after'] = crontab.render()
if crontab.cron_file:
diff['after_header'] = crontab.cron_file
else:
if crontab.user:
diff['after_header'] = 'crontab for user "%s"' % crontab.user
else:
diff['after_header'] = 'crontab'
res_args['diff'] = diff
# retain the backup only if crontab or cron file have changed
if backup:
if changed:
res_args['backup_file'] = backup_file
else:
if not module.check_mode:
os.unlink(backup_file)
if cron_file:
res_args['cron_file'] = cron_file
module.exit_json(**res_args)
# --- should never get here
module.exit_json(msg="Unable to execute cron task.")
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
elimence/edx-platform | refs/heads/master | lms/djangoapps/courseware/views.py | 1 | import logging
import urllib
from functools import partial
from django.conf import settings
from django.core.context_processors import csrf
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect
from mitxmako.shortcuts import render_to_response, render_to_string
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.cache import cache_control
from courseware import grades
from courseware.access import has_access
from courseware.courses import (get_courses, get_course_with_access,
get_courses_by_university, sort_by_announcement)
import courseware.tabs as tabs
from courseware.masquerade import setup_masquerade
from courseware.model_data import ModelDataCache
from .module_render import toc_for_course, get_module_for_descriptor, get_module
from courseware.models import StudentModule, StudentModuleHistory
from django_comment_client.utils import get_discussion_title
from student.models import UserTestGroup, CourseEnrollment
from util.cache import cache, cache_if_anonymous
from xmodule.modulestore import Location
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, NoPathToItem
from xmodule.modulestore.search import path_to_location
import comment_client
log = logging.getLogger("mitx.courseware")
template_imports = {'urllib': urllib}
def user_groups(user):
"""
TODO (vshnayder): This is not used. When we have a new plan for groups, adjust appropriately.
"""
if not user.is_authenticated():
return []
# TODO: Rewrite in Django
key = 'user_group_names_{user.id}'.format(user=user)
cache_expiration = 60 * 60 # one hour
# Kill caching on dev machines -- we switch groups a lot
group_names = cache.get(key)
if settings.DEBUG:
group_names = None
if group_names is None:
group_names = [u.name for u in UserTestGroup.objects.filter(users=user)]
cache.set(key, group_names, cache_expiration)
return group_names
@ensure_csrf_cookie
@cache_if_anonymous
def courses(request):
"""
Render "find courses" page. The course selection work is done in courseware.courses.
"""
courses = get_courses(request.user, request.META.get('HTTP_HOST'))
courses = sort_by_announcement(courses)
return render_to_response("courseware/courses.html", {'courses': courses})
def render_accordion(request, course, chapter, section, model_data_cache):
"""
Draws navigation bar. Takes current position in accordion as
parameter.
If chapter and section are '' or None, renders a default accordion.
course, chapter, and section are the url_names.
Returns the html string
"""
# grab the table of contents
user = User.objects.prefetch_related("groups").get(id=request.user.id)
request.user = user # keep just one instance of User
toc = toc_for_course(user, request, course, chapter, section, model_data_cache)
context = dict([('toc', toc),
('course_id', course.id),
('csrf', csrf(request)['csrf_token']),
('show_timezone', course.show_timezone)] + template_imports.items())
return render_to_string('courseware/accordion.html', context)
def get_current_child(xmodule):
"""
Get the xmodule.position's display item of an xmodule that has a position and
children. If xmodule has no position or is out of bounds, return the first child.
Returns None only if there are no children at all.
"""
if not hasattr(xmodule, 'position'):
return None
if xmodule.position is None:
pos = 0
else:
# position is 1-indexed.
pos = xmodule.position - 1
children = xmodule.get_display_items()
if 0 <= pos < len(children):
child = children[pos]
elif len(children) > 0:
# Something is wrong. Default to first child
child = children[0]
else:
child = None
return child
def redirect_to_course_position(course_module):
"""
Return a redirect to the user's current place in the course.
If this is the user's first time, redirects to COURSE/CHAPTER/SECTION.
If this isn't the user's first time, redirects to COURSE/CHAPTER,
and the view will find the current section and display a message
about reusing the stored position.
If there is no current position in the course or chapter, then selects
the first child.
"""
urlargs = {'course_id': course_module.descriptor.id}
chapter = get_current_child(course_module)
if chapter is None:
# oops. Something bad has happened.
raise Http404("No chapter found when loading current position in course")
urlargs['chapter'] = chapter.url_name
if course_module.position is not None:
return redirect(reverse('courseware_chapter', kwargs=urlargs))
# Relying on default of returning first child
section = get_current_child(chapter)
if section is None:
raise Http404("No section found when loading current position in course")
urlargs['section'] = section.url_name
return redirect(reverse('courseware_section', kwargs=urlargs))
def save_child_position(seq_module, child_name):
"""
child_name: url_name of the child
"""
for position, c in enumerate(seq_module.get_display_items(), start=1):
if c.url_name == child_name:
# Only save if position changed
if position != seq_module.position:
seq_module.position = position
def check_for_active_timelimit_module(request, course_id, course):
"""
Looks for a timing module for the given user and course that is currently active.
If found, returns a context dict with timer-related values to enable display of time remaining.
"""
context = {}
# TODO (cpennington): Once we can query the course structure, replace this with such a query
timelimit_student_modules = StudentModule.objects.filter(student=request.user, course_id=course_id, module_type='timelimit')
if timelimit_student_modules:
for timelimit_student_module in timelimit_student_modules:
# get the corresponding section_descriptor for the given StudentModel entry:
module_state_key = timelimit_student_module.module_state_key
timelimit_descriptor = modulestore().get_instance(course_id, Location(module_state_key))
timelimit_module_cache = ModelDataCache.cache_for_descriptor_descendents(course.id, request.user,
timelimit_descriptor, depth=None)
timelimit_module = get_module_for_descriptor(request.user, request, timelimit_descriptor,
timelimit_module_cache, course.id, position=None)
if timelimit_module is not None and timelimit_module.category == 'timelimit' and \
timelimit_module.has_begun and not timelimit_module.has_ended:
location = timelimit_module.location
# determine where to go when the timer expires:
if timelimit_descriptor.time_expired_redirect_url is None:
raise Http404("no time_expired_redirect_url specified at this location: {} ".format(timelimit_module.location))
context['time_expired_redirect_url'] = timelimit_descriptor.time_expired_redirect_url
# Fetch the remaining time relative to the end time as stored in the module when it was started.
# This value should be in milliseconds.
remaining_time = timelimit_module.get_remaining_time_in_ms()
context['timer_expiration_duration'] = remaining_time
context['suppress_toplevel_navigation'] = timelimit_descriptor.suppress_toplevel_navigation
return_url = reverse('jump_to', kwargs={'course_id': course_id, 'location': location})
context['timer_navigation_return_url'] = return_url
return context
def update_timelimit_module(user, course_id, model_data_cache, timelimit_descriptor, timelimit_module):
"""
Updates the state of the provided timing module, starting it if it hasn't begun.
Returns dict with timer-related values to enable display of time remaining.
Returns 'timer_expiration_duration' in dict if timer is still active, and not if timer has expired.
"""
context = {}
# determine where to go when the exam ends:
if timelimit_descriptor.time_expired_redirect_url is None:
raise Http404("No time_expired_redirect_url specified at this location: {} ".format(timelimit_module.location))
context['time_expired_redirect_url'] = timelimit_descriptor.time_expired_redirect_url
if not timelimit_module.has_ended:
if not timelimit_module.has_begun:
# user has not started the exam, so start it now.
if timelimit_descriptor.duration is None:
raise Http404("No duration specified at this location: {} ".format(timelimit_module.location))
# The user may have an accommodation that has been granted to them.
# This accommodation information should already be stored in the module's state.
timelimit_module.begin(timelimit_descriptor.duration)
# the exam has been started, either because the student is returning to the
# exam page, or because they have just visited it. Fetch the remaining time relative to the
# end time as stored in the module when it was started.
context['timer_expiration_duration'] = timelimit_module.get_remaining_time_in_ms()
# also use the timed module to determine whether top-level navigation is visible:
context['suppress_toplevel_navigation'] = timelimit_descriptor.suppress_toplevel_navigation
return context
@login_required
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def index(request, course_id, chapter=None, section=None,
position=None):
"""
Displays courseware accordion and associated content. If course, chapter,
and section are all specified, renders the page, or returns an error if they
are invalid.
If section is not specified, displays the accordion opened to the right chapter.
If neither chapter nor section is specified, redirects to the user's most recent
chapter, or the first chapter if this is the user's first visit.
Arguments:
- request : HTTP request
- course_id : course id (str: ORG/course/URL_NAME)
- chapter : chapter url_name (str)
- section : section url_name (str)
- position : position in module, eg of <sequential> module (str)
Returns:
- HTTPresponse
"""
user = User.objects.prefetch_related("groups").get(id=request.user.id)
request.user = user # keep just one instance of User
course = get_course_with_access(user, course_id, 'load', depth=2)
staff_access = has_access(user, course, 'staff')
registered = registered_for_course(course, user)
if not registered:
# TODO (vshnayder): do course instructors need to be registered to see course?
log.debug('User %s tried to view course %s but is not enrolled' % (user, course.location.url()))
return redirect(reverse('about_course', args=[course.id]))
masq = setup_masquerade(request, staff_access)
try:
model_data_cache = ModelDataCache.cache_for_descriptor_descendents(
course.id, user, course, depth=2)
course_module = get_module_for_descriptor(user, request, course, model_data_cache, course.id)
if course_module is None:
log.warning('If you see this, something went wrong: if we got this'
' far, should have gotten a course module for this user')
return redirect(reverse('about_course', args=[course.id]))
if chapter is None:
return redirect_to_course_position(course_module)
context = {
'csrf': csrf(request)['csrf_token'],
'accordion': render_accordion(request, course, chapter, section, model_data_cache),
'COURSE_TITLE': course.display_name_with_default,
'course': course,
'init': '',
'content': '',
'staff_access': staff_access,
'masquerade': masq,
'xqa_server': settings.MITX_FEATURES.get('USE_XQA_SERVER', 'http://xqa:[email protected]/xqa')
}
chapter_descriptor = course.get_child_by(lambda m: m.url_name == chapter)
if chapter_descriptor is not None:
save_child_position(course_module, chapter)
else:
raise Http404('No chapter descriptor found with name {}'.format(chapter))
chapter_module = course_module.get_child_by(lambda m: m.url_name == chapter)
if chapter_module is None:
# User may be trying to access a chapter that isn't live yet
if masq=='student': # if staff is masquerading as student be kinder, don't 404
log.debug('staff masq as student: no chapter %s' % chapter)
return redirect(reverse('courseware', args=[course.id]))
raise Http404
if section is not None:
section_descriptor = chapter_descriptor.get_child_by(lambda m: m.url_name == section)
if section_descriptor is None:
# Specifically asked-for section doesn't exist
if masq=='student': # if staff is masquerading as student be kinder, don't 404
log.debug('staff masq as student: no section %s' % section)
return redirect(reverse('courseware', args=[course.id]))
raise Http404
# cdodge: this looks silly, but let's refetch the section_descriptor with depth=None
# which will prefetch the children more efficiently than doing a recursive load
section_descriptor = modulestore().get_instance(course.id, section_descriptor.location, depth=None)
# Load all descendants of the section, because we're going to display its
# html, which in general will need all of its children
section_model_data_cache = ModelDataCache.cache_for_descriptor_descendents(
course_id, user, section_descriptor, depth=None)
section_module = get_module(request.user, request,
section_descriptor.location,
section_model_data_cache, course_id, position, depth=None)
if section_module is None:
# User may be trying to be clever and access something
# they don't have access to.
raise Http404
# Save where we are in the chapter
save_child_position(chapter_module, section)
# check here if this section *is* a timed module.
if section_module.category == 'timelimit':
timer_context = update_timelimit_module(user, course_id, section_model_data_cache,
section_descriptor, section_module)
if 'timer_expiration_duration' in timer_context:
context.update(timer_context)
else:
# if there is no expiration defined, then we know the timer has expired:
return HttpResponseRedirect(timer_context['time_expired_redirect_url'])
else:
# check here if this page is within a course that has an active timed module running. If so, then
# add in the appropriate timer information to the rendering context:
context.update(check_for_active_timelimit_module(request, course_id, course))
context['content'] = section_module.get_html()
else:
# section is none, so display a message
prev_section = get_current_child(chapter_module)
if prev_section is None:
# Something went wrong -- perhaps this chapter has no sections visible to the user
raise Http404
prev_section_url = reverse('courseware_section', kwargs={'course_id': course_id,
'chapter': chapter_descriptor.url_name,
'section': prev_section.url_name})
context['content'] = render_to_string('courseware/welcome-back.html',
{'course': course,
'chapter_module': chapter_module,
'prev_section': prev_section,
'prev_section_url': prev_section_url})
result = render_to_response('courseware/courseware.html', context)
except Exception as e:
if isinstance(e, Http404):
# let it propagate
raise
# In production, don't want to let a 500 out for any reason
if settings.DEBUG:
raise
else:
log.exception("Error in index view: user={user}, course={course},"
" chapter={chapter} section={section}"
"position={position}".format(
user=user,
course=course,
chapter=chapter,
section=section,
position=position
))
try:
result = render_to_response('courseware/courseware-error.html',
{'staff_access': staff_access,
'course': course})
except:
# Let the exception propagate, relying on global config to at
# at least return a nice error message
log.exception("Error while rendering courseware-error page")
raise
return result
@ensure_csrf_cookie
def jump_to(request, course_id, location):
"""
Show the page that contains a specific location.
If the location is invalid or not in any class, return a 404.
Otherwise, delegates to the index view to figure out whether this user
has access, and what they should see.
"""
# Complain if the location isn't valid
try:
location = Location(location)
except InvalidLocationError:
raise Http404("Invalid location")
# Complain if there's not data for this location
try:
(course_id, chapter, section, position) = path_to_location(modulestore(), course_id, location)
except ItemNotFoundError:
raise Http404("No data at this location: {0}".format(location))
except NoPathToItem:
raise Http404("This location is not in any class: {0}".format(location))
# choose the appropriate view (and provide the necessary args) based on the
# args provided by the redirect.
# Rely on index to do all error handling and access control.
if chapter is None:
return redirect('courseware', course_id=course_id)
elif section is None:
return redirect('courseware_chapter', course_id=course_id, chapter=chapter)
elif position is None:
return redirect('courseware_section', course_id=course_id, chapter=chapter, section=section)
else:
return redirect('courseware_position', course_id=course_id, chapter=chapter, section=section, position=position)
@ensure_csrf_cookie
def course_info(request, course_id):
"""
Display the course's info.html, or 404 if there is no such course.
Assumes the course_id is in a valid format.
"""
course = get_course_with_access(request.user, course_id, 'load')
staff_access = has_access(request.user, course, 'staff')
masq = setup_masquerade(request, staff_access) # allow staff to toggle masquerade on info page
return render_to_response('courseware/info.html', {'request': request, 'course_id': course_id, 'cache': None,
'course': course, 'staff_access': staff_access, 'masquerade': masq})
@ensure_csrf_cookie
def static_tab(request, course_id, tab_slug):
"""
Display the courses tab with the given name.
Assumes the course_id is in a valid format.
"""
course = get_course_with_access(request.user, course_id, 'load')
tab = tabs.get_static_tab_by_slug(course, tab_slug)
if tab is None:
raise Http404
contents = tabs.get_static_tab_contents(
request,
course,
tab
)
if contents is None:
raise Http404
staff_access = has_access(request.user, course, 'staff')
return render_to_response('courseware/static_tab.html',
{'course': course,
'tab': tab,
'tab_contents': contents,
'staff_access': staff_access, })
# TODO arjun: remove when custom tabs in place, see courseware/syllabus.py
@ensure_csrf_cookie
def syllabus(request, course_id):
"""
Display the course's syllabus.html, or 404 if there is no such course.
Assumes the course_id is in a valid format.
"""
course = get_course_with_access(request.user, course_id, 'load')
staff_access = has_access(request.user, course, 'staff')
return render_to_response('courseware/syllabus.html', {'course': course,
'staff_access': staff_access, })
def registered_for_course(course, user):
"""
Return CourseEnrollment if user is registered for course, else False
"""
if user is None:
return False
if user.is_authenticated():
return CourseEnrollment.objects.filter(user=user, course_id=course.id).exists()
else:
return False
@ensure_csrf_cookie
@cache_if_anonymous
def course_about(request, course_id):
if settings.MITX_FEATURES.get('ENABLE_MKTG_SITE', False):
raise Http404
course = get_course_with_access(request.user, course_id, 'see_exists')
registered = registered_for_course(course, request.user)
if has_access(request.user, course, 'load'):
course_target = reverse('info', args=[course.id])
else:
course_target = reverse('about_course', args=[course.id])
show_courseware_link = (has_access(request.user, course, 'load') or
settings.MITX_FEATURES.get('ENABLE_LMS_MIGRATION'))
return render_to_response('courseware/course_about.html',
{'course': course,
'registered': registered,
'course_target': course_target,
'show_courseware_link': show_courseware_link})
@ensure_csrf_cookie
@cache_if_anonymous
def mktg_course_about(request, course_id):
try:
course = get_course_with_access(request.user, course_id, 'see_exists')
except (ValueError, Http404) as e:
# if a course does not exist yet, display a coming
# soon button
return render_to_response('courseware/mktg_coming_soon.html',
{'course_id': course_id})
registered = registered_for_course(course, request.user)
if has_access(request.user, course, 'load'):
course_target = reverse('info', args=[course.id])
else:
course_target = reverse('about_course', args=[course.id])
allow_registration = has_access(request.user, course, 'enroll')
show_courseware_link = (has_access(request.user, course, 'load') or
settings.MITX_FEATURES.get('ENABLE_LMS_MIGRATION'))
return render_to_response('courseware/mktg_course_about.html',
{'course': course,
'registered': registered,
'allow_registration': allow_registration,
'course_target': course_target,
'show_courseware_link': show_courseware_link})
@ensure_csrf_cookie
@cache_if_anonymous
def static_university_profile(request, org_id):
"""
Return the profile for the particular org_id that does not have any courses.
"""
# Redirect to the properly capitalized org_id
last_path = request.path.split('/')[-1]
if last_path != org_id:
return redirect('static_university_profile', org_id=org_id)
# Render template
template_file = "university_profile/{0}.html".format(org_id).lower()
context = dict(courses=[], org_id=org_id)
return render_to_response(template_file, context)
@ensure_csrf_cookie
@cache_if_anonymous
def university_profile(request, org_id):
"""
Return the profile for the particular org_id. 404 if it's not valid.
"""
virtual_orgs_ids = settings.VIRTUAL_UNIVERSITIES
meta_orgs = getattr(settings, 'META_UNIVERSITIES', {})
# Get all the ids associated with this organization
all_courses = modulestore().get_courses()
valid_orgs_ids = set(c.org for c in all_courses)
valid_orgs_ids.update(virtual_orgs_ids + meta_orgs.keys())
if org_id not in valid_orgs_ids:
raise Http404("University Profile not found for {0}".format(org_id))
# Grab all courses for this organization(s)
org_ids = set([org_id] + meta_orgs.get(org_id, []))
org_courses = []
domain = request.META.get('HTTP_HOST')
for key in org_ids:
cs = get_courses_by_university(request.user, domain=domain)[key]
org_courses.extend(cs)
org_courses = sort_by_announcement(org_courses)
context = dict(courses=org_courses, org_id=org_id)
template_file = "university_profile/{0}.html".format(org_id).lower()
return render_to_response(template_file, context)
def render_notifications(request, course, notifications):
context = {
'notifications': notifications,
'get_discussion_title': partial(get_discussion_title, request=request, course=course),
'course': course,
}
return render_to_string('courseware/notifications.html', context)
@login_required
def news(request, course_id):
course = get_course_with_access(request.user, course_id, 'load')
notifications = comment_client.get_notifications(request.user.id)
context = {
'course': course,
'content': render_notifications(request, course, notifications),
}
return render_to_response('courseware/news.html', context)
@login_required
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def progress(request, course_id, student_id=None):
""" User progress. We show the grade bar and every problem score.
Course staff are allowed to see the progress of students in their class.
"""
course = get_course_with_access(request.user, course_id, 'load', depth=None)
staff_access = has_access(request.user, course, 'staff')
if student_id is None or student_id == request.user.id:
# always allowed to see your own profile
student = request.user
else:
# Requesting access to a different student's profile
if not staff_access:
raise Http404
student = User.objects.get(id=int(student_id))
# NOTE: To make sure impersonation by instructor works, use
# student instead of request.user in the rest of the function.
# The pre-fetching of groups is done to make auth checks not require an
# additional DB lookup (this kills the Progress page in particular).
student = User.objects.prefetch_related("groups").get(id=student.id)
model_data_cache = ModelDataCache.cache_for_descriptor_descendents(
course_id, student, course, depth=None)
courseware_summary = grades.progress_summary(student, request, course,
model_data_cache)
grade_summary = grades.grade(student, request, course, model_data_cache)
if courseware_summary is None:
#This means the student didn't have access to the course (which the instructor requested)
raise Http404
context = {'course': course,
'courseware_summary': courseware_summary,
'grade_summary': grade_summary,
'staff_access': staff_access,
'student': student,
}
context.update()
return render_to_response('courseware/progress.html', context)
@login_required
def submission_history(request, course_id, student_username, location):
"""Render an HTML fragment (meant for inclusion elsewhere) that renders a
history of all state changes made by this user for this problem location.
Right now this only works for problems because that's all
StudentModuleHistory records.
"""
course = get_course_with_access(request.user, course_id, 'load')
staff_access = has_access(request.user, course, 'staff')
# Permission Denied if they don't have staff access and are trying to see
# somebody else's submission history.
if (student_username != request.user.username) and (not staff_access):
raise PermissionDenied
try:
student = User.objects.get(username=student_username)
student_module = StudentModule.objects.get(course_id=course_id,
module_state_key=location,
student_id=student.id)
except User.DoesNotExist:
return HttpResponse("User {0} does not exist.".format(student_username))
except StudentModule.DoesNotExist:
return HttpResponse("{0} has never accessed problem {1}"
.format(student_username, location))
history_entries = StudentModuleHistory.objects \
.filter(student_module=student_module).order_by('-id')
# If no history records exist, let's force a save to get history started.
if not history_entries:
student_module.save()
history_entries = StudentModuleHistory.objects \
.filter(student_module=student_module).order_by('-id')
context = {
'history_entries': history_entries,
'username': student.username,
'location': location,
'course_id': course_id
}
return render_to_response('courseware/submission_history.html', context)
|
iuliat/nova | refs/heads/master | nova/console/api.py | 66 | # Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles ConsoleProxy API requests."""
from oslo_config import cfg
from nova.compute import rpcapi as compute_rpcapi
from nova.console import rpcapi as console_rpcapi
from nova.db import base
from nova import objects
CONF = cfg.CONF
CONF.import_opt('console_topic', 'nova.console.rpcapi')
class API(base.Base):
"""API for spinning up or down console proxy connections."""
def __init__(self, **kwargs):
super(API, self).__init__(**kwargs)
def get_consoles(self, context, instance_uuid):
return self.db.console_get_all_by_instance(context, instance_uuid,
columns_to_join=['pool'])
def get_console(self, context, instance_uuid, console_uuid):
return self.db.console_get(context, console_uuid, instance_uuid)
def delete_console(self, context, instance_uuid, console_uuid):
console = self.db.console_get(context, console_uuid, instance_uuid)
rpcapi = console_rpcapi.ConsoleAPI(topic=CONF.console_topic,
server=console['pool']['host'])
rpcapi.remove_console(context, console['id'])
def create_console(self, context, instance_uuid):
# NOTE(mdragon): If we wanted to return the console info
# here, we would need to do a call.
# They can just do an index later to fetch
# console info. I am not sure which is better
# here.
instance = objects.Instance.get_by_uuid(context, instance_uuid)
topic = self._get_console_topic(context, instance.host)
server = None
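# The console topic may come back qualified with a host (e.g. "console.node1",
# where "node1" is a hypothetical host name); if so, split it so the RPC call
# is routed to the console service on that host.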
if '.' in topic:
topic, server = topic.split('.', 1)
rpcapi = console_rpcapi.ConsoleAPI(topic=topic, server=server)
rpcapi.add_console(context, instance.id)
def _get_console_topic(self, context, instance_host):
rpcapi = compute_rpcapi.ComputeAPI()
return rpcapi.get_console_topic(context, instance_host)
|
visualputty/Landing-Lights | refs/heads/master | djangoflash/tests/__init__.py | 9 | # -*- coding: utf-8 -*-
"""Dummy file to make this directory a package.
"""
|
luser/socorro | refs/heads/master | socorro/cron/jobs/fetch_adi_from_hive.py | 2 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import codecs
import datetime
import urllib2
import os
import tempfile
import unicodedata
import pyhs2
from configman import Namespace, class_converter
from crontabber.base import BaseCronApp
from crontabber.mixins import as_backfill_cron_app
from socorro.external.postgresql.connection_context import ConnectionContext
from socorro.external.postgresql.dbapi2_util import execute_no_results
class NoRowsWritten(Exception):
pass
"""
Detailed documentation on columns available from our Hive system at:
https://intranet.mozilla.org/Metrics/Blocklist
Columns being queried are:
report_date
product_name
product_os_platform
product_os_version
product_version
build
build_channel
product_guid
count
"""
_QUERY = """
select
ds,
split(request_url,'/')[5],
split(split(request_url,'/')[10], '%%20')[0],
split(split(request_url,'/')[10], '%%20')[1],
split(request_url,'/')[4],
split(request_url,'/')[6],
split(request_url,'/')[9],
split(request_url,'/')[3],
count(*)
FROM v2_raw_logs
WHERE
(domain='addons.mozilla.org' OR domain='blocklist.addons.mozilla.org')
and http_status_code = '200'
and request_url like '/blocklist/3/%%'
and ds='%s'
GROUP BY
ds,
split(request_url,'/')[5],
split(split(request_url,'/')[10], '%%20')[0],
split(split(request_url,'/')[10], '%%20')[1],
split(request_url,'/')[4],
split(request_url,'/')[6],
split(request_url,'/')[9],
split(request_url,'/')[3]
"""
_RAW_ADI_QUERY = """
INSERT INTO raw_adi (
adi_count,
date,
product_name,
product_os_platform,
product_os_version,
product_version,
build,
product_guid,
update_channel
)
SELECT
sum(count),
report_date,
CASE WHEN (raw_adi_logs.product_name = 'Fennec'
AND product_guid = '{aa3c5121-dab2-40e2-81ca-7ea25febc110}')
THEN 'FennecAndroid'
WHEN (raw_adi_logs.product_name = 'Webapp Runtime')
THEN 'WebappRuntime'
ELSE raw_adi_logs.product_name
END,
product_os_platform,
product_os_version,
product_version,
build,
CASE WHEN (product_guid = '[email protected]')
THEN '{[email protected]}'
ELSE product_guid
END,
CASE WHEN (build_channel ILIKE 'release%%')
THEN 'release'
ELSE build_channel
END
FROM raw_adi_logs
-- FILTER with product_productid_map
JOIN product_productid_map ON productid =
CASE WHEN (product_guid = '[email protected]')
THEN '{[email protected]}'
ELSE product_guid
END
WHERE
report_date=%s
GROUP BY
report_date,
raw_adi_logs.product_name,
product_os_platform,
product_os_version,
product_version,
build,
product_guid,
build_channel
"""
_FENNEC38_ADI_CHANNEL_CORRECTION_SQL = """
update raw_adi
set update_channel = 'beta'
where product_name = 'FennecAndroid'
and product_version = '38.0'
and build = '20150427090529'
and date > '2015-04-27';"""
@as_backfill_cron_app
class FetchADIFromHiveCronApp(BaseCronApp):
""" This cron is our daily blocklist ping web logs query
that rolls up all the browser checkins and lets us know
how many browsers we think were active on the internet
for a particular day """
app_name = 'fetch-adi-from-hive'
app_description = 'Fetch ADI From Hive App'
app_version = '0.1'
required_config = Namespace()
required_config.add_option(
'query',
default=_QUERY,
doc='Hive query for fetching ADI data')
required_config.add_option(
'hive_host',
default='localhost',
doc='Hostname to run Hive query on')
required_config.add_option(
'hive_port',
default=10000,
doc='Port to run Hive query on')
required_config.add_option(
'hive_user',
default='socorro',
doc='User to connect to Hive with')
required_config.add_option(
'hive_password',
default='ignored',
doc='Password to connect to Hive with',
secret=True)
required_config.add_option(
'hive_database',
default='default',
doc='Database name to connect to Hive with')
required_config.add_option(
'hive_auth_mechanism',
default='PLAIN',
doc='Auth mechanism for Hive')
required_config.add_option(
'timeout',
default=30 * 60, # 30 minutes
doc='number of seconds to wait before timing out')
required_config.namespace('primary_destination')
required_config.primary_destination.add_option(
'transaction_executor_class',
default="socorro.database.transaction_executor."
"TransactionExecutorWithInfiniteBackoff",
doc='a class that will manage transactions',
from_string_converter=class_converter,
reference_value_from='resource.postgresql',
)
required_config.primary_destination.add_option(
'database_class',
default=ConnectionContext,
doc='The class responsible for connecting to Postgres',
reference_value_from='resource.postgresql',
)
required_config.namespace('secondary_destination')
required_config.secondary_destination.add_option(
'transaction_executor_class',
default="socorro.database.transaction_executor."
"TransactionExecutorWithInfiniteBackoff",
doc='a class that will manage transactions',
from_string_converter=class_converter,
reference_value_from='resource.postgresql',
)
required_config.secondary_destination.add_option(
'database_class',
default=ConnectionContext,
doc=(
'The class responsible for connecting to Postgres. '
'Optionally set this to an empty string to entirely '
'disable the secondary destination.'
),
reference_value_from='resource.postgresql',
)
@staticmethod
def remove_control_characters(s):
if isinstance(s, str):
s = unicode(s, 'utf-8', errors='replace')
return ''.join(c for c in s if unicodedata.category(c)[0] != "C")
def _database_transaction(
self,
connection,
raw_adi_logs_pathname,
target_date
):
with codecs.open(raw_adi_logs_pathname, 'r', 'utf-8') as f:
pgcursor = connection.cursor()
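# Bulk-load the tab-separated rows written from Hive into raw_adi_logs via
# COPY, then roll them up into raw_adi with the INSERT ... SELECT below.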
pgcursor.copy_from(
f,
'raw_adi_logs',
null='None',
columns=[
'report_date',
'product_name',
'product_os_platform',
'product_os_version',
'product_version',
'build',
'build_channel',
'product_guid',
'count'
]
)
pgcursor.execute(_RAW_ADI_QUERY, (target_date,))
# for Bug 1159993
execute_no_results(connection, _FENNEC38_ADI_CHANNEL_CORRECTION_SQL)
def run(self, date):
db_class = self.config.primary_destination.database_class
primary_database = db_class(self.config.primary_destination)
tx_class = self.config.primary_destination.transaction_executor_class
primary_transaction = tx_class(
self.config,
primary_database,
)
transactions = [primary_transaction]
db_class = self.config.secondary_destination.database_class
# The reason for checking if this is anything at all is
# because one way of disabling the secondary destination
# is to set the database_class to an empty string.
if db_class:
secondary_database = db_class(self.config.secondary_destination)
if secondary_database.config != primary_database.config:
# The secondary really is different from the first one.
# By default, if not explicitly set, it'll pick up the same
# resource values as the first one.
tx_class = (
self.config.secondary_destination
.transaction_executor_class
)
secondary_transaction = tx_class(
self.config,
secondary_database,
)
transactions.append(secondary_transaction)
target_date = (date - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
raw_adi_logs_pathname = os.path.join(
tempfile.gettempdir(),
"%s.raw_adi_logs.TEMPORARY%s" % (
target_date,
'.txt'
)
)
try:
with codecs.open(raw_adi_logs_pathname, 'w', 'utf-8') as f:
hive = pyhs2.connect(
host=self.config.hive_host,
port=self.config.hive_port,
authMechanism=self.config.hive_auth_mechanism,
user=self.config.hive_user,
password=self.config.hive_password,
database=self.config.hive_database,
# the underlying TSocket setTimeout() wants milliseconds
timeout=self.config.timeout * 1000
)
cur = hive.cursor()
query = self.config.query % target_date
cur.execute(query)
rows_written = 0
for row in cur:
if None in row:
continue
f.write(
"\t"
.join(
self.remove_control_characters(
urllib2.unquote(v)
).replace('\\', '\\\\')
if isinstance(v, basestring) else str(v)
for v in row
)
)
f.write("\n")
rows_written += 1
if not rows_written:
raise NoRowsWritten('hive yielded no rows to write')
self.config.logger.info(
'Wrote %d rows from doing hive query' % rows_written
)
for transaction in transactions:
transaction(
self._database_transaction,
raw_adi_logs_pathname,
target_date
)
finally:
if os.path.isfile(raw_adi_logs_pathname):
os.remove(raw_adi_logs_pathname)
@as_backfill_cron_app
class FAKEFetchADIFromHiveCronApp(BaseCronApp):
"""Because of firewalls, we can't generally run the real
'fetch-adi-from-hive' in a staging environment. That means that
    various other crontabber apps that depend on this refuse to
    run.
By introducing a fake version - one that does nothing - we circumvent
that problem as we're able to keep the same name as the real class.
    NB. The reason for prefixing this class with the word FAKE in
    all upper case is to make it extra noticeable so that you never
    enable this class in a crontabber environment in production.
For more information, see:
https://bugzilla.mozilla.org/show_bug.cgi?id=1246673
"""
app_name = 'fetch-adi-from-hive'
app_description = 'FAKE Fetch ADI From Hive App that does nothing'
app_version = '0.1'
def run(self, date):
self.config.logger.info(
'Faking the fetching of ADI from Hive :)'
)
|
dilworm/pytest | refs/heads/master | redisinfo/redisinfocollector.py | 1 | # -*- coding=utf8 -*-
import Queue,threading,asyncore,time,socket
import redisclient as rc
import configparser as cf
from redisreplycallback import RedisReplyCallback
import redisreplyhandler as rrh
exec_interval = 1   # seconds between queueing the "info" command to each redis
conn_timeout = 1    # seconds between connection checks (was undefined here; the value is an assumption)
def timer_check_connection(collector):
for p in collector.peers:
if not (p.is_connected() or p.is_connecting()):
#print "{0}: connecting {1}:{2}".format(time.time(), p.host, p.port)
p.connect()
threading.Timer(conn_timeout, timer_check_connection, args=[collector]).start()
def timer_add_cmd(collector):
for p in collector.peers:
if p.is_connected():
print "{0}: add 'info' cmd to queue {1}:{2}".format(time.time(), p.host, p.port)
p.add_cmd()
threading.Timer(exec_interval, timer_add_cmd, args=[collector]).start()
#
class ReidsInfoCollector(RedisReplyCallback):
svraddrlist = []
peers = []
queue = Queue.Queue()
is_start = False
def __init__(self):
self.worker = threading.Thread(target=self._run, name = "collector service")
def set_server_list(self, sl):
self.svraddrlist = sl
def set_reply_service(self, rs):
self.reply_service = rs
def _run(self):
print "ReidsInfoCollector::_run"
#threading.Timer(conn_timeout, timer_check_connection, args = [self]).start()
threading.Timer(exec_interval, timer_add_cmd, args = [self]).start()
        # If you want to handle the "global" poll timeout event of asyncore.loop,
        # create an empty asyncore.dispatcher object whose readable() returns False,
        # then call create_socket(), which adds the object to the poll list (at the
        # cost of a spurious disconnect event); the "global" poll timeout event can
        # then be handled inside that object's writable() method.  See the
        # illustrative sketch at the end of this module.
for item in self.svraddrlist:
c = rc.RedisClient(item[0], item[1], item[2], item[3])
c.set_callback(self)
c.asyn_info()
c.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.peers.append(c)
asyncore.loop(0.1)
def start(self):
if self.is_start:
return True
self.is_start = True
self.worker.start()
# RedisReplyCallback implement
def on_info(self, redisid, redisname, data):
#print "{0}: on_info".format(time.time())
item = rrh.ReplyItem(redisid, redisname, "info", data, time.time())
self.reply_service.add_reply(item)
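# --- Illustrative sketch, not used by the collector above: one possible shape of
# --- the "empty dispatcher" trick described in ReidsInfoCollector._run.  The class
# --- name and the idea of doing the timeout work inside writable() are assumptions
# --- for illustration only; create_socket() would still need to be called on an
# --- instance to register it in the poll map.
class _IdleTimeoutDispatcher(asyncore.dispatcher):
    """Dummy dispatcher whose only purpose is to be polled by asyncore.loop."""
    def readable(self):
        # Never readable, so asyncore will not try to recv() on this socket.
        return False
    def writable(self):
        # Called on every poll pass; "global" poll-timeout housekeeping
        # (e.g. checking connections or flushing queues) could go here.
        return False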
|
ManageIQ/integration_tests | refs/heads/master | requirements/__init__.py | 2 | import os
import subprocess
from importlib_metadata import metadata
from importlib_metadata import PackageNotFoundError
from pip_module_scanner.scanner import Scanner
# Override Scanner to skip dotfiles
class RepoImportScanner(Scanner):
"""Overwrite Scanner to use git tracked files instead of os.walk
Also override init to create installed_packages since we just want to write package names
"""
def __init__(self, *args, **kwargs):
# workaround for https://gitlab.com/python-devs/importlib_metadata/issues/81
self.tricky_package_map = kwargs.pop("package_map", None) or {}
# overwrite libraries_installed keyed on package names
super().__init__(*args, **kwargs)
self.installed_packages = [
lib.key
for lib in self.libraries_installed
            if lib.key != "manageiq-integration-tests"
] # ignore local
def search_script_directory(self, path):
"""
        Recursively loop through a directory to find all Python
        script files. When one is found, it is analyzed for import statements.
        Only scans files tracked by git.
:param path: string
:return: generator
"""
proc = subprocess.Popen(["git", "ls-files", "--full-name"], stdout=subprocess.PIPE)
proc.wait()
# decode the file names because subprocess PIPE has them as bytes
for file_name in [f.decode() for f in proc.stdout.read().splitlines()]:
if (not file_name.endswith(".py")) or "sprout/" in file_name:
continue # skip sprout files and non-python files
self.search_script_file(os.path.dirname(file_name), os.path.basename(file_name))
def search_script(self, script):
"""
        Search a script's contents for import statements and check
        if they're currently present in the list of all installed
        pip modules.
:param script: string
:return: void
"""
if self.import_statement.search(script):
unique_found = []
for f_import in set(self.import_statement.findall(script)):
# Try the package metadata lookup, if its not found its just local or builtin
try:
import_metadata = metadata(f_import)
except PackageNotFoundError:
try:
import_metadata = metadata(self.tricky_package_map[f_import])
except KeyError:
# if f_import is not in our tricky_package_map, it must be a local package,
# so skip it
continue
# Check that the package isn't already accounted for
name = import_metadata["Name"]
# Shriver - previously this was checking installed packages
# Thinking this prevents freeze_all from working correctly on a clean venv
# Want it to be able to go from clean venv + import scan to a frozen req file
# freeze.py uses any existing frozen file as constraints
if name not in self.libraries_found: # and name in self.installed_packages:
unique_found.append(name)
for package_name in unique_found:
# Shriver - see above
# self.installed_packages.remove(package_name)
self.libraries_found.append(package_name)
def output_to_fd(self, fd):
"""
Outputs the results of the scanner to a file descriptor (stdout counts :)
:param fd: file
:return: void
"""
for library in self.libraries_found:
fd.write(f"{library}\n")
|
PmagPy/PmagPy | refs/heads/master | programs/revtest_mm1990.py | 2 | #!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from builtins import input
from builtins import range
from past.utils import old_div
import sys
import numpy
import pmagpy.pmagplotlib as pmagplotlib
import pmagpy.pmag as pmag
#
#contributed by N. Swanson-Hysell on 5/3/2013 relying heavily on the implementation of the Watson V test developed by L. Tauxe in watsonsV.py
#
def main():
"""
NAME
revtest_MM1990.py
DESCRIPTION
calculates Watson's V statistic from input files through Monte Carlo simulation in order to test whether normal and reversed populations could have been drawn from a common mean (equivalent to watsonV.py). Also provides the critical angle between the two sample mean directions and the corresponding McFadden and McElhinny (1990) classification.
INPUT FORMAT
takes dec/inc as first two columns in two space delimited files (one file for normal directions, one file for reversed directions).
SYNTAX
revtest_MM1990.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE
-f2 FILE
-P (don't plot the Watson V cdf)
OUTPUT
Watson's V between the two populations and the Monte Carlo Critical Value Vc.
M&M1990 angle, critical angle and classification
Plot of Watson's V CDF from Monte Carlo simulation (red line), V is solid and Vc is dashed.
"""
D1,D2=[],[]
plot=1
Flip=1
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-P' in sys.argv: plot=0
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file1=sys.argv[ind+1]
f1=open(file1,'r')
for line in f1.readlines():
rec=line.split()
Dec,Inc=float(rec[0]),float(rec[1])
D1.append([Dec,Inc,1.])
f1.close()
if '-f2' in sys.argv:
ind=sys.argv.index('-f2')
file2=sys.argv[ind+1]
f2=open(file2,'r')
print("be patient, your computer is doing 5000 simulations...")
for line in f2.readlines():
rec=line.split()
Dec,Inc=float(rec[0]),float(rec[1])
D2.append([Dec,Inc,1.])
f2.close()
#take the antipode for the directions in file 2
D2_flip=[]
for rec in D2:
d,i=(rec[0]-180.)%360.,-rec[1]
D2_flip.append([d,i,1.])
pars_1=pmag.fisher_mean(D1)
pars_2=pmag.fisher_mean(D2_flip)
cart_1=pmag.dir2cart([pars_1["dec"],pars_1["inc"],pars_1["r"]])
cart_2=pmag.dir2cart([pars_2['dec'],pars_2['inc'],pars_2["r"]])
Sw=pars_1['k']*pars_1['r']+pars_2['k']*pars_2['r'] # k1*r1+k2*r2
xhat_1=pars_1['k']*cart_1[0]+pars_2['k']*cart_2[0] # k1*x1+k2*x2
xhat_2=pars_1['k']*cart_1[1]+pars_2['k']*cart_2[1] # k1*y1+k2*y2
xhat_3=pars_1['k']*cart_1[2]+pars_2['k']*cart_2[2] # k1*z1+k2*z2
Rw=numpy.sqrt(xhat_1**2+xhat_2**2+xhat_3**2)
V=2*(Sw-Rw)
#
    #keep the weighted sum for later when determining the "critical angle"; save it as Sr (notation of McFadden and McElhinny, 1990)
#
Sr=Sw
#
# do monte carlo simulation of datasets with same kappas, but common mean
#
counter,NumSims=0,5000
Vp=[] # set of Vs from simulations
for k in range(NumSims):
#
# get a set of N1 fisher distributed vectors with k1, calculate fisher stats
#
Dirp=[]
for i in range(pars_1["n"]):
Dirp.append(pmag.fshdev(pars_1["k"]))
pars_p1=pmag.fisher_mean(Dirp)
#
# get a set of N2 fisher distributed vectors with k2, calculate fisher stats
#
Dirp=[]
for i in range(pars_2["n"]):
Dirp.append(pmag.fshdev(pars_2["k"]))
pars_p2=pmag.fisher_mean(Dirp)
#
# get the V for these
#
Vk=pmag.vfunc(pars_p1,pars_p2)
Vp.append(Vk)
#
# sort the Vs, get Vcrit (95th percentile one)
#
Vp.sort()
k=int(.95*NumSims)
Vcrit=Vp[k]
#
# equation 18 of McFadden and McElhinny, 1990 calculates the critical value of R (Rwc)
#
Rwc=Sr-(old_div(Vcrit,2))
#
#following equation 19 of McFadden and McElhinny (1990) the critical angle is calculated.
#
k1=pars_1['k']
k2=pars_2['k']
R1=pars_1['r']
R2=pars_2['r']
critical_angle=numpy.degrees(numpy.arccos(old_div(((Rwc**2)-((k1*R1)**2)-((k2*R2)**2)),(2*k1*R1*k2*R2))))
D1_mean=(pars_1['dec'],pars_1['inc'])
D2_mean=(pars_2['dec'],pars_2['inc'])
angle=pmag.angle(D1_mean,D2_mean)
#
# print the results of the test
#
print("")
print("Results of Watson V test: ")
print("")
print("Watson's V: " '%.1f' %(V))
print("Critical value of V: " '%.1f' %(Vcrit))
if V<Vcrit:
print('"Pass": Since V is less than Vcrit, the null hypothesis that the two populations are drawn from distributions that share a common mean direction (antipodal to one another) cannot be rejected.')
elif V>Vcrit:
print('"Fail": Since V is greater than Vcrit, the two means can be distinguished at the 95% confidence level.')
print("")
print("M&M1990 classification:")
print("")
print("Angle between data set means: " '%.1f'%(angle))
print("Critical angle of M&M1990: " '%.1f'%(critical_angle))
if V>Vcrit:
print("")
elif V<Vcrit:
if critical_angle<5:
print("The McFadden and McElhinny (1990) classification for this test is: 'A'")
elif critical_angle<10:
print("The McFadden and McElhinny (1990) classification for this test is: 'B'")
elif critical_angle<20:
print("The McFadden and McElhinny (1990) classification for this test is: 'C'")
else:
print("The McFadden and McElhinny (1990) classification for this test is: 'INDETERMINATE;")
if plot==1:
CDF={'cdf':1}
pmagplotlib.plot_init(CDF['cdf'],5,5)
p1 = pmagplotlib.plot_cdf(CDF['cdf'],Vp,"Watson's V",'r',"")
p2 = pmagplotlib.plot_vs(CDF['cdf'],[V],'g','-')
p3 = pmagplotlib.plot_vs(CDF['cdf'],[Vp[k]],'b','--')
pmagplotlib.draw_figs(CDF)
files,fmt={},'svg'
if file2!="":
files['cdf']='WatsonsV_'+file1+'_'+file2+'.'+fmt
else:
files['cdf']='WatsonsV_'+file1+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['cdf']='Cumulative Distribution'
CDF = pmagplotlib.add_borders(CDF,titles,black,purple)
pmagplotlib.save_plots(CDF,files)
else:
ans=input(" S[a]ve to save plot, [q]uit without saving: ")
if ans=="a": pmagplotlib.save_plots(CDF,files)
if __name__ == "__main__":
main()
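# Example invocation (file names are hypothetical; each file holds space-delimited
# dec/inc pairs as described in the docstring):
#   revtest_mm1990.py -f normal_directions.txt -f2 reversed_directions.txt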
|
idovear/odoo | refs/heads/master | addons/project/wizard/__init__.py | 381 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project_task_delegate
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
thaim/ansible | refs/heads/fix-broken-link | lib/ansible/modules/cloud/cloudstack/cs_region.py | 25 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_region
short_description: Manages regions on Apache CloudStack based clouds.
description:
- Add, update and remove regions.
version_added: '2.3'
author: René Moser (@resmo)
options:
id:
description:
- ID of the region.
      - Must be a number (int).
type: int
required: true
name:
description:
- Name of the region.
- Required if I(state=present)
type: str
endpoint:
description:
- Endpoint URL of the region.
- Required if I(state=present)
type: str
state:
description:
- State of the region.
type: str
default: present
choices: [ present, absent ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: create a region
cs_region:
id: 2
name: geneva
endpoint: https://cloud.gva.example.com
delegate_to: localhost
- name: remove a region with ID 2
cs_region:
id: 2
state: absent
delegate_to: localhost
'''
RETURN = '''
---
id:
description: ID of the region.
returned: success
type: int
sample: 1
name:
description: Name of the region.
returned: success
type: str
sample: local
endpoint:
description: Endpoint of the region.
returned: success
type: str
sample: http://cloud.example.com
gslb_service_enabled:
description: Whether the GSLB service is enabled or not.
returned: success
type: bool
sample: true
portable_ip_service_enabled:
description: Whether the portable IP service is enabled or not.
returned: success
type: bool
sample: true
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackRegion(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackRegion, self).__init__(module)
self.returns = {
'endpoint': 'endpoint',
'gslbserviceenabled': 'gslb_service_enabled',
'portableipserviceenabled': 'portable_ip_service_enabled',
}
def get_region(self):
id = self.module.params.get('id')
regions = self.query_api('listRegions', id=id)
if regions:
return regions['region'][0]
return None
def present_region(self):
region = self.get_region()
if not region:
region = self._create_region(region=region)
else:
region = self._update_region(region=region)
return region
def _create_region(self, region):
self.result['changed'] = True
args = {
'id': self.module.params.get('id'),
'name': self.module.params.get('name'),
'endpoint': self.module.params.get('endpoint')
}
if not self.module.check_mode:
res = self.query_api('addRegion', **args)
region = res['region']
return region
def _update_region(self, region):
args = {
'id': self.module.params.get('id'),
'name': self.module.params.get('name'),
'endpoint': self.module.params.get('endpoint')
}
if self.has_changed(args, region):
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('updateRegion', **args)
region = res['region']
return region
def absent_region(self):
region = self.get_region()
if region:
self.result['changed'] = True
if not self.module.check_mode:
self.query_api('removeRegion', id=region['id'])
return region
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
id=dict(required=True, type='int'),
name=dict(),
endpoint=dict(),
state=dict(choices=['present', 'absent'], default='present'),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
required_if=[
('state', 'present', ['name', 'endpoint']),
],
supports_check_mode=True
)
acs_region = AnsibleCloudStackRegion(module)
state = module.params.get('state')
if state == 'absent':
region = acs_region.absent_region()
else:
region = acs_region.present_region()
result = acs_region.get_result(region)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
israeleriston/scientific-week | refs/heads/master | backend/venv/lib/python3.5/site-packages/sqlalchemy/sql/selectable.py | 20 | # sql/selectable.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""The :class:`.FromClause` class of SQL expression elements, representing
SQL tables and derived rowsets.
"""
from .elements import ClauseElement, TextClause, ClauseList, \
and_, Grouping, UnaryExpression, literal_column, BindParameter
from .elements import _clone, \
_literal_as_text, _interpret_as_column_or_from, _expand_cloned,\
_select_iterables, _anonymous_label, _clause_element_as_expr,\
_cloned_intersection, _cloned_difference, True_, \
_literal_as_label_reference, _literal_and_labels_as_label_reference
from .base import Immutable, Executable, _generative, \
ColumnCollection, ColumnSet, _from_objects, Generative
from . import type_api
from .. import inspection
from .. import util
from .. import exc
from operator import attrgetter
from . import operators
import operator
import collections
from .annotation import Annotated
import itertools
from sqlalchemy.sql.visitors import Visitable
def _interpret_as_from(element):
insp = inspection.inspect(element, raiseerr=False)
if insp is None:
if isinstance(element, util.string_types):
util.warn_limited(
"Textual SQL FROM expression %(expr)r should be "
"explicitly declared as text(%(expr)r), "
"or use table(%(expr)r) for more specificity",
{"expr": util.ellipses_string(element)})
return TextClause(util.text_type(element))
try:
return insp.selectable
except AttributeError:
raise exc.ArgumentError("FROM expression expected")
def _interpret_as_select(element):
element = _interpret_as_from(element)
if isinstance(element, Alias):
element = element.original
if not isinstance(element, SelectBase):
element = element.select()
return element
class _OffsetLimitParam(BindParameter):
@property
def _limit_offset_value(self):
return self.effective_value
def _offset_or_limit_clause(element, name=None, type_=None):
"""Convert the given value to an "offset or limit" clause.
This handles incoming integers and converts to an expression; if
an expression is already given, it is passed through.
"""
if element is None:
return None
elif hasattr(element, '__clause_element__'):
return element.__clause_element__()
elif isinstance(element, Visitable):
return element
else:
value = util.asint(element)
return _OffsetLimitParam(name, value, type_=type_, unique=True)
def _offset_or_limit_clause_asint(clause, attrname):
"""Convert the "offset or limit" clause of a select construct to an
integer.
This is only possible if the value is stored as a simple bound parameter.
Otherwise, a compilation error is raised.
"""
if clause is None:
return None
try:
value = clause._limit_offset_value
except AttributeError:
raise exc.CompileError(
"This SELECT structure does not use a simple "
"integer value for %s" % attrname)
else:
return util.asint(value)
def subquery(alias, *args, **kwargs):
r"""Return an :class:`.Alias` object derived
from a :class:`.Select`.
name
alias name
\*args, \**kwargs
all other arguments are delivered to the
:func:`select` function.
"""
return Select(*args, **kwargs).alias(alias)
def alias(selectable, name=None, flat=False):
"""Return an :class:`.Alias` object.
An :class:`.Alias` represents any :class:`.FromClause`
with an alternate name assigned within SQL, typically using the ``AS``
clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
Similar functionality is available via the
:meth:`~.FromClause.alias` method
available on all :class:`.FromClause` subclasses.
When an :class:`.Alias` is created from a :class:`.Table` object,
this has the effect of the table being rendered
as ``tablename AS aliasname`` in a SELECT statement.
For :func:`.select` objects, the effect is that of creating a named
subquery, i.e. ``(select ...) AS aliasname``.
The ``name`` parameter is optional, and provides the name
to use in the rendered SQL. If blank, an "anonymous" name
will be deterministically generated at compile time.
Deterministic means the name is guaranteed to be unique against
other constructs used in the same statement, and will also be the
same name for each successive compilation of the same statement
object.
:param selectable: any :class:`.FromClause` subclass,
such as a table, select statement, etc.
:param name: string name to be assigned as the alias.
If ``None``, a name will be deterministically generated
at compile time.
:param flat: Will be passed through to if the given selectable
is an instance of :class:`.Join` - see :meth:`.Join.alias`
for details.
.. versionadded:: 0.9.0
"""
return _interpret_as_from(selectable).alias(name=name, flat=flat)
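# --- Illustrative sketch, not part of the module: a tiny demonstration of the
# --- table aliasing described above, plus the corresponding_column() relationship
# --- covered later in this module.  The table and column names are invented for
# --- the example, and the imports are deferred into the function body to avoid
# --- any circular import at module load time.
def _alias_demo():
    from sqlalchemy import Table, Column, Integer, MetaData
    users = Table('users', MetaData(), Column('id', Integer, primary_key=True))
    users_alias = users.alias('u')
    # The alias exports its own proxy column, reachable via corresponding_column().
    assert users_alias.corresponding_column(users.c.id) is users_alias.c.id
    # With no explicit name, alias() generates a deterministic anonymous name.
    anon = users.alias()
    return users_alias, anon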
def lateral(selectable, name=None):
"""Return a :class:`.Lateral` object.
:class:`.Lateral` is an :class:`.Alias` subclass that represents
a subquery with the LATERAL keyword applied to it.
The special behavior of a LATERAL subquery is that it appears in the
FROM clause of an enclosing SELECT, but may correlate to other
FROM clauses of that SELECT. It is a special case of subquery
only supported by a small number of backends, currently more recent
PostgreSQL versions.
.. versionadded:: 1.1
.. seealso::
:ref:`lateral_selects` - overview of usage.
"""
return _interpret_as_from(selectable).lateral(name=name)
def tablesample(selectable, sampling, name=None, seed=None):
"""Return a :class:`.TableSample` object.
:class:`.TableSample` is an :class:`.Alias` subclass that represents
a table with the TABLESAMPLE clause applied to it.
:func:`~.expression.tablesample`
is also available from the :class:`.FromClause` class via the
:meth:`.FromClause.tablesample` method.
The TABLESAMPLE clause allows selecting a randomly selected approximate
percentage of rows from a table. It supports multiple sampling methods,
most commonly BERNOULLI and SYSTEM.
e.g.::
from sqlalchemy import func
selectable = people.tablesample(
func.bernoulli(1),
name='alias',
seed=func.random())
stmt = select([selectable.c.people_id])
Assuming ``people`` with a column ``people_id``, the above
statement would render as::
SELECT alias.people_id FROM
people AS alias TABLESAMPLE bernoulli(:bernoulli_1)
REPEATABLE (random())
.. versionadded:: 1.1
:param sampling: a ``float`` percentage between 0 and 100 or
:class:`.functions.Function`.
:param name: optional alias name
:param seed: any real-valued SQL expression. When specified, the
REPEATABLE sub-clause is also rendered.
"""
return _interpret_as_from(selectable).tablesample(
sampling, name=name, seed=seed)
class Selectable(ClauseElement):
"""mark a class as being selectable"""
__visit_name__ = 'selectable'
is_selectable = True
@property
def selectable(self):
return self
class HasPrefixes(object):
_prefixes = ()
@_generative
def prefix_with(self, *expr, **kw):
r"""Add one or more expressions following the statement keyword, i.e.
SELECT, INSERT, UPDATE, or DELETE. Generative.
This is used to support backend-specific prefix keywords such as those
provided by MySQL.
E.g.::
stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
Multiple prefixes can be specified by multiple calls
to :meth:`.prefix_with`.
:param \*expr: textual or :class:`.ClauseElement` construct which
will be rendered following the INSERT, UPDATE, or DELETE
keyword.
:param \**kw: A single keyword 'dialect' is accepted. This is an
optional string dialect name which will
limit rendering of this prefix to only that dialect.
"""
dialect = kw.pop('dialect', None)
if kw:
raise exc.ArgumentError("Unsupported argument(s): %s" %
",".join(kw))
self._setup_prefixes(expr, dialect)
def _setup_prefixes(self, prefixes, dialect=None):
self._prefixes = self._prefixes + tuple(
[(_literal_as_text(p, warn=False), dialect) for p in prefixes])
class HasSuffixes(object):
_suffixes = ()
@_generative
def suffix_with(self, *expr, **kw):
r"""Add one or more expressions following the statement as a whole.
This is used to support backend-specific suffix keywords on
certain constructs.
E.g.::
stmt = select([col1, col2]).cte().suffix_with(
"cycle empno set y_cycle to 1 default 0", dialect="oracle")
Multiple suffixes can be specified by multiple calls
to :meth:`.suffix_with`.
:param \*expr: textual or :class:`.ClauseElement` construct which
will be rendered following the target clause.
:param \**kw: A single keyword 'dialect' is accepted. This is an
optional string dialect name which will
limit rendering of this suffix to only that dialect.
"""
dialect = kw.pop('dialect', None)
if kw:
raise exc.ArgumentError("Unsupported argument(s): %s" %
",".join(kw))
self._setup_suffixes(expr, dialect)
def _setup_suffixes(self, suffixes, dialect=None):
self._suffixes = self._suffixes + tuple(
[(_literal_as_text(p, warn=False), dialect) for p in suffixes])
class FromClause(Selectable):
"""Represent an element that can be used within the ``FROM``
clause of a ``SELECT`` statement.
The most common forms of :class:`.FromClause` are the
:class:`.Table` and the :func:`.select` constructs. Key
features common to all :class:`.FromClause` objects include:
* a :attr:`.c` collection, which provides per-name access to a collection
of :class:`.ColumnElement` objects.
* a :attr:`.primary_key` attribute, which is a collection of all those
:class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
* Methods to generate various derivations of a "from" clause, including
:meth:`.FromClause.alias`, :meth:`.FromClause.join`,
:meth:`.FromClause.select`.
"""
__visit_name__ = 'fromclause'
named_with_column = False
_hide_froms = []
_is_join = False
_is_select = False
_is_from_container = False
_textual = False
"""a marker that allows us to easily distinguish a :class:`.TextAsFrom`
or similar object from other kinds of :class:`.FromClause` objects."""
schema = None
"""Define the 'schema' attribute for this :class:`.FromClause`.
This is typically ``None`` for most objects except that of
:class:`.Table`, where it is taken as the value of the
:paramref:`.Table.schema` argument.
"""
def _translate_schema(self, effective_schema, map_):
return effective_schema
_memoized_property = util.group_expirable_memoized_property(["_columns"])
@util.deprecated(
'1.1',
message="``FromClause.count()`` is deprecated. Counting "
"rows requires that the correct column expression and "
"accommodations for joins, DISTINCT, etc. must be made, "
"otherwise results may not be what's expected. "
"Please use an appropriate ``func.count()`` expression "
"directly.")
@util.dependencies("sqlalchemy.sql.functions")
def count(self, functions, whereclause=None, **params):
"""return a SELECT COUNT generated against this
:class:`.FromClause`.
The function generates COUNT against the
first column in the primary key of the table, or against
the first column in the table overall. Explicit use of
``func.count()`` should be preferred::
row_count = conn.scalar(
select([func.count('*')]).select_from(table)
)
.. seealso::
:data:`.func`
"""
if self.primary_key:
col = list(self.primary_key)[0]
else:
col = list(self.columns)[0]
return Select(
[functions.func.count(col).label('tbl_row_count')],
whereclause,
from_obj=[self],
**params)
def select(self, whereclause=None, **params):
"""return a SELECT of this :class:`.FromClause`.
.. seealso::
:func:`~.sql.expression.select` - general purpose
method which allows for arbitrary column lists.
"""
return Select([self], whereclause, **params)
def join(self, right, onclause=None, isouter=False, full=False):
"""Return a :class:`.Join` from this :class:`.FromClause`
to another :class:`FromClause`.
E.g.::
from sqlalchemy import join
j = user_table.join(address_table,
user_table.c.id == address_table.c.user_id)
stmt = select([user_table]).select_from(j)
would emit SQL along the lines of::
SELECT user.id, user.name FROM user
JOIN address ON user.id = address.user_id
:param right: the right side of the join; this is any
:class:`.FromClause` object such as a :class:`.Table` object, and
may also be a selectable-compatible object such as an ORM-mapped
class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
:param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
:param full: if True, render a FULL OUTER JOIN, instead of LEFT OUTER
JOIN. Implies :paramref:`.FromClause.join.isouter`.
.. versionadded:: 1.1
.. seealso::
:func:`.join` - standalone function
:class:`.Join` - the type of object produced
"""
return Join(self, right, onclause, isouter, full)
def outerjoin(self, right, onclause=None, full=False):
"""Return a :class:`.Join` from this :class:`.FromClause`
to another :class:`FromClause`, with the "isouter" flag set to
True.
E.g.::
from sqlalchemy import outerjoin
j = user_table.outerjoin(address_table,
user_table.c.id == address_table.c.user_id)
The above is equivalent to::
j = user_table.join(
address_table,
user_table.c.id == address_table.c.user_id,
isouter=True)
:param right: the right side of the join; this is any
:class:`.FromClause` object such as a :class:`.Table` object, and
may also be a selectable-compatible object such as an ORM-mapped
class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
:param full: if True, render a FULL OUTER JOIN, instead of
LEFT OUTER JOIN.
.. versionadded:: 1.1
.. seealso::
:meth:`.FromClause.join`
:class:`.Join`
"""
return Join(self, right, onclause, True, full)
def alias(self, name=None, flat=False):
"""return an alias of this :class:`.FromClause`.
This is shorthand for calling::
from sqlalchemy import alias
a = alias(self, name=name)
See :func:`~.expression.alias` for details.
"""
return Alias(self, name)
def lateral(self, name=None):
"""Return a LATERAL alias of this :class:`.FromClause`.
The return value is the :class:`.Lateral` construct also
provided by the top-level :func:`~.expression.lateral` function.
.. versionadded:: 1.1
.. seealso::
:ref:`lateral_selects` - overview of usage.
"""
return Lateral(self, name)
def tablesample(self, sampling, name=None, seed=None):
"""Return a TABLESAMPLE alias of this :class:`.FromClause`.
The return value is the :class:`.TableSample` construct also
provided by the top-level :func:`~.expression.tablesample` function.
.. versionadded:: 1.1
.. seealso::
:func:`~.expression.tablesample` - usage guidelines and parameters
"""
return TableSample(self, sampling, name, seed)
def is_derived_from(self, fromclause):
"""Return True if this FromClause is 'derived' from the given
FromClause.
An example would be an Alias of a Table is derived from that Table.
"""
# this is essentially an "identity" check in the base class.
# Other constructs override this to traverse through
# contained elements.
return fromclause in self._cloned_set
def _is_lexical_equivalent(self, other):
"""Return True if this FromClause and the other represent
the same lexical identity.
This tests if either one is a copy of the other, or
if they are the same via annotation identity.
"""
return self._cloned_set.intersection(other._cloned_set)
@util.dependencies("sqlalchemy.sql.util")
def replace_selectable(self, sqlutil, old, alias):
"""replace all occurrences of FromClause 'old' with the given Alias
object, returning a copy of this :class:`.FromClause`.
"""
return sqlutil.ClauseAdapter(alias).traverse(self)
def correspond_on_equivalents(self, column, equivalents):
"""Return corresponding_column for the given column, or if None
search for a match in the given dictionary.
"""
col = self.corresponding_column(column, require_embedded=True)
if col is None and col in equivalents:
for equiv in equivalents[col]:
nc = self.corresponding_column(equiv, require_embedded=True)
if nc:
return nc
return col
def corresponding_column(self, column, require_embedded=False):
"""Given a :class:`.ColumnElement`, return the exported
:class:`.ColumnElement` object from this :class:`.Selectable`
which corresponds to that original
:class:`~sqlalchemy.schema.Column` via a common ancestor
column.
:param column: the target :class:`.ColumnElement` to be matched
:param require_embedded: only return corresponding columns for
the given :class:`.ColumnElement`, if the given
:class:`.ColumnElement` is actually present within a sub-element
of this :class:`.FromClause`. Normally the column will match if
it merely shares a common ancestor with one of the exported
columns of this :class:`.FromClause`.
"""
def embedded(expanded_proxy_set, target_set):
for t in target_set.difference(expanded_proxy_set):
if not set(_expand_cloned([t])
).intersection(expanded_proxy_set):
return False
return True
# don't dig around if the column is locally present
if self.c.contains_column(column):
return column
col, intersect = None, None
target_set = column.proxy_set
cols = self.c._all_columns
for c in cols:
expanded_proxy_set = set(_expand_cloned(c.proxy_set))
i = target_set.intersection(expanded_proxy_set)
if i and (not require_embedded
or embedded(expanded_proxy_set, target_set)):
if col is None:
# no corresponding column yet, pick this one.
col, intersect = c, i
elif len(i) > len(intersect):
# 'c' has a larger field of correspondence than
# 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
# matches a1.c.x->table.c.x better than
# selectable.c.x->table.c.x does.
col, intersect = c, i
elif i == intersect:
# they have the same field of correspondence. see
# which proxy_set has fewer columns in it, which
# indicates a closer relationship with the root
# column. Also take into account the "weight"
# attribute which CompoundSelect() uses to give
# higher precedence to columns based on vertical
# position in the compound statement, and discard
# columns that have no reference to the target
# column (also occurs with CompoundSelect)
col_distance = util.reduce(
operator.add,
[sc._annotations.get('weight', 1) for sc in
col.proxy_set if sc.shares_lineage(column)])
c_distance = util.reduce(
operator.add,
[sc._annotations.get('weight', 1) for sc in
c.proxy_set if sc.shares_lineage(column)])
if c_distance < col_distance:
col, intersect = c, i
return col
@property
def description(self):
"""a brief description of this FromClause.
Used primarily for error message formatting.
"""
return getattr(self, 'name', self.__class__.__name__ + " object")
def _reset_exported(self):
"""delete memoized collections when a FromClause is cloned."""
self._memoized_property.expire_instance(self)
@_memoized_property
def columns(self):
"""A named-based collection of :class:`.ColumnElement` objects
maintained by this :class:`.FromClause`.
The :attr:`.columns`, or :attr:`.c` collection, is the gateway
to the construction of SQL expressions using table-bound or
other selectable-bound columns::
select([mytable]).where(mytable.c.somecolumn == 5)
"""
if '_columns' not in self.__dict__:
self._init_collections()
self._populate_column_collection()
return self._columns.as_immutable()
@_memoized_property
def primary_key(self):
"""Return the collection of Column objects which comprise the
primary key of this FromClause."""
self._init_collections()
self._populate_column_collection()
return self.primary_key
@_memoized_property
def foreign_keys(self):
"""Return the collection of ForeignKey objects which this
FromClause references."""
self._init_collections()
self._populate_column_collection()
return self.foreign_keys
c = property(attrgetter('columns'),
doc="An alias for the :attr:`.columns` attribute.")
_select_iterable = property(attrgetter('columns'))
def _init_collections(self):
assert '_columns' not in self.__dict__
assert 'primary_key' not in self.__dict__
assert 'foreign_keys' not in self.__dict__
self._columns = ColumnCollection()
self.primary_key = ColumnSet()
self.foreign_keys = set()
@property
def _cols_populated(self):
return '_columns' in self.__dict__
def _populate_column_collection(self):
"""Called on subclasses to establish the .c collection.
Each implementation has a different way of establishing
this collection.
"""
def _refresh_for_new_column(self, column):
"""Given a column added to the .c collection of an underlying
selectable, produce the local version of that column, assuming this
selectable ultimately should proxy this column.
        This is used to "ping" a derived selectable to add a new column
        to its .c. collection when a Column has been added to one of the
        Table objects it ultimately derives from.
If the given selectable hasn't populated its .c. collection yet,
it should at least pass on the message to the contained selectables,
but it will return None.
This method is currently used by Declarative to allow Table
columns to be added to a partially constructed inheritance
mapping that may have already produced joins. The method
isn't public right now, as the full span of implications
and/or caveats aren't yet clear.
It's also possible that this functionality could be invoked by
default via an event, which would require that
selectables maintain a weak referencing collection of all
derivations.
"""
if not self._cols_populated:
return None
elif (column.key in self.columns and
self.columns[column.key] is column):
return column
else:
return None
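# --- Illustrative sketch, not part of the module: shows FromClause.join()
# --- deriving its ON clause from a foreign key, as described in the join()
# --- docstring above.  Table and column names are invented, and imports are
# --- deferred into the function body to avoid circular imports at load time.
def _join_demo():
    from sqlalchemy import Table, Column, Integer, ForeignKey, MetaData, select
    metadata = MetaData()
    user = Table('user', metadata, Column('id', Integer, primary_key=True))
    address = Table('address', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('user_id', Integer, ForeignKey('user.id')))
    # No onclause given: the single FK between the tables supplies
    # "user.id = address.user_id" automatically.
    j = user.join(address)
    return select([user]).select_from(j)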
class Join(FromClause):
"""represent a ``JOIN`` construct between two :class:`.FromClause`
elements.
The public constructor function for :class:`.Join` is the module-level
:func:`.join()` function, as well as the :meth:`.FromClause.join` method
of any :class:`.FromClause` (e.g. such as :class:`.Table`).
.. seealso::
:func:`.join`
:meth:`.FromClause.join`
"""
__visit_name__ = 'join'
_is_join = True
def __init__(self, left, right, onclause=None, isouter=False, full=False):
"""Construct a new :class:`.Join`.
The usual entrypoint here is the :func:`~.expression.join`
function or the :meth:`.FromClause.join` method of any
:class:`.FromClause` object.
"""
self.left = _interpret_as_from(left)
self.right = _interpret_as_from(right).self_group()
if onclause is None:
self.onclause = self._match_primaries(self.left, self.right)
else:
self.onclause = onclause
self.isouter = isouter
self.full = full
@classmethod
def _create_outerjoin(cls, left, right, onclause=None, full=False):
"""Return an ``OUTER JOIN`` clause element.
The returned object is an instance of :class:`.Join`.
Similar functionality is also available via the
:meth:`~.FromClause.outerjoin()` method on any
:class:`.FromClause`.
:param left: The left side of the join.
:param right: The right side of the join.
:param onclause: Optional criterion for the ``ON`` clause, is
derived from foreign key relationships established between
left and right otherwise.
To chain joins together, use the :meth:`.FromClause.join` or
:meth:`.FromClause.outerjoin` methods on the resulting
:class:`.Join` object.
"""
return cls(left, right, onclause, isouter=True, full=full)
@classmethod
def _create_join(cls, left, right, onclause=None, isouter=False,
full=False):
"""Produce a :class:`.Join` object, given two :class:`.FromClause`
expressions.
E.g.::
j = join(user_table, address_table,
user_table.c.id == address_table.c.user_id)
stmt = select([user_table]).select_from(j)
would emit SQL along the lines of::
SELECT user.id, user.name FROM user
JOIN address ON user.id = address.user_id
Similar functionality is available given any
:class:`.FromClause` object (e.g. such as a :class:`.Table`) using
the :meth:`.FromClause.join` method.
:param left: The left side of the join.
:param right: the right side of the join; this is any
:class:`.FromClause` object such as a :class:`.Table` object, and
may also be a selectable-compatible object such as an ORM-mapped
class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
:param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
:param full: if True, render a FULL OUTER JOIN, instead of JOIN.
.. versionadded:: 1.1
.. seealso::
:meth:`.FromClause.join` - method form, based on a given left side
:class:`.Join` - the type of object produced
"""
return cls(left, right, onclause, isouter, full)
@property
def description(self):
return "Join object on %s(%d) and %s(%d)" % (
self.left.description,
id(self.left),
self.right.description,
id(self.right))
def is_derived_from(self, fromclause):
return fromclause is self or \
self.left.is_derived_from(fromclause) or \
self.right.is_derived_from(fromclause)
def self_group(self, against=None):
return FromGrouping(self)
@util.dependencies("sqlalchemy.sql.util")
def _populate_column_collection(self, sqlutil):
columns = [c for c in self.left.columns] + \
[c for c in self.right.columns]
self.primary_key.extend(sqlutil.reduce_columns(
(c for c in columns if c.primary_key), self.onclause))
self._columns.update((col._label, col) for col in columns)
self.foreign_keys.update(itertools.chain(
*[col.foreign_keys for col in columns]))
def _refresh_for_new_column(self, column):
col = self.left._refresh_for_new_column(column)
if col is None:
col = self.right._refresh_for_new_column(column)
if col is not None:
if self._cols_populated:
self._columns[col._label] = col
self.foreign_keys.update(col.foreign_keys)
if col.primary_key:
self.primary_key.add(col)
return col
return None
def _copy_internals(self, clone=_clone, **kw):
self._reset_exported()
self.left = clone(self.left, **kw)
self.right = clone(self.right, **kw)
self.onclause = clone(self.onclause, **kw)
def get_children(self, **kwargs):
return self.left, self.right, self.onclause
def _match_primaries(self, left, right):
if isinstance(left, Join):
left_right = left.right
else:
left_right = None
return self._join_condition(left, right, a_subset=left_right)
@classmethod
def _join_condition(cls, a, b, ignore_nonexistent_tables=False,
a_subset=None,
consider_as_foreign_keys=None):
"""create a join condition between two tables or selectables.
e.g.::
join_condition(tablea, tableb)
would produce an expression along the lines of::
tablea.c.id==tableb.c.tablea_id
The join is determined based on the foreign key relationships
between the two selectables. If there are multiple ways
to join, or no way to join, an error is raised.
:param ignore_nonexistent_tables: Deprecated - this
flag is no longer used. Only resolution errors regarding
the two given tables are propagated.
:param a_subset: An optional expression that is a sub-component
of ``a``. An attempt will be made to join to just this sub-component
first before looking at the full ``a`` construct, and if found
will be successful even if there are other ways to join to ``a``.
This allows the "right side" of a join to be passed thereby
providing a "natural join".
"""
constraints = cls._joincond_scan_left_right(
a, a_subset, b, consider_as_foreign_keys)
if len(constraints) > 1:
cls._joincond_trim_constraints(
a, b, constraints, consider_as_foreign_keys)
if len(constraints) == 0:
if isinstance(b, FromGrouping):
hint = " Perhaps you meant to convert the right side to a "\
"subquery using alias()?"
else:
hint = ""
raise exc.NoForeignKeysError(
"Can't find any foreign key relationships "
"between '%s' and '%s'.%s" %
(a.description, b.description, hint))
crit = [(x == y) for x, y in list(constraints.values())[0]]
if len(crit) == 1:
return (crit[0])
else:
return and_(*crit)
@classmethod
def _joincond_scan_left_right(
cls, a, a_subset, b, consider_as_foreign_keys):
constraints = collections.defaultdict(list)
for left in (a_subset, a):
if left is None:
continue
for fk in sorted(
b.foreign_keys,
key=lambda fk: fk.parent._creation_order):
if consider_as_foreign_keys is not None and \
fk.parent not in consider_as_foreign_keys:
continue
try:
col = fk.get_referent(left)
except exc.NoReferenceError as nrte:
if nrte.table_name == left.name:
raise
else:
continue
if col is not None:
constraints[fk.constraint].append((col, fk.parent))
if left is not b:
for fk in sorted(
left.foreign_keys,
key=lambda fk: fk.parent._creation_order):
if consider_as_foreign_keys is not None and \
fk.parent not in consider_as_foreign_keys:
continue
try:
col = fk.get_referent(b)
except exc.NoReferenceError as nrte:
if nrte.table_name == b.name:
raise
else:
continue
if col is not None:
constraints[fk.constraint].append((col, fk.parent))
if constraints:
break
return constraints
@classmethod
def _joincond_trim_constraints(
cls, a, b, constraints, consider_as_foreign_keys):
# more than one constraint matched. narrow down the list
# to include just those FKCs that match exactly to
# "consider_as_foreign_keys".
if consider_as_foreign_keys:
for const in list(constraints):
if set(f.parent for f in const.elements) != set(
consider_as_foreign_keys):
del constraints[const]
# if still multiple constraints, but
# they all refer to the exact same end result, use it.
if len(constraints) > 1:
dedupe = set(tuple(crit) for crit in constraints.values())
if len(dedupe) == 1:
key = list(constraints)[0]
constraints = {key: constraints[key]}
if len(constraints) != 1:
raise exc.AmbiguousForeignKeysError(
"Can't determine join between '%s' and '%s'; "
"tables have more than one foreign key "
"constraint relationship between them. "
"Please specify the 'onclause' of this "
"join explicitly." % (a.description, b.description))
def select(self, whereclause=None, **kwargs):
r"""Create a :class:`.Select` from this :class:`.Join`.
The equivalent long-hand form, given a :class:`.Join` object
``j``, is::
from sqlalchemy import select
j = select([j.left, j.right], **kw).\
where(whereclause).\
select_from(j)
:param whereclause: the WHERE criterion that will be sent to
the :func:`select()` function
:param \**kwargs: all other kwargs are sent to the
underlying :func:`select()` function.
"""
collist = [self.left, self.right]
return Select(collist, whereclause, from_obj=[self], **kwargs)
@property
def bind(self):
return self.left.bind or self.right.bind
@util.dependencies("sqlalchemy.sql.util")
def alias(self, sqlutil, name=None, flat=False):
r"""return an alias of this :class:`.Join`.
The default behavior here is to first produce a SELECT
construct from this :class:`.Join`, then to produce an
:class:`.Alias` from that. So given a join of the form::
j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
The JOIN by itself would look like::
table_a JOIN table_b ON table_a.id = table_b.a_id
Whereas the alias of the above, ``j.alias()``, would in a
SELECT context look like::
(SELECT table_a.id AS table_a_id, table_b.id AS table_b_id,
table_b.a_id AS table_b_a_id
FROM table_a
JOIN table_b ON table_a.id = table_b.a_id) AS anon_1
The equivalent long-hand form, given a :class:`.Join` object
``j``, is::
from sqlalchemy import select, alias
j = alias(
select([j.left, j.right]).\
select_from(j).\
with_labels(True).\
correlate(False),
name=name
)
The selectable produced by :meth:`.Join.alias` features the same
columns as that of the two individual selectables presented under
a single name - the individual columns are "auto-labeled", meaning
the ``.c.`` collection of the resulting :class:`.Alias` represents
the names of the individual columns using a
``<tablename>_<columname>`` scheme::
j.c.table_a_id
j.c.table_b_a_id
:meth:`.Join.alias` also features an alternate
option for aliasing joins which produces no enclosing SELECT and
does not normally apply labels to the column names. The
``flat=True`` option will call :meth:`.FromClause.alias`
against the left and right sides individually.
Using this option, no new ``SELECT`` is produced;
        instead, starting from a construct as below::
j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
j = j.alias(flat=True)
we get a result like this::
table_a AS table_a_1 JOIN table_b AS table_b_1 ON
table_a_1.id = table_b_1.a_id
The ``flat=True`` argument is also propagated to the contained
selectables, so that a composite join such as::
j = table_a.join(
table_b.join(table_c,
table_b.c.id == table_c.c.b_id),
table_b.c.a_id == table_a.c.id
).alias(flat=True)
Will produce an expression like::
table_a AS table_a_1 JOIN (
table_b AS table_b_1 JOIN table_c AS table_c_1
ON table_b_1.id = table_c_1.b_id
) ON table_a_1.id = table_b_1.a_id
The standalone :func:`~.expression.alias` function as well as the
base :meth:`.FromClause.alias` method also support the ``flat=True``
argument as a no-op, so that the argument can be passed to the
``alias()`` method of any selectable.
.. versionadded:: 0.9.0 Added the ``flat=True`` option to create
"aliases" of joins without enclosing inside of a SELECT
subquery.
:param name: name given to the alias.
:param flat: if True, produce an alias of the left and right
sides of this :class:`.Join` and return the join of those
        two selectables. This produces a join expression that does not
include an enclosing SELECT.
.. versionadded:: 0.9.0
.. seealso::
:func:`~.expression.alias`
"""
if flat:
assert name is None, "Can't send name argument with flat"
left_a, right_a = self.left.alias(flat=True), \
self.right.alias(flat=True)
adapter = sqlutil.ClauseAdapter(left_a).\
chain(sqlutil.ClauseAdapter(right_a))
return left_a.join(right_a, adapter.traverse(self.onclause),
isouter=self.isouter, full=self.full)
else:
return self.select(use_labels=True, correlate=False).alias(name)
@property
def _hide_froms(self):
return itertools.chain(*[_from_objects(x.left, x.right)
for x in self._cloned_set])
@property
def _from_objects(self):
return [self] + \
self.onclause._from_objects + \
self.left._from_objects + \
self.right._from_objects
class Alias(FromClause):
"""Represents an table or selectable alias (AS).
Represents an alias, as typically applied to any table or
sub-select within a SQL statement using the ``AS`` keyword (or
without the keyword on certain databases such as Oracle).
This object is constructed from the :func:`~.expression.alias` module
level function as well as the :meth:`.FromClause.alias` method available
on all :class:`.FromClause` subclasses.
"""
__visit_name__ = 'alias'
named_with_column = True
_is_from_container = True
def __init__(self, selectable, name=None):
baseselectable = selectable
while isinstance(baseselectable, Alias):
baseselectable = baseselectable.element
self.original = baseselectable
self.supports_execution = baseselectable.supports_execution
if self.supports_execution:
self._execution_options = baseselectable._execution_options
self.element = selectable
if name is None:
if self.original.named_with_column:
name = getattr(self.original, 'name', None)
name = _anonymous_label('%%(%d %s)s' % (id(self), name
or 'anon'))
self.name = name
def self_group(self, target=None):
if isinstance(target, CompoundSelect) and \
isinstance(self.original, Select) and \
self.original._needs_parens_for_grouping():
return FromGrouping(self)
return super(Alias, self).self_group(target)
@property
def description(self):
if util.py3k:
return self.name
else:
return self.name.encode('ascii', 'backslashreplace')
def as_scalar(self):
try:
return self.element.as_scalar()
except AttributeError:
raise AttributeError("Element %s does not support "
"'as_scalar()'" % self.element)
def is_derived_from(self, fromclause):
if fromclause in self._cloned_set:
return True
return self.element.is_derived_from(fromclause)
def _populate_column_collection(self):
for col in self.element.columns._all_columns:
col._make_proxy(self)
def _refresh_for_new_column(self, column):
col = self.element._refresh_for_new_column(column)
if col is not None:
if not self._cols_populated:
return None
else:
return col._make_proxy(self)
else:
return None
def _copy_internals(self, clone=_clone, **kw):
# don't apply anything to an aliased Table
# for now. May want to drive this from
# the given **kw.
if isinstance(self.element, TableClause):
return
self._reset_exported()
self.element = clone(self.element, **kw)
baseselectable = self.element
while isinstance(baseselectable, Alias):
baseselectable = baseselectable.element
self.original = baseselectable
def get_children(self, column_collections=True, **kw):
if column_collections:
for c in self.c:
yield c
yield self.element
@property
def _from_objects(self):
return [self]
@property
def bind(self):
return self.element.bind
class Lateral(Alias):
"""Represent a LATERAL subquery.
This object is constructed from the :func:`~.expression.lateral` module
level function as well as the :meth:`.FromClause.lateral` method available
on all :class:`.FromClause` subclasses.
    While LATERAL is part of the SQL standard, currently only more recent
PostgreSQL versions provide support for this keyword.
.. versionadded:: 1.1
.. seealso::
:ref:`lateral_selects` - overview of usage.
"""
__visit_name__ = 'lateral'
class TableSample(Alias):
"""Represent a TABLESAMPLE clause.
This object is constructed from the :func:`~.expression.tablesample` module
level function as well as the :meth:`.FromClause.tablesample` method available
on all :class:`.FromClause` subclasses.
.. versionadded:: 1.1
.. seealso::
:func:`~.expression.tablesample`
"""
__visit_name__ = 'tablesample'
def __init__(self, selectable, sampling,
name=None,
seed=None):
self.sampling = sampling
self.seed = seed
super(TableSample, self).__init__(selectable, name=name)
@util.dependencies("sqlalchemy.sql.functions")
def _get_method(self, functions):
if isinstance(self.sampling, functions.Function):
return self.sampling
else:
return functions.func.system(self.sampling)
class CTE(Generative, HasSuffixes, Alias):
"""Represent a Common Table Expression.
The :class:`.CTE` object is obtained using the
:meth:`.SelectBase.cte` method from any selectable.
See that method for complete examples.
.. versionadded:: 0.7.6
"""
__visit_name__ = 'cte'
def __init__(self, selectable,
name=None,
recursive=False,
_cte_alias=None,
_restates=frozenset(),
_suffixes=None):
self.recursive = recursive
self._cte_alias = _cte_alias
self._restates = _restates
if _suffixes:
self._suffixes = _suffixes
super(CTE, self).__init__(selectable, name=name)
def _copy_internals(self, clone=_clone, **kw):
super(CTE, self)._copy_internals(clone, **kw)
if self._cte_alias is not None:
self._cte_alias = self
self._restates = frozenset([
clone(elem, **kw) for elem in self._restates
])
@util.dependencies("sqlalchemy.sql.dml")
def _populate_column_collection(self, dml):
if isinstance(self.element, dml.UpdateBase):
for col in self.element._returning:
col._make_proxy(self)
else:
for col in self.element.columns._all_columns:
col._make_proxy(self)
def alias(self, name=None, flat=False):
return CTE(
self.original,
name=name,
recursive=self.recursive,
_cte_alias=self,
_suffixes=self._suffixes
)
def union(self, other):
return CTE(
self.original.union(other),
name=self.name,
recursive=self.recursive,
_restates=self._restates.union([self]),
_suffixes=self._suffixes
)
def union_all(self, other):
return CTE(
self.original.union_all(other),
name=self.name,
recursive=self.recursive,
_restates=self._restates.union([self]),
_suffixes=self._suffixes
)
class HasCTE(object):
"""Mixin that declares a class to include CTE support.
.. versionadded:: 1.1
"""
def cte(self, name=None, recursive=False):
r"""Return a new :class:`.CTE`, or Common Table Expression instance.
Common table expressions are a SQL standard whereby SELECT
statements can draw upon secondary statements specified along
with the primary statement, using a clause called "WITH".
Special semantics regarding UNION can also be employed to
allow "recursive" queries, where a SELECT statement can draw
upon the set of rows that have previously been selected.
CTEs can also be applied to DML constructs UPDATE, INSERT
and DELETE on some databases, both as a source of CTE rows
when combined with RETURNING, as well as a consumer of
CTE rows.
SQLAlchemy detects :class:`.CTE` objects, which are treated
similarly to :class:`.Alias` objects, as special elements
to be delivered to the FROM clause of the statement as well
as to a WITH clause at the top of the statement.
.. versionchanged:: 1.1 Added support for UPDATE/INSERT/DELETE as
CTE, CTEs added to UPDATE/INSERT/DELETE.
:param name: name given to the common table expression. Like
:meth:`.FromClause.alias`, the name can be left as ``None``
in which case an anonymous symbol will be used at query
compile time.
:param recursive: if ``True``, will render ``WITH RECURSIVE``.
A recursive common table expression is intended to be used in
conjunction with UNION ALL in order to derive rows
from those already selected.
The following examples include two from PostgreSQL's documentation at
http://www.postgresql.org/docs/current/static/queries-with.html,
as well as additional examples.
Example 1, non recursive::
from sqlalchemy import (Table, Column, String, Integer,
MetaData, select, func)
metadata = MetaData()
orders = Table('orders', metadata,
Column('region', String),
Column('amount', Integer),
Column('product', String),
Column('quantity', Integer)
)
regional_sales = select([
orders.c.region,
func.sum(orders.c.amount).label('total_sales')
]).group_by(orders.c.region).cte("regional_sales")
top_regions = select([regional_sales.c.region]).\
where(
regional_sales.c.total_sales >
select([
func.sum(regional_sales.c.total_sales)/10
])
).cte("top_regions")
statement = select([
orders.c.region,
orders.c.product,
func.sum(orders.c.quantity).label("product_units"),
func.sum(orders.c.amount).label("product_sales")
]).where(orders.c.region.in_(
select([top_regions.c.region])
)).group_by(orders.c.region, orders.c.product)
result = conn.execute(statement).fetchall()
Example 2, WITH RECURSIVE::
from sqlalchemy import (Table, Column, String, Integer,
MetaData, select, func)
metadata = MetaData()
parts = Table('parts', metadata,
Column('part', String),
Column('sub_part', String),
Column('quantity', Integer),
)
included_parts = select([
parts.c.sub_part,
parts.c.part,
parts.c.quantity]).\
where(parts.c.part=='our part').\
cte(recursive=True)
incl_alias = included_parts.alias()
parts_alias = parts.alias()
included_parts = included_parts.union_all(
select([
parts_alias.c.sub_part,
parts_alias.c.part,
parts_alias.c.quantity
]).
where(parts_alias.c.part==incl_alias.c.sub_part)
)
statement = select([
included_parts.c.sub_part,
func.sum(included_parts.c.quantity).
label('total_quantity')
]).\
group_by(included_parts.c.sub_part)
result = conn.execute(statement).fetchall()
Example 3, an upsert using UPDATE and INSERT with CTEs::
from datetime import date
from sqlalchemy import (MetaData, Table, Column, Integer,
Date, select, literal, and_, exists)
metadata = MetaData()
visitors = Table('visitors', metadata,
Column('product_id', Integer, primary_key=True),
Column('date', Date, primary_key=True),
Column('count', Integer),
)
# add 5 visitors for the product_id == 1
product_id = 1
day = date.today()
count = 5
update_cte = (
visitors.update()
.where(and_(visitors.c.product_id == product_id,
visitors.c.date == day))
.values(count=visitors.c.count + count)
.returning(literal(1))
.cte('update_cte')
)
upsert = visitors.insert().from_select(
[visitors.c.product_id, visitors.c.date, visitors.c.count],
select([literal(product_id), literal(day), literal(count)])
.where(~exists(update_cte.select()))
)
connection.execute(upsert)
.. seealso::
:meth:`.orm.query.Query.cte` - ORM version of
:meth:`.HasCTE.cte`.
"""
return CTE(self, name=name, recursive=recursive)
class FromGrouping(FromClause):
"""Represent a grouping of a FROM clause"""
__visit_name__ = 'grouping'
def __init__(self, element):
self.element = element
def _init_collections(self):
pass
@property
def columns(self):
return self.element.columns
@property
def primary_key(self):
return self.element.primary_key
@property
def foreign_keys(self):
return self.element.foreign_keys
def is_derived_from(self, element):
return self.element.is_derived_from(element)
def alias(self, **kw):
return FromGrouping(self.element.alias(**kw))
@property
def _hide_froms(self):
return self.element._hide_froms
def get_children(self, **kwargs):
return self.element,
def _copy_internals(self, clone=_clone, **kw):
self.element = clone(self.element, **kw)
@property
def _from_objects(self):
return self.element._from_objects
def __getattr__(self, attr):
return getattr(self.element, attr)
def __getstate__(self):
return {'element': self.element}
def __setstate__(self, state):
self.element = state['element']
class TableClause(Immutable, FromClause):
"""Represents a minimal "table" construct.
This is a lightweight table object that has only a name and a
collection of columns, which are typically produced
by the :func:`.expression.column` function::
from sqlalchemy import table, column
user = table("user",
column("id"),
column("name"),
column("description"),
)
The :class:`.TableClause` construct serves as the base for
the more commonly used :class:`~.schema.Table` object, providing
the usual set of :class:`~.expression.FromClause` services including
the ``.c.`` collection and statement generation methods.
It does **not** provide all the additional schema-level services
of :class:`~.schema.Table`, including constraints, references to other
tables, or support for :class:`.MetaData`-level services. It's useful
on its own as an ad-hoc construct used to generate quick SQL
statements when a more fully fledged :class:`~.schema.Table`
is not on hand.
"""
__visit_name__ = 'table'
named_with_column = True
implicit_returning = False
""":class:`.TableClause` doesn't support having a primary key or
column-level defaults, so implicit returning doesn't apply."""
_autoincrement_column = None
"""No PK or default support so no autoincrement column."""
def __init__(self, name, *columns):
"""Produce a new :class:`.TableClause`.
The object returned is an instance of :class:`.TableClause`, which
represents the "syntactical" portion of the schema-level
:class:`~.schema.Table` object.
It may be used to construct lightweight table constructs.
.. versionchanged:: 1.0.0 :func:`.expression.table` can now
be imported from the plain ``sqlalchemy`` namespace like any
other SQL element.
:param name: Name of the table.
:param columns: A collection of :func:`.expression.column` constructs.
"""
super(TableClause, self).__init__()
self.name = self.fullname = name
self._columns = ColumnCollection()
self.primary_key = ColumnSet()
self.foreign_keys = set()
for c in columns:
self.append_column(c)
def _init_collections(self):
pass
@util.memoized_property
def description(self):
if util.py3k:
return self.name
else:
return self.name.encode('ascii', 'backslashreplace')
def append_column(self, c):
self._columns[c.key] = c
c.table = self
def get_children(self, column_collections=True, **kwargs):
if column_collections:
return [c for c in self.c]
else:
return []
@util.dependencies("sqlalchemy.sql.dml")
def insert(self, dml, values=None, inline=False, **kwargs):
"""Generate an :func:`.insert` construct against this
:class:`.TableClause`.
E.g.::
table.insert().values(name='foo')
See :func:`.insert` for argument and usage information.
"""
return dml.Insert(self, values=values, inline=inline, **kwargs)
@util.dependencies("sqlalchemy.sql.dml")
def update(
self, dml, whereclause=None, values=None, inline=False, **kwargs):
"""Generate an :func:`.update` construct against this
:class:`.TableClause`.
E.g.::
table.update().where(table.c.id==7).values(name='foo')
See :func:`.update` for argument and usage information.
"""
return dml.Update(self, whereclause=whereclause,
values=values, inline=inline, **kwargs)
@util.dependencies("sqlalchemy.sql.dml")
def delete(self, dml, whereclause=None, **kwargs):
"""Generate a :func:`.delete` construct against this
:class:`.TableClause`.
E.g.::
table.delete().where(table.c.id==7)
See :func:`.delete` for argument and usage information.
"""
return dml.Delete(self, whereclause, **kwargs)
@property
def _from_objects(self):
return [self]
class ForUpdateArg(ClauseElement):
@classmethod
def parse_legacy_select(self, arg):
"""Parse the for_update argument of :func:`.select`.
:param mode: Defines the lockmode to use.
``None`` - translates to no lockmode
``'update'`` - translates to ``FOR UPDATE``
(standard SQL, supported by most dialects)
``'nowait'`` - translates to ``FOR UPDATE NOWAIT``
(supported by Oracle, PostgreSQL 8.1 upwards)
``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
and ``FOR SHARE`` (for PostgreSQL)
``'read_nowait'`` - translates to ``FOR SHARE NOWAIT``
(supported by PostgreSQL); ``FOR SHARE`` and
``FOR SHARE NOWAIT`` are PostgreSQL-specific renderings.
"""
if arg in (None, False):
return None
nowait = read = False
if arg == 'nowait':
nowait = True
elif arg == 'read':
read = True
elif arg == 'read_nowait':
read = nowait = True
elif arg is not True:
raise exc.ArgumentError("Unknown for_update argument: %r" % arg)
return ForUpdateArg(read=read, nowait=nowait)
@property
def legacy_for_update_value(self):
if self.read and not self.nowait:
return "read"
elif self.read and self.nowait:
return "read_nowait"
elif self.nowait:
return "nowait"
else:
return True
def _copy_internals(self, clone=_clone, **kw):
if self.of is not None:
self.of = [clone(col, **kw) for col in self.of]
def __init__(
self, nowait=False, read=False, of=None,
skip_locked=False, key_share=False):
"""Represents arguments specified to :meth:`.Select.for_update`.
.. versionadded:: 0.9.0
"""
self.nowait = nowait
self.read = read
self.skip_locked = skip_locked
self.key_share = key_share
if of is not None:
self.of = [_interpret_as_column_or_from(elem)
for elem in util.to_list(of)]
else:
self.of = None
class SelectBase(HasCTE, Executable, FromClause):
"""Base class for SELECT statements.
This includes :class:`.Select`, :class:`.CompoundSelect` and
:class:`.TextAsFrom`.
"""
def as_scalar(self):
"""return a 'scalar' representation of this selectable, which can be
used as a column expression.
Typically, a select statement which has only one column in its columns
clause is eligible to be used as a scalar expression.
The returned object is an instance of
:class:`ScalarSelect`.
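For example, a minimal sketch; ``users`` and ``orders`` are assumed,
pre-existing table constructs::
scalar_stmt = select([users.c.name]).\
where(users.c.id == orders.c.user_id).\
as_scalar()
stmt = select([orders.c.id, scalar_stmt.label('user_name')])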
"""
return ScalarSelect(self)
def label(self, name):
"""return a 'scalar' representation of this selectable, embedded as a
subquery with a label.
.. seealso::
:meth:`~.SelectBase.as_scalar`.
"""
return self.as_scalar().label(name)
@_generative
@util.deprecated('0.6',
message="``autocommit()`` is deprecated. Use "
":meth:`.Executable.execution_options` with the "
"'autocommit' flag.")
def autocommit(self):
"""return a new selectable with the 'autocommit' flag set to
True.
"""
self._execution_options = \
self._execution_options.union({'autocommit': True})
def _generate(self):
"""Override the default _generate() method to also clear out
exported collections."""
s = self.__class__.__new__(self.__class__)
s.__dict__ = self.__dict__.copy()
s._reset_exported()
return s
@property
def _from_objects(self):
return [self]
class GenerativeSelect(SelectBase):
"""Base class for SELECT statements where additional elements can be
added.
This serves as the base for :class:`.Select` and :class:`.CompoundSelect`
where elements such as ORDER BY, GROUP BY can be added and column
rendering can be controlled. Compare to :class:`.TextAsFrom`, which,
while it subclasses :class:`.SelectBase` and is also a SELECT construct,
represents a fixed textual string which cannot be altered at this level,
only wrapped as a subquery.
.. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to
provide functionality specific to :class:`.Select` and
:class:`.CompoundSelect` while allowing :class:`.SelectBase` to be
used for other SELECT-like objects, e.g. :class:`.TextAsFrom`.
"""
_order_by_clause = ClauseList()
_group_by_clause = ClauseList()
_limit_clause = None
_offset_clause = None
_for_update_arg = None
def __init__(self,
use_labels=False,
for_update=False,
limit=None,
offset=None,
order_by=None,
group_by=None,
bind=None,
autocommit=None):
self.use_labels = use_labels
if for_update is not False:
self._for_update_arg = (ForUpdateArg.
parse_legacy_select(for_update))
if autocommit is not None:
util.warn_deprecated('autocommit on select() is '
'deprecated. Use .execution_options(a'
'utocommit=True)')
self._execution_options = \
self._execution_options.union(
{'autocommit': autocommit})
if limit is not None:
self._limit_clause = _offset_or_limit_clause(limit)
if offset is not None:
self._offset_clause = _offset_or_limit_clause(offset)
self._bind = bind
if order_by is not None:
self._order_by_clause = ClauseList(
*util.to_list(order_by),
_literal_as_text=_literal_and_labels_as_label_reference)
if group_by is not None:
self._group_by_clause = ClauseList(
*util.to_list(group_by),
_literal_as_text=_literal_as_label_reference)
@property
def for_update(self):
"""Provide legacy dialect support for the ``for_update`` attribute.
"""
if self._for_update_arg is not None:
return self._for_update_arg.legacy_for_update_value
else:
return None
@for_update.setter
def for_update(self, value):
self._for_update_arg = ForUpdateArg.parse_legacy_select(value)
@_generative
def with_for_update(self, nowait=False, read=False, of=None,
skip_locked=False, key_share=False):
"""Specify a ``FOR UPDATE`` clause for this :class:`.GenerativeSelect`.
E.g.::
stmt = select([table]).with_for_update(nowait=True)
On a database like PostgreSQL or Oracle, the above would render a
statement like::
SELECT table.a, table.b FROM table FOR UPDATE NOWAIT
on other backends, the ``nowait`` option is ignored and instead
would produce::
SELECT table.a, table.b FROM table FOR UPDATE
When called with no arguments, the statement will render with
the suffix ``FOR UPDATE``. Additional arguments can then be
provided which allow for common database-specific
variants.
:param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle
and PostgreSQL dialects.
:param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL,
``FOR SHARE`` on PostgreSQL. On PostgreSQL, when combined with
``nowait``, will render ``FOR SHARE NOWAIT``.
:param of: SQL expression or list of SQL expression elements
(typically :class:`.Column` objects or a compatible expression) which
will render into a ``FOR UPDATE OF`` clause; supported by PostgreSQL
and Oracle. May render as a table or as a column depending on
backend.
:param skip_locked: boolean, will render ``FOR UPDATE SKIP LOCKED``
on Oracle and PostgreSQL dialects or ``FOR SHARE SKIP LOCKED`` if
``read=True`` is also specified.
.. versionadded:: 1.1.0
:param key_share: boolean, will render ``FOR NO KEY UPDATE``,
or if combined with ``read=True`` will render ``FOR KEY SHARE``,
on the PostgreSQL dialect.
.. versionadded:: 1.1.0
"""
self._for_update_arg = ForUpdateArg(nowait=nowait, read=read, of=of,
skip_locked=skip_locked,
key_share=key_share)
@_generative
def apply_labels(self):
"""return a new selectable with the 'use_labels' flag set to True.
This will result in column expressions being generated using labels
against their table name, such as "SELECT somecolumn AS
tablename_somecolumn". This allows selectables which contain multiple
FROM clauses to produce a unique set of column names regardless of
name conflicts among the individual FROM clauses.
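For example, a minimal sketch; ``users`` and ``addresses`` are assumed
table constructs::
stmt = select([users, addresses]).\
where(users.c.id == addresses.c.user_id).\
apply_labels()
The columns of the resulting statement then render with names such as
``users_id`` and ``addresses_user_id`` rather than the bare column
names.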
"""
self.use_labels = True
@property
def _limit(self):
"""Get an integer value for the limit. This should only be used
by code that cannot support a limit as a BindParameter or
other custom clause as it will throw an exception if the limit
isn't currently set to an integer.
"""
return _offset_or_limit_clause_asint(self._limit_clause, "limit")
@property
def _simple_int_limit(self):
"""True if the LIMIT clause is a simple integer, False
if it is not present or is a SQL expression.
"""
return isinstance(self._limit_clause, _OffsetLimitParam)
@property
def _simple_int_offset(self):
"""True if the OFFSET clause is a simple integer, False
if it is not present or is a SQL expression.
"""
return isinstance(self._offset_clause, _OffsetLimitParam)
@property
def _offset(self):
"""Get an integer value for the offset. This should only be used
by code that cannot support an offset as a BindParameter or
other custom clause as it will throw an exception if the
offset isn't currently set to an integer.
"""
return _offset_or_limit_clause_asint(self._offset_clause, "offset")
@_generative
def limit(self, limit):
"""return a new selectable with the given LIMIT criterion
applied.
This is a numerical value which usually renders as a ``LIMIT``
expression in the resulting select. Backends that don't
support ``LIMIT`` will attempt to provide similar
functionality.
.. versionchanged:: 1.0.0 - :meth:`.Select.limit` can now
accept arbitrary SQL expressions as well as integer values.
:param limit: an integer LIMIT parameter, or a SQL expression
that provides an integer result.
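For example, combined with :meth:`.GenerativeSelect.offset`; ``users``
is an assumed table construct::
stmt = select([users]).order_by(users.c.id).limit(10).offset(20)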
"""
self._limit_clause = _offset_or_limit_clause(limit)
@_generative
def offset(self, offset):
"""return a new selectable with the given OFFSET criterion
applied.
This is a numeric value which usually renders as an ``OFFSET``
expression in the resulting select. Backends that don't
support ``OFFSET`` will attempt to provide similar
functionality.
.. versionchanged:: 1.0.0 - :meth:`.Select.offset` can now
accept arbitrary SQL expressions as well as integer values.
:param offset: an integer OFFSET parameter, or a SQL expression
that provides an integer result.
"""
self._offset_clause = _offset_or_limit_clause(offset)
@_generative
def order_by(self, *clauses):
"""return a new selectable with the given list of ORDER BY
criterion applied.
The criterion will be appended to any pre-existing ORDER BY
criterion.
"""
self.append_order_by(*clauses)
@_generative
def group_by(self, *clauses):
"""return a new selectable with the given list of GROUP BY
criterion applied.
The criterion will be appended to any pre-existing GROUP BY
criterion.
"""
self.append_group_by(*clauses)
def append_order_by(self, *clauses):
"""Append the given ORDER BY criterion applied to this selectable.
The criterion will be appended to any pre-existing ORDER BY criterion.
This is an **in-place** mutation method; the
:meth:`~.GenerativeSelect.order_by` method is preferred, as it
provides standard :term:`method chaining`.
"""
if len(clauses) == 1 and clauses[0] is None:
self._order_by_clause = ClauseList()
else:
if getattr(self, '_order_by_clause', None) is not None:
clauses = list(self._order_by_clause) + list(clauses)
self._order_by_clause = ClauseList(
*clauses,
_literal_as_text=_literal_and_labels_as_label_reference)
def append_group_by(self, *clauses):
"""Append the given GROUP BY criterion applied to this selectable.
The criterion will be appended to any pre-existing GROUP BY criterion.
This is an **in-place** mutation method; the
:meth:`~.GenerativeSelect.group_by` method is preferred, as it
provides standard :term:`method chaining`.
"""
if len(clauses) == 1 and clauses[0] is None:
self._group_by_clause = ClauseList()
else:
if getattr(self, '_group_by_clause', None) is not None:
clauses = list(self._group_by_clause) + list(clauses)
self._group_by_clause = ClauseList(
*clauses, _literal_as_text=_literal_as_label_reference)
@property
def _label_resolve_dict(self):
raise NotImplementedError()
def _copy_internals(self, clone=_clone, **kw):
if self._limit_clause is not None:
self._limit_clause = clone(self._limit_clause, **kw)
if self._offset_clause is not None:
self._offset_clause = clone(self._offset_clause, **kw)
class CompoundSelect(GenerativeSelect):
"""Forms the basis of ``UNION``, ``UNION ALL``, and other
SELECT-based set operations.
.. seealso::
:func:`.union`
:func:`.union_all`
:func:`.intersect`
:func:`.intersect_all`
:func:`.except`
:func:`.except_all`
"""
__visit_name__ = 'compound_select'
UNION = util.symbol('UNION')
UNION_ALL = util.symbol('UNION ALL')
EXCEPT = util.symbol('EXCEPT')
EXCEPT_ALL = util.symbol('EXCEPT ALL')
INTERSECT = util.symbol('INTERSECT')
INTERSECT_ALL = util.symbol('INTERSECT ALL')
_is_from_container = True
def __init__(self, keyword, *selects, **kwargs):
self._auto_correlate = kwargs.pop('correlate', False)
self.keyword = keyword
self.selects = []
numcols = None
# some DBs do not like ORDER BY in the inner queries of a UNION, etc.
for n, s in enumerate(selects):
s = _clause_element_as_expr(s)
if not numcols:
numcols = len(s.c._all_columns)
elif len(s.c._all_columns) != numcols:
raise exc.ArgumentError(
'All selectables passed to '
'CompoundSelect must have identical numbers of '
'columns; select #%d has %d columns, select '
'#%d has %d' %
(1, len(self.selects[0].c._all_columns),
n + 1, len(s.c._all_columns))
)
self.selects.append(s.self_group(self))
GenerativeSelect.__init__(self, **kwargs)
@property
def _label_resolve_dict(self):
d = dict(
(c.key, c) for c in self.c
)
return d, d, d
@classmethod
def _create_union(cls, *selects, **kwargs):
r"""Return a ``UNION`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
A similar :func:`union()` method is available on all
:class:`.FromClause` subclasses.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
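For example, a minimal sketch; ``users`` and ``customers`` are assumed
table constructs::
from sqlalchemy import select, union
stmt = union(
select([users.c.name]),
select([customers.c.name])
).order_by('name')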
"""
return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
@classmethod
def _create_union_all(cls, *selects, **kwargs):
r"""Return a ``UNION ALL`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
A similar :func:`union_all()` method is available on all
:class:`.FromClause` subclasses.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
@classmethod
def _create_except(cls, *selects, **kwargs):
r"""Return an ``EXCEPT`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
@classmethod
def _create_except_all(cls, *selects, **kwargs):
r"""Return an ``EXCEPT ALL`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
@classmethod
def _create_intersect(cls, *selects, **kwargs):
r"""Return an ``INTERSECT`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
@classmethod
def _create_intersect_all(cls, *selects, **kwargs):
r"""Return an ``INTERSECT ALL`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(
CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
def _scalar_type(self):
return self.selects[0]._scalar_type()
def self_group(self, against=None):
return FromGrouping(self)
def is_derived_from(self, fromclause):
for s in self.selects:
if s.is_derived_from(fromclause):
return True
return False
def _populate_column_collection(self):
for cols in zip(*[s.c._all_columns for s in self.selects]):
# this is a slightly hacky thing - the union exports a
# column that resembles just that of the *first* selectable.
# to get at a "composite" column, particularly foreign keys,
# you have to dig through the proxies collection which we
# generate below. We may want to improve upon this, such as
# perhaps _make_proxy can accept a list of other columns
# that are "shared" - schema.column can then copy all the
# ForeignKeys in. this would allow the union() to have all
# those fks too.
proxy = cols[0]._make_proxy(
self, name=cols[0]._label if self.use_labels else None,
key=cols[0]._key_label if self.use_labels else None)
# hand-construct the "_proxies" collection to include all
# derived columns place a 'weight' annotation corresponding
# to how low in the list of select()s the column occurs, so
# that the corresponding_column() operation can resolve
# conflicts
proxy._proxies = [
c._annotate({'weight': i + 1}) for (i, c) in enumerate(cols)]
def _refresh_for_new_column(self, column):
for s in self.selects:
s._refresh_for_new_column(column)
if not self._cols_populated:
return None
raise NotImplementedError("CompoundSelect constructs don't support "
"addition of columns to underlying "
"selectables")
def _copy_internals(self, clone=_clone, **kw):
super(CompoundSelect, self)._copy_internals(clone, **kw)
self._reset_exported()
self.selects = [clone(s, **kw) for s in self.selects]
if hasattr(self, '_col_map'):
del self._col_map
for attr in (
'_order_by_clause', '_group_by_clause', '_for_update_arg'):
if getattr(self, attr) is not None:
setattr(self, attr, clone(getattr(self, attr), **kw))
def get_children(self, column_collections=True, **kwargs):
return (column_collections and list(self.c) or []) \
+ [self._order_by_clause, self._group_by_clause] \
+ list(self.selects)
def bind(self):
if self._bind:
return self._bind
for s in self.selects:
e = s.bind
if e:
return e
else:
return None
def _set_bind(self, bind):
self._bind = bind
bind = property(bind, _set_bind)
class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
"""Represents a ``SELECT`` statement.
"""
__visit_name__ = 'select'
_prefixes = ()
_suffixes = ()
_hints = util.immutabledict()
_statement_hints = ()
_distinct = False
_from_cloned = None
_correlate = ()
_correlate_except = None
_memoized_property = SelectBase._memoized_property
_is_select = True
def __init__(self,
columns=None,
whereclause=None,
from_obj=None,
distinct=False,
having=None,
correlate=True,
prefixes=None,
suffixes=None,
**kwargs):
"""Construct a new :class:`.Select`.
Similar functionality is also available via the
:meth:`.FromClause.select` method on any :class:`.FromClause`.
All arguments which accept :class:`.ClauseElement` arguments also
accept string arguments, which will be converted as appropriate into
either :func:`text()` or :func:`literal_column()` constructs.
.. seealso::
:ref:`coretutorial_selecting` - Core Tutorial description of
:func:`.select`.
:param columns:
A list of :class:`.ColumnElement` or :class:`.FromClause`
objects which will form the columns clause of the resulting
statement. For those objects that are instances of
:class:`.FromClause` (typically :class:`.Table` or :class:`.Alias`
objects), the :attr:`.FromClause.c` collection is extracted
to form a collection of :class:`.ColumnElement` objects.
This parameter will also accept :class:`.Text` constructs as
given, as well as ORM-mapped classes.
.. note::
The :paramref:`.select.columns` parameter is not available
in the method form of :func:`.select`, e.g.
:meth:`.FromClause.select`.
.. seealso::
:meth:`.Select.column`
:meth:`.Select.with_only_columns`
:param whereclause:
A :class:`.ClauseElement` expression which will be used to form the
``WHERE`` clause. It is typically preferable to add WHERE
criterion to an existing :class:`.Select` using method chaining
with :meth:`.Select.where`.
.. seealso::
:meth:`.Select.where`
:param from_obj:
A list of :class:`.ClauseElement` objects which will be added to the
``FROM`` clause of the resulting statement. This is equivalent
to calling :meth:`.Select.select_from` using method chaining on
an existing :class:`.Select` object.
.. seealso::
:meth:`.Select.select_from` - full description of explicit
FROM clause specification.
:param autocommit:
Deprecated. Use ``.execution_options(autocommit=<True|False>)``
to set the autocommit option.
.. seealso::
:meth:`.Executable.execution_options`
:param bind=None:
an :class:`~.Engine` or :class:`~.Connection` instance
to which the
resulting :class:`.Select` object will be bound. The
:class:`.Select` object will otherwise automatically bind to
whatever :class:`~.base.Connectable` instances can be located within
its contained :class:`.ClauseElement` members.
:param correlate=True:
indicates that this :class:`.Select` object should have its
contained :class:`.FromClause` elements "correlated" to an enclosing
:class:`.Select` object. It is typically preferable to specify
correlations on an existing :class:`.Select` construct using
:meth:`.Select.correlate`.
.. seealso::
:meth:`.Select.correlate` - full description of correlation.
:param distinct=False:
when ``True``, applies a ``DISTINCT`` qualifier to the columns
clause of the resulting statement.
The boolean argument may also be a column expression or list
of column expressions - this is a special calling form which
is understood by the PostgreSQL dialect to render the
``DISTINCT ON (<columns>)`` syntax.
``distinct`` is also available on an existing :class:`.Select`
object via the :meth:`~.Select.distinct` method.
.. seealso::
:meth:`.Select.distinct`
:param for_update=False:
when ``True``, applies ``FOR UPDATE`` to the end of the
resulting statement.
.. deprecated:: 0.9.0 - use
:meth:`.Select.with_for_update` to specify the
structure of the ``FOR UPDATE`` clause.
``for_update`` accepts various string values interpreted by
specific backends, including:
* ``"read"`` - on MySQL, translates to ``LOCK IN SHARE MODE``;
on PostgreSQL, translates to ``FOR SHARE``.
* ``"nowait"`` - on PostgreSQL and Oracle, translates to
``FOR UPDATE NOWAIT``.
* ``"read_nowait"`` - on PostgreSQL, translates to
``FOR SHARE NOWAIT``.
.. seealso::
:meth:`.Select.with_for_update` - improved API for
specifying the ``FOR UPDATE`` clause.
:param group_by:
a list of :class:`.ClauseElement` objects which will comprise the
``GROUP BY`` clause of the resulting select. This parameter
is typically specified more naturally using the
:meth:`.Select.group_by` method on an existing :class:`.Select`.
.. seealso::
:meth:`.Select.group_by`
:param having:
a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
of the resulting select when ``GROUP BY`` is used. This parameter
is typically specified more naturally using the
:meth:`.Select.having` method on an existing :class:`.Select`.
.. seealso::
:meth:`.Select.having`
:param limit=None:
a numerical value which usually renders as a ``LIMIT``
expression in the resulting select. Backends that don't
support ``LIMIT`` will attempt to provide similar
functionality. This parameter is typically specified more naturally
using the :meth:`.Select.limit` method on an existing
:class:`.Select`.
.. seealso::
:meth:`.Select.limit`
:param offset=None:
a numeric value which usually renders as an ``OFFSET``
expression in the resulting select. Backends that don't
support ``OFFSET`` will attempt to provide similar
functionality. This parameter is typically specified more naturally
using the :meth:`.Select.offset` method on an existing
:class:`.Select`.
.. seealso::
:meth:`.Select.offset`
:param order_by:
a scalar or list of :class:`.ClauseElement` objects which will
comprise the ``ORDER BY`` clause of the resulting select.
This parameter is typically specified more naturally using the
:meth:`.Select.order_by` method on an existing :class:`.Select`.
.. seealso::
:meth:`.Select.order_by`
:param use_labels=False:
when ``True``, the statement will be generated using labels
for each column in the columns clause, which qualify each
column with its parent table's (or alias's) name so that name
conflicts between columns in different tables don't occur.
The format of the label is <tablename>_<column>. The "c"
collection of the resulting :class:`.Select` object will use these
names as well for targeting column members.
This parameter can also be specified on an existing
:class:`.Select` object using the :meth:`.Select.apply_labels`
method.
.. seealso::
:meth:`.Select.apply_labels`
"""
self._auto_correlate = correlate
if distinct is not False:
if distinct is True:
self._distinct = True
else:
self._distinct = [
_literal_as_text(e)
for e in util.to_list(distinct)
]
if from_obj is not None:
self._from_obj = util.OrderedSet(
_interpret_as_from(f)
for f in util.to_list(from_obj))
else:
self._from_obj = util.OrderedSet()
try:
cols_present = bool(columns)
except TypeError:
raise exc.ArgumentError("columns argument to select() must "
"be a Python list or other iterable")
if cols_present:
self._raw_columns = []
for c in columns:
c = _interpret_as_column_or_from(c)
if isinstance(c, ScalarSelect):
c = c.self_group(against=operators.comma_op)
self._raw_columns.append(c)
else:
self._raw_columns = []
if whereclause is not None:
self._whereclause = _literal_as_text(
whereclause).self_group(against=operators._asbool)
else:
self._whereclause = None
if having is not None:
self._having = _literal_as_text(
having).self_group(against=operators._asbool)
else:
self._having = None
if prefixes:
self._setup_prefixes(prefixes)
if suffixes:
self._setup_suffixes(suffixes)
GenerativeSelect.__init__(self, **kwargs)
@property
def _froms(self):
# would love to cache this,
# but there's just enough edge cases, particularly now that
# declarative encourages construction of SQL expressions
# without tables present, to just regen this each time.
froms = []
seen = set()
translate = self._from_cloned
for item in itertools.chain(
_from_objects(*self._raw_columns),
_from_objects(self._whereclause)
if self._whereclause is not None else (),
self._from_obj
):
if item is self:
raise exc.InvalidRequestError(
"select() construct refers to itself as a FROM")
if translate and item in translate:
item = translate[item]
if not seen.intersection(item._cloned_set):
froms.append(item)
seen.update(item._cloned_set)
return froms
def _get_display_froms(self, explicit_correlate_froms=None,
implicit_correlate_froms=None):
"""Return the full list of 'from' clauses to be displayed.
Takes into account a set of existing froms which may be
rendered in the FROM clause of enclosing selects; this Select
may want to leave those absent if it is automatically
correlating.
"""
froms = self._froms
toremove = set(itertools.chain(*[
_expand_cloned(f._hide_froms)
for f in froms]))
if toremove:
# if we're maintaining clones of froms,
# add the copies out to the toremove list. only include
# clones that are lexical equivalents.
if self._from_cloned:
toremove.update(
self._from_cloned[f] for f in
toremove.intersection(self._from_cloned)
if self._from_cloned[f]._is_lexical_equivalent(f)
)
# filter out to FROM clauses not in the list,
# using a list to maintain ordering
froms = [f for f in froms if f not in toremove]
if self._correlate:
to_correlate = self._correlate
if to_correlate:
froms = [
f for f in froms if f not in
_cloned_intersection(
_cloned_intersection(
froms, explicit_correlate_froms or ()),
to_correlate
)
]
if self._correlate_except is not None:
froms = [
f for f in froms if f not in
_cloned_difference(
_cloned_intersection(
froms, explicit_correlate_froms or ()),
self._correlate_except
)
]
if self._auto_correlate and \
implicit_correlate_froms and \
len(froms) > 1:
froms = [
f for f in froms if f not in
_cloned_intersection(froms, implicit_correlate_froms)
]
if not len(froms):
raise exc.InvalidRequestError("Select statement '%s"
"' returned no FROM clauses "
"due to auto-correlation; "
"specify correlate(<tables>) "
"to control correlation "
"manually." % self)
return froms
def _scalar_type(self):
elem = self._raw_columns[0]
cols = list(elem._select_iterable)
return cols[0].type
@property
def froms(self):
"""Return the displayed list of FromClause elements."""
return self._get_display_froms()
def with_statement_hint(self, text, dialect_name='*'):
"""add a statement hint to this :class:`.Select`.
This method is similar to :meth:`.Select.with_hint` except that
it does not require an individual table, and instead applies to the
statement as a whole.
Hints here are specific to the backend database and may include
directives such as isolation levels, file directives, fetch directives,
etc.
.. versionadded:: 1.0.0
.. seealso::
:meth:`.Select.with_hint`
"""
return self.with_hint(None, text, dialect_name)
@_generative
def with_hint(self, selectable, text, dialect_name='*'):
r"""Add an indexing or other executional context hint for the given
selectable to this :class:`.Select`.
The text of the hint is rendered in the appropriate
location for the database backend in use, relative
to the given :class:`.Table` or :class:`.Alias` passed as the
``selectable`` argument. The dialect implementation
typically uses Python string substitution syntax
with the token ``%(name)s`` to render the name of
the table or alias. E.g. when using Oracle, the
following::
select([mytable]).\
with_hint(mytable, "index(%(name)s ix_mytable)")
Would render SQL as::
select /*+ index(mytable ix_mytable) */ ... from mytable
The ``dialect_name`` option will limit the rendering of a particular
hint to a particular backend. Such as, to add hints for both Oracle
and Sybase simultaneously::
select([mytable]).\
with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
.. seealso::
:meth:`.Select.with_statement_hint`
"""
if selectable is None:
self._statement_hints += ((dialect_name, text), )
else:
self._hints = self._hints.union(
{(selectable, dialect_name): text})
@property
def type(self):
raise exc.InvalidRequestError("Select objects don't have a type. "
"Call as_scalar() on this Select "
"object to return a 'scalar' version "
"of this Select.")
@_memoized_property.method
def locate_all_froms(self):
"""return a Set of all FromClause elements referenced by this Select.
This set is a superset of that returned by the ``froms`` property,
which is specifically for those FromClause elements that would
actually be rendered.
"""
froms = self._froms
return froms + list(_from_objects(*froms))
@property
def inner_columns(self):
"""an iterator of all ColumnElement expressions which would
be rendered into the columns clause of the resulting SELECT statement.
"""
return _select_iterables(self._raw_columns)
@_memoized_property
def _label_resolve_dict(self):
with_cols = dict(
(c._resolve_label or c._label or c.key, c)
for c in _select_iterables(self._raw_columns)
if c._allow_label_resolve)
only_froms = dict(
(c.key, c) for c in
_select_iterables(self.froms) if c._allow_label_resolve)
only_cols = with_cols.copy()
for key, value in only_froms.items():
with_cols.setdefault(key, value)
return with_cols, only_froms, only_cols
def is_derived_from(self, fromclause):
if self in fromclause._cloned_set:
return True
for f in self.locate_all_froms():
if f.is_derived_from(fromclause):
return True
return False
def _copy_internals(self, clone=_clone, **kw):
super(Select, self)._copy_internals(clone, **kw)
# Select() object has been cloned and probably adapted by the
# given clone function. Apply the cloning function to internal
# objects
# 1. keep a dictionary of the froms we've cloned, and what
# they've become. This is consulted later when we derive
# additional froms from "whereclause" and the columns clause,
# which may still reference the uncloned parent table.
# as of 0.7.4 we also put the current version of _froms, which
# gets cleared on each generation. previously we were "baking"
# _froms into self._from_obj.
self._from_cloned = from_cloned = dict(
(f, clone(f, **kw)) for f in self._from_obj.union(self._froms))
# 3. update persistent _from_obj with the cloned versions.
self._from_obj = util.OrderedSet(from_cloned[f] for f in
self._from_obj)
# the _correlate collection is done separately, what can happen
# here is the same item is _correlate as in _from_obj but the
# _correlate version has an annotation on it - (specifically
# RelationshipProperty.Comparator._criterion_exists() does
# this). Also keep _correlate liberally open with its previous
# contents, as this set is used for matching, not rendering.
self._correlate = set(clone(f) for f in
self._correlate).union(self._correlate)
# 4. clone other things. The difficulty here is that Column
# objects are not actually cloned, and refer to their original
# .table, resulting in the wrong "from" parent after a clone
# operation. Hence _from_cloned and _from_obj supersede what is
# present here.
self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
for attr in '_whereclause', '_having', '_order_by_clause', \
'_group_by_clause', '_for_update_arg':
if getattr(self, attr) is not None:
setattr(self, attr, clone(getattr(self, attr), **kw))
# erase exported column list, _froms collection,
# etc.
self._reset_exported()
def get_children(self, column_collections=True, **kwargs):
"""return child elements as per the ClauseElement specification."""
return (column_collections and list(self.columns) or []) + \
self._raw_columns + list(self._froms) + \
[x for x in
(self._whereclause, self._having,
self._order_by_clause, self._group_by_clause)
if x is not None]
@_generative
def column(self, column):
"""return a new select() construct with the given column expression
added to its columns clause.
E.g.::
my_select = my_select.column(table.c.new_column)
See the documentation for :meth:`.Select.with_only_columns`
for guidelines on adding /replacing the columns of a
:class:`.Select` object.
"""
self.append_column(column)
@util.dependencies("sqlalchemy.sql.util")
def reduce_columns(self, sqlutil, only_synonyms=True):
"""Return a new :func:`.select` construct with redundantly
named, equivalently-valued columns removed from the columns clause.
"Redundant" here means two columns where one refers to the
other either based on foreign key, or via a simple equality
comparison in the WHERE clause of the statement. The primary purpose
of this method is to automatically construct a select statement
with all uniquely-named columns, without the need to use
table-qualified labels as :meth:`.apply_labels` does.
When columns are omitted based on foreign key, the referred-to
column is the one that's kept. When columns are omitted based on
WHERE equivalence, the first column in the columns clause is the
one that's kept.
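For example, a minimal sketch; ``user`` and ``address`` are assumed
table constructs, and ``only_synonyms=False`` is passed since the
equivalent columns here don't share the same name::
stmt = select([user, address]).\
where(user.c.id == address.c.user_id).\
reduce_columns(only_synonyms=False)
The resulting columns clause omits ``address.c.user_id``, since the
WHERE clause establishes it as equivalent to ``user.c.id``, which
appears first.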
:param only_synonyms: when True, limit the removal of columns
to those which have the same name as the equivalent. Otherwise,
all columns that are equivalent to another are removed.
.. versionadded:: 0.8
"""
return self.with_only_columns(
sqlutil.reduce_columns(
self.inner_columns,
only_synonyms=only_synonyms,
*(self._whereclause, ) + tuple(self._from_obj)
)
)
@_generative
def with_only_columns(self, columns):
r"""Return a new :func:`.select` construct with its columns
clause replaced with the given columns.
This method is exactly equivalent to as if the original
:func:`.select` had been called with the given columns
clause. I.e. a statement::
s = select([table1.c.a, table1.c.b])
s = s.with_only_columns([table1.c.b])
should be exactly equivalent to::
s = select([table1.c.b])
This means that FROM clauses which are only derived
from the column list will be discarded if the new column
list no longer contains that FROM::
>>> table1 = table('t1', column('a'), column('b'))
>>> table2 = table('t2', column('a'), column('b'))
>>> s1 = select([table1.c.a, table2.c.b])
>>> print s1
SELECT t1.a, t2.b FROM t1, t2
>>> s2 = s1.with_only_columns([table2.c.b])
>>> print s2
SELECT t2.b FROM t1
The preferred way to maintain a specific FROM clause
in the construct, assuming it won't be represented anywhere
else (i.e. not in the WHERE clause, etc.) is to set it using
:meth:`.Select.select_from`::
>>> s1 = select([table1.c.a, table2.c.b]).\
... select_from(table1.join(table2,
... table1.c.a==table2.c.a))
>>> s2 = s1.with_only_columns([table2.c.b])
>>> print s2
SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a
Care should also be taken to use the correct
set of column objects passed to :meth:`.Select.with_only_columns`.
Since the method is essentially equivalent to calling the
:func:`.select` construct in the first place with the given
columns, the columns passed to :meth:`.Select.with_only_columns`
should usually be a subset of those which were passed
to the :func:`.select` construct, not those which are available
from the ``.c`` collection of that :func:`.select`. That
is::
s = select([table1.c.a, table1.c.b]).select_from(table1)
s = s.with_only_columns([table1.c.b])
and **not**::
# usually incorrect
s = s.with_only_columns([s.c.b])
The latter would produce the SQL::
SELECT b
FROM (SELECT t1.a AS a, t1.b AS b
FROM t1), t1
Since the :func:`.select` construct is essentially being
asked to select both from ``table1`` as well as itself.
"""
self._reset_exported()
rc = []
for c in columns:
c = _interpret_as_column_or_from(c)
if isinstance(c, ScalarSelect):
c = c.self_group(against=operators.comma_op)
rc.append(c)
self._raw_columns = rc
@_generative
def where(self, whereclause):
"""return a new select() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
"""
self.append_whereclause(whereclause)
@_generative
def having(self, having):
"""return a new select() construct with the given expression added to
its HAVING clause, joined to the existing clause via AND, if any.
"""
self.append_having(having)
@_generative
def distinct(self, *expr):
r"""Return a new select() construct which will apply DISTINCT to its
columns clause.
:param \*expr: optional column expressions. When present,
the PostgreSQL dialect will render a ``DISTINCT ON (<expressions>)``
construct.
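For example, a minimal sketch; ``users`` is an assumed table
construct::
stmt = select([users.c.name, users.c.id]).\
distinct(users.c.name).\
order_by(users.c.name, users.c.id)
On the PostgreSQL dialect this renders the ``DISTINCT ON (users.name)``
form described above.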
"""
if expr:
expr = [_literal_as_label_reference(e) for e in expr]
if isinstance(self._distinct, list):
self._distinct = self._distinct + expr
else:
self._distinct = expr
else:
self._distinct = True
@_generative
def select_from(self, fromclause):
r"""return a new :func:`.select` construct with the
given FROM expression
merged into its list of FROM objects.
E.g.::
table1 = table('t1', column('a'))
table2 = table('t2', column('b'))
s = select([table1.c.a]).\
select_from(
table1.join(table2, table1.c.a==table2.c.b)
)
The "from" list is a unique set on the identity of each element,
so adding an already present :class:`.Table` or other selectable
will have no effect. Passing a :class:`.Join` that refers
to an already present :class:`.Table` or other selectable will have
the effect of concealing the presence of that selectable as
an individual element in the rendered FROM list, instead
rendering it into a JOIN clause.
While the typical purpose of :meth:`.Select.select_from` is to
replace the default, derived FROM clause with a join, it can
also be called with individual table elements, multiple times
if desired, in the case that the FROM clause cannot be fully
derived from the columns clause::
select([func.count('*')]).select_from(table1)
"""
self.append_from(fromclause)
@_generative
def correlate(self, *fromclauses):
r"""return a new :class:`.Select` which will correlate the given FROM
clauses to that of an enclosing :class:`.Select`.
Calling this method turns off the :class:`.Select` object's
default behavior of "auto-correlation". Normally, FROM elements
which appear in a :class:`.Select` that encloses this one via
its :term:`WHERE clause`, ORDER BY, HAVING or
:term:`columns clause` will be omitted from this :class:`.Select`
object's :term:`FROM clause`.
Setting an explicit correlation collection using the
:meth:`.Select.correlate` method provides a fixed list of FROM objects
that can potentially take place in this process.
When :meth:`.Select.correlate` is used to apply specific FROM clauses
for correlation, the FROM elements become candidates for
correlation regardless of how deeply nested this :class:`.Select`
object is, relative to an enclosing :class:`.Select` which refers to
the same FROM object. This is in contrast to the behavior of
"auto-correlation" which only correlates to an immediate enclosing
:class:`.Select`. Multi-level correlation ensures that the link
between enclosed and enclosing :class:`.Select` is always via
at least one WHERE/ORDER BY/HAVING/columns clause in order for
correlation to take place.
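For example, an explicitly correlated scalar subquery; ``users`` and
``addresses`` are assumed table constructs::
subq = select([addresses.c.email]).\
where(addresses.c.user_id == users.c.id).\
correlate(users).\
as_scalar()
stmt = select([users.c.name, subq.label('email')])
Here ``users`` is omitted from the subquery's FROM clause, since the
enclosing statement against ``users`` provides it.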
If ``None`` is passed, the :class:`.Select` object will correlate
none of its FROM entries, and all will render unconditionally
in the local FROM clause.
:param \*fromclauses: a list of one or more :class:`.FromClause`
constructs, or other compatible constructs (i.e. ORM-mapped
classes) to become part of the correlate collection.
.. versionchanged:: 0.8.0 ORM-mapped classes are accepted by
:meth:`.Select.correlate`.
.. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
longer unconditionally removes entries from the FROM clause;
instead, the candidate FROM entries must also be matched by a FROM
entry located in an enclosing :class:`.Select`, which ultimately
encloses this one as present in the WHERE clause, ORDER BY clause,
HAVING clause, or columns clause of an enclosing :meth:`.Select`.
.. versionchanged:: 0.8.2 explicit correlation takes place
via any level of nesting of :class:`.Select` objects; in previous
0.8 versions, correlation would only occur relative to the
immediate enclosing :class:`.Select` construct.
.. seealso::
:meth:`.Select.correlate_except`
:ref:`correlated_subqueries`
"""
self._auto_correlate = False
if fromclauses and fromclauses[0] is None:
self._correlate = ()
else:
self._correlate = set(self._correlate).union(
_interpret_as_from(f) for f in fromclauses)
@_generative
def correlate_except(self, *fromclauses):
r"""return a new :class:`.Select` which will omit the given FROM
clauses from the auto-correlation process.
Calling :meth:`.Select.correlate_except` turns off the
:class:`.Select` object's default behavior of
"auto-correlation" for the given FROM elements. An element
specified here will unconditionally appear in the FROM list, while
all other FROM elements remain subject to normal auto-correlation
behaviors.
.. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except`
method was improved to fully prevent FROM clauses specified here
from being omitted from the immediate FROM clause of this
:class:`.Select`.
If ``None`` is passed, the :class:`.Select` object will correlate
all of its FROM entries.
.. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will
correctly auto-correlate all FROM clauses.
:param \*fromclauses: a list of one or more :class:`.FromClause`
constructs, or other compatible constructs (i.e. ORM-mapped
classes) to become part of the correlate-exception collection.
.. seealso::
:meth:`.Select.correlate`
:ref:`correlated_subqueries`
"""
self._auto_correlate = False
if fromclauses and fromclauses[0] is None:
self._correlate_except = ()
else:
self._correlate_except = set(self._correlate_except or ()).union(
_interpret_as_from(f) for f in fromclauses)
def append_correlation(self, fromclause):
"""append the given correlation expression to this select()
construct.
This is an **in-place** mutation method; the
:meth:`~.Select.correlate` method is preferred, as it provides
standard :term:`method chaining`.
"""
self._auto_correlate = False
self._correlate = set(self._correlate).union(
_interpret_as_from(f) for f in fromclause)
def append_column(self, column):
"""append the given column expression to the columns clause of this
select() construct.
E.g.::
my_select.append_column(some_table.c.new_column)
This is an **in-place** mutation method; the
:meth:`~.Select.column` method is preferred, as it provides standard
:term:`method chaining`.
See the documentation for :meth:`.Select.with_only_columns`
for guidelines on adding /replacing the columns of a
:class:`.Select` object.
"""
self._reset_exported()
column = _interpret_as_column_or_from(column)
if isinstance(column, ScalarSelect):
column = column.self_group(against=operators.comma_op)
self._raw_columns = self._raw_columns + [column]
def append_prefix(self, clause):
"""append the given columns clause prefix expression to this select()
construct.
This is an **in-place** mutation method; the
:meth:`~.Select.prefix_with` method is preferred, as it provides
standard :term:`method chaining`.
"""
clause = _literal_as_text(clause)
self._prefixes = self._prefixes + (clause,)
def append_whereclause(self, whereclause):
"""append the given expression to this select() construct's WHERE
criterion.
The expression will be joined to existing WHERE criterion via AND.
This is an **in-place** mutation method; the
:meth:`~.Select.where` method is preferred, as it provides standard
:term:`method chaining`.
"""
self._reset_exported()
self._whereclause = and_(
True_._ifnone(self._whereclause), whereclause)
def append_having(self, having):
"""append the given expression to this select() construct's HAVING
criterion.
The expression will be joined to existing HAVING criterion via AND.
This is an **in-place** mutation method; the
:meth:`~.Select.having` method is preferred, as it provides standard
:term:`method chaining`.
"""
self._reset_exported()
self._having = and_(True_._ifnone(self._having), having)
def append_from(self, fromclause):
"""append the given FromClause expression to this select() construct's
FROM clause.
This is an **in-place** mutation method; the
:meth:`~.Select.select_from` method is preferred, as it provides
standard :term:`method chaining`.
"""
self._reset_exported()
fromclause = _interpret_as_from(fromclause)
self._from_obj = self._from_obj.union([fromclause])
@_memoized_property
def _columns_plus_names(self):
if self.use_labels:
names = set()
def name_for_col(c):
if c._label is None or not c._render_label_in_columns_clause:
return (None, c)
name = c._label
if name in names:
name = c.anon_label
else:
names.add(name)
return name, c
return [
name_for_col(c)
for c in util.unique_list(
_select_iterables(self._raw_columns))
]
else:
return [
(None, c)
for c in util.unique_list(
_select_iterables(self._raw_columns))
]
def _populate_column_collection(self):
for name, c in self._columns_plus_names:
if not hasattr(c, '_make_proxy'):
continue
if name is None:
key = None
elif self.use_labels:
key = c._key_label
if key is not None and key in self.c:
key = c.anon_label
else:
key = None
c._make_proxy(self, key=key,
name=name,
name_is_truncatable=True)
def _refresh_for_new_column(self, column):
for fromclause in self._froms:
col = fromclause._refresh_for_new_column(column)
if col is not None:
if col in self.inner_columns and self._cols_populated:
our_label = col._key_label if self.use_labels else col.key
if our_label not in self.c:
return col._make_proxy(
self,
name=col._label if self.use_labels else None,
key=col._key_label if self.use_labels else None,
name_is_truncatable=True)
return None
return None
def _needs_parens_for_grouping(self):
return (
self._limit_clause is not None or
self._offset_clause is not None or
bool(self._order_by_clause.clauses)
)
def self_group(self, against=None):
"""return a 'grouping' construct as per the ClauseElement
specification.
This produces an element that can be embedded in an expression. Note
that this method is called automatically as needed when constructing
expressions and should not require explicit use.
"""
if isinstance(against, CompoundSelect) and \
not self._needs_parens_for_grouping():
return self
return FromGrouping(self)
def union(self, other, **kwargs):
"""return a SQL UNION of this select() construct against the given
selectable."""
return CompoundSelect._create_union(self, other, **kwargs)
def union_all(self, other, **kwargs):
"""return a SQL UNION ALL of this select() construct against the given
selectable.
"""
return CompoundSelect._create_union_all(self, other, **kwargs)
def except_(self, other, **kwargs):
"""return a SQL EXCEPT of this select() construct against the given
selectable."""
return CompoundSelect._create_except(self, other, **kwargs)
def except_all(self, other, **kwargs):
"""return a SQL EXCEPT ALL of this select() construct against the
given selectable.
"""
return CompoundSelect._create_except_all(self, other, **kwargs)
def intersect(self, other, **kwargs):
"""return a SQL INTERSECT of this select() construct against the given
selectable.
"""
return CompoundSelect._create_intersect(self, other, **kwargs)
def intersect_all(self, other, **kwargs):
"""return a SQL INTERSECT ALL of this select() construct against the
given selectable.
"""
return CompoundSelect._create_intersect_all(self, other, **kwargs)
def bind(self):
if self._bind:
return self._bind
froms = self._froms
if not froms:
for c in self._raw_columns:
e = c.bind
if e:
self._bind = e
return e
else:
e = list(froms)[0].bind
if e:
self._bind = e
return e
return None
def _set_bind(self, bind):
self._bind = bind
bind = property(bind, _set_bind)
class ScalarSelect(Generative, Grouping):
_from_objects = []
_is_from_container = True
def __init__(self, element):
self.element = element
self.type = element._scalar_type()
@property
def columns(self):
raise exc.InvalidRequestError('Scalar Select expression has no '
'columns; use this object directly '
'within a column-level expression.')
c = columns
@_generative
def where(self, crit):
"""Apply a WHERE clause to the SELECT statement referred to
by this :class:`.ScalarSelect`.
"""
self.element = self.element.where(crit)
def self_group(self, **kwargs):
return self
class Exists(UnaryExpression):
"""Represent an ``EXISTS`` clause.
"""
__visit_name__ = UnaryExpression.__visit_name__
_from_objects = []
def __init__(self, *args, **kwargs):
"""Construct a new :class:`.Exists` against an existing
:class:`.Select` object.
Calling styles are of the following forms::
# use on an existing select()
s = select([table.c.col1]).where(table.c.col2==5)
s = exists(s)
# construct a select() at once
exists(['*'], **select_arguments).where(criterion)
# columns argument is optional, generates "EXISTS (SELECT *)"
# by default.
exists().where(table.c.col2==5)
"""
if args and isinstance(args[0], (SelectBase, ScalarSelect)):
s = args[0]
else:
if not args:
args = ([literal_column('*')],)
s = Select(*args, **kwargs).as_scalar().self_group()
UnaryExpression.__init__(self, s, operator=operators.exists,
type_=type_api.BOOLEANTYPE,
wraps_column_expression=True)
def select(self, whereclause=None, **params):
return Select([self], whereclause, **params)
def correlate(self, *fromclause):
e = self._clone()
e.element = self.element.correlate(*fromclause).self_group()
return e
def correlate_except(self, *fromclause):
e = self._clone()
e.element = self.element.correlate_except(*fromclause).self_group()
return e
def select_from(self, clause):
"""return a new :class:`.Exists` construct, applying the given
expression to the :meth:`.Select.select_from` method of the select
statement contained.
"""
e = self._clone()
e.element = self.element.select_from(clause).self_group()
return e
def where(self, clause):
"""return a new exists() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
"""
e = self._clone()
e.element = self.element.where(clause).self_group()
return e
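# Illustrative sketch, not part of the original module: embedding an Exists in
# an enclosing SELECT. "users" and "addresses" are assumed Table objects with
# the referenced columns.
def _example_exists(users, addresses):
    # renders roughly: SELECT users.name FROM users
    #                  WHERE EXISTS (SELECT * FROM addresses
    #                                WHERE addresses.user_id = users.id)
    criterion = Exists().where(addresses.c.user_id == users.c.id)
    return Select([users.c.name], criterion)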
class TextAsFrom(SelectBase):
"""Wrap a :class:`.TextClause` construct within a :class:`.SelectBase`
interface.
This allows the :class:`.TextClause` object to gain a ``.c`` collection
and other FROM-like capabilities such as :meth:`.FromClause.alias`,
:meth:`.SelectBase.cte`, etc.
The :class:`.TextAsFrom` construct is produced via the
:meth:`.TextClause.columns` method - see that method for details.
.. versionadded:: 0.9.0
.. seealso::
:func:`.text`
:meth:`.TextClause.columns`
"""
__visit_name__ = "text_as_from"
_textual = True
def __init__(self, text, columns, positional=False):
self.element = text
self.column_args = columns
self.positional = positional
@property
def _bind(self):
return self.element._bind
@_generative
def bindparams(self, *binds, **bind_as_values):
self.element = self.element.bindparams(*binds, **bind_as_values)
def _populate_column_collection(self):
for c in self.column_args:
c._make_proxy(self)
def _copy_internals(self, clone=_clone, **kw):
self._reset_exported()
self.element = clone(self.element, **kw)
def _scalar_type(self):
return self.column_args[0].type
class AnnotatedFromClause(Annotated):
def __init__(self, element, values):
# force FromClause to generate their internal
# collections into __dict__
element.c
Annotated.__init__(self, element, values)
|
carolineLe/miasm | refs/heads/master | miasm/ir/__init__.py | 13 | "Intermediate representation methods"
|
proppy/appengine-try-python-flask | refs/heads/master | lib/werkzeug/http.py | 317 | # -*- coding: utf-8 -*-
"""
werkzeug.http
~~~~~~~~~~~~~
Werkzeug comes with a bunch of utilities that help Werkzeug to deal with
HTTP data. Most of the classes and functions provided by this module are
used by the wrappers, but they are useful on their own, too, especially if
the response and request objects are not used.
This covers some of the more HTTP centric features of WSGI, some other
utilities such as cookie handling are documented in the `werkzeug.utils`
module.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
from time import time, gmtime
try:
from email.utils import parsedate_tz
except ImportError: # pragma: no cover
from email.Utils import parsedate_tz
try:
from urllib2 import parse_http_list as _parse_list_header
except ImportError: # pragma: no cover
from urllib.request import parse_http_list as _parse_list_header
from datetime import datetime, timedelta
from hashlib import md5
import base64
from werkzeug._internal import _cookie_quote, _make_cookie_domain, \
_cookie_parse_impl
from werkzeug._compat import to_unicode, iteritems, text_type, \
string_types, try_coerce_native, to_bytes, PY2, \
integer_types
# incorrect
_cookie_charset = 'latin1'
_accept_re = re.compile(r'([^\s;,]+)(?:[^,]*?;\s*q=(\d*(?:\.\d+)?))?')
_token_chars = frozenset("!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
'^_`abcdefghijklmnopqrstuvwxyz|~')
_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)')
_unsafe_header_chars = set('()<>@,;:\"/[]?={} \t')
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'
_option_header_piece_re = re.compile(r';\s*(%s|[^\s;=]+)\s*(?:=\s*(%s|[^;]+))?\s*' %
(_quoted_string_re, _quoted_string_re))
_entity_headers = frozenset([
'allow', 'content-encoding', 'content-language', 'content-length',
'content-location', 'content-md5', 'content-range', 'content-type',
'expires', 'last-modified'
])
_hop_by_hop_headers = frozenset([
'connection', 'keep-alive', 'proxy-authenticate',
'proxy-authorization', 'te', 'trailer', 'transfer-encoding',
'upgrade'
])
HTTP_STATUS_CODES = {
100: 'Continue',
101: 'Switching Protocols',
102: 'Processing',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
207: 'Multi Status',
226: 'IM Used', # see RFC 3229
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required', # unused
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
418: 'I\'m a teapot', # see RFC 2324
422: 'Unprocessable Entity',
423: 'Locked',
424: 'Failed Dependency',
426: 'Upgrade Required',
428: 'Precondition Required', # see RFC 6585
429: 'Too Many Requests',
431: 'Request Header Fields Too Large',
449: 'Retry With', # proprietary MS extension
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported',
507: 'Insufficient Storage',
510: 'Not Extended'
}
def wsgi_to_bytes(data):
"""coerce wsgi unicode represented bytes to real ones
"""
if isinstance(data, bytes):
return data
return data.encode('latin1') #XXX: utf8 fallback?
def bytes_to_wsgi(data):
assert isinstance(data, bytes), 'data must be bytes'
if isinstance(data, str):
return data
else:
return data.decode('latin1')
def quote_header_value(value, extra_chars='', allow_token=True):
"""Quote a header value if necessary.
.. versionadded:: 0.5
:param value: the value to quote.
:param extra_chars: a list of extra characters to skip quoting.
:param allow_token: if this is enabled token values are returned
unchanged.
"""
if isinstance(value, bytes):
value = bytes_to_wsgi(value)
value = str(value)
if allow_token:
token_chars = _token_chars | set(extra_chars)
if set(value).issubset(token_chars):
return value
return '"%s"' % value.replace('\\', '\\\\').replace('"', '\\"')
def unquote_header_value(value, is_filename=False):
r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
.. versionadded:: 0.5
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
# if this is a filename and the starting characters look like
# a UNC path, then just return the value without quotes. Using the
# replace sequence below on a UNC path has the effect of turning
# the leading double slash into a single slash and then
# _fix_ie_filename() doesn't work correctly. See #458.
if not is_filename or value[:2] != '\\\\':
return value.replace('\\\\', '\\').replace('\\"', '"')
return value
def dump_options_header(header, options):
"""The reverse function to :func:`parse_options_header`.
:param header: the header to dump
:param options: a dict of options to append.
"""
segments = []
if header is not None:
segments.append(header)
for key, value in iteritems(options):
if value is None:
segments.append(key)
else:
segments.append('%s=%s' % (key, quote_header_value(value)))
return '; '.join(segments)
def dump_header(iterable, allow_token=True):
"""Dump an HTTP header again. This is the reversal of
:func:`parse_list_header`, :func:`parse_set_header` and
:func:`parse_dict_header`. This also quotes strings that include an
equals sign unless you pass it as dict of key, value pairs.
>>> dump_header({'foo': 'bar baz'})
'foo="bar baz"'
>>> dump_header(('foo', 'bar baz'))
'foo, "bar baz"'
:param iterable: the iterable or dict of values to quote.
:param allow_token: if set to `False` tokens as values are disallowed.
See :func:`quote_header_value` for more details.
"""
if isinstance(iterable, dict):
items = []
for key, value in iteritems(iterable):
if value is None:
items.append(key)
else:
items.append('%s=%s' % (
key,
quote_header_value(value, allow_token=allow_token)
))
else:
items = [quote_header_value(x, allow_token=allow_token)
for x in iterable]
return ', '.join(items)
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Quotes are removed automatically after parsing.
It basically works like :func:`parse_set_header` just that items
may appear multiple times and case sensitivity is preserved.
The return value is a standard :class:`list`:
>>> parse_list_header('token, "quoted value"')
['token', 'quoted value']
To create a header from the :class:`list` again, use the
:func:`dump_header` function.
:param value: a string with a list header.
:return: :class:`list`
"""
result = []
for item in _parse_list_header(value):
if item[:1] == item[-1:] == '"':
item = unquote_header_value(item[1:-1])
result.append(item)
return result
def parse_dict_header(value, cls=dict):
"""Parse lists of key, value pairs as described by RFC 2068 Section 2 and
convert them into a python dict (or any other mapping object created from
    the type with a dict-like interface provided by the `cls` argument):
>>> d = parse_dict_header('foo="is a fish", bar="as well"')
>>> type(d) is dict
True
>>> sorted(d.items())
[('bar', 'as well'), ('foo', 'is a fish')]
If there is no value for a key it will be `None`:
>>> parse_dict_header('key_without_value')
{'key_without_value': None}
To create a header from the :class:`dict` again, use the
:func:`dump_header` function.
.. versionchanged:: 0.9
Added support for `cls` argument.
:param value: a string with a dict header.
:param cls: callable to use for storage of parsed results.
:return: an instance of `cls`
"""
result = cls()
if not isinstance(value, text_type):
#XXX: validate
value = bytes_to_wsgi(value)
for item in _parse_list_header(value):
if '=' not in item:
result[item] = None
continue
name, value = item.split('=', 1)
if value[:1] == value[-1:] == '"':
value = unquote_header_value(value[1:-1])
result[name] = value
return result
def parse_options_header(value):
"""Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('text/html; charset=utf8')
('text/html', {'charset': 'utf8'})
This should not be used to parse ``Cache-Control`` like headers that use
a slightly different format. For these headers use the
:func:`parse_dict_header` function.
.. versionadded:: 0.5
:param value: the header to parse.
:return: (str, options)
"""
def _tokenize(string):
for match in _option_header_piece_re.finditer(string):
key, value = match.groups()
key = unquote_header_value(key)
if value is not None:
value = unquote_header_value(value, key == 'filename')
yield key, value
if not value:
return '', {}
parts = _tokenize(';' + value)
name = next(parts)[0]
extra = dict(parts)
return name, extra
def parse_accept_header(value, cls=None):
"""Parses an HTTP Accept-* header. This does not implement a complete
valid algorithm but one that supports at least value and quality
extraction.
Returns a new :class:`Accept` object (basically a list of ``(value, quality)``
tuples sorted by the quality with some additional accessor methods).
The second parameter can be a subclass of :class:`Accept` that is created
with the parsed values and returned.
:param value: the accept header string to be parsed.
:param cls: the wrapper class for the return value (can be
:class:`Accept` or a subclass thereof)
:return: an instance of `cls`.
"""
if cls is None:
cls = Accept
if not value:
return cls(None)
result = []
for match in _accept_re.finditer(value):
quality = match.group(2)
if not quality:
quality = 1
else:
quality = max(min(float(quality), 1), 0)
result.append((match.group(1), quality))
return cls(result)
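# Illustrative sketch, not part of the original module: quality values are
# normalised to floats and the resulting Accept object is sorted by quality.
def _example_parse_accept_header():
    accept = parse_accept_header('text/html,application/xml;q=0.9,*/*;q=0.1')
    # accept[0] == ('text/html', 1); accept.best == 'text/html'
    return accept.best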
def parse_cache_control_header(value, on_update=None, cls=None):
"""Parse a cache control header. The RFC differs between response and
request cache control, this method does not. It's your responsibility
to not use the wrong control statements.
.. versionadded:: 0.5
The `cls` was added. If not specified an immutable
:class:`~werkzeug.datastructures.RequestCacheControl` is returned.
:param value: a cache control header to be parsed.
:param on_update: an optional callable that is called every time a value
on the :class:`~werkzeug.datastructures.CacheControl`
object is changed.
:param cls: the class for the returned object. By default
:class:`~werkzeug.datastructures.RequestCacheControl` is used.
:return: a `cls` object.
"""
if cls is None:
cls = RequestCacheControl
if not value:
return cls(None, on_update)
return cls(parse_dict_header(value), on_update)
def parse_set_header(value, on_update=None):
"""Parse a set-like header and return a
:class:`~werkzeug.datastructures.HeaderSet` object:
>>> hs = parse_set_header('token, "quoted value"')
The return value is an object that treats the items case-insensitively
and keeps the order of the items:
>>> 'TOKEN' in hs
True
>>> hs.index('quoted value')
1
>>> hs
HeaderSet(['token', 'quoted value'])
To create a header from the :class:`HeaderSet` again, use the
:func:`dump_header` function.
:param value: a set header to be parsed.
:param on_update: an optional callable that is called every time a
value on the :class:`~werkzeug.datastructures.HeaderSet`
object is changed.
:return: a :class:`~werkzeug.datastructures.HeaderSet`
"""
if not value:
return HeaderSet(None, on_update)
return HeaderSet(parse_list_header(value), on_update)
def parse_authorization_header(value):
"""Parse an HTTP basic/digest authorization header transmitted by the web
browser. The return value is either `None` if the header was invalid or
not given, otherwise an :class:`~werkzeug.datastructures.Authorization`
object.
:param value: the authorization header to parse.
:return: a :class:`~werkzeug.datastructures.Authorization` object or `None`.
"""
if not value:
return
value = wsgi_to_bytes(value)
try:
auth_type, auth_info = value.split(None, 1)
auth_type = auth_type.lower()
except ValueError:
return
if auth_type == b'basic':
try:
username, password = base64.b64decode(auth_info).split(b':', 1)
except Exception as e:
return
return Authorization('basic', {'username': bytes_to_wsgi(username),
'password': bytes_to_wsgi(password)})
elif auth_type == b'digest':
auth_map = parse_dict_header(auth_info)
for key in 'username', 'realm', 'nonce', 'uri', 'response':
if not key in auth_map:
return
if 'qop' in auth_map:
if not auth_map.get('nc') or not auth_map.get('cnonce'):
return
return Authorization('digest', auth_map)
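# Illustrative sketch, not part of the original module: decoding a Basic
# credential ("dXNlcjpwYXNz" is the base64 form of "user:pass").
def _example_parse_authorization_header():
    auth = parse_authorization_header('Basic dXNlcjpwYXNz')
    # auth.username == 'user' and auth.password == 'pass'; a missing or
    # malformed header would yield None instead.
    return auth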
def parse_www_authenticate_header(value, on_update=None):
"""Parse an HTTP WWW-Authenticate header into a
:class:`~werkzeug.datastructures.WWWAuthenticate` object.
:param value: a WWW-Authenticate header to parse.
:param on_update: an optional callable that is called every time a value
on the :class:`~werkzeug.datastructures.WWWAuthenticate`
object is changed.
:return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object.
"""
if not value:
return WWWAuthenticate(on_update=on_update)
try:
auth_type, auth_info = value.split(None, 1)
auth_type = auth_type.lower()
except (ValueError, AttributeError):
return WWWAuthenticate(value.strip().lower(), on_update=on_update)
return WWWAuthenticate(auth_type, parse_dict_header(auth_info),
on_update)
def parse_if_range_header(value):
"""Parses an if-range header which can be an etag or a date. Returns
a :class:`~werkzeug.datastructures.IfRange` object.
.. versionadded:: 0.7
"""
if not value:
return IfRange()
date = parse_date(value)
if date is not None:
return IfRange(date=date)
# drop weakness information
return IfRange(unquote_etag(value)[0])
def parse_range_header(value, make_inclusive=True):
"""Parses a range header into a :class:`~werkzeug.datastructures.Range`
object. If the header is missing or malformed `None` is returned.
`ranges` is a list of ``(start, stop)`` tuples where the ranges are
non-inclusive.
.. versionadded:: 0.7
"""
if not value or '=' not in value:
return None
ranges = []
last_end = 0
units, rng = value.split('=', 1)
units = units.strip().lower()
for item in rng.split(','):
item = item.strip()
if '-' not in item:
return None
if item.startswith('-'):
if last_end < 0:
return None
begin = int(item)
end = None
last_end = -1
elif '-' in item:
begin, end = item.split('-', 1)
begin = int(begin)
if begin < last_end or last_end < 0:
return None
if end:
end = int(end) + 1
if begin >= end:
return None
else:
end = None
last_end = end
ranges.append((begin, end))
return Range(units, ranges)
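# Illustrative sketch, not part of the original module: parsed ranges use
# exclusive stop values and None for an open end.
def _example_parse_range_header():
    rng = parse_range_header('bytes=0-499,1000-')
    # rng.units == 'bytes'; rng.ranges == [(0, 500), (1000, None)]
    return rng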
def parse_content_range_header(value, on_update=None):
"""Parses a range header into a
:class:`~werkzeug.datastructures.ContentRange` object or `None` if
parsing is not possible.
.. versionadded:: 0.7
:param value: a content range header to be parsed.
:param on_update: an optional callable that is called every time a value
on the :class:`~werkzeug.datastructures.ContentRange`
object is changed.
"""
if value is None:
return None
try:
units, rangedef = (value or '').strip().split(None, 1)
except ValueError:
return None
if '/' not in rangedef:
return None
rng, length = rangedef.split('/', 1)
if length == '*':
length = None
elif length.isdigit():
length = int(length)
else:
return None
if rng == '*':
return ContentRange(units, None, None, length, on_update=on_update)
elif '-' not in rng:
return None
start, stop = rng.split('-', 1)
try:
start = int(start)
stop = int(stop) + 1
except ValueError:
return None
if is_byte_range_valid(start, stop, length):
return ContentRange(units, start, stop, length, on_update=on_update)
def quote_etag(etag, weak=False):
"""Quote an etag.
:param etag: the etag to quote.
:param weak: set to `True` to tag it "weak".
"""
if '"' in etag:
raise ValueError('invalid etag')
etag = '"%s"' % etag
if weak:
etag = 'w/' + etag
return etag
def unquote_etag(etag):
"""Unquote a single etag:
>>> unquote_etag('w/"bar"')
('bar', True)
>>> unquote_etag('"bar"')
('bar', False)
:param etag: the etag identifier to unquote.
:return: a ``(etag, weak)`` tuple.
"""
if not etag:
return None, None
etag = etag.strip()
weak = False
if etag[:2] in ('w/', 'W/'):
weak = True
etag = etag[2:]
if etag[:1] == etag[-1:] == '"':
etag = etag[1:-1]
return etag, weak
def parse_etags(value):
"""Parse an etag header.
:param value: the tag header to parse
:return: an :class:`~werkzeug.datastructures.ETags` object.
"""
if not value:
return ETags()
strong = []
weak = []
end = len(value)
pos = 0
while pos < end:
match = _etag_re.match(value, pos)
if match is None:
break
is_weak, quoted, raw = match.groups()
if raw == '*':
return ETags(star_tag=True)
elif quoted:
raw = quoted
if is_weak:
weak.append(raw)
else:
strong.append(raw)
pos = match.end()
return ETags(strong, weak)
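# Illustrative sketch, not part of the original module: strong and weak tags
# are kept apart in the resulting ETags object.
def _example_parse_etags():
    etags = parse_etags('"xyzzy", W/"qwerty"')
    # 'xyzzy' in etags is True (strong match); etags.is_weak('qwerty') is True
    return etags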
def generate_etag(data):
"""Generate an etag for some data."""
return md5(data).hexdigest()
def parse_date(value):
"""Parse one of the following date formats into a datetime object:
.. sourcecode:: text
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
If parsing fails the return value is `None`.
:param value: a string with a supported date format.
:return: a :class:`datetime.datetime` object.
"""
if value:
t = parsedate_tz(value.strip())
if t is not None:
try:
year = t[0]
# unfortunately that function does not tell us if two digit
# years were part of the string, or if they were prefixed
# with two zeroes. So what we do is to assume that 69-99
# refer to 1900, and everything below to 2000
if year >= 0 and year <= 68:
year += 2000
elif year >= 69 and year <= 99:
year += 1900
return datetime(*((year,) + t[1:7])) - \
timedelta(seconds=t[-1] or 0)
except (ValueError, OverflowError):
return None
def _dump_date(d, delim):
"""Used for `http_date` and `cookie_date`."""
if d is None:
d = gmtime()
elif isinstance(d, datetime):
d = d.utctimetuple()
elif isinstance(d, (integer_types, float)):
d = gmtime(d)
return '%s, %02d%s%s%s%s %02d:%02d:%02d GMT' % (
('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')[d.tm_wday],
d.tm_mday, delim,
('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
'Oct', 'Nov', 'Dec')[d.tm_mon - 1],
delim, str(d.tm_year), d.tm_hour, d.tm_min, d.tm_sec
)
def cookie_date(expires=None):
"""Formats the time to ensure compatibility with Netscape's cookie
standard.
    Accepts a floating point number expressed in seconds since the epoch, a
datetime object or a timetuple. All times in UTC. The :func:`parse_date`
function can be used to parse such a date.
Outputs a string in the format ``Wdy, DD-Mon-YYYY HH:MM:SS GMT``.
:param expires: If provided that date is used, otherwise the current.
"""
return _dump_date(expires, '-')
def http_date(timestamp=None):
"""Formats the time to match the RFC1123 date format.
    Accepts a floating point number expressed in seconds since the epoch, a
datetime object or a timetuple. All times in UTC. The :func:`parse_date`
function can be used to parse such a date.
Outputs a string in the format ``Wdy, DD Mon YYYY HH:MM:SS GMT``.
:param timestamp: If provided that date is used, otherwise the current.
"""
return _dump_date(timestamp, ' ')
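# Illustrative sketch, not part of the original module: http_date() and
# parse_date() round-trip through the RFC 1123 representation.
def _example_http_date():
    header = http_date(0)  # 'Thu, 01 Jan 1970 00:00:00 GMT'
    return parse_date(header)  # datetime(1970, 1, 1, 0, 0)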
def is_resource_modified(environ, etag=None, data=None, last_modified=None):
"""Convenience method for conditional requests.
:param environ: the WSGI environment of the request to be checked.
:param etag: the etag for the response for comparison.
:param data: or alternatively the data of the response to automatically
generate an etag using :func:`generate_etag`.
:param last_modified: an optional date of the last modification.
:return: `True` if the resource was modified, otherwise `False`.
"""
if etag is None and data is not None:
etag = generate_etag(data)
elif data is not None:
raise TypeError('both data and etag given')
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
return False
unmodified = False
if isinstance(last_modified, string_types):
last_modified = parse_date(last_modified)
# ensure that microsecond is zero because the HTTP spec does not transmit
# that either and we might have some false positives. See issue #39
if last_modified is not None:
last_modified = last_modified.replace(microsecond=0)
modified_since = parse_date(environ.get('HTTP_IF_MODIFIED_SINCE'))
if modified_since and last_modified and last_modified <= modified_since:
unmodified = True
if etag:
if_none_match = parse_etags(environ.get('HTTP_IF_NONE_MATCH'))
if if_none_match:
unmodified = if_none_match.contains_raw(etag)
return not unmodified
def remove_entity_headers(headers, allowed=('expires', 'content-location')):
"""Remove all entity headers from a list or :class:`Headers` object. This
operation works in-place. `Expires` and `Content-Location` headers are
by default not removed. The reason for this is :rfc:`2616` section
10.3.5 which specifies some entity headers that should be sent.
.. versionchanged:: 0.5
added `allowed` parameter.
:param headers: a list or :class:`Headers` object.
:param allowed: a list of headers that should still be allowed even though
they are entity headers.
"""
allowed = set(x.lower() for x in allowed)
headers[:] = [(key, value) for key, value in headers if
not is_entity_header(key) or key.lower() in allowed]
def remove_hop_by_hop_headers(headers):
"""Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or
:class:`Headers` object. This operation works in-place.
.. versionadded:: 0.5
:param headers: a list or :class:`Headers` object.
"""
headers[:] = [(key, value) for key, value in headers if
not is_hop_by_hop_header(key)]
def is_entity_header(header):
"""Check if a header is an entity header.
.. versionadded:: 0.5
:param header: the header to test.
:return: `True` if it's an entity header, `False` otherwise.
"""
return header.lower() in _entity_headers
def is_hop_by_hop_header(header):
"""Check if a header is an HTTP/1.1 "Hop-by-Hop" header.
.. versionadded:: 0.5
:param header: the header to test.
:return: `True` if it's an entity header, `False` otherwise.
"""
return header.lower() in _hop_by_hop_headers
def parse_cookie(header, charset='utf-8', errors='replace', cls=None):
"""Parse a cookie. Either from a string or WSGI environ.
    By default encoding errors are replaced. If you want a different behavior
    you can set `errors` to ``'ignore'`` or ``'strict'``. In strict mode a
:exc:`HTTPUnicodeError` is raised.
.. versionchanged:: 0.5
This function now returns a :class:`TypeConversionDict` instead of a
regular dict. The `cls` parameter was added.
:param header: the header to be used to parse the cookie. Alternatively
this can be a WSGI environment.
:param charset: the charset for the cookie values.
:param errors: the error behavior for the charset decoding.
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`TypeConversionDict` is
used.
"""
if isinstance(header, dict):
header = header.get('HTTP_COOKIE', '')
elif header is None:
header = ''
    # If the value is a unicode string it's mangled through latin1. This
    # is done because, per PEP 3333, on Python 3 all headers are assumed
    # to be latin1, which however is incorrect for cookies, which are sent
    # in page encoding. As a result we encode the value back to latin1
    # bytes here before parsing.
if isinstance(header, text_type):
header = header.encode('latin1', 'replace')
if cls is None:
cls = TypeConversionDict
def _parse_pairs():
for key, val in _cookie_parse_impl(header):
key = to_unicode(key, charset, errors, allow_none_charset=True)
val = to_unicode(val, charset, errors, allow_none_charset=True)
yield try_coerce_native(key), val
return cls(_parse_pairs())
def dump_cookie(key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False,
charset='utf-8', sync_expires=True):
"""Creates a new Set-Cookie header without the ``Set-Cookie`` prefix
The parameters are the same as in the cookie Morsel object in the
Python standard library but it accepts unicode data, too.
On Python 3 the return value of this function will be a unicode
string, on Python 2 it will be a native string. In both cases the
return value is usually restricted to ascii as the vast majority of
values are properly escaped, but that is no guarantee. If a unicode
string is returned it's tunneled through latin1 as required by
PEP 3333.
The return value is not ASCII safe if the key contains unicode
characters. This is technically against the specification but
happens in the wild. It's strongly recommended to not use
non-ASCII values for the keys.
:param max_age: should be a number of seconds, or `None` (default) if
the cookie should last only as long as the client's
browser session. Additionally `timedelta` objects
are accepted, too.
:param expires: should be a `datetime` object or unix timestamp.
:param path: limits the cookie to a given path, per default it will
span the whole domain.
:param domain: Use this if you want to set a cross-domain cookie. For
example, ``domain=".example.com"`` will set a cookie
that is readable by the domain ``www.example.com``,
``foo.example.com`` etc. Otherwise, a cookie will only
be readable by the domain that set it.
:param secure: The cookie will only be available via HTTPS
:param httponly: disallow JavaScript to access the cookie. This is an
extension to the cookie standard and probably not
supported by all browsers.
:param charset: the encoding for unicode values.
:param sync_expires: automatically set expires if max_age is defined
but expires not.
"""
key = to_bytes(key, charset)
value = to_bytes(value, charset)
if path is not None:
path = iri_to_uri(path, charset)
domain = _make_cookie_domain(domain)
if isinstance(max_age, timedelta):
max_age = (max_age.days * 60 * 60 * 24) + max_age.seconds
if expires is not None:
if not isinstance(expires, string_types):
expires = cookie_date(expires)
elif max_age is not None and sync_expires:
expires = to_bytes(cookie_date(time() + max_age))
buf = [key + b'=' + _cookie_quote(value)]
# XXX: In theory all of these parameters that are not marked with `None`
# should be quoted. Because stdlib did not quote it before I did not
# want to introduce quoting there now.
for k, v, q in ((b'Domain', domain, True),
(b'Expires', expires, False,),
(b'Max-Age', max_age, False),
(b'Secure', secure, None),
(b'HttpOnly', httponly, None),
(b'Path', path, False)):
if q is None:
if v:
buf.append(k)
continue
if v is None:
continue
tmp = bytearray(k)
if not isinstance(v, (bytes, bytearray)):
v = to_bytes(text_type(v), charset)
if q:
v = _cookie_quote(v)
tmp += b'=' + v
buf.append(bytes(tmp))
# The return value will be an incorrectly encoded latin1 header on
# Python 3 for consistency with the headers object and a bytestring
# on Python 2 because that's how the API makes more sense.
rv = b'; '.join(buf)
if not PY2:
rv = rv.decode('latin1')
return rv
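# Illustrative sketch, not part of the original module: a minimal Set-Cookie
# value; Expires/Max-Age are only emitted when requested.
def _example_dump_cookie():
    # returns 'session=abc123; HttpOnly; Path=/'
    return dump_cookie('session', 'abc123', httponly=True)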
def is_byte_range_valid(start, stop, length):
"""Checks if a given byte content range is valid for the given length.
.. versionadded:: 0.7
"""
if (start is None) != (stop is None):
return False
elif start is None:
return length is None or length >= 0
elif length is None:
return 0 <= start < stop
elif start >= stop:
return False
return 0 <= start < length
# circular dependency fun
from werkzeug.datastructures import Accept, HeaderSet, ETags, Authorization, \
WWWAuthenticate, TypeConversionDict, IfRange, Range, ContentRange, \
RequestCacheControl
# DEPRECATED
# backwards compatible imports
from werkzeug.datastructures import MIMEAccept, CharsetAccept, \
LanguageAccept, Headers
from werkzeug.urls import iri_to_uri
|
SimtterCom/gyp | refs/heads/master | test/link-dependency/gyptest-link-dependency.py | 165 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that a target marked as 'link_dependency==1' isn't being pulled into
the 'none' target's dependency (which would otherwise lead to a dependency
cycle in ninja).
"""
import TestGyp
# See https://codereview.chromium.org/177043010/#msg15 for why this doesn't
# work with cmake.
test = TestGyp.TestGyp(formats=['!cmake'])
test.run_gyp('test.gyp')
test.build('test.gyp', 'main')
# If running gyp worked, all is well.
test.pass_test()
|
SlimRemix/android_external_chromium_org | refs/heads/lp5.1 | third_party/simplejson/__init__.py | 175 | r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
    ...     raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.6.2'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict', 'simple_first',
]
__author__ = 'Bob Ippolito <[email protected]>'
from decimal import Decimal
from decoder import JSONDecoder, JSONDecodeError
from encoder import JSONEncoder, JSONEncoderForHTML
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from simplejson._speedups import make_encoder
return make_encoder
except ImportError:
return None
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
bigint_as_string=False,
item_sort_key=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
**kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise. Note that this is still a
lossy operation that will not round-trip correctly and should be used
sparingly.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array
and not bigint_as_string and not item_sort_key and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
**kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
**kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
    If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *bigint_as_string* is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
    in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
**kw).encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
import simplejson.decoder as dec
import simplejson.encoder as enc
import simplejson.scanner as scan
c_make_encoder = _import_c_make_encoder()
if enabled:
dec.scanstring = dec.c_scanstring or dec.py_scanstring
enc.c_make_encoder = c_make_encoder
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
enc.py_encode_basestring_ascii)
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
else:
dec.scanstring = dec.py_scanstring
enc.c_make_encoder = None
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
scan.make_scanner = scan.py_make_scanner
dec.make_scanner = scan.make_scanner
global _default_decoder
_default_decoder = JSONDecoder(
encoding=None,
object_hook=None,
object_pairs_hook=None,
)
global _default_encoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
def simple_first(kv):
"""Helper function to pass to item_sort_key to sort simple
elements to the top, then container elements.
"""
return (isinstance(kv[1], (list, dict, tuple)), kv[0])
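# Illustrative sketch, not part of the original module: using simple_first as
# an item_sort_key puts scalar members before containers.
def _example_simple_first():
    # returns '{"b": 1, "a": [1, 2]}' -- "b" comes first because its value
    # is not a container
    return dumps({'a': [1, 2], 'b': 1}, item_sort_key=simple_first)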
|
brendanwhitfield/python-OBD | refs/heads/master | obd/asynchronous.py | 1 | # -*- coding: utf-8 -*-
########################################################################
# #
# python-OBD: A python OBD-II serial module derived from pyobd #
# #
# Copyright 2004 Donour Sizemore ([email protected]) #
# Copyright 2009 Secons Ltd. (www.obdtester.com) #
# Copyright 2009 Peter J. Creath #
# Copyright 2016 Brendan Whitfield (brendan-w.com) #
# #
########################################################################
# #
# async.py #
# #
# This file is part of python-OBD (a derivative of pyOBD) #
# #
# python-OBD is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# python-OBD is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with python-OBD. If not, see <http://www.gnu.org/licenses/>. #
# #
########################################################################
import time
import threading
import logging
from .OBDResponse import OBDResponse
from .obd import OBD
logger = logging.getLogger(__name__)
class Async(OBD):
"""
    Class representing an OBD-II connection with its assorted commands/sensors
Specialized for asynchronous value reporting.
"""
def __init__(self, portstr=None, baudrate=None, protocol=None, fast=True,
timeout=0.1, check_voltage=True, delay_cmds=0.25):
super(Async, self).__init__(portstr, baudrate, protocol, fast,
timeout, check_voltage)
self.__commands = {} # key = OBDCommand, value = Response
self.__callbacks = {} # key = OBDCommand, value = list of Functions
self.__thread = None
self.__running = False
self.__was_running = False # used with __enter__() and __exit__()
self.__delay_cmds = delay_cmds
@property
def running(self):
return self.__running
def start(self):
""" Starts the async update loop """
if not self.is_connected():
logger.info("Async thread not started because no connection was made")
return
if len(self.__commands) == 0:
logger.info("Async thread not started because no commands were registered")
return
if self.__thread is None:
logger.info("Starting async thread")
self.__running = True
self.__thread = threading.Thread(target=self.run)
self.__thread.daemon = True
self.__thread.start()
def stop(self):
""" Stops the async update loop """
if self.__thread is not None:
logger.info("Stopping async thread...")
self.__running = False
self.__thread.join()
self.__thread = None
logger.info("Async thread stopped")
def paused(self):
"""
A stub function for semantic purposes only
enables code such as:
        with connection.paused() as was_running:
...
"""
return self
def __enter__(self):
"""
pauses the async loop,
while recording the old state
"""
self.__was_running = self.__running
self.stop()
return self.__was_running
def __exit__(self, exc_type, exc_value, traceback):
"""
resumes the update loop if it was running
when __enter__ was called
"""
if not self.__running and self.__was_running:
self.start()
return False # don't suppress any exceptions
def close(self):
""" Closes the connection """
self.stop()
super(Async, self).close()
def watch(self, c, callback=None, force=False):
"""
Subscribes the given command for continuous updating. Once subscribed,
query() will return that command's latest value. Optional callbacks can
be given, which will be fired upon every new value.
"""
# the dict shouldn't be changed while the daemon thread is iterating
if self.__running:
logger.warning("Can't watch() while running, please use stop()")
else:
if not force and not self.test_cmd(c):
# self.test_cmd() will print warnings
return
# new command being watched, store the command
if c not in self.__commands:
logger.info("Watching command: %s" % str(c))
self.__commands[c] = OBDResponse() # give it an initial value
self.__callbacks[c] = [] # create an empty list
# if a callback was given, push it
if hasattr(callback, "__call__") and (callback not in self.__callbacks[c]):
logger.info("subscribing callback for command: %s" % str(c))
self.__callbacks[c].append(callback)
def unwatch(self, c, callback=None):
"""
Unsubscribes a specific command (and optionally, a specific callback)
from being updated. If no callback is specified, all callbacks for
that command are dropped.
"""
# the dict shouldn't be changed while the daemon thread is iterating
if self.__running:
logger.warning("Can't unwatch() while running, please use stop()")
else:
logger.info("Unwatching command: %s" % str(c))
if c in self.__commands:
# if a callback was specified, only remove the callback
if hasattr(callback, "__call__") and (callback in self.__callbacks[c]):
self.__callbacks[c].remove(callback)
# if no more callbacks are left, remove the command entirely
if len(self.__callbacks[c]) == 0:
self.__commands.pop(c, None)
else:
# no callback was specified, pop everything
self.__callbacks.pop(c, None)
self.__commands.pop(c, None)
def unwatch_all(self):
""" Unsubscribes all commands and callbacks from being updated """
# the dict shouldn't be changed while the daemon thread is iterating
if self.__running:
logger.warning("Can't unwatch_all() while running, please use stop()")
else:
logger.info("Unwatching all")
self.__commands = {}
self.__callbacks = {}
def query(self, c):
"""
Non-blocking query().
Only commands that have been watch()ed will return valid responses
"""
if c in self.__commands:
return self.__commands[c]
else:
return OBDResponse()
def run(self):
""" Daemon thread """
        # loop until the stop signal is received
while self.__running:
if len(self.__commands) > 0:
# loop over the requested commands, send, and collect the response
for c in self.__commands:
if not self.is_connected():
logger.info("Async thread terminated because device disconnected")
self.__running = False
self.__thread = None
return
# force, since commands are checked for support in watch()
r = super(Async, self).query(c, force=True)
# store the response
self.__commands[c] = r
# fire the callbacks, if there are any
for callback in self.__callbacks[c]:
callback(r)
time.sleep(self.__delay_cmds)
else:
time.sleep(0.25) # idle
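# Illustrative usage sketch (not part of this module): it shows the intended
# watch()/start()/query()/stop() flow described in the docstrings above. The
# names obd.Async, obd.commands.RPM and the print_rpm callback are assumptions
# about the public package layout, not definitions made here.
#
#   import obd
#
#   def print_rpm(response):
#       print(response.value)
#
#   connection = obd.Async()
#   connection.watch(obd.commands.RPM, callback=print_rpm)
#   connection.start()                            # daemon thread begins polling
#   latest = connection.query(obd.commands.RPM)   # non-blocking, latest value
#   connection.stop()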
|
ingadhoc/account-financial-tools | refs/heads/13.0 | account_ux/__init__.py | 16 | ##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from . import models
from . import wizards
|
xjzhou/PyMySQL | refs/heads/master | pymysql/tests/test_issues.py | 7 | import datetime
import time
import warnings
import sys
import pymysql
from pymysql.tests import base
import unittest2
try:
import imp
reload = imp.reload
except AttributeError:
pass
__all__ = ["TestOldIssues", "TestNewIssues", "TestGitHubIssues"]
class TestOldIssues(base.PyMySQLTestCase):
def test_issue_3(self):
""" undefined methods datetime_or_None, date_or_None """
conn = self.connections[0]
c = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue3")
c.execute("create table issue3 (d date, t time, dt datetime, ts timestamp)")
try:
c.execute("insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", (None, None, None, None))
c.execute("select d from issue3")
self.assertEqual(None, c.fetchone()[0])
c.execute("select t from issue3")
self.assertEqual(None, c.fetchone()[0])
c.execute("select dt from issue3")
self.assertEqual(None, c.fetchone()[0])
c.execute("select ts from issue3")
self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime))
finally:
c.execute("drop table issue3")
def test_issue_4(self):
""" can't retrieve TIMESTAMP fields """
conn = self.connections[0]
c = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue4")
c.execute("create table issue4 (ts timestamp)")
try:
c.execute("insert into issue4 (ts) values (now())")
c.execute("select ts from issue4")
self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime))
finally:
c.execute("drop table issue4")
def test_issue_5(self):
""" query on information_schema.tables fails """
con = self.connections[0]
cur = con.cursor()
cur.execute("select * from information_schema.tables")
def test_issue_6(self):
""" exception: TypeError: ord() expected a character, but string of length 0 found """
# ToDo: this test requires access to db 'mysql'.
kwargs = self.databases[0].copy()
kwargs['db'] = "mysql"
conn = pymysql.connect(**kwargs)
c = conn.cursor()
c.execute("select * from user")
conn.close()
def test_issue_8(self):
""" Primary Key and Index error when selecting data """
conn = self.connections[0]
c = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists test")
c.execute("""CREATE TABLE `test` (`station` int(10) NOT NULL DEFAULT '0', `dh`
datetime NOT NULL DEFAULT '2015-01-01 00:00:00', `echeance` int(1) NOT NULL
DEFAULT '0', `me` double DEFAULT NULL, `mo` double DEFAULT NULL, PRIMARY
KEY (`station`,`dh`,`echeance`)) ENGINE=MyISAM DEFAULT CHARSET=latin1;""")
try:
self.assertEqual(0, c.execute("SELECT * FROM test"))
c.execute("ALTER TABLE `test` ADD INDEX `idx_station` (`station`)")
self.assertEqual(0, c.execute("SELECT * FROM test"))
finally:
c.execute("drop table test")
def test_issue_9(self):
""" sets DeprecationWarning in Python 2.6 """
try:
reload(pymysql)
except DeprecationWarning:
self.fail()
def test_issue_13(self):
""" can't handle large result fields """
conn = self.connections[0]
cur = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
cur.execute("drop table if exists issue13")
try:
cur.execute("create table issue13 (t text)")
# ticket says 18k
size = 18*1024
cur.execute("insert into issue13 (t) values (%s)", ("x" * size,))
cur.execute("select t from issue13")
# use assertTrue so that obscenely huge error messages don't print
r = cur.fetchone()[0]
self.assertTrue("x" * size == r)
finally:
cur.execute("drop table issue13")
def test_issue_15(self):
""" query should be expanded before perform character encoding """
conn = self.connections[0]
c = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue15")
c.execute("create table issue15 (t varchar(32))")
try:
c.execute("insert into issue15 (t) values (%s)", (u'\xe4\xf6\xfc',))
c.execute("select t from issue15")
self.assertEqual(u'\xe4\xf6\xfc', c.fetchone()[0])
finally:
c.execute("drop table issue15")
def test_issue_16(self):
""" Patch for string and tuple escaping """
conn = self.connections[0]
c = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue16")
c.execute("create table issue16 (name varchar(32) primary key, email varchar(32))")
try:
c.execute("insert into issue16 (name, email) values ('pete', 'floydophone')")
c.execute("select email from issue16 where name=%s", ("pete",))
self.assertEqual("floydophone", c.fetchone()[0])
finally:
c.execute("drop table issue16")
@unittest2.skip("test_issue_17() requires a custom, legacy MySQL configuration and will not be run.")
def test_issue_17(self):
""" could not connect mysql use passwod """
conn = self.connections[0]
host = self.databases[0]["host"]
db = self.databases[0]["db"]
c = conn.cursor()
# grant access to a table to a user with a password
try:
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue17")
c.execute("create table issue17 (x varchar(32) primary key)")
c.execute("insert into issue17 (x) values ('hello, world!')")
c.execute("grant all privileges on %s.issue17 to 'issue17user'@'%%' identified by '1234'" % db)
conn.commit()
conn2 = pymysql.connect(host=host, user="issue17user", passwd="1234", db=db)
c2 = conn2.cursor()
c2.execute("select x from issue17")
self.assertEqual("hello, world!", c2.fetchone()[0])
finally:
c.execute("drop table issue17")
class TestNewIssues(base.PyMySQLTestCase):
def test_issue_34(self):
try:
pymysql.connect(host="localhost", port=1237, user="root")
self.fail()
except pymysql.OperationalError as e:
self.assertEqual(2003, e.args[0])
except Exception:
self.fail()
def test_issue_33(self):
conn = pymysql.connect(charset="utf8", **self.databases[0])
self.safe_create_table(conn, u'hei\xdfe',
u'create table hei\xdfe (name varchar(32))')
c = conn.cursor()
c.execute(u"insert into hei\xdfe (name) values ('Pi\xdfata')")
c.execute(u"select name from hei\xdfe")
self.assertEqual(u"Pi\xdfata", c.fetchone()[0])
@unittest2.skip("This test requires manual intervention")
def test_issue_35(self):
conn = self.connections[0]
c = conn.cursor()
print("sudo killall -9 mysqld within the next 10 seconds")
try:
c.execute("select sleep(10)")
self.fail()
except pymysql.OperationalError as e:
self.assertEqual(2013, e.args[0])
def test_issue_36(self):
# connection 0 is super user, connection 1 isn't
conn = self.connections[1]
c = conn.cursor()
c.execute("show processlist")
kill_id = None
for row in c.fetchall():
id = row[0]
info = row[7]
if info == "show processlist":
kill_id = id
break
self.assertEqual(kill_id, conn.thread_id())
# now nuke the connection
self.connections[0].kill(kill_id)
# make sure this connection has broken
try:
c.execute("show tables")
self.fail()
except Exception:
pass
c.close()
conn.close()
# check the process list from the other connection
try:
            # Wait since Travis-CI sometimes fails this test.
time.sleep(0.1)
c = self.connections[0].cursor()
c.execute("show processlist")
ids = [row[0] for row in c.fetchall()]
self.assertFalse(kill_id in ids)
finally:
del self.connections[1]
def test_issue_37(self):
conn = self.connections[0]
c = conn.cursor()
self.assertEqual(1, c.execute("SELECT @foo"))
self.assertEqual((None,), c.fetchone())
self.assertEqual(0, c.execute("SET @foo = 'bar'"))
c.execute("set @foo = 'bar'")
def test_issue_38(self):
conn = self.connections[0]
c = conn.cursor()
datum = "a" * 1024 * 1023 # reduced size for most default mysql installs
try:
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue38")
c.execute("create table issue38 (id integer, data mediumblob)")
c.execute("insert into issue38 values (1, %s)", (datum,))
finally:
c.execute("drop table issue38")
def disabled_test_issue_54(self):
conn = self.connections[0]
c = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue54")
big_sql = "select * from issue54 where "
big_sql += " and ".join("%d=%d" % (i,i) for i in range(0, 100000))
try:
c.execute("create table issue54 (id integer primary key)")
c.execute("insert into issue54 (id) values (7)")
c.execute(big_sql)
self.assertEqual(7, c.fetchone()[0])
finally:
c.execute("drop table issue54")
class TestGitHubIssues(base.PyMySQLTestCase):
def test_issue_66(self):
""" 'Connection' object has no attribute 'insert_id' """
conn = self.connections[0]
c = conn.cursor()
self.assertEqual(0, conn.insert_id())
try:
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists issue66")
c.execute("create table issue66 (id integer primary key auto_increment, x integer)")
c.execute("insert into issue66 (x) values (1)")
c.execute("insert into issue66 (x) values (1)")
self.assertEqual(2, conn.insert_id())
finally:
c.execute("drop table issue66")
def test_issue_79(self):
""" Duplicate field overwrites the previous one in the result of DictCursor """
conn = self.connections[0]
c = conn.cursor(pymysql.cursors.DictCursor)
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
c.execute("drop table if exists a")
c.execute("drop table if exists b")
c.execute("""CREATE TABLE a (id int, value int)""")
c.execute("""CREATE TABLE b (id int, value int)""")
a=(1,11)
b=(1,22)
try:
c.execute("insert into a values (%s, %s)", a)
c.execute("insert into b values (%s, %s)", b)
c.execute("SELECT * FROM a inner join b on a.id = b.id")
r = c.fetchall()[0]
self.assertEqual(r['id'], 1)
self.assertEqual(r['value'], 11)
self.assertEqual(r['b.value'], 22)
finally:
c.execute("drop table a")
c.execute("drop table b")
def test_issue_95(self):
""" Leftover trailing OK packet for "CALL my_sp" queries """
conn = self.connections[0]
cur = conn.cursor()
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
cur.execute("DROP PROCEDURE IF EXISTS `foo`")
cur.execute("""CREATE PROCEDURE `foo` ()
BEGIN
SELECT 1;
END""")
try:
cur.execute("""CALL foo()""")
cur.execute("""SELECT 1""")
self.assertEqual(cur.fetchone()[0], 1)
finally:
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
cur.execute("DROP PROCEDURE IF EXISTS `foo`")
def test_issue_114(self):
""" autocommit is not set after reconnecting with ping() """
conn = pymysql.connect(charset="utf8", **self.databases[0])
conn.autocommit(False)
c = conn.cursor()
c.execute("""select @@autocommit;""")
self.assertFalse(c.fetchone()[0])
conn.close()
conn.ping()
c.execute("""select @@autocommit;""")
self.assertFalse(c.fetchone()[0])
conn.close()
# Ensure autocommit() is still working
conn = pymysql.connect(charset="utf8", **self.databases[0])
c = conn.cursor()
c.execute("""select @@autocommit;""")
self.assertFalse(c.fetchone()[0])
conn.close()
conn.ping()
conn.autocommit(True)
c.execute("""select @@autocommit;""")
self.assertTrue(c.fetchone()[0])
conn.close()
def test_issue_175(self):
""" The number of fields returned by server is read in wrong way """
conn = self.connections[0]
cur = conn.cursor()
for length in (200, 300):
columns = ', '.join('c{0} integer'.format(i) for i in range(length))
sql = 'create table test_field_count ({0})'.format(columns)
try:
cur.execute(sql)
cur.execute('select * from test_field_count')
assert len(cur.description) == length
finally:
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
cur.execute('drop table if exists test_field_count')
def test_issue_321(self):
""" Test iterable as query argument. """
conn = pymysql.connect(charset="utf8", **self.databases[0])
self.safe_create_table(
conn, "issue321",
"create table issue321 (value_1 varchar(1), value_2 varchar(1))")
sql_insert = "insert into issue321 (value_1, value_2) values (%s, %s)"
sql_dict_insert = ("insert into issue321 (value_1, value_2) "
"values (%(value_1)s, %(value_2)s)")
sql_select = ("select * from issue321 where "
"value_1 in %s and value_2=%s")
data = [
[(u"a", ), u"\u0430"],
[[u"b"], u"\u0430"],
{"value_1": [[u"c"]], "value_2": u"\u0430"}
]
cur = conn.cursor()
self.assertEqual(cur.execute(sql_insert, data[0]), 1)
self.assertEqual(cur.execute(sql_insert, data[1]), 1)
self.assertEqual(cur.execute(sql_dict_insert, data[2]), 1)
self.assertEqual(
cur.execute(sql_select, [(u"a", u"b", u"c"), u"\u0430"]), 3)
self.assertEqual(cur.fetchone(), (u"a", u"\u0430"))
self.assertEqual(cur.fetchone(), (u"b", u"\u0430"))
self.assertEqual(cur.fetchone(), (u"c", u"\u0430"))
def test_issue_364(self):
""" Test mixed unicode/binary arguments in executemany. """
conn = pymysql.connect(charset="utf8", **self.databases[0])
self.safe_create_table(
conn, "issue364",
"create table issue364 (value_1 binary(3), value_2 varchar(3)) "
"engine=InnoDB default charset=utf8")
sql = "insert into issue364 (value_1, value_2) values (_binary%s, _binary%s)"
usql = u"insert into issue364 (value_1, value_2) values (_binary%s, _binary%s)"
values = [b"\x00\xff\x00", u"\xe4\xf6\xfc"]
# test single insert and select
cur = conn.cursor()
cur.execute(sql, args=values)
cur.execute("select * from issue364")
self.assertEqual(cur.fetchone(), tuple(values))
# test single insert unicode query
cur.execute(usql, args=values)
# test multi insert and select
cur.executemany(sql, args=(values, values, values))
cur.execute("select * from issue364")
for row in cur.fetchall():
self.assertEqual(row, tuple(values))
# test multi insert with unicode query
cur.executemany(usql, args=(values, values, values))
def test_issue_363(self):
""" Test binary / geometry types. """
conn = pymysql.connect(charset="utf8", **self.databases[0])
self.safe_create_table(
conn, "issue363",
"CREATE TABLE issue363 ( "
"id INTEGER PRIMARY KEY, geom LINESTRING NOT NULL, "
"SPATIAL KEY geom (geom)) "
"ENGINE=MyISAM default charset=utf8")
cur = conn.cursor()
# FYI - not sure of 5.7.0 version
if sys.version_info[0:2] >= (3,2) and self.mysql_server_is(conn, (5, 7, 0)):
with self.assertWarns(pymysql.err.Warning) as cm:
cur.execute("INSERT INTO issue363 (id, geom) VALUES ("
"1998, GeomFromText('LINESTRING(1.1 1.1,2.2 2.2)'))")
else:
cur.execute("INSERT INTO issue363 (id, geom) VALUES ("
"1998, GeomFromText('LINESTRING(1.1 1.1,2.2 2.2)'))")
# select WKT
if sys.version_info[0:2] >= (3,2) and self.mysql_server_is(conn, (5, 7, 0)):
with self.assertWarns(pymysql.err.Warning) as cm:
cur.execute("SELECT AsText(geom) FROM issue363")
else:
cur.execute("SELECT AsText(geom) FROM issue363")
row = cur.fetchone()
self.assertEqual(row, ("LINESTRING(1.1 1.1,2.2 2.2)", ))
# select WKB
if sys.version_info[0:2] >= (3,2) and self.mysql_server_is(conn, (5, 7, 0)):
with self.assertWarns(pymysql.err.Warning) as cm:
cur.execute("SELECT AsBinary(geom) FROM issue363")
else:
cur.execute("SELECT AsBinary(geom) FROM issue363")
row = cur.fetchone()
self.assertEqual(row,
(b"\x01\x02\x00\x00\x00\x02\x00\x00\x00"
b"\x9a\x99\x99\x99\x99\x99\xf1?"
b"\x9a\x99\x99\x99\x99\x99\xf1?"
b"\x9a\x99\x99\x99\x99\x99\x01@"
b"\x9a\x99\x99\x99\x99\x99\x01@", ))
# select internal binary
cur.execute("SELECT geom FROM issue363")
row = cur.fetchone()
# don't assert the exact internal binary value, as it could
# vary across implementations
self.assertTrue(isinstance(row[0], bytes))
|
lshain-android-source/external-chromium_org | refs/heads/master | build/android/pylib/chrome_test_server_spawner.py | 23 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
It's used to accept requests from the device to spawn and kill instances of the
chrome test server on the host.
"""
import BaseHTTPServer
import json
import logging
import os
import select
import struct
import subprocess
import sys
import threading
import time
import urlparse
import constants
import ports
from pylib.forwarder import Forwarder
# Paths that are needed to import necessary modules when launching a testserver.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
% (os.path.join(constants.DIR_SOURCE_ROOT, 'third_party'),
os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
'src'),
os.path.join(constants.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
os.path.join(constants.DIR_SOURCE_ROOT, 'sync', 'tools', 'testserver')))
SERVER_TYPES = {
'http': '',
'ftp': '-f',
'sync': '', # Sync uses its own script, and doesn't take a server type arg.
'tcpecho': '--tcp-echo',
'udpecho': '--udp-echo',
}
# The timeout (in seconds) of starting up the Python test server.
TEST_SERVER_STARTUP_TIMEOUT = 10
def _WaitUntil(predicate, max_attempts=5):
"""Blocks until the provided predicate (function) is true.
Returns:
Whether the provided predicate was satisfied once (before the timeout).
"""
sleep_time_sec = 0.025
for attempt in xrange(1, max_attempts):
if predicate():
return True
time.sleep(sleep_time_sec)
sleep_time_sec = min(1, sleep_time_sec * 2) # Don't wait more than 1 sec.
return False
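# Illustrative note (not in the original comments): with the default
# max_attempts=5 the loop body runs four times, so the predicate is polled
# with sleeps of roughly 0.025, 0.05, 0.1 and 0.2 seconds between checks
# before _WaitUntil gives up and returns False.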
def _CheckPortStatus(port, expected_status):
"""Returns True if port has expected_status.
Args:
port: the port number.
expected_status: boolean of expected status.
Returns:
Returns True if the status is expected. Otherwise returns False.
"""
return _WaitUntil(lambda: ports.IsHostPortUsed(port) == expected_status)
def _CheckDevicePortStatus(adb, port):
"""Returns whether the provided port is used."""
return _WaitUntil(lambda: ports.IsDevicePortUsed(adb, port))
def _GetServerTypeCommandLine(server_type):
"""Returns the command-line by the given server type.
Args:
server_type: the server type to be used (e.g. 'http').
Returns:
A string containing the command-line argument.
"""
if server_type not in SERVER_TYPES:
raise NotImplementedError('Unknown server type: %s' % server_type)
if server_type == 'udpecho':
raise Exception('Please do not run UDP echo tests because we do not have '
'a UDP forwarder tool.')
return SERVER_TYPES[server_type]
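# Illustrative note (not in the original comments): per the SERVER_TYPES table
# above, _GetServerTypeCommandLine('ftp') returns '-f', 'http' and 'sync' map
# to an empty string, an unknown type raises NotImplementedError, and
# 'udpecho' deliberately raises because no UDP forwarder is available.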
class TestServerThread(threading.Thread):
"""A thread to run the test server in a separate process."""
def __init__(self, ready_event, arguments, adb, tool, build_type):
"""Initialize TestServerThread with the following argument.
Args:
ready_event: event which will be set when the test server is ready.
arguments: dictionary of arguments to run the test server.
adb: instance of AndroidCommands.
tool: instance of runtime error detection tool.
build_type: 'Release' or 'Debug'.
"""
threading.Thread.__init__(self)
self.wait_event = threading.Event()
self.stop_flag = False
self.ready_event = ready_event
self.ready_event.clear()
self.arguments = arguments
self.adb = adb
self.tool = tool
self.test_server_process = None
self.is_ready = False
self.host_port = self.arguments['port']
assert isinstance(self.host_port, int)
# The forwarder device port now is dynamically allocated.
self.forwarder_device_port = 0
# Anonymous pipe in order to get port info from test server.
self.pipe_in = None
self.pipe_out = None
self.command_line = []
self.build_type = build_type
def _WaitToStartAndGetPortFromTestServer(self):
"""Waits for the Python test server to start and gets the port it is using.
The port information is passed by the Python test server with a pipe given
by self.pipe_out. It is written as a result to |self.host_port|.
Returns:
Whether the port used by the test server was successfully fetched.
"""
assert self.host_port == 0 and self.pipe_out and self.pipe_in
(in_fds, _, _) = select.select([self.pipe_in, ], [], [],
TEST_SERVER_STARTUP_TIMEOUT)
if len(in_fds) == 0:
      logging.error('Failed to wait for the Python test server to be started.')
return False
# First read the data length as an unsigned 4-byte value. This
# is _not_ using network byte ordering since the Python test server packs
# size as native byte order and all Chromium platforms so far are
# configured to use little-endian.
# TODO(jnd): Change the Python test server and local_test_server_*.cc to
# use a unified byte order (either big-endian or little-endian).
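    # Illustrative sketch of the framing described above (assumes a
    # little-endian host, which is what the comment above relies on): a
    # 12-byte JSON payload would be preceded on the pipe by
    # struct.pack('=L', 12) == b'\x0c\x00\x00\x00', and
    # struct.unpack('=L', b'\x0c\x00\x00\x00') yields (12,).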
data_length = os.read(self.pipe_in, struct.calcsize('=L'))
if data_length:
(data_length,) = struct.unpack('=L', data_length)
assert data_length
if not data_length:
logging.error('Failed to get length of server data.')
return False
port_json = os.read(self.pipe_in, data_length)
if not port_json:
logging.error('Failed to get server data.')
return False
logging.info('Got port json data: %s', port_json)
port_json = json.loads(port_json)
if port_json.has_key('port') and isinstance(port_json['port'], int):
self.host_port = port_json['port']
return _CheckPortStatus(self.host_port, True)
logging.error('Failed to get port information from the server data.')
return False
def _GenerateCommandLineArguments(self):
"""Generates the command line to run the test server.
Note that all options are processed by following the definitions in
testserver.py.
"""
if self.command_line:
return
# The following arguments must exist.
type_cmd = _GetServerTypeCommandLine(self.arguments['server-type'])
if type_cmd:
self.command_line.append(type_cmd)
self.command_line.append('--port=%d' % self.host_port)
# Use a pipe to get the port given by the instance of Python test server
# if the test does not specify the port.
if self.host_port == 0:
(self.pipe_in, self.pipe_out) = os.pipe()
self.command_line.append('--startup-pipe=%d' % self.pipe_out)
self.command_line.append('--host=%s' % self.arguments['host'])
data_dir = self.arguments['data-dir'] or 'chrome/test/data'
if not os.path.isabs(data_dir):
data_dir = os.path.join(constants.DIR_SOURCE_ROOT, data_dir)
self.command_line.append('--data-dir=%s' % data_dir)
# The following arguments are optional depending on the individual test.
if self.arguments.has_key('log-to-console'):
self.command_line.append('--log-to-console')
if self.arguments.has_key('auth-token'):
self.command_line.append('--auth-token=%s' % self.arguments['auth-token'])
if self.arguments.has_key('https'):
self.command_line.append('--https')
if self.arguments.has_key('cert-and-key-file'):
self.command_line.append('--cert-and-key-file=%s' % os.path.join(
constants.DIR_SOURCE_ROOT, self.arguments['cert-and-key-file']))
if self.arguments.has_key('ocsp'):
self.command_line.append('--ocsp=%s' % self.arguments['ocsp'])
if self.arguments.has_key('https-record-resume'):
self.command_line.append('--https-record-resume')
if self.arguments.has_key('ssl-client-auth'):
self.command_line.append('--ssl-client-auth')
if self.arguments.has_key('tls-intolerant'):
self.command_line.append('--tls-intolerant=%s' %
self.arguments['tls-intolerant'])
if self.arguments.has_key('ssl-client-ca'):
for ca in self.arguments['ssl-client-ca']:
self.command_line.append('--ssl-client-ca=%s' %
os.path.join(constants.DIR_SOURCE_ROOT, ca))
if self.arguments.has_key('ssl-bulk-cipher'):
for bulk_cipher in self.arguments['ssl-bulk-cipher']:
self.command_line.append('--ssl-bulk-cipher=%s' % bulk_cipher)
def _CloseUnnecessaryFDsForTestServerProcess(self):
# This is required to avoid subtle deadlocks that could be caused by the
# test server child process inheriting undesirable file descriptors such as
# file lock file descriptors.
for fd in xrange(0, 1024):
if fd != self.pipe_out:
try:
os.close(fd)
except:
pass
def run(self):
logging.info('Start running the thread!')
self.wait_event.clear()
self._GenerateCommandLineArguments()
command = constants.DIR_SOURCE_ROOT
if self.arguments['server-type'] == 'sync':
command = [os.path.join(command, 'sync', 'tools', 'testserver',
'sync_testserver.py')] + self.command_line
else:
command = [os.path.join(command, 'net', 'tools', 'testserver',
'testserver.py')] + self.command_line
logging.info('Running: %s', command)
self.process = subprocess.Popen(
command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess)
if self.process:
if self.pipe_out:
self.is_ready = self._WaitToStartAndGetPortFromTestServer()
else:
self.is_ready = _CheckPortStatus(self.host_port, True)
if self.is_ready:
Forwarder.Map([(0, self.host_port)], self.adb, self.build_type, self.tool)
# Check whether the forwarder is ready on the device.
self.is_ready = False
device_port = Forwarder.DevicePortForHostPort(self.host_port)
if device_port and _CheckDevicePortStatus(self.adb, device_port):
self.is_ready = True
self.forwarder_device_port = device_port
# Wake up the request handler thread.
self.ready_event.set()
# Keep thread running until Stop() gets called.
_WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint)
if self.process.poll() is None:
self.process.kill()
Forwarder.UnmapDevicePort(self.forwarder_device_port, self.adb)
self.process = None
self.is_ready = False
if self.pipe_out:
os.close(self.pipe_in)
os.close(self.pipe_out)
self.pipe_in = None
self.pipe_out = None
logging.info('Test-server has died.')
self.wait_event.set()
def Stop(self):
"""Blocks until the loop has finished.
Note that this must be called in another thread.
"""
if not self.process:
return
self.stop_flag = True
self.wait_event.wait()
class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""A handler used to process http GET/POST request."""
def _SendResponse(self, response_code, response_reason, additional_headers,
contents):
"""Generates a response sent to the client from the provided parameters.
Args:
response_code: number of the response status.
response_reason: string of reason description of the response.
additional_headers: dict of additional headers. Each key is the name of
the header, each value is the content of the header.
contents: string of the contents we want to send to client.
"""
self.send_response(response_code, response_reason)
self.send_header('Content-Type', 'text/html')
# Specify the content-length as without it the http(s) response will not
# be completed properly (and the browser keeps expecting data).
self.send_header('Content-Length', len(contents))
for header_name in additional_headers:
self.send_header(header_name, additional_headers[header_name])
self.end_headers()
self.wfile.write(contents)
self.wfile.flush()
def _StartTestServer(self):
"""Starts the test server thread."""
logging.info('Handling request to spawn a test server.')
content_type = self.headers.getheader('content-type')
if content_type != 'application/json':
raise Exception('Bad content-type for start request.')
content_length = self.headers.getheader('content-length')
if not content_length:
content_length = 0
try:
content_length = int(content_length)
except:
raise Exception('Bad content-length for start request.')
logging.info(content_length)
test_server_argument_json = self.rfile.read(content_length)
logging.info(test_server_argument_json)
assert not self.server.test_server_instance
ready_event = threading.Event()
self.server.test_server_instance = TestServerThread(
ready_event,
json.loads(test_server_argument_json),
self.server.adb,
self.server.tool,
self.server.build_type)
self.server.test_server_instance.setDaemon(True)
self.server.test_server_instance.start()
ready_event.wait()
if self.server.test_server_instance.is_ready:
self._SendResponse(200, 'OK', {}, json.dumps(
{'port': self.server.test_server_instance.forwarder_device_port,
'message': 'started'}))
logging.info('Test server is running on port: %d.',
self.server.test_server_instance.host_port)
else:
self.server.test_server_instance.Stop()
self.server.test_server_instance = None
self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encountered a problem while starting a test server.')
def _KillTestServer(self):
"""Stops the test server instance."""
# There should only ever be one test server at a time. This may do the
# wrong thing if we try and start multiple test servers.
if not self.server.test_server_instance:
return
port = self.server.test_server_instance.host_port
logging.info('Handling request to kill a test server on port: %d.', port)
self.server.test_server_instance.Stop()
# Make sure the status of test server is correct before sending response.
if _CheckPortStatus(port, False):
self._SendResponse(200, 'OK', {}, 'killed')
logging.info('Test server on port %d is killed', port)
else:
self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encountered a problem while killing a test server.')
self.server.test_server_instance = None
def do_POST(self):
parsed_path = urlparse.urlparse(self.path)
action = parsed_path.path
logging.info('Action for POST method is: %s.', action)
if action == '/start':
self._StartTestServer()
else:
self._SendResponse(400, 'Unknown request.', {}, '')
      logging.info('Encountered unknown request: %s.', action)
def do_GET(self):
parsed_path = urlparse.urlparse(self.path)
action = parsed_path.path
params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
logging.info('Action for GET method is: %s.', action)
for param in params:
logging.info('%s=%s', param, params[param][0])
if action == '/kill':
self._KillTestServer()
elif action == '/ping':
# The ping handler is used to check whether the spawner server is ready
# to serve the requests. We don't need to test the status of the test
# server when handling ping request.
self._SendResponse(200, 'OK', {}, 'ready')
logging.info('Handled ping request and sent response.')
else:
self._SendResponse(400, 'Unknown request', {}, '')
      logging.info('Encountered unknown request: %s.', action)
class SpawningServer(object):
"""The class used to start/stop a http server."""
def __init__(self, test_server_spawner_port, adb, tool, build_type):
logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
SpawningServerRequestHandler)
self.server.adb = adb
self.server.tool = tool
self.server.test_server_instance = None
self.server.build_type = build_type
def _Listen(self):
logging.info('Starting test server spawner')
self.server.serve_forever()
def Start(self):
"""Starts the test server spawner."""
listener_thread = threading.Thread(target=self._Listen)
listener_thread.setDaemon(True)
listener_thread.start()
def Stop(self):
"""Stops the test server spawner.
Also cleans the server state.
"""
self.CleanupState()
self.server.shutdown()
def CleanupState(self):
"""Cleans up the spawning server state.
This should be called if the test server spawner is reused,
to avoid sharing the test server instance.
"""
if self.server.test_server_instance:
self.server.test_server_instance.Stop()
self.server.test_server_instance = None
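# Illustrative sketch of the spawner's HTTP protocol (the port 8001 and the
# urllib2 client below are assumptions used only for the example; they are not
# defined by this module):
#
#   import json, urllib2
#
#   # readiness probe, handled by do_GET('/ping')
#   urllib2.urlopen('http://localhost:8001/ping').read()        # -> 'ready'
#
#   # spawn a test server, handled by do_POST('/start')
#   request = urllib2.Request(
#       'http://localhost:8001/start',
#       data=json.dumps({'server-type': 'http', 'port': 0,
#                        'host': '127.0.0.1', 'data-dir': ''}),
#       headers={'Content-Type': 'application/json'})
#   urllib2.urlopen(request).read()   # -> '{"port": <device port>, "message": "started"}'
#
#   # tear the test server down again, handled by do_GET('/kill')
#   urllib2.urlopen('http://localhost:8001/kill').read()        # -> 'killed'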
|
renegelinas/mi-instrument | refs/heads/master | mi/dataset/parser/test/test_auv_eng_auv.py | 7 | #!/usr/bin/env python
"""
@package mi.dataset.parser.test
@file marine-integrations/mi/dataset/parser/test/test_auv_eng_auv.py
@author Jeff Roy
@brief Test code for an auv_eng_auv data parser
NOTE: several other parsers built on auv_common have already been tested,
so the negative paths through the common code are not verified again here.
Testing is limited to code specific to the derived classes of auv_eng_auv
"""
import os
from nose.plugins.attrib import attr
from mi.core.exceptions import RecoverableSampleException
from mi.core.log import get_logger
from mi.dataset.driver.auv_eng.auv.resource import RESOURCE_PATH
from mi.dataset.parser.auv_eng_auv import AuvEngAuvParser
from mi.dataset.test.test_parser import ParserUnitTestCase
log = get_logger()
@attr('UNIT', group='mi')
class AuvEngAuvTestCase(ParserUnitTestCase):
"""
auv_eng_auv Parser unit test suite
"""
# IMAGENEX 852 TESTS
def test_simple_imagenex(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_imagenex.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'imagenex_telem_20.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_imagenex.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'imagenex_recov_20.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_imagenex(self):
"""
Read test data and pull out data particles.
        Assert that we get 2 errors due to incorrect epoch and mission time formats
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'imagenex_bad_timestamps.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 7
self.assertEqual(len(particles), 7)
self.assertEqual(len(self.exception_callback_value), 2)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
self.assertIsInstance(self.exception_callback_value[1], RecoverableSampleException)
# DIGITAL USBL TESTS
def test_simple_usbl(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_usbl.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'usbl_telem_22.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_usbl.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'usbl_recov_22.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_usbl(self):
"""
Read test data and pull out data particles.
        Assert that we get 2 errors due to incorrect epoch and mission time formats
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'usbl_bad_timestamps.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 6
self.assertEqual(len(particles), 6)
self.assertEqual(len(self.exception_callback_value), 2)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
self.assertIsInstance(self.exception_callback_value[1], RecoverableSampleException)
# TRI FIN MOTOR TESTS
def test_simple_motor(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_motor.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(28)
self.assert_particles(particles, 'motor_telem_28.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_motor.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(28)
self.assert_particles(particles, 'motor_recov_28.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_motor(self):
"""
Read test data and pull out data particles.
        Assert that we get 1 error due to an incorrect epoch or mission time format
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'motor_bad_timestamp.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 4
self.assertEqual(len(particles), 4)
self.assertEqual(len(self.exception_callback_value), 1)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
# EMERGENCY BOARD TESTS
def test_simple_emergency(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_emergency.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'emergency_telem_7.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_emergency.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'emergency_recov_7.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_emergency(self):
"""
Read test data and pull out data particles.
        Assert that we get 1 error due to an incorrect epoch or mission time format
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'emergency_bad_timestamp.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 3
self.assertEqual(len(particles), 3)
self.assertEqual(len(self.exception_callback_value), 1)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
# OIL COMPENSATOR TESTS
def test_simple_oil_comp(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_oil_comp.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'oil_comp_telem_20.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_oil_comp.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'oil_comp_recov_20.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_oil_comp(self):
"""
Read test data and pull out data particles.
        Assert that we get 2 errors due to incorrect epoch and mission time formats
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'oil_comp_bad_timestamps.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 4
self.assertEqual(len(particles), 4)
self.assertEqual(len(self.exception_callback_value), 2)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
self.assertIsInstance(self.exception_callback_value[1], RecoverableSampleException)
# SMART BATTERY TESTS
def test_simple_battery(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_battery.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'battery_telem_20.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_battery.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'battery_recov_20.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_battery(self):
"""
Read test data and pull out data particles.
        Assert that we get 1 error due to an incorrect epoch or mission time format
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'battery_bad_timestamp.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 4
self.assertEqual(len(particles), 4)
self.assertEqual(len(self.exception_callback_value), 1)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
# DIGITAL TX BOARD TESTS
def test_simple_tx_board(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_tx_board.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'tx_board_telem_22.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_tx_board.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'tx_board_recov_22.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_tx_board(self):
"""
Read test data and pull out data particles.
        Assert that we get 2 errors due to incorrect epoch and mission time formats
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'tx_board_bad_timestamps.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 4
self.assertEqual(len(particles), 4)
self.assertEqual(len(self.exception_callback_value), 2)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
self.assertIsInstance(self.exception_callback_value[1], RecoverableSampleException)
# FAULT MESSAGE TESTS
def test_simple_fault(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_fault.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(35)
self.assert_particles(particles, 'fault_telem_35.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_fault.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(35)
self.assert_particles(particles, 'fault_recov_35.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_fault(self):
"""
Read test data and pull out data particles.
        Assert that we get 1 error due to an incorrect epoch or mission time format
This tests the generic timestamp method with two parameters
"""
with open(os.path.join(RESOURCE_PATH, 'fault_bad_timestamp.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 5
self.assertEqual(len(particles), 5)
self.assertEqual(len(self.exception_callback_value), 1)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
# AUV STATE TESTS
def test_simple_state(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
"""
with open(os.path.join(RESOURCE_PATH, 'subset_state.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(25)
self.assert_particles(particles, 'state_telem_25.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset_state.csv'), 'rU') as stream_handle:
            # test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(25)
self.assert_particles(particles, 'state_recov_25.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_bad_timestamp_state(self):
"""
Read test data and pull out data particles.
        Assert that we get 2 errors due to incorrect epoch and mission time formats
This tests the generic timestamp method with two parameters
"""
        # TODO the Mission time in this block looks to be way too big, waiting to hear from Hydroid
with open(os.path.join(RESOURCE_PATH, 'state_bad_timestamps.csv'), 'rU') as stream_handle:
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(10) # ask for 10 should get 6
self.assertEqual(len(particles), 6)
self.assertEqual(len(self.exception_callback_value), 2)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
self.assertIsInstance(self.exception_callback_value[1], RecoverableSampleException)
def test_get_many(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
This test parses a file containing all message types and verifies
all of the engineering data messages
"""
with open(os.path.join(RESOURCE_PATH, 'subset2_reduced.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
particles = parser.get_records(200)
self.assert_particles(particles, 'subset2_reduced_telem.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
with open(os.path.join(RESOURCE_PATH, 'subset2_reduced.csv'), 'rU') as stream_handle:
# test the recovered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=False)
particles = parser.get_records(200)
self.assert_particles(particles, 'subset2_reduced_recov.yml', RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_long_stream(self):
"""
Read test data and pull out data particles.
Assert that the results are those we expected.
This test parses a very large file containing all message types and verifies
there are no errors
"""
with open(os.path.join(RESOURCE_PATH, 'subset2.csv'), 'rU') as stream_handle:
# test the telemetered particle stream
parser = AuvEngAuvParser(stream_handle,
self.exception_callback,
is_telemetered=True)
parser.get_records(160000)
self.assertEqual(self.exception_callback_value, [])
|
redeyser/IceCash2 | refs/heads/master | install/pyusb-1.0.0rc1/build/lib.linux-i686-2.7/usb/_lookup.py | 28 | # Copyright (C) 2009-2014 Walker Inman
#
# The following terms apply to all files associated
# with the software unless explicitly disclaimed in individual files.
#
# The authors hereby grant permission to use, copy, modify, distribute,
# and license this software and its documentation for any purpose, provided
# that existing copyright notices are retained in all copies and that this
# notice is included verbatim in any distributions. No written agreement,
# license, or royalty fee is required for any of the authorized uses.
# Modifications to this software may be copyrighted by their authors
# and need not follow the licensing terms described here, provided that
# the new terms are clearly indicated on the first page of each file where
# they apply.
#
# IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
# FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
# ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
# DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE
# IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
# NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
# MODIFICATIONS.
r"""usb._lookups - Lookup tables for USB
"""
descriptors = {
0x1 : "Device",
0x2 : "Configuration",
0x3 : "String",
0x4 : "Interface",
0x5 : "Endpoint",
0x6 : "Device qualifier",
0x7 : "Other speed configuration",
0x8 : "Interface power",
0x9 : "OTG",
0xA : "Debug",
0xB : "Interface association",
0xC : "Security",
0xD : "Key",
0xE : "Encryption type",
0xF : "Binary device object store (BOS)",
0x10 : "Device capability",
0x11 : "Wireless endpoint companion",
0x30 : "SuperSpeed endpoint companion",
}
device_classes = {
0x0 : "Specified at interface",
0x2 : "Communications Device",
0x9 : "Hub",
0xF : "Personal Healthcare Device",
0xDC : "Diagnostic Device",
0xE0 : "Wireless Controller",
0xEF : "Miscellaneous",
0xFF : "Vendor-specific",
}
interface_classes = {
0x0 : "Reserved",
0x1 : "Audio",
0x2 : "CDC Communication",
0x3 : "Human Interface Device",
0x5 : "Physical",
0x6 : "Image",
0x7 : "Printer",
0x8 : "Mass Storage",
0x9 : "Hub",
0xA : "CDC Data",
0xB : "Smart Card",
0xD : "Content Security",
0xE : "Video",
0xF : "Personal Healthcare",
0xDC : "Diagnostic Device",
0xE0 : "Wireless Controller",
0xEF : "Miscellaneous",
0xFE : "Application Specific",
0xFF : "Vendor Specific",
}
ep_attributes = {
0x0 : "Control",
0x1 : "Isochronous",
0x2 : "Bulk",
0x3 : "Interrupt",
}
MAX_POWER_UNITS_USB2p0 = 2 # mA
MAX_POWER_UNITS_USB_SUPERSPEED = 8 # mA
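# Illustrative sketch (assumes this module is importable as usb._lookup and
# that pyusb's usb.core can find an attached device; neither is defined here):
#
#   import usb.core
#   import usb._lookup as _lu
#
#   dev = usb.core.find()
#   if dev is not None:
#       print(_lu.device_classes.get(dev.bDeviceClass, "Unknown class"))
#       for cfg in dev:
#           for intf in cfg:
#               print(_lu.interface_classes.get(intf.bInterfaceClass, "Unknown"))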
|
cailiwei/buildroot | refs/heads/master | support/scripts/gen-manual-lists.py | 18 | #!/usr/bin/env python
##
## gen-manual-lists.py
##
## This script generates the following Buildroot manual appendices:
## - the package tables (one for the target, the other for host tools);
## - the deprecated items.
##
## Author(s):
## - Samuel Martin <[email protected]>
##
## Copyright (C) 2013 Samuel Martin
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Note about python2.
##
## This script can currently only be run using python2 interpreter due to
## its kconfiglib dependency (which is not yet python3 friendly).
from __future__ import print_function
from __future__ import unicode_literals
import os
import re
import sys
import datetime
from argparse import ArgumentParser
try:
import kconfiglib
except ImportError:
message = """
Could not find the module 'kconfiglib' in the PYTHONPATH:
"""
message += "\n".join([" {0}".format(path) for path in sys.path])
message += """
Make sure the Kconfiglib directory is in the PYTHONPATH, then relaunch the
script.
You can get kconfiglib from:
https://github.com/ulfalizer/Kconfiglib
"""
sys.stderr.write(message)
raise
def get_symbol_subset(root, filter_func):
""" Return a generator of kconfig items.
    :param root: Root item of the generated subset of items
:param filter_func: Filter function
"""
if hasattr(root, "get_items"):
get_items = root.get_items
elif hasattr(root, "get_top_level_items"):
get_items = root.get_top_level_items
else:
message = "The symbol does not contain any subset of symbols"
raise Exception(message)
for item in get_items():
if item.is_symbol():
if not item.prompts:
continue
if not filter_func(item):
continue
yield item
elif item.is_menu() or item.is_choice():
for i in get_symbol_subset(item, filter_func):
yield i
def get_symbol_parents(item, root=None, enable_choice=False):
""" Return the list of the item's parents. The lasst item of the list is
the closest parent, the first the furthest.
:param item: Item from which the the parent list is generated
:param root: Root item stopping the search (not included in the
parent list)
:param enable_choice: Flag enabling choices to appear in the parent list
"""
parent = item.get_parent()
parents = []
while parent and parent != root:
if parent.is_menu():
parents.append(parent.get_title())
elif enable_choice and parent.is_choice():
parents.append(parent.prompts[0][0])
parent = parent.get_parent()
if isinstance(root, kconfiglib.Menu) or \
(enable_choice and isinstance(root, kconfiglib.Choice)):
parents.append("") # Dummy empty parrent to get a leading arrow ->
parents.reverse()
return parents
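# Illustrative sketch (the menu and symbol names are made up for the example):
# for a package symbol nested as
#   Target packages -> Libraries -> Networking -> BR2_PACKAGE_FOO
# get_symbol_parents(foo_symbol) would return
#   ["Target packages", "Libraries", "Networking"]
# with the furthest parent first and the closest parent last, as described in
# the docstring above.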
def format_asciidoc_table(root, get_label_func, filter_func=lambda x: True,
enable_choice=False, sorted=True, sub_menu=True,
item_label=None):
""" Return the asciidoc formatted table of the items and their location.
:param root: Root item of the item subset
:param get_label_func: Item's label getter function
:param filter_func: Filter function to apply on the item subset
:param enable_choice: Enable choices to appear as part of the item's
location
:param sorted: Flag to alphabetically sort the table
:param sub_menu: Output the column with the sub-menu path
"""
def _format_entry(label, parents, sub_menu):
""" Format an asciidoc table entry.
"""
if sub_menu:
return "| {0:<40} <| {1}\n".format(label, " -> ".join(parents))
else:
return "| {0:<40}\n".format(label)
lines = []
for item in get_symbol_subset(root, filter_func):
if not item.is_symbol() or not item.prompts:
continue
loc = get_symbol_parents(item, root, enable_choice=enable_choice)
lines.append(_format_entry(get_label_func(item), loc, sub_menu))
if sorted:
lines.sort(key=lambda x: x.lower())
if hasattr(root, "get_title"):
loc_label = get_symbol_parents(root, None, enable_choice=enable_choice)
loc_label += [root.get_title(), "..."]
else:
loc_label = ["Location"]
if not item_label:
item_label = "Items"
table = ":halign: center\n\n"
if sub_menu:
width = "100%"
columns = "^1,4"
else:
width = "30%"
columns = "^1"
table = "[width=\"{0}\",cols=\"{1}\",options=\"header\"]\n".format(width, columns)
table += "|===================================================\n"
table += _format_entry(item_label, loc_label, sub_menu)
table += "\n" + "".join(lines) + "\n"
table += "|===================================================\n"
return table
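# Illustrative sketch of the generated markup (the entries are made up and the
# header row depends on the root item passed in): with sub_menu=True the
# function emits an asciidoc table along the lines of
#
#   [width="100%",cols="^1,4",options="header"]
#   |===================================================
#   | Items                                    <| Location
#
#   | foo                                      <|  -> Libraries -> Networking
#   | bar                                      <|  -> Networking applications
#
#   |===================================================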
class Buildroot:
""" Buildroot configuration object.
"""
root_config = "Config.in"
package_dirname = "package"
package_prefixes = ["BR2_PACKAGE_", "BR2_PACKAGE_HOST_"]
re_pkg_prefix = re.compile(r"^(" + "|".join(package_prefixes) + ").*")
deprecated_symbol = "BR2_DEPRECATED"
list_in = """\
//
// Automatically generated list for Buildroot manual.
//
{table}
"""
list_info = {
'target-packages': {
'filename': "package-list",
'root_menu': "Package Selection for the target",
'filter': "_is_package",
'sorted': True,
'sub_menu': True,
},
'host-packages': {
'filename': "host-package-list",
'root_menu': "Host utilities",
'filter': "_is_package",
'sorted': True,
'sub_menu': False,
},
'deprecated': {
'filename': "deprecated-list",
'root_menu': None,
'filter': "_is_deprecated",
'sorted': False,
'sub_menu': True,
},
}
def __init__(self):
self.base_dir = os.environ.get("TOPDIR")
self.output_dir = os.environ.get("O")
self.package_dir = os.path.join(self.base_dir, self.package_dirname)
# The kconfiglib requires an environment variable named "srctree" to
# load the configuration, so set it.
os.environ.update({'srctree': self.base_dir})
self.config = kconfiglib.Config(os.path.join(self.base_dir,
self.root_config))
self._deprecated = self.config.get_symbol(self.deprecated_symbol)
self.gen_date = datetime.datetime.utcnow()
self.br_version_full = os.environ.get("BR2_VERSION_FULL")
if self.br_version_full and self.br_version_full.endswith("-git"):
self.br_version_full = self.br_version_full[:-4]
if not self.br_version_full:
self.br_version_full = "undefined"
def _get_package_symbols(self, package_name):
""" Return a tuple containing the target and host package symbol.
"""
symbols = re.sub("[-+.]", "_", package_name)
symbols = symbols.upper()
symbols = tuple([prefix + symbols for prefix in self.package_prefixes])
return symbols
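# Example (hypothetical package name): _get_package_symbols("gst-plugins-base")
# returns ("BR2_PACKAGE_GST_PLUGINS_BASE", "BR2_PACKAGE_HOST_GST_PLUGINS_BASE").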
def _is_deprecated(self, symbol):
""" Return True if the symbol is marked as deprecated, otherwise False.
"""
return self._deprecated in symbol.get_referenced_symbols()
def _is_package(self, symbol):
""" Return True if the symbol is a package or a host package, otherwise
False.
"""
if not self.re_pkg_prefix.match(symbol.get_name()):
return False
pkg_name = re.sub("BR2_PACKAGE_(HOST_)?(.*)", r"\2", symbol.get_name())
pattern = "^(HOST_)?" + pkg_name + "$"
pattern = re.sub("_", ".", pattern)
pattern = re.compile(pattern, re.IGNORECASE)
# Here, we cannot just check for the location of the Config.in because
# of the "virtual" package.
#
# So, to check that a symbol is a package (not a package option or
# anything else), we check for the existence of the package *.mk file.
#
# By the way, to actually check for a package, we should grep all *.mk
# files for the following regex:
# "\$\(eval \$\((host-)?(generic|autotools|cmake)-package\)\)"
#
# Implementation details:
#
# * The package list is generated from the *.mk file existence, the
# first time this function is called. Despite the memory consumption,
# this list is stored because the execution time of this script is
# noticeably shorter than re-scanning the package sub-tree for each
# symbol.
if not hasattr(self, "_package_list"):
pkg_list = []
for _, _, files in os.walk(self.package_dir):
for file_ in (f for f in files if f.endswith(".mk")):
pkg_list.append(re.sub(r"(.*?)\.mk", r"\1", file_))
setattr(self, "_package_list", pkg_list)
for pkg in getattr(self, "_package_list"):
if pattern.match(pkg):
return True
return False
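# Example (hypothetical symbol): for BR2_PACKAGE_HOST_FOO_BAR the code above
# derives pkg_name "FOO_BAR" and the case-insensitive pattern "^(HOST.)?FOO.BAR$",
# which matches .mk basenames such as "foo-bar" or "host-foo-bar".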
def _get_symbol_label(self, symbol, mark_deprecated=True):
""" Return the label (a.k.a. prompt text) of the symbol.
:param symbol: The symbol
:param mark_deprecated: Append a 'deprecated' to the label
"""
label = symbol.prompts[0][0]
if self._is_deprecated(symbol) and mark_deprecated:
label += " *(deprecated)*"
return label
def print_list(self, list_type, enable_choice=True, enable_deprecated=True,
dry_run=False, output=None):
""" Print the requested list. If not dry run, then the list is
automatically written in its own file.
:param list_type: The list type to be generated
:param enable_choice: Flag enabling choices to appear in the list
:param enable_deprecated: Flag enabling deprecated items to appear in
the package lists
:param dry_run: Dry run (print the list on stdout instead of
writing the list file)
"""
def _get_menu(title):
""" Return the first symbol menu matching the given title.
"""
menus = self.config.get_menus()
menu = [m for m in menus if m.get_title().lower() == title.lower()]
if not menu:
message = "No such menu: '{0}'".format(title)
raise Exception(message)
return menu[0]
list_config = self.list_info[list_type]
root_title = list_config.get('root_menu')
if root_title:
root_item = _get_menu(root_title)
else:
root_item = self.config
filter_ = getattr(self, list_config.get('filter'))
filter_func = lambda x: filter_(x)
if not enable_deprecated and list_type != "deprecated":
filter_func = lambda x: filter_(x) and not self._is_deprecated(x)
mark_depr = list_type != "deprecated"
get_label = lambda x: self._get_symbol_label(x, mark_depr)
item_label = "Features" if list_type == "deprecated" else "Packages"
table = format_asciidoc_table(root_item, get_label,
filter_func=filter_func,
enable_choice=enable_choice,
sorted=list_config.get('sorted'),
sub_menu=list_config.get('sub_menu'),
item_label=item_label)
content = self.list_in.format(table=table)
if dry_run:
print(content)
return
if not output:
output_dir = self.output_dir
if not output_dir:
print("Warning: Undefined output directory.")
print("\tUse source directory as output location.")
output_dir = self.base_dir
output = os.path.join(output_dir,
list_config.get('filename') + ".txt")
if not os.path.exists(os.path.dirname(output)):
os.makedirs(os.path.dirname(output))
print("Writing the {0} list in:\n\t{1}".format(list_type, output))
with open(output, 'w') as fout:
fout.write(content)
if __name__ == '__main__':
list_types = ['target-packages', 'host-packages', 'deprecated']
parser = ArgumentParser()
parser.add_argument("list_type", nargs="?", choices=list_types,
help="""\
Generate the given list (generate all lists if unspecified)""")
parser.add_argument("-n", "--dry-run", dest="dry_run", action='store_true',
help="Output the generated list to stdout")
parser.add_argument("--output-target", dest="output_target",
help="Output target package file")
parser.add_argument("--output-host", dest="output_host",
help="Output host package file")
parser.add_argument("--output-deprecated", dest="output_deprecated",
help="Output deprecated file")
args = parser.parse_args()
lists = [args.list_type] if args.list_type else list_types
buildroot = Buildroot()
for list_name in lists:
output = getattr(args, "output_" + list_name.split("-", 1)[0])
buildroot.print_list(list_name, dry_run=args.dry_run, output=output)
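# Example invocations of this script (hypothetical paths and script name; TOPDIR
# and O are the environment variables read by Buildroot.__init__):
#   TOPDIR=~/buildroot O=~/buildroot/output python gen-lists.py target-packages
#   TOPDIR=~/buildroot python gen-lists.py --dry-run deprecated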
|
davidbrazdil/nacl | refs/heads/master | run.py | 5 | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import subprocess
import sys
import tempfile
import pynacl.platform
# Target architecture for PNaCl can be set through the ``-arch``
# command-line argument, and when its value is ``env`` the following
# program environment variable is queried to figure out which
# architecture to target.
ARCH_ENV_VAR_NAME = 'PNACL_RUN_ARCH'
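# Example (hypothetical shell session): select the translation target through the
# environment instead of hard-coding it on the command line:
#   PNACL_RUN_ARCH=arm ./run.py -arch env hello.pexe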
class Environment:
pass
env = Environment()
def SetupEnvironment():
# native_client/ directory
env.nacl_root = FindBaseDir()
toolchain_base = os.path.join(env.nacl_root,
'toolchain',
'%s_x86' % pynacl.platform.GetOS())
# Path to Native NaCl toolchain (glibc)
env.nnacl_root = os.path.join(toolchain_base, 'nacl_x86_glibc')
# Path to PNaCl toolchain
env.pnacl_base = os.path.join(toolchain_base, 'pnacl_newlib')
# QEMU
env.arm_root = os.path.join(toolchain_base, 'arm_trusted')
env.qemu_arm = os.path.join(env.arm_root, 'run_under_qemu_arm')
env.mips32_root = os.path.join(toolchain_base, 'mips_trusted')
env.qemu_mips32 = os.path.join(env.mips32_root, 'run_under_qemu_mips32')
# Path to 'readelf'
env.readelf = FindReadElf()
# Path to 'scons'
env.scons = os.path.join(env.nacl_root, 'scons')
# Library path for runnable-ld.so
env.library_path = []
# Suppress -S -a
env.paranoid = False
# Only print commands, don't run them
env.dry_run = False
# Force a specific sel_ldr
env.force_sel_ldr = None
# Force a specific IRT
env.force_irt = None
# Don't print anything
env.quiet = False
# Arch (x86-32, x86-64, arm, mips32)
env.arch = None
# Trace in QEMU
env.trace = False
# Debug the nexe using the debug stub
env.debug = False
# PNaCl (as opposed to NaCl).
env.is_pnacl = False
def PrintBanner(output):
if not env.quiet:
lines = output.split('\n')
print '*' * 80
for line in lines:
padding = ' ' * max(0, (80 - len(line)) / 2)
print padding + line + padding
print '*' * 80
def PrintCommand(s):
if not env.quiet:
print
print s
print
def GetMultiDir(arch):
if arch == 'x86-32':
return 'lib32'
elif arch == 'x86-64':
return 'lib'
else:
Fatal('nacl-gcc does not support %s' % arch)
def SetupArch(arch, allow_build=True):
'''Setup environment variables that require knowing the
architecture. We can only do this after we've seen the
nexe or once we've read -arch off the command-line.
'''
env.arch = arch
env.sel_ldr = FindOrBuildSelLdr(allow_build=allow_build)
env.irt = FindOrBuildIRT(allow_build=allow_build)
def SetupLibC(arch, is_dynamic):
if is_dynamic:
if env.is_pnacl:
libdir = os.path.join(env.pnacl_base, 'lib-' + arch)
else:
libdir = os.path.join(env.nnacl_root, 'x86_64-nacl', GetMultiDir(arch))
env.runnable_ld = os.path.join(libdir, 'runnable-ld.so')
env.library_path.append(libdir)
def main(argv):
SetupEnvironment()
return_code = 0
sel_ldr_options = []
# sel_ldr's "quiet" options need to come early in the command line
# to suppress noisy output from processing other options, like -Q.
sel_ldr_quiet_options = []
nexe, nexe_params = ArgSplit(argv[1:])
try:
if env.is_pnacl:
nexe = Translate(env.arch, nexe)
# Read the ELF file info
if env.is_pnacl and env.dry_run:
# In a dry run, we don't actually run pnacl-translate, so there is
# no nexe for readelf. Fill in the information manually.
arch = env.arch
is_dynamic = False
is_glibc_static = False
else:
arch, is_dynamic, is_glibc_static = ReadELFInfo(nexe)
# Add default sel_ldr options
if not env.paranoid:
sel_ldr_options += ['-a']
# -S signal handling is not supported on windows, but otherwise
# it is useful for getting the address of crashes.
if not pynacl.platform.IsWindows():
sel_ldr_options += ['-S']
# X86-64 glibc static has validation problems without stub out (-s)
if arch == 'x86-64' and is_glibc_static:
sel_ldr_options += ['-s']
if env.quiet:
# Don't print sel_ldr logs
# These need to be at the start of the arglist for full effectiveness.
# -q means quiet most stderr warnings.
# -l /dev/null means log to /dev/null.
sel_ldr_quiet_options = ['-q', '-l', '/dev/null']
if env.debug:
# Disabling validation (-c) is used by the debug stub test.
# TODO(dschuff): remove if/when it's no longer necessary
sel_ldr_options += ['-c', '-c', '-g']
# Tell the user
if is_dynamic:
extra = 'DYNAMIC'
else:
extra = 'STATIC'
PrintBanner('%s is %s %s' % (os.path.basename(nexe),
arch.upper(), extra))
# Setup architecture-specific environment variables
SetupArch(arch)
# Setup LibC-specific environment variables
SetupLibC(arch, is_dynamic)
sel_ldr_args = []
# Add irt to sel_ldr arguments
if env.irt:
sel_ldr_args += ['-B', env.irt]
# Setup sel_ldr arguments
sel_ldr_args += sel_ldr_options + ['--']
if is_dynamic:
sel_ldr_args += [env.runnable_ld,
'--library-path', ':'.join(env.library_path)]
# The NaCl dynamic loader prefers posixy paths.
nexe_path = os.path.abspath(nexe)
nexe_path = nexe_path.replace('\\', '/')
sel_ldr_args += [nexe_path] + nexe_params
# Run sel_ldr!
retries = 0
try:
if hasattr(env, 'retries'):
retries = int(env.retries)
except ValueError:
pass
collate = env.collate or retries > 0
input = sys.stdin.read() if collate else None
for iter in range(1 + max(retries, 0)):
output = RunSelLdr(sel_ldr_args, quiet_args=sel_ldr_quiet_options,
collate=collate, stdin_string=input)
if env.last_return_code < 128:
# If the application crashes, we expect a 128+ return code.
break
sys.stdout.write(output or '')
return_code = env.last_return_code
finally:
if env.is_pnacl:
# Clean up the .nexe that was created.
try:
os.remove(nexe)
except:
pass
return return_code
def RunSelLdr(args, quiet_args=[], collate=False, stdin_string=None):
"""Run the sel_ldr command and optionally capture its output.
Args:
args: A string list containing the command and arguments.
collate: Whether to capture stdout+stderr (rather than passing
them through to the terminal).
stdin_string: Text to send to the command via stdin. If None, stdin is
inherited from the caller.
Returns:
A string containing the concatenation of any captured stdout plus
any captured stderr.
"""
prefix = []
# The bootstrap loader args (--r_debug, --reserved_at_zero) need to
# come before quiet_args.
bootstrap_loader_args = []
arch = pynacl.platform.GetArch3264()
if arch != pynacl.platform.ARCH3264_ARM and env.arch == 'arm':
prefix = [ env.qemu_arm, '-cpu', 'cortex-a9']
if env.trace:
prefix += ['-d', 'in_asm,op,exec,cpu']
args = ['-Q'] + args
if arch != pynacl.platform.ARCH3264_MIPS32 and env.arch == 'mips32':
prefix = [env.qemu_mips32]
if env.trace:
prefix += ['-d', 'in_asm,op,exec,cpu']
args = ['-Q'] + args
# Use the bootstrap loader on linux.
if pynacl.platform.IsLinux():
bootstrap = os.path.join(os.path.dirname(env.sel_ldr),
'nacl_helper_bootstrap')
loader = [bootstrap, env.sel_ldr]
template_digits = 'X' * 16
bootstrap_loader_args = ['--r_debug=0x' + template_digits,
'--reserved_at_zero=0x' + template_digits]
else:
loader = [env.sel_ldr]
return Run(prefix + loader + bootstrap_loader_args + quiet_args + args,
exit_on_failure=(not collate),
capture_stdout=collate, capture_stderr=collate,
stdin_string=stdin_string)
def FindOrBuildIRT(allow_build = True):
if env.force_irt:
if env.force_irt == 'none':
return None
elif env.force_irt == 'core':
flavors = ['irt_core']
else:
irt = env.force_irt
if not os.path.exists(irt):
Fatal('IRT not found: %s' % irt)
return irt
else:
flavors = ['irt_core']
irt_paths = []
for flavor in flavors:
path = os.path.join(env.nacl_root, 'scons-out',
'nacl_irt-%s/staging/%s.nexe' % (env.arch, flavor))
irt_paths.append(path)
for path in irt_paths:
if os.path.exists(path):
return path
if allow_build:
PrintBanner('irt not found. Building it with scons.')
irt = irt_paths[0]
BuildIRT(flavors[0])
assert(env.dry_run or os.path.exists(irt))
return irt
return None
def BuildIRT(flavor):
args = ('platform=%s naclsdk_validate=0 ' +
'sysinfo=0 -j8 %s') % (env.arch, flavor)
args = args.split()
Run([env.scons] + args, cwd=env.nacl_root)
def FindOrBuildSelLdr(allow_build=True):
if env.force_sel_ldr:
if env.force_sel_ldr in ('dbg','opt'):
modes = [ env.force_sel_ldr ]
else:
sel_ldr = env.force_sel_ldr
if not os.path.exists(sel_ldr):
Fatal('sel_ldr not found: %s' % sel_ldr)
return sel_ldr
else:
modes = ['opt','dbg']
loaders = []
for mode in modes:
sel_ldr = os.path.join(
env.nacl_root, 'scons-out',
'%s-%s-%s' % (mode, pynacl.platform.GetOS(), env.arch),
'staging', 'sel_ldr')
if pynacl.platform.IsWindows():
sel_ldr += '.exe'
loaders.append(sel_ldr)
# If one exists, use it.
for sel_ldr in loaders:
if os.path.exists(sel_ldr):
return sel_ldr
# Build it
if allow_build:
PrintBanner('sel_ldr not found. Building it with scons.')
sel_ldr = loaders[0]
BuildSelLdr(modes[0])
assert(env.dry_run or os.path.exists(sel_ldr))
return sel_ldr
return None
def BuildSelLdr(mode):
args = ('platform=%s MODE=%s-host naclsdk_validate=0 ' +
'sysinfo=0 -j8 sel_ldr') % (env.arch, mode)
args = args.split()
Run([env.scons] + args, cwd=env.nacl_root)
def Translate(arch, pexe):
output_file = os.path.splitext(pexe)[0] + '.' + arch + '.nexe'
pnacl_translate = os.path.join(env.pnacl_base, 'bin', 'pnacl-translate')
args = [ pnacl_translate, '-arch', arch, pexe, '-o', output_file,
'--allow-llvm-bitcode-input' ]
if env.zerocost_eh:
args.append('--pnacl-allow-zerocost-eh')
Run(args)
return output_file
def Stringify(args):
ret = ''
for arg in args:
if ' ' in arg:
ret += ' "%s"' % arg
else:
ret += ' %s' % arg
return ret.strip()
def PrepareStdin(stdin_string):
"""Prepare a stdin stream for a subprocess based on contents of a string.
This has to be in the form of an actual file, rather than directly piping
the string, since the child may (inappropriately) try to fseek() on stdin.
Args:
stdin_string: The characters to pipe to the subprocess.
Returns:
An open temporary file object ready to be read from.
"""
f = tempfile.TemporaryFile()
f.write(stdin_string)
f.seek(0)
return f
def Run(args, cwd=None, verbose=True, exit_on_failure=False,
capture_stdout=False, capture_stderr=False, stdin_string=None):
"""Run a command and optionally capture its output.
Args:
args: A string list containing the command and arguments.
cwd: Change to this directory before running.
verbose: Print the command before running it.
exit_on_failure: Exit immediately if the command returns nonzero.
capture_stdout: Capture the stdout as a string (rather than passing it
through to the terminal).
capture_stderr: Capture the stderr as a string (rather than passing it
through to the terminal).
stdin_string: Text to send to the command via stdin. If None, stdin is
inherited from the caller.
Returns:
A string containing the concatenation of any captured stdout plus
any captured stderr.
"""
if verbose:
PrintCommand(Stringify(args))
if env.dry_run:
return
stdout_redir = None
stderr_redir = None
stdin_redir = None
if capture_stdout:
stdout_redir = subprocess.PIPE
if capture_stderr:
stderr_redir = subprocess.PIPE
if stdin_string:
stdin_redir = PrepareStdin(stdin_string)
p = None
try:
# PNaCl toolchain executables (pnacl-translate, readelf) are scripts
# not binaries, so they won't run on Windows without a shell.
use_shell = True if pynacl.platform.IsWindows() else False
p = subprocess.Popen(args, stdin=stdin_redir, stdout=stdout_redir,
stderr=stderr_redir, cwd=cwd, shell=use_shell)
(stdout_contents, stderr_contents) = p.communicate()
except KeyboardInterrupt, e:
if p:
p.kill()
raise e
except BaseException, e:
if p:
p.kill()
raise e
env.last_return_code = p.returncode
if p.returncode != 0 and exit_on_failure:
if capture_stdout or capture_stderr:
# Print an extra message if any of the program's output wasn't
# going to the screen.
Fatal('Failed to run: %s' % Stringify(args))
sys.exit(p.returncode)
return (stdout_contents or '') + (stderr_contents or '')
def ArgSplit(argv):
"""Parse command-line arguments.
Returns:
Tuple (nexe, nexe_args) where nexe is the name of the nexe or pexe
to execute, and nexe_args are its runtime arguments.
"""
desc = ('Run a command-line nexe (or pexe). Automatically handles\n' +
'translation, building sel_ldr, and building the IRT.')
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-L', action='append', dest='library_path', default=[],
help='Additional library path for runnable-ld.so.')
parser.add_argument('--paranoid', action='store_true', default=False,
help='Remove -S (signals) and -a (file access) ' +
'from the default sel_ldr options.')
parser.add_argument('--loader', dest='force_sel_ldr', metavar='SEL_LDR',
help='Path to sel_ldr. "dbg" or "opt" means use ' +
'dbg or opt version of sel_ldr. ' +
'By default, use whichever sel_ldr already exists; ' +
'otherwise, build opt version.')
parser.add_argument('--irt', dest='force_irt', metavar='IRT',
help='Path to IRT nexe. "core" or "none" means use ' +
'Core IRT or no IRT. By default, use whichever IRT ' +
'already exists; otherwise, build irt_core.')
parser.add_argument('--dry-run', '-n', action='store_true', default=False,
help="Just print commands, don't execute them.")
parser.add_argument('--quiet', '-q', action='store_true', default=False,
help="Don't print anything.")
parser.add_argument('--retries', default='0', metavar='N',
help='Retry sel_ldr command up to N times (if ' +
'flakiness is expected). This argument implies ' +
'--collate.')
parser.add_argument('--collate', action='store_true', default=False,
help="Combine/collate sel_ldr's stdout and stderr, and " +
"print to stdout.")
parser.add_argument('--trace', '-t', action='store_true', default=False,
help='Trace qemu execution.')
parser.add_argument('--debug', '-g', action='store_true', default=False,
help='Run sel_ldr with debugging enabled.')
parser.add_argument('-arch', '-m', dest='arch', action='store',
choices=sorted(
pynacl.platform.ARCH3264_LIST + ['env']),
help=('Specify architecture for PNaCl translation. ' +
'"env" is a special value which obtains the ' +
'architecture from the environment ' +
'variable "%s".') % ARCH_ENV_VAR_NAME)
parser.add_argument('remainder', nargs=argparse.REMAINDER,
metavar='nexe/pexe + args')
parser.add_argument('--pnacl-allow-zerocost-eh', action='store_true',
default=False, dest='zerocost_eh',
help='Allow non-stable zero-cost exception handling.')
(options, args) = parser.parse_known_args(argv)
# Copy the options into env.
for (key, value) in vars(options).iteritems():
setattr(env, key, value)
args += options.remainder
nexe = args[0] if len(args) else ''
env.is_pnacl = nexe.endswith('.pexe')
if env.arch == 'env':
# Get the architecture from the environment.
try:
env.arch = os.environ[ARCH_ENV_VAR_NAME]
except Exception as e:
Fatal(('Option "-arch env" specified, but environment variable ' +
'"%s" not specified: %s') % (ARCH_ENV_VAR_NAME, e))
if not env.arch and env.is_pnacl:
# For NaCl we'll figure out the architecture from the nexe's
# architecture, but for PNaCl we first need to translate and the
# user didn't tell us which architecture to translate to. Be nice
# and just translate to the current machine's architecture.
env.arch = pynacl.platform.GetArch3264()
# Canonicalize env.arch.
env.arch = pynacl.platform.GetArch3264(env.arch)
return nexe, args[1:]
def Fatal(msg, *args):
if len(args) > 0:
msg = msg % args
print msg
sys.exit(1)
def FindReadElf():
'''Returns the path of "readelf" binary.'''
candidates = []
# Use PNaCl's if it available.
candidates.append(
os.path.join(env.pnacl_base, 'bin', 'pnacl-readelf'))
# Otherwise, look for the system readelf
for path in os.environ['PATH'].split(os.pathsep):
candidates.append(os.path.join(path, 'readelf'))
for readelf in candidates:
if os.path.exists(readelf):
return readelf
Fatal('Cannot find readelf!')
def ReadELFInfo(f):
''' Returns: (arch, is_dynamic, is_glibc_static) '''
readelf = env.readelf
readelf_out = Run([readelf, '-lh', f], capture_stdout=True, verbose=False)
machine_line = None
is_dynamic = False
is_glibc_static = False
for line in readelf_out.split('\n'):
line = line.strip()
if line.startswith('Machine:'):
machine_line = line
if line.startswith('DYNAMIC'):
is_dynamic = True
if '__libc_atexit' in line:
is_glibc_static = True
if not machine_line:
Fatal('Script error: readelf output did not make sense!')
if 'Intel 80386' in machine_line:
arch = 'x86-32'
elif 'X86-64' in machine_line:
arch = 'x86-64'
elif 'ARM' in machine_line:
arch = 'arm'
elif 'MIPS' in machine_line:
arch = 'mips32'
else:
Fatal('%s: Unknown machine type', f)
return (arch, is_dynamic, is_glibc_static)
def FindBaseDir():
'''Crawl backwards, starting from the directory containing this script,
until we find the native_client/ directory.
'''
curdir = os.path.abspath(sys.argv[0])
while os.path.basename(curdir) != 'native_client':
curdir,subdir = os.path.split(curdir)
if subdir == '':
# We've hit the file system root
break
if os.path.basename(curdir) != 'native_client':
Fatal('Unable to find native_client directory!')
return curdir
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
lixun910/pysal | refs/heads/master | pysal/model/spint/count_model.py | 1 | """
CountModel class for dispatching different types of count models and different
types of estimation technqiues.
"""
__author__ = "Taylor Oshan [email protected]"
import numpy as np
from pysal.model.spglm.glm import GLM
from pysal.model.spglm.family import Poisson, QuasiPoisson
class CountModel(object):
"""
Base class for a variety of count-based models such as Poisson, negative binomial,
etc. of the exponential family.
Parameters
----------
y : array
n x 1; n observations of the dependent variable
X : array
n x k; design matrix of k explanatory variables
family : instance of class 'family'
default is Poisson()
constant : boolean
True if intercept should be estimated and false otherwise.
Default is True.
Attributes
----------
y : array
n x 1; n observations of the dependent variable
X : array
n x k; design matrix of k explanatory variables
fitted : boolean
False if the model has not been fitted and True if it has been
successfully fitted. Default is False.
constant : boolean
True if intercept should be estimated and false otherwise.
Default is True.
Example
-------
>>> from spint.count_model import CountModel
>>> import pysal.lib
>>> db = pysal.lib.io.open(pysal.lib.examples.get_path('columbus.dbf'),'r')
>>> y = np.array(db.by_col("HOVAL"))
>>> y = np.reshape(y, (49,1))
>>> y = np.round(y).astype(int)
>>> X = []
>>> X.append(db.by_col("INC"))
>>> X.append(db.by_col("CRIME"))
>>> X = np.array(X).T
>>> model = CountModel(y, X, family=Poisson())
>>> results = model.fit('GLM')
>>> results.params
array([ 3.92159085, 0.01183491, -0.01371397])
"""
def __init__(self, y, X, family=Poisson(), constant=True):
self.y = self._check_counts(y)
self.X = X
self.constant = constant
def _check_counts(self, y):
if (y.dtype == 'int64') | (y.dtype == 'int32'):
return y
else:
raise TypeError(
'Dependent variable (y) must be composed of integers')
def fit(self, framework='GLM', Quasi=False):
"""
Method that fits a particular count model using the appropriate
estimation technique. Models include Poisson GLM, Negative Binomial GLM,
Quasi-Poisson - at the moment Poisson GLM is the only option.
TODO: add zero inflated variants and hurdle variants.
Parameters
----------
framework : string
estimation framework; default is GLM
"GLM" | "QUASI" |
"""
if (framework.lower() == 'glm'):
if not Quasi:
results = GLM(
self.y,
self.X,
family=Poisson(),
constant=self.constant).fit()
else:
results = GLM(
self.y,
self.X,
family=QuasiPoisson(),
constant=self.constant).fit()
return CountModelResults(results)
else:
raise NotImplementedError(
'Poisson GLM is the only count model currently implemented')
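# Usage sketch (mirrors the class docstring example above; y and X are assumed to
# be prepared the same way): a quasi-Poisson fit only differs in the Quasi flag:
#   model = CountModel(y, X)
#   results = model.fit(framework='GLM', Quasi=True)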
class CountModelResults(object):
"""
Results of estimated GLM and diagnostics.
Parameters
----------
results : GLM object
Pointer to GLMResults object with estimated parameters
and diagnostics.
Attributes
----------
model : GLM Object
Points to model object for which parameters have been
estimated. May contain additional diagnostics.
y : array
n*1, dependent variable.
X : array
n*k, independent variable, including constant.
family : string
Model type: 'Gaussian', 'Poisson', 'Logistic'
n : integer
Number of observations
k : integer
Number of independent variables
df_model : float
k-1, where k is the number of variables (including
intercept)
df_residual : float
observations minus variables (n-k)
routine.
params : array
k*1, estimated beta coefficients
yhat : array
n*1, predicted value of y (i.e., fittedvalues)
cov_params : array
Variance covariance matrix (kxk) of betas
std_err : array
k*1, standard errors of betas
pvalues : array
k*1, two-tailed pvalues of parameters
tvalues : array
k*1, the tvalues of the standard errors
deviance : float
value of the deviance function evaluated at params;
see family.py for distribution-specific deviance
llf : float
value of the loglikelihood function evaluated at params;
see family.py for distribution-specific loglikelihoods
llnull : float
value of the loglikelihood function evaluated with only an
intercept; see family.py for distribution-specific
loglikelihoods
AIC : float
Akaike information criterion
resid : array
response residuals; defined as y-mu
resid_dev : array
k x 1, residual deviance of model
D2 : float
percentage of explained deviance
adj_D2 : float
adjusted percentage of explained deviance
pseudoR2 : float
McFadden's pseudo R2 (coefficient of determination)
adj_pseudoR2 : float
adjusted McFadden's pseudo R2
"""
def __init__(self, results):
self.y = results.y
self.X = results.X
self.family = results.family
self.params = results.params
self.AIC = results.aic
self.df_model = results.df_model
self.df_resid = results.df_resid
self.llf = results.llf
self.llnull = results.llnull
self.yhat = results.mu
self.deviance = results.deviance
self.n = results.n
self.k = results.k
self.resid = results.resid_response
self.resid_dev = results.resid_deviance
self.cov_params = results.cov_params()
self.std_err = results.bse
self.pvalues = results.pvalues
self.tvalues = results.tvalues
self.D2 = results.D2
self.adj_D2 = results.adj_D2
self.pseudoR2 = results.pseudoR2
self.adj_pseudoR2 = results.adj_pseudoR2
self.model = results
|
twitter/pants | refs/heads/master | contrib/go/src/python/pants/contrib/go/targets/go_local_source.py | 1 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import re
from pants.base.exceptions import TargetDefinitionException
from pants.base.payload import Payload
from pants.build_graph.address import Address
from pants.util.memo import memoized_property
from pants.contrib.go.targets.go_target import GoTarget
class GoLocalSource(GoTarget):
@classmethod
def is_go_source(cls, path):
"""Returns `True` if the file at the given `path` is a go source file."""
return path.endswith('.go') and os.path.isfile(path)
@classmethod
def local_import_path(cls, source_root, address):
"""Returns the Go import path for the given address housed under the given source root.
:param string source_root: The path of the source root the address is found within.
:param address: The target address of a GoLocalSource target.
:type: :class:`pants.build_graph.address.Address`
:raises: `ValueError` if the address does not reside within the source root.
"""
return cls.package_path(source_root, address.spec_path)
@classmethod
def create(cls, parse_context, **kwargs):
if 'name' in kwargs:
raise TargetDefinitionException(Address(parse_context.rel_path, kwargs['name']).spec,
'A {} does not accept a name; instead, the name is taken '
'from the BUILD file location.'.format(cls.alias()))
name = os.path.basename(parse_context.rel_path)
if 'sources' in kwargs:
raise TargetDefinitionException(Address(parse_context.rel_path, name).spec,
'A {} does not accept sources; instead, it always globs all '
'the *.go sources in the BUILD file\'s '
'directory.'.format(cls.alias()))
parse_context.create_object(cls, type_alias=cls.alias(), name=name, **kwargs)
def __init__(self, address=None, payload=None, sources=None, **kwargs):
payload = payload or Payload()
payload.add_fields({
'sources': self.create_sources_field(sources=sources,
sources_rel_path=address.spec_path,
key_arg='sources'),
})
super(GoLocalSource, self).__init__(address=address, payload=payload, **kwargs)
@property
def import_path(self):
"""The import path as used in import statements in `.go` source files."""
return self.local_import_path(self.target_base, self.address)
# From `go help test`, ignore files beginning with "_" or ".", but otherwise match the glob
# "*_test.go".
_test_file_regexp = re.compile(r'^[^_\.].*_test\.go')
@classmethod
def _is_test_file(cls, src_path):
base = os.path.basename(src_path)
return re.match(cls._test_file_regexp, base) is not None
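# Examples: "foo_test.go" is treated as a test file, while "_foo_test.go" and
# ".hidden_test.go" are ignored, matching the `go help test` convention above.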
@memoized_property
def has_tests(self):
return any(self._is_test_file(src) for src in self.payload.sources.source_paths)
|
Kotaimen/georest | refs/heads/develop | georest/geo/__init__.py | 1 | # -*- encoding: utf-8 -*-
__author__ = 'kotaimen'
__date__ = '5/29/14'
"""
georest.geo
~~~~~~~~~~~
This package provides a GeoJson-like Feature data model.
Various python geo packages:
- Geometry Engine:
- shapely
- geos
- osgeo.ogr
- django.contrib.gis.geos
- Coordinate Reference System:
- pyproj
- django.contrib.gis.gdal
- osgeo.osr
- GeoJson IO:
- json
- geojson
- ujson
- yajl
- Feature Abstraction:
- geojson.Feature
- osgeo.ogr.Feature
Note on packages (after a lot of painful research):
- shapely: good geometry abstraction, fast, much better API than the
official python binding, no out-of-box CRS support,
GeometryCollection support not complete.
- geos/osgeo.ogr/osgeo.osr: official binding of c++ interface, powerful,
too complex, not pythonic at all, requires
convert `GEOSGeometry` to `OGRGeometry` to do
coordinate transform, slow GeoJson
serialization (load/dump json...).
- django.contrib.gis: very nice python bindings, still requires converting
geometry for CRS transform, and it feels strange
to use `django` in a `Flask` project ...
(used in Mark1, doesn't require python-gdal)
- geojson: Feature abstraction is what we want but uses `simplejson` for
serialization which is slow.
- ujson: Fast, stable, not as many options as standard library `json`, and
does not preserve floating point xxx (smaller dump result)
- yajl: Promising but crashes interpreter ...slower than `ujson` anyway.
- pyshape: Very slow (10x-20x) compared to `osgeo.ogr.DataSource`,
can't read a lot of shapefiles, implemented in pure python.
- pyproj: Very weird abstraction compared to `osgeo.osr`, doesn't require
`python-gdal` and `gdal`, supports transforming coordinates without
loading geometry from `geos` into `gdal`. Note it doesn't use the
standard `gdal` projection database and doesn't link to libgdal.
"""
from .exceptions import GeoException
from .key import Key
from .metadata import Metadata
from .spatialref import SpatialReference
from .geometry import Geometry
from .feature import Feature
from .operations import *
from .import jsonhelper
def _describe():
import ujson
import shapely
import shapely.geos
import shapely.speedups
import shapely.geometry
import pyproj
return {
'json': 'ujson-%s' % ujson.__version__,
'geometry': 'shapely-%s' % shapely.__version__,
'geometry_engine': 'GEOS %d.%d.%d' % shapely.geos.geos_version,
'operation_speedups': shapely.speedups.available,
'proj': 'pyproj-%s' % pyproj.__version__,
'proj_data': '%s' % pyproj.pyproj_datadir,
}
# cache the _describe call since its data is static
_description = _describe()
del _describe
def describe():
global _description
return _description
|
tantexian/sps-2014-12-4 | refs/heads/master | sps/tests/__init__.py | 2 | # Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables tests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# Set up logging to output debugging
import logging
logger = logging.getLogger()
hdlr = logging.FileHandler('run_tests.log', 'w')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
|
alfa-addon/addon | refs/heads/master | plugin.video.alfa/lib/quasar/quasar/osarch.py | 2 | import xbmc
import sys
import platform
def get_platform():
build = xbmc.getInfoLabel("System.BuildVersion")
kodi_version = int(build.split()[0][:2])
ret = {
"auto_arch": sys.maxsize > 2 ** 32 and "64-bit" or "32-bit",
"arch": sys.maxsize > 2 ** 32 and "x64" or "x86",
"os": "",
"version": platform.release(),
"kodi": kodi_version,
"build": build
}
if xbmc.getCondVisibility("system.platform.android"):
ret["os"] = "android"
if "arm" in platform.machine() or "aarch" in platform.machine():
ret["arch"] = "arm"
if "64" in platform.machine() and ret["auto_arch"] == "64-bit":
ret["arch"] = "arm"
#ret["arch"] = "x64" #The binary is corrupted in install package
elif xbmc.getCondVisibility("system.platform.linux"):
ret["os"] = "linux"
if "aarch" in platform.machine() or "arm64" in platform.machine():
if xbmc.getCondVisibility("system.platform.linux.raspberrypi"):
ret["arch"] = "armv7"
elif ret["auto_arch"] == "32-bit":
ret["arch"] = "armv7"
elif ret["auto_arch"] == "64-bit":
ret["arch"] = "arm64"
elif platform.architecture()[0].startswith("32"):
ret["arch"] = "arm"
else:
ret["arch"] = "arm64"
elif "armv7" in platform.machine():
ret["arch"] = "armv7"
elif "arm" in platform.machine():
ret["arch"] = "arm"
elif xbmc.getCondVisibility("system.platform.xbox"):
ret["os"] = "windows"
ret["arch"] = "x64"
elif xbmc.getCondVisibility("system.platform.windows"):
ret["os"] = "windows"
if platform.machine().endswith('64'):
ret["arch"] = "x64"
elif xbmc.getCondVisibility("system.platform.osx"):
ret["os"] = "darwin"
ret["arch"] = "x64"
elif xbmc.getCondVisibility("system.platform.ios"):
ret["os"] = "ios"
ret["arch"] = "arm"
return ret
PLATFORM = get_platform()
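# Illustrative result on a 64-bit desktop Linux box running Kodi 18 (values are
# examples, not captured output):
#   {'auto_arch': '64-bit', 'arch': 'x64', 'os': 'linux', 'version': '4.15.0',
#    'kodi': 18, 'build': '18.5 Git:20191123-...'}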
|
wbc2010/django1.2.5 | refs/heads/master | django1.2.5/django/conf/locale/en_GB/formats.py | 80 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
DATE_FORMAT = 'N j, Y' # 'Oct. 25, 2006'
TIME_FORMAT = 'P' # '2:30 pm'
DATETIME_FORMAT = 'N j, Y, P' # 'Oct. 25, 2006, 2:30 pm'
YEAR_MONTH_FORMAT = 'F Y' # 'October 2006'
MONTH_DAY_FORMAT = 'F j' # 'October 25'
SHORT_DATE_FORMAT = 'd/m/Y' # '25/10/2006'
SHORT_DATETIME_FORMAT = 'd/m/Y P' # '25/10/2006 2:30 pm'
FIRST_DAY_OF_WEEK = 0 # Sunday
DATE_INPUT_FORMATS = (
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
'%Y-%m-%d', # '2006-10-25'
# '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
# '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
# '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
# '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
|
leoliujie/odoo | refs/heads/8.0 | addons/website/tests/test_crawl.py | 251 | # -*- coding: utf-8 -*-
import logging
import urlparse
import time
import lxml.html
import openerp
import re
_logger = logging.getLogger(__name__)
class Crawler(openerp.tests.HttpCase):
""" Test suite crawling an openerp CMS instance and checking that all
internal links lead to a 200 response.
If a username and a password are provided, authenticates the user before
starting the crawl
"""
at_install = False
post_install = True
def crawl(self, url, seen=None, msg=''):
if seen is None:
seen = set()
url_slug = re.sub(r"[/](([^/=?&]+-)?[0-9]+)([/]|$)", '/<slug>/', url)
url_slug = re.sub(r"([^/=?&]+)=[^/=?&]+", '\g<1>=param', url_slug)
if url_slug in seen:
return seen
else:
seen.add(url_slug)
_logger.info("%s %s", msg, url)
r = self.url_open(url)
code = r.getcode()
self.assertIn( code, xrange(200, 300), "%s Fetching %s returned error response (%d)" % (msg, url, code))
if r.info().gettype() == 'text/html':
doc = lxml.html.fromstring(r.read())
for link in doc.xpath('//a[@href]'):
href = link.get('href')
parts = urlparse.urlsplit(href)
# href with any fragment removed
href = urlparse.urlunsplit((
parts.scheme,
parts.netloc,
parts.path,
parts.query,
''
))
# FIXME: handle relative link (not parts.path.startswith /)
if parts.netloc or \
not parts.path.startswith('/') or \
parts.path == '/web' or\
parts.path.startswith('/web/') or \
parts.path.startswith('/en_US/') or \
(parts.scheme and parts.scheme not in ('http', 'https')):
continue
self.crawl(href, seen, msg)
return seen
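# Examples of the slug/param normalisation above (URLs are made up):
#   /shop/product-42/   -> /shop/<slug>/
#   /page?id=7&lang=en  -> /page?id=param&lang=param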
def test_10_crawl_public(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
seen = self.crawl('/', msg='Anonymous Coward')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "public crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request, ", count, duration, sql, duration/count, float(sql)/count)
def test_20_crawl_demo(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
self.authenticate('demo', 'demo')
seen = self.crawl('/', msg='demo')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "demo crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
def test_30_crawl_admin(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
self.authenticate('admin', 'admin')
seen = self.crawl('/', msg='admin')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "admin crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
|
lckung/spark-ec2 | refs/heads/branch-1.5 | launch-script/lib/boto-2.34.0/boto/vpc/dhcpoptions.py | 170 | # Copyright (c) 2009-2010 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents a DHCP Options set
"""
from boto.ec2.ec2object import TaggedEC2Object
class DhcpValueSet(list):
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'value':
self.append(value)
class DhcpConfigSet(dict):
def startElement(self, name, attrs, connection):
if name == 'valueSet':
if self._name not in self:
self[self._name] = DhcpValueSet()
return self[self._name]
def endElement(self, name, value, connection):
if name == 'key':
self._name = value
class DhcpOptions(TaggedEC2Object):
def __init__(self, connection=None):
super(DhcpOptions, self).__init__(connection)
self.id = None
self.options = None
def __repr__(self):
return 'DhcpOptions:%s' % self.id
def startElement(self, name, attrs, connection):
retval = super(DhcpOptions, self).startElement(name, attrs, connection)
if retval is not None:
return retval
if name == 'dhcpConfigurationSet':
self.options = DhcpConfigSet()
return self.options
def endElement(self, name, value, connection):
if name == 'dhcpOptionsId':
self.id = value
else:
setattr(self, name, value)
|
ifxit/nidhogg | refs/heads/develop | nidhogg/clustermode.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from time import sleep
import nidhogg.core # this style needed for patching
from .compatible import ACE, CifsShare, SnapmirrorDestinationInfo, Snapshot, Volume
from .core import Nidhogg, NidhoggException
try:
# py2
from backports.functools_lru_cache import lru_cache
except ImportError: # pragma: no cover
# py3
from functools import lru_cache
logger = logging.getLogger(__name__)
#: maximum records that can be retrieved via NETAPP API
MAX_RECORDS = 2 ** 16
class ClusterMode(Nidhogg):
"""This class implements cluster-mode filer specific API calls."""
ACL_FULL_CONTROL = "full_control" #: ACL permission constant for full control
ACL_READ = "read" #: ACL permission constant for read access
ACL_CHANGE = "change" #: ACL permission constant for write access
ACL_NO_ACCESS = "no_access" #: ACL permission constant for denying access
ACL_PERMISSIONS = [
ACL_FULL_CONTROL,
ACL_READ,
ACL_CHANGE,
ACL_NO_ACCESS
] #: list of all permission constants
def _item_to_volume(self, item):
return Volume(
name=item['volume-id-attributes']['name'],
# RW for read-write, DP for data-protection, DC for data-cache, LS for load-sharing
snapable=item['volume-id-attributes']['type'] == "rw",
state=item['volume-state-attributes']['state'] if 'volume-state-attributes' in item and 'state' in item['volume-state-attributes'] else None,
size_total=float(item['volume-space-attributes']['size-total']) if 'volume-space-attributes' in item and 'size-total' in item['volume-space-attributes'] else None,
size_used=float(item['volume-space-attributes']['size-used']) if 'volume-space-attributes' in item and 'size-used' in item['volume-space-attributes'] else None,
size_available=float(item['volume-space-attributes']['size-available']) if 'volume-space-attributes' in item and 'size-available' in item['volume-space-attributes'] else None,
files_used=float(item['volume-inode-attributes']['files-used']) if 'volume-inode-attributes' in item and 'files-used' in item['volume-inode-attributes'] else None,
files_total=float(item['volume-inode-attributes']['files-total']) if 'volume-inode-attributes' in item and 'files-total' in item['volume-inode-attributes'] else None,
filer=self.vserver_fqdn,
)
def _item_to_ace(self, item):
return ACE(
share_name=item['share'],
user_or_group=item['user-or-group'],
permission=item['permission'],
is_group=None, # not used
user_group_type=item['user-group-type'] if "user-group-type" in item else None
)
def _item_to_snapmirrordestinationinfo(self, item):
return SnapmirrorDestinationInfo(
destination_location=item["destination-location"],
destination_volume=item['destination-volume'],
destination_vserver=item['destination-vserver'],
is_constituent=item['is-constituent'],
policy_type=item['policy-type'],
relationship_group_type=item['relationship-group-type'],
relationship_id=item['relationship-id'],
relationship_status=item['relationship-status'],
relationship_type=item['relationship-type'],
source_location=item["source-location"],
source_volume=item['source-volume'],
source_volume_node=item['source-volume-node'],
source_vserver=item['source-vserver']
)
#
# API FUNCTIONS
#
def list_qtrees(self, volume, max_records=MAX_RECORDS):
"""Return a list of qtrees of type :class:`~nidhogg.compatible.QTree`.
:param volume: name of the volume
:type volume: str
:param max_records: limit returned records
:type max_records: int
:return: list of qtrees
:rtype: list of :class:`~nidhogg.compatible.QTree` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict(
query=dict(
qtree_info=dict(
volume=volume
)
),
max_records=max_records
)
results = self.qtree_list_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
self._item_to_qtree(item)
for item in results["attributes-list"]["qtree-info"]
]
elif int(results["num-records"]) == 1:
return [
self._item_to_qtree(results["attributes-list"]["qtree-info"])
]
logger.warning("list_qtrees: no entries found")
return []
@lru_cache(maxsize=100)
def list_volumes(self, max_records=MAX_RECORDS):
"""Return a list of volumes of type :class:`~nidhogg.compatible.Volume`.
:param max_records: limit returned records
:type max_records: int
:return: list of volumes
:rtype: list of :class:`~nidhogg.compatible.Volume` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict(
max_records=max_records
)
results = self.volume_get_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
self._item_to_volume(item)
for item in results["attributes-list"]["volume-attributes"]
]
elif int(results["num-records"]) == 1:
return [
self._item_to_volume(results["attributes-list"]["volume-attributes"])
]
logger.warning("list_volumes: no entries found")
return []
@lru_cache(maxsize=100)
def volume_info(self, volume):
"""Return basic information about the volume.
:param volume: name of the volume
:type volume: str
:return: volume
:rtype: :class:`~nidhogg.compatible.Volume`
:raises NidhoggException: if an error occurs
"""
opts = dict(
query=dict(
volume_id_attributes=dict(
name=volume
)
)
)
return self._item_to_volume(
self.volume_get_iter(**opts)["netapp"]["results"]["attributes-list"]["volume-attributes"])
def list_snapshots(self, target_name, max_records=MAX_RECORDS):
"""Return list of snapshots for given volume.
:param target_name: name of the volume
:type target_name: str
:param max_records: limit returned records
:type max_records: int
:return: list of snapshots
:rtype: list of :class:`~nidhogg.compatible.Snapshot` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict(
query=dict(
snapshot_info=dict(
volume=target_name
)
),
max_records=max_records
)
results = self.snapshot_get_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
Snapshot(name=item['name'])
for item in results["attributes-list"]["snapshot-info"]
]
elif int(results["num-records"]) == 1:
return [
Snapshot(name=results["attributes-list"]["snapshot-info"]['name'])
]
logger.warning("list_snapshots: no entries found")
return []
def get_quota(self, volume, qtree, max_records=MAX_RECORDS):
"""Return the quota of the specified qtree on the given volume.
:param volume: name of the volume
:type volume: str
:param qtree: name of the qtree
:type qtree: str
:param max_records: limit returned records
:type max_records: int
:return: quota
:rtype: :class:`~nidhogg.compatible.Quota` or empty dict
:raises NidhoggException: if an error occurs
"""
opts = dict(
query=dict(
quota_entry=dict(
quota_target="/vol/{0}/{1}".format(volume, qtree)
)
),
max_records=max_records
)
results = self.quota_list_entries_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) == 1:
return self._item_to_quota(results['attributes-list']['quota-entry'])
logger.warning("get_quota: no entries found")
return {}
def list_quotas(self, volume, max_records=MAX_RECORDS):
"""Return a list of quota reports of the specified volume.
:param volume: name of the volume
:type volume: str
:param max_records: limit returned records
:type max_records: int
:return: list of quota reports
:rtype: :class:`~nidhogg.compatible.QuotaReport` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict(
query=dict(
quota=dict(
volume=volume
)
),
max_records=max_records
)
results = self.quota_report_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
self._item_to_quota_report(item)
for item in results['attributes-list']['quota']
]
elif int(results["num-records"]) == 1:
return [
self._item_to_quota_report(results['attributes-list']['quota'])
]
logger.warning("list_quotas: no entries found")
return []
def list_cifs_shares(self):
"""List all cifs shares.
:return: list of cifs shares
:rtype: list of :class:`~nidhogg.compatible.CifsShare` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict(
max_records=2 ** 32 - 1
)
results = self.cifs_share_get_iter(**opts)['netapp']['results']
if int(results['num-records']) > 1:
return [
CifsShare(path=item['path'], share_name=item['share-name'])
for item in results['attributes-list']['cifs-share']
]
elif int(results['num-records']) == 1:
return [
CifsShare(path=results['attributes-list']['cifs-share']['path'], share_name=results['attributes-list']['cifs-share']['share-name'])
]
logger.warning("list_cifs_shares: cifs shares found")
return []
def create_cifs_share(self, volume, qtree, share_name, group_name=None, comment=None, umask="007", vscan_fileop_profile="standard", share_properties=None):
"""Create a cifs share.
:param volume: name of the volume
:type volume: str
:param qtree: name of the qtree
:type qtree: str
:param share_name: name of the share
:type share_name: str
:param group_name: force group name if provided (supported by cluster-mode filers with ontapi >= 1.30)
:type group_name: str
:param comment: description of the share
:type comment: str
:param umask: file permission umask
:type umask: str
:param vscan_fileop_profile: vscan-fileop-profile virus scan option (no_scan, standard, strict, writes_only)
:type vscan_fileop_profile: str
:param share_properties: list of share properties to set
:type share_properties: list of strings
:raises NidhoggException: if an error occurs
"""
opts = dict(
dir_umask=umask,
file_umask=umask,
path="/vol/{0}/{1}".format(volume, qtree),
share_name=share_name,
vscan_fileop_profile=vscan_fileop_profile
)
if group_name and self.has_forcegroup:
opts['force_group_for_create'] = group_name
if comment:
opts['comment'] = comment
if share_properties:
opts['share_properties'] = share_properties
self.cifs_share_create(**opts)
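# Usage sketch (assumes `filer` is an authenticated ClusterMode instance and the
# volume/qtree already exist; names are made up):
#   filer.create_cifs_share("vol1", "project1", "project1$",
#                           comment="project share", umask="007")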
def set_cifs_acl(self, share_name, user="everyone", right=ACL_READ, set_group_rights=None):
"""Set a single ACL for the specifed share.
:param share_name: name of the share
:type share_name: str
:param user: name of a user or group
:type user: str
:param right: right to be set, value must be one of :py:const:`~ACL_PERMISSIONS`
:type right: str
:param set_group_rights: if true, *user* param specifies a unix group name; if false, *user*
param specifies a unix user name; if not defined, *user* param specifies a windows name
:type set_group_rights: bool
:raises NidhoggException: if an error occurs
:raises NidhoggException: if wrong right was set
"""
# check permissions
if right not in self.ACL_PERMISSIONS:
raise NidhoggException("Permission {0} not in {1}.".format(right, self.ACL_PERMISSIONS))
# set user_group_type
if set_group_rights is None:
user_group_type = "windows"
elif set_group_rights is True:
user_group_type = "unix_group"
else:
user_group_type = "unix_user"
opts = dict(
permission=right,
share=share_name,
user_or_group=user,
user_group_type=user_group_type
)
self.cifs_share_access_control_create(**opts)
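# Usage sketch (hypothetical share, user and group names; `filer` is an
# authenticated ClusterMode instance):
#   filer.set_cifs_acl("project1$", user="DOMAIN\\project-team", right=filer.ACL_CHANGE)
#   filer.set_cifs_acl("project1$", user="wheel", right=filer.ACL_FULL_CONTROL,
#                      set_group_rights=True)  # unix group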
def list_cifs_acls(self, share_name, max_records=MAX_RECORDS):
"""Return ACL of the specified share.
:param share_name: name of the share
:type share_name: str
:param max_records: limit returned records
:type max_records: int
:return: list of ACEs (access control entries)
:rtype: :class:`~nidhogg.compatible.ACE` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict(
query=dict(
cifs_share_access_control=dict(
share=share_name
)
),
max_records=max_records
)
results = self.cifs_share_access_control_get_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
self._item_to_ace(item)
for item in results['attributes-list']['cifs-share-access-control']
]
elif int(results["num-records"]) == 1:
return [
self._item_to_ace(results['attributes-list']['cifs-share-access-control'])
]
logger.warning("get_cifs_acl: no acls found")
return []
def delete_cifs_acl(self, share_name, user_or_group, is_group=None):
"""Delete cifs ACL of the specified user or group.
:param share_name: name of the share
:type share_name: str
:param user_or_group: name of a user or group
:type user_or_group: str
:param is_group: not used for cluster-mode filers, specified here
to be compatible with seven-mode method signature
:type is_group: None
:raises NidhoggException: if an error occurs
"""
opts = dict(
share=share_name,
user_or_group=user_or_group
)
self.cifs_share_access_control_delete(**opts)
def delete_cifs_acls(self, share_name):
"""Remove all cifs permssions.
:param share_name: name of the share
:type share_name: str
:raises NidhoggException: if an error occurs
"""
acls = self.list_cifs_acls(share_name)
for ace in acls:
self.delete_cifs_acl(
share_name=ace["share_name"],
user_or_group=ace["user_or_group"]
)
def set_quota(self, volume, qtree, quota_in_mb=1024, wait_til_finished=True):
"""Set a quota in MiB (default = 1GiB) for the specified volume and qtree.
:param volume: name of the volume
:type volume: str
:param qtree: name of the qtree
:type qtree: str
:param quota_in_mb: quota in MiB
:type quota_in_mb: int
:param wait_til_finished: if false, do not wait for resize operation
:type wait_til_finished: bool
:raises NidhoggException: if an error occurs
:raises NidhoggException: if resize did not finish in time and we were waiting for it
:raises NidhoggException: if quotas are not enabled
"""
quota_target = "/vol/{0}/{1}".format(volume, qtree)
quota_type = "tree"
quota_in_kb = int(round(quota_in_mb * 1024))
# policy "default" must be specified for cluster-mode filers
self.quota_set_entry(
volume=volume,
qtree="",
disk_limit=quota_in_kb,
soft_disk_limit=int(round(quota_in_kb * 0.8)), # use 80% of the given quota as warn-limit
quota_target=quota_target,
quota_type=quota_type,
policy="default"
)
self.quota_resize(volume=volume)
if wait_til_finished:
for i in range(0, nidhogg.core.QUOTA_RESIZE_WAIT_CYCLES):
sleep(nidhogg.core.QUOTA_RESIZE_WAIT_TIME)
status = self.quota_status(volume=volume)["netapp"]["results"]["status"]
if status.lower() == "on":
return
# check if quotas are turned on at all
if status.lower() == "off":
raise NidhoggException("Quotas are not enabled.")
# waiting for quota resize exceeded
logger.debug("resize of {0}:/vol/{1} after setting quota for {2} did not finish".format(
self.vserver_fqdn,
volume,
qtree
))
raise NidhoggException("Quota resize did not finish in time.")
def delete_quota(self, volume, qtree):
"""Delete the quota of the specified volume and qtree.
:param volume: name of the volume
:type volume: str
:param qtree: name of the qtree
:type qtree: str
:raises NidhoggException: if an error occurs
"""
quota_target = "/vol/{0}/{1}".format(volume, qtree)
quota_type = "tree"
# policy "default" must be specified for cluster-mode filers
self.quota_delete_entry(
volume=volume,
qtree="",
quota_target=quota_target,
quota_type=quota_type,
policy="default"
)
def update_snapmirror(self, volume):
"""Trigger the snapmirror replication. You have to be connected on the destination server.
:param volume: name of snapmirror destination volume
:type volume: str
:raises NidhoggException: if an error occurs
"""
self.snapmirror_update(
destination_location="{}:{}".format(self.vserver, volume)
)
def update_snapmirror_with_snapshot(self, name, volume):
"""Trigger the snapmirror replication. You have to be connected on the destination server.
:param name: name of the source snapshot
:type name: str
:param volume: name of snapmirror destination volume
:type volume: str
:raises NidhoggException: if an error occurs
"""
self.snapmirror_update(
destination_location="{}:{}".format(self.vserver, volume),
source_snapshot=name
)
def get_snapmirror_status(self, volume=None, max_records=MAX_RECORDS):
"""Get status of snapmirror replication pairs. You have to be connected on the destination server.
If no params are provided, return all snapmirror status pairs.
:param volume: name of destination volume
:type volume: str
:return: list of all snapmirror pair status
:rtype: list of :class:`~nidhogg.compatible.SnapmirrorStatus` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict()
if volume:
opts['query'] = dict(
snapmirror_info=dict(
destination_location="{}:{}".format(self.vserver, volume)
)
)
opts['max_records'] = max_records
results = self.snapmirror_get_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
self._item_to_snapmirrorstatus(item)
for item in results["attributes-list"]["snapmirror-info"]
]
elif int(results["num-records"]) == 1:
return [
self._item_to_snapmirrorstatus(results["attributes-list"]["snapmirror-info"])
]
logger.warning("get_snapmirror_status: no entries found")
return []
def get_snapmirror_volume_status(self, *args, **kwargs):
"""Not available for cluster mode."""
raise NotImplementedError() # pragma: no cover
def create_snapshot(self, volume, name, label=None):
"""Create a snapshot with an optional label.
:param volume: name of the volume
:type volume: str
:param name: name of the snapshot
:type name: str
:param label: add a snapmirror label to snapshot
:type label: str
:raises NidhoggException: if an error occurs
"""
opts = dict()
if label:
opts['snapmirror_label'] = label
opts['volume'] = volume
opts['snapshot'] = name
self.snapshot_create(**opts)
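    # Hedged usage sketch (assumed object and snapshot names): create a labelled
    # snapshot on the source and replicate exactly that snapshot from the
    # destination side with update_snapmirror_with_snapshot() above.
    #
    #     source.create_snapshot("vol_data", "nightly_20200101", label="nightly")
    #     destination.update_snapmirror_with_snapshot("nightly_20200101", "vol_data_dst")
    #
    # Whether the label is honoured depends on the snapmirror policy in use.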
def list_snapmirror_destinations(self, volume=None, max_records=MAX_RECORDS):
"""List all snapmirror destinations. You have to be connected on the source server.
If no params are provided, return all snapmirror destinations.
:param volume: name of source volume
:type volume: str
:return: list of all snapmirror destinations
:rtype: list of :class:`~nidhogg.compatible.SnapmirrorDestinationInfo` or empty list
:raises NidhoggException: if an error occurs
"""
opts = dict()
if volume:
opts['query'] = dict(
snapmirror_destination_info=dict(
source_location="{}:{}".format(self.vserver, volume)
)
)
opts['max_records'] = max_records
results = self.snapmirror_get_destination_iter(**opts)["netapp"]["results"]
if int(results["num-records"]) > 1:
return [
self._item_to_snapmirrordestinationinfo(item)
for item in results["attributes-list"]["snapmirror-destination-info"]
]
elif int(results["num-records"]) == 1:
return [
self._item_to_snapmirrordestinationinfo(results["attributes-list"]["snapmirror-destination-info"])
]
logger.warning("list_snapmirror_destinations: no entries found")
return []
|
druuu/django | refs/heads/master | tests/managers_regress/tests.py | 264 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.template import Context, Template
from django.test import TestCase, override_settings
from django.utils.encoding import force_text
from .models import (
AbstractBase1, AbstractBase2, AbstractBase3, Child1, Child2, Child3,
Child4, Child5, Child6, Child7, RelatedModel, RelationModel,
)
class ManagersRegressionTests(TestCase):
def test_managers(self):
Child1.objects.create(name='fred', data='a1')
Child1.objects.create(name='barney', data='a2')
Child2.objects.create(name='fred', data='b1', value=1)
Child2.objects.create(name='barney', data='b2', value=42)
Child3.objects.create(name='fred', data='c1', comment='yes')
Child3.objects.create(name='barney', data='c2', comment='no')
Child4.objects.create(name='fred', data='d1')
Child4.objects.create(name='barney', data='d2')
Child5.objects.create(name='fred', comment='yes')
Child5.objects.create(name='barney', comment='no')
Child6.objects.create(name='fred', data='f1', value=42)
Child6.objects.create(name='barney', data='f2', value=42)
Child7.objects.create(name='fred')
Child7.objects.create(name='barney')
self.assertQuerysetEqual(Child1.manager1.all(), ["<Child1: a1>"])
self.assertQuerysetEqual(Child1.manager2.all(), ["<Child1: a2>"])
self.assertQuerysetEqual(Child1._default_manager.all(), ["<Child1: a1>"])
self.assertQuerysetEqual(Child2._default_manager.all(), ["<Child2: b1>"])
self.assertQuerysetEqual(Child2.restricted.all(), ["<Child2: b2>"])
self.assertQuerysetEqual(Child3._default_manager.all(), ["<Child3: c1>"])
self.assertQuerysetEqual(Child3.manager1.all(), ["<Child3: c1>"])
self.assertQuerysetEqual(Child3.manager2.all(), ["<Child3: c2>"])
# Since Child6 inherits from Child4, the corresponding rows from f1 and
# f2 also appear here. This is the expected result.
self.assertQuerysetEqual(Child4._default_manager.order_by('data'), [
"<Child4: d1>",
"<Child4: d2>",
"<Child4: f1>",
"<Child4: f2>"
]
)
self.assertQuerysetEqual(Child4.manager1.all(), [
"<Child4: d1>",
"<Child4: f1>"
],
ordered=False
)
self.assertQuerysetEqual(Child5._default_manager.all(), ["<Child5: fred>"])
self.assertQuerysetEqual(Child6._default_manager.all(), ["<Child6: f1>"])
self.assertQuerysetEqual(Child7._default_manager.order_by('name'), [
"<Child7: barney>",
"<Child7: fred>"
]
)
def test_abstract_manager(self):
# Accessing the manager on an abstract model should
# raise an attribute error with an appropriate message.
        # This error message isn't ideal, but if the model is abstract then
        # a lot of the class instantiation logic isn't invoked; if the
        # manager is implied, then we don't get a hook to install the
        # error-raising manager.
msg = "type object 'AbstractBase3' has no attribute 'objects'"
with self.assertRaisesMessage(AttributeError, msg):
AbstractBase3.objects.all()
def test_custom_abstract_manager(self):
        # Accessing the manager on an abstract model with a custom
# manager should raise an attribute error with an appropriate
# message.
msg = "Manager isn't available; AbstractBase2 is abstract"
with self.assertRaisesMessage(AttributeError, msg):
AbstractBase2.restricted.all()
def test_explicit_abstract_manager(self):
# Accessing the manager on an abstract model with an explicit
# manager should raise an attribute error with an appropriate
# message.
msg = "Manager isn't available; AbstractBase1 is abstract"
with self.assertRaisesMessage(AttributeError, msg):
AbstractBase1.objects.all()
@override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
def test_swappable_manager(self):
# The models need to be removed after the test in order to prevent bad
# interactions with the flush operation in other tests.
_old_models = apps.app_configs['managers_regress'].models.copy()
try:
class SwappableModel(models.Model):
class Meta:
swappable = 'TEST_SWAPPABLE_MODEL'
# Accessing the manager on a swappable model should
# raise an attribute error with a helpful message
msg = (
"Manager isn't available; 'managers_regress.SwappableModel' "
"has been swapped for 'managers_regress.Parent'"
)
with self.assertRaisesMessage(AttributeError, msg):
SwappableModel.objects.all()
finally:
apps.app_configs['managers_regress'].models = _old_models
apps.all_models['managers_regress'] = _old_models
apps.clear_cache()
@override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
def test_custom_swappable_manager(self):
# The models need to be removed after the test in order to prevent bad
# interactions with the flush operation in other tests.
_old_models = apps.app_configs['managers_regress'].models.copy()
try:
class SwappableModel(models.Model):
stuff = models.Manager()
class Meta:
swappable = 'TEST_SWAPPABLE_MODEL'
# Accessing the manager on a swappable model with an
# explicit manager should raise an attribute error with a
# helpful message
msg = (
"Manager isn't available; 'managers_regress.SwappableModel' "
"has been swapped for 'managers_regress.Parent'"
)
with self.assertRaisesMessage(AttributeError, msg):
SwappableModel.stuff.all()
finally:
apps.app_configs['managers_regress'].models = _old_models
apps.all_models['managers_regress'] = _old_models
apps.clear_cache()
@override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
def test_explicit_swappable_manager(self):
# The models need to be removed after the test in order to prevent bad
# interactions with the flush operation in other tests.
_old_models = apps.app_configs['managers_regress'].models.copy()
try:
class SwappableModel(models.Model):
objects = models.Manager()
class Meta:
swappable = 'TEST_SWAPPABLE_MODEL'
# Accessing the manager on a swappable model with an
# explicit manager should raise an attribute error with a
# helpful message
msg = (
"Manager isn't available; 'managers_regress.SwappableModel' "
"has been swapped for 'managers_regress.Parent'"
)
with self.assertRaisesMessage(AttributeError, msg):
SwappableModel.objects.all()
finally:
apps.app_configs['managers_regress'].models = _old_models
apps.all_models['managers_regress'] = _old_models
apps.clear_cache()
def test_regress_3871(self):
related = RelatedModel.objects.create()
relation = RelationModel()
relation.fk = related
relation.gfk = related
relation.save()
relation.m2m.add(related)
t = Template('{{ related.test_fk.all.0 }}{{ related.test_gfk.all.0 }}{{ related.test_m2m.all.0 }}')
self.assertEqual(
t.render(Context({'related': related})),
''.join([force_text(relation.pk)] * 3),
)
def test_field_can_be_called_exact(self):
# Make sure related managers core filters don't include an
# explicit `__exact` lookup that could be interpreted as a
# reference to a foreign `exact` field. refs #23940.
related = RelatedModel.objects.create(exact=False)
relation = related.test_fk.create()
self.assertEqual(related.test_fk.get(), relation)
|
orlenko/plei | refs/heads/master | mezzanine/generic/templatetags/rating_tags.py | 7 |
from mezzanine import template
from mezzanine.generic.forms import RatingForm
register = template.Library()
@register.inclusion_tag("generic/includes/rating.html", takes_context=True)
def rating_for(context, obj):
"""
Provides a generic context variable name for the object that
ratings are being rendered for, and the rating form.
"""
context["rating_object"] = context["rating_obj"] = obj
context["rating_form"] = RatingForm(context["request"], obj)
ratings = context["request"].COOKIES.get("mezzanine-rating", "")
rating_string = "%s.%s" % (obj._meta, obj.pk)
context["rated"] = (rating_string in ratings)
rating_name = obj.get_ratingfield_name()
for f in ("average", "count", "sum"):
context["rating_" + f] = getattr(obj, "%s_%s" % (rating_name, f))
return context
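# Hedged template usage sketch (not part of the original file): in a template
# the inclusion tag above would typically be loaded and rendered like
#
#     {% load rating_tags %}
#     {% rating_for blog_post %}
#
# where "blog_post" stands for whatever rateable object is in the context.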
|
SciLifeLab/TACA | refs/heads/master | taca/log/__init__.py | 3 | """ TACA logging module for external scripts
"""
import logging
# get root logger
ROOT_LOG = logging.getLogger()
ROOT_LOG.setLevel(logging.INFO)
# Console logger
stream_handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
stream_handler.setFormatter(formatter)
ROOT_LOG.addHandler(stream_handler)
LOG_LEVELS = {
'ERROR': logging.ERROR,
'WARN': logging.WARN,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
def init_logger_file(log_file, log_level='INFO'):
""" Append a FileHandler to the root logger.
:param str log_file: Path to the log file
:param str log_level: Logging level
"""
    ROOT_LOG.handlers = []
    log_level = LOG_LEVELS.get(log_level, logging.INFO)
ROOT_LOG.setLevel(log_level)
file_handle = logging.FileHandler(log_file)
file_handle.setLevel(log_level)
file_handle.setFormatter(formatter)
ROOT_LOG.addHandler(file_handle)
ROOT_LOG.addHandler(stream_handler)
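# Hedged usage sketch (not part of the original module): an external script
# would typically attach a file handler before doing any work, e.g.
#
#     import taca.log
#     taca.log.init_logger_file("/tmp/taca.log", log_level="DEBUG")
#
# The log file path is an illustrative assumption.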
|
samupl/simpleERP | refs/heads/master | apps/contacts/migrations/0008_auto_20181207_1436.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2018-12-07 14:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0007_auto_20181207_1433'),
]
operations = [
migrations.AlterField(
model_name='companybankaccount',
name='slug',
field=models.CharField(max_length=16, unique=True, verbose_name='Short name'),
),
]
|
pslacerda/GromacsWrapper | refs/heads/develop | numkit/fitting.py | 1 | # numkit --- data fitting
# Copyright (c) 2010 Oliver Beckstein <[email protected]>
# Released under the "Modified BSD Licence" (see COPYING).
"""
:mod:`numkit.fitting` --- Fitting data
======================================
The module contains functions to do least square fits of functions of
one variable f(x) to data points (x,y).
Example
-------
For example, to fit a un-normalized Gaussian with :class:`FitGauss` to
data distributed with mean 5.0 and standard deviation 3.0::
from numkit.fitting import FitGauss
import numpy, numpy.random
# generate suitably noisy data
mu, sigma = 5.0, 3.0
Y,edges = numpy.histogram(sigma*numpy.random.randn(10000), bins=100, density=True)
X = 0.5*(edges[1:]+edges[:-1]) + mu
g = FitGauss(X, Y)
print(g.parameters)
# [ 4.98084541 3.00044102 1.00069061]
print(numpy.array([mu, sigma, 1]) - g.parameters)
# [ 0.01915459 -0.00044102 -0.00069061]
import matplotlib.pyplot as plt
plt.plot(X, Y, 'ko', label="data")
plt.plot(X, g.fit(X), 'r-', label="fit")
.. figure:: /numkit/FitGauss.png
:scale: 40 %
:alt: Gaussian fit with data points
A Gaussian (red) was fit to the data points (black circles) with
the :class:`numkit.fitting.FitGauss` class.
If the initial parameters for the least square optimization do not
lead to a solution then one can provide customized starting values in
the *parameters* keyword argument::
g = FitGauss(X, Y, parameters=[10, 1, 1])
The *parameters* have different meaning for the different fit
functions; the documentation for each function shows them in the
context of the fit function.
Creating new fit functions
--------------------------
New fit function classes can be derived from :class:`FitFunc`. The
documentation and the methods :meth:`FitFunc.f_factory` and
:meth:`FitFunc.initial_values` must be overridden. For example, the
class :class:`FitGauss` is implemented as ::
class FitGauss(FitFunc):
'''y = f(x) = p[2] * 1/sqrt(2*pi*p[1]**2) * exp(-(x-p[0])**2/(2*p[1]**2))'''
def f_factory(self):
def fitfunc(p,x):
return p[2] * 1.0/(p[1]*numpy.sqrt(2*numpy.pi)) * numpy.exp(-(x-p[0])**2/(2*p[1]**2))
return fitfunc
def initial_values(self):
return [0.0,1.0,0.0]
The function to be fitted is defined in :func:`fitfunc`. The
parameters are accessed as ``p[0]``, ``p[1]``, ... For each parameter,
a suitable initial value must be provided.
Functions and classes
---------------------
.. autofunction:: Pearson_r
.. autofunction:: linfit
.. autoclass:: FitFunc
:members:
.. autoclass:: FitLin
.. autoclass:: FitExp
.. autoclass:: FitExp2
.. autoclass:: FitGauss
"""
import numpy
import logging
logger = logging.getLogger("numkit.fitting")
def Pearson_r(x,y):
"""Pearson's r (correlation coefficient).
Pearson(x,y) --> correlation coefficient
*x* and *y* are arrays of same length.
Historical note -- Naive implementation of Pearson's r ::
Ex = scipy.stats.mean(x)
Ey = scipy.stats.mean(y)
covxy = numpy.sum((x-Ex)*(y-Ey))
r = covxy/math.sqrt(numpy.sum((x-Ex)**2)*numpy.sum((y-Ey)**2))
"""
return numpy.corrcoef(x,y)[1,0]
def linfit(x,y,dy=None):
"""Fit a straight line y = a + bx to the data in *x* and *y*.
Errors on y should be provided in dy in order to assess the
goodness of the fit and derive errors on the parameters.
linfit(x,y[,dy]) --> result_dict
Fit y = a + bx to the data in x and y by analytically minimizing
chi^2. dy holds the standard deviations of the individual y_i. If
dy is not given, they are assumed to be constant (note that in
this case Q is set to 1 and it is meaningless and chi2 is
normalised to unit standard deviation on all points!).
Returns the parameters a and b, their uncertainties sigma_a and
sigma_b, and their correlation coefficient r_ab; it also returns
the chi-squared statistic and the goodness-of-fit probability Q
(that the fit would have chi^2 this large or larger; Q < 10^-2
indicates that the model is bad --- Q is the probability that a
value of chi-square as _poor_ as the calculated statistic chi2
should occur by chance.)
:Returns: result_dict with components
intercept, sigma_intercept
a +/- sigma_a
slope, sigma_slope
b +/- sigma_b
parameter_correlation
correlation coefficient r_ab between a and b
chi_square
chi^2 test statistic
Q_fit
goodness-of-fit probability
Based on 'Numerical Recipes in C', Ch 15.2.
"""
if dy is None:
dy = []
import scipy.stats
n = len(x)
m = len(y)
if n != m:
raise ValueError("lengths of x and y must match: %s != %s" % (n, m))
try:
have_dy = (len(dy) > 0)
except TypeError:
have_dy = False
if not have_dy:
dy = numpy.ones((n),numpy.float)
x = numpy.asarray(x)
y = numpy.asarray(y)
dy = numpy.asarray(dy)
s2 = dy*dy
S = numpy.add.reduce(1/s2)
Sx = numpy.add.reduce(x/s2)
Sy = numpy.add.reduce(y/s2)
Sxx = numpy.add.reduce(x*x/s2)
Sxy = numpy.add.reduce(x*y/s2)
t = (x - Sx/S)/dy
Stt = numpy.add.reduce(t*t)
b = numpy.add.reduce(t*y/dy)/Stt
a = (Sy - Sx*b)/S
sa = numpy.sqrt((1 + (Sx*Sx)/(S*Stt))/S)
sb = numpy.sqrt(1/Stt)
covab = -Sx/(S*Stt)
r = covab/(sa*sb)
chi2 = numpy.add.reduce(((y-a-b*x)/dy)**2)
if not have_dy:
# estimate error if none were provided
sigmadata = numpy.sqrt(chi2/(n-2))
sa *= sigmadata
sb *= sigmadata
Q = 1.0
else:
Q = scipy.stats.chisqprob(chi2,n-2)
return {"intercept":a,"slope":b,
"sigma_intercept":sa,"sigma_slope":sb,
"parameter_correlation":r, "chi_square":chi2, "Q":Q}
class FitFunc(object):
"""Fit a function f to data (x,y) using the method of least squares.
The function is fitted when the object is created, using
:func:`scipy.optimize.leastsq`. One must derive from the base class
:class:`FitFunc` and override the :meth:`FitFunc.f_factory` (including
the definition of an appropriate local :func:`fitfunc` function) and
:meth:`FitFunc.initial_values` appropriately. See the examples for a
linear fit :class:`FitLin`, a 1-parameter exponential fit :class:`FitExp`,
or a 3-parameter double exponential fit :class:`FitExp2`.
The object provides two attributes
:attr:`FitFunc.parameters`
list of parameters of the fit
:attr:`FitFunc.message`
message from :func:`scipy.optimize.leastsq`
After a successful fit, the fitted function can be applied to any data (a
1D-numpy array) with :meth:`FitFunc.fit`.
"""
def __init__(self,x,y,parameters=None):
import scipy.optimize
_x = numpy.asarray(x)
_y = numpy.asarray(y)
p0 = self._get_initial_values(parameters)
fitfunc = self.f_factory()
def errfunc(p,x,y):
return fitfunc(p,x) - y # residuals
p,msg = scipy.optimize.leastsq(errfunc,p0[:],args=(_x,_y))
try:
p[0]
self.parameters = p
except (TypeError,IndexError,):
# TypeError for int p, IndexError for numpy scalar (new scipy)
self.parameters = [p]
self.message = msg
def f_factory(self):
"""Stub for fit function factory, which returns the fit function.
Override for derived classes.
"""
def fitfunc(p,x):
# return f(p,x); should be a numpy ufunc
raise NotImplementedError("base class must be extended for each fit function")
return fitfunc
def _get_initial_values(self, parameters=None):
p0 = numpy.asarray(self.initial_values())
if parameters is not None:
try:
p0[:] = parameters
except ValueError:
raise ValueError("Wrong number of custom initital values %r, should be something like %r" % (parameters, p0))
return p0
def initial_values(self):
"""List of initital guesses for all parameters p[]"""
# return [1.0, 2.0, 0.5]
raise NotImplementedError("base class must be extended for each fit function")
def fit(self,x):
"""Applies the fit to all *x* values"""
fitfunc = self.f_factory()
return fitfunc(self.parameters,numpy.asarray(x))
class FitExp(FitFunc):
"""y = f(x) = exp(-p[0]*x)"""
def f_factory(self):
def fitfunc(p,x):
return numpy.exp(-p[0]*x) # exp(-B*x)
return fitfunc
def initial_values(self):
return [1.0]
def __repr__(self):
return "<FitExp "+str(self.parameters)+">"
class FitExp2(FitFunc):
"""y = f(x) = p[0]*exp(-p[1]*x) + (1-p[0])*exp(-p[2]*x)"""
def f_factory(self):
def fitfunc(p,x):
return p[0]*numpy.exp(-p[1]*x) + (1-p[0])*numpy.exp(-p[2]*x)
return fitfunc
def initial_values(self):
return [0.5,0.1,1e-4]
def __repr__(self):
return "<FitExp2"+str(self.parameters)+">"
class FitLin(FitFunc):
"""y = f(x) = p[0]*x + p[1]"""
def f_factory(self):
def fitfunc(p,x):
return p[0]*x + p[1]
return fitfunc
def initial_values(self):
return [1.0,0.0]
def __repr__(self):
return "<FitLin"+str(self.parameters)+">"
class FitGauss(FitFunc):
"""y = f(x) = p[2] * 1/sqrt(2*pi*p[1]**2) * exp(-(x-p[0])**2/(2*p[1]**2))
Fits an un-normalized gaussian (height scaled with parameter p[2]).
* p[0] == mean $\mu$
* p[1] == standard deviation $\sigma$
* p[2] == scale $a$
"""
def f_factory(self):
def fitfunc(p,x):
return p[2] * 1.0/(p[1]*numpy.sqrt(2*numpy.pi)) * numpy.exp(-(x-p[0])**2/(2*p[1]**2))
return fitfunc
def initial_values(self):
return [0.0,1.0,0.0]
def __repr__(self):
return "<FitGauss"+str(self.parameters)+">"
|
3Jade/csbuild | refs/heads/master | csbuild/_gui.py | 1 | # coding=utf-8
import functools
import re
import stat
import sys
if sys.version_info >= (3,0):
import io
StringIO = io.StringIO
else:
import cStringIO
StringIO = cStringIO.StringIO
import csbuild
from . import log
# try:
# from PyQt5 import QtCore, QtGui, QtWidgets
# QMainWindow = QtWidgets.QMainWindow
# QApplication = QtWidgets.QApplication
# QtGui.QWidget = QtWidgets.QWidget
# QtGui.QHBoxLayout = QtWidgets.QHBoxLayout
# QtGui.QVBoxLayout = QtWidgets.QVBoxLayout
# QtGui.QSplitter = QtWidgets.QSplitter
# QtGui.QLabel = QtWidgets.QLabel
# QtGui.QProgressBar = QtWidgets.QProgressBar
# QtGui.QPushButton = QtWidgets.QPushButton
# QtGui.QTreeWidget = QtWidgets.QTreeWidget
# QtGui.QTreeWidgetItem = QtWidgets.QTreeWidgetItem
# QtGui.QSpacerItem = QtWidgets.QSpacerItem
# QtGui.QSizePolicy = QtWidgets.QSizePolicy
# QtGui.QTextEdit = QtWidgets.QTextEdit
# QtGui.QTabWidget = QtWidgets.QTabWidget
# log.LOG_INFO("Using Qt5")
# except:
try:
from PyQt4 import QtCore, QtGui
QMainWindow = QtGui.QMainWindow
QApplication = QtGui.QApplication
log.LOG_INFO("Using Qt4")
except:
log.LOG_ERROR("PyQt4 must be installed on your system to load the CSBuild GUI")
csbuild.Exit( 1 )
import os
import threading
import time
import math
import signal
from . import _shared_globals
class TreeWidgetItem(QtGui.QTreeWidgetItem):
def __init__(self, *args, **kwargs):
QtGui.QTreeWidgetItem.__init__(self, *args, **kwargs)
self.numericColumns = set()
def setColumnNumeric(self, col):
self.numericColumns.add(col)
def __lt__(self, other):
if self.parent():
return False
sortCol = self.treeWidget().sortColumn()
numericColumns = self.treeWidget().headerItem().numericColumns
try:
if sortCol in numericColumns:
myNumber = float(self.text(sortCol))
otherNumber = float(other.text(sortCol))
return myNumber > otherNumber
except:
pass
myText = str(self.text(sortCol))
otherText = str(other.text(sortCol))
return myText > otherText
class TreeWidgetWithBarGraph(QtGui.QTreeWidgetItem):
def __init__(self, parent, renderParent, isFile):
QtGui.QTreeWidgetItem.__init__(self, parent)
self.numericColumns = set()
self.startTime = -1
self.buildEnd = -1
self.linkQueueStart = -1
self.linkStart = -1
self.endTime = -1
self.isFile = isFile
self.m_childrenShowing = False
self.renderParent = renderParent
self.lastUpdate = 0
def setChildrenShowing(self, showing):
self.m_childrenShowing = showing
def childrenShowing(self):
return self.m_childrenShowing
def setStartTime(self, startTime):
self.startTime = startTime
self.lastUpdate = time.time()
def setBuildEnd(self, buildEnd):
self.buildEnd = buildEnd
def setLinkStart(self, linkStart):
self.linkStart = linkStart
def setLinkQueueStart(self, linkQueueStart):
self.linkQueueStart = linkQueueStart
def setEndTime(self, endTime):
self.endTime = endTime
def draw(self, painter):
rect = self.renderParent.visualItemRect(self)
def drawBar(color, startTime, endTime):
if startTime != -1:
if endTime == -1:
endTime = self.lastUpdate
topLeft = rect.topLeft()
if topLeft.y() < 0:
return
bottomRight = rect.bottomRight()
xoffset = 24
if self.isFile:
xoffset += 20
topLeft.setX(topLeft.x() + (250-xoffset) + math.floor((startTime - _shared_globals.starttime) * 30))
topLeft.setY(topLeft.y())
bottomRight.setX(topLeft.x() + math.ceil((endTime - startTime) * 30))
bottomRight.setY(topLeft.y() + rect.height() - 2)
drawRect = QtCore.QRect(topLeft, bottomRight)
brush = painter.brush()
painter.setBrush(QtGui.QColor(color))
painter.drawRect(drawRect)
painter.setBrush(brush)
if self.isFile:
drawBar("#FF4000", self.startTime, self.buildEnd)
else:
drawBar("#0040FF", self.startTime, self.buildEnd)
drawBar("#008080", self.buildEnd, self.linkQueueStart)
drawBar("#00C0C0", self.linkQueueStart, self.linkStart)
drawBar("#00E080", self.linkStart, self.endTime)
class SyntaxHighlighter( QtGui.QSyntaxHighlighter ):
class HighlightRule( object ):
def __init__(self, pattern, argument):
self.pattern = pattern
self.format = argument
def __init__(self, *args):
QtGui.QSyntaxHighlighter.__init__(self, *args)
self.highlightRules = []
self.commentStart = re.compile("/\\*")
self.commentEnd = re.compile("\\*/")
self.keywordFormat = QtGui.QTextCharFormat()
self.commentFormat = QtGui.QTextCharFormat()
self.stringFormat = QtGui.QTextCharFormat()
self.functionFormat = QtGui.QTextCharFormat()
self.keywordFormat.setForeground(QtGui.QColor("#800000"))
self.keywordFormat.setFontWeight(QtGui.QFont.Bold)
for pattern in [
"\\b__alignof\\b",
"\\b__asm\\b",
"\\b__assume\\b",
"\\b__based\\b",
"\\b__box\\b",
"\\b__cdecl\\b",
"\\b__declspec\\b",
"\\b__delegate\\b",
"\\b__event\\b",
"\\b__except\\b",
"\\b__fastcall\\b",
"\\b__finally\\b",
"\\b__forceinline\\b",
"\\b__gc\\b",
"\\b__hook\\b",
"\\b__identifier\\b",
"\\b__if_exists\\b",
"\\b__if_not_exists\\b",
"\\b__inline\\b",
"\\b__int16\\b",
"\\b__int32\\b",
"\\b__int64\\b",
"\\b__int8\\b",
"\\b__interface\\b",
"\\b__leave\\b",
"\\b__m128\\b",
"\\b__m128d\\b",
"\\b__m128i\\b",
"\\b__m64\\b",
"\\b__multiple_inheritance\\b",
"\\b__nogc\\b",
"\\b__noop\\b",
"\\b__pin\\b",
"\\b__property\\b",
"\\b__raise\\b",
"\\b__restrict\\b",
"\\b__single_inheritance\\b",
"\\b__stdcall\\b",
"\\b__super\\b",
"\\b__thiscall\\b",
"\\b__try\\b",
"\\b__try_cast\\b",
"\\b__unaligned\\b",
"\\b__uuidof\\b",
"\\b__value\\b",
"\\b__virtual_inheritance\\b",
"\\b__w64\\b",
"\\b__wchar_t\\b",
"\\babstract\\b",
"\\barray\\b",
"\\balignas\\b",
"\\balignof\\b",
"\\band\\b",
"\\band_eq\\b",
"\\basm\\b",
"\\bauto\\b",
"\\bbitand\\b",
"\\bbitor\\b",
"\\bbool\\b",
"\\bbreak\\b",
"\\bcase\\b",
"\\bcatch\\b",
"\\bchar\\b",
"\\bchar16_t\\b",
"\\bchar32_t\\b",
"\\bclass\\b",
"\\bcompl\\b",
"\\bconst\\b",
"\\bconst_cast\\b",
"\\bconstexpr\\b",
"\\bcontinue\\b",
"\\bdecltype\\b",
"\\bdefault\\b",
"\\bdelegate\\b",
"\\bdelete\\b",
"\\bdeprecated\\b",
"\\bdllexport\\b",
"\\bdllimport\\b",
"\\bdo\\b",
"\\bdouble\\b",
"\\bdynamic_cast\\b",
"\\belse\\b",
"\\benum\\b",
"\\bevent\\b",
"\\bexplicit\\b",
"\\bexport\\b",
"\\bextern\\b",
"\\bfalse\\b",
"\\bfinal\\b",
"\\bfinally\\b",
"\\bfloat\\b",
"\\bfor\\b",
"\\bfor each\\b",
"\\bfriend\\b",
"\\bfriend_as\\b",
"\\bgcnew\\b",
"\\bgeneric\\b",
"\\bgoto\\b",
"\\bif\\b",
"\\bin\\b",
"\\binitonly\\b",
"\\binline\\b",
"\\bint\\b",
"\\bint16_t\\b",
"\\bint32_t\\b",
"\\bint64_t\\b",
"\\bint8_t\\b",
"\\binterface\\b",
"\\binterior_ptr\\b",
"\\bliteral\\b",
"\\blong\\b",
"\\bmutable\\b",
"\\bnaked\\b",
"\\bnamespace\\b",
"\\bnew\\b",
"\\bnoexcept\\b",
"\\bnoinline\\b",
"\\bnoreturn\\b",
"\\bnot\\b",
"\\bnot_eq\\b",
"\\bnothrow\\b",
"\\bnovtable\\b",
"\\bNULL\\b",
"\\bnullptr\\b",
"\\bnullptr_t\\b",
"\\boperator\\b",
"\\bor\\b",
"\\bor_eq\\b",
"\\boverride\\b",
"\\bproperty\\b",
"\\bprivate\\b",
"\\bprotected\\b",
"\\bpublic\\b",
"\\braise\\b",
"\\bref\\b",
"\\bregister\\b",
"\\breinterpret_cast\\b",
"\\brestrict\\b",
"\\breturn\\b",
"\\bsafecast\\b",
"\\bsealed\\b",
"\\bselectany\\b",
"\\bshort\\b",
"\\bsignals\\b",
"\\bsigned\\b",
"\\bsize_t\\b",
"\\bsizeof\\b",
"\\bslots\\b",
"\\bstatic\\b",
"\\bstatic_assert\\b",
"\\bstatic_cast\\b",
"\\bstruct\\b",
"\\bswitch\\b",
"\\btemplate\\b",
"\\btypedef\\b",
"\\btypename\\b",
"\\bthis\\b",
"\\bthread\\b",
"\\bthread_local\\b",
"\\bthrow\\b",
"\\btrue\\b",
"\\btry\\b",
"\\btypeid\\b",
"\\buint16_t\\b",
"\\buint32_t\\b",
"\\buint64_t\\b",
"\\buint8_t\\b",
"\\bunion\\b",
"\\bunsigned\\b",
"\\busing\\b",
"\\buuid\\b",
"\\bvalue\\b",
"\\bvirtual\\b",
"\\bvoid\\b",
"\\bvolatile\\b",
"\\bwchar_t\\b",
"\\bwhile\\b",
"\\bxor\\b",
"\\bxor_eq\\b",
]:
self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile(pattern), self.keywordFormat))
#self.functionFormat.setForeground(QtCore.Qt.darkMagenta)
#self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile("\\b[A-Za-z0-9_]+(?=\\()"), self.functionFormat))
self.numberFormat = QtGui.QTextCharFormat()
self.numberFormat.setForeground(QtGui.QColor("#008c00"))
self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile("\\b\d+\\b"), self.numberFormat))
self.symbolFormat = QtGui.QTextCharFormat()
self.symbolFormat.setForeground(QtGui.QColor("#808030"))
self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile(r"[\[\]\+\=\-\*\/\(\)\{\}\;\,\.\<\>\?\&\^\%\!\~\|]"), self.symbolFormat))
self.commentFormat.setForeground(QtGui.QColor("#696969"))
self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile("//[^\n]*"), self.commentFormat))
self.preprocessorFormat = QtGui.QTextCharFormat()
self.preprocessorFormat.setForeground(QtGui.QColor("#004a43"))
self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile("^\s*#.*$"), self.preprocessorFormat))
self.stringFormat.setForeground(QtCore.Qt.darkCyan)
self.highlightRules.append(SyntaxHighlighter.HighlightRule(re.compile("\".*?\""), self.stringFormat))
def highlightBlock(self, line):
for rule in self.highlightRules:
match = rule.pattern.search(line)
while match:
start, end = match.span()
length = end - start
self.setFormat(start, length, rule.format)
match = rule.pattern.search(line, end)
self.setCurrentBlockState(0)
startIndex = 0
if self.previousBlockState() != 1:
match = self.commentStart.search(line)
if match:
startIndex = match.start()
else:
startIndex = -1
while startIndex >= 0:
endIndex = -1
match = self.commentEnd.search(line, startIndex)
if match:
endIndex = match.end()
length = -1
if endIndex == -1:
self.setCurrentBlockState(1)
length = len(line) - startIndex
else:
length = endIndex - startIndex
self.setFormat(startIndex, length, self.commentFormat)
match = self.commentStart.search(line, startIndex + length)
if match:
startIndex = match.start()
else:
startIndex = -1
class LineNumberArea( QtGui.QWidget ):
def __init__(self, editor):
QtGui.QWidget.__init__(self, editor)
self.editor = editor
self.buttonDown = False
def sizeHint(self):
return QtCore.QSize(self.editor.lineNumberAreaWidth(), 0)
def paintEvent(self, event):
self.editor.lineNumberAreaPaintEvent(event)
def mouseMoveEvent(self, event):
if self.buttonDown:
self.editor.sideBarMousePress(event)
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.buttonDown = True
self.editor.sideBarMousePress(event)
def mouseReleaseEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.buttonDown = False
class CodeEditor( QtGui.QPlainTextEdit ):
def __init__(self, parent, parentEditor, project, directory = None):
QtGui.QPlainTextEdit.__init__(self, parent)
self.parentEditor = parentEditor
self.project = project
font = QtGui.QFont()
font.setFamily("monospace")
font.setFixedPitch(True)
font.setPointSize(10)
metrics = QtGui.QFontMetrics(font)
self.setTabStopWidth(4 * metrics.width(' '))
self.setFont(font)
self.sideBar = LineNumberArea(self)
self.cursorPositionChanged.connect(self.highlightCurrentLine)
self.blockCountChanged.connect(self.updateLineNumberAreaWidth)
self.updateRequest.connect(self.updateLineNumberArea)
self.updateLineNumberAreaWidth(0)
self.highlightCurrentLine()
def lineNumberAreaPaintEvent(self, event):
painter = QtGui.QPainter(self.sideBar)
painter.fillRect(event.rect(), QtCore.Qt.lightGray)
block = self.firstVisibleBlock()
blockNum = block.blockNumber()
top = int(self.blockBoundingGeometry(block).translated(self.contentOffset()).top())
bottom = top + int(self.blockBoundingRect(block).height())
while block.isValid() and top <= event.rect().bottom():
			if block.isVisible() and bottom >= event.rect().top():
number = str(blockNum + 1)
painter.setPen(QtCore.Qt.black)
painter.drawText(0, top, self.sideBar.width(), self.fontMetrics().height(), QtCore.Qt.AlignRight, number)
block = block.next()
top = bottom
bottom = top + int(self.blockBoundingRect(block).height())
blockNum += 1
def lineNumberAreaWidth(self):
digits = 1
maxDigits = max(1, self.blockCount())
while maxDigits >= 10:
maxDigits /= 10
digits += 1
space = 3 + self.fontMetrics().width("9") * digits
return space
def resizeEvent(self, event):
QtGui.QPlainTextEdit.resizeEvent(self, event)
cr = self.contentsRect()
self.sideBar.setGeometry(QtCore.QRect(cr.left(), cr.top(), self.lineNumberAreaWidth(), cr.height()))
def updateLineNumberAreaWidth(self, blockCount):
self.setViewportMargins(self.lineNumberAreaWidth(), 0, 0, 0)
def highlightCurrentLine(self):
extraSelections = []
lineColor = "#DDEDEC"
selection = QtGui.QTextEdit.ExtraSelection()
selection.format.setBackground(QtGui.QColor(lineColor))
selection.format.setProperty(QtGui.QTextFormat.FullWidthSelection, True)
selection.cursor = self.textCursor()
selection.cursor.clearSelection()
extraSelections.append(selection)
self.setExtraSelections(extraSelections)
def updateLineNumberArea(self, rect, num):
if num:
self.sideBar.scroll(0, num)
else:
self.sideBar.update(0, rect.y(), self.sideBar.width(), rect.height())
if rect.contains(self.viewport().rect()):
self.updateLineNumberAreaWidth(0)
def sideBarMousePress(self, event):
pass
class CodeProfileDisplay(CodeEditor):
def __init__(self, parent, parentEditor, project, directory):
self.visualizationWidth = 15
CodeEditor.__init__(self, parent, parentEditor, project)
self.directory = directory
self.setReadOnly(True)
self.vals = []
self.highVal = 0.0
self.setMouseTracking(True)
self.selections = []
self.mousePos = None
self.mouseGlobalPos = None
self.maxVal = 0.0
self.settingValue = False
def keyPressEvent(self, event):
if not self.mousePos:
return
if event.key() == QtCore.Qt.Key_Control:
mouseEvent = QtGui.QMouseEvent(
QtCore.QEvent.MouseMove,
self.mousePos,
self.mouseGlobalPos,
QtCore.Qt.NoButton,
QtCore.Qt.NoButton,
QtGui.QApplication.keyboardModifiers()
)
self.mouseMoveEvent(mouseEvent)
def keyReleaseEvent(self, event):
if not self.mousePos:
return
if event.key() == QtCore.Qt.Key_Control:
mouseEvent = QtGui.QMouseEvent(
QtCore.QEvent.MouseMove,
self.mousePos,
self.mouseGlobalPos,
QtCore.Qt.NoButton,
QtCore.Qt.NoButton,
QtGui.QApplication.keyboardModifiers()
)
self.mouseMoveEvent(mouseEvent)
def mouseMoveEvent(self, event):
cursor = self.cursorForPosition(event.pos())
block = cursor.block()
line = str(block.text())
RMatch = re.search( r"#\s*include\s*[<\"](.*?)[\">]", line )
if RMatch:
extraSelections = list(self.selections)
selection = QtGui.QTextEdit.ExtraSelection()
selection.format.setFontUnderline(True)
modifiers = QtGui.QApplication.keyboardModifiers()
if modifiers == QtCore.Qt.ControlModifier:
selection.format.setForeground(QtGui.QColor("#0000FF"))
selection.format.setFontWeight(QtGui.QFont.Bold)
QApplication.setOverrideCursor(QtCore.Qt.PointingHandCursor)
QtGui.QToolTip.showText(event.globalPos(), "", self)
else:
QtGui.QToolTip.showText(event.globalPos(), "Ctrl+click to open profile view for {}".format(RMatch.group(1)), self)
QApplication.restoreOverrideCursor()
selection.cursor = QtGui.QTextCursor(self.document())
selection.cursor.movePosition(QtGui.QTextCursor.Down, QtGui.QTextCursor.MoveAnchor, block.blockNumber())
selection.cursor.movePosition(QtGui.QTextCursor.Right, QtGui.QTextCursor.MoveAnchor, RMatch.start())
selection.cursor.clearSelection()
selection.cursor.movePosition(QtGui.QTextCursor.Right, QtGui.QTextCursor.KeepAnchor, RMatch.end() - RMatch.start())
extraSelections.append(selection)
self.setExtraSelections(extraSelections)
self.mousePos = event.pos()
self.mouseGlobalPos = event.globalPos()
else:
QtGui.QToolTip.showText(event.globalPos(), "", self)
self.setExtraSelections(self.selections)
QApplication.restoreOverrideCursor()
self.mousePos = None
self.mouseGlobalPos = None
def highlightCurrentLine(self):
pass
def sideBarMousePress(self, event):
if event.pos().x() <= self.visualizationWidth:
totalLines = self.blockCount()
pct = float(event.pos().y()) / self.sideBar.rect().height()
cursor = self.textCursor()
block = cursor.block()
blockNo = block.blockNumber()
desiredBlockNo = int(totalLines * pct)
if blockNo > desiredBlockNo:
cursor.movePosition(QtGui.QTextCursor.Up, QtGui.QTextCursor.MoveAnchor, blockNo - desiredBlockNo)
else:
cursor.movePosition(QtGui.QTextCursor.Down, QtGui.QTextCursor.MoveAnchor, desiredBlockNo - blockNo)
self.setTextCursor(cursor)
self.centerCursor()
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton and event.modifiers() == QtCore.Qt.ControlModifier:
cursor = self.cursorForPosition(event.pos())
block = cursor.block()
line = str(block.text())
RMatch = re.search( r"#\s*include\s*[<\"](.*?)[\">]", line )
if RMatch:
includeFile = RMatch.group(1)
project = self.project
#First try: Absolute path relative to base file directory.
absPath = os.path.abspath(os.path.join(self.directory, includeFile))
if not os.access(absPath, os.F_OK):
#Second try: Look in the project's include directories.
for directory in project.includeDirs:
absPath = os.path.abspath(os.path.join(directory, includeFile))
if os.access(absPath, os.F_OK):
break
if not os.access(absPath, os.F_OK):
#Third try, brute force it against the filemap our parent has for us.
base = os.path.basename(includeFile)
if base in self.parentEditor.filemap:
options = self.parentEditor.filemap[base]
if len(options) == 1:
absPath = list(options)[0]
else:
log.LOG_ERROR("TODO: Multiple options exist for header {}: {}".format(includeFile, options))
return
else:
return
with open(absPath, "r") as f:
data = f.read().split("\n")
io = StringIO()
absPath = os.path.normcase(absPath)
baseFile = self.parentEditor.sourceFile
lineNo = 1
for line in data:
lineTime = 0.0
if lineNo in project.times[baseFile][absPath]:
lineTime = project.times[baseFile][absPath][lineNo]
io.write("{: 9.6f}\t\t{}\n".format(lineTime, line))
lineNo += 1
data = io.getvalue()
io.close()
window = EditorWindow(baseFile, 0, 0, CodeProfileDisplay, self, project=project, directory=os.path.dirname(absPath), data=data, filemap=self.parentEditor.filemap, baseFile=os.path.basename(absPath))
window.show()
def setPlainText(self, text):
CodeEditor.setPlainText(self, text)
text = text.split("\n")
class VisMode:
Mean = 1
HighVal = 3
Constant = 4
mode = VisMode.Mean
skipIncludes = True
maxVal = 0.0
for line in text:
if not line.strip():
continue
val = float(line.split('\t')[0])
maxVal = max(maxVal, val)
self.maxVal = maxVal
self.parentEditor.slider.setMaximum(self.toLog(maxVal))
self.parentEditor.slider.setMinimum(1)
if mode == VisMode.Mean:
highVal = 0.0
num = 0
for line in text:
if not line.strip():
continue
if skipIncludes:
RMatch = re.search( r"#\s*include\s*[<\"](.*?)[\">]", line )
if RMatch:
continue
val = float(line.split('\t')[0])
highVal += val
num += 1
if num == 0:
return
highVal /= num
highVal *= 2
if not highVal:
for line in text:
if not line.strip():
continue
val = float(line.split('\t')[0])
highVal += val
num += 1
if num == 0:
return
highVal /= num
highVal *= 2
elif mode == VisMode.HighVal:
highVal = 0.0
for line in text:
if not line.strip():
continue
if skipIncludes:
RMatch = re.search( r"#\s*include\s*[<\"](.*?)[\">]", line )
if RMatch:
continue
val = float(line.split('\t')[0])
highVal = max(highVal, val)
if not highVal:
for line in text:
if not line.strip():
continue
val = float(line.split('\t')[0])
highVal = max(highVal, val)
elif mode == VisMode.Constant:
highVal = 0.01
if not highVal:
return
self.highVal = highVal
self.settingValue = True
self.parentEditor.slider.setValue(self.toLog(highVal))
self.settingValue = False
self.parentEditor.textBox.setText("{:f}".format(highVal))
self.highlightProblemAreas(text)
def toLog(self, val):
normalized = float(val)/self.maxVal
return int(round(math.sqrt(normalized) * 1000))
def fromLog(self, val):
if val == 0:
return 0
val = float(val)/1000.0
return val * val * self.maxVal
def sliderMoved(self, value):
if self.settingValue:
return
self.highVal = self.fromLog(value)
self.parentEditor.textBox.setText("{:f}".format(self.highVal))
if not self.parentEditor.slider.isSliderDown():
text = str(self.toPlainText())
self.highlightProblemAreas(text.split("\n"))
def textEdited(self):
try:
val = float(self.parentEditor.textBox.text())
except:
self.parentEditor.textBox.setText("{:f}".format(self.highVal))
else:
if val <= 0.0:
self.parentEditor.textBox.setText("{:f}".format(self.highVal))
return
if val > self.maxVal:
val = self.maxVal
self.parentEditor.textBox.setText("{:f}".format(val))
self.highVal = val
self.settingValue = True
self.parentEditor.slider.setValue(self.toLog(self.highVal))
self.settingValue = False
text = str(self.toPlainText())
self.highlightProblemAreas(text.split("\n"))
def sliderReleased(self):
self.highVal = self.fromLog(self.parentEditor.slider.value())
text = str(self.toPlainText())
self.highlightProblemAreas(text.split("\n"))
def highlightProblemAreas(self, text):
extraSelections = []
self.vals = []
lineNo = 0
for line in text:
if not line.strip():
continue
val = float(line.split('\t')[0])
if val > self.highVal:
val = self.highVal
selection = QtGui.QTextEdit.ExtraSelection()
gbVals = 255 - math.ceil(255 * (val/self.highVal))
selection.format.setBackground(QtGui.QColor(255, gbVals, gbVals))
selection.format.setProperty(QtGui.QTextFormat.FullWidthSelection, True)
selection.cursor = QtGui.QTextCursor(self.document())
selection.cursor.movePosition(QtGui.QTextCursor.Down, QtGui.QTextCursor.MoveAnchor, lineNo)
selection.cursor.clearSelection()
extraSelections.append(selection)
lineNo += 1
self.vals.append(val)
self.selections = extraSelections
self.setExtraSelections(extraSelections)
def lineNumberAreaWidth(self):
return self.visualizationWidth + CodeEditor.lineNumberAreaWidth(self)
def lineNumberAreaPaintEvent(self, event):
painter = QtGui.QPainter(self.sideBar)
painter.fillRect(event.rect(), QtCore.Qt.lightGray)
width = self.visualizationWidth
visualHeight = self.sideBar.rect().height()
height = min(visualHeight, len(self.vals))
image = QtGui.QImage(width, height, QtGui.QImage.Format_RGB32)
image.fill(QtGui.qRgb(255, 255, 255))
lineNo = 0
for val in self.vals:
y = int(lineNo * (float(height) / float(len(self.vals))))
color = QtGui.QColor(image.pixel(0, y))
gbVal = min(255 - int(math.ceil((val / self.highVal) * 255)), color.blue())
onColor = QtGui.qRgb(255, gbVal, gbVal)
for x in range(width):
image.setPixel(x, y, onColor)
lineNo += 1
block = self.firstVisibleBlock()
blockNum = block.blockNumber()
top = int(self.blockBoundingGeometry(block).translated(self.contentOffset()).top())
bottom = top + int(self.blockBoundingRect(block).height())
topLeft = self.sideBar.rect().topLeft()
bottomRight = self.sideBar.rect().bottomRight()
bottomRight.setX(self.visualizationWidth)
rect = QtCore.QRect(topLeft, bottomRight)
painter.drawImage(rect, image, image.rect())
image2 = QtGui.QImage(self.sideBar.rect().width(), self.sideBar.rect().height(), QtGui.QImage.Format_ARGB32)
firstNum = -1
lastNum = -1
while block.isValid() and top <= self.rect().bottom():
if block.isVisible() and bottom >= self.rect().top():
if firstNum == -1:
firstNum = blockNum
lastNum = blockNum + 1
block = block.next()
top = bottom
bottom = top + int(self.blockBoundingRect(block).height())
blockNum += 1
mult = float(self.sideBar.rect().height())/float(len(self.vals))
fillColor = QtGui.qRgba(192, 192, 192, 64)
onColor = QtGui.qRgba(64, 64, 64, 127)
offColor = QtGui.qRgba(127, 127, 127, 127)
image2.fill(offColor)
startPixel = int(math.floor(firstNum * mult))
endPixel = min(int(math.ceil(lastNum * mult)) - 1, self.sideBar.rect().height() - 1)
for i in range(startPixel, endPixel):
for j in range(self.sideBar.rect().width()):
image2.setPixel(j, i, fillColor)
image2.setPixel(0, i, onColor)
image2.setPixel(1, i, onColor)
image2.setPixel(self.sideBar.width()-2, i, onColor)
image2.setPixel(self.sideBar.width()-1, i, onColor)
for i in range(self.sideBar.rect().width()):
image2.setPixel(i, startPixel, onColor)
image2.setPixel(i, endPixel, onColor)
image2.setPixel(i, startPixel + 1, onColor)
image2.setPixel(i, endPixel - 1, onColor)
painter.drawImage(rect, image2, image2.rect())
block = self.firstVisibleBlock()
blockNum = block.blockNumber()
top = int(self.blockBoundingGeometry(block).translated(self.contentOffset()).top())
bottom = top + int(self.blockBoundingRect(block).height())
while block.isValid() and top <= event.rect().bottom():
if block.isVisible() and bottom >= event.rect().top():
number = str(blockNum + 1)
painter.setPen(QtCore.Qt.black)
painter.drawText(0, top, self.sideBar.width(), self.fontMetrics().height(), QtCore.Qt.AlignRight, number)
block = block.next()
top = bottom
bottom = top + int(self.blockBoundingRect(block).height())
blockNum += 1
class GridLineDelegate(QtGui.QStyledItemDelegate):
def __init__(self, parent, *args, **kwargs):
self.parent = parent
QtGui.QStyledItemDelegate.__init__(self, *args, **kwargs)
self.highCol = 0
self.lastRow = 0
def paint(self, painter, option, index):
QtGui.QStyledItemDelegate.paint(self, painter, option, index)
item = self.parent.itemFromIndex(index)
pen = QtGui.QPen()
pen.setWidth(1)
painter.setPen(pen)
if isinstance(item, TreeWidgetWithBarGraph):
painter.drawRect(option.rect)
painter.drawLine(option.rect.bottomLeft(), option.rect.bottomRight())
if index.row() <= self.lastRow:
self.highCol = index.column()
item.draw(painter)
elif index.column() == self.highCol:
item.draw(painter)
self.lastRow = index.row()
class EditorWindow( QMainWindow ):
def __init__(self, sourceFile, line, column, EditorType, parent, project = None, directory = None, baseFile = None, data = None, filemap = None, *args, **kwargs):
QMainWindow.__init__(self, parent, *args, **kwargs)
self.resize(1275, 600)
self.project = project
self.centralWidget = QtGui.QWidget(self)
self.centralWidget.setObjectName("centralWidget")
self.outerLayout = QtGui.QVBoxLayout(self.centralWidget)
self.editor = EditorType(self.centralWidget, self, project, directory)
self.editor.setStyleSheet(
"""
QPlainTextEdit
{
color: black;
background-color: white;
}
"""
)
self.filemap = filemap
self.highlighter = SyntaxHighlighter(self.editor.document())
self.editor.setLineWrapMode(QtGui.QPlainTextEdit.NoWrap)
self.statusBar = QtGui.QStatusBar()
self.setStatusBar(self.statusBar)
self.outerLayout.addWidget(self.editor)
self.highlighting = False
self.sourceFile = sourceFile
self.innerLayout = QtGui.QHBoxLayout()
if EditorType == CodeEditor:
self.isCodeEditor = True
horizontalSpacer = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.innerLayout.addItem(horizontalSpacer)
self.makeWriteable = QtGui.QPushButton(self.centralWidget)
self.makeWriteable.setText("Make Writeable")
self.makeWriteable.pressed.connect(self.MakeWriteable)
self.innerLayout.addWidget(self.makeWriteable)
if os.access(sourceFile, os.W_OK):
self.makeWriteable.hide()
else:
self.editor.setReadOnly(True)
self.saveButton = QtGui.QPushButton(self.centralWidget)
self.saveButton.setText("Save")
self.saveButton.pressed.connect(self.save)
self.innerLayout.addWidget(self.saveButton)
self.outerLayout.addLayout(self.innerLayout)
self.saveAction = QtGui.QAction(self)
self.saveAction.setShortcut( QtCore.Qt.CTRL | QtCore.Qt.Key_S )
self.saveAction.triggered.connect(self.save)
self.addAction(self.saveAction)
else:
self.isCodeEditor = False
label = QtGui.QLabel(self.centralWidget)
label.setText("Highlight values approaching:")
self.innerLayout.addWidget(label)
self.slider = QtGui.QSlider(QtCore.Qt.Horizontal, self.centralWidget)
self.innerLayout.addWidget(self.slider)
self.slider.valueChanged.connect(self.editor.sliderMoved)
self.slider.sliderReleased.connect(self.editor.sliderReleased)
self.textBox = QtGui.QLineEdit(self.centralWidget)
self.textBox.setMaximumWidth(160)
self.innerLayout.addWidget(self.textBox)
self.textBox.editingFinished.connect(self.editor.textEdited)
self.outerLayout.addLayout(self.innerLayout)
if data:
self.editor.setPlainText(data)
else:
with open(sourceFile, "r") as f:
self.editor.setPlainText(f.read())
self.setCentralWidget(self.centralWidget)
if baseFile:
self.setWindowTitle("Profile view: {}".format(baseFile))
else:
self.setWindowTitle(sourceFile)
def ScrollTo(self, line, column):
if line or column:
cursor = self.editor.textCursor()
cursor.setPosition(0)
if line:
line = int(line)
cursor.movePosition( QtGui.QTextCursor.Down, QtGui.QTextCursor.MoveAnchor, line - 1 )
if column:
column = int(column)
cursor.movePosition( QtGui.QTextCursor.NextCharacter, QtGui.QTextCursor.MoveAnchor, column - 1 )
self.editor.setTextCursor(cursor)
def MakeWriteable(self):
stats = os.stat(self.sourceFile)
mode = stats.st_mode
try:
os.chmod( self.sourceFile, mode | stat.S_IWRITE )
except:
self.statusBar.showMessage("Could not open file for writing. Permission error?.", 5000)
else:
self.makeWriteable.hide()
self.editor.setReadOnly(False)
self.statusBar.showMessage("File opened for writing.", 5000)
def save(self):
with open(self.sourceFile, "w") as f:
f.write(self.editor.toPlainText())
self.statusBar.showMessage("Saved.", 5000)
def closeEvent(self, event):
if self.isCodeEditor:
del self.parent().openWindows[self.sourceFile]
QMainWindow.closeEvent(self, event)
class MainWindow( QMainWindow ):
def __init__(self, *args, **kwargs):
self.exitRequested = False
QMainWindow.__init__(self, *args, **kwargs)
self.setObjectName("MainWindow")
self.resize(1275, 600)
self.centralWidget = QtGui.QWidget(self)
self.centralWidget.setObjectName("centralWidget")
self.outerLayout = QtGui.QVBoxLayout(self.centralWidget)
self.mainLayout = QtGui.QHBoxLayout()
self.m_splitter = QtGui.QSplitter(self.centralWidget)
self.m_splitter.setOrientation(QtCore.Qt.Vertical)
self.innerWidget = QtGui.QWidget(self.centralWidget)
self.innerLayout = QtGui.QVBoxLayout(self.innerWidget)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.m_buildSummaryLabel = QtGui.QLabel(self.innerWidget)
self.m_buildSummaryLabel.setObjectName("m_buildSummaryLabel")
font = QtGui.QFont()
font.setPointSize( 16 )
self.m_buildSummaryLabel.setFont(font)
self.verticalLayout.addWidget(self.m_buildSummaryLabel)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.m_successfulBuildsLabel = QtGui.QLabel(self.innerWidget)
self.m_successfulBuildsLabel.setObjectName("m_successfulBuildsLabel")
self.horizontalLayout.addWidget(self.m_successfulBuildsLabel)
self.m_failedBuildsLabel = QtGui.QLabel(self.innerWidget)
self.m_failedBuildsLabel.setObjectName("m_failedBuildsLabel")
self.horizontalLayout.addWidget(self.m_failedBuildsLabel)
self.m_warningLabel = QtGui.QLabel(self.innerWidget)
		self.m_warningLabel.setObjectName("m_warningLabel")
self.horizontalLayout.addWidget(self.m_warningLabel)
self.m_errorLabel = QtGui.QLabel(self.innerWidget)
		self.m_errorLabel.setObjectName("m_errorLabel")
self.horizontalLayout.addWidget(self.m_errorLabel)
horizontalSpacer_2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(horizontalSpacer_2)
self.m_filesCompletedLabel = QtGui.QLabel(self.centralWidget)
self.m_filesCompletedLabel.setObjectName("m_filesCompletedLabel")
self.horizontalLayout.addWidget(self.m_filesCompletedLabel)
self.verticalLayout.addLayout(self.horizontalLayout)
self.m_mainProgressBar = QtGui.QProgressBar(self.centralWidget)
self.m_mainProgressBar.setObjectName("m_mainProgressBar")
self.m_mainProgressBar.setValue(0)
self.verticalLayout.addWidget(self.m_mainProgressBar)
self.topPane = QtGui.QTabWidget(self.innerWidget)
self.buildWidget = QtGui.QWidget(self.innerWidget)
verticalLayout = QtGui.QVBoxLayout(self.buildWidget)
self.m_buildTree = QtGui.QTreeWidget(self.buildWidget)
self.m_buildTree.setColumnCount(12)
self.m_buildTree.setUniformRowHeights(True)
self.m_treeHeader = TreeWidgetItem()
self.m_buildTree.setHeaderItem(self.m_treeHeader)
self.m_buildTree.setObjectName("m_buildTree")
self.m_buildTree.setAlternatingRowColors(True)
self.m_buildTree.setUniformRowHeights(True)
self.m_buildTree.setSortingEnabled(True)
self.m_buildTree.setAnimated(True)
self.m_buildTree.header().setStretchLastSection(True)
self.m_buildTree.currentItemChanged.connect(self.SelectionChanged)
self.m_buildTree.itemExpanded.connect(self.UpdateProjects)
self.m_buildTree.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.m_buildTree.customContextMenuRequested.connect(self.buildTreeContextMenu)
verticalLayout.addWidget(self.m_buildTree)
self.topPane.addTab(self.buildWidget, "Build Progress")
self.timelinePage = QtGui.QWidget(self.centralWidget)
verticalLayout = QtGui.QVBoxLayout(self.timelinePage)
self.timelineWidget = QtGui.QTreeWidget(self.timelinePage)
self.m_timelineHeader = TreeWidgetItem()
self.timelineWidget.setHeaderItem(self.m_timelineHeader)
self.timelineWidget.setFocusPolicy(QtCore.Qt.NoFocus)
self.timelineWidget.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.timelineWidget.setProperty("showDropIndicator", False)
#self.timelineWidget.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.timelineWidget.setAlternatingRowColors(True)
#self.timelineWidget.setVerticalScrollMode(QtGui.QAbstractItemView.ScrollPerPixel)
#self.timelineWidget.setAnimated(True)
self.timelineWidget.header().setDefaultSectionSize(30)
self.timelineWidget.header().setStretchLastSection(False)
self.timelineWidget.header().setResizeMode(QtGui.QHeaderView.Fixed)
self.timelineWidget.itemExpanded.connect(self.TimelineItemExpended)
self.timelineWidget.itemCollapsed.connect(self.TimelineItemExpended)
self.timelineWidget.setItemDelegate(GridLineDelegate(self.timelineWidget))
self.timelineWidget.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.timelineWidget.customContextMenuRequested.connect(self.timelineContextMenu)
verticalLayout.addWidget(self.timelineWidget)
self.topPane.addTab(self.timelinePage, "Build Timeline")
self.verticalLayout.addWidget(self.topPane)
self.innerLayout.addLayout(self.verticalLayout)
self.m_pushButton = QtGui.QPushButton(self.buildWidget)
		self.m_pushButton.setObjectName("m_pushButton")
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.m_pushButton.sizePolicy().hasHeightForWidth())
self.m_pushButton.setSizePolicy(sizePolicy)
self.m_pushButton.setMaximumSize(QtCore.QSize(16777215, 20))
self.m_pushButton.setCheckable(True)
self.m_pushButton.toggled.connect(self.ButtonClicked)
self.innerLayout.addWidget(self.m_pushButton)
self.m_splitter.addWidget(self.innerWidget)
self.innerWidget2 = QtGui.QTabWidget(self.centralWidget)
self.textPage = QtGui.QWidget(self.innerWidget2)
self.innerLayout2 = QtGui.QVBoxLayout(self.textPage)
self.m_textEdit = QtGui.QTextEdit(self.textPage)
self.m_textEdit.setObjectName("textEdit")
self.m_textEdit.setReadOnly(True)
self.m_textEdit.setFontFamily("monospace")
self.innerLayout2.addWidget(self.m_textEdit)
self.commandPage = QtGui.QWidget(self.innerWidget2)
self.innerLayout2 = QtGui.QVBoxLayout(self.commandPage)
self.m_commandEdit = QtGui.QTextEdit(self.commandPage)
self.m_commandEdit.setObjectName("commandEdit")
self.m_commandEdit.setReadOnly(True)
self.m_commandEdit.setFontFamily("monospace")
self.innerLayout2.addWidget(self.m_commandEdit)
self.errorsPage = QtGui.QWidget(self.innerWidget2)
self.innerLayout3 = QtGui.QVBoxLayout(self.errorsPage)
self.m_errorTree = QtGui.QTreeWidget(self.errorsPage)
self.m_errorTree.setColumnCount(5)
self.m_errorTree.setUniformRowHeights(True)
self.m_treeHeader2 = TreeWidgetItem()
self.m_errorTree.setHeaderItem(self.m_treeHeader2)
self.m_errorTree.setObjectName("m_errorTree")
self.m_errorTree.setAlternatingRowColors(True)
self.m_errorTree.setUniformRowHeights(True)
self.m_errorTree.setSortingEnabled(True)
self.m_errorTree.setAnimated(True)
self.m_errorTree.header().setStretchLastSection(True)
self.m_errorTree.itemDoubleClicked.connect(self.OpenFileForEdit)
self.innerLayout3.addWidget(self.m_errorTree)
self.innerWidget2.addTab(self.errorsPage, "Errors/Warnings")
self.innerWidget2.addTab(self.textPage, "Text Output")
self.innerWidget2.addTab(self.commandPage, "Command Line")
self.m_splitter.addWidget(self.innerWidget2)
self.m_splitter.setSizes( [ 1, 0 ] )
self.m_splitter.setCollapsible( 0, False )
self.m_splitter.splitterMoved.connect(self.SplitterMoved)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.m_splitter.sizePolicy().hasHeightForWidth())
self.m_splitter.setSizePolicy(sizePolicy)
self.mainLayout.addWidget(self.m_splitter)
self.outerLayout.addLayout(self.mainLayout)
#self.horizontalLayout_2 = QtGui.QHBoxLayout()
#self.horizontalLayout_2.setObjectName("horizontalLayout_2")
#horizontalSpacer = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
#self.horizontalLayout_2.addItem(horizontalSpacer)
self.m_timeLeftLabel = QtGui.QLabel(self.centralWidget)
#self.m_timeLeftLabel.setObjectName("m_timeLeftLabel")
#self.horizontalLayout_2.addWidget(self.m_timeLeftLabel)
self.m_timeLeftLabel.hide()
#self.outerLayout.addLayout(self.horizontalLayout_2)
self.setCentralWidget(self.centralWidget)
self.retranslateUi()
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.onTick)
self.timer.start(100)
QtCore.QMetaObject.connectSlotsByName(self)
self.readyToClose = False
self.exiting = False
self.marqueeValue = 0
self.marqueeInverted = True
self.successfulBuilds = set()
self.failedBuilds = set()
self.m_ignoreButton = False
self.pulseColor = 0
self.pulseUp = False
self.animatingBars = {}
self.projectToItem = {}
self.itemToProject = {}
self.warningErrorCount = 0
self.openWindows = {}
self.tick = 0
def buildTreeContextMenu(self, point):
if not _shared_globals.profile:
return
if not self.readyToClose:
return
item = self.m_buildTree.itemAt(point)
if item is None:
	return
parent = item.parent()
if not parent:
return
if parent.parent():
return
menu = QtGui.QMenu(self)
action = QtGui.QAction("View profile data", self)
action.triggered.connect(functools.partial(self.buildTreeViewProfile, item))
menu.addAction(action)
menu.popup(self.m_buildTree.viewport().mapToGlobal(point))
def timelineContextMenu(self, point):
if not _shared_globals.profile:
return
if not self.readyToClose:
return
item = self.timelineWidget.itemAt(point)
if item is None:
	return
parent = item.parent()
if not parent:
return
if parent.parent():
return
menu = QtGui.QMenu(self)
action = QtGui.QAction("View profile data", self)
action.triggered.connect(functools.partial(self.timelineViewProfile, item))
menu.addAction(action)
menu.popup(self.timelineWidget.viewport().mapToGlobal(point))
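# Opens a per-file profile view in an EditorWindow: every line of the source file is
# prefixed with the time the compiler spent on it, taken from project.times.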
def launchProfileView(self, project, filename):
baseFile = os.path.basename(filename)
directory = os.path.dirname(filename)
with open(filename, "r") as f:
data = f.read().split("\n")
io = StringIO()
lineNo = 1
for line in data:
lineTime = 0.0
if lineNo in project.times[filename][filename]:
lineTime = project.times[filename][filename][lineNo]
io.write("{: 9.6f}\t\t{}\n".format(lineTime, line))
lineNo += 1
data = io.getvalue()
io.close()
filemap = {}
for otherfile in project.times[filename]:
baseName = os.path.basename(otherfile)
if baseName not in filemap:
filemap[baseName] = {otherfile}
else:
filemap[baseName].add(otherfile)
window = EditorWindow(filename, 0, 0, CodeProfileDisplay, self, baseFile=baseFile, project=project, directory=directory, data=data, filemap=filemap)
window.show()
def buildTreeViewProfile(self, item, checked):
filename = os.path.normcase(str(item.toolTip(3)))
project = self.itemToProject[str(item.parent().text(0))]
self.launchProfileView(project, filename)
def timelineViewProfile(self, item, checked):
filename = os.path.normcase(str(item.toolTip(0)))
idx = self.timelineWidget.indexOfTopLevelItem(item.parent()) # top-level (project) row that owns this file item
project = _shared_globals.sortedProjects[idx]
self.launchProfileView(project, filename)
def ButtonClicked(self, toggled):
if self.m_ignoreButton:
return
if toggled:
self.m_splitter.setSizes( [ 1275, max( self.width() - 1275, 600 ) ] )
self.m_errorTree.setColumnWidth( 0, 50 )
self.m_errorTree.setColumnWidth( 1, max(250, self.m_errorTree.width() - 350) )
self.m_errorTree.setColumnWidth( 2, 200 )
self.m_errorTree.setColumnWidth( 3, 50 )
self.m_errorTree.setColumnWidth( 4, 50 )
self.m_pushButton.setText(u"▾ Output ▾")
else:
self.m_splitter.setSizes( [ 1, 0 ] )
self.m_pushButton.setText(u"▴ Output ▴")
def OpenFileForEdit(self, item, column):
file = str(item.toolTip(2))
line = item.text(3)
col = item.text(4)
if not file or not os.access(file, os.F_OK):
return
if file in self.openWindows:
window = self.openWindows[file]
window.setWindowState(QtCore.Qt.WindowActive)
window.activateWindow()
window.raise_()
window.ScrollTo(line, col)
return
if(
#TODO: Somehow get extension from the active toolchain?
not file.endswith(".o")
and not file.endswith(".so")
and not file.endswith(".a")
and not file.endswith(".exe")
and not file.endswith(".dll")
and not file.endswith(".lib")
and not file.endswith(".obj")
):
window = EditorWindow(file, line, col, CodeEditor, self)
window.show()
window.ScrollTo(line, col)
self.openWindows[file] = window
def resizeEvent(self, event):
QMainWindow.resizeEvent(self, event)
textBoxSize = self.m_splitter.sizes()[1]
if textBoxSize != 0:
self.m_splitter.setSizes( [ 1275, max( self.width() - 1275, 600 ) ] )
self.m_errorTree.setColumnWidth( 0, 50 )
self.m_errorTree.setColumnWidth( 1, max(250, self.m_errorTree.width() - 350) )
self.m_errorTree.setColumnWidth( 2, 200 )
self.m_errorTree.setColumnWidth( 3, 50 )
self.m_errorTree.setColumnWidth( 4, 50 )
def SplitterMoved(self, pos, index):
textBoxSize = self.m_splitter.sizes()[1]
if textBoxSize == 0:
if self.m_pushButton.isChecked():
self.m_ignoreButton = True
self.m_pushButton.setChecked(False)
self.m_ignoreButton = False
self.m_pushButton.setText(u"▴ Output ▴")
else:
if not self.m_pushButton.isChecked():
self.m_ignoreButton = True
self.m_pushButton.setChecked(True)
self.m_ignoreButton = False
self.m_errorTree.setColumnWidth( 0, 50 )
self.m_errorTree.setColumnWidth( 1, max(250, self.m_errorTree.width() - 350) )
self.m_errorTree.setColumnWidth( 2, 200 )
self.m_errorTree.setColumnWidth( 3, 50 )
self.m_errorTree.setColumnWidth( 4, 50 )
self.m_pushButton.setText(u"▾ Output ▾")
def SelectionChanged(self, current, previous):
if self.m_textEdit.isVisible():
if current is None:
outStr = ""
for project in _shared_globals.sortedProjects:
outStr += ("=" * 40) + "\n\n"
outStr += project.name
outStr += ("=" * 40) + "\n\n"
with project.mutex:
for filename in project.compileOutput:
outStr += filename
errors = ""
output = ""
if filename in project.compileErrors:
errors = project.compileErrors[filename]
output = project.compileOutput[filename]
if errors or output:
outStr += "\n" + ("-" * len(filename)) + "\n\n"
outStr += "\n" + ("-" * 40) + "\n\n"
if errors:
outStr += "ERROR OUTPUT:\n\n" + errors + "\n\n"
if output:
outStr += "OUTPUT:\n\n" + output + "\n\n"
if project.linkErrors:
outStr += "LINK ERRORS:\n\n" + project.linkErrors + "\n\n"
if project.linkOutput:
outStr += "LINK OUTPUT:\n\n" + project.linkOutput + "\n\n"
outStr += "\n\n"
if outStr != self.m_textEdit.toPlainText():
self.m_textEdit.setText(outStr)
else:
for project in _shared_globals.sortedProjects:
widget = self.projectToItem[project]
if not widget:
continue
if widget == current:
outStr = ""
with project.mutex:
for filename in project.compileOutput:
errors = ""
output = ""
if filename in project.compileErrors:
errors = project.compileErrors[filename]
output = project.compileOutput[filename]
if errors or output:
outStr += filename
outStr += "\n" + ("=" * 40) + "\n\n"
if errors:
outStr += "ERROR OUTPUT:\n\n" + errors + "\n\n"
if output:
outStr += "OUTPUT:\n\n" + output + "\n\n"
if project.linkErrors:
outStr += "LINK ERRORS:\n\n" + project.linkErrors + "\n\n"
if project.linkOutput:
outStr += "LINK OUTPUT:\n\n" + project.linkOutput + "\n\n"
if outStr != self.m_textEdit.toPlainText():
self.m_textEdit.setText(outStr)
elif widget.isExpanded():
def HandleChild( idx, file ):
file = os.path.normcase(file)
childWidget = widget.child(idx)
if childWidget == current:
outStr = ""
errors = ""
output = ""
with project.mutex:
if file in project.compileErrors:
errors = project.compileErrors[file]
if file in project.compileOutput:
output = project.compileOutput[file]
if errors or output:
outStr += file
outStr += "\n" + ("=" * 40) + "\n\n"
if errors:
outStr += "ERROR OUTPUT:\n\n" + errors + "\n\n"
if output:
outStr += "OUTPUT:\n\n" + output + "\n\n"
if outStr != self.m_textEdit.toPlainText():
self.m_textEdit.setText(outStr)
idx = 0
if project.needsPrecompileCpp:
HandleChild( idx, project.cppHeaderFile )
idx += 1
if project.needsPrecompileC:
HandleChild( idx, project.cHeaderFile )
idx += 1
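# Sources that were combined into a chunk are replaced by their chunk file here (each
# chunk handled only once) before the per-file handler is invoked.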
used_chunks = set()
for source in project.allsources:
inThisBuild = False
if source not in project._finalChunkSet:
chunk = project.get_chunk( source )
if not chunk:
continue
extension = "." + source.rsplit(".", 1)[1]
if extension in project.cExtensions:
extension = ".c"
else:
extension = ".cpp"
chunk = os.path.join( project.csbuildDir, "{}{}".format( chunk, extension ) )
if chunk in used_chunks:
continue
if chunk in project._finalChunkSet:
inThisBuild = True
source = chunk
used_chunks.add(chunk)
else:
inThisBuild = True
if inThisBuild:
HandleChild( idx, source )
idx += 1
elif self.m_commandEdit.isVisible():
if current is not None:
for project in _shared_globals.sortedProjects:
widget = self.projectToItem[project]
if not widget:
continue
if widget == current:
self.m_commandEdit.setText(project.linkCommand)
elif widget.isExpanded():
def HandleChild( idx, file ):
file = os.path.normcase(file)
childWidget = widget.child(idx)
if childWidget == current:
if file in project.compileCommands:
self.m_commandEdit.setText(project.compileCommands[file])
else:
self.m_commandEdit.setText("")
idx = 0
if project.needsPrecompileCpp:
HandleChild( idx, project.cppHeaderFile )
idx += 1
if project.needsPrecompileC:
HandleChild( idx, project.cHeaderFile )
idx += 1
used_chunks = set()
for source in project.allsources:
inThisBuild = False
if source not in project._finalChunkSet:
chunk = project.get_chunk( source )
if not chunk:
continue
extension = "." + source.rsplit(".", 1)[1]
if extension in project.cExtensions:
extension = ".c"
else:
extension = ".cpp"
chunk = os.path.join( project.csbuildDir, "{}{}".format( chunk, extension ) )
if chunk in used_chunks:
continue
if chunk in project._finalChunkSet:
inThisBuild = True
source = chunk
used_chunks.add(chunk)
else:
inThisBuild = True
if inThisBuild:
HandleChild( idx, source )
idx += 1
else:
if current != previous:
while self.m_errorTree.takeTopLevelItem(0):
pass
def HandleError(datas):
if datas is None:
return
for data in datas:
exists = False
for i in range(self.m_errorTree.topLevelItemCount()):
tempWidget = self.m_errorTree.topLevelItem(i)
if(
tempWidget.text(1) == data.text
and tempWidget.text(2) == os.path.basename( data.file )
and (
( tempWidget.text(3) == "" and data.line == -1 )
or ( tempWidget.text(3) == str(data.line) )
)
and (
( tempWidget.text(4) == "" and data.column == -1 )
or ( tempWidget.text(4) == str(data.column) )
)
):
#don't re-add data that already exists.
exists = True
break
if exists:
continue
font = QtGui.QFont()
font.setFamily("monospace")
newItem = TreeWidgetItem()
if data.level == _shared_globals.OutputLevel.WARNING:
newItem.setText(0, "W")
brush = QtGui.QBrush( QtCore.Qt.darkYellow )
newItem.setForeground(0, brush )
#newItem.setForeground(1, brush )
#newItem.setForeground(2, brush )
#newItem.setForeground(3, brush )
#newItem.setForeground(4, brush )
elif data.level == _shared_globals.OutputLevel.ERROR:
newItem.setText(0, "E")
brush = QtGui.QBrush( QtCore.Qt.red )
newItem.setForeground(0, brush )
#newItem.setForeground(1, brush )
#newItem.setForeground(2, brush )
#newItem.setForeground(3, brush )
#newItem.setForeground(4, brush )
font.setBold(True)
elif data.level == _shared_globals.OutputLevel.NOTE:
newItem.setText(0, "N")
else:
newItem.setText(0, "?")
newItem.setText(1, data.text)
newItem.setToolTip(1, data.text)
if data.file:
newItem.setText(2, os.path.basename(data.file))
newItem.setToolTip(2, os.path.abspath(data.file))
if data.line != -1:
newItem.setText(3, str(data.line))
if data.column != -1:
newItem.setText(4, str(data.column))
newItem.setFont(0, font)
newItem.setFont(1, font)
newItem.setFont(2, font)
newItem.setFont(3, font)
newItem.setFont(4, font)
for detail in data.details:
font = QtGui.QFont()
font.setItalic(True)
font.setFamily("monospace")
childItem = TreeWidgetItem(newItem)
childItem.setDisabled(True)
if detail.level == _shared_globals.OutputLevel.NOTE:
font.setBold(True)
childItem.setText(1, detail.text)
childItem.setToolTip(1, detail.text)
if detail.file:
childItem.setText(2, os.path.basename(detail.file))
childItem.setToolTip(2, os.path.abspath(detail.file))
if detail.line != -1:
childItem.setText(3, str(detail.line))
if detail.column != -1:
childItem.setText(4, str(detail.column))
childItem.setFont(0, font)
childItem.setFont(1, font)
childItem.setFont(2, font)
childItem.setFont(3, font)
childItem.setFont(4, font)
newItem.addChild(childItem)
self.m_errorTree.addTopLevelItem(newItem)
self.m_errorTree.setSortingEnabled(False)
if current is None:
for project in _shared_globals.sortedProjects:
with project.mutex:
for filename in project.parsedErrors:
HandleError(project.parsedErrors[filename])
HandleError(project.parsedLinkErrors)
else:
for project in _shared_globals.sortedProjects:
widget = self.projectToItem[project]
if not widget:
continue
if widget == current:
with project.mutex:
for filename in project.parsedErrors:
HandleError(project.parsedErrors[filename])
HandleError(project.parsedLinkErrors)
elif widget.isExpanded():
def HandleChild( idx, file ):
file = os.path.normcase(file)
childWidget = widget.child(idx)
if childWidget == current:
with project.mutex:
if file in project.parsedErrors:
HandleError(project.parsedErrors[file])
idx = 0
if project.needsPrecompileCpp:
HandleChild( idx, project.cppHeaderFile )
idx += 1
if project.needsPrecompileC:
HandleChild( idx, project.cHeaderFile )
idx += 1
used_chunks = set()
for source in project.allsources:
inThisBuild = False
if source not in project._finalChunkSet:
chunk = project.get_chunk( source )
if not chunk:
continue
extension = "." + source.rsplit(".", 1)[1]
if extension in project.cExtensions:
extension = ".c"
else:
extension = ".cpp"
chunk = os.path.join( project.csbuildDir, "{}{}".format( chunk, extension ) )
if chunk in used_chunks:
continue
if chunk in project._finalChunkSet:
inThisBuild = True
source = chunk
used_chunks.add(chunk)
else:
inThisBuild = True
if inThisBuild:
HandleChild( idx, source )
idx += 1
self.m_errorTree.setSortingEnabled(True)
def TimelineItemExpended(self, item):
self.UpdateTimeline(False)
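# Refreshes the "Build Timeline" tab: when addTime is True, one-second columns are
# appended to the header as wall-clock time passes, then each project's (and expanded
# file's) bar is updated from the recorded start/end times.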
def UpdateTimeline(self, addTime = False):
needsUpdate = False
if addTime:
font = QtGui.QFont()
font.setPointSize(5)
curtime = time.time( ) - _shared_globals.starttime
mult = 1
curtime *= mult
cols = self.m_timelineHeader.columnCount()
colsNeeded = int(math.ceil(curtime)) + 1
if colsNeeded > cols:
scrollBar = self.timelineWidget.horizontalScrollBar()
maxValue = scrollBar.maximum()
needsUpdate = True
for i in range(colsNeeded - cols):
idx = cols + i - 1
self.m_timelineHeader.setFont(idx + 1, font)
if idx % (10*mult) == 0:
minutes = int(math.floor( idx / (60*mult) ))
seconds = int(round( idx % (60*mult) ))
self.m_timelineHeader.setText(idx+1, "{}:{:02}".format(minutes, seconds/mult))
else:
self.m_timelineHeader.setText(idx+1, "")
if scrollBar.value() == maxValue:
scrollBar.setValue(scrollBar.maximum())
else:
needsUpdate = True
if not needsUpdate:
return
idx = 0
for project in _shared_globals.sortedProjects:
item = self.timelineWidget.topLevelItem(idx)
if project.startTime != 0:
item.setStartTime(project.startTime)
if project.buildEnd != 0:
item.setBuildEnd(project.buildEnd)
if project.linkQueueStart != 0:
item.setLinkQueueStart(project.linkQueueStart)
if project.linkStart != 0:
item.setLinkStart(project.linkStart)
if project.endTime != 0:
item.setEndTime(project.endTime)
if item.isExpanded() or item.childrenShowing():
item.setChildrenShowing(item.isExpanded())
def HandleChildTimeline( idx2, file ):
childWidget = item.child(idx2)
file = os.path.normcase(file)
project.mutex.acquire( )
try:
startTime = project.fileStart[file]
except KeyError:
startTime = 0
try:
endTime = project.fileEnd[file]
except KeyError:
endTime = 0
project.mutex.release( )
if startTime != 0:
childWidget.setStartTime(startTime)
if endTime != 0:
childWidget.setBuildEnd(endTime)
idx2 = 0
if project.needsPrecompileCpp:
HandleChildTimeline( idx2, project.cppHeaderFile )
idx2 += 1
if project.needsPrecompileC:
HandleChildTimeline( idx2, project.cHeaderFile )
idx2 += 1
used_chunks = set()
for source in project.allsources:
inThisBuild = False
if source not in project._finalChunkSet:
chunk = project.get_chunk( source )
if not chunk:
continue
extension = "." + source.rsplit(".", 1)[1]
if extension in project.cExtensions:
extension = ".c"
else:
extension = ".cpp"
chunk = os.path.join( project.csbuildDir, "{}{}".format( chunk, extension ) )
if chunk in used_chunks:
continue
if chunk in project._finalChunkSet:
inThisBuild = True
source = chunk
used_chunks.add(chunk)
else:
inThisBuild = True
if inThisBuild:
HandleChildTimeline( idx2, source )
idx2 += 1
idx += 1
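# Refreshes the "Build Progress" tree. When triggered by itemExpanded only that project
# is redrawn; on a timer tick all projects flagged as updated are redrawn.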
def UpdateProjects(self, expandedItem = None):
updatedProjects = []
if expandedItem is not None:
text = str( expandedItem.text(0) )
if text and text in self.itemToProject:
updatedProjects = [ self.itemToProject[text] ]
else:
for project in _shared_globals.sortedProjects:
with project.mutex:
if project.updated or project:
updatedProjects.append(project)
project.updated = False
class SharedLocals(object):
foundAnError = bool(self.warningErrorCount != 0)
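# Local helper: redraws one tree row (project or file) from its build state - progress
# bar colour/value and status text, warning/error counts, and the start/finish/duration
# columns.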
def drawProgressBar( progressBar, widget, state, startTime, endTime, percent, forFile, warnings, errors ):
if warnings > 0:
brush = QtGui.QBrush( QtCore.Qt.darkYellow )
font = QtGui.QFont()
font.setBold(True)
widget.setForeground( 7, brush )
widget.setFont( 7, font )
if errors > 0:
brush = QtGui.QBrush( QtCore.Qt.red )
font = QtGui.QFont()
font.setBold(True)
widget.setForeground( 8, brush )
widget.setFont( 8, font )
if ( warnings > 0 or errors > 0 ) and not SharedLocals.foundAnError:
self.m_buildTree.setCurrentItem(widget)
if not self.m_pushButton.isChecked():
self.m_pushButton.setChecked(True)
SharedLocals.foundAnError = True
if _shared_globals.ProjectState.BUILDING <= state < _shared_globals.ProjectState.FAILED:
if not forFile or state != _shared_globals.ProjectState.BUILDING:
if state == _shared_globals.ProjectState.BUILDING:
if percent < 1:
percent = 1
value = progressBar.value()
quarter = max( 4.0, (percent - value) / 4.0 )
if value < percent - quarter:
progressBar.setValue( value + quarter )
else:
progressBar.setValue( percent )
else:
progressBar.setValue( percent )
progressBar.setTextVisible(True)
if widget.text(1) != str(percent):
widget.setText(1, str(percent))
else:
if widget.text(1) != "0":
widget.setText(1, "0")
progressBar.setFormat( "%p%" )
if state >= _shared_globals.ProjectState.BUILDING:
widget.setText(7, str(warnings))
widget.setText(8, str(errors))
widget.setText(9, time.asctime(time.localtime(startTime)))
if state == _shared_globals.ProjectState.BUILDING:
self.animatingBars[progressBar] = ( widget, state, startTime, endTime, percent, forFile, warnings, errors )
widget.setText(2, "Building")
if forFile:
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{{
background-color: #FF{:02x}00;
}}
QProgressBar
{{
border: 1px solid black;
border-radius: 3px;
padding: 0px;
text-align: center;
}}
""".format(self.pulseColor+127)
)
progressBar.setValue( 100 )
progressBar.setTextVisible(False)
else:
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{{
background-color: #00{:02x}FF;
}}
QProgressBar
{{
border: 1px solid black;
border-radius: 3px;
padding: 0px;
text-align: center;
}}
""".format(self.pulseColor)
)
elif state == _shared_globals.ProjectState.LINK_QUEUED:
if progressBar in self.animatingBars:
del self.animatingBars[progressBar]
widget.setText(2,"Link/Queue")
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{
background-color: #00C0C0;
}
QProgressBar
{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
}
"""
)
elif state == _shared_globals.ProjectState.WAITING_FOR_LINK:
if progressBar in self.animatingBars:
del self.animatingBars[progressBar]
widget.setText(2,"Link/Wait")
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{
background-color: #008080;
}
QProgressBar
{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
}
"""
)
elif state == _shared_globals.ProjectState.LINKING:
self.animatingBars[progressBar] = ( widget, state, startTime, endTime, percent, forFile, warnings, errors )
widget.setText(2, "Linking")
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{{
background-color: #00E0{:02x};
}}
QProgressBar
{{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
color: black;
}}
""".format(self.pulseColor + 64)
)
elif state == _shared_globals.ProjectState.FINISHED:
if progressBar in self.animatingBars:
del self.animatingBars[progressBar]
widget.setText(2, "Done!")
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{{
background-color: #{};
}}
QProgressBar
{{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
color: black;
}}
""".format( "ADFFD0" if forFile else "00FF80" )
)
widget.setText(10, time.asctime(time.localtime(endTime)))
timeDiff = endTime - startTime
minutes = math.floor( timeDiff / 60 )
seconds = math.floor( timeDiff % 60 )
widget.setText(11, "{0:2}:{1:02}".format( int(minutes), int(seconds) ) )
elif state == _shared_globals.ProjectState.FAILED or state == _shared_globals.ProjectState.LINK_FAILED:
if progressBar in self.animatingBars:
del self.animatingBars[progressBar]
progressBar.setTextVisible(True)
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{
background-color: #800000;
}
QProgressBar
{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
}
"""
)
progressBar.setValue(100)
if state == _shared_globals.ProjectState.FAILED:
widget.setText(2, "Failed!")
progressBar.setFormat("FAILED!")
else:
widget.setText(2, "Link Failed!")
progressBar.setFormat("LINK FAILED!")
widget.setText(10, time.asctime(time.localtime(endTime)))
timeDiff = endTime - startTime
minutes = math.floor( timeDiff / 60 )
seconds = math.floor( timeDiff % 60 )
widget.setText(11, "{0:2}:{1:02}".format( int(minutes), int(seconds) ) )
elif state == _shared_globals.ProjectState.UP_TO_DATE:
self.SetProgressBarUpToDate( progressBar, widget, endTime, startTime, forFile )
elif state == _shared_globals.ProjectState.ABORTED:
if progressBar in self.animatingBars:
del self.animatingBars[progressBar]
widget.setText(2, "Aborted!")
progressBar.setTextVisible(True)
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{
background-color: #800040;
}
QProgressBar
{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
}
"""
)
progressBar.setValue(100)
if forFile:
progressBar.setFormat("ABORTED! (PCH Failed!)")
else:
progressBar.setFormat("ABORTED! (Dependency Failed!)")
if updatedProjects:
self.m_buildTree.setSortingEnabled(False)
if self.pulseColor == 0 or self.pulseColor == 128:
self.pulseUp = not self.pulseUp
if self.pulseUp:
self.pulseColor += 32
else:
self.pulseColor -= 32
if self.pulseColor > 128:
self.pulseColor = 128
if self.pulseColor < 0:
self.pulseColor = 0
selectedWidget = self.m_buildTree.currentItem()
for project in updatedProjects:
widget = self.projectToItem[project]
if not widget:
continue
if selectedWidget == widget:
self.SelectionChanged(selectedWidget, selectedWidget)
progressBar = self.m_buildTree.itemWidget(widget, 1)
project.mutex.acquire( )
complete = project.compilationCompleted
project.mutex.release( )
total = len( project._finalChunkSet ) + int(
project.needsPrecompileC ) + int(
project.needsPrecompileCpp )
percent = 100 if total == 0 else ( float(complete) / float(total) ) * 100
if percent == 100 and project.state < _shared_globals.ProjectState.FINISHED:
percent = 99
drawProgressBar( progressBar, widget, project.state, project.startTime, project.endTime, percent, False, project.warnings, project.errors )
if project.state == _shared_globals.ProjectState.FINISHED or project.state == _shared_globals.ProjectState.UP_TO_DATE:
self.successfulBuilds.add(project.key)
elif(
project.state == _shared_globals.ProjectState.FAILED
or project.state == _shared_globals.ProjectState.LINK_FAILED
or project.state == _shared_globals.ProjectState.ABORTED
):
self.failedBuilds.add(project.key)
if widget.isExpanded():
def HandleChildProgressBar( idx, file ):
childWidget = widget.child(idx)
progressBar = self.m_buildTree.itemWidget(childWidget, 1)
file = os.path.normcase(file)
project.mutex.acquire( )
try:
state = project.fileStatus[file]
except KeyError:
state = _shared_globals.ProjectState.PENDING
try:
startTime = project.fileStart[file]
except KeyError:
startTime = 0
try:
endTime = project.fileEnd[file]
except KeyError:
endTime = 0
warnings = 0
errors = 0
if file in project.warningsByFile:
warnings = project.warningsByFile[file]
if file in project.errorsByFile:
errors = project.errorsByFile[file]
project.mutex.release( )
drawProgressBar( progressBar, childWidget, state, startTime, endTime, 0 if state <= _shared_globals.ProjectState.BUILDING else 100, True, warnings, errors )
if selectedWidget == childWidget:
self.SelectionChanged(selectedWidget, selectedWidget)
idx = 0
if project.needsPrecompileCpp:
HandleChildProgressBar( idx, project.cppHeaderFile )
idx += 1
if project.needsPrecompileC:
HandleChildProgressBar( idx, project.cHeaderFile )
idx += 1
used_chunks = set()
for source in project.allsources:
inThisBuild = False
if source not in project._finalChunkSet:
chunk = project.get_chunk( source )
if not chunk:
continue
extension = "." + source.rsplit(".", 1)[1]
if extension in project.cExtensions:
extension = ".c"
else:
extension = ".cpp"
chunk = os.path.join( project.csbuildDir, "{}{}".format( chunk, extension ) )
if chunk in used_chunks:
continue
if chunk in project._finalChunkSet:
inThisBuild = True
source = chunk
used_chunks.add(chunk)
else:
inThisBuild = True
if inThisBuild:
HandleChildProgressBar( idx, source )
idx += 1
self.m_buildTree.setSortingEnabled(True)
successcount = len(self.successfulBuilds)
failcount = len(self.failedBuilds)
self.m_successfulBuildsLabel.setText("Successful Builds: {}".format(successcount))
self.m_failedBuildsLabel.setText("Failed Builds: {}".format(failcount))
if failcount > 0:
font = QtGui.QFont()
font.setBold(True)
self.m_failedBuildsLabel.setFont( font )
palette = QtGui.QPalette()
palette.setColor( self.m_failedBuildsLabel.foregroundRole(), QtCore.Qt.red )
self.m_failedBuildsLabel.setPalette(palette)
if successcount + failcount == len(_shared_globals.sortedProjects):
if _shared_globals.profile and not self.readyToClose:
window = QtGui.QMainWindow(self)
window.centralWidget = QtGui.QWidget(window)
window.setCentralWidget(window.centralWidget)
layout = QtGui.QHBoxLayout(window.centralWidget)
window.editor = QtGui.QPlainTextEdit(window.centralWidget)
font = QtGui.QFont()
font.setFamily("monospace")
window.editor.setFont(font)
window.editor.setLineWrapMode(QtGui.QPlainTextEdit.NoWrap)
layout.addWidget(window.editor)
summedTimes = {}
for project in _shared_globals.sortedProjects:
for filename in project.summedTimes:
if filename in summedTimes:
summedTimes[filename] += project.summedTimes[filename]
else:
summedTimes[filename] = project.summedTimes[filename]
builder = StringIO()
for item in sorted(summedTimes.items(), key=lambda tup: tup[1], reverse=True):
builder.write("{:f}\t::{}\n".format(item[1], item[0]))
window.editor.setPlainText(builder.getvalue())
window.setWindowTitle("Profile Summary")
window.resize(1275,600)
window.show()
self.readyToClose = True
if _shared_globals.autoCloseGui and failcount == 0:
self.exiting = True
self.close()
if self.animatingBars:
for bar in self.animatingBars:
data = self.animatingBars[bar]
drawProgressBar( bar, *data )
def retranslateUi(self):
self.setWindowTitle("CSBuild {}".format(csbuild.__version__.strip()))
self.m_buildSummaryLabel.setText("Build Started at 00:00... (00:00)")
self.m_successfulBuildsLabel.setText("Successful Builds: 0")
self.m_failedBuildsLabel.setText("Failed Builds: 0")
self.m_warningLabel.setText("Warnings: 0")
self.m_errorLabel.setText("Errors: 0")
self.m_treeHeader.setText(0, "#")
self.m_treeHeader.setText(1, "Progress")
self.m_treeHeader.setText(2, "Status")
self.m_treeHeader.setText(3, "Name")
self.m_treeHeader.setText(4, "Target")
self.m_treeHeader.setText(5, "Arch")
self.m_treeHeader.setText(6, "Toolchain")
self.m_treeHeader.setText(7, "W")
self.m_treeHeader.setText(8, "E")
self.m_treeHeader.setText(9, "Build Started")
self.m_treeHeader.setText(10, "Build Finished")
self.m_treeHeader.setText(11, "Time")
self.m_treeHeader.setColumnNumeric(0)
self.m_treeHeader.setColumnNumeric(1)
self.m_treeHeader.setColumnNumeric(7)
self.m_treeHeader.setColumnNumeric(8)
self.m_buildTree.setColumnWidth( 0, 50 )
self.m_buildTree.setColumnWidth( 1, 250 )
self.m_buildTree.setColumnWidth( 2, 75 )
self.m_buildTree.setColumnWidth( 3, 125 )
self.m_buildTree.setColumnWidth( 4, 75 )
self.m_buildTree.setColumnWidth( 5, 75 )
self.m_buildTree.setColumnWidth( 6, 75 )
self.m_buildTree.setColumnWidth( 7, 25 )
self.m_buildTree.setColumnWidth( 8, 25 )
self.m_buildTree.setColumnWidth( 9, 175 )
self.m_buildTree.setColumnWidth( 10, 175 )
self.m_buildTree.setColumnWidth( 11, 50 )
self.m_timelineHeader.setText(0, "Name")
self.timelineWidget.setColumnWidth(0,250)
self.m_treeHeader2.setText(0, "Type")
self.m_treeHeader2.setText(1, "Output")
self.m_treeHeader2.setText(2, "File")
self.m_treeHeader2.setText(3, "Line")
self.m_treeHeader2.setText(4, "Col")
self.m_treeHeader2.setColumnNumeric(3)
self.m_treeHeader2.setColumnNumeric(4)
self.m_errorTree.setColumnWidth( 0, 50 )
self.m_errorTree.setColumnWidth( 1, max(250, self.m_errorTree.width() - 350) )
self.m_errorTree.setColumnWidth( 2, 200 )
self.m_errorTree.setColumnWidth( 3, 50 )
self.m_errorTree.setColumnWidth( 4, 50 )
self.m_filesCompletedLabel.setText("0/0 files compiled")
self.m_timeLeftLabel.setText("Est. Time Left: 0:00")
self.m_pushButton.setText(u"▴ Output ▴")
def onTick(self):
self.UpdateProjects()
self.UpdateTimeline(True)
self.tick += 1
totalCompletedCompiles = 0
for project in _shared_globals.sortedProjects:
totalCompletedCompiles += project.compilationCompleted
perc = 100 if _shared_globals.total_compiles == 0 else float(totalCompletedCompiles)/float(_shared_globals.total_compiles) * 100
if perc == 100 and not self.readyToClose:
perc = 99
self.m_mainProgressBar.setValue( perc )
self.m_filesCompletedLabel.setText("{}/{} files compiled".format(totalCompletedCompiles, _shared_globals.total_compiles))
curtime = time.time( )
timeDiff = curtime - _shared_globals.starttime
minutes = math.floor( timeDiff / 60 )
seconds = math.floor( timeDiff % 60 )
self.m_buildSummaryLabel.setText("Build Started {0}... ({1}:{2:02})".format( time.asctime(time.localtime(_shared_globals.starttime)), int(minutes), int(seconds) ))
with _shared_globals.sgmutex:
warningcount = _shared_globals.warningcount
errorcount = _shared_globals.errorcount
self.m_warningLabel.setText("Warnings: {}".format(warningcount))
self.m_errorLabel.setText("Errors: {}".format(errorcount))
if warningcount > 0:
font = QtGui.QFont()
font.setBold(True)
self.m_warningLabel.setFont( font )
palette = QtGui.QPalette()
palette.setColor( self.m_warningLabel.foregroundRole(), QtCore.Qt.darkYellow )
self.m_warningLabel.setPalette(palette)
if errorcount > 0:
font = QtGui.QFont()
font.setBold(True)
self.m_errorLabel.setFont( font )
palette = QtGui.QPalette()
palette.setColor( self.m_errorLabel.foregroundRole(), QtCore.Qt.red )
self.m_errorLabel.setPalette(palette)
self.warningErrorCount = warningcount + errorcount
if self.exitRequested:
self.timer.stop()
self.close()
elif self.readyToClose:
self.timer.stop()
def closeEvent(self, event):
if not self.readyToClose:
answer = QtGui.QMessageBox.question(
self,
"Really close?",
"A compile is still in progress. Closing will cancel it. Are you sure you want to close?",
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
QtGui.QMessageBox.No
)
if answer == QtGui.QMessageBox.Yes:
QMainWindow.closeEvent(self, event)
self.timer.stop()
os.kill(os.getpid(), signal.SIGINT)
else:
event.ignore()
else:
QMainWindow.closeEvent(self, event)
self.timer.stop()
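# Marks a row as "Up-to-date!": green chunk colour (file rows use a lighter shade), a
# full progress bar, and finish-time/duration columns when timing data is available.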
def SetProgressBarUpToDate( self, progressBar, widget, endTime, startTime, forFile ):
if progressBar in self.animatingBars:
del self.animatingBars[progressBar]
widget.setText(2, "Up-to-date!")
progressBar.setTextVisible(True)
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{{
background-color: #{};
}}
QProgressBar
{{
border: 1px solid black;
border-radius: 3px;
background: #505050;
padding: 0px;
text-align: center;
color: black;
}}
""".format( "ADFFD0" if forFile else "00FF80" )
)
progressBar.setValue(100)
progressBar.setFormat("Up-to-date!")
if endTime != 0 and startTime != 0:
widget.setText(10, time.asctime(time.localtime(endTime)))
timeDiff = endTime - startTime
minutes = math.floor( timeDiff / 60 )
seconds = math.floor( timeDiff % 60 )
widget.setText(11, "{0:2}:{1:02}".format( int(minutes), int(seconds) ) )
class GuiThread( threading.Thread ):
"""Multithreaded build system, launches a new thread to run the compiler in.
Uses a threading.BoundedSemaphore object to keep the number of threads equal to the number of processors on the
machine.
"""
def __init__( self ):
"""Initialize the object. Also handles above-mentioned bug with dummy threads."""
threading.Thread.__init__( self )
self.app = None
#Prevent certain versions of python from choking on dummy threads.
if not hasattr( threading.Thread, "_Thread__block" ):
threading.Thread._Thread__block = _shared_globals.dummy_block( )
def run( self ):
self.app = QApplication([])
global lock
lock.release()
window = MainWindow()
window.m_buildTree.setSortingEnabled(False)
row = 0
for project in _shared_globals.sortedProjects:
row += 1
widgetItem = TreeWidgetItem()
window.m_buildTree.addTopLevelItem(widgetItem)
widgetItem.setText(0, str(row))
widgetItem.setText(1, "1000")
widgetItem.setText(2, "Pending...")
widgetItem.setText(3, project.name)
widgetItem.setToolTip(3, project.name)
widgetItem.setText(4, project.targetName)
widgetItem.setToolTip(4, project.targetName)
widgetItem.setText(5, project.outputArchitecture)
widgetItem.setToolTip(5, project.outputArchitecture)
widgetItem.setText(6, project.activeToolchainName)
widgetItem.setToolTip(6, project.activeToolchainName)
widgetItem.setText(7, "0")
widgetItem.setText(8, "0")
widgetItem2 = TreeWidgetWithBarGraph(window.timelineWidget, window.timelineWidget, False)
window.timelineWidget.addTopLevelItem(widgetItem2)
widgetItem2.setText(0, "{} ({} {}/{})".format(project.name, project.targetName, project.outputArchitecture, project.activeToolchainName ))
window.projectToItem[project] = widgetItem
window.itemToProject[str(row)] = project
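# Embed a QProgressBar in the "Progress" column of the given row; it starts grey with
# the text "Pending..." and is recoloured later by drawProgressBar as the state changes.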
def AddProgressBar( widgetItem):
progressBar = QtGui.QProgressBar()
progressBar.setStyleSheet(
"""
QProgressBar::chunk
{
background-color: #808080;
}
QProgressBar
{
background-color: #808080;
border: 1px solid black;
border-radius: 3px;
padding: 0px;
text-align: center;
}
"""
)
progressBar.setFormat("Pending...")
progressBar.setValue(0)
window.m_buildTree.setItemWidget( widgetItem, 1, progressBar )
AddProgressBar( widgetItem )
idx = 0
font = QtGui.QFont()
font.setItalic(True)
if project.needsPrecompileCpp:
idx += 1
childItem = TreeWidgetItem( widgetItem )
childItem.setText(0, "{}.{}".format(row, idx))
childItem.setText(1, "1000")
childItem.setText(2, "Pending...")
childItem.setText(3, os.path.basename(project.cppHeaderFile))
childItem.setToolTip(3, project.cppHeaderFile)
childItem.setText(4, project.targetName)
childItem.setToolTip(4, project.targetName)
childItem.setText(5, project.outputArchitecture)
childItem.setToolTip(5, project.outputArchitecture)
childItem.setText(6, project.activeToolchainName)
childItem.setToolTip(6, project.activeToolchainName)
childItem.setText(7, "0")
childItem.setText(8, "0")
childItem.setFont(0, font)
childItem.setFont(1, font)
childItem.setFont(2, font)
childItem.setFont(3, font)
childItem.setFont(4, font)
childItem.setFont(5, font)
childItem.setFont(6, font)
childItem.setFont(7, font)
childItem.setFont(8, font)
childItem.setFont(9, font)
childItem.setFont(10, font)
AddProgressBar( childItem )
widgetItem.addChild(childItem)
timelineChild = TreeWidgetWithBarGraph(widgetItem2, window.timelineWidget, True)
timelineChild.setText(0, os.path.basename(project.cppHeaderFile))
timelineChild.setToolTip(0, project.cppHeaderFile)
widgetItem2.addChild(timelineChild)
for header in project.cppPchContents:
subChildItem = TreeWidgetItem( childItem )
subChildItem.setText( 0, os.path.basename(header) )
subChildItem.setFirstColumnSpanned(True)
subChildItem.setToolTip( 0, header )
childItem.addChild(subChildItem)
timelineSubChild = TreeWidgetItem(timelineChild)
timelineSubChild.setText( 0, os.path.basename(header) )
timelineSubChild.setFirstColumnSpanned(True)
timelineSubChild.setToolTip( 0, header )
timelineChild.addChild(timelineSubChild)
if project.needsPrecompileC:
idx += 1
childItem = TreeWidgetItem( widgetItem )
childItem.setText(0, "{}.{}".format(row, idx))
childItem.setText(1, "1000")
childItem.setText(2, "Pending...")
childItem.setText(3, os.path.basename(project.cHeaderFile))
childItem.setToolTip(3, project.cHeaderFile)
childItem.setText(4, project.targetName)
childItem.setToolTip(4, project.targetName)
childItem.setText(5, project.outputArchitecture)
childItem.setToolTip(5, project.outputArchitecture)
childItem.setText(6, project.activeToolchainName)
childItem.setToolTip(6, project.activeToolchainName)
childItem.setText(7, "0")
childItem.setText(8, "0")
childItem.setFont(0, font)
childItem.setFont(1, font)
childItem.setFont(2, font)
childItem.setFont(3, font)
childItem.setFont(4, font)
childItem.setFont(5, font)
childItem.setFont(6, font)
childItem.setFont(7, font)
childItem.setFont(8, font)
childItem.setFont(9, font)
childItem.setFont(10, font)
AddProgressBar( childItem )
widgetItem.addChild(childItem)
timelineChild = TreeWidgetItem(widgetItem2)
timelineChild.setText(0, os.path.basename(project.cHeaderFile))
timelineChild.setToolTip(0, project.cHeaderFile)
widgetItem2.addChild(timelineChild)
for header in project.cPchContents:
subChildItem = TreeWidgetItem( childItem )
subChildItem.setText( 0, os.path.basename(header) )
subChildItem.setFirstColumnSpanned(True)
subChildItem.setToolTip( 0, header )
childItem.addChild(subChildItem)
timelineSubChild = TreeWidgetItem(timelineChild)
timelineSubChild.setText( 0, os.path.basename(header) )
timelineSubChild.setFirstColumnSpanned(True)
timelineSubChild.setToolTip( 0, header )
timelineChild.addChild(timelineSubChild)
used_chunks = set()
for source in project.allsources:
inThisBuild = False
if source not in project._finalChunkSet:
chunk = project.get_chunk( source )
if not chunk:
continue
extension = "." + source.rsplit(".", 1)[1]
if extension in project.cExtensions:
extension = ".c"
else:
extension = ".cpp"
chunk = os.path.join( project.csbuildDir, "{}{}".format( chunk, extension ) )
if chunk in used_chunks:
continue
if chunk in project._finalChunkSet:
inThisBuild = True
source = chunk
used_chunks.add(chunk)
else:
inThisBuild = True
idx += 1
childItem = TreeWidgetItem( widgetItem )
childItem.setText(0, "{}.{}".format(row, idx))
if inThisBuild:
childItem.setText(1, "1000")
childItem.setText(2, "Pending...")
else:
childItem.setText(1, "100")
#"Up-to-date!" text gets set by window.SetProgressBarUpToDate
name = os.path.basename(source)
if source in project.splitChunks:
name = "[Split Chunk] {}".format(name)
childItem.setText(3, name)
childItem.setToolTip(3, source)
childItem.setText(4, project.targetName)
childItem.setToolTip(4, project.targetName)
childItem.setText(5, project.outputArchitecture)
childItem.setToolTip(5, project.outputArchitecture)
childItem.setText(6, project.activeToolchainName)
childItem.setToolTip(6, project.activeToolchainName)
childItem.setText(7, "0")
childItem.setText(8, "0")
childItem.setFont(0, font)
childItem.setFont(1, font)
childItem.setFont(2, font)
childItem.setFont(3, font)
childItem.setFont(4, font)
childItem.setFont(5, font)
childItem.setFont(6, font)
childItem.setFont(7, font)
childItem.setFont(8, font)
childItem.setFont(9, font)
childItem.setFont(10, font)
AddProgressBar( childItem )
if not inThisBuild:
window.SetProgressBarUpToDate( window.m_buildTree.itemWidget(childItem, 1), childItem, 0, 0, True )
widgetItem.addChild(childItem)
timelineChild = TreeWidgetWithBarGraph(widgetItem2, window.timelineWidget, True)
timelineChild.setText(0, os.path.basename(source))
timelineChild.setToolTip(0, source)
widgetItem2.addChild(timelineChild)
if source in project.chunksByFile:
for piece in project.chunksByFile[source]:
subChildItem = TreeWidgetItem( childItem )
subChildItem.setText( 0, os.path.basename( piece ) )
subChildItem.setFirstColumnSpanned(True)
subChildItem.setToolTip( 0, piece )
childItem.addChild(subChildItem)
timelineSubChild = TreeWidgetItem(timelineChild)
timelineSubChild.setText( 0, os.path.basename(piece) )
timelineSubChild.setFirstColumnSpanned(True)
timelineSubChild.setToolTip( 0, piece )
timelineChild.addChild(timelineSubChild)
window.m_buildTree.setSortingEnabled(True)
window.show()
self.window = window
self.app.exec_()
def stop(self):
self.window.exitRequested = True
_thread = None
lock = threading.Lock()
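# Start-up handshake: run() acquires the lock once (immediately) and then blocks on the
# second acquire until GuiThread.run() releases the lock after the QApplication has been
# created, so callers only continue once the GUI thread is ready.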
def run():
global _thread
_thread = GuiThread()
_thread.start()
lock.acquire()
lock.acquire()
def stop():
global _thread
if _thread:
_thread.stop()
_thread.join()
|
ShassAro/ShassAro | refs/heads/master | Bl_project/blVirtualEnv/lib/python2.7/site-packages/django/contrib/flatpages/views.py | 105 | from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.shortcuts import get_current_site
from django.http import Http404, HttpResponse, HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.template import loader, RequestContext
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
DEFAULT_TEMPLATE = 'flatpages/default.html'
# This view is called from FlatpageFallbackMiddleware.process_response
# when a 404 is raised, which often means CsrfViewMiddleware.process_view
# has not been called even if CsrfViewMiddleware is installed. So we need
# to use @csrf_protect, in case the template needs {% csrf_token %}.
# However, we can't just wrap this view; if no matching flatpage exists,
# or a redirect is required for authentication, the 404 needs to be returned
# without any CSRF checks. Therefore, we only
# CSRF protect the internal implementation.
def flatpage(request, url):
"""
Public interface to the flat page view.
Models: `flatpages.flatpages`
Templates: Uses the template defined by the ``template_name`` field,
or :template:`flatpages/default.html` if template_name is not defined.
Context:
flatpage
`flatpages.flatpages` object
"""
if not url.startswith('/'):
url = '/' + url
site_id = get_current_site(request).id
try:
f = get_object_or_404(FlatPage,
url=url, sites=site_id)
except Http404:
if not url.endswith('/') and settings.APPEND_SLASH:
url += '/'
f = get_object_or_404(FlatPage,
url=url, sites=site_id)
return HttpResponsePermanentRedirect('%s/' % request.path)
else:
raise
return render_flatpage(request, f)
@csrf_protect
def render_flatpage(request, f):
"""
Internal interface to the flat page view.
"""
# If registration is required for accessing this page, and the user isn't
# logged in, redirect to the login page.
if f.registration_required and not request.user.is_authenticated():
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(request.path)
if f.template_name:
t = loader.select_template((f.template_name, DEFAULT_TEMPLATE))
else:
t = loader.get_template(DEFAULT_TEMPLATE)
# To avoid having to always use the "|safe" filter in flatpage templates,
# mark the title and content as already safe (since they are raw HTML
# content in the first place).
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
c = RequestContext(request, {
'flatpage': f,
})
response = HttpResponse(t.render(c))
return response
|
google-code/android-scripting | refs/heads/master | python/src/Doc/includes/sqlite3/converter_point.py | 44 | import sqlite3
class Point(object):
def __init__(self, x, y):
self.x, self.y = x, y
def __repr__(self):
return "(%f;%f)" % (self.x, self.y)
def adapt_point(point):
return "%f;%f" % (point.x, point.y)
def convert_point(s):
x, y = map(float, s.split(";"))
return Point(x, y)
# Register the adapter
sqlite3.register_adapter(Point, adapt_point)
# Register the converter
sqlite3.register_converter("point", convert_point)
p = Point(4.0, -3.2)
#########################
# 1) Using declared types
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
cur = con.cursor()
cur.execute("create table test(p point)")
cur.execute("insert into test(p) values (?)", (p,))
cur.execute("select p from test")
print "with declared types:", cur.fetchone()[0]
cur.close()
con.close()
#######################
# 1) Using column names
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_COLNAMES)
cur = con.cursor()
cur.execute("create table test(p)")
cur.execute("insert into test(p) values (?)", (p,))
cur.execute('select p as "p [point]" from test')
print "with column names:", cur.fetchone()[0]
cur.close()
con.close()
|
letzerp/framework | refs/heads/v5.0 | frappe/desk/form/save.py | 4 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.desk.form.load import run_onload
@frappe.whitelist()
def savedocs():
"""save / submit / update doclist"""
try:
doc = frappe.get_doc(json.loads(frappe.form_dict.doc))
set_local_name(doc)
# action
doc.docstatus = {"Save":0, "Submit": 1, "Update": 1, "Cancel": 2}[frappe.form_dict.action]
try:
doc.save()
except frappe.NameError, e:
doctype, name, original_exception = e if isinstance(e, tuple) else (doc.doctype or "", doc.name or "", None)
frappe.msgprint(frappe._("{0} {1} already exists").format(doctype, name))
raise
# update recent documents
run_onload(doc)
send_updated_docs(doc)
except Exception:
frappe.msgprint(frappe._('Did not save'))
frappe.errprint(frappe.utils.get_traceback())
raise
@frappe.whitelist()
def cancel(doctype=None, name=None):
"""cancel a doclist"""
try:
doc = frappe.get_doc(doctype, name)
doc.cancel()
send_updated_docs(doc)
except Exception:
frappe.errprint(frappe.utils.get_traceback())
frappe.msgprint(frappe._("Did not cancel"))
raise
def send_updated_docs(doc):
from load import get_docinfo
get_docinfo(doc)
d = doc.as_dict()
if hasattr(doc, 'localname'):
d["localname"] = doc.localname
frappe.response.docs.append(d)
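# For documents that are still local (unsaved), the name sent by the client is stashed in
# `localname` and cleared so the server assigns the real name on insert; send_updated_docs
# returns `localname` so the client can match the saved doc back to its unsaved form.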
def set_local_name(doc):
def _set_local_name(d):
if doc.get('__islocal') or d.get('__islocal'):
d.localname = d.name
d.name = None
_set_local_name(doc)
for child in doc.get_all_children():
_set_local_name(child)
if doc.get("__newname"):
doc.name = doc.get("__newname")
|
Hackplayers/Empire-mod-Hackplayers | refs/heads/master | lib/modules/python/management/osx/ls_m.py | 6 | from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'ls_m',
# list of one or more authors for the module
'Author': ['@xorrior'],
# more verbose multi-line description of the module
'Description': ('List contents of a directory'),
# True if the module needs to run in the background
'Background': False,
# File extension to save the file as
# no need to base64 return data
'OutputExtension': None,
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe': True,
# the module language
'Language' : 'python',
# the minimum language version needed
'MinLanguageVersion' : '2.6',
# list of any references/other comments
'Comments': [
'Link:',
'http://stackoverflow.com/questions/17809386/how-to-convert-a-stat-output-to-a-unix-permissions-string'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent': {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to run the module.',
'Required' : True,
'Value' : ''
},
'Path': {
'Description' : 'Path. Defaults to the current directory. This module is mainly for organization. The alias \'ls\' can be used at the agent menu.',
'Required' : True,
'Value' : '.'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
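# Builds the Python payload executed by the agent: it uses PyObjC (Foundation/AppKit) and
# NSFileManager to produce an "ls -l"-style listing of the requested path.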
def generate(self, obfuscate=False, obfuscationCommand=""):
filePath = self.options['Path']['Value']
filePath += '/'
script = """
try:
import Foundation
from AppKit import *
import os
import stat
except:
print "A required module is missing.."
def permissions_to_unix_name(st_mode):
permstr = ''
usertypes = ['USR', 'GRP', 'OTH']
for usertype in usertypes:
perm_types = ['R', 'W', 'X']
for permtype in perm_types:
perm = getattr(stat, 'S_I%%s%%s' %% (permtype, usertype))
if st_mode & perm:
permstr += permtype.lower()
else:
permstr += '-'
return permstr
path = "%s"
dirlist = os.listdir(path)
filemgr = NSFileManager.defaultManager()
directoryListString = "\\t\\towner\\tgroup\\t\\tlast modified\\tsize\\t\\tname\\n"
for item in dirlist:
fullpath = os.path.abspath(os.path.join(path,item))
attrs = filemgr.attributesOfItemAtPath_error_(os.path.abspath(fullpath), None)
name = item
lastModified = str(attrs[0]['NSFileModificationDate'])
group = str(attrs[0]['NSFileGroupOwnerAccountName'])
owner = str(attrs[0]['NSFileOwnerAccountName'])
size = str(os.path.getsize(fullpath))
if int(size) > 1024:
size = int(size) / 1024
size = str(size) + "K"
else:
size += "B"
perms = permissions_to_unix_name(os.stat(fullpath)[0])
listString = perms + " " + owner + "\\t" + group + "\\t\\t" + lastModified.split(" ")[0] + "\\t" + size + "\\t\\t" + name + "\\n"
if os.path.isdir(fullpath):
listString = "d"+listString
else:
listString = "-"+listString
directoryListString += listString
print str(os.getcwd())
print directoryListString
""" % filePath
return script |
ly0/xxadmin | refs/heads/master | xadmin/plugins/xversion.py | 1 | from django.contrib.contenttypes.generic import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied
from django.db import models
from django.db.models.query import QuerySet
from django import get_version
v = get_version()
if v[:3] > '1.7':
from django.db.models.fields.related import ForeignObjectRel
else:
from django.db.models.related import RelatedObject as ForeignObjectRel
from django.forms.models import model_to_dict
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
import sys
if sys.version_info.major < 3:
from django.utils.encoding import force_unicode as force_text
else:
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from xadmin.layout import Field, render_field
from xadmin.plugins.inline import Inline
from xadmin.plugins.actions import BaseActionView
from xadmin.plugins.inline import InlineModelAdmin
from xadmin.sites import site
from xadmin.util import unquote, quote, model_format_dict
from xadmin.views import BaseAdminPlugin, ModelAdminView, CreateAdminView, UpdateAdminView, DetailAdminView, ModelFormAdminView, DeleteAdminView, ListAdminView
from xadmin.views.base import csrf_protect_m, filter_hook
from xadmin.views.detail import DetailAdminUtil
from reversion.models import Revision, Version
from reversion.revisions import default_revision_manager, RegistrationError
from functools import partial
#python 3 compatibility
try:
xrange
except NameError:
xrange = range
try:
basestring
except NameError:
basestring = str
def _autoregister(admin, model, follow=None):
"""Registers a model with reversion, if required."""
if model._meta.proxy:
raise RegistrationError("Proxy models cannot be used with django-reversion, register the parent class instead")
if not admin.revision_manager.is_registered(model):
follow = follow or []
for parent_cls, field in model._meta.parents.items():
follow.append(field.name)
_autoregister(admin, parent_cls)
admin.revision_manager.register(
model, follow=follow, format=admin.reversion_format)
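# Registers a model with django-reversion together with the models used by its inline
# admins, following the relevant relations so revisions of the parent also capture the
# inline data.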
def _register_model(admin, model):
if not hasattr(admin, 'revision_manager'):
admin.revision_manager = default_revision_manager
if not hasattr(admin, 'reversion_format'):
admin.reversion_format = 'json'
if not admin.revision_manager.is_registered(model):
inline_fields = []
for inline in getattr(admin, 'inlines', []):
inline_model = inline.model
if getattr(inline, 'generic_inline', False):
ct_field = getattr(inline, 'ct_field', 'content_type')
ct_fk_field = getattr(inline, 'ct_fk_field', 'object_id')
for field in model._meta.many_to_many:
if isinstance(field, GenericRelation) and field.rel.to == inline_model and field.object_id_field_name == ct_fk_field and field.content_type_field_name == ct_field:
inline_fields.append(field.name)
_autoregister(admin, inline_model)
else:
fk_name = getattr(inline, 'fk_name', None)
if not fk_name:
for field in inline_model._meta.fields:
if isinstance(field, (models.ForeignKey, models.OneToOneField)) and issubclass(model, field.rel.to):
fk_name = field.name
_autoregister(admin, inline_model, follow=[fk_name])
if not inline_model._meta.get_field(fk_name).rel.is_hidden():
accessor = inline_model._meta.get_field(
fk_name).related.get_accessor_name()
inline_fields.append(accessor)
_autoregister(admin, model, inline_fields)
def register_models(admin_site=None):
if admin_site is None:
admin_site = site
for model, admin in admin_site._registry.items():
if getattr(admin, 'reversion_enable', False):
_register_model(admin, model)
class ReversionPlugin(BaseAdminPlugin):
# The revision manager instance used to manage revisions.
revision_manager = default_revision_manager
# The serialization format to use when registering models with reversion.
reversion_format = "json"
# Whether to ignore duplicate revision data.
ignore_duplicate_revisions = False
reversion_enable = False
def init_request(self, *args, **kwargs):
return self.reversion_enable
@property
def revision_context_manager(self):
"""The revision context manager for this VersionAdmin."""
return self.revision_manager._revision_context_manager
def get_revision_instances(self, obj):
"""Returns all the instances to be used in the object's revision."""
return [obj]
def get_revision_data(self, obj, flag):
"""Returns all the revision data to be used in the object's revision."""
return dict(
(o, self.revision_manager.get_adapter(
o.__class__).get_version_data(o, flag))
for o in self.get_revision_instances(obj)
)
def save_revision(self, obj, tag, comment):
self.revision_manager.save_revision(
self.get_revision_data(obj, tag),
user=self.user,
comment=comment,
ignore_duplicates=self.ignore_duplicate_revisions,
db=self.revision_context_manager.get_db(),
)
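# Every POST handled by the admin view runs inside a reversion revision context; the
# revision comment is chosen from the kind of view (create/update/revert/recover/delete).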
def do_post(self, __):
def _method():
self.revision_context_manager.set_user(self.user)
comment = ''
admin_view = self.admin_view
if isinstance(admin_view, CreateAdminView):
comment = _(u"Initial version.")
elif isinstance(admin_view, UpdateAdminView):
comment = _(u"Change version.")
elif isinstance(admin_view, RevisionView):
comment = _(u"Revert version.")
elif isinstance(admin_view, RecoverView):
comment = _(u"Rercover version.")
elif isinstance(admin_view, DeleteAdminView):
comment = _(u"Deleted %(verbose_name)s.") % {
"verbose_name": self.opts.verbose_name}
self.revision_context_manager.set_comment(comment)
return __()
return _method
def post(self, __, request, *args, **kwargs):
return self.revision_context_manager.create_revision(manage_manually=False)(self.do_post(__))()
# def save_models(self, __):
# self.revision_context_manager.create_revision(manage_manually=True)(__)()
# if self.admin_view.org_obj is None:
# self.save_revision(self.admin_view.new_obj, VERSION_ADD, _(u"Initial version."))
# else:
# self.save_revision(self.admin_view.new_obj, VERSION_CHANGE, _(u"Change version."))
# def save_related(self, __):
# self.revision_context_manager.create_revision(manage_manually=True)(__)()
# def delete_model(self, __):
# self.save_revision(self.admin_view.obj, VERSION_DELETE, \
# _(u"Deleted %(verbose_name)s.") % {"verbose_name": self.opts.verbose_name})
# self.revision_context_manager.create_revision(manage_manually=True)(__)()
# Block Views
def block_top_toolbar(self, context, nodes):
recoverlist_url = self.admin_view.model_admin_url('recoverlist')
nodes.append(mark_safe('<div class="btn-group"><a class="btn btn-default btn-sm" href="%s"><i class="fa fa-trash-o"></i> %s</a></div>' % (recoverlist_url, _(u"Recover"))))
def block_nav_toggles(self, context, nodes):
obj = getattr(
self.admin_view, 'org_obj', getattr(self.admin_view, 'obj', None))
if obj:
revisionlist_url = self.admin_view.model_admin_url(
'revisionlist', quote(obj.pk))
nodes.append(mark_safe('<a href="%s" class="navbar-toggle pull-right"><i class="fa fa-time"></i></a>' % revisionlist_url))
def block_nav_btns(self, context, nodes):
obj = getattr(
self.admin_view, 'org_obj', getattr(self.admin_view, 'obj', None))
if obj:
revisionlist_url = self.admin_view.model_admin_url(
'revisionlist', quote(obj.pk))
nodes.append(mark_safe('<a href="%s" class="btn btn-default"><i class="fa fa-time"></i> <span>%s</span></a>' % (revisionlist_url, _(u'History'))))
class BaseReversionView(ModelAdminView):
# The revision manager instance used to manage revisions.
revision_manager = default_revision_manager
# The serialization format to use when registering models with reversion.
reversion_format = "json"
# Whether to ignore duplicate revision data.
ignore_duplicate_revisions = False
# If True, then the default ordering of object_history and recover lists will be reversed.
history_latest_first = False
reversion_enable = False
def init_request(self, *args, **kwargs):
if not self.has_change_permission() and not self.has_add_permission():
raise PermissionDenied
def _order_version_queryset(self, queryset):
"""Applies the correct ordering to the given version queryset."""
if self.history_latest_first:
return queryset.order_by("-pk")
return queryset.order_by("pk")
class RecoverListView(BaseReversionView):
recover_list_template = None
def get_context(self):
context = super(RecoverListView, self).get_context()
opts = self.opts
deleted = self._order_version_queryset(
self.revision_manager.get_deleted(self.model))
context.update({
"opts": opts,
"app_label": opts.app_label,
"model_name": capfirst(opts.verbose_name),
"title": _("Recover deleted %(name)s") % {"name": force_text(opts.verbose_name_plural)},
"deleted": deleted,
"changelist_url": self.model_admin_url("changelist"),
})
return context
@csrf_protect_m
def get(self, request, *args, **kwargs):
context = self.get_context()
return TemplateResponse(
request, self.recover_list_template or self.get_template_list(
"views/recover_list.html"),
context, current_app=self.admin_site.name)
class RevisionListView(BaseReversionView):
object_history_template = None
revision_diff_template = None
def get_context(self):
context = super(RevisionListView, self).get_context()
opts = self.opts
action_list = [
{
"revision": version.revision,
"url": self.model_admin_url('revision', quote(version.object_id), version.id),
"version": version
}
for version
in self._order_version_queryset(self.revision_manager.get_for_object_reference(
self.model,
self.obj.pk,
).select_related("revision__user"))
]
context.update({
'title': _('Change history: %s') % force_text(self.obj),
'action_list': action_list,
'model_name': capfirst(force_text(opts.verbose_name_plural)),
'object': self.obj,
'app_label': opts.app_label,
"changelist_url": self.model_admin_url("changelist"),
"update_url": self.model_admin_url("change", self.obj.pk),
'opts': opts,
})
return context
def get(self, request, object_id, *args, **kwargs):
object_id = unquote(object_id)
self.obj = self.get_object(object_id)
if not self.has_change_permission(self.obj):
raise PermissionDenied
return self.get_response()
def get_response(self):
context = self.get_context()
return TemplateResponse(self.request, self.object_history_template or
self.get_template_list('views/model_history.html'), context, current_app=self.admin_site.name)
def get_version_object(self, version):
obj_version = version.object_version
obj = obj_version.object
obj._state.db = self.obj._state.db
for field_name, pks in obj_version.m2m_data.items():
f = self.opts.get_field(field_name)
if f.rel and isinstance(f.rel, models.ManyToManyRel):
setattr(obj, f.name, f.rel.to._default_manager.get_query_set(
).filter(pk__in=pks).all())
detail = self.get_model_view(DetailAdminUtil, self.model, obj)
return obj, detail
def post(self, request, object_id, *args, **kwargs):
object_id = unquote(object_id)
self.obj = self.get_object(object_id)
if not self.has_change_permission(self.obj):
raise PermissionDenied
params = self.request.POST
if 'version_a' not in params or 'version_b' not in params:
self.message_user(_("Must select two versions."), 'error')
return self.get_response()
version_a_id = params['version_a']
version_b_id = params['version_b']
if version_a_id == version_b_id:
self.message_user(
_("Please select two different versions."), 'error')
return self.get_response()
version_a = get_object_or_404(Version, pk=version_a_id)
version_b = get_object_or_404(Version, pk=version_b_id)
diffs = []
obj_a, detail_a = self.get_version_object(version_a)
obj_b, detail_b = self.get_version_object(version_b)
for f in (self.opts.fields + self.opts.many_to_many):
if isinstance(f, ForeignObjectRel):
label = f.opts.verbose_name
else:
label = f.verbose_name
value_a = f.value_from_object(obj_a)
value_b = f.value_from_object(obj_b)
is_diff = value_a != value_b
if type(value_a) in (list, tuple) and type(value_b) in (list, tuple) \
and len(value_a) == len(value_b) and is_diff:
is_diff = False
for i in xrange(len(value_a)):
                    if value_a[i] != value_b[i]:
is_diff = True
break
if type(value_a) is QuerySet and type(value_b) is QuerySet:
is_diff = list(value_a) != list(value_b)
diffs.append((label, detail_a.get_field_result(
f.name).val, detail_b.get_field_result(f.name).val, is_diff))
context = super(RevisionListView, self).get_context()
context.update({
'object': self.obj,
'opts': self.opts,
'version_a': version_a,
'version_b': version_b,
'revision_a_url': self.model_admin_url('revision', quote(version_a.object_id), version_a.id),
'revision_b_url': self.model_admin_url('revision', quote(version_b.object_id), version_b.id),
'diffs': diffs
})
return TemplateResponse(
self.request, self.revision_diff_template or self.get_template_list('views/revision_diff.html'),
context, current_app=self.admin_site.name)
@filter_hook
def get_media(self):
return super(RevisionListView, self).get_media() + self.vendor('xadmin.plugin.revision.js', 'xadmin.form.css')
class BaseRevisionView(ModelFormAdminView):
@filter_hook
def get_revision(self):
return self.version.field_dict
@filter_hook
def get_form_datas(self):
datas = {"instance": self.org_obj, "initial": self.get_revision()}
if self.request_method == 'post':
datas.update(
{'data': self.request.POST, 'files': self.request.FILES})
return datas
@filter_hook
def get_context(self):
context = super(BaseRevisionView, self).get_context()
context.update({
'object': self.org_obj
})
return context
@filter_hook
def get_media(self):
return super(BaseRevisionView, self).get_media() + self.vendor('xadmin.plugin.revision.js')
class DiffField(Field):
def render(self, form, form_style, context):
html = ''
for field in self.fields:
html += ('<div class="diff_field" rel="tooltip"><textarea class="org-data" style="display:none;">%s</textarea>%s</div>' %
(_('Current: %s') % self.attrs.pop('orgdata', ''), render_field(field, form, form_style, context, template=self.template, attrs=self.attrs)))
return html
class RevisionView(BaseRevisionView):
revision_form_template = None
def init_request(self, object_id, version_id):
self.detail = self.get_model_view(
DetailAdminView, self.model, object_id)
self.org_obj = self.detail.obj
self.version = get_object_or_404(
Version, pk=version_id, object_id=str(self.org_obj.pk))
self.prepare_form()
def get_form_helper(self):
helper = super(RevisionView, self).get_form_helper()
diff_fields = {}
version_data = self.version.field_dict
for f in self.opts.fields:
if f.value_from_object(self.org_obj) != version_data.get(f.name, None):
diff_fields[f.name] = self.detail.get_field_result(f.name).val
for k, v in diff_fields.items():
helper[k].wrap(DiffField, orgdata=v)
return helper
@filter_hook
def get_context(self):
context = super(RevisionView, self).get_context()
context["title"] = _(
"Revert %s") % force_text(self.model._meta.verbose_name)
return context
@filter_hook
def get_response(self):
context = self.get_context()
context.update(self.kwargs or {})
form_template = self.revision_form_template
return TemplateResponse(
self.request, form_template or self.get_template_list(
'views/revision_form.html'),
context, current_app=self.admin_site.name)
@filter_hook
def post_response(self):
self.message_user(_('The %(model)s "%(name)s" was reverted successfully. You may edit it again below.') %
{"model": force_text(self.opts.verbose_name), "name": str(self.new_obj)}, 'success')
return HttpResponseRedirect(self.model_admin_url('change', self.new_obj.pk))
class RecoverView(BaseRevisionView):
recover_form_template = None
def init_request(self, version_id):
if not self.has_change_permission() and not self.has_add_permission():
raise PermissionDenied
self.version = get_object_or_404(Version, pk=version_id)
self.org_obj = self.version.object_version.object
self.prepare_form()
@filter_hook
def get_context(self):
context = super(RecoverView, self).get_context()
context["title"] = _("Recover %s") % self.version.object_repr
return context
@filter_hook
def get_response(self):
context = self.get_context()
context.update(self.kwargs or {})
form_template = self.recover_form_template
return TemplateResponse(
self.request, form_template or self.get_template_list(
'views/recover_form.html'),
context, current_app=self.admin_site.name)
@filter_hook
def post_response(self):
self.message_user(_('The %(model)s "%(name)s" was recovered successfully. You may edit it again below.') %
{"model": force_text(self.opts.verbose_name), "name": str(self.new_obj)}, 'success')
return HttpResponseRedirect(self.model_admin_url('change', self.new_obj.pk))
class InlineDiffField(Field):
def render(self, form, form_style, context):
html = ''
instance = form.instance
if not instance.pk:
return super(InlineDiffField, self).render(form, form_style, context)
initial = form.initial
opts = instance._meta
detail = form.detail
for field in self.fields:
f = opts.get_field(field)
f_html = render_field(field, form, form_style, context,
template=self.template, attrs=self.attrs)
if f.value_from_object(instance) != initial.get(field, None):
current_val = detail.get_field_result(f.name).val
html += ('<div class="diff_field" rel="tooltip"><textarea class="org-data" style="display:none;">%s</textarea>%s</div>'
% (_('Current: %s') % current_val, f_html))
else:
html += f_html
return html
# inline hack plugin
class InlineRevisionPlugin(BaseAdminPlugin):
def get_related_versions(self, obj, version, formset):
"""Retreives all the related Version objects for the given FormSet."""
object_id = obj.pk
# Get the fk name.
try:
fk_name = formset.fk.name
except AttributeError:
# This is a GenericInlineFormset, or similar.
fk_name = formset.ct_fk_field.name
# Look up the revision data.
revision_versions = version.revision.version_set.all()
related_versions = dict([(related_version.object_id, related_version)
for related_version in revision_versions
if ContentType.objects.get_for_id(related_version.content_type_id).model_class() == formset.model
and str(related_version.field_dict[fk_name]) == str(object_id)])
return related_versions
def _hack_inline_formset_initial(self, revision_view, formset):
"""Hacks the given formset to contain the correct initial data."""
# Now we hack it to push in the data from the revision!
initial = []
related_versions = self.get_related_versions(
revision_view.org_obj, revision_view.version, formset)
formset.related_versions = related_versions
for related_obj in formset.queryset:
if str(related_obj.pk) in related_versions:
initial.append(
related_versions.pop(str(related_obj.pk)).field_dict)
else:
initial_data = model_to_dict(related_obj)
initial_data["DELETE"] = True
initial.append(initial_data)
for related_version in related_versions.values():
initial_row = related_version.field_dict
pk_name = ContentType.objects.get_for_id(
related_version.content_type_id).model_class()._meta.pk.name
del initial_row[pk_name]
initial.append(initial_row)
# Reconstruct the forms with the new revision data.
formset.initial = initial
formset.forms = [formset._construct_form(
n) for n in xrange(len(initial))]
# Hack the formset to force a save of everything.
def get_changed_data(form):
return [field.name for field in form.fields]
for form in formset.forms:
form.has_changed = lambda: True
form._get_changed_data = partial(get_changed_data, form=form)
def total_form_count_hack(count):
return lambda: count
formset.total_form_count = total_form_count_hack(len(initial))
if self.request.method == 'GET' and formset.helper and formset.helper.layout:
helper = formset.helper
helper.filter(basestring).wrap(InlineDiffField)
fake_admin_class = type(str('%s%sFakeAdmin' % (self.opts.app_label, self.opts.model_name)), (object, ), {'model': self.model})
for form in formset.forms:
instance = form.instance
if instance.pk:
form.detail = self.get_view(
DetailAdminUtil, fake_admin_class, instance)
def instance_form(self, formset, **kwargs):
admin_view = self.admin_view.admin_view
if hasattr(admin_view, 'version') and hasattr(admin_view, 'org_obj'):
self._hack_inline_formset_initial(admin_view, formset)
return formset
# action revision
class ActionRevisionPlugin(BaseAdminPlugin):
revision_manager = default_revision_manager
reversion_enable = False
def init_request(self, *args, **kwargs):
return self.reversion_enable
@property
def revision_context_manager(self):
return self.revision_manager._revision_context_manager
def do_action_func(self, __):
def _method():
self.revision_context_manager.set_user(self.user)
action_view = self.admin_view
comment = action_view.description % model_format_dict(self.opts)
self.revision_context_manager.set_comment(comment)
return __()
return _method
def do_action(self, __, queryset):
return self.revision_context_manager.create_revision(manage_manually=False)(self.do_action_func(__))()
class VersionInline(object):
model = Version
extra = 0
style = 'accordion'
class ReversionAdmin(object):
model_icon = 'fa fa-exchange'
list_display = ('__str__', 'date_created', 'user', 'comment')
list_display_links = ('__str__',)
list_filter = ('date_created', 'user')
inlines = [VersionInline]
site.register(Revision, ReversionAdmin)
site.register_modelview(
r'^recover/$', RecoverListView, name='%s_%s_recoverlist')
site.register_modelview(
r'^recover/([^/]+)/$', RecoverView, name='%s_%s_recover')
site.register_modelview(
r'^([^/]+)/revision/$', RevisionListView, name='%s_%s_revisionlist')
site.register_modelview(
r'^([^/]+)/revision/([^/]+)/$', RevisionView, name='%s_%s_revision')
site.register_plugin(ReversionPlugin, ListAdminView)
site.register_plugin(ReversionPlugin, ModelFormAdminView)
site.register_plugin(ReversionPlugin, DeleteAdminView)
site.register_plugin(InlineRevisionPlugin, InlineModelAdmin)
site.register_plugin(ActionRevisionPlugin, BaseActionView)
|
samithaj/headphones | refs/heads/master | lib/beets/util/bluelet.py | 17 | """Extremely simple pure-Python implementation of coroutine-style
asynchronous socket I/O. Inspired by, but inferior to, Eventlet.
Bluelet can also be thought of as a less-terrible replacement for
asyncore.
Bluelet: easy concurrency without all the messy parallelism.
"""
import socket
import select
import sys
import types
import errno
import traceback
import time
import collections
# A little bit of "six" (Python 2/3 compatibility): cope with PEP 3109 syntax
# changes.
PY3 = sys.version_info[0] == 3
if PY3:
def _reraise(typ, exc, tb):
raise exc.with_traceback(tb)
else:
exec("""
def _reraise(typ, exc, tb):
raise typ, exc, tb
""")
# Basic events used for thread scheduling.
class Event(object):
"""Just a base class identifying Bluelet events. An event is an
object yielded from a Bluelet thread coroutine to suspend operation
and communicate with the scheduler.
"""
pass
class WaitableEvent(Event):
"""A waitable event is one encapsulating an action that can be
waited for using a select() call. That is, it's an event with an
associated file descriptor.
"""
def waitables(self):
"""Return "waitable" objects to pass to select(). Should return
three iterables for input readiness, output readiness, and
exceptional conditions (i.e., the three lists passed to
select()).
"""
return (), (), ()
def fire(self):
"""Called when an associated file descriptor becomes ready
(i.e., is returned from a select() call).
"""
pass
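# --- Illustrative sketch (not part of the original module) ---
# A custom WaitableEvent only has to provide waitables() and fire().  For
# example, a hypothetical event that waits for a datagram socket to become
# readable and then receives one packet could look like this:
class _ExampleDatagramEvent(WaitableEvent):
    def __init__(self, sock, bufsize=4096):
        self.sock = sock
        self.bufsize = bufsize
    def waitables(self):
        # Readable only: the first tuple is the read list passed to select().
        return (self.sock,), (), ()
    def fire(self):
        # Called by the scheduler once select() reports the socket ready.
        return self.sock.recvfrom(self.bufsize)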
class ValueEvent(Event):
"""An event that does nothing but return a fixed value."""
def __init__(self, value):
self.value = value
class ExceptionEvent(Event):
"""Raise an exception at the yield point. Used internally."""
def __init__(self, exc_info):
self.exc_info = exc_info
class SpawnEvent(Event):
"""Add a new coroutine thread to the scheduler."""
def __init__(self, coro):
self.spawned = coro
class JoinEvent(Event):
"""Suspend the thread until the specified child thread has
completed.
"""
def __init__(self, child):
self.child = child
class KillEvent(Event):
"""Unschedule a child thread."""
def __init__(self, child):
self.child = child
class DelegationEvent(Event):
"""Suspend execution of the current thread, start a new thread and,
    once the child thread has finished, return control to the parent
thread.
"""
def __init__(self, coro):
self.spawned = coro
class ReturnEvent(Event):
"""Return a value the current thread's delegator at the point of
delegation. Ends the current (delegate) thread.
"""
def __init__(self, value):
self.value = value
class SleepEvent(WaitableEvent):
"""Suspend the thread for a given duration.
"""
def __init__(self, duration):
self.wakeup_time = time.time() + duration
def time_left(self):
return max(self.wakeup_time - time.time(), 0.0)
class ReadEvent(WaitableEvent):
"""Reads from a file-like object."""
def __init__(self, fd, bufsize):
self.fd = fd
self.bufsize = bufsize
def waitables(self):
return (self.fd,), (), ()
def fire(self):
return self.fd.read(self.bufsize)
class WriteEvent(WaitableEvent):
"""Writes to a file-like object."""
def __init__(self, fd, data):
self.fd = fd
self.data = data
    def waitables(self):
return (), (self.fd,), ()
def fire(self):
self.fd.write(self.data)
# Core logic for executing and scheduling threads.
def _event_select(events):
"""Perform a select() over all the Events provided, returning the
ones ready to be fired. Only WaitableEvents (including SleepEvents)
matter here; all other events are ignored (and thus postponed).
"""
# Gather waitables and wakeup times.
waitable_to_event = {}
rlist, wlist, xlist = [], [], []
earliest_wakeup = None
for event in events:
if isinstance(event, SleepEvent):
if not earliest_wakeup:
earliest_wakeup = event.wakeup_time
else:
earliest_wakeup = min(earliest_wakeup, event.wakeup_time)
elif isinstance(event, WaitableEvent):
r, w, x = event.waitables()
rlist += r
wlist += w
xlist += x
for waitable in r:
waitable_to_event[('r', waitable)] = event
for waitable in w:
waitable_to_event[('w', waitable)] = event
for waitable in x:
waitable_to_event[('x', waitable)] = event
    # If we have any sleeping threads, determine how long to sleep.
if earliest_wakeup:
timeout = max(earliest_wakeup - time.time(), 0.0)
else:
timeout = None
# Perform select() if we have any waitables.
if rlist or wlist or xlist:
rready, wready, xready = select.select(rlist, wlist, xlist, timeout)
else:
rready, wready, xready = (), (), ()
if timeout:
time.sleep(timeout)
# Gather ready events corresponding to the ready waitables.
ready_events = set()
for ready in rready:
ready_events.add(waitable_to_event[('r', ready)])
for ready in wready:
ready_events.add(waitable_to_event[('w', ready)])
for ready in xready:
ready_events.add(waitable_to_event[('x', ready)])
# Gather any finished sleeps.
for event in events:
if isinstance(event, SleepEvent) and event.time_left() == 0.0:
ready_events.add(event)
return ready_events
class ThreadException(Exception):
def __init__(self, coro, exc_info):
self.coro = coro
self.exc_info = exc_info
def reraise(self):
_reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2])
SUSPENDED = Event() # Special sentinel placeholder for suspended threads.
class Delegated(Event):
"""Placeholder indicating that a thread has delegated execution to a
different thread.
"""
def __init__(self, child):
self.child = child
def run(root_coro):
"""Schedules a coroutine, running it to completion. This
encapsulates the Bluelet scheduler, which the root coroutine can
add to by spawning new coroutines.
"""
# The "threads" dictionary keeps track of all the currently-
# executing and suspended coroutines. It maps coroutines to their
# currently "blocking" event. The event value may be SUSPENDED if
# the coroutine is waiting on some other condition: namely, a
# delegated coroutine or a joined coroutine. In this case, the
# coroutine should *also* appear as a value in one of the below
# dictionaries `delegators` or `joiners`.
threads = {root_coro: ValueEvent(None)}
# Maps child coroutines to delegating parents.
delegators = {}
# Maps child coroutines to joining (exit-waiting) parents.
joiners = collections.defaultdict(list)
def complete_thread(coro, return_value):
"""Remove a coroutine from the scheduling pool, awaking
delegators and joiners as necessary and returning the specified
value to any delegating parent.
"""
del threads[coro]
# Resume delegator.
if coro in delegators:
threads[delegators[coro]] = ValueEvent(return_value)
del delegators[coro]
# Resume joiners.
if coro in joiners:
for parent in joiners[coro]:
threads[parent] = ValueEvent(None)
del joiners[coro]
def advance_thread(coro, value, is_exc=False):
"""After an event is fired, run a given coroutine associated with
it in the threads dict until it yields again. If the coroutine
exits, then the thread is removed from the pool. If the coroutine
raises an exception, it is reraised in a ThreadException. If
is_exc is True, then the value must be an exc_info tuple and the
exception is thrown into the coroutine.
"""
try:
if is_exc:
next_event = coro.throw(*value)
else:
next_event = coro.send(value)
except StopIteration:
# Thread is done.
complete_thread(coro, None)
except:
# Thread raised some other exception.
del threads[coro]
raise ThreadException(coro, sys.exc_info())
else:
if isinstance(next_event, types.GeneratorType):
# Automatically invoke sub-coroutines. (Shorthand for
# explicit bluelet.call().)
next_event = DelegationEvent(next_event)
threads[coro] = next_event
def kill_thread(coro):
"""Unschedule this thread and its (recursive) delegates.
"""
# Collect all coroutines in the delegation stack.
coros = [coro]
while isinstance(threads[coro], Delegated):
coro = threads[coro].child
coros.append(coro)
# Complete each coroutine from the top to the bottom of the
# stack.
for coro in reversed(coros):
complete_thread(coro, None)
# Continue advancing threads until root thread exits.
exit_te = None
while threads:
try:
# Look for events that can be run immediately. Continue
# running immediate events until nothing is ready.
while True:
have_ready = False
for coro, event in list(threads.items()):
if isinstance(event, SpawnEvent):
threads[event.spawned] = ValueEvent(None) # Spawn.
advance_thread(coro, None)
have_ready = True
elif isinstance(event, ValueEvent):
advance_thread(coro, event.value)
have_ready = True
elif isinstance(event, ExceptionEvent):
advance_thread(coro, event.exc_info, True)
have_ready = True
elif isinstance(event, DelegationEvent):
threads[coro] = Delegated(event.spawned) # Suspend.
threads[event.spawned] = ValueEvent(None) # Spawn.
delegators[event.spawned] = coro
have_ready = True
elif isinstance(event, ReturnEvent):
# Thread is done.
complete_thread(coro, event.value)
have_ready = True
elif isinstance(event, JoinEvent):
threads[coro] = SUSPENDED # Suspend.
joiners[event.child].append(coro)
have_ready = True
elif isinstance(event, KillEvent):
threads[coro] = ValueEvent(None)
kill_thread(event.child)
have_ready = True
# Only start the select when nothing else is ready.
if not have_ready:
break
# Wait and fire.
event2coro = dict((v, k) for k, v in threads.items())
for event in _event_select(threads.values()):
# Run the IO operation, but catch socket errors.
try:
value = event.fire()
except socket.error as exc:
if isinstance(exc.args, tuple) and \
exc.args[0] == errno.EPIPE:
# Broken pipe. Remote host disconnected.
pass
else:
traceback.print_exc()
# Abort the coroutine.
threads[event2coro[event]] = ReturnEvent(None)
else:
advance_thread(event2coro[event], value)
except ThreadException as te:
# Exception raised from inside a thread.
event = ExceptionEvent(te.exc_info)
if te.coro in delegators:
# The thread is a delegate. Raise exception in its
# delegator.
threads[delegators[te.coro]] = event
del delegators[te.coro]
else:
# The thread is root-level. Raise in client code.
exit_te = te
break
except:
# For instance, KeyboardInterrupt during select(). Raise
# into root thread and terminate others.
threads = {root_coro: ExceptionEvent(sys.exc_info())}
# If any threads still remain, kill them.
for coro in threads:
coro.close()
# If we're exiting with an exception, raise it in the client.
if exit_te:
exit_te.reraise()
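# --- Illustrative usage sketch (not part of the original module) ---
# run() drives a root coroutine until it and everything it spawned finish.
# The helpers referenced here (spawn, join, sleep) are defined further below;
# defining these example coroutines has no side effect on import.
def _example_child(log):
    yield sleep(0.01)           # arbitrary small delay for illustration
    log.append('child done')
def _example_root():
    log = []
    child = _example_child(log)
    yield spawn(child)          # schedule the child concurrently
    yield join(child)           # suspend until the child has exited
    log.append('root done')
# run(_example_root())  # blocks until both coroutines have completed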
# Sockets and their associated events.
class SocketClosedError(Exception):
pass
class Listener(object):
"""A socket wrapper object for listening sockets.
"""
def __init__(self, host, port):
"""Create a listening socket on the given hostname and port.
"""
self._closed = False
self.host = host
self.port = port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((host, port))
self.sock.listen(5)
def accept(self):
"""An event that waits for a connection on the listening socket.
When a connection is made, the event returns a Connection
object.
"""
if self._closed:
raise SocketClosedError()
return AcceptEvent(self)
def close(self):
"""Immediately close the listening socket. (Not an event.)
"""
self._closed = True
self.sock.close()
class Connection(object):
"""A socket wrapper object for connected sockets.
"""
def __init__(self, sock, addr):
self.sock = sock
self.addr = addr
self._buf = b''
self._closed = False
def close(self):
"""Close the connection."""
self._closed = True
self.sock.close()
def recv(self, size):
"""Read at most size bytes of data from the socket."""
if self._closed:
raise SocketClosedError()
if self._buf:
# We already have data read previously.
out = self._buf[:size]
self._buf = self._buf[size:]
return ValueEvent(out)
else:
return ReceiveEvent(self, size)
def send(self, data):
"""Sends data on the socket, returning the number of bytes
successfully sent.
"""
if self._closed:
raise SocketClosedError()
return SendEvent(self, data)
def sendall(self, data):
"""Send all of data on the socket."""
if self._closed:
raise SocketClosedError()
return SendEvent(self, data, True)
def readline(self, terminator=b"\n", bufsize=1024):
"""Reads a line (delimited by terminator) from the socket."""
if self._closed:
raise SocketClosedError()
while True:
if terminator in self._buf:
line, self._buf = self._buf.split(terminator, 1)
line += terminator
yield ReturnEvent(line)
break
data = yield ReceiveEvent(self, bufsize)
if data:
self._buf += data
else:
line = self._buf
self._buf = b''
yield ReturnEvent(line)
break
class AcceptEvent(WaitableEvent):
"""An event for Listener objects (listening sockets) that suspends
execution until the socket gets a connection.
"""
def __init__(self, listener):
self.listener = listener
def waitables(self):
return (self.listener.sock,), (), ()
def fire(self):
sock, addr = self.listener.sock.accept()
return Connection(sock, addr)
class ReceiveEvent(WaitableEvent):
"""An event for Connection objects (connected sockets) for
asynchronously reading data.
"""
def __init__(self, conn, bufsize):
self.conn = conn
self.bufsize = bufsize
def waitables(self):
return (self.conn.sock,), (), ()
def fire(self):
return self.conn.sock.recv(self.bufsize)
class SendEvent(WaitableEvent):
"""An event for Connection objects (connected sockets) for
asynchronously writing data.
"""
def __init__(self, conn, data, sendall=False):
self.conn = conn
self.data = data
self.sendall = sendall
def waitables(self):
return (), (self.conn.sock,), ()
def fire(self):
if self.sendall:
return self.conn.sock.sendall(self.data)
else:
return self.conn.sock.send(self.data)
# Public interface for threads; each returns an event object that
# can immediately be "yield"ed.
def null():
"""Event: yield to the scheduler without doing anything special.
"""
return ValueEvent(None)
def spawn(coro):
"""Event: add another coroutine to the scheduler. Both the parent
and child coroutines run concurrently.
"""
if not isinstance(coro, types.GeneratorType):
raise ValueError('%s is not a coroutine' % str(coro))
return SpawnEvent(coro)
def call(coro):
"""Event: delegate to another coroutine. The current coroutine
is resumed once the sub-coroutine finishes. If the sub-coroutine
returns a value using end(), then this event returns that value.
"""
if not isinstance(coro, types.GeneratorType):
raise ValueError('%s is not a coroutine' % str(coro))
return DelegationEvent(coro)
def end(value=None):
"""Event: ends the coroutine and returns a value to its
delegator.
"""
return ReturnEvent(value)
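# --- Illustrative sketch (not part of the original module) ---
# call()/end() implement delegation: the parent is resumed with whatever the
# child passed to end().  spawn() instead runs the child concurrently and
# immediately resumes the parent with None.
def _example_delegate():
    yield null()
    yield end(42)
def _example_delegator():
    value = yield call(_example_delegate())   # resumes with 42
    yield end(value)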
def read(fd, bufsize=None):
"""Event: read from a file descriptor asynchronously."""
if bufsize is None:
# Read all.
def reader():
buf = []
while True:
data = yield read(fd, 1024)
if not data:
break
buf.append(data)
yield ReturnEvent(''.join(buf))
return DelegationEvent(reader())
else:
return ReadEvent(fd, bufsize)
def write(fd, data):
"""Event: write to a file descriptor asynchronously."""
return WriteEvent(fd, data)
def connect(host, port):
"""Event: connect to a network address and return a Connection
object for communicating on the socket.
"""
addr = (host, port)
sock = socket.create_connection(addr)
return ValueEvent(Connection(sock, addr))
def sleep(duration):
"""Event: suspend the thread for ``duration`` seconds.
"""
return SleepEvent(duration)
def join(coro):
"""Suspend the thread until another, previously `spawn`ed thread
completes.
"""
return JoinEvent(coro)
def kill(coro):
"""Halt the execution of a different `spawn`ed thread.
"""
return KillEvent(coro)
# Convenience function for running socket servers.
def server(host, port, func):
"""A coroutine that runs a network server. Host and port specify the
listening address. func should be a coroutine that takes a single
parameter, a Connection object. The coroutine is invoked for every
incoming connection on the listening socket.
"""
def handler(conn):
try:
yield func(conn)
finally:
conn.close()
listener = Listener(host, port)
try:
while True:
conn = yield listener.accept()
yield spawn(handler(conn))
except KeyboardInterrupt:
pass
finally:
listener.close()
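# --- Illustrative usage sketch (not part of the original module) ---
# A minimal echo handler for server(); the address below is an arbitrary
# example.  Each incoming connection gets its own handler coroutine.
def _example_echo(conn):
    while True:
        data = yield conn.recv(1024)
        if not data:                 # empty read: client disconnected
            break
        yield conn.sendall(data)
# run(server('127.0.0.1', 4915, _example_echo))  # serve until Ctrl-C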
|
crystalspace/CS | refs/heads/master | scripts/python/frozen/cspace/csgfx.py | 1 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.36
#
# Don't modify this file, modify the SWIG interface instead.
import _csgfx
import new
new_instancemethod = new.instancemethod
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'PySwigObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError,name
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
import types
try:
_object = types.ObjectType
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
del types
def _swig_setattr_nondynamic_method(set):
def set_attr(self,name,value):
if (name == "thisown"): return self.this.own(value)
if hasattr(self,name) or (name == "this"):
set(self,name,value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import core
_SetSCFPointer = _csgfx._SetSCFPointer
_GetSCFPointer = _csgfx._GetSCFPointer
if not "core" in dir():
core = __import__("cspace").__dict__["core"]
core.AddSCFLink(_SetSCFPointer)
CSMutableArrayHelper = core.CSMutableArrayHelper
class iShaderVarStringSetBase(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def Request(*args): return _csgfx.iShaderVarStringSetBase_Request(*args)
def Contains(*args): return _csgfx.iShaderVarStringSetBase_Contains(*args)
def Delete(*args): return _csgfx.iShaderVarStringSetBase_Delete(*args)
def Empty(*args): return _csgfx.iShaderVarStringSetBase_Empty(*args)
def Clear(*args): return _csgfx.iShaderVarStringSetBase_Clear(*args)
def GetSize(*args): return _csgfx.iShaderVarStringSetBase_GetSize(*args)
def IsEmpty(*args): return _csgfx.iShaderVarStringSetBase_IsEmpty(*args)
__swig_destroy__ = _csgfx.delete_iShaderVarStringSetBase
__del__ = lambda self : None;
iShaderVarStringSetBase_swigregister = _csgfx.iShaderVarStringSetBase_swigregister
iShaderVarStringSetBase_swigregister(iShaderVarStringSetBase)
CS_IMGFMT_MASK = _csgfx.CS_IMGFMT_MASK
CS_IMGFMT_NONE = _csgfx.CS_IMGFMT_NONE
CS_IMGFMT_TRUECOLOR = _csgfx.CS_IMGFMT_TRUECOLOR
CS_IMGFMT_PALETTED8 = _csgfx.CS_IMGFMT_PALETTED8
CS_IMGFMT_ANY = _csgfx.CS_IMGFMT_ANY
CS_IMGFMT_ALPHA = _csgfx.CS_IMGFMT_ALPHA
CS_IMGFMT_INVALID = _csgfx.CS_IMGFMT_INVALID
csimg2D = _csgfx.csimg2D
csimg3D = _csgfx.csimg3D
csimgCube = _csgfx.csimgCube
class iImage(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def GetImageData(*args): return _csgfx.iImage_GetImageData(*args)
def GetWidth(*args): return _csgfx.iImage_GetWidth(*args)
def GetHeight(*args): return _csgfx.iImage_GetHeight(*args)
def GetDepth(*args): return _csgfx.iImage_GetDepth(*args)
def SetName(*args): return _csgfx.iImage_SetName(*args)
def GetName(*args): return _csgfx.iImage_GetName(*args)
def GetFormat(*args): return _csgfx.iImage_GetFormat(*args)
def GetPalette(*args): return _csgfx.iImage_GetPalette(*args)
def GetAlpha(*args): return _csgfx.iImage_GetAlpha(*args)
def HasKeyColor(*args): return _csgfx.iImage_HasKeyColor(*args)
def GetKeyColor(*args): return _csgfx.iImage_GetKeyColor(*args)
def HasMipmaps(*args): return _csgfx.iImage_HasMipmaps(*args)
def GetMipmap(*args): return _csgfx.iImage_GetMipmap(*args)
def GetRawFormat(*args): return _csgfx.iImage_GetRawFormat(*args)
def GetRawData(*args): return _csgfx.iImage_GetRawData(*args)
def GetImageType(*args): return _csgfx.iImage_GetImageType(*args)
def HasSubImages(*args): return _csgfx.iImage_HasSubImages(*args)
def GetSubImage(*args): return _csgfx.iImage_GetSubImage(*args)
def GetCookedImageFormat(*args): return _csgfx.iImage_GetCookedImageFormat(*args)
def GetCookedImageData(*args): return _csgfx.iImage_GetCookedImageData(*args)
scfGetVersion = staticmethod(_csgfx.iImage_scfGetVersion)
__swig_destroy__ = _csgfx.delete_iImage
__del__ = lambda self : None;
iImage_swigregister = _csgfx.iImage_swigregister
iImage_swigregister(iImage)
iImage_scfGetVersion = _csgfx.iImage_scfGetVersion
class csImageIOFileFormatDescriptions(core.CustomAllocated):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
__swig_destroy__ = _csgfx.delete_csImageIOFileFormatDescriptions
__del__ = lambda self : None;
def __init__(self, *args):
this = _csgfx.new_csImageIOFileFormatDescriptions(*args)
try: self.this.append(this)
except: self.this = this
def GetSize(*args): return _csgfx.csImageIOFileFormatDescriptions_GetSize(*args)
def Get(*args): return _csgfx.csImageIOFileFormatDescriptions_Get(*args)
def Put(*args): return _csgfx.csImageIOFileFormatDescriptions_Put(*args)
def Push(*args): return _csgfx.csImageIOFileFormatDescriptions_Push(*args)
def Merge(*args): return _csgfx.csImageIOFileFormatDescriptions_Merge(*args)
def MergeSmart(*args): return _csgfx.csImageIOFileFormatDescriptions_MergeSmart(*args)
def Pop(*args): return _csgfx.csImageIOFileFormatDescriptions_Pop(*args)
def Top(*args): return _csgfx.csImageIOFileFormatDescriptions_Top(*args)
def Insert(*args): return _csgfx.csImageIOFileFormatDescriptions_Insert(*args)
def Contains(*args): return _csgfx.csImageIOFileFormatDescriptions_Contains(*args)
def DeleteAll(*args): return _csgfx.csImageIOFileFormatDescriptions_DeleteAll(*args)
def Truncate(*args): return _csgfx.csImageIOFileFormatDescriptions_Truncate(*args)
def Empty(*args): return _csgfx.csImageIOFileFormatDescriptions_Empty(*args)
def IsEmpty(*args): return _csgfx.csImageIOFileFormatDescriptions_IsEmpty(*args)
def SetMinimalCapacity(*args): return _csgfx.csImageIOFileFormatDescriptions_SetMinimalCapacity(*args)
def DeleteIndex(*args): return _csgfx.csImageIOFileFormatDescriptions_DeleteIndex(*args)
def DeleteIndexFast(*args): return _csgfx.csImageIOFileFormatDescriptions_DeleteIndexFast(*args)
def DeleteRange(*args): return _csgfx.csImageIOFileFormatDescriptions_DeleteRange(*args)
def __eq__(*args): return _csgfx.csImageIOFileFormatDescriptions___eq__(*args)
def __ne__(*args): return _csgfx.csImageIOFileFormatDescriptions___ne__(*args)
def GetAllocator(*args): return _csgfx.csImageIOFileFormatDescriptions_GetAllocator(*args)
csImageIOFileFormatDescriptions_swigregister = _csgfx.csImageIOFileFormatDescriptions_swigregister
csImageIOFileFormatDescriptions_swigregister(csImageIOFileFormatDescriptions)
CS_IMAGEIO_LOAD = _csgfx.CS_IMAGEIO_LOAD
CS_IMAGEIO_SAVE = _csgfx.CS_IMAGEIO_SAVE
class csImageIOFileFormatDescription(object):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
mime = _swig_property(_csgfx.csImageIOFileFormatDescription_mime_get)
subtype = _swig_property(_csgfx.csImageIOFileFormatDescription_subtype_get)
cap = _swig_property(_csgfx.csImageIOFileFormatDescription_cap_get, _csgfx.csImageIOFileFormatDescription_cap_set)
def __init__(self, *args):
this = _csgfx.new_csImageIOFileFormatDescription(*args)
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _csgfx.delete_csImageIOFileFormatDescription
__del__ = lambda self : None;
csImageIOFileFormatDescription_swigregister = _csgfx.csImageIOFileFormatDescription_swigregister
csImageIOFileFormatDescription_swigregister(csImageIOFileFormatDescription)
class iImageIO(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def GetDescription(*args): return _csgfx.iImageIO_GetDescription(*args)
def Load(*args): return _csgfx.iImageIO_Load(*args)
def Save(*args): return _csgfx.iImageIO_Save(*args)
scfGetVersion = staticmethod(_csgfx.iImageIO_scfGetVersion)
__swig_destroy__ = _csgfx.delete_iImageIO
__del__ = lambda self : None;
iImageIO_swigregister = _csgfx.iImageIO_swigregister
iImageIO_swigregister(iImageIO)
iImageIO_scfGetVersion = _csgfx.iImageIO_scfGetVersion
class iAnimatedImage(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def Animate(*args): return _csgfx.iAnimatedImage_Animate(*args)
def IsAnimated(*args): return _csgfx.iAnimatedImage_IsAnimated(*args)
scfGetVersion = staticmethod(_csgfx.iAnimatedImage_scfGetVersion)
__swig_destroy__ = _csgfx.delete_iAnimatedImage
__del__ = lambda self : None;
iAnimatedImage_swigregister = _csgfx.iAnimatedImage_swigregister
iAnimatedImage_swigregister(iAnimatedImage)
iAnimatedImage_scfGetVersion = _csgfx.iAnimatedImage_scfGetVersion
class iProcTexture(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def GetAlwaysAnimate(*args): return _csgfx.iProcTexture_GetAlwaysAnimate(*args)
def SetAlwaysAnimate(*args): return _csgfx.iProcTexture_SetAlwaysAnimate(*args)
def GetFactory(*args): return _csgfx.iProcTexture_GetFactory(*args)
iProcTexture_swigregister = _csgfx.iProcTexture_swigregister
iProcTexture_swigregister(iProcTexture)
class csRGBcolor(object):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
red = _swig_property(_csgfx.csRGBcolor_red_get, _csgfx.csRGBcolor_red_set)
green = _swig_property(_csgfx.csRGBcolor_green_get, _csgfx.csRGBcolor_green_set)
blue = _swig_property(_csgfx.csRGBcolor_blue_get, _csgfx.csRGBcolor_blue_set)
def __init__(self, *args):
this = _csgfx.new_csRGBcolor(*args)
try: self.this.append(this)
except: self.this = this
def Set(*args): return _csgfx.csRGBcolor_Set(*args)
def __eq__(*args): return _csgfx.csRGBcolor___eq__(*args)
def __ne__(*args): return _csgfx.csRGBcolor___ne__(*args)
def __add__(*args): return _csgfx.csRGBcolor___add__(*args)
def UnsafeAdd(*args): return _csgfx.csRGBcolor_UnsafeAdd(*args)
def SafeAdd(*args): return _csgfx.csRGBcolor_SafeAdd(*args)
__swig_destroy__ = _csgfx.delete_csRGBcolor
__del__ = lambda self : None;
csRGBcolor_swigregister = _csgfx.csRGBcolor_swigregister
csRGBcolor_swigregister(csRGBcolor)
class csRGBpixel(object):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
red = _swig_property(_csgfx.csRGBpixel_red_get, _csgfx.csRGBpixel_red_set)
green = _swig_property(_csgfx.csRGBpixel_green_get, _csgfx.csRGBpixel_green_set)
blue = _swig_property(_csgfx.csRGBpixel_blue_get, _csgfx.csRGBpixel_blue_set)
alpha = _swig_property(_csgfx.csRGBpixel_alpha_get, _csgfx.csRGBpixel_alpha_set)
def __init__(self, *args):
this = _csgfx.new_csRGBpixel(*args)
try: self.this.append(this)
except: self.this = this
def __eq__(*args): return _csgfx.csRGBpixel___eq__(*args)
def __ne__(*args): return _csgfx.csRGBpixel___ne__(*args)
def asRGBcolor(*args): return _csgfx.csRGBpixel_asRGBcolor(*args)
def eq(*args): return _csgfx.csRGBpixel_eq(*args)
def Intensity(*args): return _csgfx.csRGBpixel_Intensity(*args)
def Luminance(*args): return _csgfx.csRGBpixel_Luminance(*args)
def Set(*args): return _csgfx.csRGBpixel_Set(*args)
def __iadd__(*args): return _csgfx.csRGBpixel___iadd__(*args)
def UnsafeAdd(*args): return _csgfx.csRGBpixel_UnsafeAdd(*args)
def SafeAdd(*args): return _csgfx.csRGBpixel_SafeAdd(*args)
__swig_destroy__ = _csgfx.delete_csRGBpixel
__del__ = lambda self : None;
csRGBpixel_swigregister = _csgfx.csRGBpixel_swigregister
csRGBpixel_swigregister(csRGBpixel)
R_COEF = _csgfx.R_COEF
G_COEF = _csgfx.G_COEF
B_COEF = _csgfx.B_COEF
R_COEF_SQ = _csgfx.R_COEF_SQ
G_COEF_SQ = _csgfx.G_COEF_SQ
B_COEF_SQ = _csgfx.B_COEF_SQ
class iShaderVarStringSet(iShaderVarStringSetBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
scfGetVersion = staticmethod(_csgfx.iShaderVarStringSet_scfGetVersion)
__swig_destroy__ = _csgfx.delete_iShaderVarStringSet
__del__ = lambda self : None;
iShaderVarStringSet_swigregister = _csgfx.iShaderVarStringSet_swigregister
iShaderVarStringSet_swigregister(iShaderVarStringSet)
cvar = _csgfx.cvar
InvalidShaderVarStringID = cvar.InvalidShaderVarStringID
iShaderVarStringSet_scfGetVersion = _csgfx.iShaderVarStringSet_scfGetVersion
class iShaderVariableAccessor(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def PreGetValue(*args): return _csgfx.iShaderVariableAccessor_PreGetValue(*args)
__swig_destroy__ = _csgfx.delete_iShaderVariableAccessor
__del__ = lambda self : None;
iShaderVariableAccessor_swigregister = _csgfx.iShaderVariableAccessor_swigregister
iShaderVariableAccessor_swigregister(iShaderVariableAccessor)
class csShaderVariable(core.csRefCount):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
UNKNOWN = _csgfx.csShaderVariable_UNKNOWN
INT = _csgfx.csShaderVariable_INT
FLOAT = _csgfx.csShaderVariable_FLOAT
TEXTURE = _csgfx.csShaderVariable_TEXTURE
RENDERBUFFER = _csgfx.csShaderVariable_RENDERBUFFER
VECTOR2 = _csgfx.csShaderVariable_VECTOR2
VECTOR3 = _csgfx.csShaderVariable_VECTOR3
VECTOR4 = _csgfx.csShaderVariable_VECTOR4
MATRIX3X3 = _csgfx.csShaderVariable_MATRIX3X3
MATRIX = _csgfx.csShaderVariable_MATRIX
TRANSFORM = _csgfx.csShaderVariable_TRANSFORM
ARRAY = _csgfx.csShaderVariable_ARRAY
MATRIX4X4 = _csgfx.csShaderVariable_MATRIX4X4
COLOR = _csgfx.csShaderVariable_COLOR
def __init__(self, *args):
this = _csgfx.new_csShaderVariable(*args)
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _csgfx.delete_csShaderVariable
__del__ = lambda self : None;
def assign(*args): return _csgfx.csShaderVariable_assign(*args)
def GetType(*args): return _csgfx.csShaderVariable_GetType(*args)
def SetType(*args): return _csgfx.csShaderVariable_SetType(*args)
def SetAccessor(*args): return _csgfx.csShaderVariable_SetAccessor(*args)
def SetName(*args): return _csgfx.csShaderVariable_SetName(*args)
def GetName(*args): return _csgfx.csShaderVariable_GetName(*args)
def GetAccessor(*args): return _csgfx.csShaderVariable_GetAccessor(*args)
def GetAccessorData(*args): return _csgfx.csShaderVariable_GetAccessorData(*args)
def SetValue(*args): return _csgfx.csShaderVariable_SetValue(*args)
def AddVariableToArray(*args): return _csgfx.csShaderVariable_AddVariableToArray(*args)
def RemoveFromArray(*args): return _csgfx.csShaderVariable_RemoveFromArray(*args)
def SetArraySize(*args): return _csgfx.csShaderVariable_SetArraySize(*args)
def GetArraySize(*args): return _csgfx.csShaderVariable_GetArraySize(*args)
def GetArrayElement(*args): return _csgfx.csShaderVariable_GetArrayElement(*args)
def SetArrayElement(*args): return _csgfx.csShaderVariable_SetArrayElement(*args)
def FindArrayElement(*args): return _csgfx.csShaderVariable_FindArrayElement(*args)
def GetValue(*args): return _csgfx.csShaderVariable_GetValue(*args)
csShaderVariable_swigregister = _csgfx.csShaderVariable_swigregister
csShaderVariable_swigregister(csShaderVariable)
class csShaderVariableArrayReadOnly(core.iBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def GetSize(*args): return _csgfx.csShaderVariableArrayReadOnly_GetSize(*args)
def Get(*args): return _csgfx.csShaderVariableArrayReadOnly_Get(*args)
def Top(*args): return _csgfx.csShaderVariableArrayReadOnly_Top(*args)
def Find(*args): return _csgfx.csShaderVariableArrayReadOnly_Find(*args)
def GetIndex(*args): return _csgfx.csShaderVariableArrayReadOnly_GetIndex(*args)
def IsEmpty(*args): return _csgfx.csShaderVariableArrayReadOnly_IsEmpty(*args)
def GetAll(*args): return _csgfx.csShaderVariableArrayReadOnly_GetAll(*args)
__swig_destroy__ = _csgfx.delete_csShaderVariableArrayReadOnly
__del__ = lambda self : None;
csShaderVariableArrayReadOnly_swigregister = _csgfx.csShaderVariableArrayReadOnly_swigregister
csShaderVariableArrayReadOnly_swigregister(csShaderVariableArrayReadOnly)
class csShaderVariableArrayChangeElements(csShaderVariableArrayReadOnly):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def Get(*args): return _csgfx.csShaderVariableArrayChangeElements_Get(*args)
def Top(*args): return _csgfx.csShaderVariableArrayChangeElements_Top(*args)
__swig_destroy__ = _csgfx.delete_csShaderVariableArrayChangeElements
__del__ = lambda self : None;
csShaderVariableArrayChangeElements_swigregister = _csgfx.csShaderVariableArrayChangeElements_swigregister
csShaderVariableArrayChangeElements_swigregister(csShaderVariableArrayChangeElements)
class csShaderVariableArrayChangeAll(csShaderVariableArrayChangeElements):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def SetSize(*args): return _csgfx.csShaderVariableArrayChangeAll_SetSize(*args)
def GetExtend(*args): return _csgfx.csShaderVariableArrayChangeAll_GetExtend(*args)
def Put(*args): return _csgfx.csShaderVariableArrayChangeAll_Put(*args)
def Push(*args): return _csgfx.csShaderVariableArrayChangeAll_Push(*args)
def PushSmart(*args): return _csgfx.csShaderVariableArrayChangeAll_PushSmart(*args)
def Pop(*args): return _csgfx.csShaderVariableArrayChangeAll_Pop(*args)
def Insert(*args): return _csgfx.csShaderVariableArrayChangeAll_Insert(*args)
def DeleteAll(*args): return _csgfx.csShaderVariableArrayChangeAll_DeleteAll(*args)
def Truncate(*args): return _csgfx.csShaderVariableArrayChangeAll_Truncate(*args)
def Empty(*args): return _csgfx.csShaderVariableArrayChangeAll_Empty(*args)
def DeleteIndex(*args): return _csgfx.csShaderVariableArrayChangeAll_DeleteIndex(*args)
def DeleteIndexFast(*args): return _csgfx.csShaderVariableArrayChangeAll_DeleteIndexFast(*args)
def Delete(*args): return _csgfx.csShaderVariableArrayChangeAll_Delete(*args)
__swig_destroy__ = _csgfx.delete_csShaderVariableArrayChangeAll
__del__ = lambda self : None;
csShaderVariableArrayChangeAll_swigregister = _csgfx.csShaderVariableArrayChangeAll_swigregister
csShaderVariableArrayChangeAll_swigregister(csShaderVariableArrayChangeAll)
class csImageBaseBase(iImage):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def IncRef(*args): return _csgfx.csImageBaseBase_IncRef(*args)
def DecRef(*args): return _csgfx.csImageBaseBase_DecRef(*args)
def GetRefCount(*args): return _csgfx.csImageBaseBase_GetRefCount(*args)
def QueryInterface(*args): return _csgfx.csImageBaseBase_QueryInterface(*args)
def AddRefOwner(*args): return _csgfx.csImageBaseBase_AddRefOwner(*args)
def RemoveRefOwner(*args): return _csgfx.csImageBaseBase_RemoveRefOwner(*args)
def GetInterfaceMetadata(*args): return _csgfx.csImageBaseBase_GetInterfaceMetadata(*args)
csImageBaseBase_swigregister = _csgfx.csImageBaseBase_swigregister
csImageBaseBase_swigregister(csImageBaseBase)
class csImageBase(csImageBaseBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
__swig_destroy__ = _csgfx.delete_csImageBase
__del__ = lambda self : None;
def GetDepth(*args): return _csgfx.csImageBase_GetDepth(*args)
def SetName(*args): return _csgfx.csImageBase_SetName(*args)
def GetName(*args): return _csgfx.csImageBase_GetName(*args)
def GetPalette(*args): return _csgfx.csImageBase_GetPalette(*args)
def GetAlpha(*args): return _csgfx.csImageBase_GetAlpha(*args)
def HasKeyColor(*args): return _csgfx.csImageBase_HasKeyColor(*args)
def GetKeyColor(*args): return _csgfx.csImageBase_GetKeyColor(*args)
def HasMipmaps(*args): return _csgfx.csImageBase_HasMipmaps(*args)
def GetMipmap(*args): return _csgfx.csImageBase_GetMipmap(*args)
def GetImageType(*args): return _csgfx.csImageBase_GetImageType(*args)
def HasSubImages(*args): return _csgfx.csImageBase_HasSubImages(*args)
def GetSubImage(*args): return _csgfx.csImageBase_GetSubImage(*args)
def GetCookedImageFormat(*args): return _csgfx.csImageBase_GetCookedImageFormat(*args)
def GetCookedImageData(*args): return _csgfx.csImageBase_GetCookedImageData(*args)
csImageBase_swigregister = _csgfx.csImageBase_swigregister
csImageBase_swigregister(csImageBase)
class csImageMemoryBase(csImageBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def IncRef(*args): return _csgfx.csImageMemoryBase_IncRef(*args)
def DecRef(*args): return _csgfx.csImageMemoryBase_DecRef(*args)
def GetRefCount(*args): return _csgfx.csImageMemoryBase_GetRefCount(*args)
def QueryInterface(*args): return _csgfx.csImageMemoryBase_QueryInterface(*args)
def AddRefOwner(*args): return _csgfx.csImageMemoryBase_AddRefOwner(*args)
def RemoveRefOwner(*args): return _csgfx.csImageMemoryBase_RemoveRefOwner(*args)
def GetInterfaceMetadata(*args): return _csgfx.csImageMemoryBase_GetInterfaceMetadata(*args)
csImageMemoryBase_swigregister = _csgfx.csImageMemoryBase_swigregister
csImageMemoryBase_swigregister(csImageMemoryBase)
class csImageMemory(csImageMemoryBase):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
this = _csgfx.new_csImageMemory(*args)
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _csgfx.delete_csImageMemory
__del__ = lambda self : None;
def GetImagePtr(*args): return _csgfx.csImageMemory_GetImagePtr(*args)
def GetPalettePtr(*args): return _csgfx.csImageMemory_GetPalettePtr(*args)
def GetAlphaPtr(*args): return _csgfx.csImageMemory_GetAlphaPtr(*args)
def GetImageData(*args): return _csgfx.csImageMemory_GetImageData(*args)
def GetWidth(*args): return _csgfx.csImageMemory_GetWidth(*args)
def GetHeight(*args): return _csgfx.csImageMemory_GetHeight(*args)
def GetDepth(*args): return _csgfx.csImageMemory_GetDepth(*args)
def GetRawFormat(*args): return _csgfx.csImageMemory_GetRawFormat(*args)
def GetRawData(*args): return _csgfx.csImageMemory_GetRawData(*args)
def GetFormat(*args): return _csgfx.csImageMemory_GetFormat(*args)
def GetPalette(*args): return _csgfx.csImageMemory_GetPalette(*args)
def GetAlpha(*args): return _csgfx.csImageMemory_GetAlpha(*args)
def HasKeyColor(*args): return _csgfx.csImageMemory_HasKeyColor(*args)
def GetKeyColor(*args): return _csgfx.csImageMemory_GetKeyColor(*args)
def Clear(*args): return _csgfx.csImageMemory_Clear(*args)
def CheckAlpha(*args): return _csgfx.csImageMemory_CheckAlpha(*args)
def SetFormat(*args): return _csgfx.csImageMemory_SetFormat(*args)
def SetKeyColor(*args): return _csgfx.csImageMemory_SetKeyColor(*args)
def ClearKeyColor(*args): return _csgfx.csImageMemory_ClearKeyColor(*args)
def ApplyKeyColor(*args): return _csgfx.csImageMemory_ApplyKeyColor(*args)
def GetImageType(*args): return _csgfx.csImageMemory_GetImageType(*args)
def SetImageType(*args): return _csgfx.csImageMemory_SetImageType(*args)
def HasMipmaps(*args): return _csgfx.csImageMemory_HasMipmaps(*args)
def GetMipmap(*args): return _csgfx.csImageMemory_GetMipmap(*args)
def SetMipmap(*args): return _csgfx.csImageMemory_SetMipmap(*args)
def Copy(*args): return _csgfx.csImageMemory_Copy(*args)
def CopyScale(*args): return _csgfx.csImageMemory_CopyScale(*args)
def CopyTile(*args): return _csgfx.csImageMemory_CopyTile(*args)
def ConvertFromRGBA(*args): return _csgfx.csImageMemory_ConvertFromRGBA(*args)
def ConvertFromPal8(*args): return _csgfx.csImageMemory_ConvertFromPal8(*args)
csImageMemory_swigregister = _csgfx.csImageMemory_swigregister
csImageMemory_swigregister(csImageMemory)
class csImageManipulate(object):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
Rescale = staticmethod(_csgfx.csImageManipulate_Rescale)
Mipmap = staticmethod(_csgfx.csImageManipulate_Mipmap)
Blur = staticmethod(_csgfx.csImageManipulate_Blur)
Crop = staticmethod(_csgfx.csImageManipulate_Crop)
Sharpen = staticmethod(_csgfx.csImageManipulate_Sharpen)
TransformColor = staticmethod(_csgfx.csImageManipulate_TransformColor)
Gray = staticmethod(_csgfx.csImageManipulate_Gray)
RenormalizeNormals = staticmethod(_csgfx.csImageManipulate_RenormalizeNormals)
def __init__(self, *args):
this = _csgfx.new_csImageManipulate(*args)
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _csgfx.delete_csImageManipulate
__del__ = lambda self : None;
csImageManipulate_swigregister = _csgfx.csImageManipulate_swigregister
csImageManipulate_swigregister(csImageManipulate)
csImageManipulate_Rescale = _csgfx.csImageManipulate_Rescale
csImageManipulate_Mipmap = _csgfx.csImageManipulate_Mipmap
csImageManipulate_Blur = _csgfx.csImageManipulate_Blur
csImageManipulate_Crop = _csgfx.csImageManipulate_Crop
csImageManipulate_Sharpen = _csgfx.csImageManipulate_Sharpen
csImageManipulate_TransformColor = _csgfx.csImageManipulate_TransformColor
csImageManipulate_Gray = _csgfx.csImageManipulate_Gray
csImageManipulate_RenormalizeNormals = _csgfx.csImageManipulate_RenormalizeNormals
def CS_REQUEST_IMAGELOADER ():
return core.CS_REQUEST_PLUGIN("crystalspace.graphic.image.io.multiplexer",
iImageIO)
|
rahul-c1/scrapy | refs/heads/master | scrapy/cmdline.py | 9 | from __future__ import print_function
import sys
import optparse
import cProfile
import inspect
import pkg_resources
import scrapy
from scrapy.crawler import CrawlerProcess
from scrapy.xlib import lsprofcalltree
from scrapy.command import ScrapyCommand
from scrapy.exceptions import UsageError
from scrapy.utils.misc import walk_modules
from scrapy.utils.project import inside_project, get_project_settings
from scrapy.settings.deprecated import check_deprecated_settings
def _iter_command_classes(module_name):
    # TODO: add `name` attribute to commands and merge this function with
# scrapy.utils.spider.iter_spider_classes
for module in walk_modules(module_name):
for obj in vars(module).itervalues():
if inspect.isclass(obj) and \
issubclass(obj, ScrapyCommand) and \
obj.__module__ == module.__name__:
yield obj
def _get_commands_from_module(module, inproject):
d = {}
for cmd in _iter_command_classes(module):
if inproject or not cmd.requires_project:
cmdname = cmd.__module__.split('.')[-1]
d[cmdname] = cmd()
return d
def _get_commands_from_entry_points(inproject, group='scrapy.commands'):
cmds = {}
for entry_point in pkg_resources.iter_entry_points(group):
obj = entry_point.load()
if inspect.isclass(obj):
cmds[entry_point.name] = obj()
else:
raise Exception("Invalid entry point %s" % entry_point.name)
return cmds
def _get_commands_dict(settings, inproject):
cmds = _get_commands_from_module('scrapy.commands', inproject)
cmds.update(_get_commands_from_entry_points(inproject))
cmds_module = settings['COMMANDS_MODULE']
if cmds_module:
cmds.update(_get_commands_from_module(cmds_module, inproject))
return cmds
def _pop_command_name(argv):
i = 0
for arg in argv[1:]:
if not arg.startswith('-'):
del argv[i]
return arg
i += 1
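# Illustrative walk-through (not part of upstream Scrapy; command and spider
# names are made up): for argv == ['scrapy', 'crawl', 'myspider', '-o', 'out.json']
# the first element not starting with '-' is 'crawl'; it is returned and argv is
# shrunk so that the later parse_args(argv[1:]) call in execute() only sees the
# arguments that follow the command:
#
#   argv = ['scrapy', 'crawl', 'myspider', '-o', 'out.json']
#   _pop_command_name(argv)   # -> 'crawl'
#   argv[1:]                  # -> ['myspider', '-o', 'out.json']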
def _print_header(settings, inproject):
if inproject:
print("Scrapy %s - project: %s\n" % (scrapy.__version__, \
settings['BOT_NAME']))
else:
print("Scrapy %s - no active project\n" % scrapy.__version__)
def _print_commands(settings, inproject):
_print_header(settings, inproject)
print("Usage:")
print(" scrapy <command> [options] [args]\n")
print("Available commands:")
cmds = _get_commands_dict(settings, inproject)
for cmdname, cmdclass in sorted(cmds.iteritems()):
print(" %-13s %s" % (cmdname, cmdclass.short_desc()))
if not inproject:
print()
print(" [ more ] More commands available when run from project directory")
print()
print('Use "scrapy <command> -h" to see more info about a command')
def _print_unknown_command(settings, cmdname, inproject):
_print_header(settings, inproject)
print("Unknown command: %s\n" % cmdname)
print('Use "scrapy" to see available commands')
def _run_print_help(parser, func, *a, **kw):
try:
func(*a, **kw)
except UsageError as e:
if str(e):
parser.error(str(e))
if e.print_help:
parser.print_help()
sys.exit(2)
def execute(argv=None, settings=None):
if argv is None:
argv = sys.argv
# --- backwards compatibility for scrapy.conf.settings singleton ---
if settings is None and 'scrapy.conf' in sys.modules:
from scrapy import conf
if hasattr(conf, 'settings'):
settings = conf.settings
# ------------------------------------------------------------------
if settings is None:
settings = get_project_settings()
check_deprecated_settings(settings)
# --- backwards compatibility for scrapy.conf.settings singleton ---
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
with warnings.catch_warnings():
warnings.simplefilter("ignore", ScrapyDeprecationWarning)
from scrapy import conf
conf.settings = settings
# ------------------------------------------------------------------
inproject = inside_project()
cmds = _get_commands_dict(settings, inproject)
cmdname = _pop_command_name(argv)
parser = optparse.OptionParser(formatter=optparse.TitledHelpFormatter(), \
conflict_handler='resolve')
if not cmdname:
_print_commands(settings, inproject)
sys.exit(0)
elif cmdname not in cmds:
_print_unknown_command(settings, cmdname, inproject)
sys.exit(2)
cmd = cmds[cmdname]
parser.usage = "scrapy %s %s" % (cmdname, cmd.syntax())
parser.description = cmd.long_desc()
settings.defaults.update(cmd.default_settings)
cmd.settings = settings
cmd.add_options(parser)
opts, args = parser.parse_args(args=argv[1:])
_run_print_help(parser, cmd.process_options, args, opts)
cmd.crawler_process = CrawlerProcess(settings)
_run_print_help(parser, _run_command, cmd, args, opts)
sys.exit(cmd.exitcode)
def _run_command(cmd, args, opts):
if opts.profile or opts.lsprof:
_run_command_profiled(cmd, args, opts)
else:
cmd.run(args, opts)
def _run_command_profiled(cmd, args, opts):
if opts.profile:
sys.stderr.write("scrapy: writing cProfile stats to %r\n" % opts.profile)
if opts.lsprof:
sys.stderr.write("scrapy: writing lsprof stats to %r\n" % opts.lsprof)
loc = locals()
p = cProfile.Profile()
p.runctx('cmd.run(args, opts)', globals(), loc)
if opts.profile:
p.dump_stats(opts.profile)
k = lsprofcalltree.KCacheGrind(p)
if opts.lsprof:
with open(opts.lsprof, 'w') as f:
k.output(f)
if __name__ == '__main__':
execute()
|
zhuwenping/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/distutils/core.py | 54 | """distutils.core
The only module that needs to be imported to use the Distutils; provides
the 'setup' function (which is to be called from the setup script). Also
indirectly provides the Distribution and Command classes, although they are
really defined in distutils.dist and distutils.cmd.
"""
import os
import sys
from distutils.debug import DEBUG
from distutils.errors import *
from distutils.util import grok_environment_error
# Mainly import these so setup scripts can "from distutils.core import" them.
from distutils.dist import Distribution
from distutils.cmd import Command
from distutils.config import PyPIRCCommand
from distutils.extension import Extension
# This is a barebones help message generated and displayed when the user
# runs the setup script with no arguments at all. More useful help
# is generated with various --help options: global help, list commands,
# and per-command help.
USAGE = """\
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
or: %(script)s --help [cmd1 cmd2 ...]
or: %(script)s --help-commands
or: %(script)s cmd --help
"""
def gen_usage (script_name):
script = os.path.basename(script_name)
return USAGE % vars()
# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
_setup_stop_after = None
_setup_distribution = None
# Legal keyword arguments for the setup() function
setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
'name', 'version', 'author', 'author_email',
'maintainer', 'maintainer_email', 'url', 'license',
'description', 'long_description', 'keywords',
'platforms', 'classifiers', 'download_url',
'requires', 'provides', 'obsoletes',
)
# Legal keyword arguments for the Extension constructor
extension_keywords = ('name', 'sources', 'include_dirs',
'define_macros', 'undef_macros',
'library_dirs', 'libraries', 'runtime_library_dirs',
'extra_objects', 'extra_compile_args', 'extra_link_args',
'swig_opts', 'export_symbols', 'depends', 'language')
def setup (**attrs):
"""The gateway to the Distutils: do everything your setup script needs
to do, in a highly flexible and user-driven way. Briefly: create a
Distribution instance; find and parse config files; parse the command
line; run each Distutils command found there, customized by the options
supplied to 'setup()' (as keyword arguments), in config files, and on
the command line.
The Distribution instance might be an instance of a class supplied via
the 'distclass' keyword argument to 'setup'; if no such class is
supplied, then the Distribution class (in dist.py) is instantiated.
All other arguments to 'setup' (except for 'cmdclass') are used to set
attributes of the Distribution instance.
The 'cmdclass' argument, if supplied, is a dictionary mapping command
names to command classes. Each command encountered on the command line
will be turned into a command class, which is in turn instantiated; any
class found in 'cmdclass' is used in place of the default, which is
(for command 'foo_bar') class 'foo_bar' in module
'distutils.command.foo_bar'. The command class must provide a
'user_options' attribute which is a list of option specifiers for
'distutils.fancy_getopt'. Any command-line options between the current
and the next command are used to set attributes of the current command
object.
When the entire command-line has been successfully parsed, calls the
'run()' method on each command object in turn. This method will be
driven entirely by the Distribution object (which each command object
has a reference to, thanks to its constructor), and the
command-specific options that became attributes of each command
object.
"""
global _setup_stop_after, _setup_distribution
# Determine the distribution class -- either caller-supplied or
# our Distribution (see below).
klass = attrs.get('distclass')
if klass:
del attrs['distclass']
else:
klass = Distribution
if 'script_name' not in attrs:
attrs['script_name'] = os.path.basename(sys.argv[0])
if 'script_args' not in attrs:
attrs['script_args'] = sys.argv[1:]
# Create the Distribution instance, using the remaining arguments
# (ie. everything except distclass) to initialize it
try:
_setup_distribution = dist = klass(attrs)
except DistutilsSetupError as msg:
if 'name' not in attrs:
raise SystemExit("error in setup command: %s" % msg)
else:
raise SystemExit("error in %s setup command: %s" % \
(attrs['name'], msg))
if _setup_stop_after == "init":
return dist
# Find and parse the config file(s): they will override options from
# the setup script, but be overridden by the command line.
dist.parse_config_files()
if DEBUG:
print("options (after parsing config files):")
dist.dump_option_dicts()
if _setup_stop_after == "config":
return dist
# Parse the command line; any command-line errors are the end user's
# fault, so turn them into SystemExit to suppress tracebacks.
try:
ok = dist.parse_command_line()
except DistutilsArgError as msg:
raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
if DEBUG:
print("options (after parsing command line):")
dist.dump_option_dicts()
if _setup_stop_after == "commandline":
return dist
# And finally, run all the commands found on the command line.
if ok:
try:
dist.run_commands()
except KeyboardInterrupt:
raise SystemExit("interrupted")
except (IOError, os.error) as exc:
error = grok_environment_error(exc)
if DEBUG:
sys.stderr.write(error + "\n")
raise
else:
raise SystemExit(error)
except (DistutilsError,
CCompilerError) as msg:
if DEBUG:
raise
else:
raise SystemExit("error: " + str(msg))
return dist
# setup ()
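# A minimal, illustrative setup script built on the keywords listed above (the
# project name, version and module are hypothetical, not part of distutils):
#
#   from distutils.core import setup
#
#   setup(name="example",
#         version="0.1",
#         description="An example distribution",
#         py_modules=["example"])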
def run_setup (script_name, script_args=None, stop_after="run"):
"""Run a setup script in a somewhat controlled environment, and
return the Distribution instance that drives things. This is useful
if you need to find out the distribution meta-data (passed as
keyword args from 'script' to 'setup()', or the contents of the
config files or command-line.
'script_name' is a file that will be read and run with 'exec()';
'sys.argv[0]' will be replaced with 'script' for the duration of the
call. 'script_args' is a list of strings; if supplied,
'sys.argv[1:]' will be replaced by 'script_args' for the duration of
the call.
'stop_after' tells 'setup()' when to stop processing; possible
values:
init
stop after the Distribution instance has been created and
populated with the keyword arguments to 'setup()'
config
stop after config files have been parsed (and their data
stored in the Distribution instance)
commandline
stop after the command-line ('sys.argv[1:]' or 'script_args')
have been parsed (and the data stored in the Distribution)
run [default]
stop after all commands have been run (the same as if 'setup()'
        had been called in the usual way)
Returns the Distribution instance, which provides all information
used to drive the Distutils.
"""
if stop_after not in ('init', 'config', 'commandline', 'run'):
raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))
global _setup_stop_after, _setup_distribution
_setup_stop_after = stop_after
save_argv = sys.argv
g = {'__file__': script_name}
l = {}
try:
try:
sys.argv[0] = script_name
if script_args is not None:
sys.argv[1:] = script_args
with open(script_name, 'rb') as f:
exec(f.read(), g, l)
finally:
sys.argv = save_argv
_setup_stop_after = None
except SystemExit:
# Hmm, should we do something if exiting with a non-zero code
# (ie. error)?
pass
except:
raise
if _setup_distribution is None:
raise RuntimeError(("'distutils.core.setup()' was never called -- "
"perhaps '%s' is not a Distutils setup script?") % \
script_name)
# I wonder if the setup script's namespace -- g and l -- would be of
# any interest to callers?
#print "_setup_distribution:", _setup_distribution
return _setup_distribution
# run_setup ()
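# Illustrative use of run_setup() (the script path is hypothetical): stopping
# after "init" makes it possible to inspect a setup script's metadata without
# running any of its commands:
#
#   dist = run_setup("setup.py", stop_after="init")
#   print(dist.metadata.name, dist.metadata.version)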
|
badlands-model/pyBadlands | refs/heads/master | badlands/badlands/surface/elevationTIN.py | 1 | ##~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~##
## ##
## This file forms part of the Badlands surface processes modelling application. ##
## ##
## For full license and copyright information, please refer to the LICENSE.md file ##
## located at the project root, or contact the authors. ##
## ##
##~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~#~##
"""
This file contains functions to evaluate elevation on the *computational irregular grid*.
.. image:: img/tin.png
:scale: 70 %
:alt: TIN grid
:align: center
From the initial regular grid, a triangular irregular network (**TIN**) is automatically created
with a resolution that is as high as the regular grid one but that could be down-sampled if the
<resfactor> is used in the input XML file under the :code:`grid structure` module.
.. note::
Look at the full documentation for additional information.
"""
import time
import numpy
import os
if 'READTHEDOCS' not in os.environ:
from badlands import pdalgo
from scipy.interpolate import interpn
from scipy.interpolate import LinearNDInterpolator
from scipy.interpolate import NearestNDInterpolator
def _boundary_elevation(elevation, neighbours, edge_length, boundPts, btype):
"""
This function defines the elevation of the TIN surface edges for 2 different types of conditions:
* Infinitely flat condition,
* Continuous slope condition.
Args:
elevation: Numpy arrays containing the internal nodes elevation.
        neighbours: Numpy integer-type array containing, for each node, its neighbours IDs.
edge_length: Numpy float-type array containing the lengths to each neighbour.
boundPts: Number of nodes on the edges of the TIN surface.
btype: Integer defining the type of boundary: 0 for flat and 1 for slope condition.
Returns:
- elevation - numpy array containing the updated elevations on the edges.
"""
# Flat/fixed/wall
if btype == 0:
missedPts = []
for id in range(boundPts):
ngbhs = neighbours[id,:]
ids = numpy.where(ngbhs >= boundPts)[0]
if len(ids) == 1:
elevation[id] = elevation[ngbhs[ids]]
elif len(ids) > 1:
lselect = edge_length[id,ids]
picked = numpy.argmin(lselect)
elevation[id] = elevation[ngbhs[ids[picked]]]
else:
missedPts = numpy.append(missedPts,id)
if len(missedPts) > 0 :
for p in range(len(missedPts)):
id = int(missedPts[p])
ngbhs = neighbours[id,:]
ids = numpy.where((elevation[ngbhs] < 9.e6) & (ngbhs >= 0))[0]
if len(ids) == 0:
raise ValueError('Error while getting boundary elevation for point ''%d''.' % id)
lselect = edge_length[id,ids]
picked = numpy.argmin(lselect)
elevation[id] = elevation[ngbhs[ids[picked]]]
# Slope
elif btype == 1:
missedPts = []
for id in range(boundPts):
ngbhs = neighbours[id,:]
ids = numpy.where(ngbhs >= boundPts)[0]
if len(ids) == 1:
                # Pick closest non-boundary vertex
ln1 = edge_length[id,ids[0]]
id1 = ngbhs[ids[0]]
                # Pick closest non-boundary vertex to first picked
ngbhs2 = neighbours[id1,:]
ids2 = numpy.where(ngbhs2 >= boundPts)[0]
lselect = edge_length[id1,ids2]
if len(lselect) > 0:
picked = numpy.argmin(lselect)
id2 = ngbhs2[ids2[picked]]
ln2 = lselect[picked]
elevation[id] = (elevation[id1]-elevation[id2])*(ln2+ln1)/ln2 + elevation[id2]
else:
missedPts = numpy.append(missedPts,id)
elif len(ids) > 1:
                # Pick closest non-boundary vertex
lselect = edge_length[id,ids]
picked = numpy.argmin(lselect)
id1 = ngbhs[ids[picked]]
ln1 = lselect[picked]
                # Pick closest non-boundary vertex to first picked
ngbhs2 = neighbours[id1,:]
ids2 = numpy.where(ngbhs2 >= boundPts)[0]
lselect2 = edge_length[id1,ids2]
if len(lselect2) > 0:
picked2 = numpy.argmin(lselect2)
id2 = ngbhs2[ids2[picked2]]
ln2 = lselect2[picked2]
elevation[id] = (elevation[id1]-elevation[id2])*(ln2+ln1)/ln2 + elevation[id2]
else:
missedPts = numpy.append(missedPts,id)
else:
missedPts = numpy.append(missedPts,id)
if len(missedPts) > 0 :
for p in range(0,len(missedPts)):
id = int(missedPts[p])
ngbhs = neighbours[id,:]
ids = numpy.where((elevation[ngbhs] < 9.e6) & (ngbhs >= 0))[0]
if len(ids) == 0:
raise ValueError('Error while getting boundary elevation for point ''%d''.' % id)
lselect = edge_length[id,ids]
picked = numpy.argmin(lselect)
elevation[id] = elevation[ngbhs[ids[picked]]]
elevation[:boundPts] -= 0.5
# Associate TIN edge point to the border for ero/dep updates
parentID = numpy.zeros(boundPts,dtype=int)
missedPts = []
for id in range(boundPts):
ngbhs = neighbours[id,:]
ids = numpy.where(ngbhs >= boundPts)[0]
if len(ids) == 1:
parentID[id] = ngbhs[ids]
elif len(ids) > 1:
lselect = edge_length[id,ids]
picked = numpy.argmin(lselect)
parentID[id] = ngbhs[ids[picked]]
else:
missedPts = numpy.append(missedPts,id)
if len(missedPts) > 0 :
for p in range(len(missedPts)):
id = int(missedPts[p])
ngbhs = neighbours[id,:]
ids = numpy.where((elevation[ngbhs] < 9.e6) & (ngbhs >= 0))[0]
if len(ids) == 0:
raise ValueError('Error while getting boundary elevation for point ''%d''.' % id)
lselect = edge_length[id,ids]
picked = numpy.argmin(lselect)
parentID[id] = ngbhs[ids[picked]]
return elevation, parentID
def update_border_elevation(elev, neighbours, edge_length, boundPts, btype='flat'):
"""
This function computes the boundary elevation based on 3 different conditions:
1. infinitely flat,
    2. continuous slope,
3. wall boundary (closed domain).
Note:
        The boundary conditions are defined via the input XML file.
Args:
elev: numpy arrays containing the internal nodes elevation.
        neighbours: numpy integer-type array containing, for each node, its neighbours IDs.
edge_length: numpy float-type array containing the lengths to each neighbour.
boundPts: number of nodes on the edges of the TIN surface.
        btype: string defining the type of boundary (default: 'flat').
Returns
-------
newelev
numpy array containing the updated elevations on the edges.
parentID
numpy array containing the indices of the associated *inside* node to each boundary node.
"""
newelev = elev
if btype == 'wall' or btype == 'flat' or btype == 'slope' or btype == 'fixed' or btype == 'outlet' or btype == 'wall1':
newelev[:boundPts] = 1.e7
thetype = 0
if btype == 'slope' or btype == 'outlet' or btype == 'wall1':
thetype = 1
newelev, parentID = _boundary_elevation(elev, neighbours, edge_length, boundPts, thetype)
if btype == 'wall':
newelev[:boundPts] = 1.e7
if btype == 'outlet':
newelev[1:boundPts] = 1.e7
else:
raise ValueError('Unknown boundary type ''%s''.' % btype)
return newelev, parentID
def getElevation(rX, rY, rZ, coords, interp='linear'):
"""
    This function interpolates elevation from the regular grid to the triangular mesh
    using the **SciPy** *interpn* function.
Args:
rX: numpy arrays containing the X coordinates from the regular grid.
rY: numpy arrays containing the Y coordinates from the regular grid.
rZ: numpy arrays containing the Z coordinates from the regular grid.
coords: numpy float-type array containing X, Y coordinates for the TIN nodes.
interp: interpolation method as in *SciPy interpn function* (default: 'linear')
Returns:
- elev - numpy array containing the updated elevations for the local domain.
"""
# Set new elevation to 0
elev = numpy.zeros(len(coords[:,0]))
# Get the TIN points elevation values using the regular grid dataset
elev = interpn( (rX, rY), rZ, (coords[:,:2]), method=interp)
return elev
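# Standalone sketch of the interpolation performed above (the 3x3 grid and the
# query points are made-up values, only meant to show the shape of the interpn
# call):
#
#   rX = numpy.array([0., 1., 2.])
#   rY = numpy.array([0., 1., 2.])
#   rZ = numpy.arange(9.).reshape(3, 3)
#   tin_xy = numpy.array([[0.5, 0.5], [1.5, 1.0]])
#   elev = interpn((rX, rY), rZ, tin_xy, method='linear')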
def assign_parameter_pit(neighbours, area, diffnb, prop, boundPts, fillTH=1., epsilon=1.e-6):
"""
This function defines the global variables used in the **pit filling algorithm** described in
the :code:`pit_stack` function_.
Args:
        neighbours: numpy integer-type array containing, for each node, its neighbours IDs.
area: numpy float-type array containing the area of each cell.
diffnb: marine diffusion distribution steps.
prop: proportion of marine sediment deposited on downstream nodes.
boundPts: number of nodes on the edges of the TIN surface.
fillTH: limit the filling algorithm to a specific height to prevent complete filling of depression (default: 1. m).
epsilon: force the creation of a minimal slope instead of a flat area to ensure continuous flow pathways (default: 1.e-6 m).
.. _function: https://badlands.readthedocs.io/en/latest/api.html#surface.elevationTIN.pit_stack
Tip:
Planchon and Darboux, 2001: A Fast, Simple and Versatile Algorithm to Fill the Depressions of Digital Elevation Models - Catena,
46, 159-176, `doi:10.1016/S0341-8162(01)00164-3`_.
"""
pdalgo.pitparams(neighbours, area, diffnb, prop, fillTH, epsilon, boundPts)
def pit_stack(elev, allFill, sealevel):
"""
This function calls a **pit filling algorithm** to compute depression-less elevation grid.
Tip:
Planchon and Darboux, 2001: A Fast, Simple and Versatile Algorithm to Fill the Depressions of Digital Elevation Models - Catena,
46, 159-176, `doi:10.1016/S0341-8162(01)00164-3`_.
.. _doi:10.1016/S0341-8162(01)00164-3: http://dx.doi.org/10.1016/S0341-8162(01)00164-3
Args:
elev: numpy arrays containing the nodes elevation.
allFill: produce depression-less surface.
sealevel: current elevation of sea level.
Returns:
- fillH - numpy array containing the filled elevations.
Caution:
The Planchon & Darboux (2001) algorithm is not as efficient as priority-queue approaches such as the one
        proposed in Barnes et al. (2014), and we now use the latter algorithm.
Barnes, Lehman & Mulla 2014: An Efficient Assignment of Drainage Direction Over Flat Surfaces in Raster Digital Elevation Models -
Computers & Geosciences, doi: 10.1016/`j.cageo.2013.01.009`_.
.. _j.cageo.2013.01.009: http://dx.doi.org/10.1016/j.cageo.2013.01.009
"""
# Call stack based pit filling function from libUtils
fillH = pdalgo.pitfilling(elev, allFill, sealevel)
return fillH
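# Minimal usage sketch for the two functions above (the array names stand in
# for data normally produced when the TIN is built; the numbers are arbitrary):
#
#   assign_parameter_pit(neighbours, cellArea, diffnb=5, prop=0.9,
#                        boundPts=boundPts, fillTH=1., epsilon=1.e-6)
#   fillH = pit_stack(elev, allFill=True, sealevel=0.)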
|
jagguli/intellij-community | refs/heads/master | python/testData/quickFixes/PyMakeMethodStaticQuickFixTest/emptyParam_after.py | 249 | __author__ = 'ktisha'
class Child(Base):
def __init__(self):
super(Child, self).__init__()
@staticmethod
def f():
test = 1 |
bnoordhuis/gyp | refs/heads/master | test/win/gyptest-link-enable-winrt.py | 95 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure msvs_enable_winrt works correctly.
"""
import TestGyp
import os
import sys
import struct
CHDIR = 'enable-winrt'
print 'This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466'
sys.exit(0)
if (sys.platform == 'win32' and
int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2013):
test = TestGyp.TestGyp(formats=['msvs'])
test.run_gyp('enable-winrt.gyp', chdir=CHDIR)
test.build('enable-winrt.gyp', 'enable_winrt_dll', chdir=CHDIR)
test.build('enable-winrt.gyp', 'enable_winrt_missing_dll', chdir=CHDIR,
status=1)
test.build('enable-winrt.gyp', 'enable_winrt_winphone_dll', chdir=CHDIR)
test.pass_test()
|
akshayparopkari/phylotoast | refs/heads/master | bin/core_overlap_plot.py | 2 | #!/usr/bin/env python
# coding: utf-8
"""
Given a set of core microbiome files, create a matching set of ovelapping
barplots that visualize which species belong to each core microbiome.
"""
from __future__ import absolute_import, division, print_function
import ast
import argparse
from collections import Counter, OrderedDict
import itertools as it
import os.path as osp
import sys
from phylotoast import otu_calc as oc, util
importerrors = []
try:
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.collections import PolyCollection
from matplotlib.ticker import MaxNLocator
except ImportError as ie:
importerrors.append(ie)
if len(importerrors) != 0:
for item in importerrors:
print("Import Error. Please install missing module:", item)
sys.exit()
__author__ = "Shareef M. Dabdoub"
__copyright__ = "Copyright 2016, Shareef M. Dabdoub"
__credits__ = ["Shareef M. Dabdoub", "Akshay Paropkari",
"Sukirth Ganesan", "Purnima Kumar"]
__license__ = "MIT"
__maintainer__ = "Shareef M. Dabdoub"
__email__ = "[email protected]"
fontsize = 20
font = {"weight": "bold", "size": fontsize}
mpl.rc("font", **font)
def merge_dicts(*dict_args):
"""
Given any number of dicts, shallow copy and merge into a new dict,
precedence goes to key value pairs in latter dicts.
"""
result = {}
for dictionary in dict_args:
result.update(dictionary)
return result
def translate(rect, x, y, width=1):
"""
Given four points of a rectangle, translate the
rectangle to the specified x and y coordinates and,
optionally, change the width.
:type rect: list of tuples
:param rect: Four points describing a rectangle.
:type x: float
:param x: The amount to shift the rectangle along the x-axis.
:type y: float
:param y: The amount to shift the rectangle along the y-axis.
:type width: float
:param width: The amount by which to change the width of the
rectangle.
"""
return ((rect[0][0]+x, rect[0][1]+y), (rect[1][0]+x, rect[1][1]+y),
(rect[2][0]+x+width, rect[2][1]+y), (rect[3][0]+x+width, rect[3][1]+y))
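# For instance (the values follow directly from the arithmetic above): shifting
# the base marker right by 2 and up by 1 with the default width of 1 gives
#
#   translate([(0, 0), (0, 0.5), (0, 0.5), (0, 0)], 2, 1)
#   -> ((2, 1), (2, 1.5), (3, 1.5), (3, 1))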
def load_core_file(core_fp):
"""
For core OTU data file, returns Genus-species identifier for each data
entry.
:type core_fp: str
:param core_fp: A file containing core OTU data.
:rtype: str
:return: Returns genus-species identifier based on identified taxonomical
level.
"""
core = {}
with open(core_fp) as in_f:
for line in in_f.read().splitlines():
if not line.startswith("#"):
otu_id, tax = line.split("\t")
core[otu_id] = oc.otu_name(ast.literal_eval(tax))
return core
def load_tsv_core(core_fp, skip_header=False):
with open(core_fp) as inf:
data = inf.read().splitlines()
if skip_header:
data = data[1:]
return [line.split("\t")[0].strip() for line in data]
def plot_overlaps(otus, group_otus, group_colors,
out_fp, fig_size=None, title="",
filter_common=False):
"""
Given a list of OTUs and a number of groups containing subsets of
the OTU set, plot a presence/absence bar chart showing which species
belong to which groups.
:type otus: list
:param otus: A list of OTU identifiers (names or otherwise) ordered
by greatest presence across the groups, i.e. those that
come first appear in all the groups, next come the OTUs
that appear in n-1 groups, etc...
:type group_otus: OrderedDict
:param group_otus: A dictionary of OTU identifiers (subset of otus)
keyed on group name (for display purposes) in
display order (bottom to top).
:type group_colors: dict
:param group_colors: Color assignment for each group.
"""
def sort_order_group(sp):
"""
Assign a score (for use with sorting) to each OTU based
on the number of groups they occur in and the order
within those groups (order priority as set by group_otus).
"""
count = 0
rank = 0
in_prev = True
max_penalty = len(group_otus)
for i, grp in enumerate(group_otus):
if sp in group_otus[grp]:
count += 1
if in_prev:
rank += 1
else:
rank -= max_penalty - i
in_prev = False
return count, rank
if filter_common:
otus = [otu for otu in otus if sort_order_group(otu)[0] < len(group_otus)]
otus = sorted(otus, key=sort_order_group, reverse=True)
#TODO: fill shared_table during the double loop below and add arg to enable output to file
shared_table = [merge_dicts({grp: None for grp in group_otus},{"OTU": otu_id}) for otu_id in otus]
fig, ax = plt.subplots(figsize=fig_size)
ax.xaxis.set_major_locator(MaxNLocator(nbins=len(otus), integer=True))
# rectangle prototype modified for each plot marker
base = [(0,0),(0,0.5),(0,0.5),(0,0)]
y_step = 1
x_step = 2
bars = []
bar_colors = []
for i, grp in enumerate(group_otus):
for j, otu in enumerate(otus):
if otu in group_otus[grp]:
bars.append(translate(base, j*x_step+0.5, i*y_step))
bar_colors.append(group_colors[grp])
black = (0,0,0,1)
collection = PolyCollection(
verts=bars,
facecolors = bar_colors,
edgecolors = (black,),
linewidths = (1,),
transOffset = ax.transData,
zorder=3
)
ax.add_collection(collection)
# ax.legend([plt.Rectangle((0, 0), 1, 1, fc=color) for color in group_colors.values()],
# group_colors.keys(), loc="best")
# Title
axttl = ax.title
axttl.set_position([.5, 1.05])
ax.set_title(title, {"fontsize": fontsize*1.5, "fontweight": "bold"})
plt.xticks(range(1, len(otus)*x_step, x_step), otus, rotation="vertical")
plt.yticks([i-0.75 for i in range(1, len(group_otus)*y_step+1, y_step)],
group_otus.keys(), rotation="horizontal")
ax.margins(0.05)
ax.yaxis.set_visible(True)
ax.set_xlim((0, len(otus)*x_step))
# save or display result
if out_fp:
plt.savefig(out_fp, facecolors="0.9", edgecolor="none",
bbox_inches="tight", pad_inches=0.1)
else:
plt.show()
def handle_program_options():
"""Parses the given options passed in at the command line."""
parser = argparse.ArgumentParser(description="Given a set of core "
"microbiome files, create a matching set "
"of overlapping barplots that visualize "
"the species belonging to each core "
"microbiome.")
input_grp = parser.add_mutually_exclusive_group(required=True)
input_grp.add_argument("-i", "--core_files", nargs="+",
help="Path to each core microbiome file (i.e. from "
"compute_core_microbiome.py) to visualize as "
"an overlap plot. NOTE: The files should be "
"given in the same order that the groups "
"appear in the mapping file.")
input_grp.add_argument("-tsv", "--tsv_core_files", nargs="+",
help="Path to each core microbiome file in TSV "
"format. The first column is expected to have "
"the OTUs or other names that will be matched "
"between the group cores. All other columns "
"will be ignored. NOTE: The files should be "
"given in the same order that the groups "
"appear in the mapping file.")
parser.add_argument("--skipheader", action="store_true",
help="If using TSV files (-tsv) for input, the header "
"line will be skipped when reading each file.")
parser.add_argument("-m", "--map_fp",
help="Metadata mapping file.")
parser.add_argument("-g", "--group_by", required=True,
help="A column name in the mapping file containing "
"categorical values that will be used to identify"
"groups. Each sample ID must have a group entry."
"Default is no categories and all the data will"
"be treated as a single group. [REQUIRED]")
parser.add_argument("-c", "--color_by", required=True,
help="A column name in the mapping file containing\
hexadecimal (#FF0000) color values that will\
be used to color the groups. Each sample ID must\
have a color entry. [REQUIRED]")
parser.add_argument("--filtercommon", action="store_true",
help="Specifying this option will hide OTUs that \
are shared among all groups.")
parser.add_argument("--title", default="",
help="A descriptive title that will appear at the top \
of the output plot. Surround with quotes if there are\
spaces in the title.")
parser.add_argument("--figsize", default=[10, 5], type=int, nargs=2,
help="Specify the 'width height' in inches for the "
"core overlap plot. By default, figure size is "
"10x5 inches.")
parser.add_argument("-o", "--out_fp", default=None,
help="The path and file name to save the plot under. "
"If specified, the figure will be saved directly "
"instead of opening a window in which the plot "
"can be viewed before saving.")
return parser.parse_args()
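# Example invocation (file and column names are hypothetical; the flags match
# the parser defined above):
#
#   core_overlap_plot.py -i core_A.txt core_B.txt -m mapping.txt \
#       -g Treatment -c Color --title "Core overlap" -o overlap.svg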
def main():
args = handle_program_options()
# Parse and read mapping file
try:
header, imap = util.parse_map_file(args.map_fp)
category_idx = header.index(args.group_by)
except IOError as ioe:
err_msg = "\nError in metadata mapping filepath (-m): {}\n"
sys.exit(err_msg.format(ioe))
# map groups to colors
class_colors = util.color_mapping(imap, header, args.group_by, args.color_by)
core_files = args.core_files
tsv = False
if args.core_files is None:
core_files = args.tsv_core_files
tsv = True
# map each core file to its matching category in the mapping file
group_cores = OrderedDict()
for group, fp in zip(class_colors, core_files):
if not tsv:
core = load_core_file(fp)
group_cores[group] = [name.replace("_", " ") for name in core.values()
if not name.startswith("Unclassified")]
else:
group_cores[group] = load_tsv_core(fp, args.skipheader)
# create the overlap set of OTUs and plot
overlap = set()
overlap.update(*group_cores.values())
plot_overlaps(overlap, group_cores, class_colors,
out_fp=args.out_fp, fig_size=args.figsize, title=args.title,
filter_common=args.filtercommon)
if __name__ == '__main__':
main()
|
Tong-Chen/scikit-learn | refs/heads/master | benchmarks/bench_glm.py | 297 | """
A comparison of different methods in GLM
Data comes from a random square matrix.
"""
from datetime import datetime
import numpy as np
from sklearn import linear_model
from sklearn.utils.bench import total_seconds
if __name__ == '__main__':
import pylab as pl
n_iter = 40
time_ridge = np.empty(n_iter)
time_ols = np.empty(n_iter)
time_lasso = np.empty(n_iter)
dimensions = 500 * np.arange(1, n_iter + 1)
for i in range(n_iter):
print('Iteration %s of %s' % (i, n_iter))
n_samples, n_features = 10 * i + 3, 10 * i + 3
X = np.random.randn(n_samples, n_features)
Y = np.random.randn(n_samples)
start = datetime.now()
ridge = linear_model.Ridge(alpha=1.)
ridge.fit(X, Y)
time_ridge[i] = total_seconds(datetime.now() - start)
start = datetime.now()
ols = linear_model.LinearRegression()
ols.fit(X, Y)
time_ols[i] = total_seconds(datetime.now() - start)
start = datetime.now()
lasso = linear_model.LassoLars()
lasso.fit(X, Y)
time_lasso[i] = total_seconds(datetime.now() - start)
pl.figure('scikit-learn GLM benchmark results')
pl.xlabel('Dimensions')
pl.ylabel('Time (s)')
pl.plot(dimensions, time_ridge, color='r')
pl.plot(dimensions, time_ols, color='g')
pl.plot(dimensions, time_lasso, color='b')
pl.legend(['Ridge', 'OLS', 'LassoLars'], loc='upper left')
pl.axis('tight')
pl.show()
|
alxgu/ansible | refs/heads/devel | lib/ansible/plugins/action/wait_for_connection.py | 16 | # (c) 2017, Dag Wieers <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# CI-required python3 boilerplate
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import time
from datetime import datetime, timedelta
from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
display = Display()
class TimedOutException(Exception):
pass
class ActionModule(ActionBase):
TRANSFERS_FILES = False
_VALID_ARGS = frozenset(('connect_timeout', 'delay', 'sleep', 'timeout'))
DEFAULT_CONNECT_TIMEOUT = 5
DEFAULT_DELAY = 0
DEFAULT_SLEEP = 1
DEFAULT_TIMEOUT = 600
def do_until_success_or_timeout(self, what, timeout, connect_timeout, what_desc, sleep=1):
max_end_time = datetime.utcnow() + timedelta(seconds=timeout)
        error = None
while datetime.utcnow() < max_end_time:
try:
what(connect_timeout)
if what_desc:
display.debug("wait_for_connection: %s success" % what_desc)
return
except Exception as e:
error = e # PY3 compatibility to store exception for use outside of this block
if what_desc:
display.debug("wait_for_connection: %s fail (expected), retrying in %d seconds..." % (what_desc, sleep))
time.sleep(sleep)
raise TimedOutException("timed out waiting for %s: %s" % (what_desc, error))
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
connect_timeout = int(self._task.args.get('connect_timeout', self.DEFAULT_CONNECT_TIMEOUT))
delay = int(self._task.args.get('delay', self.DEFAULT_DELAY))
sleep = int(self._task.args.get('sleep', self.DEFAULT_SLEEP))
timeout = int(self._task.args.get('timeout', self.DEFAULT_TIMEOUT))
if self._play_context.check_mode:
display.vvv("wait_for_connection: skipping for check_mode")
return dict(skipped=True)
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
def ping_module_test(connect_timeout):
''' Test ping module, if available '''
display.vvv("wait_for_connection: attempting ping module test")
# call connection reset between runs if it's there
try:
self._connection.reset()
except AttributeError:
pass
# Use win_ping on winrm/powershell, else use ping
if getattr(self._connection._shell, "_IS_WINDOWS", False):
ping_result = self._execute_module(module_name='win_ping', module_args=dict(), task_vars=task_vars)
else:
ping_result = self._execute_module(module_name='ping', module_args=dict(), task_vars=task_vars)
# Test module output
if ping_result['ping'] != 'pong':
raise Exception('ping test failed')
start = datetime.now()
if delay:
time.sleep(delay)
try:
# If the connection has a transport_test method, use it first
if hasattr(self._connection, 'transport_test'):
self.do_until_success_or_timeout(self._connection.transport_test, timeout, connect_timeout, what_desc="connection port up", sleep=sleep)
# Use the ping module test to determine end-to-end connectivity
self.do_until_success_or_timeout(ping_module_test, timeout, connect_timeout, what_desc="ping module test success", sleep=sleep)
except TimedOutException as e:
result['failed'] = True
result['msg'] = to_text(e)
elapsed = datetime.now() - start
result['elapsed'] = elapsed.seconds
return result
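# Illustrative playbook usage of this action plugin (the task values are
# hypothetical; the option names match _VALID_ARGS above):
#
#   - name: Wait until the managed host is reachable again
#     wait_for_connection:
#       connect_timeout: 5
#       delay: 10
#       sleep: 5
#       timeout: 300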
|
jcoady9/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/lib2to3/tests/__init__.py | 308 | """Make tests/ into a package. This allows us to "import tests" and
have tests.all_tests be a TestSuite representing all test cases
from all test_*.py files in tests/."""
# Author: Collin Winter
import os
import os.path
import unittest
import types
from . import support
all_tests = unittest.TestSuite()
tests_dir = os.path.join(os.path.dirname(__file__), '..', 'tests')
tests = [t[0:-3] for t in os.listdir(tests_dir)
if t.startswith('test_') and t.endswith('.py')]
loader = unittest.TestLoader()
for t in tests:
__import__("",globals(),locals(),[t],level=1)
mod = globals()[t]
all_tests.addTests(loader.loadTestsFromModule(mod))
|
amyvmiwei/chromium | refs/heads/trunk | o3d/site_scons/site_tools/command_output.py | 26 | #!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Command output builder for SCons."""
import os
import signal
import subprocess
import sys
import threading
import time
import SCons.Script
# TODO: Move KillProcessTree() and RunCommand() into their own module, since
# they're used by other tools.
def KillProcessTree(pid):
"""Kills the process and all of its child processes.
Args:
pid: process to kill.
Raises:
OSError: Unsupported OS.
"""
if sys.platform in ('win32', 'cygwin'):
# Use Windows' taskkill utility
killproc_path = '%s;%s\\system32;%s\\system32\\wbem' % (
(os.environ['SYSTEMROOT'],) * 3)
killproc_cmd = 'taskkill /F /T /PID %d' % pid
killproc_task = subprocess.Popen(killproc_cmd, shell=True,
stdout=subprocess.PIPE,
env={'PATH':killproc_path})
killproc_task.communicate()
elif sys.platform in ('linux', 'linux2', 'darwin'):
# Use ps to get a list of processes
ps_task = subprocess.Popen(['/bin/ps', 'x', '-o', 'pid,ppid'], stdout=subprocess.PIPE)
ps_out = ps_task.communicate()[0]
# Parse out a dict of pid->ppid
ppid = {}
for ps_line in ps_out.split('\n'):
w = ps_line.strip().split()
if len(w) < 2:
continue # Not enough words in this line to be a process list
try:
ppid[int(w[0])] = int(w[1])
except ValueError:
pass # Header or footer
# For each process, kill it if it or any of its parents is our child
for p in ppid:
p2 = p
while p2:
if p2 == pid:
os.kill(p, signal.SIGKILL)
break
p2 = ppid.get(p2)
else:
raise OSError('Unsupported OS for KillProcessTree()')
def RunCommand(cmdargs, cwdir=None, env=None, echo_output=True, timeout=None,
timeout_errorlevel=14):
"""Runs an external command.
Args:
cmdargs: A command string, or a tuple containing the command and its
arguments.
cwdir: Working directory for the command, if not None.
env: Environment variables dict, if not None.
echo_output: If True, output will be echoed to stdout.
timeout: If not None, timeout for command in seconds. If command times
out, it will be killed and timeout_errorlevel will be returned.
timeout_errorlevel: The value to return if the command times out.
Returns:
The integer errorlevel from the command.
The combined stdout and stderr as a string.
"""
# Force unicode string in the environment to strings.
if env:
env = dict([(k, str(v)) for k, v in env.items()])
start_time = time.time()
child = subprocess.Popen(cmdargs, cwd=cwdir, env=env, shell=True,
universal_newlines=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
child_out = []
child_retcode = None
def _ReadThread():
"""Thread worker function to read output from child process.
Necessary since there is no cross-platform way of doing non-blocking
reads of the output pipe.
"""
read_run = True
while read_run:
time.sleep(.1) # So we don't poll too frequently
# Need to have a delay of 1 cycle between child completing and
# thread exit, to pick up the final output from the child.
if child_retcode is not None:
read_run = False
new_out = child.stdout.read()
if new_out:
if echo_output:
print new_out,
child_out.append(new_out)
read_thread = threading.Thread(target=_ReadThread)
read_thread.setDaemon(True)
read_thread.start()
# Wait for child to exit or timeout
while child_retcode is None:
time.sleep(.1) # So we don't poll too frequently
child_retcode = child.poll()
if timeout and child_retcode is None:
elapsed = time.time() - start_time
if elapsed > timeout:
print '*** RunCommand() timeout:', cmdargs
KillProcessTree(child.pid)
child_retcode = timeout_errorlevel
# Wait a bit for worker thread to pick up final output and die. No need to
# worry if it's still alive at the end of this, since it's a daemon thread
# and won't block python from exiting. (And since it's blocked, it doesn't
# chew up CPU.)
read_thread.join(5)
if echo_output:
print # end last line of output
return child_retcode, ''.join(child_out)
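# Quick illustration of RunCommand (the command string is arbitrary):
#
#   retcode, output = RunCommand('echo hello', echo_output=False, timeout=10)
#   # retcode -> 0, output -> 'hello\n' on a POSIX shell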
def CommandOutputBuilder(target, source, env):
"""Command output builder.
Args:
self: Environment in which to build
target: List of target nodes
source: List of source nodes
Returns:
None or 0 if successful; nonzero to indicate failure.
Runs the command specified in the COMMAND_OUTPUT_CMDLINE environment variable
and stores its output in the first target file. Additional target files
should be specified if the command creates additional output files.
Runs the command in the COMMAND_OUTPUT_RUN_DIR subdirectory.
"""
env = env.Clone()
cmdline = env.subst('$COMMAND_OUTPUT_CMDLINE', target=target, source=source)
cwdir = env.subst('$COMMAND_OUTPUT_RUN_DIR', target=target, source=source)
if cwdir:
cwdir = os.path.normpath(cwdir)
env.AppendENVPath('PATH', cwdir)
env.AppendENVPath('LD_LIBRARY_PATH', cwdir)
else:
cwdir = None
cmdecho = env.get('COMMAND_OUTPUT_ECHO', True)
timeout = env.get('COMMAND_OUTPUT_TIMEOUT')
timeout_errorlevel = env.get('COMMAND_OUTPUT_TIMEOUT_ERRORLEVEL')
retcode, output = RunCommand(cmdline, cwdir=cwdir, env=env['ENV'],
echo_output=cmdecho, timeout=timeout,
timeout_errorlevel=timeout_errorlevel)
# Save command line output
output_file = open(str(target[0]), 'w')
output_file.write(output)
output_file.close()
return retcode
def generate(env):
# NOTE: SCons requires the use of this name, which fails gpylint.
"""SCons entry point for this tool."""
# Add the builder and tell it which build environment variables we use.
action = SCons.Script.Action(
CommandOutputBuilder,
'Output "$COMMAND_OUTPUT_CMDLINE" to $TARGET',
varlist=[
'COMMAND_OUTPUT_CMDLINE',
'COMMAND_OUTPUT_RUN_DIR',
'COMMAND_OUTPUT_TIMEOUT',
'COMMAND_OUTPUT_TIMEOUT_ERRORLEVEL',
# We use COMMAND_OUTPUT_ECHO also, but that doesn't change the
# command being run or its output.
], )
builder = SCons.Script.Builder(action = action)
env.Append(BUILDERS={'CommandOutput': builder})
# Default command line is to run the first input
env['COMMAND_OUTPUT_CMDLINE'] = '$SOURCE'
# TODO: Add a pseudo-builder which takes an additional command line as an
# argument.
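# Illustrative SConscript usage of the builder registered above (target, source
# and variable values are hypothetical):
#
#   env.Tool('command_output')
#   env.CommandOutput('test_results.txt', ['run_tests$PROGSUFFIX'],
#                     COMMAND_OUTPUT_RUN_DIR='$TESTS_DIR',
#                     COMMAND_OUTPUT_TIMEOUT=60)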
|
ghickman/couchdbkit | refs/heads/master | examples/django_blogapp/blog_app/views.py | 3 | from couchdbkit.ext.django.forms import DocumentForm
from django.forms.fields import CharField
from django.forms.widgets import HiddenInput
from django.shortcuts import render_to_response
from django.template import RequestContext
from models import Post, Comment
class PostForm(DocumentForm):
class Meta:
document = Post
class CommentForm(DocumentForm):
post = CharField(widget=HiddenInput(), required=False)
class Meta:
document = Comment
def home(request):
post = None
form = PostForm(request.POST or None)
if request.POST:
if form.is_valid():
post = form.save()
posts = Post.view('blog_app/all_posts', descending=True)
return render_to_response("home.html", {
"form": form,
"post": post,
"posts": posts
}, context_instance=RequestContext(request))
def view_post(request, post_id):
post = Post.get(post_id)
form = CommentForm(request.POST or None)
if request.POST:
if form.is_valid():
form.cleaned_data['post'] = post_id
form.save()
comments = Comment.view('blog_app/commets_by_post', key=post_id)
return render_to_response("post_details.html", {
"form": form,
"post": post,
"comments": comments
}, context_instance=RequestContext(request))
def edit_post(request, post_id):
post = Post.get(post_id)
form = PostForm(request.POST or None, instance=post)
if form.is_valid():
post = form.save()
return render_to_response("post_edit.html", {
"form": form,
"post": post
}, context_instance=RequestContext(request)) |
danieldmm/minerva | refs/heads/master | retrieval/lucene_index.py | 1 | # Prebuild bag-of-words representations
#
# Copyright: (c) Daniel Duma 2014
# Author: Daniel Duma <[email protected]>
# For license information, see LICENSE.TXT
from __future__ import absolute_import
from __future__ import print_function
import os, sys, json, datetime, math
import db.corpora as cp
import proc.doc_representation as doc_representation
from proc.general_utils import loadFileText, writeFileText, ensureDirExists
from .base_index import BaseIndexer
# lucene
import lucene
from org.apache.lucene.store import SimpleFSDirectory
from org.apache.lucene.document import Field
from org.apache.lucene.analysis.standard import StandardAnalyzer
from org.apache.lucene.document import Document
from org.apache.lucene.index import IndexWriter
from org.apache.lucene.search import IndexSearcher
from org.apache.lucene.queryparser.classic import QueryParser
from org.apache.lucene.search.similarities import DefaultSimilarity, FieldAgnosticSimilarity
from org.apache.lucene.util import Version as LuceneVersion
from org.apache.lucene.index import IndexWriterConfig
from java.io import File
class LuceneIndexer(BaseIndexer):
"""
Prebuilds BOWs etc. for tests
"""
def __init__(self):
"""
"""
pass
def initializeIndexer(self):
"""
Initializes the Java VM, creates directories if needed
"""
print("Initializing VM...")
lucene.initVM(maxheap="768m")
baseFullIndexDir=cp.Corpus.paths.fileLuceneIndex+os.sep
ensureDirExists(baseFullIndexDir)
def createIndexWriter(self, actual_dir, max_field_length=20000000):
"""
Returns an IndexWriter object created for the actual_dir specified
"""
ensureDirExists(actual_dir)
index = SimpleFSDirectory(File(actual_dir))
analyzer = StandardAnalyzer(LuceneVersion.LUCENE_CURRENT)
writerConfig=IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, analyzer)
similarity=FieldAgnosticSimilarity()
writerConfig.setSimilarity(similarity)
writerConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE)
## res= IndexWriter(index, analyzer, True, IndexWriter.MaxFieldLength(max_field_length))
res= IndexWriter(index, writerConfig)
res.deleteAll()
return res
def addDocument(self, writer, new_doc, metadata, fields_to_process, bow_info):
"""
Add a document to the index. Does this using direct Lucene access.
:param new_doc: dict of fields with values
:type new_doc:dict
:param metadata: ditto
:type metadata:dict
:param fields_to_process: only add these fields from the doc dict
:type fields_to_process:list
"""
doc = Document()
total_numTerms=bow_info["total_numterms"]
# each BOW now comes with its field
for field in fields_to_process:
field_object=Field(field, new_doc[field], Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES)
## boost=math.sqrt(numTerms[field]) / float(math.sqrt(total_numTerms)) if total_numTerms > 0 else float(0)
boost=1 / float(math.sqrt(total_numTerms)) if total_numTerms > 0 else float(0)
field_object.setBoost(float(boost))
doc.add(field_object)
json_metadata=json.dumps(metadata)
doc.add(Field("guid", guid, Field.Store.YES, Field.Index.ANALYZED))
doc.add(Field("bow_info", json.dumps(bow_info), Field.Store.YES, Field.Index.NO))
doc.add(Field("metadata", json_metadata, Field.Store.YES, Field.Index.NO))
doc.add(Field("year_from", metadata["year"], Field.Store.YES, Field.Index.ANALYZED))
writer.addDocument(doc)
def main():
pass
if __name__ == '__main__':
main()
|
jmcorgan/gnuradio | refs/heads/master | gr-blocks/python/blocks/qa_throttle.py | 57 | #!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
class test_throttle(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_01(self):
# Test that we can make the block
op = blocks.throttle(gr.sizeof_gr_complex, 1)
if __name__ == '__main__':
gr_unittest.run(test_throttle, "test_throttle.xml")
|
ezequielpereira/Time-Line | refs/heads/master | libs64/wx/lib/art/img2pyartprov.py | 7 | #-----------------------------------------------------------------------------
# Name: img2pyartprov.py
# Purpose:
#
# Author: Riaan Booysen
#
# RCS-ID: $Id: img2pyartprov.py 51004 2008-01-03 08:17:39Z RD $
# Copyright: (c) 2006
# Licence: wxPython
#-----------------------------------------------------------------------------
""" ArtProvider class that publishes images from modules generated by img2py.
Image modules must be generated with the -u and -n <name> parameters.
Typical usage:
>>> import wx, wx.lib.art.img2pyartprov, myimagemodule
>>> wx.ArtProvider.PushProvider(wx.lib.art.img2pyartprov.Img2PyArtProvider(myimagemodule))
If myimagemodule.catalog['MYIMAGE'] is defined, it can be accessed as:
>>> wx.ArtProvider.GetBitmap('wxART_MYIMAGE')
"""
import wx
_NULL_BMP = wx.NullBitmap
class Img2PyArtProvider(wx.ArtProvider):
def __init__(self, imageModule, artIdPrefix='wxART_'):
self.catalog = {}
self.index = []
self.UpdateFromImageModule(imageModule)
self.artIdPrefix = artIdPrefix
wx.ArtProvider.__init__(self)
def UpdateFromImageModule(self, imageModule):
try:
self.catalog.update(imageModule.catalog)
except AttributeError:
raise Exception, 'No catalog dictionary defined for the image module'
try:
self.index.extend(imageModule.index)
except AttributeError:
raise Exception, 'No index list defined for the image module'
def GenerateArtIdList(self):
return [self.artIdPrefix+name for name in self.index]
def CreateBitmap(self, artId, artClient, size):
if artId.startswith(self.artIdPrefix):
name = artId[len(self.artIdPrefix):]
if name in self.catalog:
return self.catalog[name].GetBitmap()
return _NULL_BMP
|
msiebuhr/v8.go | refs/heads/master | v8/tools/testrunner/network/__init__.py | 651 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
nerzhul/ansible | refs/heads/devel | lib/ansible/modules/monitoring/logicmonitor_facts.py | 13 | #!/usr/bin/python
"""LogicMonitor Ansible module for managing Collectors, Hosts and Hostgroups
Copyright (C) 2015 LogicMonitor
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA"""
import socket
import types
import urllib
HAS_LIB_JSON = True
try:
import json
# Detect the python-json library which is incompatible
# Look for simplejson if that's the case
try:
if (
not isinstance(json.loads, types.FunctionType) or
not isinstance(json.dumps, types.FunctionType)
):
raise ImportError
except AttributeError:
raise ImportError
except ImportError:
try:
import simplejson as json
except ImportError:
print(
'\n{"msg": "Error: ansible requires the stdlib json or ' +
'simplejson module, neither was found!", "failed": true}'
)
HAS_LIB_JSON = False
except SyntaxError:
print(
'\n{"msg": "SyntaxError: probably due to installed simplejson ' +
'being for a different python version", "failed": true}'
)
HAS_LIB_JSON = False
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: logicmonitor_facts
short_description: Collect facts about LogicMonitor objects
description:
- LogicMonitor is a hosted, full-stack, infrastructure monitoring platform.
- This module collects facts about hosts and host groups within your LogicMonitor account.
version_added: "2.2"
author: [Ethan Culler-Mayeno (@ethanculler), Jeff Wozniak (@woz5999)]
notes:
- You must have an existing LogicMonitor account for this module to function.
requirements: ["An existing LogicMonitor account", "Linux"]
options:
target:
description:
- The LogicMonitor object you wish to manage.
required: true
default: null
choices: ['host', 'hostgroup']
company:
description:
- The LogicMonitor account company name. If you would log in to your account at "superheroes.logicmonitor.com" you would use "superheroes".
required: true
default: null
user:
description:
- A LogicMonitor user name. The module will authenticate and perform actions on behalf of this user.
required: true
default: null
password:
description:
- The password for the chosen LogicMonitor User.
- If an md5 hash is used, the digest flag must be set to true.
required: true
default: null
collector:
description:
- The fully qualified domain name of a collector in your LogicMonitor account.
- This is optional for querying a LogicMonitor host when a displayname is specified.
- This is required for querying a LogicMonitor host when a displayname is not specified.
required: false
default: null
hostname:
description:
- The hostname of a host in your LogicMonitor account, or the desired hostname of a device to add into monitoring.
- Required for managing hosts (target=host).
required: false
default: 'hostname -f'
displayname:
description:
- The display name of a host in your LogicMonitor account or the desired display name of a device to add into monitoring.
required: false
default: 'hostname -f'
fullpath:
description:
- The fullpath of the hostgroup object you would like to manage.
- Recommend running on a single ansible host.
- Required for management of LogicMonitor host groups (target=hostgroup).
required: false
default: null
...
'''
EXAMPLES = '''
# Always run those modules on localhost using delegate_to:localhost, or localaction
- name: query a list of hosts
logicmonitor_facts:
target: host
company: yourcompany
user: Luigi
password: ImaLuigi,number1!
delegate_to: localhost
- name: query a host group
logicmonitor_facts:
target: hostgroup
fullpath: /servers/production
company: yourcompany
user: mario
password: itsame.Mario!
delegate_to: localhost
'''
RETURN = '''
---
ansible_facts:
description: LogicMonitor properties set for the specified object
returned: success
type: list of dicts containing name/value pairs
example: >
{
"name": "dc",
"value": "1"
},
{
"name": "type",
"value": "prod"
},
{
"name": "system.categories",
"value": ""
},
{
"name": "snmp.community",
"value": "********"
}
...
'''
class LogicMonitor(object):
def __init__(self, module, **params):
self.__version__ = "1.0-python"
self.module = module
self.module.debug("Instantiating LogicMonitor object")
self.check_mode = False
self.company = params["company"]
self.user = params["user"]
self.password = params["password"]
self.fqdn = socket.getfqdn()
self.lm_url = "logicmonitor.com/santaba"
self.__version__ = self.__version__ + "-ansible-module"
def rpc(self, action, params):
"""Make a call to the LogicMonitor RPC library
and return the response"""
self.module.debug("Running LogicMonitor.rpc")
param_str = urllib.urlencode(params)
creds = urllib.urlencode(
{"c": self.company,
"u": self.user,
"p": self.password})
if param_str:
param_str = param_str + "&"
param_str = param_str + creds
try:
url = ("https://" + self.company + "." + self.lm_url +
"/rpc/" + action + "?" + param_str)
# Set custom LogicMonitor header with version
headers = {"X-LM-User-Agent": self.__version__}
# Set headers
f = open_url(url, headers=headers)
raw = f.read()
resp = json.loads(raw)
if resp["status"] == 403:
self.module.debug("Authentication failed.")
self.fail(msg="Error: " + resp["errmsg"])
else:
return raw
except IOError:
ioe = get_exception()
self.fail(msg="Error: Exception making RPC call to " +
"https://" + self.company + "." + self.lm_url +
"/rpc/" + action + "\nException" + str(ioe))
def get_collectors(self):
"""Returns a JSON object containing a list of
LogicMonitor collectors"""
self.module.debug("Running LogicMonitor.get_collectors...")
self.module.debug("Making RPC call to 'getAgents'")
resp = self.rpc("getAgents", {})
resp_json = json.loads(resp)
if resp_json["status"] is 200:
self.module.debug("RPC call succeeded")
return resp_json["data"]
else:
self.fail(msg=resp)
def get_host_by_hostname(self, hostname, collector):
"""Returns a host object for the host matching the
specified hostname"""
self.module.debug("Running LogicMonitor.get_host_by_hostname...")
self.module.debug("Looking for hostname " + hostname)
self.module.debug("Making RPC call to 'getHosts'")
hostlist_json = json.loads(self.rpc("getHosts", {"hostGroupId": 1}))
if collector:
if hostlist_json["status"] == 200:
self.module.debug("RPC call succeeded")
hosts = hostlist_json["data"]["hosts"]
self.module.debug(
"Looking for host matching: hostname " + hostname +
" and collector " + str(collector["id"]))
for host in hosts:
if (host["hostName"] == hostname and
host["agentId"] == collector["id"]):
self.module.debug("Host match found")
return host
self.module.debug("No host match found")
return None
else:
self.module.debug("RPC call failed")
self.module.debug(hostlist_json)
else:
self.module.debug("No collector specified")
return None
def get_host_by_displayname(self, displayname):
"""Returns a host object for the host matching the
specified display name"""
self.module.debug("Running LogicMonitor.get_host_by_displayname...")
self.module.debug("Looking for displayname " + displayname)
self.module.debug("Making RPC call to 'getHost'")
host_json = (json.loads(self.rpc("getHost",
{"displayName": displayname})))
if host_json["status"] == 200:
self.module.debug("RPC call succeeded")
return host_json["data"]
else:
self.module.debug("RPC call failed")
self.module.debug(host_json)
return None
def get_collector_by_description(self, description):
"""Returns a JSON collector object for the collector
matching the specified FQDN (description)"""
self.module.debug(
"Running LogicMonitor.get_collector_by_description..."
)
collector_list = self.get_collectors()
if collector_list is not None:
self.module.debug("Looking for collector with description " +
description)
for collector in collector_list:
if collector["description"] == description:
self.module.debug("Collector match found")
return collector
self.module.debug("No collector match found")
return None
def get_group(self, fullpath):
"""Returns a JSON group object for the group matching the
specified path"""
self.module.debug("Running LogicMonitor.get_group...")
self.module.debug("Making RPC call to getHostGroups")
resp = json.loads(self.rpc("getHostGroups", {}))
if resp["status"] == 200:
self.module.debug("RPC called succeeded")
groups = resp["data"]
self.module.debug("Looking for group matching " + fullpath)
for group in groups:
if group["fullPath"] == fullpath.lstrip('/'):
self.module.debug("Group match found")
return group
self.module.debug("No group match found")
return None
else:
self.module.debug("RPC call failed")
self.module.debug(resp)
return None
def create_group(self, fullpath):
"""Recursively create a path of host groups.
Returns the id of the newly created hostgroup"""
self.module.debug("Running LogicMonitor.create_group...")
res = self.get_group(fullpath)
if res:
self.module.debug("Group " + fullpath + " exists.")
return res["id"]
if fullpath == "/":
self.module.debug("Specified group is root. Doing nothing.")
return 1
else:
self.module.debug("Creating group named " + fullpath)
self.module.debug("System changed")
self.change = True
if self.check_mode:
self.exit(changed=True)
parentpath, name = fullpath.rsplit('/', 1)
parentgroup = self.get_group(parentpath)
parentid = 1
if parentpath == "":
parentid = 1
elif parentgroup:
parentid = parentgroup["id"]
else:
parentid = self.create_group(parentpath)
h = None
# Determine if we're creating a group from host or hostgroup class
if hasattr(self, '_build_host_group_hash'):
h = self._build_host_group_hash(
fullpath,
self.description,
self.properties,
self.alertenable)
h["name"] = name
h["parentId"] = parentid
else:
h = {"name": name,
"parentId": parentid,
"alertEnable": True,
"description": ""}
self.module.debug("Making RPC call to 'addHostGroup'")
resp = json.loads(
self.rpc("addHostGroup", h))
if resp["status"] == 200:
self.module.debug("RPC call succeeded")
return resp["data"]["id"]
elif resp["errmsg"] == "The record already exists":
self.module.debug("The hostgroup already exists")
group = self.get_group(fullpath)
return group["id"]
else:
self.module.debug("RPC call failed")
self.fail(
msg="Error: unable to create new hostgroup \"" + name +
"\".\n" + resp["errmsg"])
def fail(self, msg):
self.module.fail_json(msg=msg, changed=self.change)
def exit(self, changed):
self.module.debug("Changed: " + changed)
self.module.exit_json(changed=changed)
def output_info(self, info):
self.module.debug("Registering properties as Ansible facts")
self.module.exit_json(changed=False, ansible_facts=info)
class Host(LogicMonitor):
def __init__(self, params, module=None):
"""Initializor for the LogicMonitor host object"""
self.change = False
self.params = params
self.collector = None
LogicMonitor.__init__(self, module, **self.params)
self.module.debug("Instantiating Host object")
if self.params["hostname"]:
self.module.debug("Hostname is " + self.params["hostname"])
self.hostname = self.params['hostname']
else:
self.module.debug("No hostname specified. Using " + self.fqdn)
self.hostname = self.fqdn
if self.params["displayname"]:
self.module.debug("Display name is " + self.params["displayname"])
self.displayname = self.params['displayname']
else:
self.module.debug("No display name specified. Using " + self.fqdn)
self.displayname = self.fqdn
# Attempt to get host information via display name or host name
self.module.debug("Attempting to find host by displayname " +
self.displayname)
info = self.get_host_by_displayname(self.displayname)
if info is not None:
self.module.debug("Host found by displayname")
# Use the host information to grab the collector description
# if not provided
if (not self.params["collector"] and
        "agentDescription" in info):
self.module.debug("Setting collector from host response. " +
"Collector " + info["agentDescription"])
self.params["collector"] = info["agentDescription"]
else:
self.module.debug("Host not found by displayname")
# At this point, a valid collector description is required for success
# Check that the description exists or fail
if self.params["collector"]:
self.module.debug("Collector specified is " +
self.params["collector"])
self.collector = (self.get_collector_by_description(
self.params["collector"]))
else:
self.fail(msg="No collector specified.")
# If the host wasn't found via displayname, attempt by hostname
if info is None:
self.module.debug("Attempting to find host by hostname " +
self.hostname)
info = self.get_host_by_hostname(self.hostname, self.collector)
self.info = info
def get_properties(self):
"""Returns a hash of the properties
associated with this LogicMonitor host"""
self.module.debug("Running Host.get_properties...")
if self.info:
self.module.debug("Making RPC call to 'getHostProperties'")
properties_json = (json.loads(self.rpc("getHostProperties",
{'hostId': self.info["id"],
"filterSystemProperties": True})))
if properties_json["status"] == 200:
self.module.debug("RPC call succeeded")
return properties_json["data"]
else:
self.module.debug("Error: there was an issue retrieving the " +
"host properties")
self.module.debug(properties_json["errmsg"])
self.fail(msg=properties_json["status"])
else:
self.module.debug(
"Unable to find LogicMonitor host which matches " +
self.displayname + " (" + self.hostname + ")"
)
return None
def site_facts(self):
"""Output current properties information for the Host"""
self.module.debug("Running Host.site_facts...")
if self.info:
self.module.debug("Host exists")
props = self.get_properties()
self.output_info(props)
else:
self.fail(msg="Error: Host doesn't exit.")
class Hostgroup(LogicMonitor):
def __init__(self, params, module=None):
"""Initializor for the LogicMonitor host object"""
self.change = False
self.params = params
LogicMonitor.__init__(self, module, **self.params)
self.module.debug("Instantiating Hostgroup object")
self.fullpath = self.params["fullpath"]
self.info = self.get_group(self.fullpath)
def get_properties(self, final=False):
"""Returns a hash of the properties
associated with this LogicMonitor host"""
self.module.debug("Running Hostgroup.get_properties...")
if self.info:
self.module.debug("Group found")
self.module.debug("Making RPC call to 'getHostGroupProperties'")
properties_json = json.loads(self.rpc(
"getHostGroupProperties",
{'hostGroupId': self.info["id"],
"finalResult": final}))
if properties_json["status"] == 200:
self.module.debug("RPC call succeeded")
return properties_json["data"]
else:
self.module.debug("RPC call failed")
self.fail(msg=properties_json["status"])
else:
self.module.debug("Group not found")
return None
def site_facts(self):
"""Output current properties information for the Hostgroup"""
self.module.debug("Running Hostgroup.site_facts...")
if self.info:
self.module.debug("Group exists")
props = self.get_properties(True)
self.output_info(props)
else:
self.fail(msg="Error: Group doesn't exit.")
def selector(module):
"""Figure out which object and which actions
to take given the right parameters"""
if module.params["target"] == "host":
target = Host(module.params, module)
target.site_facts()
elif module.params["target"] == "hostgroup":
# Validate target specific required parameters
if module.params["fullpath"] is not None:
target = Hostgroup(module.params, module)
target.site_facts()
else:
module.fail_json(
msg="Parameter 'fullpath' required for target 'hostgroup'")
else:
module.fail_json(
msg="Error: Unexpected target \"" + module.params["target"] +
"\" was specified.")
def main():
TARGETS = [
"host",
"hostgroup"]
module = AnsibleModule(
argument_spec=dict(
target=dict(required=True, default=None, choices=TARGETS),
company=dict(required=True, default=None),
user=dict(required=True, default=None),
password=dict(required=True, default=None, no_log=True),
collector=dict(required=False, default=None),
hostname=dict(required=False, default=None),
displayname=dict(required=False, default=None),
fullpath=dict(required=False, default=None)
),
supports_check_mode=True
)
if HAS_LIB_JSON is not True:
module.fail_json(msg="Unable to load JSON library")
selector(module)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.urls import open_url
if __name__ == "__main__":
main()
|
nightlydash/dash-abe | refs/heads/master | Abe/Chain/BlackCoin.py | 22 | # Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .NvcChain import NvcChain
class BlackCoin(NvcChain):
def __init__(chain, **kwargs):
chain.name = "BlackCoin"
chain.code3 = "BC"
chain.address_version = "\x19"
chain.script_addr_vers = "\x55"
chain.magic = "\x70\x35\x22\x05"
NvcChain.__init__(chain, **kwargs)
def block_header_hash(chain, header):
b = chain.parse_block_header(header)
if (b['version'] > 6):
from .. import util
return util.double_sha256(header)
import ltc_scrypt
return ltc_scrypt.getPoWHash(header)
datadir_conf_file_name = "blackcoin.conf"
datadir_rpcport = 15715
|
JamesMGreene/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/factory_unittest.py | 118 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.port import factory
from webkitpy.port import gtk
from webkitpy.port import mac
from webkitpy.port import qt
from webkitpy.port import test
from webkitpy.port import win
class FactoryTest(unittest.TestCase):
"""Test that the factory creates the proper port object for given combination of port_name, host.platform, and options."""
# FIXME: The ports themselves should expose what options they require,
# instead of passing generic "options".
def setUp(self):
self.webkit_options = MockOptions(pixel_tests=False)
def assert_port(self, port_name=None, os_name=None, os_version=None, options=None, cls=None):
host = MockSystemHost(os_name=os_name, os_version=os_version)
port = factory.PortFactory(host).get(port_name, options=options)
self.assertIsInstance(port, cls)
def test_mac(self):
self.assert_port(port_name='mac-lion', cls=mac.MacPort)
self.assert_port(port_name='mac-lion-wk2', cls=mac.MacPort)
self.assert_port(port_name='mac', os_name='mac', os_version='lion', cls=mac.MacPort)
self.assert_port(port_name=None, os_name='mac', os_version='lion', cls=mac.MacPort)
def test_win(self):
self.assert_port(port_name='win-xp', cls=win.WinPort)
self.assert_port(port_name='win-xp-wk2', cls=win.WinPort)
self.assert_port(port_name='win', os_name='win', os_version='xp', cls=win.WinPort)
self.assert_port(port_name=None, os_name='win', os_version='xp', cls=win.WinPort)
self.assert_port(port_name=None, os_name='win', os_version='xp', options=self.webkit_options, cls=win.WinPort)
def test_gtk(self):
self.assert_port(port_name='gtk', cls=gtk.GtkPort)
def test_qt(self):
self.assert_port(port_name='qt', cls=qt.QtPort)
def test_unknown_specified(self):
self.assertRaises(NotImplementedError, factory.PortFactory(MockSystemHost()).get, port_name='unknown')
def test_unknown_default(self):
self.assertRaises(NotImplementedError, factory.PortFactory(MockSystemHost(os_name='vms')).get)
def test_get_from_builder_name(self):
self.assertEqual(factory.PortFactory(MockSystemHost()).get_from_builder_name('Apple Lion Release WK1 (Tests)').name(),
'mac-lion')
|
nfedera/rg3-youtube-dl | refs/heads/master | youtube_dl/extractor/ign.py | 50 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_iso8601,
)
class IGNIE(InfoExtractor):
"""
Extractor for some of the IGN sites, like www.ign.com, es.ign.com and de.ign.com.
Some videos of it.ign.com are also supported.
"""
_VALID_URL = r'https?://.+?\.ign\.com/(?:[^/]+/)?(?P<type>videos|show_videos|articles|feature|(?:[^/]+/\d+/video))(/.+)?/(?P<name_or_id>.+)'
IE_NAME = 'ign.com'
_API_URL_TEMPLATE = 'http://apis.ign.com/video/v3/videos/%s'
_EMBED_RE = r'<iframe[^>]+?["\']((?:https?:)?//.+?\.ign\.com.+?/embed.+?)["\']'
_TESTS = [
{
'url': 'http://www.ign.com/videos/2013/06/05/the-last-of-us-review',
'md5': 'febda82c4bafecd2d44b6e1a18a595f8',
'info_dict': {
'id': '8f862beef863986b2785559b9e1aa599',
'ext': 'mp4',
'title': 'The Last of Us Review',
'description': 'md5:c8946d4260a4d43a00d5ae8ed998870c',
'timestamp': 1370440800,
'upload_date': '20130605',
'uploader_id': '[email protected]',
}
},
{
'url': 'http://me.ign.com/en/feature/15775/100-little-things-in-gta-5-that-will-blow-your-mind',
'info_dict': {
'id': '100-little-things-in-gta-5-that-will-blow-your-mind',
},
'playlist': [
{
'info_dict': {
'id': '5ebbd138523268b93c9141af17bec937',
'ext': 'mp4',
'title': 'GTA 5 Video Review',
'description': 'Rockstar drops the mic on this generation of games. Watch our review of the masterly Grand Theft Auto V.',
'timestamp': 1379339880,
'upload_date': '20130916',
'uploader_id': '[email protected]',
},
},
{
'info_dict': {
'id': '638672ee848ae4ff108df2a296418ee2',
'ext': 'mp4',
'title': '26 Twisted Moments from GTA 5 in Slow Motion',
'description': 'The twisted beauty of GTA 5 in stunning slow motion.',
'timestamp': 1386878820,
'upload_date': '20131212',
'uploader_id': '[email protected]',
},
},
],
'params': {
'skip_download': True,
},
},
{
'url': 'http://www.ign.com/articles/2014/08/15/rewind-theater-wild-trailer-gamescom-2014?watch',
'md5': '618fedb9c901fd086f6f093564ef8558',
'info_dict': {
'id': '078fdd005f6d3c02f63d795faa1b984f',
'ext': 'mp4',
'title': 'Rewind Theater - Wild Trailer Gamescom 2014',
'description': 'Brian and Jared explore Michel Ancel\'s captivating new preview.',
'timestamp': 1408047180,
'upload_date': '20140814',
'uploader_id': '[email protected]',
},
},
{
'url': 'http://me.ign.com/en/videos/112203/video/how-hitman-aims-to-be-different-than-every-other-s',
'only_matching': True,
},
{
'url': 'http://me.ign.com/ar/angry-birds-2/106533/video/lrd-ldyy-lwl-lfylm-angry-birds',
'only_matching': True,
},
]
def _find_video_id(self, webpage):
res_id = [
r'"video_id"\s*:\s*"(.*?)"',
r'class="hero-poster[^"]*?"[^>]*id="(.+?)"',
r'data-video-id="(.+?)"',
r'<object id="vid_(.+?)"',
r'<meta name="og:image" content=".*/(.+?)-(.+?)/.+.jpg"',
]
return self._search_regex(res_id, webpage, 'video id', default=None)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
name_or_id = mobj.group('name_or_id')
page_type = mobj.group('type')
webpage = self._download_webpage(url, name_or_id)
if page_type != 'video':
multiple_urls = re.findall(
r'<param name="flashvars"[^>]*value="[^"]*?url=(https?://www\.ign\.com/videos/.*?)["&]',
webpage)
if multiple_urls:
entries = [self.url_result(u, ie='IGN') for u in multiple_urls]
return {
'_type': 'playlist',
'id': name_or_id,
'entries': entries,
}
video_id = self._find_video_id(webpage)
if not video_id:
return self.url_result(self._search_regex(
self._EMBED_RE, webpage, 'embed url'))
return self._get_video_info(video_id)
def _get_video_info(self, video_id):
api_data = self._download_json(
self._API_URL_TEMPLATE % video_id, video_id)
formats = []
m3u8_url = api_data['refs'].get('m3uUrl')
if m3u8_url:
formats.extend(self._extract_m3u8_formats(
m3u8_url, video_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False))
f4m_url = api_data['refs'].get('f4mUrl')
if f4m_url:
formats.extend(self._extract_f4m_formats(
f4m_url, video_id, f4m_id='hds', fatal=False))
for asset in api_data['assets']:
formats.append({
'url': asset['url'],
'tbr': asset.get('actual_bitrate_kbps'),
'fps': asset.get('frame_rate'),
'height': int_or_none(asset.get('height')),
'width': int_or_none(asset.get('width')),
})
self._sort_formats(formats)
thumbnails = [{
'url': thumbnail['url']
} for thumbnail in api_data.get('thumbnails', [])]
metadata = api_data['metadata']
return {
'id': api_data.get('videoId') or video_id,
'title': metadata.get('longTitle') or metadata.get('name') or metadata['title'],
'description': metadata.get('description'),
'timestamp': parse_iso8601(metadata.get('publishDate')),
'duration': int_or_none(metadata.get('duration')),
'display_id': metadata.get('slug') or video_id,
'uploader_id': metadata.get('creator'),
'thumbnails': thumbnails,
'formats': formats,
}
class OneUPIE(IGNIE):
_VALID_URL = r'https?://gamevideos\.1up\.com/(?P<type>video)/id/(?P<name_or_id>.+)\.html'
IE_NAME = '1up.com'
_TESTS = [{
'url': 'http://gamevideos.1up.com/video/id/34976.html',
'md5': 'c9cc69e07acb675c31a16719f909e347',
'info_dict': {
'id': '34976',
'ext': 'mp4',
'title': 'Sniper Elite V2 - Trailer',
'description': 'md5:bf0516c5ee32a3217aa703e9b1bc7826',
'timestamp': 1313099220,
'upload_date': '20110811',
'uploader_id': 'IGN',
}
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
result = super(OneUPIE, self)._real_extract(url)
result['id'] = mobj.group('name_or_id')
return result
class PCMagIE(IGNIE):
_VALID_URL = r'https?://(?:www\.)?pcmag\.com/(?P<type>videos|article2)(/.+)?/(?P<name_or_id>.+)'
IE_NAME = 'pcmag'
_EMBED_RE = r'iframe.setAttribute\("src",\s*__util.objToUrlString\("http://widgets\.ign\.com/video/embed/content.html?[^"]*url=([^"]+)["&]'
_TESTS = [{
'url': 'http://www.pcmag.com/videos/2015/01/06/010615-whats-new-now-is-gogo-snooping-on-your-data',
'md5': '212d6154fd0361a2781075f1febbe9ad',
'info_dict': {
'id': 'ee10d774b508c9b8ec07e763b9125b91',
'ext': 'mp4',
'title': '010615_What\'s New Now: Is GoGo Snooping on Your Data?',
'description': 'md5:a7071ae64d2f68cc821c729d4ded6bb3',
'timestamp': 1420571160,
'upload_date': '20150106',
'uploader_id': '[email protected]',
}
}, {
'url': 'http://www.pcmag.com/article2/0,2817,2470156,00.asp',
'md5': '94130c1ca07ba0adb6088350681f16c1',
'info_dict': {
'id': '042e560ba94823d43afcb12ddf7142ca',
'ext': 'mp4',
'title': 'HTC\'s Weird New Re Camera - What\'s New Now',
'description': 'md5:53433c45df96d2ea5d0fda18be2ca908',
'timestamp': 1412953920,
'upload_date': '20141010',
'uploader_id': '[email protected]',
}
}]
|
Krossom/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/encodings/iso2022_jp_2004.py | 816 | #
# iso2022_jp_2004.py: Python Unicode Codec for ISO2022_JP_2004
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp_2004')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='iso2022_jp_2004',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
|
yeahwhat-mc/CloudBotLegacy | refs/heads/develop | plugins/factoids.py | 1 | # Written by Scaevolus 2010
import string
import re
from util import hook, http, text, pyexec
re_lineends = re.compile(r'[\r\n]*')
db_ready = []
# some simple "shortcodes" for formatting purposes
shortcodes = {
'[b]': '\x02',
'[/b]': '\x02',
'[u]': '\x1F',
'[/u]': '\x1F',
'[i]': '\x16',
'[/i]': '\x16'}
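# Illustrative only: text.multiword_replace("[b]bold[/b] and [u]under[/u]", shortcodes)
# would yield "\x02bold\x02 and \x1Funder\x1F", i.e. the raw IRC formatting control codes.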
def db_init(db, conn):
global db_ready
if conn.name not in db_ready:
db.execute("create table if not exists mem(word, data, nick,"
" primary key(word))")
db.commit()
db_ready.append(conn.name)
def get_memory(db, word):
row = db.execute("select data from mem where word=lower(?)",
[word]).fetchone()
if row:
return row[0]
else:
return None
@hook.command("r", permissions=["addfactoid"])
@hook.command(permissions=["addfactoid"])
def remember(inp, nick='', db=None, notice=None, conn=None):
"""remember <word> [+]<data> -- Remembers <data> with <word>. Add +
to <data> to append."""
db_init(db, conn)
append = False
inp = string.replace(inp, "\\n", "\n")
try:
word, data = inp.split(None, 1)
except ValueError:
return remember.__doc__
old_data = get_memory(db, word)
if data.startswith('+') and old_data:
append = True
# remove + symbol
new_data = data[1:]
# append new_data to the old_data
if len(new_data) > 1 and new_data[1] in (string.punctuation + ' '):
data = old_data + new_data
else:
data = old_data + ' ' + new_data
db.execute("replace into mem(word, data, nick) values"
" (lower(?),?,?)", (word, data, nick))
db.commit()
if old_data:
if append:
notice(u"Appending \x02{}\x02 to \x02{}\x02. Type ?{} to see it.".format(new_data, old_data, word))
else:
notice(u'Remembering \x02{}\x02 for \x02{}\x02. Type ?{} to see it.'.format(data, word, word))
notice(u'Previous data was \x02{}\x02'.format(old_data))
else:
notice(u'Remembering \x02{}\x02 for \x02{}\x02. Type ?{} to see it.'.format(data, word, word))
@hook.command("f", permissions=["delfactoid"])
@hook.command(permissions=["delfactoid"])
def forget(inp, db=None, notice=None, conn=None):
"""forget <word> -- Forgets a remembered <word>."""
db_init(db, conn)
data = get_memory(db, inp)
if data:
db.execute("delete from mem where word=lower(?)",
[inp])
db.commit()
notice(u'"%s" has been forgotten.' % data.replace('`', "'"))
return
else:
notice("I don't know about that.")
return
@hook.command
def factinfo(inp, notice=None, db=None, conn=None):
"""factinfo <factoid> -- Shows the source of a factoid."""
db_init(db, conn)
# attempt to get the factoid from the database
data = get_memory(db, inp.strip())
if data:
notice(data)
else:
notice("Unknown Factoid.")
@hook.regex(r'^\? ?(.+)')
def factoid(inp, message=None, db=None, bot=None, action=None, conn=None, input=None):
"""?<word> -- Shows what data is associated with <word>."""
try:
prefix_on = bot.config["plugins"]["factoids"].get("prefix", False)
except KeyError:
prefix_on = False
db_init(db, conn)
# split up the input
split = inp.group(1).strip().split(" ")
factoid_id = split[0]
if len(split) >= 1:
arguments = " ".join(split[1:])
else:
arguments = ""
data = get_memory(db, factoid_id)
if data:
# factoid preprocessors
if data.startswith("<py>"):
code = data[4:].strip()
variables = u'input="""{}"""; nick="{}"; chan="{}"; bot_nick="{}";'.format(arguments.replace('"', '\\"'),
input.nick, input.chan,
input.conn.nick)
result = pyexec.eval_py(variables + code)
else:
result = data
# factoid postprocessors
result = text.multiword_replace(result, shortcodes)
if result.startswith("<act>"):
result = result[5:].strip()
action(result)
elif result.startswith("<url>"):
url = result[5:].strip()
try:
message(http.get(url))
except http.HttpError:
message("Could not fetch URL.")
else:
if prefix_on:
message(u"\x02[{}]:\x02 {}".format(factoid_id, result))
else:
message(result)
@hook.command(autoHelp=False, permissions=["listfactoids"])
def listfactoids(inp, db=None, conn=None, reply=None):
db_init(db, conn)
text = False
for word in db.execute("select word from mem").fetchall():
if not text:
text = word[0]
else:
text += u", {}".format(word[0])
if len(text) > 400:
reply(text.rsplit(u', ', 1)[0])
text = word[0]
return text
|
2buntu/2buntu-blog | refs/heads/master | twobuntu/shorturls/migrations/0001_initial.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import twobuntu.utils
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='ShortURL',
fields=[
('key', models.CharField(default=twobuntu.utils.uuid6, max_length=6, serialize=False, primary_key=True)),
('url', models.URLField(help_text=b'URL to redirect the client to.')),
],
options={
},
bases=(models.Model,),
),
]
|
aaronfang/personal_scripts | refs/heads/master | af_scripts/blendshapes/toggleCtrls.py | 2 | import maya.cmds as cmds
def toggleCtrls():
curPanel = cmds.getPanel(withFocus=1)
if cmds.modelEditor(curPanel,nurbsCurves=1,q=1)==True and cmds.modelEditor(curPanel,nurbsSurfaces=1,q=1)==True:
cmds.modelEditor(curPanel,e=1,nurbsCurves=0)
cmds.modelEditor(curPanel,e=1,nurbsSurfaces=0)
elif cmds.modelEditor(curPanel,nurbsCurves=1,q=1)==False and cmds.modelEditor(curPanel,nurbsSurfaces=1,q=1)==False:
cmds.modelEditor(curPanel,e=1,nurbsCurves=1)
cmds.modelEditor(curPanel,e=1,nurbsSurfaces=1)
toggleCtrls()
|
onitake/ansible | refs/heads/devel | lib/ansible/modules/storage/glusterfs/gluster_volume.py | 33 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Taneli Leppä <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: gluster_volume
short_description: Manage GlusterFS volumes
description:
- Create, remove, start, stop and tune GlusterFS volumes
version_added: '1.9'
options:
name:
description:
- The volume name.
required: true
aliases: ['volume']
state:
description:
- Use present/absent ensure if a volume exists or not.
Use started/stopped to control its availability.
required: true
choices: ['absent', 'present', 'started', 'stopped']
cluster:
description:
- List of hosts to use for probing and brick setup.
host:
description:
- Override local hostname (for peer probing purposes).
replicas:
description:
- Replica count for volume.
arbiters:
description:
- Arbiter count for volume.
version_added: '2.3'
stripes:
description:
- Stripe count for volume.
disperses:
description:
- Disperse count for volume.
version_added: '2.2'
redundancies:
description:
- Redundancy count for volume.
version_added: '2.2'
transport:
description:
- Transport type for volume.
default: tcp
choices: [ tcp, rdma, 'tcp,rdma' ]
bricks:
description:
- Brick paths on servers. Multiple brick paths can be separated by commas.
aliases: [ brick ]
start_on_create:
description:
- Controls whether the volume is started after creation or not.
type: bool
default: 'yes'
rebalance:
description:
- Controls whether the cluster is rebalanced after changes.
type: bool
default: 'no'
directory:
description:
- Directory for limit-usage.
options:
description:
- A dictionary/hash with options/settings for the volume.
quota:
description:
- Quota value for limit-usage (be sure to use 10.0MB instead of 10MB, see quota list).
force:
description:
- If brick is being created in the root partition, module will fail.
Set force to true to override this behaviour.
type: bool
notes:
- Requires cli tools for GlusterFS on servers.
- Will add new bricks, but not remove them.
author:
- Taneli Leppä (@rosmo)
"""
EXAMPLES = """
- name: create gluster volume
gluster_volume:
state: present
name: test1
bricks: /bricks/brick1/g1
rebalance: yes
cluster:
- 192.0.2.10
- 192.0.2.11
run_once: true
- name: tune
gluster_volume:
state: present
name: test1
options:
performance.cache-size: 256MB
- name: Set multiple options on GlusterFS volume
gluster_volume:
state: present
name: test1
options:
{ performance.cache-size: 128MB,
write-behind: 'off',
quick-read: 'on'
}
- name: start gluster volume
gluster_volume:
state: started
name: test1
- name: limit usage
gluster_volume:
state: present
name: test1
directory: /foo
quota: 20.0MB
- name: stop gluster volume
gluster_volume:
state: stopped
name: test1
- name: remove gluster volume
gluster_volume:
state: absent
name: test1
- name: create gluster volume with multiple bricks
gluster_volume:
state: present
name: test2
bricks: /bricks/brick1/g2,/bricks/brick2/g2
cluster:
- 192.0.2.10
- 192.0.2.11
run_once: true
- name: Remove the bricks from gluster volume
gluster_volume:
state: present
name: testvol
bricks: /bricks/brick1/b1,/bricks/brick2/b2
cluster:
- 10.70.42.85
force: true
run_once: true
- name: Reduce cluster configuration
gluster_volume:
state: present
name: testvol
bricks: /bricks/brick3/b1,/bricks/brick4/b2
replicas: 2
cluster:
- 10.70.42.85
force: true
run_once: true
"""
import re
import socket
import time
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
glusterbin = ''
def run_gluster(gargs, **kwargs):
global glusterbin
global module
args = [glusterbin, '--mode=script']
args.extend(gargs)
try:
rc, out, err = module.run_command(args, **kwargs)
if rc != 0:
module.fail_json(msg='error running gluster (%s) command (rc=%d): %s' %
(' '.join(args), rc, out or err), exception=traceback.format_exc())
except Exception as e:
module.fail_json(msg='error running gluster (%s) command: %s' % (' '.join(args),
to_native(e)), exception=traceback.format_exc())
return out
def run_gluster_nofail(gargs, **kwargs):
global glusterbin
global module
args = [glusterbin]
args.extend(gargs)
rc, out, err = module.run_command(args, **kwargs)
if rc != 0:
return None
return out
def get_peers():
out = run_gluster(['peer', 'status'])
peers = {}
hostname = None
uuid = None
state = None
shortNames = False
for row in out.split('\n'):
if ': ' in row:
key, value = row.split(': ')
if key.lower() == 'hostname':
hostname = value
shortNames = False
if key.lower() == 'uuid':
uuid = value
if key.lower() == 'state':
state = value
peers[hostname] = [uuid, state]
elif row.lower() == 'other names:':
shortNames = True
elif row != '' and shortNames is True:
peers[row] = [uuid, state]
elif row == '':
shortNames = False
return peers
def get_volumes():
out = run_gluster(['volume', 'info'])
volumes = {}
volume = {}
for row in out.split('\n'):
if ': ' in row:
key, value = row.split(': ')
if key.lower() == 'volume name':
volume['name'] = value
volume['options'] = {}
volume['quota'] = False
if key.lower() == 'volume id':
volume['id'] = value
if key.lower() == 'status':
volume['status'] = value
if key.lower() == 'transport-type':
volume['transport'] = value
if value.lower().endswith(' (arbiter)'):
if 'arbiters' not in volume:
volume['arbiters'] = []
value = value[:-10]
volume['arbiters'].append(value)
elif key.lower() == 'number of bricks':
volume['replicas'] = value[-1:]
if key.lower() != 'bricks' and key.lower()[:5] == 'brick':
if 'bricks' not in volume:
volume['bricks'] = []
volume['bricks'].append(value)
# Volume options
if '.' in key:
if 'options' not in volume:
volume['options'] = {}
volume['options'][key] = value
if key == 'features.quota' and value == 'on':
volume['quota'] = True
else:
if row.lower() != 'bricks:' and row.lower() != 'options reconfigured:':
if len(volume) > 0:
volumes[volume['name']] = volume
volume = {}
return volumes
def get_quotas(name, nofail):
quotas = {}
if nofail:
out = run_gluster_nofail(['volume', 'quota', name, 'list'])
if not out:
return quotas
else:
out = run_gluster(['volume', 'quota', name, 'list'])
for row in out.split('\n'):
if row[:1] == '/':
q = re.split(r'\s+', row)
quotas[q[0]] = q[1]
return quotas
def wait_for_peer(host):
for x in range(0, 4):
peers = get_peers()
if host in peers and peers[host][1].lower().find('peer in cluster') != -1:
return True
time.sleep(1)
return False
def probe(host, myhostname):
global module
out = run_gluster(['peer', 'probe', host])
if out.find('localhost') == -1 and not wait_for_peer(host):
module.fail_json(msg='failed to probe peer %s on %s' % (host, myhostname))
def probe_all_peers(hosts, peers, myhostname):
for host in hosts:
host = host.strip() # Clean up any extra space for exact comparison
if host not in peers:
probe(host, myhostname)
def create_volume(name, stripe, replica, arbiter, disperse, redundancy, transport, hosts, bricks, force):
args = ['volume', 'create']
args.append(name)
if stripe:
args.append('stripe')
args.append(str(stripe))
if replica:
args.append('replica')
args.append(str(replica))
if arbiter:
args.append('arbiter')
args.append(str(arbiter))
if disperse:
args.append('disperse')
args.append(str(disperse))
if redundancy:
args.append('redundancy')
args.append(str(redundancy))
args.append('transport')
args.append(transport)
for brick in bricks:
for host in hosts:
args.append(('%s:%s' % (host, brick)))
if force:
args.append('force')
run_gluster(args)
def start_volume(name):
run_gluster(['volume', 'start', name])
def stop_volume(name):
run_gluster(['volume', 'stop', name])
def set_volume_option(name, option, parameter):
run_gluster(['volume', 'set', name, option, parameter])
def add_bricks(name, new_bricks, stripe, replica, force):
args = ['volume', 'add-brick', name]
if stripe:
args.append('stripe')
args.append(str(stripe))
if replica:
args.append('replica')
args.append(str(replica))
args.extend(new_bricks)
if force:
args.append('force')
run_gluster(args)
def remove_bricks(name, removed_bricks, force):
# max-tries=12 with default_interval=10 secs
max_tries = 12
retries = 0
success = False
args = ['volume', 'remove-brick', name]
args.extend(removed_bricks)
# create a copy of args to use for commit operation
args_c = args[:]
args.append('start')
run_gluster(args)
# remove-brick operation needs to be followed by commit operation.
if not force:
module.fail_json(msg="Force option is mandatory.")
else:
while retries < max_tries:
last_brick = removed_bricks[-1]
out = run_gluster(['volume', 'remove-brick', name, last_brick, 'status'])
for row in out.split('\n')[1:]:
if 'completed' in row:
# remove-brick successful, call commit operation.
args_c.append('commit')
out = run_gluster(args_c)
success = True
break
else:
time.sleep(10)
if success:
break
retries += 1
if not success:
# remove-brick still in process, needs to be committed after completion.
module.fail_json(msg="Exceeded number of tries, check remove-brick status.\n"
"Commit operation needs to be followed.")
def reduce_config(name, removed_bricks, replicas, force):
out = run_gluster(['volume', 'heal', name, 'info'])
summary = out.split("\n")
for line in summary:
if 'Number' in line and int(line.split(":")[1].strip()) != 0:
module.fail_json(msg="Operation aborted, self-heal in progress.")
args = ['volume', 'remove-brick', name, 'replica', replicas]
args.extend(removed_bricks)
if force:
args.append('force')
else:
module.fail_json(msg="Force option is mandatory")
run_gluster(args)
def do_rebalance(name):
run_gluster(['volume', 'rebalance', name, 'start'])
def enable_quota(name):
run_gluster(['volume', 'quota', name, 'enable'])
def set_quota(name, directory, value):
run_gluster(['volume', 'quota', name, 'limit-usage', directory, value])
def main():
# MAIN
global module
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', required=True, aliases=['volume']),
state=dict(type='str', required=True, choices=['absent', 'started', 'stopped', 'present']),
cluster=dict(type='list'),
host=dict(type='str'),
stripes=dict(type='int'),
replicas=dict(type='int'),
arbiters=dict(type='int'),
disperses=dict(type='int'),
redundancies=dict(type='int'),
transport=dict(type='str', default='tcp', choices=['tcp', 'rdma', 'tcp,rdma']),
bricks=dict(type='str', aliases=['brick']),
start_on_create=dict(type='bool', default=True),
rebalance=dict(type='bool', default=False),
options=dict(type='dict', default={}),
quota=dict(type='str'),
directory=dict(type='str'),
force=dict(type='bool', default=False),
),
)
global glusterbin
glusterbin = module.get_bin_path('gluster', True)
changed = False
action = module.params['state']
volume_name = module.params['name']
cluster = module.params['cluster']
brick_paths = module.params['bricks']
stripes = module.params['stripes']
replicas = module.params['replicas']
arbiters = module.params['arbiters']
disperses = module.params['disperses']
redundancies = module.params['redundancies']
transport = module.params['transport']
myhostname = module.params['host']
start_on_create = module.boolean(module.params['start_on_create'])
rebalance = module.boolean(module.params['rebalance'])
force = module.boolean(module.params['force'])
if not myhostname:
myhostname = socket.gethostname()
# Clean up if last element is empty. Consider that yml can look like this:
# cluster="{% for host in groups['glusterfs'] %}{{ hostvars[host]['private_ip'] }},{% endfor %}"
if cluster is not None and len(cluster) > 1 and cluster[-1] == '':
cluster = cluster[0:-1]
if cluster is None:
cluster = []
if brick_paths is not None and "," in brick_paths:
brick_paths = brick_paths.split(",")
else:
brick_paths = [brick_paths]
options = module.params['options']
quota = module.params['quota']
directory = module.params['directory']
# get current state info
peers = get_peers()
volumes = get_volumes()
quotas = {}
if volume_name in volumes and volumes[volume_name]['quota'] and volumes[volume_name]['status'].lower() == 'started':
quotas = get_quotas(volume_name, True)
# do the work!
if action == 'absent':
if volume_name in volumes:
if volumes[volume_name]['status'].lower() != 'stopped':
stop_volume(volume_name)
run_gluster(['volume', 'delete', volume_name])
changed = True
if action == 'present':
probe_all_peers(cluster, peers, myhostname)
# create if it doesn't exist
if volume_name not in volumes:
create_volume(volume_name, stripes, replicas, arbiters, disperses, redundancies, transport, cluster, brick_paths, force)
volumes = get_volumes()
changed = True
if volume_name in volumes:
if volumes[volume_name]['status'].lower() != 'started' and start_on_create:
start_volume(volume_name)
changed = True
# switch bricks
new_bricks = []
removed_bricks = []
all_bricks = []
bricks_in_volume = volumes[volume_name]['bricks']
for node in cluster:
for brick_path in brick_paths:
brick = '%s:%s' % (node, brick_path)
all_bricks.append(brick)
if brick not in bricks_in_volume:
new_bricks.append(brick)
if not new_bricks and len(all_bricks) < len(bricks_in_volume):
for brick in bricks_in_volume:
if brick not in all_bricks:
removed_bricks.append(brick)
if new_bricks:
add_bricks(volume_name, new_bricks, stripes, replicas, force)
changed = True
if removed_bricks:
if replicas and int(replicas) < int(volumes[volume_name]['replicas']):
reduce_config(volume_name, removed_bricks, str(replicas), force)
else:
remove_bricks(volume_name, removed_bricks, force)
changed = True
# handle quotas
if quota:
if not volumes[volume_name]['quota']:
enable_quota(volume_name)
quotas = get_quotas(volume_name, False)
if directory not in quotas or quotas[directory] != quota:
set_quota(volume_name, directory, quota)
changed = True
# set options
for option in options.keys():
if option not in volumes[volume_name]['options'] or volumes[volume_name]['options'][option] != options[option]:
set_volume_option(volume_name, option, options[option])
changed = True
else:
module.fail_json(msg='failed to create volume %s' % volume_name)
if action != 'absent' and volume_name not in volumes:
module.fail_json(msg='volume not found %s' % volume_name)
if action == 'started':
if volumes[volume_name]['status'].lower() != 'started':
start_volume(volume_name)
changed = True
if action == 'stopped':
if volumes[volume_name]['status'].lower() != 'stopped':
stop_volume(volume_name)
changed = True
if changed:
volumes = get_volumes()
if rebalance:
do_rebalance(volume_name)
facts = {}
facts['glusterfs'] = {'peers': peers, 'volumes': volumes, 'quotas': quotas}
module.exit_json(changed=changed, ansible_facts=facts)
if __name__ == '__main__':
main()
|
zachjanicki/osf.io | refs/heads/develop | scripts/admin_permission_email.py | 54 | #!/usr/bin/env python
# encoding: utf-8
import logging
import datetime
from modularodm import Q
from framework.email.tasks import send_email
from website import mails
from website import models
from website import settings
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
FROM_ADDR = 'OSF Support <[email protected]>'
MESSAGE_NAME = 'permissions_change'
SECURITY_MESSAGE = mails.Mail(
'security_permissions_change',
subject='OSF Privacy Notice',
)
def send_security_message(user, label, mail):
if label in user.security_messages:
return
# Pass mailer so that celery is not used
# Email synchronously so that user is only saved after email has been sent
mails.send_mail(
user.username,
mail,
from_addr=FROM_ADDR,
mailer=send_email,
user=user,
username=settings.MANDRILL_USERNAME,
password=settings.MANDRILL_PASSWORD,
mail_server=settings.MANDRILL_MAIL_SERVER,
)
user.security_messages[label] = datetime.datetime.utcnow()
user.save()
def get_targets():
# Active users who have not received the email
query = (
Q('security_messages.{0}'.format(MESSAGE_NAME), 'exists', False) &
Q('is_registered', 'eq', True) &
Q('password', 'ne', None) &
Q('is_merged', 'ne', True) &
Q('is_disabled', 'ne', True) &
Q('date_confirmed', 'ne', None)
)
return models.User.find(query)
def main(dry_run):
users = get_targets()
for user in users:
logger.info('Sending message to user {0!r}'.format(user))
if not dry_run:
send_security_message(user, MESSAGE_NAME, SECURITY_MESSAGE)
if __name__ == '__main__':
import sys
script_utils.add_file_logger(logger, __file__)
dry_run = 'dry' in sys.argv
init_app(set_backends=True, routes=False)
main(dry_run=dry_run)
import mock
from nose.tools import * # noqa
from tests.base import OsfTestCase
from tests.factories import UserFactory
class TestSendSecurityMessage(OsfTestCase):
def tearDown(self):
super(TestSendSecurityMessage, self).tearDown()
models.User.remove()
def test_get_targets(self):
users = [UserFactory() for _ in range(3)]
users[0].security_messages[MESSAGE_NAME] = datetime.datetime.utcnow()
users[0].save()
targets = get_targets()
assert_equal(set(targets), set(users[1:]))
@mock.patch('scripts.admin_permission_email.send_email')
def test_send_mail(self, mock_send_mail):
user = UserFactory()
send_security_message(user, MESSAGE_NAME, SECURITY_MESSAGE)
user.reload()
assert_in(MESSAGE_NAME, user.security_messages)
@mock.patch('scripts.admin_permission_email.send_email')
def test_main(self, mock_send_mail):
[UserFactory() for _ in range(3)]
assert_equal(len(get_targets()), 3)
main(dry_run=False)
assert_true(mock_send_mail.called)
assert_equal(len(get_targets()), 0)
@mock.patch('scripts.admin_permission_email.send_email')
def test_main_dry(self, mock_send_mail):
[UserFactory() for _ in range(3)]
assert_equal(len(get_targets()), 3)
main(dry_run=True)
assert_false(mock_send_mail.called)
assert_equal(len(get_targets()), 3)
|
ericzolf/ansible | refs/heads/devel | lib/ansible/plugins/inventory/toml.py | 29 | # Copyright (c) 2018 Matt Martz <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = r'''
name: toml
version_added: "2.8"
short_description: Uses a specific TOML file as an inventory source.
description:
- TOML based inventory format
- File MUST have a valid '.toml' file extension
notes:
- Requires the 'toml' python library
'''
EXAMPLES = r'''# fmt: toml
# Example 1
[all.vars]
has_java = false
[web]
children = [
"apache",
"nginx"
]
vars = { http_port = 8080, myvar = 23 }
[web.hosts]
host1 = {}
host2 = { ansible_port = 222 }
[apache.hosts]
tomcat1 = {}
tomcat2 = { myvar = 34 }
tomcat3 = { mysecret = "03#pa33w0rd" }
[nginx.hosts]
jenkins1 = {}
[nginx.vars]
has_java = true
# Example 2
[all.vars]
has_java = false
[web]
children = [
"apache",
"nginx"
]
[web.vars]
http_port = 8080
myvar = 23
[web.hosts.host1]
[web.hosts.host2]
ansible_port = 222
[apache.hosts.tomcat1]
[apache.hosts.tomcat2]
myvar = 34
[apache.hosts.tomcat3]
mysecret = "03#pa33w0rd"
[nginx.hosts.jenkins1]
[nginx.vars]
has_java = true
# Example 3
[ungrouped.hosts]
host1 = {}
host2 = { ansible_host = "127.0.0.1", ansible_port = 44 }
host3 = { ansible_host = "127.0.0.1", ansible_port = 45 }
[g1.hosts]
host4 = {}
[g2.hosts]
host4 = {}
'''
import os
from functools import partial
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.module_utils.six import string_types, text_type
from ansible.parsing.yaml.objects import AnsibleSequence, AnsibleUnicode
from ansible.plugins.inventory import BaseFileInventoryPlugin
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText
try:
import toml
HAS_TOML = True
except ImportError:
HAS_TOML = False
display = Display()
if HAS_TOML and hasattr(toml, 'TomlEncoder'):
class AnsibleTomlEncoder(toml.TomlEncoder):
def __init__(self, *args, **kwargs):
super(AnsibleTomlEncoder, self).__init__(*args, **kwargs)
# Map our custom YAML object types to dump_funcs from ``toml``
self.dump_funcs.update({
AnsibleSequence: self.dump_funcs.get(list),
AnsibleUnicode: self.dump_funcs.get(str),
AnsibleUnsafeBytes: self.dump_funcs.get(str),
AnsibleUnsafeText: self.dump_funcs.get(str),
})
toml_dumps = partial(toml.dumps, encoder=AnsibleTomlEncoder())
else:
def toml_dumps(data):
return toml.dumps(convert_yaml_objects_to_native(data))
def convert_yaml_objects_to_native(obj):
"""Older versions of the ``toml`` python library, don't have a pluggable
way to tell the encoder about custom types, so we need to ensure objects
that we pass are native types.
Only used on ``toml<0.10.0`` where ``toml.TomlEncoder`` is missing.
This function recurses an object and ensures we cast any of the types from
``ansible.parsing.yaml.objects`` into their native types, effectively cleansing
the data before we hand it over to ``toml``
This function doesn't directly check for the types from ``ansible.parsing.yaml.objects``
but instead checks for the types those objects inherit from, to offer more flexibility.
"""
if isinstance(obj, dict):
return dict((k, convert_yaml_objects_to_native(v)) for k, v in obj.items())
elif isinstance(obj, list):
return [convert_yaml_objects_to_native(v) for v in obj]
elif isinstance(obj, text_type):
return text_type(obj)
else:
return obj
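# Rough sketch of the cleansing step above (values purely illustrative):
#   convert_yaml_objects_to_native({'name': AnsibleUnicode(u'web1'),
#                                   'groups': AnsibleSequence([u'web', u'prod'])})
#   -> {'name': u'web1', 'groups': [u'web', u'prod']}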
class InventoryModule(BaseFileInventoryPlugin):
NAME = 'toml'
def _parse_group(self, group, group_data):
if group_data is not None and not isinstance(group_data, MutableMapping):
self.display.warning("Skipping '%s' as this is not a valid group definition" % group)
return
group = self.inventory.add_group(group)
if group_data is None:
return
for key, data in group_data.items():
if key == 'vars':
if not isinstance(data, MutableMapping):
raise AnsibleParserError(
'Invalid "vars" entry for "%s" group, requires a dict, found "%s" instead.' %
(group, type(data))
)
for var, value in data.items():
self.inventory.set_variable(group, var, value)
elif key == 'children':
if not isinstance(data, MutableSequence):
raise AnsibleParserError(
'Invalid "children" entry for "%s" group, requires a list, found "%s" instead.' %
(group, type(data))
)
for subgroup in data:
self._parse_group(subgroup, {})
self.inventory.add_child(group, subgroup)
elif key == 'hosts':
if not isinstance(data, MutableMapping):
raise AnsibleParserError(
'Invalid "hosts" entry for "%s" group, requires a dict, found "%s" instead.' %
(group, type(data))
)
for host_pattern, value in data.items():
hosts, port = self._expand_hostpattern(host_pattern)
self._populate_host_vars(hosts, value, group, port)
else:
self.display.warning(
'Skipping unexpected key "%s" in group "%s", only "vars", "children" and "hosts" are valid' %
(key, group)
)
def _load_file(self, file_name):
if not file_name or not isinstance(file_name, string_types):
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name))
b_file_name = to_bytes(self.loader.path_dwim(file_name))
if not self.loader.path_exists(b_file_name):
raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name)
try:
(b_data, private) = self.loader._get_file_contents(file_name)
return toml.loads(to_text(b_data, errors='surrogate_or_strict'))
except toml.TomlDecodeError as e:
raise AnsibleParserError(
'TOML file (%s) is invalid: %s' % (file_name, to_native(e)),
orig_exc=e
)
except (IOError, OSError) as e:
raise AnsibleParserError(
"An error occurred while trying to read the file '%s': %s" % (file_name, to_native(e)),
orig_exc=e
)
except Exception as e:
raise AnsibleParserError(
"An unexpected error occurred while parsing the file '%s': %s" % (file_name, to_native(e)),
orig_exc=e
)
def parse(self, inventory, loader, path, cache=True):
''' parses the inventory file '''
if not HAS_TOML:
raise AnsibleParserError(
'The TOML inventory plugin requires the python "toml" library'
)
super(InventoryModule, self).parse(inventory, loader, path)
self.set_options()
try:
data = self._load_file(path)
except Exception as e:
raise AnsibleParserError(e)
if not data:
raise AnsibleParserError('Parsed empty TOML file')
elif data.get('plugin'):
raise AnsibleParserError('Plugin configuration TOML file, not TOML inventory')
for group_name in data:
self._parse_group(group_name, data[group_name])
def verify_file(self, path):
if super(InventoryModule, self).verify_file(path):
file_name, ext = os.path.splitext(path)
if ext == '.toml':
return True
return False
|
pombredanne/bup | refs/heads/master | cmd/newliner-cmd.py | 10 | #!/usr/bin/env python
import sys, os, re
from bup import options
from bup import _helpers # fixes up sys.argv on import
optspec = """
bup newliner
"""
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
if extra:
o.fatal("no arguments expected")
r = re.compile(r'([\r\n])')
lastlen = 0
all = ''
width = options._tty_width() or 78
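# The loop below splits incoming data on '\r' and '\n': lines ended by '\r'
# (progress updates) are truncated to the terminal width and padded so each
# one cleanly overwrites the previous line, while '\n'-terminated lines are
# passed through unchanged.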
while 1:
l = r.split(all, 1)
if len(l) <= 1:
if len(all) >= 160:
sys.stdout.write('%s\n' % all[:78])
sys.stdout.flush()
all = all[78:]
try:
b = os.read(sys.stdin.fileno(), 4096)
except KeyboardInterrupt:
break
if not b:
break
all += b
else:
assert(len(l) == 3)
(line, splitchar, all) = l
if splitchar == '\r':
line = line[:width]
sys.stdout.write('%-*s%s' % (lastlen, line, splitchar))
if splitchar == '\r':
lastlen = len(line)
else:
lastlen = 0
sys.stdout.flush()
if lastlen:
sys.stdout.write('%-*s\r' % (lastlen, ''))
if all:
sys.stdout.write('%s\n' % all)
|
ShanghaiTimes/Audacity2015 | refs/heads/master | lib-src/lv2/sratom/waflib/Tools/fc_scan.py | 183 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import re
from waflib import Utils,Task,TaskGen,Logs
from waflib.TaskGen import feature,before_method,after_method,extension
from waflib.Configure import conf
INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
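# The three patterns above match, respectively, Fortran INCLUDE directives,
# USE statements and MODULE definitions; they are compiled case-insensitively
# below and drive the dependency scan in fortran_parser.find_deps().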
re_inc=re.compile(INC_REGEX,re.I)
re_use=re.compile(USE_REGEX,re.I)
re_mod=re.compile(MOD_REGEX,re.I)
class fortran_parser(object):
def __init__(self,incpaths):
self.seen=[]
self.nodes=[]
self.names=[]
self.incpaths=incpaths
def find_deps(self,node):
txt=node.read()
incs=[]
uses=[]
mods=[]
for line in txt.splitlines():
m=re_inc.search(line)
if m:
incs.append(m.group(1))
m=re_use.search(line)
if m:
uses.append(m.group(1))
m=re_mod.search(line)
if m:
mods.append(m.group(1))
return(incs,uses,mods)
def start(self,node):
self.waiting=[node]
while self.waiting:
nd=self.waiting.pop(0)
self.iter(nd)
def iter(self,node):
path=node.abspath()
incs,uses,mods=self.find_deps(node)
for x in incs:
if x in self.seen:
continue
self.seen.append(x)
self.tryfind_header(x)
for x in uses:
name="USE@%s"%x
if not name in self.names:
self.names.append(name)
for x in mods:
name="MOD@%s"%x
if not name in self.names:
self.names.append(name)
def tryfind_header(self,filename):
found=None
for n in self.incpaths:
found=n.find_resource(filename)
if found:
self.nodes.append(found)
self.waiting.append(found)
break
if not found:
if not filename in self.names:
self.names.append(filename)
|
rhdedgar/openshift-tools | refs/heads/stg | openshift/installer/vendored/openshift-ansible-3.7.0/roles/lib_openshift/src/ansible/oc_obj.py | 25 | # pylint: skip-file
# flake8: noqa
# pylint: disable=too-many-branches
def main():
'''
ansible oc module for services
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
namespace=dict(default='default', type='str'),
            all_namespaces=dict(default=False, type='bool'),
name=dict(default=None, type='str'),
files=dict(default=None, type='list'),
kind=dict(required=True, type='str'),
delete_after=dict(default=False, type='bool'),
content=dict(default=None, type='dict'),
force=dict(default=False, type='bool'),
selector=dict(default=None, type='str'),
),
mutually_exclusive=[["content", "files"], ["selector", "name"]],
supports_check_mode=True,
)
rval = OCObject.run_ansible(module.params, module.check_mode)
if 'failed' in rval:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
|
dwf/numpy | refs/heads/master | doc/example.py | 8 | """This is the docstring for the example.py module. Modules names should
have short, all-lowercase names. The module name may have underscores if
this improves readability.
Every module should have a docstring at the very top of the file. The
module's docstring may extend over multiple lines. If your docstring does
extend over multiple lines, the closing three quotation marks must be on
a line by itself, preferably preceded by a blank line.
"""
import os # standard library imports first
# Do NOT import using *, e.g. from numpy import *
#
# Import the module using
#
# import numpy
#
# instead or import individual functions as needed, e.g
#
# from numpy import array, zeros
#
# If you prefer the use of abbreviated module names, we suggest the
# convention used by NumPy itself::
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
# These abbreviated names are not to be used in docstrings; users must
# be able to paste and execute docstrings after importing only the
# numpy module itself, unabbreviated.
from my_module import my_func, other_func
def foo(var1, var2, long_var_name='hi'):
r"""A one-line summary that does not use variable names or the
function name.
Several sentences providing an extended description. Refer to
variables using back-ticks, e.g. `var`.
Parameters
----------
var1 : array_like
Array_like means all those objects -- lists, nested lists, etc. --
that can be converted to an array. We can also refer to
variables like `var1`.
var2 : int
The type above can either refer to an actual Python type
(e.g. ``int``), or describe the type of the variable in more
detail, e.g. ``(N,) ndarray`` or ``array_like``.
    long_var_name : {'hi', 'ho'}, optional
Choices in brackets, default first when optional.
Returns
-------
describe : type
Explanation
output : type
Explanation
tuple : type
Explanation
items : type
even more explaining
Other Parameters
----------------
only_seldom_used_keywords : type
Explanation
common_parameters_listed_above : type
Explanation
Raises
------
BadException
Because you shouldn't have done that.
See Also
--------
otherfunc : relationship (optional)
newfunc : Relationship (optional), which could be fairly long, in which
case the line wraps here.
thirdfunc, fourthfunc, fifthfunc
Notes
-----
Notes about the implementation algorithm (if needed).
This can have multiple paragraphs.
You may include some math:
.. math:: X(e^{j\omega } ) = x(n)e^{ - j\omega n}
And even use a greek symbol like :math:`omega` inline.
References
----------
Cite the relevant literature, e.g. [1]_. You may also cite these
references in the notes section above.
.. [1] O. McNoleg, "The integration of GIS, remote sensing,
expert systems and adaptive co-kriging for environmental habitat
modelling of the Highland Haggis using object-oriented, fuzzy-logic
and neural-network techniques," Computers & Geosciences, vol. 22,
pp. 585-588, 1996.
Examples
--------
These are written in doctest format, and should illustrate how to
use the function.
>>> a=[1,2,3]
>>> print [x + 3 for x in a]
[4, 5, 6]
>>> print "a\n\nb"
a
b
"""
pass
|
kgullikson88/GSSP_Analyzer | refs/heads/master | gsspy/analyzer.py | 1 | from __future__ import print_function, division, absolute_import
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
import logging
from ._utils import get_minimum
# Default labels for the Chi^2 output table
CHI2_LABELS = ['feh', 'Teff', 'logg', 'micro_turb', 'vsini',
'chi2_inter', 'contin_factor', 'chi2', 'chi2_1sig']
# Which labels are parameters (again, default)
PAR_LABELS = ['feh', 'Teff', 'logg', 'micro_turb', 'vsini', 'dilution']
class GSSP_Analyzer(object):
def __init__(self, basedir, chi2_labels=None, par_labels=None):
"""
Analyze the output of a GSSP_single run.
Parameters:
===========
basedir: string
The name of the GSSP output directory.
chi2_labels: iterable, optional
Labels to apply to the columns in the 'Chi2_table.dat',
which is found in basedir
par_labels: iterable, optional
The names of the parameters that were fit. This is
mostly the same as chi2_labels, but without the chi^2
columns
"""
if chi2_labels is None:
chi2_labels = CHI2_LABELS
if par_labels is None:
par_labels = PAR_LABELS
fname = os.path.join(basedir, 'Chi2_table.dat')
try:
df = pd.read_fwf(fname, header=None, names=chi2_labels)
except IOError as e:
logging.warning('File {} not found!'.format(fname))
raise e
self.chi2_labels = chi2_labels
self.par_labels = par_labels
self.chi2_df = df
self.basedir = basedir
return
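    # Illustrative usage sketch (the directory name is hypothetical):
    #   analyzer = GSSP_Analyzer('gssp_run_output')
    #   best_pars, figures = analyzer.estimate_best_parameters()
    #   analyzer.plot_best_model()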
def estimate_best_parameters(self):
"""
Estimate the best parameters by interpolating the grid
Returns:
=========
pd.Series object with the best parameter and associated uncertainties
for each parameter
A tuple of matplotlib.Figure instances with plots for each parameter.
"""
best_grid_pars = self._get_best_grid_pars()
parameters = [p for p in self.par_labels if p in self.chi2_df.columns]
figures = {}
for i, par in enumerate(parameters):
logging.debug('Slicing to find best {}'.format(par))
# Get all the other parameters
other_pars = [p for p in parameters if p != par]
# Get the chi^2 dependence on the current parameter alone
cond = np.all([self.chi2_df[p] == best_grid_pars[p] for p in other_pars], axis=0)
par_dependence = self.chi2_df[cond][[par, 'chi2']]
if len(par_dependence) < 2:
continue
logging.debug(par_dependence)
# Fit the dependence to a polynomial
polypars = np.polyfit(par_dependence[par],
par_dependence['chi2']-best_grid_pars['chi2_1sig'],
2)
chi2_fcn = np.poly1d(polypars)
roots = sorted(np.roots(polypars))
minimum = get_minimum(chi2_fcn, search_range=roots)
if len(minimum) == 1:
minimum = minimum[0]
elif len(minimum) > 1:
chi2_vals = chi2_fcn(minimum)
minimum = minimum[np.argmin(chi2_vals)]
else:
minimum = par_dependence.sort_values(by='chi2')[par].values[0]
# Plot
fig, ax = plt.subplots(1, 1)
ax.scatter(par_dependence[par], par_dependence['chi2'],
marker='x', color='red')
ax.scatter(minimum, chi2_fcn(minimum) + best_grid_pars['chi2_1sig'],
marker='o', color='blue')
x = np.linspace(par_dependence[par].min(), par_dependence[par].max(), 25)
ax.plot(x, chi2_fcn(x) + best_grid_pars['chi2_1sig'], 'g--')
ax.set_xlabel(par)
ax.set_ylabel('$\chi^2$')
# Save the best_parameters
best_grid_pars['best_{}'.format(par)] = minimum
best_grid_pars['1sig_CI_lower_{}'.format(par)] = min(roots)
best_grid_pars['1sig_CI_upper_{}'.format(par)] = max(roots)
figures[par] = fig
return best_grid_pars, figures
def plot_best_model(self):
""" Plot the observed spectrum with the best model
"""
obs_fname = os.path.join(self.basedir, 'Observed_spectrum.dat')
model_fname = os.path.join(self.basedir, 'Synthetic_best_fit.rgs')
obs_spec = np.loadtxt(obs_fname, unpack=True)
model_spec = np.loadtxt(model_fname, usecols=(0,1), unpack=True)
fig, ax = plt.subplots(1, 1, figsize=(12,7))
ax.plot(obs_spec[0], obs_spec[1], 'k-', alpha=0.7, label='Observed spectrum')
ax.plot(model_spec[0], model_spec[1], 'r-', alpha=0.8, label='Model Spectrum')
ax.set_xlabel('Wavelength ($\AA$)')
ax.set_ylabel('Normalized Flux')
leg = ax.legend(loc='best', fancybox=True)
leg.get_frame().set_alpha(0.5)
plt.show()
def _get_best_grid_pars(self):
"""
Finds the best set of parameters (lowest chi2) within the grid
The parameters to search are given in self.par_labels as an iterable
"""
        best_row = self.chi2_df.sort_values(by='chi2', ascending=True).iloc[0]
best_pars = {}
for par in self.par_labels:
if par in best_row:
best_pars[par] = best_row[par]
# Add the chi^2 information
best_pars['chi2'] = best_row['chi2']
best_pars['chi2_1sig'] = best_row['chi2_1sig']
return pd.Series(data=best_pars) |
synthicity/activitysim | refs/heads/master | activitysim/__init__.py | 12 | # ActivitySim
# See full license in LICENSE.txt.
|
nextstrain/fauna | refs/heads/master | tdb/append.py | 1 | import os, argparse, sys
from rethinkdb import r
sys.path.append('') # need to import from base
from base.rethink_io import rethink_io
from base.rethink_interact import rethink_interact
from vdb.append import parser
class append(object):
def __init__(self, **kwargs):
self.rethink_io = rethink_io()
self.rethink_host, self.auth_key = self.rethink_io.assign_rethink(**kwargs)
self.rethink_interact = rethink_interact()
def append(self, virus, from_database='vdb', to_database='test_vdb', **kwargs):
'''
Append documents in database.table to another database.table"
'''
kwargs['rethink_host'] = self.rethink_host
kwargs['auth_key'] = self.auth_key
kwargs['key'] = 'index'
virus = virus.lower()
from_table, to_table = from_database+"."+virus, to_database+"."+virus
self.rethink_interact.append(from_table=from_table, to_table=to_table, **kwargs)
if __name__=="__main__":
args = parser.parse_args()
connVDB = append(**args.__dict__)
connVDB.append(**args.__dict__)
|
kuri65536/python-for-android | refs/heads/master | python-build/python-libs/gdata/src/gdata/tlslite/Checker.py | 359 | """Class for post-handshake certificate checking."""
from utils.cryptomath import hashAndBase64
from X509 import X509
from X509CertChain import X509CertChain
from errors import *
class Checker:
"""This class is passed to a handshake function to check the other
party's certificate chain.
If a handshake function completes successfully, but the Checker
judges the other party's certificate chain to be missing or
inadequate, a subclass of
L{tlslite.errors.TLSAuthenticationError} will be raised.
Currently, the Checker can check either an X.509 or a cryptoID
chain (for the latter, cryptoIDlib must be installed).
"""
def __init__(self, cryptoID=None, protocol=None,
x509Fingerprint=None,
x509TrustList=None, x509CommonName=None,
checkResumedSession=False):
"""Create a new Checker instance.
You must pass in one of these argument combinations:
- cryptoID[, protocol] (requires cryptoIDlib)
- x509Fingerprint
- x509TrustList[, x509CommonName] (requires cryptlib_py)
@type cryptoID: str
@param cryptoID: A cryptoID which the other party's certificate
chain must match. The cryptoIDlib module must be installed.
Mutually exclusive with all of the 'x509...' arguments.
@type protocol: str
@param protocol: A cryptoID protocol URI which the other
party's certificate chain must match. Requires the 'cryptoID'
argument.
@type x509Fingerprint: str
@param x509Fingerprint: A hex-encoded X.509 end-entity
fingerprint which the other party's end-entity certificate must
match. Mutually exclusive with the 'cryptoID' and
'x509TrustList' arguments.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
other party must present a certificate chain which extends to
one of these root certificates. The cryptlib_py module must be
installed. Mutually exclusive with the 'cryptoID' and
'x509Fingerprint' arguments.
@type x509CommonName: str
@param x509CommonName: The end-entity certificate's 'CN' field
must match this value. For a web server, this is typically a
server name such as 'www.amazon.com'. Mutually exclusive with
the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
'x509TrustList' argument.
@type checkResumedSession: bool
@param checkResumedSession: If resumed sessions should be
checked. This defaults to False, on the theory that if the
session was checked once, we don't need to bother
re-checking it.
"""
if cryptoID and (x509Fingerprint or x509TrustList):
raise ValueError()
if x509Fingerprint and x509TrustList:
raise ValueError()
if x509CommonName and not x509TrustList:
raise ValueError()
if protocol and not cryptoID:
raise ValueError()
if cryptoID:
import cryptoIDlib #So we raise an error here
if x509TrustList:
import cryptlib_py #So we raise an error here
self.cryptoID = cryptoID
self.protocol = protocol
self.x509Fingerprint = x509Fingerprint
self.x509TrustList = x509TrustList
self.x509CommonName = x509CommonName
self.checkResumedSession = checkResumedSession
def __call__(self, connection):
"""Check a TLSConnection.
When a Checker is passed to a handshake function, this will
be called at the end of the function.
@type connection: L{tlslite.TLSConnection.TLSConnection}
@param connection: The TLSConnection to examine.
@raise tlslite.errors.TLSAuthenticationError: If the other
party's certificate chain is missing or bad.
"""
if not self.checkResumedSession and connection.resumed:
return
if self.cryptoID or self.x509Fingerprint or self.x509TrustList:
if connection._client:
chain = connection.session.serverCertChain
else:
chain = connection.session.clientCertChain
if self.x509Fingerprint or self.x509TrustList:
if isinstance(chain, X509CertChain):
if self.x509Fingerprint:
if chain.getFingerprint() != self.x509Fingerprint:
raise TLSFingerprintError(\
"X.509 fingerprint mismatch: %s, %s" % \
(chain.getFingerprint(), self.x509Fingerprint))
else: #self.x509TrustList
if not chain.validate(self.x509TrustList):
raise TLSValidationError("X.509 validation failure")
if self.x509CommonName and \
(chain.getCommonName() != self.x509CommonName):
raise TLSAuthorizationError(\
"X.509 Common Name mismatch: %s, %s" % \
(chain.getCommonName(), self.x509CommonName))
elif chain:
raise TLSAuthenticationTypeError()
else:
raise TLSNoAuthenticationError()
elif self.cryptoID:
import cryptoIDlib.CertChain
if isinstance(chain, cryptoIDlib.CertChain.CertChain):
if chain.cryptoID != self.cryptoID:
raise TLSFingerprintError(\
"cryptoID mismatch: %s, %s" % \
(chain.cryptoID, self.cryptoID))
if self.protocol:
if not chain.checkProtocol(self.protocol):
raise TLSAuthorizationError(\
"cryptoID protocol mismatch")
if not chain.validate():
raise TLSValidationError("cryptoID validation failure")
elif chain:
raise TLSAuthenticationTypeError()
else:
raise TLSNoAuthenticationError()
|
wwright2/dcim3-angstrom1 | refs/heads/master | sources/bitbake/lib/codegen.py | 17 | # -*- coding: utf-8 -*-
"""
codegen
~~~~~~~
Extension to ast that allow ast -> python code generation.
:copyright: Copyright 2008 by Armin Ronacher.
:license: BSD.
"""
from ast import *
BOOLOP_SYMBOLS = {
And: 'and',
Or: 'or'
}
BINOP_SYMBOLS = {
Add: '+',
Sub: '-',
Mult: '*',
Div: '/',
FloorDiv: '//',
Mod: '%',
LShift: '<<',
RShift: '>>',
BitOr: '|',
BitAnd: '&',
BitXor: '^'
}
CMPOP_SYMBOLS = {
Eq: '==',
Gt: '>',
GtE: '>=',
In: 'in',
Is: 'is',
IsNot: 'is not',
Lt: '<',
LtE: '<=',
NotEq: '!=',
NotIn: 'not in'
}
UNARYOP_SYMBOLS = {
Invert: '~',
Not: 'not',
UAdd: '+',
USub: '-'
}
ALL_SYMBOLS = {}
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
ALL_SYMBOLS.update(BINOP_SYMBOLS)
ALL_SYMBOLS.update(CMPOP_SYMBOLS)
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
def to_source(node, indent_with=' ' * 4, add_line_information=False):
"""This function can convert a node tree back into python sourcecode.
This is useful for debugging purposes, especially if you're dealing with
custom asts not generated by python itself.
It could be that the sourcecode is evaluable when the AST itself is not
compilable / evaluable. The reason for this is that the AST contains some
more data than regular sourcecode does, which is dropped during
conversion.
Each level of indentation is replaced with `indent_with`. Per default this
parameter is equal to four spaces as suggested by PEP 8, but it might be
adjusted to match the application's styleguide.
If `add_line_information` is set to `True` comments for the line numbers
of the nodes are added to the output. This can be used to spot wrong line
number information of statement nodes.
"""
generator = SourceGenerator(indent_with, add_line_information)
generator.visit(node)
return ''.join(generator.result)
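# Quick round-trip sketch using the helper above (output is illustrative):
#   tree = parse('answer = 40 + 2')
#   to_source(tree)   # -> 'answer = 40 + 2'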
class SourceGenerator(NodeVisitor):
"""This visitor is able to transform a well formed syntax tree into python
sourcecode. For more details have a look at the docstring of the
    `to_source` function.
"""
def __init__(self, indent_with, add_line_information=False):
self.result = []
self.indent_with = indent_with
self.add_line_information = add_line_information
self.indentation = 0
self.new_lines = 0
def write(self, x):
if self.new_lines:
if self.result:
self.result.append('\n' * self.new_lines)
self.result.append(self.indent_with * self.indentation)
self.new_lines = 0
self.result.append(x)
def newline(self, node=None, extra=0):
self.new_lines = max(self.new_lines, 1 + extra)
if node is not None and self.add_line_information:
self.write('# line: %s' % node.lineno)
self.new_lines = 1
def body(self, statements):
self.new_line = True
self.indentation += 1
for stmt in statements:
self.visit(stmt)
self.indentation -= 1
def body_or_else(self, node):
self.body(node.body)
if node.orelse:
self.newline()
self.write('else:')
self.body(node.orelse)
def signature(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(', ')
else:
want_comma.append(True)
padding = [None] * (len(node.args) - len(node.defaults))
for arg, default in zip(node.args, padding + node.defaults):
write_comma()
self.visit(arg)
if default is not None:
self.write('=')
self.visit(default)
if node.vararg is not None:
write_comma()
self.write('*' + node.vararg)
if node.kwarg is not None:
write_comma()
self.write('**' + node.kwarg)
def decorators(self, node):
for decorator in node.decorator_list:
self.newline(decorator)
self.write('@')
self.visit(decorator)
# Statements
def visit_Assign(self, node):
self.newline(node)
for idx, target in enumerate(node.targets):
if idx:
self.write(', ')
self.visit(target)
self.write(' = ')
self.visit(node.value)
def visit_AugAssign(self, node):
self.newline(node)
self.visit(node.target)
self.write(BINOP_SYMBOLS[type(node.op)] + '=')
self.visit(node.value)
def visit_ImportFrom(self, node):
self.newline(node)
self.write('from %s%s import ' % ('.' * node.level, node.module))
for idx, item in enumerate(node.names):
if idx:
self.write(', ')
self.write(item)
def visit_Import(self, node):
self.newline(node)
for item in node.names:
self.write('import ')
self.visit(item)
def visit_Expr(self, node):
self.newline(node)
self.generic_visit(node)
def visit_FunctionDef(self, node):
self.newline(extra=1)
self.decorators(node)
self.newline(node)
self.write('def %s(' % node.name)
self.signature(node.args)
self.write('):')
self.body(node.body)
def visit_ClassDef(self, node):
have_args = []
def paren_or_comma():
if have_args:
self.write(', ')
else:
have_args.append(True)
self.write('(')
self.newline(extra=2)
self.decorators(node)
self.newline(node)
self.write('class %s' % node.name)
for base in node.bases:
paren_or_comma()
self.visit(base)
# XXX: the if here is used to keep this module compatible
# with python 2.6.
if hasattr(node, 'keywords'):
for keyword in node.keywords:
paren_or_comma()
self.write(keyword.arg + '=')
self.visit(keyword.value)
if node.starargs is not None:
paren_or_comma()
self.write('*')
self.visit(node.starargs)
if node.kwargs is not None:
paren_or_comma()
self.write('**')
self.visit(node.kwargs)
self.write(have_args and '):' or ':')
self.body(node.body)
def visit_If(self, node):
self.newline(node)
self.write('if ')
self.visit(node.test)
self.write(':')
self.body(node.body)
while True:
else_ = node.orelse
if len(else_) == 1 and isinstance(else_[0], If):
node = else_[0]
self.newline()
self.write('elif ')
self.visit(node.test)
self.write(':')
self.body(node.body)
else:
self.newline()
self.write('else:')
self.body(else_)
break
def visit_For(self, node):
self.newline(node)
self.write('for ')
self.visit(node.target)
self.write(' in ')
self.visit(node.iter)
self.write(':')
self.body_or_else(node)
def visit_While(self, node):
self.newline(node)
self.write('while ')
self.visit(node.test)
self.write(':')
self.body_or_else(node)
def visit_With(self, node):
self.newline(node)
self.write('with ')
self.visit(node.context_expr)
if node.optional_vars is not None:
self.write(' as ')
self.visit(node.optional_vars)
self.write(':')
self.body(node.body)
def visit_Pass(self, node):
self.newline(node)
self.write('pass')
def visit_Print(self, node):
# XXX: python 2.6 only
self.newline(node)
self.write('print ')
want_comma = False
if node.dest is not None:
self.write(' >> ')
self.visit(node.dest)
want_comma = True
for value in node.values:
if want_comma:
self.write(', ')
self.visit(value)
want_comma = True
if not node.nl:
self.write(',')
def visit_Delete(self, node):
self.newline(node)
self.write('del ')
        for idx, target in enumerate(node.targets):
if idx:
self.write(', ')
self.visit(target)
def visit_TryExcept(self, node):
self.newline(node)
self.write('try:')
self.body(node.body)
for handler in node.handlers:
self.visit(handler)
def visit_TryFinally(self, node):
self.newline(node)
self.write('try:')
self.body(node.body)
self.newline(node)
self.write('finally:')
self.body(node.finalbody)
def visit_Global(self, node):
self.newline(node)
self.write('global ' + ', '.join(node.names))
def visit_Nonlocal(self, node):
self.newline(node)
self.write('nonlocal ' + ', '.join(node.names))
def visit_Return(self, node):
self.newline(node)
self.write('return ')
self.visit(node.value)
def visit_Break(self, node):
self.newline(node)
self.write('break')
def visit_Continue(self, node):
self.newline(node)
self.write('continue')
def visit_Raise(self, node):
# XXX: Python 2.6 / 3.0 compatibility
self.newline(node)
self.write('raise')
if hasattr(node, 'exc') and node.exc is not None:
self.write(' ')
self.visit(node.exc)
if node.cause is not None:
self.write(' from ')
self.visit(node.cause)
elif hasattr(node, 'type') and node.type is not None:
self.visit(node.type)
if node.inst is not None:
self.write(', ')
self.visit(node.inst)
if node.tback is not None:
self.write(', ')
self.visit(node.tback)
# Expressions
def visit_Attribute(self, node):
self.visit(node.value)
self.write('.' + node.attr)
def visit_Call(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(', ')
else:
want_comma.append(True)
self.visit(node.func)
self.write('(')
for arg in node.args:
write_comma()
self.visit(arg)
for keyword in node.keywords:
write_comma()
self.write(keyword.arg + '=')
self.visit(keyword.value)
if node.starargs is not None:
write_comma()
self.write('*')
self.visit(node.starargs)
if node.kwargs is not None:
write_comma()
self.write('**')
self.visit(node.kwargs)
self.write(')')
def visit_Name(self, node):
self.write(node.id)
def visit_Str(self, node):
self.write(repr(node.s))
def visit_Bytes(self, node):
self.write(repr(node.s))
def visit_Num(self, node):
self.write(repr(node.n))
def visit_Tuple(self, node):
self.write('(')
idx = -1
for idx, item in enumerate(node.elts):
if idx:
self.write(', ')
self.visit(item)
self.write(idx and ')' or ',)')
def sequence_visit(left, right):
def visit(self, node):
self.write(left)
for idx, item in enumerate(node.elts):
if idx:
self.write(', ')
self.visit(item)
self.write(right)
return visit
visit_List = sequence_visit('[', ']')
visit_Set = sequence_visit('{', '}')
del sequence_visit
def visit_Dict(self, node):
self.write('{')
for idx, (key, value) in enumerate(zip(node.keys, node.values)):
if idx:
self.write(', ')
self.visit(key)
self.write(': ')
self.visit(value)
self.write('}')
def visit_BinOp(self, node):
self.visit(node.left)
self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
self.visit(node.right)
def visit_BoolOp(self, node):
self.write('(')
for idx, value in enumerate(node.values):
if idx:
self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
self.visit(value)
self.write(')')
def visit_Compare(self, node):
self.write('(')
        self.visit(node.left)
for op, right in zip(node.ops, node.comparators):
            self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
self.visit(right)
self.write(')')
def visit_UnaryOp(self, node):
self.write('(')
op = UNARYOP_SYMBOLS[type(node.op)]
self.write(op)
if op == 'not':
self.write(' ')
self.visit(node.operand)
self.write(')')
def visit_Subscript(self, node):
self.visit(node.value)
self.write('[')
self.visit(node.slice)
self.write(']')
def visit_Slice(self, node):
if node.lower is not None:
self.visit(node.lower)
self.write(':')
if node.upper is not None:
self.visit(node.upper)
if node.step is not None:
self.write(':')
if not (isinstance(node.step, Name) and node.step.id == 'None'):
self.visit(node.step)
def visit_ExtSlice(self, node):
        for idx, item in enumerate(node.dims):
if idx:
self.write(', ')
self.visit(item)
def visit_Yield(self, node):
self.write('yield ')
self.visit(node.value)
def visit_Lambda(self, node):
self.write('lambda ')
self.signature(node.args)
self.write(': ')
self.visit(node.body)
def visit_Ellipsis(self, node):
self.write('Ellipsis')
def generator_visit(left, right):
def visit(self, node):
self.write(left)
self.visit(node.elt)
for comprehension in node.generators:
self.visit(comprehension)
self.write(right)
return visit
visit_ListComp = generator_visit('[', ']')
visit_GeneratorExp = generator_visit('(', ')')
visit_SetComp = generator_visit('{', '}')
del generator_visit
def visit_DictComp(self, node):
self.write('{')
self.visit(node.key)
self.write(': ')
self.visit(node.value)
for comprehension in node.generators:
self.visit(comprehension)
self.write('}')
def visit_IfExp(self, node):
self.visit(node.body)
self.write(' if ')
self.visit(node.test)
self.write(' else ')
self.visit(node.orelse)
def visit_Starred(self, node):
self.write('*')
self.visit(node.value)
def visit_Repr(self, node):
# XXX: python 2.6 only
self.write('`')
self.visit(node.value)
self.write('`')
# Helper Nodes
def visit_alias(self, node):
self.write(node.name)
if node.asname is not None:
self.write(' as ' + node.asname)
def visit_comprehension(self, node):
self.write(' for ')
self.visit(node.target)
self.write(' in ')
self.visit(node.iter)
if node.ifs:
for if_ in node.ifs:
self.write(' if ')
self.visit(if_)
def visit_excepthandler(self, node):
self.newline(node)
self.write('except')
if node.type is not None:
self.write(' ')
self.visit(node.type)
if node.name is not None:
self.write(' as ')
self.visit(node.name)
self.write(':')
self.body(node.body)
|
stackforge/compass-adapters | refs/heads/master | cobbler/triggers/sync/post/migrate_ks.py | 2 | #!/usr/bin/python
"""script to migrate rendered kickstart files from cobbler to outside."""
import logging
from cobbler import api
def main():
"""main entry"""
cobbler_api = api.BootAPI()
for system in cobbler_api.systems():
cobbler_api.kickgen.generate_kickstart_for_system(system.name)
try:
with open(
'/var/www/cblr_ks/%s' % system.name, 'w'
) as kickstart_file:
logging.info("Migrating kickstart for %s", system.name)
data = cobbler_api.kickgen.generate_kickstart_for_system(
system.name)
kickstart_file.write(data)
except Exception as error:
logging.error("Directory /var/www/cblr_ks/ does not exist.")
logging.exception(error)
raise error
if __name__ == '__main__':
logging.info("Running kickstart migration")
main()
|
stoilov/Programming101 | refs/heads/master | week2/Dungeon/fight.py | 1 | from random import randint
from weapon import Weapon
class Fight:
def __init__(self, hero, orc):
self.hero = hero
self.orc = orc
def flip_coin(self):
        chance = randint(0, 99)
hero_first = False
if chance < 50:
hero_first = True
return hero_first
def simulate_fight(self):
hero_turn = self.flip_coin()
while self.hero.health > 0 and self.orc.health > 0:
if hero_turn:
dmg = self.hero.attack()
self.orc.take_damage(dmg)
else:
dmg = self.orc.attack()
self.hero.take_damage(dmg)
hero_turn = not hero_turn
if not self.orc.is_alive():
return "Hero wins."
return "Orc wins."
|
aspose-slides/Aspose.Slides-for-Cloud | refs/heads/master | SDKs/Aspose.Slides-Cloud-SDK-for-Python/asposeslidescloud/models/PresentationStringReplaceResponse.py | 4 | #!/usr/bin/env python
class PresentationStringReplaceResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
"""
Attributes:
swaggerTypes (dict): The key is attribute name and the value is attribute type.
attributeMap (dict): The key is attribute name and the value is json key in definition.
"""
self.swaggerTypes = {
'Document': 'Document',
'Matches': 'int',
'Code': 'str',
'Status': 'str'
}
self.attributeMap = {
'Document': 'Document','Matches': 'Matches','Code': 'Code','Status': 'Status'}
self.Document = None # Document
self.Matches = None # int
self.Code = None # str
self.Status = None # str
|
gangadhar-kadam/mic-wnframework | refs/heads/master | core/page/modules_setup/modules_setup.py | 6 | from __future__ import unicode_literals
import webnotes
@webnotes.whitelist()
def update(ml):
"""update modules"""
webnotes.conn.set_global('hidden_modules', ml)
webnotes.msgprint('Updated')
webnotes.clear_cache() |
PetrDlouhy/django | refs/heads/master | tests/m2m_through/tests.py | 295 | from __future__ import unicode_literals
from datetime import datetime
from operator import attrgetter
from django.test import TestCase
from .models import (
CustomMembership, Employee, Event, Friendship, Group, Ingredient,
Invitation, Membership, Person, PersonSelfRefM2M, Recipe, RecipeIngredient,
Relationship,
)
class M2mThroughTests(TestCase):
def setUp(self):
self.bob = Person.objects.create(name='Bob')
self.jim = Person.objects.create(name='Jim')
self.jane = Person.objects.create(name='Jane')
self.rock = Group.objects.create(name='Rock')
self.roll = Group.objects.create(name='Roll')
def test_retrieve_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
expected = ['Jane', 'Jim']
self.assertQuerysetEqual(
self.rock.members.all(),
expected,
attrgetter("name")
)
def test_get_on_intermediate_model(self):
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.get(person=self.jane, group=self.rock)
self.assertEqual(
repr(queryset),
'<Membership: Jane is a member of Rock>'
)
def test_filter_on_intermediate_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.filter(group=self.rock)
expected = [
'<Membership: Jim is a member of Rock>',
'<Membership: Jane is a member of Rock>',
]
self.assertQuerysetEqual(
queryset,
expected
)
def test_cannot_use_add_on_m2m_with_intermediary_model(self):
msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.add(self.bob)
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_cannot_use_create_on_m2m_with_intermediary_model(self):
msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.create(name='Annie')
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_cannot_use_remove_on_m2m_with_intermediary_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.remove(self.jim)
self.assertQuerysetEqual(
self.rock.members.all(),
['Jim', ],
attrgetter("name")
)
def test_cannot_use_setattr_on_m2m_with_intermediary_model(self):
msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
members = list(Person.objects.filter(name__in=['Bob', 'Jim']))
with self.assertRaisesMessage(AttributeError, msg):
setattr(self.rock, 'members', members)
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_clear_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
self.rock.members.clear()
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_retrieve_reverse_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
expected = ['Rock', 'Roll']
self.assertQuerysetEqual(
self.jim.group_set.all(),
expected,
attrgetter("name")
)
def test_cannot_use_add_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.add(self.bob)
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.create(name='Funk')
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_cannot_use_remove_on_reverse_m2m_with_intermediary_model(self):
Membership.objects.create(person=self.bob, group=self.rock)
msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.remove(self.rock)
self.assertQuerysetEqual(
self.bob.group_set.all(),
['Rock', ],
attrgetter('name')
)
def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
members = list(Group.objects.filter(name__in=['Rock', 'Roll']))
with self.assertRaisesMessage(AttributeError, msg):
setattr(self.bob, 'group_set', members)
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_clear_on_reverse_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
self.jim.group_set.clear()
self.assertQuerysetEqual(
self.jim.group_set.all(),
[]
)
def test_query_model_by_attribute_name_of_related_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Group.objects.filter(members__name='Bob'),
['Roll', ],
attrgetter("name")
)
def test_query_first_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Group.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Roll'],
attrgetter("name")
)
def test_query_second_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Person.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Jane'],
attrgetter("name")
)
def test_query_model_by_related_model_name(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Person.objects.filter(group__name="Rock"),
['Jane', 'Jim'],
attrgetter("name")
)
def test_query_model_by_custom_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
Person.objects.filter(custom__name="Rock"),
['Bob', 'Jim'],
attrgetter("name")
)
def test_query_model_by_intermediate_can_return_non_unique_queryset(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(
person=self.jane, group=self.rock,
date_joined=datetime(2006, 1, 1)
)
Membership.objects.create(
person=self.bob, group=self.roll,
date_joined=datetime(2004, 1, 1))
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(
person=self.jane, group=self.roll,
date_joined=datetime(2004, 1, 1))
qs = Person.objects.filter(
membership__date_joined__gt=datetime(2004, 1, 1)
)
self.assertQuerysetEqual(
qs,
['Jane', 'Jim', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_forward_empty_qs(self):
self.assertQuerysetEqual(
self.rock.custom_members.all(),
[]
)
def test_custom_related_name_reverse_empty_qs(self):
self.assertQuerysetEqual(
self.bob.custom.all(),
[]
)
def test_custom_related_name_forward_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.rock.custom_members.all(),
['Bob', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_reverse_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom.all(),
['Rock'],
attrgetter("name")
)
def test_custom_related_name_doesnt_conflict_with_fky_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom_person_related_name.all(),
['<CustomMembership: Bob is a member of Rock>']
)
def test_through_fields(self):
"""
Tests that relations with intermediary tables with multiple FKs
to the M2M's ``to`` model are possible.
"""
event = Event.objects.create(title='Rockwhale 2014')
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jim)
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jane)
self.assertQuerysetEqual(
event.invitees.all(),
['Jane', 'Jim'],
attrgetter('name')
)
class M2mThroughReferentialTests(TestCase):
def test_self_referential_empty_qs(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
self.assertQuerysetEqual(
tony.friends.all(),
[]
)
def test_self_referential_non_symmentrical_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
def test_self_referential_non_symmentrical_second_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
def test_self_referential_non_symmentrical_clear_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
chris.friends.clear()
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
# Since this isn't a symmetrical relation, Tony's friend link still exists.
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
def test_self_referential_symmentrical(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
Friendship.objects.create(
first=chris, second=tony, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
self.assertQuerysetEqual(
chris.friends.all(),
['Tony'],
attrgetter("name")
)
def test_through_fields_self_referential(self):
john = Employee.objects.create(name='john')
peter = Employee.objects.create(name='peter')
mary = Employee.objects.create(name='mary')
harry = Employee.objects.create(name='harry')
Relationship.objects.create(source=john, target=peter, another=None)
Relationship.objects.create(source=john, target=mary, another=None)
Relationship.objects.create(source=john, target=harry, another=peter)
self.assertQuerysetEqual(
john.subordinates.all(),
['peter', 'mary', 'harry'],
attrgetter('name')
)
class M2mThroughToFieldsTests(TestCase):
def setUp(self):
self.pea = Ingredient.objects.create(iname='pea')
self.potato = Ingredient.objects.create(iname='potato')
self.tomato = Ingredient.objects.create(iname='tomato')
self.curry = Recipe.objects.create(rname='curry')
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.potato)
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.pea)
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.tomato)
def test_retrieval(self):
# Forward retrieval
self.assertQuerysetEqual(
self.curry.ingredients.all(),
[self.pea, self.potato, self.tomato], lambda x: x
)
# Backward retrieval
self.assertEqual(self.tomato.recipes.get(), self.curry)
def test_choices(self):
field = Recipe._meta.get_field('ingredients')
self.assertEqual(
[choice[0] for choice in field.get_choices(include_blank=False)],
['pea', 'potato', 'tomato']
)
|
leesdolphin/rentme | refs/heads/master | rentme/urls.py | 1 | """rentme URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
urlpatterns = static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + [
url(r'^api/', include('rentme.api.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('rentme.data.urls')),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
GeotrekCE/Geotrek-admin | refs/heads/master | geotrek/trekking/migrations/0013_auto_20200228_1755.py | 2 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2020-02-28 16:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('trekking', '0012_auto_20200211_1011'),
]
operations = [
migrations.AlterField(
model_name='accessibility',
name='pictogram',
field=models.FileField(blank=True, max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='difficultylevel',
name='pictogram',
field=models.FileField(blank=True, max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='poi',
name='name',
field=models.CharField(help_text='Public name (Change carefully)', max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='poi',
name='publication_date',
field=models.DateField(blank=True, editable=False, null=True, verbose_name='Publication date'),
),
migrations.AlterField(
model_name='poi',
name='published',
field=models.BooleanField(default=False, help_text='Online', verbose_name='Published'),
),
migrations.AlterField(
model_name='poi',
name='review',
field=models.BooleanField(default=False, verbose_name='Waiting for publication'),
),
migrations.AlterField(
model_name='poitype',
name='pictogram',
field=models.FileField(max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='practice',
name='pictogram',
field=models.FileField(max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='route',
name='pictogram',
field=models.FileField(blank=True, max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='servicetype',
name='name',
field=models.CharField(help_text='Public name (Change carefully)', max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='servicetype',
name='pictogram',
field=models.FileField(max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='servicetype',
name='publication_date',
field=models.DateField(blank=True, editable=False, null=True, verbose_name='Publication date'),
),
migrations.AlterField(
model_name='servicetype',
name='published',
field=models.BooleanField(default=False, help_text='Online', verbose_name='Published'),
),
migrations.AlterField(
model_name='servicetype',
name='review',
field=models.BooleanField(default=False, verbose_name='Waiting for publication'),
),
migrations.AlterField(
model_name='trek',
name='name',
field=models.CharField(help_text='Public name (Change carefully)', max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='trek',
name='publication_date',
field=models.DateField(blank=True, editable=False, null=True, verbose_name='Publication date'),
),
migrations.AlterField(
model_name='trek',
name='published',
field=models.BooleanField(default=False, help_text='Online', verbose_name='Published'),
),
migrations.AlterField(
model_name='trek',
name='review',
field=models.BooleanField(default=False, verbose_name='Waiting for publication'),
),
migrations.AlterField(
model_name='treknetwork',
name='pictogram',
field=models.FileField(max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
migrations.AlterField(
model_name='weblinkcategory',
name='pictogram',
field=models.FileField(max_length=512, null=True, upload_to='upload', verbose_name='Pictogram'),
),
]
|
wilsonxiao/machinekit | refs/heads/master | nc_files/involute.py | 43 | from math import *
print "G20 F60"
print "G64 P0.001"
print "G0 X0 Y0 Z0"
a=.1
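# Involute of a circle of radius a, traced for t = 0.0 .. 9.9 in 0.1 steps:
#   x(t) = a*(cos(t) + t*sin(t)),  y(t) = a*(sin(t) - t*cos(t))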
for i in range(100):
t = i/10.
x = a * (cos(t) + t * sin(t))
y = a * (sin(t) - t * cos(t))
print "G1 X%f Y%f" % (x,y)
print "M2"
|
KaranToor/MA450 | refs/heads/master | google-cloud-sdk/lib/surface/emulators/__init__.py | 4 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gcloud emulators command group."""
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class Emulators(base.Group):
"""Set up your local development environment using emulators."""
detailed_help = {
'DESCRIPTION': '{description}',
}
|
Stane1983/xbmc | refs/heads/master | tools/EventClients/examples/python/example_mouse.py | 262 | #!/usr/bin/python
# This is a simple example showing how you can send mouse movement
# events to XBMC.
# NOTE: Read the comments in 'example_button1.py' for a more detailed
# explanation.
import sys
sys.path.append("../../lib/python")
from xbmcclient import *
from socket import *
def main():
import time
import sys
host = "localhost"
port = 9777
addr = (host, port)
sock = socket(AF_INET,SOCK_DGRAM)
# First packet must be HELO and can contain an icon
packet = PacketHELO("Example Mouse", ICON_PNG,
"../../icons/mouse.png")
packet.send(sock, addr)
# wait for notification window to close (in XBMC)
time.sleep(2)
# send mouse events to take cursor from top left to bottom right of the screen
# here 0 to 65535 will map to XBMC's screen width and height.
# Specifying absolute mouse coordinates is unsupported currently.
for i in range(0, 65535, 2):
packet = PacketMOUSE(i,i)
packet.send(sock, addr)
# ok we're done, close the connection
packet = PacketBYE()
packet.send(sock, addr)
if __name__=="__main__":
main()
|
DamnWidget/anaconda_go | refs/heads/master | commands/doc.py | 2 |
# Copyright (C) 2016 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import os
import traceback
import sublime
import sublime_plugin
from anaconda_go.lib.plugin import typing
from anaconda_go.lib import go, cache, panels
from anaconda_go.lib.helpers import get_symbol, get_settings
from anaconda_go.lib.plugin import Worker, Callback, is_code
class AnacondaGoDoc(sublime_plugin.WindowCommand):
"""Execute godef/guru or both and goto any returnd definition
"""
@property
def go_version(self):
"""Return back a tuple containing the go version
"""
ver = go.go_version
if ver == b'':
return (0, 0)
ver = ver.decode('utf8')
return tuple(int(v) for v in ver.replace('go', '').split('.'))
def run(self, package: bool=False, callstack: bool=False) -> None:
if package is True:
self.run_for_packages()
return
try:
view = self.window.active_view()
self.view = view
row, col = view.rowcol(view.sel()[0].begin())
code = view.substr(sublime.Region(0, view.size()))
data = {
'vid': view.id(),
'path': view.file_name(),
'expr': get_symbol(code, row, col),
'private': get_settings(
view, 'anaconda_go_doc_private_symbols', False),
'force': get_settings(
view, 'anaconda_go_force_go_doc_usage', False),
'offset': view.text_point(*view.rowcol(view.sel()[0].begin())),
'buf': self.modified_buffer(view),
'go_env': {
'GOROOT': go.GOROOT,
'GOPATH': go.GOPATH,
'CGO_ENABLED': go.CGO_ENABLED,
'GO_VERSION': self.go_version
},
'method': 'doc',
'handler': 'anaGonda'
}
Worker().execute(
Callback(
on_success=self.on_success,
on_failure=self._on_failure,
on_timeout=self._on_timeout
),
**data
)
except Exception as err:
print('anaconda_go: go doc error')
print(traceback.print_exc())
def run_for_packages(self) -> None:
"""Run documentation for packages using go doc always
"""
if os.name == 'nt':
sublime.status_message('Sorry, this does not work on Windows')
return
self._packages = []
for pkg in cache.lookup():
self._packages.append(pkg['ImportPath'])
self.window.show_quick_panel(self._packages, self._on_select)
def is_enabled(self) -> bool:
"""Determine if this command is enabled or not
"""
if len(sublime.active_window().views()) == 0:
return False
if not go.ANAGONDA_PRESENT:
return False
view = self.window.active_view()
return is_code(view, lang='go', ignore_comments=True)
def modified_buffer(self, view: sublime.View) -> str:
"""Guru needs this to use unsaved buffers instead of files
"""
code = view.substr(sublime.Region(0, view.size()))
return '\n'.join([
view.file_name(), str(len(code.encode('utf8'))), code
])
def on_success(self, data):
"""Process the results and show them into the exec panel
"""
panel = panels.DocPanel(self.view)
panel.show()
panel.print(data['result'])
def _on_failure(self, data: typing.Dict) -> None:
"""Fired on failures from the callback
"""
print('anaconda_go: go doc error')
print(data['error'])
sublime.status_message(data['error'])
def _on_timeout(self, data: typing.Dict) -> None:
"""Fired when the callback times out
"""
print('Golang go doc definition timed out')
def _on_select(self, index: int) -> None:
"""Called when a package is selected from the quick panel
"""
if index == -1:
return
package = self._packages[index]
try:
view = self.window.active_view()
self.view = view
data = {
'vid': view.id(),
'path': view.file_name(),
'expr': package,
'private': get_settings(
view, 'anaconda_go_doc_private_symbols', False),
'force': True,
'offset': 0,
'buf': '',
'go_env': {
'GOROOT': go.GOROOT,
'GOPATH': go.GOPATH,
'CGO_ENABLED': go.CGO_ENABLED,
'GO_VERSION': self.go_version
},
'method': 'doc',
'handler': 'anaGonda'
}
Worker().execute(
Callback(
on_success=self.on_success,
on_failure=self._on_failure,
on_timeout=self._on_timeout
),
**data
)
except Exception as err:
print('anaconda_go: go doc error')
print(traceback.print_exc())
|
wjr1005/p2pool | refs/heads/master | setup.py | 30 | import os
import shutil
import sys
import zipfile
import platform
from distutils.core import setup
from distutils.sysconfig import get_python_lib
import py2exe
version = __import__('p2pool').__version__
im64 = '64' in platform.architecture()[0]
extra_includes = []
import p2pool.networks
extra_includes.extend('p2pool.networks.' + x for x in p2pool.networks.nets)
import p2pool.bitcoin.networks
extra_includes.extend('p2pool.bitcoin.networks.' + x for x in p2pool.bitcoin.networks.nets)
if os.path.exists('INITBAK'):
os.remove('INITBAK')
os.rename(os.path.join('p2pool', '__init__.py'), 'INITBAK')
try:
open(os.path.join('p2pool', '__init__.py'), 'wb').write('__version__ = %r%s%sDEBUG = False%s' % (version, os.linesep, os.linesep, os.linesep))
mfcdir = get_python_lib() + '\pythonwin\\'
mfcfiles = [os.path.join(mfcdir, i) for i in ["mfc90.dll", "mfc90u.dll", "mfcm90.dll", "mfcm90u.dll", "Microsoft.VC90.MFC.manifest"]]
bundle = 1
if im64:
bundle = bundle + 2
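    # Assumption about py2exe bundle_files levels: 1 bundles everything
    # (including the interpreter) into the exe, while 3 keeps dependencies as
    # separate files; 64-bit builds cannot bundle the interpreter, hence 3.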
sys.argv[1:] = ['py2exe']
setup(name='p2pool',
version=version,
description='Peer-to-peer Bitcoin mining pool',
author='Forrest Voight',
author_email='[email protected]',
url='http://p2pool.forre.st/',
data_files=[
('', ['README.md']),
("Microsoft.VC90.MFC", mfcfiles),
('web-static', [
'web-static/d3.v2.min.js',
'web-static/favicon.ico',
'web-static/graphs.html',
'web-static/index.html',
'web-static/share.html',
]),
],
console=['run_p2pool.py'],
options=dict(py2exe=dict(
bundle_files=bundle,
dll_excludes=['w9xpopen.exe', "mswsock.dll", "MSWSOCK.dll"],
includes=['twisted.web.resource', 'ltc_scrypt'] + extra_includes,
)),
zipfile=None,
)
finally:
os.remove(os.path.join('p2pool', '__init__.py'))
os.rename('INITBAK', os.path.join('p2pool', '__init__.py'))
win = '32'
if im64:
win = '64'
dir_name = 'p2pool_win' + win + '_' + version
if os.path.exists(dir_name):
shutil.rmtree(dir_name)
os.rename('dist', dir_name)
with zipfile.ZipFile(dir_name + '.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
for dirpath, dirnames, filenames in os.walk(dir_name):
for filename in filenames:
zf.write(os.path.join(dirpath, filename))
print dir_name
|
liorvh/infernal-twin | refs/heads/master | build/pip/pip/_vendor/progress/__init__.py | 916 | # Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import division
from collections import deque
from datetime import timedelta
from math import ceil
from sys import stderr
from time import time
__version__ = '1.2'
class Infinite(object):
file = stderr
sma_window = 10
def __init__(self, *args, **kwargs):
self.index = 0
self.start_ts = time()
self._ts = self.start_ts
self._dt = deque(maxlen=self.sma_window)
for key, val in kwargs.items():
setattr(self, key, val)
def __getitem__(self, key):
if key.startswith('_'):
return None
return getattr(self, key, None)
@property
def avg(self):
return sum(self._dt) / len(self._dt) if self._dt else 0
@property
def elapsed(self):
return int(time() - self.start_ts)
@property
def elapsed_td(self):
return timedelta(seconds=self.elapsed)
def update(self):
pass
def start(self):
pass
def finish(self):
pass
def next(self, n=1):
if n > 0:
now = time()
dt = (now - self._ts) / n
self._dt.append(dt)
self._ts = now
self.index = self.index + n
self.update()
def iter(self, it):
for x in it:
yield x
self.next()
self.finish()
class Progress(Infinite):
def __init__(self, *args, **kwargs):
super(Progress, self).__init__(*args, **kwargs)
self.max = kwargs.get('max', 100)
@property
def eta(self):
return int(ceil(self.avg * self.remaining))
@property
def eta_td(self):
return timedelta(seconds=self.eta)
@property
def percent(self):
return self.progress * 100
@property
def progress(self):
return min(1, self.index / self.max)
@property
def remaining(self):
return max(self.max - self.index, 0)
def start(self):
self.update()
def goto(self, index):
incr = index - self.index
self.next(incr)
def iter(self, it):
try:
self.max = len(it)
except TypeError:
pass
for x in it:
yield x
self.next()
self.finish()
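# Minimal usage sketch (illustrative only; "Counter" is a hypothetical
# subclass, not part of this module):
#
#   class Counter(Progress):
#       def update(self):
#           print('%d/%d, eta %ds' % (self.index, self.max, self.eta))
#
#   for _ in Counter(max=20).iter(range(20)):
#       pass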
|
Vishluck/sympy | refs/heads/master | sympy/combinatorics/named_groups.py | 41 | from __future__ import print_function, division
from sympy.core.compatibility import range
from sympy.combinatorics.perm_groups import PermutationGroup
from sympy.combinatorics.group_constructs import DirectProduct
from sympy.combinatorics.permutations import Permutation
_af_new = Permutation._af_new
def AbelianGroup(*cyclic_orders):
"""
Returns the direct product of cyclic groups with the given orders.
According to the structure theorem for finite abelian groups ([1]),
every finite abelian group can be written as the direct product of finitely
many cyclic groups.
[1] http://groupprops.subwiki.org/wiki/Structure_theorem_for_finitely_generated_abelian_groups
Examples
========
>>> from sympy.combinatorics import Permutation
>>> Permutation.print_cyclic = True
>>> from sympy.combinatorics.named_groups import AbelianGroup
>>> AbelianGroup(3, 4)
PermutationGroup([
Permutation(6)(0, 1, 2),
Permutation(3, 4, 5, 6)])
>>> _.is_group()
False
See Also
========
DirectProduct
"""
groups = []
degree = 0
order = 1
for size in cyclic_orders:
degree += size
order *= size
groups.append(CyclicGroup(size))
G = DirectProduct(*groups)
G._is_abelian = True
G._degree = degree
G._order = order
return G
def AlternatingGroup(n):
"""
Generates the alternating group on ``n`` elements as a permutation group.
For ``n > 2``, the generators taken are ``(0 1 2), (0 1 2 ... n-1)`` for
``n`` odd
and ``(0 1 2), (1 2 ... n-1)`` for ``n`` even (See [1], p.31, ex.6.9.).
After the group is generated, some of its basic properties are set.
The cases ``n = 1, 2`` are handled separately.
Examples
========
>>> from sympy.combinatorics.named_groups import AlternatingGroup
>>> G = AlternatingGroup(4)
>>> G.is_group()
False
>>> a = list(G.generate_dimino())
>>> len(a)
12
>>> all(perm.is_even for perm in a)
True
See Also
========
SymmetricGroup, CyclicGroup, DihedralGroup
References
==========
[1] Armstrong, M. "Groups and Symmetry"
"""
# small cases are special
if n in (1, 2):
return PermutationGroup([Permutation([0])])
a = list(range(n))
a[0], a[1], a[2] = a[1], a[2], a[0]
gen1 = a
if n % 2:
a = list(range(1, n))
a.append(0)
gen2 = a
else:
a = list(range(2, n))
a.append(1)
a.insert(0, 0)
gen2 = a
gens = [gen1, gen2]
if gen1 == gen2:
gens = gens[:1]
G = PermutationGroup([_af_new(a) for a in gens], dups=False)
if n < 4:
G._is_abelian = True
G._is_nilpotent = True
else:
G._is_abelian = False
G._is_nilpotent = False
if n < 5:
G._is_solvable = True
else:
G._is_solvable = False
G._degree = n
G._is_transitive = True
G._is_alt = True
return G
def CyclicGroup(n):
"""
Generates the cyclic group of order ``n`` as a permutation group.
The generator taken is the ``n``-cycle ``(0 1 2 ... n-1)``
(in cycle notation). After the group is generated, some of its basic
properties are set.
Examples
========
>>> from sympy.combinatorics.named_groups import CyclicGroup
>>> G = CyclicGroup(6)
>>> G.is_group()
False
>>> G.order()
6
>>> list(G.generate_schreier_sims(af=True))
[[0, 1, 2, 3, 4, 5], [1, 2, 3, 4, 5, 0], [2, 3, 4, 5, 0, 1],
[3, 4, 5, 0, 1, 2], [4, 5, 0, 1, 2, 3], [5, 0, 1, 2, 3, 4]]
See Also
========
SymmetricGroup, DihedralGroup, AlternatingGroup
"""
a = list(range(1, n))
a.append(0)
gen = _af_new(a)
G = PermutationGroup([gen])
G._is_abelian = True
G._is_nilpotent = True
G._is_solvable = True
G._degree = n
G._is_transitive = True
G._order = n
return G
def DihedralGroup(n):
r"""
Generates the dihedral group `D_n` as a permutation group.
The dihedral group `D_n` is the group of symmetries of the regular
``n``-gon. The generators taken are the ``n``-cycle ``a = (0 1 2 ... n-1)``
(a rotation of the ``n``-gon) and ``b = (0 n-1)(1 n-2)...``
    (a reflection of the ``n``-gon) in cycle notation. It is easy to see that
these satisfy ``a**n = b**2 = 1`` and ``bab = ~a`` so they indeed generate
`D_n` (See [1]). After the group is generated, some of its basic properties
are set.
Examples
========
>>> from sympy.combinatorics.named_groups import DihedralGroup
>>> G = DihedralGroup(5)
>>> G.is_group()
False
>>> a = list(G.generate_dimino())
>>> [perm.cyclic_form for perm in a]
[[], [[0, 1, 2, 3, 4]], [[0, 2, 4, 1, 3]],
[[0, 3, 1, 4, 2]], [[0, 4, 3, 2, 1]], [[0, 4], [1, 3]],
[[1, 4], [2, 3]], [[0, 1], [2, 4]], [[0, 2], [3, 4]],
[[0, 3], [1, 2]]]
See Also
========
SymmetricGroup, CyclicGroup, AlternatingGroup
References
==========
[1] http://en.wikipedia.org/wiki/Dihedral_group
"""
# small cases are special
if n == 1:
return PermutationGroup([Permutation([1, 0])])
if n == 2:
return PermutationGroup([Permutation([1, 0, 3, 2]),
Permutation([2, 3, 0, 1]), Permutation([3, 2, 1, 0])])
a = list(range(1, n))
a.append(0)
gen1 = _af_new(a)
a = list(range(n))
a.reverse()
gen2 = _af_new(a)
G = PermutationGroup([gen1, gen2])
# if n is a power of 2, group is nilpotent
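    # (n & (n - 1) == 0 is the usual bitwise test for "n is a power of two")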
if n & (n-1) == 0:
G._is_nilpotent = True
else:
G._is_nilpotent = False
G._is_abelian = False
G._is_solvable = True
G._degree = n
G._is_transitive = True
G._order = 2*n
return G
def SymmetricGroup(n):
"""
Generates the symmetric group on ``n`` elements as a permutation group.
The generators taken are the ``n``-cycle
``(0 1 2 ... n-1)`` and the transposition ``(0 1)`` (in cycle notation).
(See [1]). After the group is generated, some of its basic properties
are set.
Examples
========
>>> from sympy.combinatorics.named_groups import SymmetricGroup
>>> G = SymmetricGroup(4)
>>> G.is_group()
False
>>> G.order()
24
>>> list(G.generate_schreier_sims(af=True))
[[0, 1, 2, 3], [1, 2, 3, 0], [2, 3, 0, 1], [3, 1, 2, 0], [0, 2, 3, 1],
[1, 3, 0, 2], [2, 0, 1, 3], [3, 2, 0, 1], [0, 3, 1, 2], [1, 0, 2, 3],
[2, 1, 3, 0], [3, 0, 1, 2], [0, 1, 3, 2], [1, 2, 0, 3], [2, 3, 1, 0],
[3, 1, 0, 2], [0, 2, 1, 3], [1, 3, 2, 0], [2, 0, 3, 1], [3, 2, 1, 0],
[0, 3, 2, 1], [1, 0, 3, 2], [2, 1, 0, 3], [3, 0, 2, 1]]
See Also
========
CyclicGroup, DihedralGroup, AlternatingGroup
References
==========
[1] http://en.wikipedia.org/wiki/Symmetric_group#Generators_and_relations
"""
if n == 1:
G = PermutationGroup([Permutation([0])])
elif n == 2:
G = PermutationGroup([Permutation([1, 0])])
else:
a = list(range(1, n))
a.append(0)
gen1 = _af_new(a)
a = list(range(n))
a[0], a[1] = a[1], a[0]
gen2 = _af_new(a)
G = PermutationGroup([gen1, gen2])
if n < 3:
G._is_abelian = True
G._is_nilpotent = True
else:
G._is_abelian = False
G._is_nilpotent = False
if n < 5:
G._is_solvable = True
else:
G._is_solvable = False
G._degree = n
G._is_transitive = True
G._is_sym = True
return G
def RubikGroup(n):
"""Return a group of Rubik's cube generators
>>> from sympy.combinatorics.named_groups import RubikGroup
>>> RubikGroup(2).is_group()
False
"""
from sympy.combinatorics.generators import rubik
if n <= 1:
        raise ValueError("Invalid cube. n has to be greater than 1")
return PermutationGroup(rubik(n))
|
pap/nupic | refs/heads/master | tests/integration/nupic/algorithms/tp_test.py | 34 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file performs a variety of tests on the reference temporal pooler code.
basic_test
==========
Tests creation and serialization of the TP class. Sets parameters and ensures
they are the same after a serialization and de-serialization step. Runs learning
and inference on a small number of random patterns and ensures it doesn't crash.
===============================================================================
Basic First Order Sequences
===============================================================================
These tests ensure the most basic (first order) sequence learning mechanism is
working.
Parameters: Use a "fast learning mode": turn off global decay, temporal pooling
and hilo (make minThreshold really high). initPerm should be greater than
connectedPerm and permanenceDec should be zero. With these settings sequences
should be learned in one pass:
minThreshold = newSynapseCount
globalDecay = 0
temporalPooling = False
initialPerm = 0.8
connectedPerm = 0.7
permanenceDec = 0
permanenceInc = 0.4
Other Parameters:
numCols = 100
cellsPerCol = 1
newSynapseCount=11
activationThreshold = 8
permanenceMax = 1
Note: this is not a high order sequence, so one cell per column is fine.
Input Sequence: We train with M input sequences, each consisting of N random
patterns. Each pattern consists of a random number of bits on. The number of 1's
in each pattern should be between 21 and 25 columns. The sequences are
constructed so that consecutive patterns within a sequence don't share any
columns.
Training: The TP is trained with P passes of the M sequences. There
should be a reset between sequences. The total number of iterations during
training is P*N*M.
Testing: Run inference through the same set of sequences, with a reset before
each sequence. For each sequence the system should accurately predict the
pattern at the next time step up to and including the N-1'st pattern. A perfect
prediction consists of getting every column correct in the prediction, with no
extra columns. We report the number of columns that are incorrect and report a
failure if more than 2 columns are incorrectly predicted.
We can also calculate the number of segments and synapses that should be
learned. We raise an error if too many or too few were learned.
B1) Basic sequence learner. M=1, N=100, P=1.
B2) Same as above, except P=2. Test that permanences go up and that no
additional synapses or segments are learned.
B3) N=300, M=1, P=1. (See how high we can go with M)
B4) N=100, M=3, P=1 (See how high we can go with N*M)
B5) Like B1) but only have newSynapseCount columns ON in each pattern (instead of
between 21 and 25), and set activationThreshold to newSynapseCount.
B6) Like B1 but with cellsPerCol = 4. First order sequences should still work
just fine.
B7) Like B1 but with slower learning. Set the following parameters differently:
activationThreshold = newSynapseCount
minThreshold = activationThreshold
initialPerm = 0.2
connectedPerm = 0.7
permanenceInc = 0.2
Now we train the TP with the B1 sequence 4 times (P=4). This will increment
the permanences to be above 0.8 and at that point the inference will be correct.
This test will ensure the basic match function and segment activation rules are
working correctly.
B8) Like B7 but with 4 cells per column. Should still work.
B9) Like B7 but present the sequence less than 4 times: the inference should be
incorrect.
B10) Like B2, except that cells per column = 4. Should still add zero additional
synapses.
===============================================================================
High Order Sequences
===============================================================================
These tests ensure that high order sequences can be learned in a multiple cells
per column instantiation.
Parameters: Same as Basic First Order Tests above, but with varying cells per
column.
Input Sequence: We train with M input sequences, each consisting of N random
patterns. Each pattern consists of a random number of bits on. The number of 1's
in each pattern should be between 21 and 25 columns (except for H0). The
sequences are constructed so that consecutive patterns within a sequence don't
share any columns. The sequences are constructed to contain shared subsequences,
such as:
A B C D E F G H I J
K L M D E F N O P Q
The position and length of shared subsequences are parameters in the tests.
Training: Identical to basic first order tests above.
Testing: Identical to basic first order tests above unless noted.
We can also calculate the number of segments and synapses that should be
learned. We raise an error if too many or too few were learned.
H0) Two simple high order sequences, each of length 7, with a shared
subsequence in positions 2-4. Each pattern has a consecutive set of 5 bits on.
No pattern shares any columns with the others. These sequences are easy to
visualize and is very useful for debugging.
H1) Learn two sequences with a short shared pattern. Parameters
should be the same as B1. This test will FAIL since cellsPerCol == 1. No
consecutive patterns share any column.
H2) As above but with cellsPerCol == 4. This test should PASS. No consecutive
patterns share any column.
H2a) Same as above, except P=2. Test that permanences go up and that no
additional synapses or segments are learned.
H3) Same parameters as H.2 except sequences are created such that they share a
single significant sub-sequence. Subsequences should be reasonably long and in
the middle of sequences. No consecutive patterns share any column.
H4) Like H.3, except the shared subsequence is in the beginning. (e.g.
"ABCDEF" and "ABCGHIJ". At the point where the shared subsequence ends, all
possible next patterns should be predicted. As soon as you see the first unique
pattern, the predictions should collapse to be a perfect prediction.
H5) Shared patterns. Similar to H3 except that patterns are shared between
sequences. All sequences are different shufflings of the same set of N
patterns (there is no shared subsequence). Care should be taken such that the
same three patterns never follow one another in two sequences.
H6) Combination of H5) and H3). Shared patterns in different sequences, with a
shared subsequence.
H7) Stress test: every other pattern is shared. [Unimplemented]
H8) Start predicting in the middle of a sequence. [Unimplemented]
H9) Hub capacity. How many patterns can use that hub?
[Implemented, but does not run by default.]
H10) Sensitivity to small amounts of noise during inference. [Unimplemented]
H11) Higher order patterns with alternating elements.
Create the following 4 sequences:
A B A B A C
A B A B D E
A B F G H I
A J K L M N
After training we should verify that the expected transitions are in the
model. Prediction accuracy should be perfect. In addition, during inference,
after the first element is presented, the columns should not burst any more.
Need to verify, for the first sequence, that the high order representation
when presented with the second A and B is different from the representation
in the first presentation.
===============================================================================
Temporal Pooling Tests [UNIMPLEMENTED]
===============================================================================
Parameters: Use a "fast learning mode": With these settings sequences should be
learned in one pass:
minThreshold = newSynapseCount
globalDecay = 0
initialPerm = 0.8
connectedPerm = 0.7
permanenceDec = 0
permanenceInc = 0.4
Other Parameters:
cellsPerCol = 4
newSynapseCount=11
activationThreshold = 11
permanenceMax = 1
doPooling = True
Input Sequence: We train with M input sequences, each consisting of N random
patterns. Each pattern consists of a random number of bits on. The number of 1's
in each pattern should be between 17 and 21 columns. The sequences are
constructed so that consecutive patterns within a sequence don't share any
columns.
Note: for pooling tests the density of input patterns should be pretty low
since each pooling step increases the output density. At the same time, we need
enough bits on in the input for the temporal pooler to find enough synapses. So,
for the tests, constraints should be something like:
(Input Density) * (Number of pooling steps) < 25 %.
AND
sum(Input) > newSynapseCount*1.5
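For example, with numCols around 350 and 17-21 bits on, the input density is
roughly 5-6%, so three pooling steps stay under about 18% (< 25%), while 17
active bits still exceeds newSynapseCount*1.5 = 16.5.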
Training: The TP is trained with P passes of the M sequences. There
should be a reset between sequences. The total number of iterations during
training is P*N*M.
Testing: Run inference through the same set of sequences, with a reset before
each sequence. For each sequence the system should accurately predict the
pattern at the next P time steps, up to and including the N-P'th pattern. A
perfect prediction consists of getting every column correct in the prediction,
with no extra columns. We report the number of columns that are incorrect and
report a failure if more than 2 columns are incorrectly predicted.
P1) Train the TP two times (P=2) on a single long sequence consisting of random
patterns (N=20, M=1). There should be no overlapping columns between successive
patterns. During inference, the TP should be able to reliably predict the pattern
two time steps in advance. numCols should be about 350 to meet the above
constraints and also to maintain consistency with test P2.
P2) Increase TP rate to 3 time steps in advance (P=3). At each step during
inference, the TP should be able to reliably predict the pattern coming up at
t+1, t+2, and t+3.
P3) Set segUpdateValidDuration to 2 and set P=3. This should behave almost
identically to P1. It should only predict the next time step correctly and not
two time steps in advance. (Check off by one error in this logic.)
P4) As above, but with multiple sequences.
P5) Same as P3 but with shared subsequences.
Continuous mode tests
=====================
Slow changing inputs.
Orphan Decay Tests
==================
HiLo Tests
==========
A high order sequence memory like the TP can memorize very long sequences. In
many applications though you don't want to memorize. You see a long sequence of
patterns but there are actually lower order repeating sequences embedded within
it. A simplistic example is words in a sentence. Words such as You'd like the TP to learn those sequences.
Tests should capture number of synapses learned and compare against
theoretically optimal numbers to pass/fail.
HL0a) For debugging, similar to H0. We want to learn a 3 pattern long sequence presented
with noise before and after, with no resets. Two steps of noise will be presented.
The noise will be 20 patterns, presented in random order. Every pattern has a
consecutive set of 5 bits on, so the vector will be 115 bits long. No pattern
shares any columns with the others. These sequences are easy to visualize and is
very useful for debugging.
TP parameters should be the same as B7 except that permanenceDec should be 0.05:
activationThreshold = newSynapseCount
minThreshold = activationThreshold
initialPerm = 0.2
connectedPerm = 0.7
permanenceInc = 0.2
permanenceDec = 0.05
So, this means it should learn a sequence after 4 repetitions. It will take
4 orphan decay steps to get an incorrect synapse to go away completely.
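(Concretely: a synapse created at initialPerm 0.2 and incremented by 0.2 on each
later repetition crosses connectedPerm 0.7 by the fourth presentation, while an
unreinforced synapse at 0.2 needs 0.2/0.05 = 4 decay steps to disappear.)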
HL0b) Like HL0a, but after the 3-sequence is learned, try to learn a 4-sequence that
builds on the 3-sequence. For example, if learning A-B-C we train also on
D-A-B-C. It should learn that ABC is separate from DABC. Note: currently this
test is disabled in the code. It is a bit tricky to test this. When you present DAB,
you should predict the same columns as when you present AB (i.e. in both cases
C should be predicted). However, the representation for C in DABC should be
different than the representation for C in ABC. Furthermore, when you present
AB, the representation for C should be an OR of the representation in DABC and ABC
since you could also be starting in the middle of the DABC sequence. All this is
actually happening in the code, but verified by visual inspection only.
HL1) Noise + sequence + noise + sequence repeatedly without resets until it has
learned that sequence. Train the TP repeatedly with N random sequences that all
share a single subsequence. Each random sequence can be 10 patterns long,
sharing a subsequence that is 5 patterns long. There should be no resets
between presentations. Inference should then be on that 5 long shared subsequence.
Example (3-long shared subsequence):
A B C D E F G H I J
K L M D E F N O P Q
R S T D E F U V W X
Y Z 1 D E F 2 3 4 5
TP parameters should be the same as HL0.
HL2) Like HL1, but after A B C has learned, try to learn D A B C . It should learn
ABC is separate from DABC.
HL3) Like HL2, but test with resets.
HL4) Like HL1 but with minThreshold high. This should FAIL and learn a ton
of synapses.
HiLo but with true high order sequences embedded in noise
Present 25 sequences in random order with no resets but noise between
sequences (1-20 samples). Learn all 25 sequences. Test global decay vs non-zero
permanenceDec .
Pooling + HiLo Tests [UNIMPLEMENTED]
====================
Needs to be defined.
Global Decay Tests [UNIMPLEMENTED]
==================
Simple tests to ensure global decay is actually working.
Sequence Likelihood Tests
=========================
These tests are in the file TPLikelihood.py
Segment Learning Tests [UNIMPLEMENTED]
======================
Multi-attribute sequence tests.
SL1) Train the TP repeatedly using a single (multiple) sequence plus noise. The
sequence can be relatively short, say 20 patterns. No two consecutive patterns
in the sequence should share columns. Add random noise each time a pattern is
presented. The noise should be different for each presentation and can be equal
to the number of on bits in the pattern. After N iterations of the noisy
sequences, the TP should achieve perfect inference on the true sequence.
There should be resets between each presentation of the sequence.
Check predictions in the sequence only. And test with clean sequences.
Vary percentage of bits that are signal vs noise.
Noise can be a fixed alphabet instead of being randomly generated.
HL2) As above, but with no resets.
Shared Column Tests [UNIMPLEMENTED]
===================
Carefully test what happens when consecutive patterns in a sequence share
columns.
Sequence Noise Tests [UNIMPLEMENTED]
====================
Note: I don't think these will work with the current logic. Need to discuss
whether we want to accommodate sequence noise like this.
SN1) Learn sequence with pooling up to T timesteps. Run inference on a sequence
and occasionally drop elements of a sequence. Inference should still work.
SN2) As above, but occasionally add a random pattern into a sequence.
SN3) A combination of the above two.
Capacity Tests [UNIMPLEMENTED]
==============
These are stress tests that verify that the temporal pooler can learn a large
number of sequences and can predict a large number of possible next steps. Some
research needs to be done first to understand the capacity of the system as it
relates to the number of columns, cells per column, etc.
Token Prediction Tests: Test how many predictions of individual tokens we can
superimpose and still recover.
Online Learning Tests [UNIMPLEMENTED]
=====================
These tests will verify that the temporal pooler continues to work even if
sequence statistics (and the actual sequences) change slowly over time. The TP
should adapt to the changes and learn to recognize newer sequences (and forget
the older sequences?).
"""
import random
import numpy
from numpy import *
import sys
import pickle
import cPickle
import pprint
from nupic.research.TP import TP
from nupic.research.TP10X2 import TP10X2
from nupic.research import fdrutilities as fdrutils
#---------------------------------------------------------------------------------
TEST_CPP_TP = 1 # temporarily disabled until it can be updated
VERBOSITY = 0 # how chatty the unit tests should be
SEED = 33 # the random seed used throughout
TPClass = TP
checkSynapseConsistency = False
rgen = numpy.random.RandomState(SEED) # always call this rgen, NOT random
#---------------------------------------------------------------------------------
# Helper routines
#--------------------------------------------------------------------------------
def printOneTrainingVector(x):
print ''.join('1' if k != 0 else '.' for k in x)
def printAllTrainingSequences(trainingSequences, upTo = 99999):
for t in xrange(min(len(trainingSequences[0]), upTo)):
print 't=',t,
for i,trainingSequence in enumerate(trainingSequences):
print "\tseq#",i,'\t',
printOneTrainingVector(trainingSequences[i][t])
def generatePattern(numCols = 100,
minOnes =21,
maxOnes =25,
colSet = [],
prevPattern =numpy.array([])):
"""Generate a single test pattern with given parameters.
Parameters:
--------------------------------------------
numCols: Number of columns in each pattern.
minOnes: The minimum number of 1's in each pattern.
maxOnes: The maximum number of 1's in each pattern.
colSet: The set of column indices for the pattern.
prevPattern: Pattern to avoid (null intersection).
"""
assert minOnes < maxOnes
assert maxOnes < numCols
nOnes = rgen.randint(minOnes, maxOnes)
candidates = list(colSet.difference(set(prevPattern.nonzero()[0])))
rgen.shuffle(candidates)
ind = candidates[:nOnes]
x = numpy.zeros(numCols, dtype='float32')
x[ind] = 1
return x
def buildTrainingSet(numSequences = 2,
sequenceLength = 100,
pctShared = 0.2,
seqGenMode = 'shared sequence',
subsequenceStartPos = 10,
numCols = 100,
minOnes=21,
maxOnes = 25,
disjointConsecutive =True):
"""Build random high order test sequences.
Parameters:
--------------------------------------------
numSequences: The number of sequences created.
sequenceLength: The length of each sequence.
pctShared: The percentage of sequenceLength that is shared across
every sequence. If sequenceLength is 100 and pctShared
is 0.2, then a subsequence consisting of 20 patterns
will be in every sequence. Can also be the keyword
'one pattern', in which case a single time step is shared.
seqGenMode: What kind of sequence to generate. If contains 'shared'
generates shared subsequence. If contains 'no shared',
does not generate any shared subsequence. If contains
'shuffle', will use common patterns shuffle among the
different sequences. If contains 'beginning', will
place shared subsequence at the beginning.
subsequenceStartPos: The position where the shared subsequence starts
numCols: Number of columns in each pattern.
minOnes: The minimum number of 1's in each pattern.
maxOnes: The maximum number of 1's in each pattern.
disjointConsecutive: Whether to generate disjoint consecutive patterns or not.
"""
# Calculate the set of column indexes once to be used in each call to generatePattern()
colSet = set(range(numCols))
if 'beginning' in seqGenMode:
assert 'shared' in seqGenMode and 'no shared' not in seqGenMode
if 'no shared' in seqGenMode or numSequences == 1:
pctShared = 0.0
#--------------------------------------------------------------------------------
# Build shared subsequence
if 'no shared' not in seqGenMode and 'one pattern' not in seqGenMode:
sharedSequenceLength = int(pctShared*sequenceLength)
elif 'one pattern' in seqGenMode:
sharedSequenceLength = 1
else:
sharedSequenceLength = 0
assert sharedSequenceLength + subsequenceStartPos < sequenceLength
sharedSequence = []
for i in xrange(sharedSequenceLength):
if disjointConsecutive and i > 0:
x = generatePattern(numCols, minOnes, maxOnes, colSet, sharedSequence[i-1])
else:
x = generatePattern(numCols, minOnes, maxOnes, colSet)
sharedSequence.append(x)
#--------------------------------------------------------------------------------
# Build random training set, splicing in the shared subsequence
trainingSequences = []
if 'beginning' not in seqGenMode:
trailingLength = sequenceLength - sharedSequenceLength - subsequenceStartPos
else:
trailingLength = sequenceLength - sharedSequenceLength
for k,s in enumerate(xrange(numSequences)):
# TODO: implement no repetitions
if len(trainingSequences) > 0 and 'shuffle' in seqGenMode:
r = range(subsequenceStartPos) \
+ range(subsequenceStartPos + sharedSequenceLength, sequenceLength)
rgen.shuffle(r)
r = r[:subsequenceStartPos] \
+ range(subsequenceStartPos, subsequenceStartPos + sharedSequenceLength) \
+ r[subsequenceStartPos:]
sequence = [trainingSequences[k-1][j] for j in r]
else:
sequence = []
if 'beginning' not in seqGenMode:
for i in xrange(subsequenceStartPos):
if disjointConsecutive and i > 0:
x = generatePattern(numCols, minOnes, maxOnes, colSet, sequence[i-1])
else:
x = generatePattern(numCols, minOnes, maxOnes, colSet)
sequence.append(x)
if 'shared' in seqGenMode and 'no shared' not in seqGenMode:
sequence.extend(sharedSequence)
for i in xrange(trailingLength):
if disjointConsecutive and i > 0:
x = generatePattern(numCols, minOnes, maxOnes, colSet, sequence[i-1])
else:
x = generatePattern(numCols, minOnes, maxOnes, colSet)
sequence.append(x)
assert len(sequence) == sequenceLength
trainingSequences.append(sequence)
assert len(trainingSequences) == numSequences
if VERBOSITY >= 2:
print "Training Sequences"
pprint.pprint(trainingSequences)
if sharedSequenceLength > 0:
return (trainingSequences, subsequenceStartPos + sharedSequenceLength)
else:
return (trainingSequences, -1)
def getSimplePatterns(numOnes, numPatterns):
"""Very simple patterns. Each pattern has numOnes consecutive
bits on. There are numPatterns*numOnes bits in the vector."""
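  # For example, getSimplePatterns(2, 3) returns three 6-bit vectors:
  # [1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0] and [0, 0, 0, 0, 1, 1].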
numCols = numOnes * numPatterns
p = []
for i in xrange(numPatterns):
x = numpy.zeros(numCols, dtype='float32')
x[i*numOnes:(i+1)*numOnes] = 1
p.append(x)
return p
def buildSimpleTrainingSet(numOnes=5):
"""Two very simple high order sequences for debugging. Each pattern in the
sequence has a series of 1's in a specific set of columns."""
numPatterns = 11
p = getSimplePatterns(numOnes, numPatterns)
s1 = [p[0], p[1], p[2], p[3], p[4], p[5], p[6] ]
s2 = [p[7], p[8], p[2], p[3], p[4], p[9], p[10]]
trainingSequences = [s1, s2]
return (trainingSequences, 5)
def buildAlternatingTrainingSet(numOnes=5):
"""High order sequences that alternate elements. Pattern i has one's in
i*numOnes to (i+1)*numOnes.
The sequences are:
A B A B A C
A B A B D E
A B F G H I
A J K L M N
"""
numPatterns = 14
p = getSimplePatterns(numOnes, numPatterns)
s1 = [p[0], p[1], p[0], p[1], p[0], p[2]]
s2 = [p[0], p[1], p[0], p[1], p[3], p[4]]
s3 = [p[0], p[1], p[5], p[6], p[7], p[8]]
s4 = [p[0], p[9], p[10], p[11], p[12], p[13]]
trainingSequences = [s1, s2, s3, s4]
return (trainingSequences, 5)
def buildHL0aTrainingSet(numOnes=5):
"""Simple sequences for HL0. Each pattern in the sequence has a series of 1's
in a specific set of columns.
There are 23 patterns, p0 to p22.
The sequence we want to learn is p0->p1->p2
We create a very long sequence consisting of N N p0 p1 p2 N N p0 p1 p2
N is randomly chosen from p3 to p22
"""
numPatterns = 23
p = getSimplePatterns(numOnes, numPatterns)
s = []
s.append(p[rgen.randint(3,23)])
for i in xrange(20):
s.append(p[rgen.randint(3,23)])
s.append(p[0])
s.append(p[1])
s.append(p[2])
s.append(p[rgen.randint(3,23)])
return ([s], [[p[0], p[1], p[2]]])
def buildHL0bTrainingSet(numOnes=5):
"""Simple sequences for HL0b. Each pattern in the sequence has a series of 1's
in a specific set of columns.
There are 23 patterns, p0 to p22.
The sequences we want to learn are p1->p2->p3 and p0->p1->p2->p4.
We create a very long sequence consisting of these two sub-sequences
intermixed with noise, such as:
N N p0 p1 p2 p4 N N p1 p2 p3 N N p1 p2 p3
N is randomly chosen from p5 to p22
"""
numPatterns = 23
p = getSimplePatterns(numOnes, numPatterns)
s = []
s.append(p[rgen.randint(5,numPatterns)])
for i in xrange(50):
r = rgen.randint(5,numPatterns)
print r,
s.append(p[r])
if rgen.binomial(1, 0.5) > 0:
print "S1",
s.append(p[0])
s.append(p[1])
s.append(p[2])
s.append(p[4])
else:
print "S2",
s.append(p[1])
s.append(p[2])
s.append(p[3])
r = rgen.randint(5,numPatterns)
s.append(p[r])
print r,
print
return ([s], [ [p[0], p[1], p[2], p[4]], [p[1], p[2], p[3]] ])
# Basic test (creation, pickling, basic run of learning and inference)
def basicTest():
global TPClass, SEED, VERBOSITY, checkSynapseConsistency
#--------------------------------------------------------------------------------
# Create TP object
numberOfCols =10
cellsPerColumn =3
initialPerm =.2
connectedPerm =.8
minThreshold =2
newSynapseCount =5
permanenceInc =.1
permanenceDec =.05
permanenceMax =1
globalDecay =.05
activationThreshold =4 # low for those basic tests on purpose
doPooling =True
segUpdateValidDuration =5
seed =SEED
verbosity =VERBOSITY
tp = TPClass(numberOfCols, cellsPerColumn,
initialPerm, connectedPerm,
minThreshold, newSynapseCount,
permanenceInc, permanenceDec, permanenceMax,
globalDecay, activationThreshold,
doPooling, segUpdateValidDuration,
seed=seed, verbosity=verbosity,
pamLength = 1000,
checkSynapseConsistency=checkSynapseConsistency)
print "Creation ok"
#--------------------------------------------------------------------------------
# Save and reload
pickle.dump(tp, open("test_tp.pkl", "wb"))
tp2 = pickle.load(open("test_tp.pkl"))
assert tp2.numberOfCols == numberOfCols
assert tp2.cellsPerColumn == cellsPerColumn
print tp2.initialPerm
assert tp2.initialPerm == numpy.float32(.2)
assert tp2.connectedPerm == numpy.float32(.8)
assert tp2.minThreshold == minThreshold
assert tp2.newSynapseCount == newSynapseCount
assert tp2.permanenceInc == numpy.float32(.1)
assert tp2.permanenceDec == numpy.float32(.05)
assert tp2.permanenceMax == 1
assert tp2.globalDecay == numpy.float32(.05)
assert tp2.activationThreshold == activationThreshold
assert tp2.doPooling == doPooling
assert tp2.segUpdateValidDuration == segUpdateValidDuration
assert tp2.seed == SEED
assert tp2.verbosity == verbosity
print "Save/load ok"
#--------------------------------------------------------------------------------
# Learn
for i in xrange(5):
xi = rgen.randint(0,2,(numberOfCols))
x = numpy.array(xi, dtype="uint32")
y = tp.learn(x)
#--------------------------------------------------------------------------------
# Infer
patterns = rgen.randint(0,2,(4,numberOfCols))
for i in xrange(10):
xi = rgen.randint(0,2,(numberOfCols))
x = numpy.array(xi, dtype="uint32")
y = tp.infer(x)
if i > 0:
p = tp.checkPrediction2([pattern.nonzero()[0] for pattern in patterns])
print "basicTest ok"
#---------------------------------------------------------------------------------
# Figure out acceptable patterns if none were passed to us.
def findAcceptablePatterns(tp, t, whichSequence, trainingSequences, nAcceptable = 1):
"""
Tries to infer the set of acceptable patterns for prediction at the given
  time step and for the given sequence. Acceptable patterns are: the current one,
plus a certain number of patterns after timeStep, in the sequence that the TP
is currently tracking. Any other pattern is not acceptable.
TODO:
====
- Doesn't work for noise cases.
  - Might run into trouble if the shared subsequence is at the beginning.
Parameters:
==========
tp the whole TP, so that we can look at its parameters
t the current time step
whichSequence the sequence we are currently tracking
trainingSequences all the training sequences
nAcceptable the number of steps forward from the current timeStep
we are willing to consider acceptable. In the case of
pooling, it is less than or equal to the min of the
number of training reps and the segUpdateValidDuration
parameter of the TP, depending on the test case.
The default value is 1, because by default, the pattern
after the current one should always be predictable.
Return value:
============
acceptablePatterns A list of acceptable patterns for prediction.
"""
# Determine how many steps forward we want to see in the prediction
upTo = t + 2 # always predict current and next
# If the TP is pooling, more steps can be predicted
if tp.doPooling:
upTo += min(tp.segUpdateValidDuration, nAcceptable)
assert upTo <= len(trainingSequences[whichSequence])
acceptablePatterns = []
# Check whether we were in a shared subsequence at the beginning.
# If so, at the point of exiting the shared subsequence (t), we should
# be predicting multiple patterns for 1 time step, then collapse back
# to a single sequence.
if len(trainingSequences) == 2 and \
(trainingSequences[0][0] == trainingSequences[1][0]).all():
if (trainingSequences[0][t] == trainingSequences[1][t]).all() \
and (trainingSequences[0][t+1] != trainingSequences[1][t+1]).any():
acceptablePatterns.append(trainingSequences[0][t+1])
acceptablePatterns.append(trainingSequences[1][t+1])
# Add patterns going forward
acceptablePatterns += [trainingSequences[whichSequence][t] \
for t in xrange(t,upTo)]
return acceptablePatterns
def _testSequence(trainingSequences,
nTrainingReps = 1,
numberOfCols = 40,
cellsPerColumn =5,
initialPerm =.8,
connectedPerm =.7,
minThreshold = 11,
newSynapseCount =5,
permanenceInc =.4,
permanenceDec =0.0,
permanenceMax =1,
globalDecay =0.0,
pamLength = 1000,
activationThreshold =5,
acceptablePatterns = [], # if empty, try to infer what they are
doPooling = False,
nAcceptable = -1, # if doPooling, number of acceptable steps
noiseModel = None,
noiseLevel = 0,
doResets = True,
shouldFail = False,
testSequences = None,
predJustAfterHubOnly = None,
compareToPy = False,
nMultiStepPrediction = 0,
highOrder = False):
"""Test a single set of sequences once and return the number of
prediction failures, the number of errors, and the number of perfect
predictions"""
global TP, SEED, checkSynapseConsistency, VERBOSITY
numPerfect = 0 # When every column is correct in the prediction
numStrictErrors = 0 # When at least one column is incorrect
numFailures = 0 # When > 2 columns are incorrect
sequenceLength = len(trainingSequences[0])
segUpdateValidDuration =5
verbosity = VERBOSITY
  # override default maxSeqLength value for high-order sequences
if highOrder:
tp = TPClass(numberOfCols, cellsPerColumn,
initialPerm, connectedPerm,
minThreshold, newSynapseCount,
permanenceInc, permanenceDec, permanenceMax,
globalDecay, activationThreshold,
doPooling, segUpdateValidDuration,
seed=SEED, verbosity=verbosity,
checkSynapseConsistency=checkSynapseConsistency,
pamLength=pamLength,
maxSeqLength=0
)
else:
tp = TPClass(numberOfCols, cellsPerColumn,
initialPerm, connectedPerm,
minThreshold, newSynapseCount,
permanenceInc, permanenceDec, permanenceMax,
globalDecay, activationThreshold,
doPooling, segUpdateValidDuration,
seed=SEED, verbosity=verbosity,
checkSynapseConsistency=checkSynapseConsistency,
pamLength=pamLength
)
if compareToPy:
    # override default maxSeqLength value for high-order sequences
if highOrder:
py_tp = TP(numberOfCols, cellsPerColumn,
initialPerm, connectedPerm,
minThreshold, newSynapseCount,
permanenceInc, permanenceDec, permanenceMax,
globalDecay, activationThreshold,
doPooling, segUpdateValidDuration,
seed=SEED, verbosity=verbosity,
pamLength=pamLength,
maxSeqLength=0
)
else:
py_tp = TP(numberOfCols, cellsPerColumn,
initialPerm, connectedPerm,
minThreshold, newSynapseCount,
permanenceInc, permanenceDec, permanenceMax,
globalDecay, activationThreshold,
doPooling, segUpdateValidDuration,
seed=SEED, verbosity=verbosity,
pamLength=pamLength,
)
trainingSequences = trainingSequences[0]
if testSequences == None: testSequences = trainingSequences
inferAcceptablePatterns = acceptablePatterns == []
#--------------------------------------------------------------------------------
# Learn
for r in xrange(nTrainingReps):
if VERBOSITY > 1:
print "============= Learning round",r,"================="
for sequenceNum, trainingSequence in enumerate(trainingSequences):
if VERBOSITY > 1:
print "============= New sequence ================="
if doResets:
tp.reset()
if compareToPy:
py_tp.reset()
for t,x in enumerate(trainingSequence):
if noiseModel is not None and \
'xor' in noiseModel and 'binomial' in noiseModel \
and 'training' in noiseModel:
noise_vector = rgen.binomial(len(x), noiseLevel, (len(x)))
x = logical_xor(x, noise_vector)
if VERBOSITY > 2:
print "Time step",t, "learning round",r, "sequence number", sequenceNum
print "Input: ",tp.printInput(x)
print "NNZ:", x.nonzero()
x = numpy.array(x).astype('float32')
y = tp.learn(x)
if compareToPy:
py_y = py_tp.learn(x)
if t % 25 == 0: # To track bugs, do that every iteration, but very slow
assert fdrutils.tpDiff(tp, py_tp, VERBOSITY) == True
if VERBOSITY > 3:
tp.printStates(printPrevious = (VERBOSITY > 4))
print
if VERBOSITY > 3:
print "Sequence finished. Complete state after sequence"
tp.printCells()
print
numPerfectAtHub = 0
if compareToPy:
print "End of training"
assert fdrutils.tpDiff(tp, py_tp, VERBOSITY) == True
#--------------------------------------------------------------------------------
# Infer
if VERBOSITY > 1: print "============= Inference ================="
for s,testSequence in enumerate(testSequences):
if VERBOSITY > 1: print "============= New sequence ================="
if doResets:
tp.reset()
if compareToPy:
py_tp.reset()
slen = len(testSequence)
for t,x in enumerate(testSequence):
# Generate noise (optional)
if noiseModel is not None and \
'xor' in noiseModel and 'binomial' in noiseModel \
and 'inference' in noiseModel:
noise_vector = rgen.binomial(len(x), noiseLevel, (len(x)))
x = logical_xor(x, noise_vector)
if VERBOSITY > 2: print "Time step",t, '\nInput:', tp.printInput(x)
x = numpy.array(x).astype('float32')
y = tp.infer(x)
if compareToPy:
py_y = py_tp.infer(x)
assert fdrutils.tpDiff(tp, py_tp, VERBOSITY) == True
# if t == predJustAfterHubOnly:
# z = sum(y, axis = 1)
# print '\t\t',
# print ''.join('.' if z[i] == 0 else '1' for i in xrange(len(z)))
if VERBOSITY > 3: tp.printStates(printPrevious = (VERBOSITY > 4),
printLearnState = False); print
if nMultiStepPrediction > 0:
y_ms = tp.predict(nSteps=nMultiStepPrediction)
if VERBOSITY > 3:
print "Multi step prediction at Time step", t
for i in range(nMultiStepPrediction):
print "Prediction at t+", i+1
tp.printColConfidence(y_ms[i])
# Error Checking
for i in range(nMultiStepPrediction):
predictedTimeStep = t+i+1
if predictedTimeStep < slen:
input = testSequence[predictedTimeStep].nonzero()[0]
prediction = y_ms[i].nonzero()[0]
foundInInput, totalActiveInInput, \
missingFromInput, totalActiveInPrediction = \
fdrutils.checkMatch(input, prediction, sparse=True)
falseNegatives = totalActiveInInput - foundInInput
falsePositives = missingFromInput
if VERBOSITY > 2:
print "Predition from %d to %d" % (t, t+i+1)
print "\t\tFalse Negatives:", falseNegatives
print "\t\tFalse Positivies:", falsePositives
if falseNegatives > 0 or falsePositives > 0:
numStrictErrors += 1
if falseNegatives > 0 and VERBOSITY > 1:
print "Multi step prediction from t=", t, "to t=", t+i+1,\
"false negative with error=",falseNegatives,
print "out of", totalActiveInInput,"ones"
if falsePositives > 0 and VERBOSITY > 1:
print "Multi step prediction from t=", t, "to t=", t+i+1,\
"false positive with error=",falsePositives,
print "out of",totalActiveInInput,"ones"
if falsePositives > 3 or falseNegatives > 3:
numFailures += 1
# Analyze the failure if we care about it
if VERBOSITY > 1 and not shouldFail:
print 'Input at t=', t
print '\t\t',; printOneTrainingVector(testSequence[t])
print 'Prediction for t=', t+i+1
print '\t\t',; printOneTrainingVector(y_ms[i])
print 'Actual input at t=', t+i+1
print '\t\t',; printOneTrainingVector(testSequence[t+i+1])
if t < slen-1:
# If no acceptable patterns were passed to us, we need to infer them
# for the current sequence and time step by looking at the testSequences.
# nAcceptable is used to reduce the number of automatically determined
# acceptable patterns.
if inferAcceptablePatterns:
acceptablePatterns = findAcceptablePatterns(tp, t, s, testSequences,
nAcceptable)
scores = tp.checkPrediction2([pattern.nonzero()[0] \
for pattern in acceptablePatterns])
falsePositives, falseNegatives = scores[0], scores[1]
# We report an error if FN or FP is > 0.
# We report a failure if number of FN or number of FP is > 2 for any
# pattern. We also count the number of perfect predictions.
if falseNegatives > 0 or falsePositives > 0:
numStrictErrors += 1
if falseNegatives > 0 and VERBOSITY > 1:
print "Pattern",s,"time",t,\
"prediction false negative with error=",falseNegatives,
print "out of",int(testSequence[t+1].sum()),"ones"
if falsePositives > 0 and VERBOSITY > 1:
print "Pattern",s,"time",t,\
"prediction false positive with error=",falsePositives,
print "out of",int(testSequence[t+1].sum()),"ones"
if falseNegatives > 3 or falsePositives > 3:
numFailures += 1
# Analyze the failure if we care about it
if VERBOSITY > 1 and not shouldFail:
print 'Test sequences'
if len(testSequences) > 1:
printAllTrainingSequences(testSequences, t+1)
else:
print '\t\t',; printOneTrainingVector(testSequence[t])
print '\t\t',; printOneTrainingVector(testSequence[t+1])
print 'Acceptable'
for p in acceptablePatterns:
print '\t\t',; printOneTrainingVector(p)
print 'Output'
diagnostic = ''
output = sum(tp.currentOutput,axis=1)
print '\t\t',; printOneTrainingVector(output)
else:
numPerfect += 1
if predJustAfterHubOnly is not None and predJustAfterHubOnly == t:
numPerfectAtHub += 1
if predJustAfterHubOnly is None:
return numFailures, numStrictErrors, numPerfect, tp
else:
return numFailures, numStrictErrors, numPerfect, numPerfectAtHub, tp
def TestB1(numUniquePatterns, nTests, cellsPerColumn = 1, name = "B1"):
numCols = 100
sequenceLength = numUniquePatterns
nFailed = 0
for numSequences in [1]:
print "Test "+name+" (sequence memory - 1 repetition - 1 sequence)"
for k in range(nTests): # Test that configuration several times
trainingSet = buildTrainingSet(numSequences =numSequences,
sequenceLength = sequenceLength,
pctShared = 0.0,
subsequenceStartPos = 0,
numCols = numCols,
minOnes = 15, maxOnes = 20)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = 1,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 8,
newSynapseCount = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
activationThreshold = 8,
doPooling = False)
if numFailures == 0:
print "Test "+name+" ok"
else:
print "Test "+name+" failed"
nFailed = nFailed + 1
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return nFailed
def TestB7(numUniquePatterns, nTests, cellsPerColumn = 1, name = "B7"):
numCols = 100
sequenceLength = numUniquePatterns
nFailed = 0
for numSequences in [1]:
print "Test "+name+" (sequence memory - 4 repetition - 1 sequence - slow learning)"
for k in range(nTests): # Test that configuration several times
trainingSet = buildTrainingSet(numSequences =numSequences,
sequenceLength = sequenceLength,
pctShared = 0.0,
subsequenceStartPos = 0,
numCols = numCols,
minOnes = 15, maxOnes = 20)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = 4,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
minThreshold = 11,
newSynapseCount = 11,
activationThreshold = 11,
initialPerm = .2,
connectedPerm = .6,
permanenceInc = .2,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
doPooling = False)
if numFailures == 0:
print "Test "+name+" ok"
else:
print "Test "+name+" failed"
nFailed = nFailed + 1
print "numFailures=", numFailures,
print "numStrictErrors=", numStrictErrors,
print "numPerfect=", numPerfect
return nFailed
def TestB2(numUniquePatterns, nTests, cellsPerColumn = 1, name = "B2"):
numCols = 100
sequenceLength = numUniquePatterns
nFailed = 0
for numSequences in [1]: # TestC has multiple sequences
print "Test",name,"(sequence memory - second repetition of the same sequence" +\
" should not add synapses)"
print "Num patterns in sequence =", numUniquePatterns,
print "cellsPerColumn=",cellsPerColumn
for k in range(nTests): # Test that configuration several times
trainingSet = buildTrainingSet(numSequences =numSequences,
sequenceLength = sequenceLength,
pctShared = 0.0,
subsequenceStartPos = 0,
numCols = numCols,
minOnes = 15, maxOnes = 20)
# Do one pass through the training set
numFailures1, numStrictErrors1, numPerfect1, tp1 = \
_testSequence(trainingSet,
nTrainingReps = 1,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 8,
newSynapseCount = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
activationThreshold = 8)
# Do two passes through the training set
numFailures, numStrictErrors, numPerfect, tp2 = \
_testSequence(trainingSet,
nTrainingReps = 2,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 8,
newSynapseCount = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
activationThreshold = 8)
# Check that training with a second pass did not result in more synapses
segmentInfo1 = tp1.getSegmentInfo()
segmentInfo2 = tp2.getSegmentInfo()
if (segmentInfo1[0] != segmentInfo2[0]) or \
(segmentInfo1[1] != segmentInfo2[1]) :
print "Training twice incorrectly resulted in more segments or synapses"
print "Number of segments: ", segmentInfo1[0], segmentInfo2[0]
numFailures += 1
if numFailures == 0:
print "Test",name,"ok"
else:
print "Test",name,"failed"
nFailed = nFailed + 1
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return nFailed
def TestB3(numUniquePatterns, nTests):
numCols = 100
sequenceLength = numUniquePatterns
nFailed = 0
for numSequences in [2,5]:
print "Test B3 (sequence memory - 2 repetitions -", numSequences, "sequences)"
for k in range(nTests): # Test that configuration several times
trainingSet = buildTrainingSet(numSequences =numSequences,
sequenceLength = sequenceLength,
pctShared = 0.0,
subsequenceStartPos = 0,
numCols = numCols,
minOnes = 15, maxOnes = 20)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = 2,
numberOfCols = numCols,
cellsPerColumn = 4,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 11,
activationThreshold = 8,
doPooling = False)
if numFailures == 0:
print "Test B3 ok"
else:
print "Test B3 failed"
nFailed = nFailed + 1
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return nFailed
def TestH0(numOnes = 5,nMultiStepPrediction=0):
cellsPerColumn = 4
print "Higher order test 0 with cellsPerColumn=",cellsPerColumn
trainingSet = buildSimpleTrainingSet(numOnes)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = 20,
numberOfCols = trainingSet[0][0][0].size,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 6,
permanenceInc = .4,
permanenceDec = .2,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 5,
activationThreshold = 4,
doPooling = False,
nMultiStepPrediction=nMultiStepPrediction)
if numFailures == 0 and \
numStrictErrors == 0 and \
numPerfect == len(trainingSet[0])*(len(trainingSet[0][0]) - 1):
print "Test PASS"
return 0
else:
print "Test FAILED"
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return 1
def TestH(sequenceLength, nTests, cellsPerColumn, numCols =100, nSequences =[2],
pctShared = 0.1, seqGenMode = 'shared sequence', nTrainingReps = 2,
shouldFail = False, compareToPy = False, highOrder = False):
nFailed = 0
subsequenceStartPos = 10
assert subsequenceStartPos < sequenceLength
for numSequences in nSequences:
print "Higher order test with sequenceLength=",sequenceLength,
print "cellsPerColumn=",cellsPerColumn,"nTests=",nTests,
print "numSequences=",numSequences, "pctShared=", pctShared
for k in range(nTests): # Test that configuration several times
trainingSet = buildTrainingSet(numSequences = numSequences,
sequenceLength = sequenceLength,
pctShared = pctShared, seqGenMode = seqGenMode,
subsequenceStartPos = subsequenceStartPos,
numCols = numCols,
minOnes = 21, maxOnes = 25)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = nTrainingReps,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 12,
permanenceInc = .4,
permanenceDec = .1,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 11,
activationThreshold = 8,
doPooling = False,
shouldFail = shouldFail,
compareToPy = compareToPy,
highOrder = highOrder)
if numFailures == 0 and not shouldFail \
or numFailures > 0 and shouldFail:
print "Test PASS",
if shouldFail:
print '(should fail, and failed)'
else:
print
else:
print "Test FAILED"
nFailed = nFailed + 1
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return nFailed
def TestH11(numOnes = 3):
cellsPerColumn = 4
print "Higher order test 11 with cellsPerColumn=",cellsPerColumn
trainingSet = buildAlternatingTrainingSet(numOnes=numOnes)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = 1,
numberOfCols = trainingSet[0][0][0].size,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 6,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 1,
activationThreshold = 1,
doPooling = False)
if numFailures == 0 and \
numStrictErrors == 0 and \
numPerfect == len(trainingSet[0])*(len(trainingSet[0][0]) - 1):
print "Test PASS"
return 0
else:
print "Test FAILED"
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return 1
def TestH2a(sequenceLength, nTests, cellsPerColumn, numCols =100, nSequences =[2],
pctShared = 0.02, seqGenMode = 'shared sequence',
shouldFail = False):
"""
Still need to test:
Two overlapping sequences. OK to get new segments but check that we can
get correct high order prediction after multiple reps.
"""
print "Test H2a - second repetition of the same sequence should not add synapses"
nFailed = 0
subsequenceStartPos = 10
assert subsequenceStartPos < sequenceLength
for numSequences in nSequences:
print "Higher order test with sequenceLength=",sequenceLength,
print "cellsPerColumn=",cellsPerColumn,"nTests=",nTests,"numCols=", numCols
print "numSequences=",numSequences, "pctShared=", pctShared,
print "sharing mode=", seqGenMode
for k in range(nTests): # Test that configuration several times
trainingSet = buildTrainingSet(numSequences = numSequences,
sequenceLength = sequenceLength,
pctShared = pctShared, seqGenMode = seqGenMode,
subsequenceStartPos = subsequenceStartPos,
numCols = numCols,
minOnes = 21, maxOnes = 25)
print "============== 10 ======================"
numFailures3, numStrictErrors3, numPerfect3, tp3 = \
_testSequence(trainingSet,
nTrainingReps = 10,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .4,
connectedPerm = .7,
minThreshold = 12,
permanenceInc = .1,
permanenceDec = 0.1,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 15,
activationThreshold = 12,
doPooling = False,
shouldFail = shouldFail)
print "============== 2 ======================"
numFailures, numStrictErrors, numPerfect, tp2 = \
_testSequence(trainingSet,
nTrainingReps = 2,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 12,
permanenceInc = .1,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 15,
activationThreshold = 12,
doPooling = False,
shouldFail = shouldFail)
print "============== 1 ======================"
numFailures1, numStrictErrors1, numPerfect1, tp1 = \
_testSequence(trainingSet,
nTrainingReps = 1,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 12,
permanenceInc = .1,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 15,
activationThreshold = 12,
doPooling = False,
shouldFail = shouldFail)
# Check that training with a second pass did not result in more synapses
segmentInfo1 = tp1.getSegmentInfo()
segmentInfo2 = tp2.getSegmentInfo()
if (abs(segmentInfo1[0] - segmentInfo2[0]) > 3) or \
(abs(segmentInfo1[1] - segmentInfo2[1]) > 3*15) :
print "Training twice incorrectly resulted in too many segments or synapses"
print segmentInfo1
print segmentInfo2
print tp3.getSegmentInfo()
tp3.trimSegments()
print tp3.getSegmentInfo()
print "Failures for 1, 2, and N reps"
print numFailures1, numStrictErrors1, numPerfect1
print numFailures, numStrictErrors, numPerfect
print numFailures3, numStrictErrors3, numPerfect3
numFailures += 1
if numFailures == 0 and not shouldFail \
or numFailures > 0 and shouldFail:
print "Test PASS",
if shouldFail:
print '(should fail, and failed)'
else:
print
else:
print "Test FAILED"
nFailed = nFailed + 1
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return nFailed
def TestP(sequenceLength, nTests, cellsPerColumn, numCols =300, nSequences =[2],
pctShared = 0.1, seqGenMode = 'shared subsequence', nTrainingReps = 2):
nFailed = 0
newSynapseCount = 7
activationThreshold = newSynapseCount - 2
minOnes = 1.5 * newSynapseCount
maxOnes = .3 * numCols / nTrainingReps
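# Derive the pattern density passed to buildTrainingSet below: at least
# 1.5x newSynapseCount active bits per pattern, at most 0.3 * numCols / nTrainingReps.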
for numSequences in nSequences:
print "Pooling test with sequenceLength=",sequenceLength,
print 'numCols=', numCols,
print "cellsPerColumn=",cellsPerColumn,"nTests=",nTests,
print "numSequences=",numSequences, "pctShared=", pctShared,
print "nTrainingReps=", nTrainingReps, "minOnes=", minOnes,
print "maxOnes=", maxOnes
for k in range(nTests): # Test that configuration several times
minOnes = 1.5 * newSynapseCount
trainingSet = buildTrainingSet(numSequences =numSequences,
sequenceLength = sequenceLength,
pctShared = pctShared, seqGenMode = seqGenMode,
subsequenceStartPos = 10,
numCols = numCols,
minOnes = minOnes, maxOnes = maxOnes)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = nTrainingReps,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = newSynapseCount,
activationThreshold = activationThreshold,
doPooling = True)
if numFailures == 0 and \
numStrictErrors == 0 and \
numPerfect == numSequences*(sequenceLength - 1):
print "Test PASS"
else:
print "Test FAILED"
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
nFailed = nFailed + 1
return nFailed
def TestHL0a(numOnes = 5):
cellsPerColumn = 4
newSynapseCount = 5
activationThreshold = newSynapseCount
print "HiLo test 0a with cellsPerColumn=",cellsPerColumn
trainingSet, testSet = buildHL0aTrainingSet()
numCols = trainingSet[0][0].size
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence([trainingSet],
nTrainingReps = 1,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .2,
connectedPerm = .7,
permanenceInc = .2,
permanenceDec = 0.05,
permanenceMax = 1,
globalDecay = .0,
minThreshold = activationThreshold,
newSynapseCount = newSynapseCount,
activationThreshold = activationThreshold,
pamLength = 2,
doPooling = False,
testSequences = testSet)
tp.trimSegments()
retAfter = tp.getSegmentInfo()
print retAfter[0], retAfter[1]
if retAfter[0] > 20:
print "Too many segments"
numFailures += 1
if retAfter[1] > 100:
print "Too many synapses"
numFailures += 1
if numFailures == 0:
print "Test HL0a ok"
return 0
else:
print "Test HL0a failed"
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return 1
def TestHL0b(numOnes = 5):
cellsPerColumn = 4
newSynapseCount = 5
activationThreshold = newSynapseCount
print "HiLo test 0b with cellsPerColumn=",cellsPerColumn
trainingSet, testSet = buildHL0bTrainingSet()
numCols = trainingSet[0][0].size
print "numCols=", numCols
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence([trainingSet],
nTrainingReps = 1,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .2,
connectedPerm = .7,
permanenceInc = .2,
permanenceDec = 0.05,
permanenceMax = 1,
globalDecay = .0,
minThreshold = activationThreshold,
newSynapseCount = newSynapseCount,
activationThreshold = activationThreshold,
doPooling = False,
testSequences = testSet)
tp.trimSegments()
retAfter = tp.getSegmentInfo()
tp.printCells()
if numFailures == 0:
print "Test HL0 ok"
return 0
else:
print "Test HL0 failed"
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
return 1
def TestHL(sequenceLength, nTests, cellsPerColumn, numCols =200, nSequences =[2],
pctShared = 0.1, seqGenMode = 'shared subsequence', nTrainingReps = 3,
noiseModel = 'xor binomial in learning only', noiseLevel = 0.1,
hiloOn = True):
nFailed = 0
newSynapseCount = 8
activationThreshold = newSynapseCount
minOnes = 1.5 * newSynapseCount
maxOnes = 0.3 * numCols / nTrainingReps
if hiloOn == False:
minThreshold = 0.9
for numSequences in nSequences:
print "Hilo test with sequenceLength=", sequenceLength,
print "cellsPerColumn=", cellsPerColumn, "nTests=", nTests,
print "numSequences=", numSequences, "pctShared=", pctShared,
print "nTrainingReps=", nTrainingReps, "minOnes=", minOnes,
print "maxOnes=", maxOnes,
print 'noiseModel=', noiseModel, 'noiseLevel=', noiseLevel
for k in range(nTests): # Test that configuration several times
minOnes = 1.5 * newSynapseCount
trainingSet = buildTrainingSet(numSequences =numSequences,
sequenceLength = sequenceLength,
pctShared = pctShared, seqGenMode = seqGenMode,
subsequenceStartPos = 10,
numCols = numCols,
minOnes = minOnes, maxOnes = maxOnes)
numFailures, numStrictErrors, numPerfect, tp = \
_testSequence(trainingSet,
nTrainingReps = nTrainingReps,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .2,
connectedPerm = .7,
minThreshold = activationThreshold,
newSynapseCount = newSynapseCount,
activationThreshold = activationThreshold,
permanenceInc = .2,
permanenceDec = 0.05,
permanenceMax = 1,
globalDecay = .0,
doPooling = False,
noiseModel = noiseModel,
noiseLevel = noiseLevel)
if numFailures == 0 and \
numStrictErrors == 0 and \
numPerfect == numSequences*(sequenceLength - 1):
print "Test PASS"
else:
print "Test FAILED"
print "numFailures=", numFailures
print "numStrictErrors=", numStrictErrors
print "numPerfect=", numPerfect
nFailed = nFailed + 1
return nFailed
def worker(x):
"""Worker function to use in parallel hub capacity test below."""
cellsPerColumn, numSequences = x[0], x[1]
nTrainingReps = 1
sequenceLength = 10
numCols = 200
print 'Started', cellsPerColumn, numSequences
seqGenMode = 'shared subsequence, one pattern'
subsequenceStartPos = 5
trainingSet = buildTrainingSet(numSequences = numSequences,
sequenceLength = sequenceLength,
pctShared = .1, seqGenMode = seqGenMode,
subsequenceStartPos = subsequenceStartPos,
numCols = numCols,
minOnes = 21, maxOnes = 25)
numFailures1, numStrictErrors1, numPerfect1, atHub, tp = \
_testSequence(trainingSet,
nTrainingReps = nTrainingReps,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 8,
activationThreshold = 8,
doPooling = False,
shouldFail = False,
predJustAfterHubOnly = 5)
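# Baseline run: the same number of sequences but with no shared subsequence,
# for comparison against the hub run above.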
seqGenMode = 'no shared subsequence'
trainingSet = buildTrainingSet(numSequences = numSequences,
sequenceLength = sequenceLength,
pctShared = 0, seqGenMode = seqGenMode,
subsequenceStartPos = 0,
numCols = numCols,
minOnes = 21, maxOnes = 25)
numFailures2, numStrictErrors2, numPerfect2, tp = \
_testSequence(trainingSet,
nTrainingReps = nTrainingReps,
numberOfCols = numCols,
cellsPerColumn = cellsPerColumn,
initialPerm = .8,
connectedPerm = .7,
minThreshold = 11,
permanenceInc = .4,
permanenceDec = 0,
permanenceMax = 1,
globalDecay = .0,
newSynapseCount = 8,
activationThreshold = 8,
doPooling = False,
shouldFail = False)
print 'Completed',
print cellsPerColumn, numSequences, numFailures1, numStrictErrors1, numPerfect1, atHub, \
numFailures2, numStrictErrors2, numPerfect2
return cellsPerColumn, numSequences, numFailures1, numStrictErrors1, numPerfect1, atHub, \
numFailures2, numStrictErrors2, numPerfect2
def hubCapacity():
"""
Study hub capacity. Figure out how many sequences can share a pattern
for a given number of cells per column until the system fails.
DON'T RUN IN BUILD SYSTEM!!! (takes too long)
"""
from multiprocessing import Pool
import itertools
print "Hub capacity test"
# scalar value on predictions by looking at max perm over column
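# Sweep cellsPerColumn over 1-8 and the number of sequences from 1 to ~2000 in
# steps of 200; each combination produces one result tuple.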
p = Pool(2)
results = p.map(worker, itertools.product([1,2,3,4,5,6,7,8], xrange(1,2000,200)))
f = open('results-numPerfect.11.22.10.txt', 'w')
for i,r in enumerate(results):
print >>f, '{%d,%d,%d,%d,%d,%d,%d,%d,%d},' % r
f.close()
def runTests(testLength = "short"):
# Data structure to collect results of tests
# TODO: put numFailures, numStrictErrors and numPerfect in here for reporting
tests = {}
# always run this one: if that one fails, we can't do anything
basicTest()
print
#---------------------------------------------------------------------------------
if testLength == "long":
tests['B1'] = TestB1(numUniquePatterns, nTests)
tests['B2'] = TestB2(numUniquePatterns, nTests)
tests['B8'] = TestB7(4, nTests, cellsPerColumn = 4, name="B8")
tests['B10'] = TestB2(numUniquePatterns, nTests, cellsPerColumn = 4,
name = "B10")
# Run these always
tests['B3'] = TestB3(numUniquePatterns, nTests)
tests['B6'] = TestB1(numUniquePatterns, nTests,
cellsPerColumn = 4, name="B6")
tests['B7'] = TestB7(numUniquePatterns, nTests)
print
#---------------------------------------------------------------------------------
#print "Test H11"
#tests['H11'] = TestH11()
if True:
print "Test H0"
tests['H0'] = TestH0(numOnes = 5)
print "Test H2"
#tests['H2'] = TestH(numUniquePatterns, nTests, cellsPerColumn = 4,
# nTrainingReps = numUniquePatterns, compareToPy = False)
print "Test H3"
tests['H3'] = TestH(numUniquePatterns, nTests,
numCols = 200,
cellsPerColumn = 20,
pctShared = 0.3, nTrainingReps=numUniquePatterns,
compareToPy = False,
highOrder = True)
print "Test H4" # Produces 3 false positives, but otherwise fine.
# TODO: investigate initial false positives?
tests['H4'] = TestH(numUniquePatterns, nTests,
cellsPerColumn = 20,
pctShared = 0.1,
seqGenMode='shared subsequence at beginning')
if True:
print "Test H0 with multistep prediction"
tests['H0_MS'] = TestH0(numOnes = 5, nMultiStepPrediction=2)
if True:
print "Test H1" # - Should Fail
tests['H1'] = TestH(numUniquePatterns, nTests,
cellsPerColumn = 1, nTrainingReps = 1,
shouldFail = True)
# Also fails in --long mode. See H2 above
#print "Test H2a"
#tests['H2a'] = TestH2a(numUniquePatterns,
# nTests, pctShared = 0.02, numCols = 300, cellsPerColumn = 4)
if False:
print "Test H5" # make sure seqs are good even with shuffling, fast learning
tests['H5'] = TestH(numUniquePatterns, nTests,
cellsPerColumn = 10,
pctShared = 0.0,
seqGenMode='shuffle, no shared subsequence')
print "Test H6" # should work
tests['H6'] = TestH(numUniquePatterns, nTests,
cellsPerColumn = 10,
pctShared = 0.4,
seqGenMode='shuffle, shared subsequence')
# Try with 2 sequences, then 3 sequences interleaved so that there is
# always a shared pattern, but it belongs to 2 different sequences each
# time!
#print "Test H7"
#tests['H7'] = TestH(numUniquePatterns, nTests,
# cellsPerColumn = 10,
# pctShared = 0.4,
# seqGenMode='shuffle, shared subsequence')
# tricky: if start predicting in middle of subsequence, several predictions
# are possible
#print "Test H8"
#tests['H8'] = TestH(numUniquePatterns, nTests,
# cellsPerColumn = 10,
# pctShared = 0.4,
# seqGenMode='shuffle, shared subsequence')
print "Test H9" # plot hub capacity
tests['H9'] = TestH(numUniquePatterns, nTests,
cellsPerColumn = 10,
pctShared = 0.4,
seqGenMode='shuffle, shared subsequence')
#print "Test H10" # plot
#tests['H10'] = TestH(numUniquePatterns, nTests,
# cellsPerColumn = 10,
# pctShared = 0.4,
# seqGenMode='shuffle, shared subsequence')
print
#---------------------------------------------------------------------------------
if False:
print "Test P1"
tests['P1'] = TestP(numUniquePatterns, nTests,
cellsPerColumn = 4,
pctShared = 0.0,
seqGenMode = 'no shared subsequence',
nTrainingReps = 3)
if False:
print "Test P2"
tests['P2'] = TestP(numUniquePatterns, nTests,
cellsPerColumn = 4,
pctShared = 0.0,
seqGenMode = 'no shared subsequence',
nTrainingReps = 5)
print "Test P3"
tests['P3'] = TestP(numUniquePatterns, nTests,
cellsPerColumn = 4,
pctShared = 0.0,
seqGenMode = 'no shared subsequence',
nSequences = [2] if testLength == 'short' else [2,5],
nTrainingReps = 5)
print "Test P4"
tests['P4'] = TestP(numUniquePatterns, nTests,
cellsPerColumn = 4,
pctShared = 0.0,
seqGenMode = 'shared subsequence',
nSequences = [2] if testLength == 'short' else [2,5],
nTrainingReps = 5)
print
#---------------------------------------------------------------------------------
if True:
print "Test HL0a"
tests['HL0a'] = TestHL0a(numOnes = 5)
if False:
print "Test HL0b"
tests['HL0b'] = TestHL0b(numOnes = 5)
print "Test HL1"
tests['HL1'] = TestHL(sequenceLength = 20,
nTests = nTests,
numCols = 100,
nSequences = [1],
nTrainingReps = 3,
cellsPerColumn = 1,
seqGenMode = 'no shared subsequence',
noiseModel = 'xor binomial in learning only',
noiseLevel = 0.1,
doResets = False)
print "Test HL2"
tests['HL2'] = TestHL(numUniquePatterns = 20,
nTests = nTests,
numCols = 200,
nSequences = [1],
nTrainingReps = 3,
cellsPerColumn = 1,
seqGenMode = 'no shared subsequence',
noiseModel = 'xor binomial in learning only',
noiseLevel = 0.1,
doResets = False)
print "Test HL3"
tests['HL3'] = TestHL(numUniquePatterns = 30,
nTests = nTests,
numCols = 200,
nSequences = [2],
pctShared = 0.66,
nTrainingReps = 3,
cellsPerColumn = 1,
seqGenMode = 'shared subsequence',
noiseModel = None,
noiseLevel = 0.0,
doResets = True)
print "Test HL4"
tests['HL4'] = TestHL(numUniquePatterns = 30,
nTests = nTests,
numCols = 200,
nSequences = [2],
pctShared = 0.66,
nTrainingReps = 3,
cellsPerColumn = 1,
seqGenMode = 'shared subsequence',
noiseModel = None,
noiseLevel = 0.0,
doResets = False)
print "Test HL5"
tests['HL5'] = TestHL(numUniquePatterns = 30,
nTests = nTests,
numCols = 200,
nSequences = [2],
pctShared = 0.66,
nTrainingReps = 3,
cellsPerColumn = 1,
seqGenMode = 'shared subsequence',
noiseModel = 'xor binomial in learning only',
noiseLevel = 0.1,
doResets = False)
print "Test HL6"
tests['HL6'] = nTests - TestHL(numUniquePatterns = 20,
nTests = nTests,
numCols = 200,
nSequences = [1],
nTrainingReps = 3,
cellsPerColumn = 1,
seqGenMode = 'no shared subsequence',
noiseModel = 'xor binomial in learning only',
noiseLevel = 0.1,
doResets = True,
hiloOn = False)
print
#---------------------------------------------------------------------------------
nFailures = 0
for k,v in tests.iteritems():
nFailures = nFailures + v
if nFailures > 0:
print "There are failed tests"
print "Test\tn failures"
for k,v in tests.iteritems():
print k, "\t", v
assert 0
else:
print "All tests pass"
#---------------------------------------------------------------------------------
# Keep
if False:
import hotshot, hotshot.stats
prof = hotshot.Profile("profile.prof")
prof.runcall(TestB2, numUniquePatterns=100, nTests=2)
prof.close()
stats = hotshot.stats.load("profile.prof")
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_stats(50)
if __name__=="__main__":
if not TEST_CPP_TP:
print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
print "!! WARNING: C++ TP testing is DISABLED until it can be updated."
print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
# Three different test lengths are passed in through the command line.
# Developer tests use --short. Autobuild does not pass in anything.
# Acceptance tests pass in --long. testLength reflects these possibilities
# as "autobuild", "short", and "long"
testLength = "autobuild"
# Scan command line arguments to see what to do for the seed
# TODO: make default be a random seed, once we're sure it will pass reliably!
for i,arg in enumerate(sys.argv):
if 'seed' in arg:
try:
# use the specified seed
SEED = int(sys.argv[i+1])
except ValueError as e:
# random seed
SEED = numpy.random.randint(100)
if 'verbosity' in arg:
VERBOSITY = int(sys.argv[i+1])
if 'help' in arg:
print "TPTest.py --short|long --seed number|'rand' --verbosity number"
sys.exit()
if "short" in arg:
testLength = "short"
if "long" in arg:
testLength = "long"
rgen = numpy.random.RandomState(SEED) # always call this rgen, NOT random
# Setup the severity and length of the tests
if testLength == "short":
numUniquePatterns = 50
nTests = 1
elif testLength == "autobuild":
print "Running autobuild tests"
numUniquePatterns = 50
nTests = 1
elif testLength == "long":
numUniquePatterns = 100
nTests = 3
print "TP tests", testLength, "numUniquePatterns=", numUniquePatterns, "nTests=", nTests,
print "seed=", SEED
print
if testLength == "long":
print 'Testing Python TP'
TPClass = TP
runTests(testLength)
if testLength != 'long':
checkSynapseConsistency = False
else:
# Setting this to True causes test to take way too long
# Temporarily turned off so we can investigate
checkSynapseConsistency = False
if TEST_CPP_TP:
print 'Testing C++ TP'
TPClass = TP10X2
runTests(testLength)
|
cristianquaglio/odoo | refs/heads/master | addons/crm_helpdesk/report/__init__.py | 442 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crm_helpdesk_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
roadmapper/ansible | refs/heads/devel | lib/ansible/modules/storage/netapp/na_elementsw_ldap.py | 44 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_ldap
short_description: NetApp Element Software Manage ldap admin users
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <[email protected]>
description:
- Enable or disable LDAP, and add LDAP users
options:
state:
description:
- Whether the specified LDAP configuration should exist or not.
required: true
choices: ['present', 'absent']
authType:
description:
- Identifies which user authentication method to use.
choices: ['DirectBind', 'SearchAndBind']
groupSearchBaseDn:
description:
- The base DN of the tree to start the group search (will do a subtree search from here)
groupSearchType:
description:
- Controls the default group search filter used
choices: ['NoGroup', 'ActiveDirectory', 'MemberDN']
serverURIs:
description:
- A comma-separated list of LDAP server URIs
userSearchBaseDN:
description:
- The base DN of the tree to start the search (will do a subtree search from here)
searchBindDN:
description:
- A fully qualified DN to log in with to perform an LDAP search for the user (needs read access to the LDAP directory).
searchBindPassword:
description:
- The password for the searchBindDN account used for searching
userSearchFilter:
description:
- the LDAP Filter to use
userDNTemplate:
description:
- A string that is used to form a fully qualified user DN.
groupSearchCustomFilter:
description:
- For use with the CustomFilter Search type
'''
EXAMPLES = """
- name: disable ldap authentication
na_elementsw_ldap:
state: absent
username: "{{ admin username }}"
password: "{{ admin password }}"
hostname: "{{ hostname }}"
- name: Enable ldap authentication
na_elementsw_ldap:
state: present
username: "{{ admin username }}"
password: "{{ admin password }}"
hostname: "{{ hostname }}"
authType: DirectBind
serverURIs: ldap://svmdurlabesx01spd_ldapclnt
groupSearchType: MemberDN
userDNTemplate: uid=%USERNAME%,cn=users,cn=accounts,dc=corp,dc="{{ company name }}",dc=com
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_SF_SDK = netapp_utils.has_sf_sdk()
try:
import solidfire.common
except Exception:
HAS_SF_SDK = False
class NetappElementLdap(object):
def __init__(self):
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(
state=dict(type='str', required=True, choices=['absent', 'present']),
authType=dict(type='str', choices=['DirectBind', 'SearchAndBind']),
groupSearchBaseDn=dict(type='str'),
groupSearchType=dict(type='str', choices=['NoGroup', 'ActiveDirectory', 'MemberDN']),
serverURIs=dict(type='str'),
userSearchBaseDN=dict(type='str'),
searchBindDN=dict(type='str'),
searchBindPassword=dict(type='str', no_log=True),
userSearchFilter=dict(type='str'),
userDNTemplate=dict(type='str'),
groupSearchCustomFilter=dict(type='str'),
)
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True,
)
param = self.module.params
# set up state variables
self.state = param['state']
self.authType = param['authType']
self.groupSearchBaseDn = param['groupSearchBaseDn']
self.groupSearchType = param['groupSearchType']
self.serverURIs = param['serverURIs']
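# serverURIs is accepted as a comma-separated string and converted to the list form the SDK expects.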
if self.serverURIs is not None:
self.serverURIs = self.serverURIs.split(',')
self.userSearchBaseDN = param['userSearchBaseDN']
self.searchBindDN = param['searchBindDN']
self.searchBindPassword = param['searchBindPassword']
self.userSearchFilter = param['userSearchFilter']
self.userDNTemplate = param['userDNTemplate']
self.groupSearchCustomFilter = param['groupSearchCustomFilter']
if HAS_SF_SDK is False:
self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
else:
self.sfe = netapp_utils.create_sf_connection(module=self.module)
def get_ldap_configuration(self):
"""
Return ldap configuration if found
:return: Details about the ldap configuration. None if not found.
:rtype: solidfire.models.GetLdapConfigurationResult
"""
ldap_config = self.sfe.get_ldap_configuration()
return ldap_config
def enable_ldap(self):
"""
Enable LDAP
:return: nothing
"""
try:
self.sfe.enable_ldap_authentication(self.serverURIs, auth_type=self.authType,
group_search_base_dn=self.groupSearchBaseDn,
group_search_type=self.groupSearchType,
group_search_custom_filter=self.groupSearchCustomFilter,
search_bind_dn=self.searchBindDN,
search_bind_password=self.searchBindPassword,
user_search_base_dn=self.userSearchBaseDN,
user_search_filter=self.userSearchFilter,
user_dntemplate=self.userDNTemplate)
except solidfire.common.ApiServerError as error:
self.module.fail_json(msg='Error enabling LDAP: %s' % (to_native(error)),
exception=traceback.format_exc())
def check_config(self, ldap_config):
"""
Check to see if the ldap config has been modified.
:param ldap_config: The LDAP configuration
:return: False if the config is the same as the playbook, True if it is not
"""
if self.authType != ldap_config.ldap_configuration.auth_type:
return True
if self.serverURIs != ldap_config.ldap_configuration.server_uris:
return True
if self.groupSearchBaseDn != ldap_config.ldap_configuration.group_search_base_dn:
return True
if self.groupSearchType != ldap_config.ldap_configuration.group_search_type:
return True
if self.groupSearchCustomFilter != ldap_config.ldap_configuration.group_search_custom_filter:
return True
if self.searchBindDN != ldap_config.ldap_configuration.search_bind_dn:
return True
if self.searchBindPassword != ldap_config.ldap_configuration.search_bind_password:
return True
if self.userSearchBaseDN != ldap_config.ldap_configuration.user_search_base_dn:
return True
if self.userSearchFilter != ldap_config.ldap_configuration.user_search_filter:
return True
if self.userDNTemplate != ldap_config.ldap_configuration.user_dntemplate:
return True
return False
def apply(self):
changed = False
ldap_config = self.get_ldap_configuration()
if self.state == 'absent':
if ldap_config and ldap_config.ldap_configuration.enabled:
changed = True
if self.state == 'present' and self.check_config(ldap_config):
changed = True
if changed:
if self.module.check_mode:
pass
else:
if self.state == 'present':
self.enable_ldap()
elif self.state == 'absent':
self.sfe.disable_ldap_authentication()
self.module.exit_json(changed=changed)
def main():
v = NetappElementLdap()
v.apply()
if __name__ == '__main__':
main()
|
google/material-design-icons | refs/heads/master | update/venv/lib/python3.9/site-packages/fontTools/ttLib/tables/V_D_M_X_.py | 5 | from . import DefaultTable
from fontTools.misc import sstruct
from fontTools.misc.textTools import safeEval
import struct
VDMX_HeaderFmt = """
> # big endian
version: H # Version number (0 or 1)
numRecs: H # Number of VDMX groups present
numRatios: H # Number of aspect ratio groupings
"""
# the VDMX header is followed by an array of RatRange[numRatios] (i.e. aspect
# ratio ranges);
VDMX_RatRangeFmt = """
> # big endian
bCharSet: B # Character set
xRatio: B # Value to use for x-Ratio
yStartRatio: B # Starting y-Ratio value
yEndRatio: B # Ending y-Ratio value
"""
# followed by an array of offset[numRatios] from start of VDMX table to the
# VDMX Group for this ratio range (offsets will be re-calculated on compile);
# followed by an array of Group[numRecs] records;
VDMX_GroupFmt = """
> # big endian
recs: H # Number of height records in this group
startsz: B # Starting yPelHeight
endsz: B # Ending yPelHeight
"""
# followed by an array of vTable[recs] records.
VDMX_vTableFmt = """
> # big endian
yPelHeight: H # yPelHeight to which values apply
yMax: h # Maximum value (in pels) for this yPelHeight
yMin: h # Minimum value (in pels) for this yPelHeight
"""
class table_V_D_M_X_(DefaultTable.DefaultTable):
def decompile(self, data, ttFont):
pos = 0 # track current position from the start of VDMX table
dummy, data = sstruct.unpack2(VDMX_HeaderFmt, data, self)
pos += sstruct.calcsize(VDMX_HeaderFmt)
self.ratRanges = []
for i in range(self.numRatios):
ratio, data = sstruct.unpack2(VDMX_RatRangeFmt, data)
pos += sstruct.calcsize(VDMX_RatRangeFmt)
# the mapping between a ratio and a group is defined further below
ratio['groupIndex'] = None
self.ratRanges.append(ratio)
lenOffset = struct.calcsize('>H')
_offsets = [] # temporarily store offsets to groups
for i in range(self.numRatios):
offset = struct.unpack('>H', data[0:lenOffset])[0]
data = data[lenOffset:]
pos += lenOffset
_offsets.append(offset)
self.groups = []
for groupIndex in range(self.numRecs):
# the offset to this group from beginning of the VDMX table
currOffset = pos
group, data = sstruct.unpack2(VDMX_GroupFmt, data)
# the group length and bounding sizes are re-calculated on compile
recs = group.pop('recs')
startsz = group.pop('startsz')
endsz = group.pop('endsz')
pos += sstruct.calcsize(VDMX_GroupFmt)
for j in range(recs):
vTable, data = sstruct.unpack2(VDMX_vTableFmt, data)
vTableLength = sstruct.calcsize(VDMX_vTableFmt)
pos += vTableLength
# group is a dict of (yMax, yMin) tuples keyed by yPelHeight
group[vTable['yPelHeight']] = (vTable['yMax'], vTable['yMin'])
# make sure startsz and endsz match the calculated values
minSize = min(group.keys())
maxSize = max(group.keys())
assert startsz == minSize, \
"startsz (%s) must equal min yPelHeight (%s): group %d" % \
(startsz, minSize, groupIndex)
assert endsz == maxSize, \
"endsz (%s) must equal max yPelHeight (%s): group %d" % \
(endsz, maxSize, groupIndex)
self.groups.append(group)
# match the defined offsets with the current group's offset
for offsetIndex, offsetValue in enumerate(_offsets):
# when numRecs < numRatios there can be more than one ratio range
# sharing the same VDMX group
if currOffset == offsetValue:
# map the group with the ratio range that has the same
# index as the offset to that group (it took me a while..)
self.ratRanges[offsetIndex]['groupIndex'] = groupIndex
# check that all ratio ranges have a group
for i in range(self.numRatios):
ratio = self.ratRanges[i]
if ratio['groupIndex'] is None:
from fontTools import ttLib
raise ttLib.TTLibError(
"no group defined for ratRange %d" % i)
def _getOffsets(self):
"""
Calculate offsets to VDMX_Group records.
For each ratRange return a list of offset values from the beginning of
the VDMX table to a VDMX_Group.
"""
lenHeader = sstruct.calcsize(VDMX_HeaderFmt)
lenRatRange = sstruct.calcsize(VDMX_RatRangeFmt)
lenOffset = struct.calcsize('>H')
lenGroupHeader = sstruct.calcsize(VDMX_GroupFmt)
lenVTable = sstruct.calcsize(VDMX_vTableFmt)
# offset to the first group
pos = lenHeader + self.numRatios*lenRatRange + self.numRatios*lenOffset
groupOffsets = []
for group in self.groups:
groupOffsets.append(pos)
lenGroup = lenGroupHeader + len(group) * lenVTable
pos += lenGroup # offset to next group
offsets = []
for ratio in self.ratRanges:
groupIndex = ratio['groupIndex']
offsets.append(groupOffsets[groupIndex])
return offsets
def compile(self, ttFont):
if not(self.version == 0 or self.version == 1):
from fontTools import ttLib
raise ttLib.TTLibError(
"unknown format for VDMX table: version %s" % self.version)
data = sstruct.pack(VDMX_HeaderFmt, self)
for ratio in self.ratRanges:
data += sstruct.pack(VDMX_RatRangeFmt, ratio)
# recalculate offsets to VDMX groups
for offset in self._getOffsets():
data += struct.pack('>H', offset)
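# Each group is written as its header (record count and size bounds) followed by
# one vTable record per yPelHeight, in ascending order.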
for group in self.groups:
recs = len(group)
startsz = min(group.keys())
endsz = max(group.keys())
gHeader = {'recs': recs, 'startsz': startsz, 'endsz': endsz}
data += sstruct.pack(VDMX_GroupFmt, gHeader)
for yPelHeight, (yMax, yMin) in sorted(group.items()):
vTable = {'yPelHeight': yPelHeight, 'yMax': yMax, 'yMin': yMin}
data += sstruct.pack(VDMX_vTableFmt, vTable)
return data
def toXML(self, writer, ttFont):
writer.simpletag("version", value=self.version)
writer.newline()
writer.begintag("ratRanges")
writer.newline()
for ratio in self.ratRanges:
groupIndex = ratio['groupIndex']
writer.simpletag(
"ratRange",
bCharSet=ratio['bCharSet'],
xRatio=ratio['xRatio'],
yStartRatio=ratio['yStartRatio'],
yEndRatio=ratio['yEndRatio'],
groupIndex=groupIndex
)
writer.newline()
writer.endtag("ratRanges")
writer.newline()
writer.begintag("groups")
writer.newline()
for groupIndex in range(self.numRecs):
group = self.groups[groupIndex]
recs = len(group)
startsz = min(group.keys())
endsz = max(group.keys())
writer.begintag("group", index=groupIndex)
writer.newline()
writer.comment("recs=%d, startsz=%d, endsz=%d" %
(recs, startsz, endsz))
writer.newline()
for yPelHeight, (yMax, yMin) in sorted(group.items()):
writer.simpletag(
"record",
[('yPelHeight', yPelHeight), ('yMax', yMax), ('yMin', yMin)])
writer.newline()
writer.endtag("group")
writer.newline()
writer.endtag("groups")
writer.newline()
def fromXML(self, name, attrs, content, ttFont):
if name == "version":
self.version = safeEval(attrs["value"])
elif name == "ratRanges":
if not hasattr(self, "ratRanges"):
self.ratRanges = []
for element in content:
if not isinstance(element, tuple):
continue
name, attrs, content = element
if name == "ratRange":
if not hasattr(self, "numRatios"):
self.numRatios = 1
else:
self.numRatios += 1
ratio = {
"bCharSet": safeEval(attrs["bCharSet"]),
"xRatio": safeEval(attrs["xRatio"]),
"yStartRatio": safeEval(attrs["yStartRatio"]),
"yEndRatio": safeEval(attrs["yEndRatio"]),
"groupIndex": safeEval(attrs["groupIndex"])
}
self.ratRanges.append(ratio)
elif name == "groups":
if not hasattr(self, "groups"):
self.groups = []
for element in content:
if not isinstance(element, tuple):
continue
name, attrs, content = element
if name == "group":
if not hasattr(self, "numRecs"):
self.numRecs = 1
else:
self.numRecs += 1
group = {}
for element in content:
if not isinstance(element, tuple):
continue
name, attrs, content = element
if name == "record":
yPelHeight = safeEval(attrs["yPelHeight"])
yMax = safeEval(attrs["yMax"])
yMin = safeEval(attrs["yMin"])
group[yPelHeight] = (yMax, yMin)
self.groups.append(group)
|
mandeepdhami/horizon | refs/heads/master | horizon/test/tests/middleware.py | 61 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from django.conf import settings
from django.http import HttpResponseRedirect # noqa
from horizon import exceptions
from horizon import middleware
from horizon.test import helpers as test
class MiddlewareTests(test.TestCase):
def test_redirect_login_fail_to_login(self):
url = settings.LOGIN_URL
request = self.factory.post(url)
mw = middleware.HorizonMiddleware()
resp = mw.process_exception(request, exceptions.NotAuthorized())
resp.client = self.client
self.assertRedirects(resp, url)
def test_session_timeout(self):
requested_url = '/project/instances/'
request = self.factory.get(requested_url)
try:
timeout = settings.SESSION_TIMEOUT
except AttributeError:
timeout = 1800
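# Simulate a session whose last activity is older than the timeout; the
# middleware should respond with a 302 redirect to the requested URL.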
request.session['last_activity'] = int(time.time()) - (timeout + 10)
mw = middleware.HorizonMiddleware()
resp = mw.process_request(request)
self.assertEqual(302, resp.status_code)
self.assertEqual(requested_url, resp.get('Location'))
def test_process_response_redirect_on_ajax_request(self):
url = settings.LOGIN_URL
mw = middleware.HorizonMiddleware()
request = self.factory.post(url,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
request.horizon = {'async_messages':
[('error', 'error_msg', 'extra_tag')]}
response = HttpResponseRedirect(url)
response.client = self.client
resp = mw.process_response(request, response)
self.assertEqual(200, resp.status_code)
self.assertEqual(url, resp['X-Horizon-Location'])
|
trenton3983/Fluent_Python | refs/heads/master | 09-pythonic-obj/mem_test.py | 7 | import importlib
import sys
import resource
NUM_VECTORS = 10**7
if len(sys.argv) == 2:
module_name = sys.argv[1].replace('.py', '')
module = importlib.import_module(module_name)
else:
print('Usage: {} <vector-module-to-test>'.format(sys.argv[0]))
sys.exit(1)
fmt = 'Selected Vector2d type: {.__name__}.{.__name__}'
print(fmt.format(module, module.Vector2d))
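# ru_maxrss is the peak resident set size: reported in kilobytes on Linux and in bytes on macOS.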
mem_init = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
print('Creating {:,} Vector2d instances'.format(NUM_VECTORS))
vectors = [module.Vector2d(3.0, 4.0) for i in range(NUM_VECTORS)]
mem_final = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
print('Initial RAM usage: {:14,}'.format(mem_init))
print(' Final RAM usage: {:14,}'.format(mem_final))
|