# source: h2oai/h2o-dev | h2o-py/h2o/exceptions.py (license: apache-2.0)
# -*- encoding: utf-8 -*-
# Copyright: (c) 2016 H2O.ai
# License: Apache License Version 2.0 (see LICENSE for details)
"""
:mod:`h2o.exceptions` -- all exception classes in the h2o module.
All H2O exceptions derive from :class:`H2OError`.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
__all__ = ("H2OStartupError", "H2OConnectionError", "H2OServerError", "H2OResponseError",
"H2OValueError", "H2OTypeError", "H2OJobCancelled")
class H2OError(Exception):
"""Base class for all H2O exceptions."""
class H2OSoftError(H2OError):
"""Base class for exceptions that trigger "soft" exception handling hook."""
#-----------------------------------------------------------------------------------------------------------------------
# H2OValueError
#-----------------------------------------------------------------------------------------------------------------------
class H2OValueError(H2OSoftError, ValueError):
"""Error indicating that wrong parameter value was passed to a function."""
def __init__(self, message, var_name=None, skip_frames=0):
"""Create an H2OValueError exception object."""
super(H2OValueError, self).__init__(message)
self.var_name = var_name
self.skip_frames = skip_frames
#-----------------------------------------------------------------------------------------------------------------------
# H2OTypeError
#-----------------------------------------------------------------------------------------------------------------------
class H2OTypeError(H2OSoftError, TypeError):
"""
    Error indicating that the user passed a parameter of the wrong type.
This error will trigger "soft" exception handling, in the sense that the stack trace will be much more compact
than usual.
"""
def __init__(self, var_name=None, var_value=None, var_type_name=None, exp_type_name=None, message=None,
skip_frames=0):
"""
Create an H2OTypeError exception object.
:param message: error message that will be shown to the user. If not given, this message will be constructed
from ``var_name``, ``var_value``, etc.
:param var_name: name of the variable whose type is wrong (can be used for highlighting etc).
:param var_value: the value of the variable.
:param var_type_name: the name of the variable's actual type.
:param exp_type_name: the name of the variable's expected type.
        :param skip_frames: how many auxiliary function calls have been made since the moment of the exception. This
            many local frames will be skipped in the output of the exception message. For example, if you want to check
            a variable's type and call a helper function ``assert_is_type()`` to do that job for you, then
            ``skip_frames`` should be 1 (thus making the call to ``assert_is_type`` invisible).
"""
super(H2OTypeError, self).__init__(message)
self._var_name = var_name
self._var_value = var_value
self._var_type_name = var_type_name or str(type(var_value))
self._exp_type_name = exp_type_name
self._message = message
self._skip_frames = skip_frames
def __str__(self):
"""Used when printing out the exception message."""
if self._message:
return self._message
# Otherwise construct the message
var = self._var_name
val = self._var_value
atn = self._var_type_name
etn = self._exp_type_name or ""
article = "an" if etn.lstrip("?")[0] in "aioeH" else "a"
return "Argument `{var}` should be {an} {expected_type}, got {actual_type} {value}".\
format(var=var, an=article, expected_type=etn, actual_type=atn, value=val)
@property
def var_name(self):
"""Variable name."""
return self._var_name
@property
def skip_frames(self):
"""Number of local frames to skip when printing our the stacktrace."""
return self._skip_frames
#-----------------------------------------------------------------------------------------------------------------------
# Backend exceptions
#-----------------------------------------------------------------------------------------------------------------------
class H2OStartupError(H2OSoftError):
"""Raised by H2OLocalServer when the class fails to launch a server."""
class H2OConnectionError(H2OSoftError):
"""
Raised when connection to an H2O server cannot be established.
This can be raised if the connection was not initialized; or the server cannot be reached at the specified address;
or there is an authentication error; or the request times out; etc.
"""
# This should extend Exception as well; however, in older code versions all exceptions were
# EnvironmentErrors, so for backward compatibility H2OResponseError extends EnvironmentError.
class H2OResponseError(H2OError, EnvironmentError):
"""Raised when the server encounters a user error and sends back an H2OErrorV3 response."""
class H2OServerError(H2OError):
"""
Raised when any kind of server error is encountered.
This includes: server returning HTTP status 500; or server sending malformed JSON; or server returning an
unexpected response (e.g. lacking a "__schema" field); or server indicating that it is in an unhealthy state; etc.
"""
def __init__(self, message, stacktrace=None):
"""
Instantiate a new H2OServerError exception.
:param message: error message describing the exception.
:param stacktrace: (optional, list(str)) server-side stacktrace, if available. This will be printed out by
our custom except hook (see debugging.py).
"""
super(H2OServerError, self).__init__(message)
self.stacktrace = stacktrace
#-----------------------------------------------------------------------------------------------------------------------
# H2OJobCancelled
#-----------------------------------------------------------------------------------------------------------------------
class H2OJobCancelled(H2OError):
"""
Raised when the user interrupts a running job.
    By default, this exception will not trigger any output (as if it were caught and ignored), however the user can
    still catch it explicitly and perform a custom action.
"""
# source: recap/pumpkin | examples/tweeter/filters-cat2/filterhaikus.py (license: mit)
__author__ = 'reggie'
###START-CONF
##{
##"object_name": "filterhaikus",
##"object_poi": "qpwo-2345",
##"parameters": [
## {
## "name": "tweet",
## "description": "english tweets",
## "required": true,
## "type": "TweetString",
## "format": "",
## "state" : "ENGLISH"
## }
## ],
##"return": [
## {
## "name": "tweet",
## "description": "haiku tweet",
## "required": true,
## "type": "TweetString",
## "format": "",
## "state" : "HAIKU|NO_HAIKU"
## }
##
## ] }
##END-CONF
import re
import nltk
from nltk.corpus import cmudict
from curses.ascii import isdigit
from pumpkin import PmkSeed
class filterhaikus(PmkSeed.Seed):
def __init__(self, context, poi=None):
PmkSeed.Seed.__init__(self, context,poi)
self.d = None
pass
def on_load(self):
print "Loading: " + self.__class__.__name__
wd = self.context.getWorkingDir()
nltk.data.path.append(wd + "nltk_data")
self.d = cmudict.dict()
pass
def run(self, pkt, tweet):
#print "RECEIVED TWEET: "+tweet
m = re.search('W(\s+)(.*)(\n)', tweet, re.S)
try:
if m:
tw = m.group(2)
if self.is_haiku(tw):
self.dispatch(pkt, tweet, "HAIKU")
#else:
# self.dispatch(pkt, tweet, "RUBBSIH")
except:
pass
pass
def is_haiku(self, text):
text_orig = text
text = text.lower()
if filter(str.isdigit, str(text)):
return False
words = nltk.wordpunct_tokenize(re.sub('[^a-zA-Z_ ]', '',text))
#print words
syl_count = 0
word_count = 0
haiku_line_count = 0
lines = []
d = self.d
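        # cmudict maps each word to phoneme lists such as ['HH', 'AY1', 'K', 'UW0'];
        # phonemes ending in a stress digit are vowels, so counting them gives the
        # syllable count. The 5/7/5 haiku structure is then checked at cumulative
        # syllable totals of 5, 12 and 17.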
for word in words:
if word.lower() in d.keys():
syl_count += [len(list(y for y in x if isdigit(y[-1]))) for x in
d[word.lower()]][0]
if haiku_line_count == 0:
if syl_count == 5:
lines.append(word)
haiku_line_count += 1
elif haiku_line_count == 1:
if syl_count == 12:
lines.append(word)
haiku_line_count += 1
else:
if syl_count == 17:
lines.append(word)
haiku_line_count += 1
if syl_count == 17:
try:
final_lines = []
str_tmp = ""
counter = 0
for word in text_orig.split():
str_tmp += str(word) + " "
if lines[counter].lower() in str(word).lower():
final_lines.append(str_tmp.strip())
counter += 1
str_tmp = ""
if len(str_tmp) > 0:
final_lines.append(str_tmp.strip())
return True
except Exception as e:
print e
return False
else:
return False
return True
#!/usr/bin/env python
# source: Diegojnb/JdeRobot | src/drivers/MAVLinkServer/MAVProxy/modules/mavproxy_misc.py (license: gpl-3.0)
'''miscellaneous commands'''
import time, math
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
from os import kill
from signal import signal
from subprocess import PIPE, Popen
class RepeatCommand(object):
'''repeated command object'''
def __init__(self, period, cmd):
self.period = period
self.cmd = cmd
self.event = mavutil.periodic_event(1.0/period)
def __str__(self):
return "Every %.1f seconds: %s" % (self.period, self.cmd)
def run_command(args, cwd = None, shell = False, timeout = None, env = None):
'''
Run a shell command with a timeout.
See http://stackoverflow.com/questions/1191374/subprocess-with-timeout
'''
from subprocess import PIPE, Popen
from StringIO import StringIO
import fcntl, os, signal
p = Popen(args, shell = shell, cwd = cwd, stdout = PIPE, stderr = PIPE, env = env)
tstart = time.time()
buf = StringIO()
# try to make it non-blocking
try:
fcntl.fcntl(p.stdout, fcntl.F_SETFL, fcntl.fcntl(p.stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
except Exception:
pass
while True:
time.sleep(0.1)
retcode = p.poll()
try:
buf.write(p.stdout.read())
except Exception:
pass
if retcode is not None:
break
if timeout is not None and time.time() > tstart + timeout:
print("timeout in process %u" % p.pid)
try:
os.kill(p.pid, signal.SIGKILL)
except OSError:
pass
p.wait()
return buf.getvalue()
class MiscModule(mp_module.MPModule):
def __init__(self, mpstate):
super(MiscModule, self).__init__(mpstate, "misc", "misc commands")
self.add_command('alt', self.cmd_alt, "show altitude information")
self.add_command('up', self.cmd_up, "adjust pitch trim by up to 5 degrees")
self.add_command('reboot', self.cmd_reboot, "reboot autopilot")
self.add_command('time', self.cmd_time, "show autopilot time")
self.add_command('shell', self.cmd_shell, "run shell command")
self.add_command('changealt', self.cmd_changealt, "change target altitude")
self.add_command('land', self.cmd_land, "auto land")
self.add_command('repeat', self.cmd_repeat, "repeat a command at regular intervals",
["<add|remove|clear>"])
self.add_command('version', self.cmd_version, "show version")
self.add_command('rcbind', self.cmd_rcbind, "bind RC receiver")
self.repeats = []
def altitude_difference(self, pressure1, pressure2, ground_temp):
'''calculate barometric altitude'''
scaling = pressure2 / pressure1
temp = ground_temp + 273.15
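        # Note: exp(0.190259 * log(scaling)) is just scaling**0.190259 written out,
        # i.e. the hypsometric relation dh = 153.8462 * T * (1 - (p2/p1)**0.190259)
        # with T in kelvin; the constants follow a standard-atmosphere lapse-rate model.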
return 153.8462 * temp * (1.0 - math.exp(0.190259 * math.log(scaling)))
def qnh_estimate(self):
'''estimate QNH pressure from GPS altitude and scaled pressure'''
alt_gps = self.master.field('GPS_RAW_INT', 'alt', 0) * 0.001
pressure2 = self.master.field('SCALED_PRESSURE', 'press_abs', 0)
ground_temp = self.get_mav_param('GND_TEMP', 21)
temp = ground_temp + 273.15
pressure1 = pressure2 / math.exp(math.log(1.0 - (alt_gps / (153.8462 * temp))) / 0.190259)
return pressure1
def cmd_alt(self, args):
'''show altitude'''
print("Altitude: %.1f" % self.status.altitude)
qnh_pressure = self.get_mav_param('AFS_QNH_PRESSURE', None)
if qnh_pressure is not None and qnh_pressure > 0:
ground_temp = self.get_mav_param('GND_TEMP', 21)
pressure = self.master.field('SCALED_PRESSURE', 'press_abs', 0)
qnh_alt = self.altitude_difference(qnh_pressure, pressure, ground_temp)
print("QNH Alt: %u meters %u feet for QNH pressure %.1f" % (qnh_alt, qnh_alt*3.2808, qnh_pressure))
print("QNH Estimate: %.1f millibars" % self.qnh_estimate())
def cmd_shell(self, args):
'''shell command'''
print(run_command(args, shell=False, timeout=3))
def cmd_up(self, args):
'''adjust TRIM_PITCH_CD up by 5 degrees'''
if len(args) == 0:
adjust = 5.0
else:
adjust = float(args[0])
old_trim = self.get_mav_param('TRIM_PITCH_CD', None)
if old_trim is None:
print("Existing trim value unknown!")
return
new_trim = int(old_trim + (adjust*100))
if math.fabs(new_trim - old_trim) > 1000:
print("Adjustment by %d too large (from %d to %d)" % (adjust*100, old_trim, new_trim))
return
print("Adjusting TRIM_PITCH_CD from %d to %d" % (old_trim, new_trim))
self.param_set('TRIM_PITCH_CD', new_trim)
def cmd_reboot(self, args):
'''reboot autopilot'''
self.master.reboot_autopilot()
def cmd_time(self, args):
'''show autopilot time'''
tusec = self.master.field('SYSTEM_TIME', 'time_unix_usec', 0)
if tusec == 0:
print("No SYSTEM_TIME time available")
return
print("%s (%s)\n" % (time.ctime(tusec * 1.0e-6), time.ctime()))
def cmd_changealt(self, args):
'''change target altitude'''
if len(args) < 1:
print("usage: changealt <relaltitude>")
return
relalt = float(args[0])
self.master.mav.mission_item_send(self.settings.target_system,
self.settings.target_component,
0,
3,
mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
3, 1, 0, 0, 0, 0,
0, 0, relalt)
print("Sent change altitude command for %.1f meters" % relalt)
def cmd_land(self, args):
'''auto land commands'''
if len(args) < 1:
self.master.mav.command_long_send(self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_LAND_START,
0, 0, 0, 0, 0, 0, 0, 0)
elif args[0] == 'abort':
self.master.mav.command_long_send(self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_GO_AROUND,
0, 0, 0, 0, 0, 0, 0, 0)
else:
print("Usage: land [abort]")
def cmd_version(self, args):
'''show version'''
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
mavutil.mavlink.MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES,
0,
1, 0, 0, 0, 0, 0, 0)
def cmd_rcbind(self, args):
'''start RC bind'''
if len(args) < 1:
print("Usage: rcbind <dsmmode>")
return
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
mavutil.mavlink.MAV_CMD_START_RX_PAIR,
0,
float(args[0]), 0, 0, 0, 0, 0, 0)
def cmd_repeat(self, args):
'''repeat a command at regular intervals'''
if len(args) == 0:
if len(self.repeats) == 0:
print("No repeats")
return
for i in range(len(self.repeats)):
print("%u: %s" % (i, self.repeats[i]))
return
if args[0] == 'add':
if len(args) < 3:
print("Usage: repeat add PERIOD CMD")
return
self.repeats.append(RepeatCommand(float(args[1]), " ".join(args[2:])))
elif args[0] == 'remove':
if len(args) < 2:
print("Usage: repeat remove INDEX")
return
i = int(args[1])
if i < 0 or i >= len(self.repeats):
print("Invalid index %d" % i)
return
self.repeats.pop(i)
return
elif args[0] == 'clean':
self.repeats = []
else:
print("Usage: repeat <add|remove|clean>")
def idle_task(self):
'''called on idle'''
for r in self.repeats:
if r.event.trigger():
self.mpstate.functions.process_stdin(r.cmd, immediate=True)
def init(mpstate):
'''initialise module'''
return MiscModule(mpstate)
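
# Illustrative sketch (added; not part of the original module): run_command()
# above polls the child process and SIGKILLs it once the deadline passes, so a
# hung shell command cannot block the caller. 'echo hello' is a stand-in demo
# command chosen for this sketch.
if __name__ == "__main__":
    print(run_command(["echo", "hello"], timeout=3))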
# source: cjellick/rancher | tests/integration/suite/test_globaldns.py (license: apache-2.0)
from .common import random_str
from rancher import ApiError
from kubernetes.client import CustomObjectsApi
from kubernetes.client import CoreV1Api
import pytest
import time
import kubernetes
import base64
def test_dns_fqdn_unique(admin_mc):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
fqdn = random_str() + ".example.com"
globaldns_entry = \
client.create_global_dns(fqdn=fqdn, providerId=provider_name)
with pytest.raises(ApiError) as e:
client.create_global_dns(fqdn=fqdn, providerId=provider_name)
assert e.value.error.status == 422
client.delete(globaldns_entry)
client.delete(globaldns_provider)
def test_dns_provider_deletion(admin_mc):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
fqdn = random_str() + ".example.com"
provider_id = "cattle-global-data:"+provider_name
globaldns_entry = \
client.create_global_dns(fqdn=fqdn, providerId=provider_id)
with pytest.raises(ApiError) as e:
client.delete(globaldns_provider)
assert e.value.error.status == 403
client.delete(globaldns_entry)
client.delete(globaldns_provider)
def test_share_globaldns_provider_entry(admin_mc, user_factory,
remove_resource):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
# Add regular user as member to gdns provider
user_member = user_factory()
remove_resource(user_member)
user_client = user_member.client
members = [{"userPrincipalId": "local://" + user_member.user.id,
"accessType": "owner"}]
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret},
members=members)
remove_resource(globaldns_provider)
fqdn = random_str() + ".example.com"
globaldns_entry = \
client.create_global_dns(fqdn=fqdn, providerId=provider_name,
members=members)
remove_resource(globaldns_entry)
# Make sure creator can access both, provider and entry
gdns_provider_id = "cattle-global-data:" + provider_name
gdns_provider = client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
gdns_entry_id = "cattle-global-data:" + globaldns_entry.name
gdns = client.by_id_global_dns(gdns_entry_id)
assert gdns is not None
# user should be able to list this gdns provider
api_instance = kubernetes.client.RbacAuthorizationV1Api(
admin_mc.k8s_client)
provider_rb_name = provider_name + "-gp-a"
wait_to_ensure_user_in_rb_subject(api_instance, provider_rb_name,
user_member.user.id)
gdns_provider = user_client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
# user should be able to list this gdns entry
entry_rb_name = globaldns_entry.name + "-g-a"
wait_to_ensure_user_in_rb_subject(api_instance, entry_rb_name,
user_member.user.id)
gdns = user_client.by_id_global_dns(gdns_entry_id)
assert gdns is not None
def test_user_access_global_dns(admin_mc, user_factory, remove_resource):
user1 = user_factory()
remove_resource(user1)
user_client = user1.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
user_client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
fqdn = random_str() + ".example.com"
globaldns_entry = \
user_client.create_global_dns(fqdn=fqdn, providerId=provider_name)
remove_resource(globaldns_entry)
# Make sure creator can access both, provider and entry
api_instance = kubernetes.client.RbacAuthorizationV1Api(
admin_mc.k8s_client)
provider_rb_name = provider_name + "-gp-a"
wait_to_ensure_user_in_rb_subject(api_instance, provider_rb_name,
user1.user.id)
gdns_provider_id = "cattle-global-data:" + provider_name
gdns_provider = user_client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
entry_rb_name = globaldns_entry.name + "-g-a"
wait_to_ensure_user_in_rb_subject(api_instance, entry_rb_name,
user1.user.id)
gdns_entry_id = "cattle-global-data:" + globaldns_entry.name
gdns = user_client.by_id_global_dns(gdns_entry_id)
assert gdns is not None
def test_update_gdns_entry(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
fqdn = random_str() + ".example.com"
gdns_entry_name = random_str()
globaldns_entry = \
client.create_global_dns(name=gdns_entry_name,
fqdn=fqdn, providerId=provider_name)
remove_resource(globaldns_entry)
new_fqdn = random_str()
wait_for_gdns_entry_creation(admin_mc, gdns_entry_name)
client.update(globaldns_entry, fqdn=new_fqdn)
wait_for_gdns_update(admin_mc, gdns_entry_name, new_fqdn)
def test_create_globaldns_provider_regular_user(remove_resource,
user_factory):
provider_name = random_str()
access = random_str()
secret = random_str()
user = user_factory()
globaldns_provider = \
user.client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
def wait_to_ensure_user_in_rb_subject(api, name,
userId, timeout=60):
found = False
interval = 0.5
start = time.time()
while not found:
time.sleep(interval)
interval *= 2
try:
rb = api.read_namespaced_role_binding(name, "cattle-global-data")
for i in range(0, len(rb.subjects)):
if rb.subjects[i].name == userId:
found = True
except kubernetes.client.rest.ApiException:
found = False
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for user to get added to rb")
def wait_for_gdns_update(admin_mc, gdns_entry_name, new_fqdn, timeout=60):
client = admin_mc.client
updated = False
interval = 0.5
start = time.time()
id = "cattle-global-data:" + gdns_entry_name
while not updated:
if time.time() - start > timeout:
raise Exception('Timeout waiting for gdns entry to update')
gdns = client.by_id_global_dns(id)
if gdns is not None and gdns.fqdn == new_fqdn:
updated = True
time.sleep(interval)
interval *= 2
def wait_for_gdns_entry_creation(admin_mc, gdns_name, timeout=60):
start = time.time()
interval = 0.5
client = admin_mc.client
found = False
while not found:
if time.time() - start > timeout:
raise Exception('Timeout waiting for globalDNS entry creation')
gdns = client.list_global_dns(name=gdns_name)
if len(gdns) > 0:
found = True
time.sleep(interval)
interval *= 2
def test_cloudflare_provider_proxy_setting(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
apiEmail = random_str()
apiKey = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
cloudflareProviderConfig={
'proxySetting': True,
'apiEmail': apiEmail,
'apiKey': apiKey})
gdns_provider_id = "cattle-global-data:" + provider_name
gdns_provider = client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
assert gdns_provider.cloudflareProviderConfig.proxySetting is True
remove_resource(globaldns_provider)
def test_dns_fqdn_hostname(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
fqdn = random_str() + ".example!!!*.com"
with pytest.raises(ApiError) as e:
client.create_global_dns(fqdn=fqdn, providerId=provider_name)
assert e.value.error.status == 422
def test_globaldnsprovider_secret(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
access_key = random_str()
secret_key = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access_key,
'secretKey': secret_key})
# Test password not present in api
assert globaldns_provider is not None
assert globaldns_provider.route53ProviderConfig.get('secretKey') is None
crdClient, k8sclient = getClients(admin_mc)
ns, name = globaldns_provider["id"].split(":")
# Test password is in k8s secret after creation
verifyGDNSPassword(crdClient, k8sclient, ns, name, secret_key)
# Test updating password
newSecretPassword = random_str()
_ = client.update(globaldns_provider, route53ProviderConfig={
'accessKey': access_key,
'secretKey': newSecretPassword})
verifyGDNSPassword(crdClient, k8sclient, ns, name, newSecretPassword)
def getClients(admin_mc):
return CustomObjectsApi(admin_mc.k8s_client), \
CoreV1Api(admin_mc.k8s_client)
def verifyGDNSPassword(crdClient, k8sclient, ns, name, secretPassword):
k8es = crdClient.get_namespaced_custom_object(
"management.cattle.io", "v3", ns, 'globaldnsproviders', name)
secretName = k8es['spec']['route53ProviderConfig']['secretKey']
ns, name = secretName.split(":")
assert ns is not None
assert name is not None
secret = k8sclient.read_namespaced_secret(name, ns)
assert base64.b64decode(secret.data[name]).\
decode("utf-8") == secretPassword
#!/usr/bin/env python
# source: bioinfo-center-pasteur-fr/ReGaTE | regate/remag.py (license: gpl-2.0)
# -*- coding: utf-8 -*-
"""
Created on Jun. 16, 2014
@author: Olivia Doppelt-Azeroual, CIB-C3BI, Institut Pasteur, Paris
@author: Fabien Mareuil, CIB-C3BI, Institut Pasteur, Paris
@author: Hervé Ménager, CIB-C3BI, Institut Pasteur, Paris
@contact: [email protected]
@project: ReGaTE
@githuborganization: bioinfo-center-pasteur-fr
"""
import string
import os
import sys
import ruamel.yaml
import rdflib
import argparse
import regate
from bioblend.galaxy import GalaxyInstance
from bioblend.galaxy.client import ConnectionError
from bioblend.galaxy.datatypes import DatatypesClient
from bioblend.galaxy.client import Client
class EdamDatatypesClient(DatatypesClient):
"""
Override of the bioblend DatatypesClient class to add a get_edam_formats method
"""
def get_edam_formats(self):
"""
Displays a collection (dict) of edam formats.
:rtype: dict
:return: A dict of individual edam_format.
For example::
{
"RData": "format_2333",
"Roadmaps": "format_2561",
"Sequences": "format_1929",
"ab1": "format_2333",
"acedb": "format_2330",
"affybatch": "format_2331",
"afg": "format_2561",
"arff": "format_2330",
"asn1": "format_2330",
"asn1-binary": "format_2333"}
"""
url = self.gi._make_url(self)
url = '/'.join([url, "edam_formats"])
return Client._get(self, url=url)
def is_true(value):
"""
:param value:
:return:
"""
return value.lower() == "true"
def is_edamtype(dic_child):
"""
:param dic_child:
:return:
"""
if 'edam' in dic_child:
if dic_child['edam'] not in ['', "None", "Null"]:
return True
else:
return False
else:
return False
def return_formatted_edam(edam):
"""
:param edam:
:return:
"""
edam = string.split(edam, '_')
edam = "EDAM_{}:{:0>4d}".format(edam[0], int(edam[1]))
return edam
def http_to_edamform(url):
"""
:param url:
:return:
"""
base = string.split(os.path.basename(url), '_')
return str("EDAM_{}:{:0>4d}").format(base[0], int(base[1]))
def edam_to_dict(edam_file):
"""
:param edam_file:
:return:
"""
g = rdflib.Graph()
g.parse(edam_file)
query1 = """SELECT ?format ?is_format_of WHERE {
?format rdfs:subClassOf ?format_sc .
?format_sc owl:onProperty
<http://edamontology.org/is_format_of> .
?format_sc owl:someValuesFrom ?is_format_of
}"""
query2 = """SELECT ?format ?superformat WHERE {
?format rdfs:subClassOf ?superformat .
?superformat oboInOwl:inSubset <http://purl.obolibrary.org/obo/edam#formats>
}"""
query3 = """SELECT ?format ?label WHERE {
?format rdfs:label ?label.
?format oboInOwl:inSubset ?subset.
FILTER (?subset = <http://purl.obolibrary.org/obo/edam#formats> ||
?subset = <http://purl.obolibrary.org/obo/edam#data>)}"""
# Property = {"oboInOwl": "http://www.geneontology.org/formats/oboInOwl#"}
format_with_formats = {}
format_with_data = {}
term_labels = {}
for row in g.query(query1):
format_with_data[http_to_edamform(row[0])] = http_to_edamform(row[1])
for row in g.query(query2):
child_format = http_to_edamform(row[0])
parent_format = http_to_edamform(row[1])
if child_format in format_with_formats:
format_with_formats[child_format].append(parent_format)
else:
format_with_formats[child_format] = [parent_format]
for row in g.query(query3):
term_labels[http_to_edamform(row[0].toPython())]=str(row[1].toPython())
return format_with_formats, format_with_data, term_labels
def add_data(formats, relation_formats, relation_data, list_edam_data):
"""
:param formats:
:param relation_formats:
:param relation_data:
:param list_edam_data:
:return:
"""
if len(formats) != 0:
for format_tool in formats:
if format_tool in relation_data:
list_edam_data.append(relation_data[format_tool])
formats.remove(format_tool)
return add_data(formats, relation_formats, relation_data, list_edam_data)
elif format_tool in relation_formats:
formats.remove(format_tool)
formats = formats + relation_formats[format_tool]
return add_data(formats, relation_formats, relation_data, list_edam_data)
else:
sys.stdout.write("NO FORMAT AND NO DATA FOR {0}\n".format(format_tool))
formats.remove(format_tool)
if format_tool in ("Not Mapped Yet", "NONE Known"):
return add_data(formats, relation_formats, relation_data, list_edam_data)
else:
list_edam_data.append("EDAM_data:0006")
return add_data(formats, relation_formats, relation_data, list_edam_data)
else:
return list_edam_data
def add_datas(dict_map, rel_format_formats, rel_format_data, term_labels):
"""
:param dict_map:
:param rel_format_formats:
:param rel_format_data:
:return:
"""
import copy
for key, value in dict_map.iteritems():
formats = copy.copy(value)
datas = add_data(formats, rel_format_formats, rel_format_data, list_edam_data=[])
datas_v = [{'uri':data_item,'term':term_labels.get(data_item,'')} for data_item in datas]
formats_v = [{'uri':format_item,'term':term_labels.get(format_item,'')} for format_item in value]
dict_map[key] = {'formats':formats_v, 'data':datas_v}
return dict_map
def dict_to_yaml(mapping_dict, yamlfile):
"""
:param mapping_dict:
:param yamlfile:
:return:
"""
stream = file(yamlfile, 'w')
ruamel.yaml.dump(mapping_dict, stream, default_flow_style=False)
def galaxy_to_edamdict(url, key):
"""
:param url:
:param key:
:return:
"""
gi = GalaxyInstance(url, key=key)
datatypeclient = EdamDatatypesClient(gi)
try:
dict_map = datatypeclient.get_edam_formats()
except ConnectionError, e:
        raise ConnectionError(
            '{0}; the Galaxy data can\'t be used. Galaxy may be too old, please update it\n'.format(e))
dictmapping = {}
for key, value in dict_map.iteritems():
form_edam = return_formatted_edam(value)
dictmapping[str(key)] = [form_edam]
return dictmapping
def run():
parser = argparse.ArgumentParser(description="Galaxy instance tool\
parsing, for integration in biotools/bioregistry")
parser.add_argument("--config_file", help="config.ini file for regate or remag")
parser.add_argument("--templateconfig", action='store_true', help="generate a config_file template")
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if not args.templateconfig:
if not os.path.exists(args.config_file):
raise IOError("{0} doesn't exist".format(args.config_file))
config = regate.Config(args.config_file, "remag")
dict_mapping = galaxy_to_edamdict(config.galaxy_url_api, config.api_key)
relation_format_formats, relation_format_data, term_labels = edam_to_dict(config.edam_file)
yaml_file = config.output_yaml
dict_mapping = add_datas(dict_mapping, relation_format_formats, relation_format_data, term_labels)
dict_to_yaml(dict_mapping, yaml_file)
elif args.templateconfig:
regate.generate_template()
else:
parser.print_help()
# source: dcclogin/TextGenerator | TitleCrawler/ccf_conference/categories/network/lcn2015.py (license: mit)
# -*- coding: utf-8 -*-
import re
import copy
import random
import os, sys
import MySQLdb
import requests
from time import sleep
from threading import Thread
from bs4 import BeautifulSoup
reload(sys)
sys.setdefaultencoding('utf-8')
clade = 'http://dblp.uni-trier.de/db/conf/lcn/'
months = {
'January': '01',
'February': '02',
'March': '03',
'April': '04',
'May': '05',
'June': '06',
'July': '07',
'August': '08',
'September': '09',
'October': '10',
'November': '11',
'December': '12'
}
# regex to match months in <h2> tags
re_mons=r'(January|February|March|April|May|June|July|August|September|October|November|December)'
repeato_mons=r'([ /-]*'+re_mons+r'*)*'
pattern_mons=re_mons+repeato_mons
# regex to match years in <h2> tags
re_year=r'((19|20)\d+)'
repeato_year=r'([ /-]*'+re_year+r'*)*'
pattern_year=re_year+repeato_year
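# e.g. pattern_mons matches "October" or "October/November", and pattern_year
# matches "2015", in a venue string such as "Clearwater Beach, FL, USA, October 2015".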
def get_leaves(clade):
r = requests.get(clade)
if r.status_code == 200:
soup = BeautifulSoup(r.text, 'lxml')
leaves = []
late = soup.find('ul', class_='publ-list')
tags = late.find_all('div', class_='data', itemprop='headline')
for tag in tags:
leaves.append(tag.find_all('a')[-1]['href'])
return leaves
def sub_months(match_obj):
""" transfer months to digital form (in-place change)
"""
for m in months:
match_obj = re.sub(m, months[m], match_obj)
return match_obj
def get_yymm(leaf):
r = requests.get(leaf)
if r.status_code == 200:
soup = BeautifulSoup(r.text, 'lxml')
lat = soup.find('div', class_='data', itemprop='headline')
tag = lat.find('span', class_='title', itemprop='name')
txt = tag.get_text()
try:
match_obj_mons = re.search(pattern_mons, txt)
match_obj_mons = match_obj_mons.group().strip()
match_obj_mons = sub_months(match_obj_mons)
month = match_obj_mons
except Exception, error_mons:
print '[-]', error_mons
month = None
try:
match_obj_year = re.search(pattern_year, txt)
match_obj_year = match_obj_year.group().strip()
year = match_obj_year
except Exception, error_year:
print '[-]', error_year
year = None
return year, month
def get_titles(leaf):
r = requests.get(leaf)
if r.status_code == 200:
soup = BeautifulSoup(r.text, 'lxml')
title_lst = []
tags = soup.find_all('span', class_='title', itemprop='name')
for tag in tags:
title_lst.append(tag.get_text())
return title_lst
def incert_mysql(year, month, title_lst):
try:
tablename = 'papertitle'
conn = MySQLdb.connect(host='127.0.0.1', user='root', passwd='13917331612', db='conference')
c = conn.cursor()
conn.set_character_set('utf8')
c.execute('SET NAMES utf8;')
c.execute('SET CHARACTER SET utf8;')
c.execute('SET character_set_connection=utf8;')
for p in title_lst:
try:
sql = "insert into " + tablename + "(year, month, name, title, class, category) \
values(%s, %s, %s, %s, %s, %s)"
param = (year, month, 'LCN', p, 'C', 'network')
c.execute(sql, param)
print ">>>> [+] Insert paper <%s> : done." %(p)
except MySQLdb.Error, e:
print "[-] Mysql Error %d: %s" % (e.args[0], e.args[1])
continue
conn.commit()
c.close()
except MySQLdb.Error, e:
print "[-] Mysql Error %d: %s" % (e.args[0], e.args[1])
return None
def build():
leaves = get_leaves(clade)
for leaf in leaves:
title_lst = get_titles(leaf)
year, month = get_yymm(leaf)
incert_mysql(year, month, title_lst)
return None
build()
#!/usr/bin/python
# source: cmelange/ansible | lib/ansible/modules/cloud/misc/virt.py (license: gpl-3.0)
# -*- coding: utf-8 -*-
"""
Virt management features
Copyright 2007, 2012 Red Hat, Inc
Michael DeHaan <[email protected]>
Seth Vidal <[email protected]>
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: virt
short_description: Manages virtual machines supported by libvirt
description:
- Manages virtual machines supported by I(libvirt).
version_added: "0.2"
options:
name:
description:
- name of the guest VM being managed. Note that VM must be previously
defined with xml.
required: true
default: null
aliases: []
state:
description:
- Note that there may be some lag for state requests like C(shutdown)
since these refer only to VM states. After starting a guest, it may not
be immediately accessible.
required: false
choices: [ "running", "shutdown", "destroyed", "paused" ]
default: "no"
command:
description:
- in addition to state management, various non-idempotent commands are available. See examples
required: false
choices: ["create","status", "start", "stop", "pause", "unpause",
"shutdown", "undefine", "destroy", "get_xml",
"freemem", "list_vms", "info", "nodeinfo", "virttype", "define"]
autostart:
description:
- start VM at host startup
choices: [True, False]
version_added: "2.3"
default: null
uri:
description:
- libvirt connection uri
required: false
default: qemu:///system
xml:
description:
- XML document used with the define command
required: false
default: null
requirements:
- "python >= 2.6"
- "libvirt-python"
author:
- "Ansible Core Team"
- "Michael DeHaan"
- "Seth Vidal"
'''
EXAMPLES = '''
# a playbook task line:
- virt:
name: alpha
state: running
# /usr/bin/ansible invocations
# ansible host -m virt -a "name=alpha command=status"
# ansible host -m virt -a "name=alpha command=get_xml"
# ansible host -m virt -a "name=alpha command=create uri=lxc:///"
---
# a playbook example of defining and launching an LXC guest
tasks:
- name: define vm
virt:
name: foo
command: define
xml: "{{ lookup('template', 'container-template.xml.j2') }}"
uri: 'lxc:///'
- name: start vm
virt:
name: foo
state: running
uri: 'lxc:///'
'''
RETURN = '''
# for list_vms command
list_vms:
description: The list of vms defined on the remote system
type: dictionary
returned: success
sample: [
"build.example.org",
"dev.example.org"
]
# for status command
status:
description: The status of the VM, among running, crashed, paused and shutdown
type: string
sample: "success"
returned: success
'''
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE=2
import sys
try:
import libvirt
except ImportError:
HAS_VIRT = False
else:
HAS_VIRT = True
ALL_COMMANDS = []
VM_COMMANDS = ['create','status', 'start', 'stop', 'pause', 'unpause',
'shutdown', 'undefine', 'destroy', 'get_xml', 'define']
HOST_COMMANDS = ['freemem', 'list_vms', 'info', 'nodeinfo', 'virttype']
ALL_COMMANDS.extend(VM_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)
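# Coarse mapping of libvirt virDomainState integers (0=NOSTATE .. 6=CRASHED)
# onto the state names used by this module.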
VIRT_STATE_NAME_MAP = {
0 : "running",
1 : "running",
2 : "running",
3 : "paused",
4 : "shutdown",
5 : "shutdown",
6 : "crashed"
}
class VMNotFound(Exception):
pass
class LibvirtConnection(object):
def __init__(self, uri, module):
self.module = module
cmd = "uname -r"
rc, stdout, stderr = self.module.run_command(cmd)
if "xen" in stdout:
conn = libvirt.open(None)
elif "esx" in uri:
auth = [[libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_NOECHOPROMPT], [], None]
conn = libvirt.openAuth(uri, auth)
else:
conn = libvirt.open(uri)
if not conn:
raise Exception("hypervisor connection failure")
self.conn = conn
def find_vm(self, vmid):
"""
Extra bonus feature: vmid = -1 returns a list of everything
"""
conn = self.conn
vms = []
# this block of code borrowed from virt-manager:
# get working domain's name
ids = conn.listDomainsID()
for id in ids:
vm = conn.lookupByID(id)
vms.append(vm)
# get defined domain
names = conn.listDefinedDomains()
for name in names:
vm = conn.lookupByName(name)
vms.append(vm)
if vmid == -1:
return vms
for vm in vms:
if vm.name() == vmid:
return vm
raise VMNotFound("virtual machine %s not found" % vmid)
def shutdown(self, vmid):
return self.find_vm(vmid).shutdown()
def pause(self, vmid):
return self.suspend(self.conn,vmid)
def unpause(self, vmid):
return self.resume(self.conn,vmid)
def suspend(self, vmid):
return self.find_vm(vmid).suspend()
def resume(self, vmid):
return self.find_vm(vmid).resume()
def create(self, vmid):
return self.find_vm(vmid).create()
def destroy(self, vmid):
return self.find_vm(vmid).destroy()
def undefine(self, vmid):
return self.find_vm(vmid).undefine()
def get_status2(self, vm):
state = vm.info()[0]
return VIRT_STATE_NAME_MAP.get(state,"unknown")
def get_status(self, vmid):
state = self.find_vm(vmid).info()[0]
return VIRT_STATE_NAME_MAP.get(state,"unknown")
def nodeinfo(self):
return self.conn.getInfo()
def get_type(self):
return self.conn.getType()
def get_xml(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.XMLDesc(0)
def get_maxVcpus(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.maxVcpus()
def get_maxMemory(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.maxMemory()
def getFreeMemory(self):
return self.conn.getFreeMemory()
def get_autostart(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.autostart()
def set_autostart(self, vmid, val):
vm = self.conn.lookupByName(vmid)
return vm.setAutostart(val)
def define_from_xml(self, xml):
return self.conn.defineXML(xml)
class Virt(object):
def __init__(self, uri, module):
self.module = module
self.uri = uri
def __get_conn(self):
self.conn = LibvirtConnection(self.uri, self.module)
return self.conn
def get_vm(self, vmid):
self.__get_conn()
return self.conn.find_vm(vmid)
def state(self):
vms = self.list_vms()
state = []
for vm in vms:
state_blurb = self.conn.get_status(vm)
state.append("%s %s" % (vm,state_blurb))
return state
def info(self):
vms = self.list_vms()
info = dict()
for vm in vms:
data = self.conn.find_vm(vm).info()
# libvirt returns maxMem, memory, and cpuTime as long()'s, which
# xmlrpclib tries to convert to regular int's during serialization.
# This throws exceptions, so convert them to strings here and
# assume the other end of the xmlrpc connection can figure things
# out or doesn't care.
info[vm] = {
"state" : VIRT_STATE_NAME_MAP.get(data[0],"unknown"),
"maxMem" : str(data[1]),
"memory" : str(data[2]),
"nrVirtCpu" : data[3],
"cpuTime" : str(data[4]),
}
info[vm]["autostart"] = self.conn.get_autostart(vm)
return info
def nodeinfo(self):
self.__get_conn()
info = dict()
data = self.conn.nodeinfo()
info = {
"cpumodel" : str(data[0]),
"phymemory" : str(data[1]),
"cpus" : str(data[2]),
"cpumhz" : str(data[3]),
"numanodes" : str(data[4]),
"sockets" : str(data[5]),
"cpucores" : str(data[6]),
"cputhreads" : str(data[7])
}
return info
def list_vms(self, state=None):
self.conn = self.__get_conn()
vms = self.conn.find_vm(-1)
results = []
for x in vms:
try:
if state:
vmstate = self.conn.get_status2(x)
if vmstate == state:
results.append(x.name())
else:
results.append(x.name())
except:
pass
return results
def virttype(self):
return self.__get_conn().get_type()
def autostart(self, vmid, as_flag):
self.conn = self.__get_conn()
# Change autostart flag only if needed
if self.conn.get_autostart(vmid) != as_flag:
self.conn.set_autostart(vmid, as_flag)
return True
return False
def freemem(self):
self.conn = self.__get_conn()
return self.conn.getFreeMemory()
def shutdown(self, vmid):
""" Make the machine with the given vmid stop running. Whatever that takes. """
self.__get_conn()
self.conn.shutdown(vmid)
return 0
def pause(self, vmid):
""" Pause the machine with the given vmid. """
self.__get_conn()
return self.conn.suspend(vmid)
def unpause(self, vmid):
""" Unpause the machine with the given vmid. """
self.__get_conn()
return self.conn.resume(vmid)
def create(self, vmid):
""" Start the machine via the given vmid """
self.__get_conn()
return self.conn.create(vmid)
def start(self, vmid):
""" Start the machine via the given id/name """
self.__get_conn()
return self.conn.create(vmid)
def destroy(self, vmid):
""" Pull the virtual power from the virtual domain, giving it virtually no time to virtually shut down. """
self.__get_conn()
return self.conn.destroy(vmid)
def undefine(self, vmid):
""" Stop a domain, and then wipe it from the face of the earth. (delete disk/config file) """
self.__get_conn()
return self.conn.undefine(vmid)
def status(self, vmid):
"""
Return a state suitable for server consumption. Aka, codes.py values, not XM output.
"""
self.__get_conn()
return self.conn.get_status(vmid)
def get_xml(self, vmid):
"""
Receive a Vm id as input
Return an xml describing vm config returned by a libvirt call
"""
self.__get_conn()
return self.conn.get_xml(vmid)
def get_maxVcpus(self, vmid):
"""
Gets the max number of VCPUs on a guest
"""
self.__get_conn()
return self.conn.get_maxVcpus(vmid)
def get_max_memory(self, vmid):
"""
Gets the max memory on a guest
"""
self.__get_conn()
return self.conn.get_MaxMemory(vmid)
def define(self, xml):
"""
Define a guest with the given xml
"""
self.__get_conn()
return self.conn.define_from_xml(xml)
def core(module):
state = module.params.get('state', None)
autostart = module.params.get('autostart', None)
guest = module.params.get('name', None)
command = module.params.get('command', None)
uri = module.params.get('uri', None)
xml = module.params.get('xml', None)
v = Virt(uri, module)
res = {}
if state and command=='list_vms':
res = v.list_vms(state=state)
if not isinstance(res, dict):
res = { command: res }
return VIRT_SUCCESS, res
if state:
if not guest:
module.fail_json(msg = "state change requires a guest specified")
        if state == 'running':
            # 'is' compared string identity rather than equality; use == / !=
            if v.status(guest) == 'paused':
                res['changed'] = True
                res['msg'] = v.unpause(guest)
            elif v.status(guest) != 'running':
                res['changed'] = True
                res['msg'] = v.start(guest)
        elif state == 'shutdown':
            if v.status(guest) != 'shutdown':
                res['changed'] = True
                res['msg'] = v.shutdown(guest)
        elif state == 'destroyed':
            if v.status(guest) != 'shutdown':
                res['changed'] = True
                res['msg'] = v.destroy(guest)
        elif state == 'paused':
            if v.status(guest) == 'running':
                res['changed'] = True
                res['msg'] = v.pause(guest)
else:
module.fail_json(msg="unexpected state")
return VIRT_SUCCESS, res
if autostart is not None and v.autostart(guest, autostart):
res['changed'] = True
if command:
if command in VM_COMMANDS:
if not guest:
module.fail_json(msg = "%s requires 1 argument: guest" % command)
if command == 'define':
if not xml:
module.fail_json(msg = "define requires xml argument")
try:
v.get_vm(guest)
except VMNotFound:
v.define(xml)
res = {'changed': True, 'created': guest}
return VIRT_SUCCESS, res
res = getattr(v, command)(guest)
if not isinstance(res, dict):
res = { command: res }
return VIRT_SUCCESS, res
elif hasattr(v, command):
res = getattr(v, command)()
if not isinstance(res, dict):
res = { command: res }
return VIRT_SUCCESS, res
else:
module.fail_json(msg="Command %s not recognized" % basecmd)
module.fail_json(msg="expected state or command parameter to be specified")
def main():
module = AnsibleModule(argument_spec=dict(
name = dict(aliases=['guest']),
state = dict(choices=['running', 'shutdown', 'destroyed', 'paused']),
autostart = dict(type='bool'),
command = dict(choices=ALL_COMMANDS),
uri = dict(default='qemu:///system'),
xml = dict(),
))
if not HAS_VIRT:
module.fail_json(
msg='The `libvirt` module is not importable. Check the requirements.'
)
rc = VIRT_SUCCESS
try:
rc, result = core(module)
except Exception:
e = get_exception()
module.fail_json(msg=str(e))
if rc != 0: # something went wrong emit the msg
module.fail_json(rc=rc, msg=result)
else:
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
if __name__ == '__main__':
main()
# source: atopuzov/nitro-python | nssrc/com/citrix/netscaler/nitro/resource/config/tm/tmsessionpolicy_tmglobal_binding.py (license: apache-2.0)
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class tmsessionpolicy_tmglobal_binding(base_resource) :
""" Binding class showing the tmglobal that can be bound to tmsessionpolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._activepolicy = 0
self._name = ""
self.___count = 0
@property
def boundto(self) :
ur"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
ur"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def name(self) :
ur"""Name of the session policy for which to display detailed information.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the session policy for which to display detailed information.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
@property
def activepolicy(self) :
try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(tmsessionpolicy_tmglobal_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.tmsessionpolicy_tmglobal_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch tmsessionpolicy_tmglobal_binding resources.
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of tmsessionpolicy_tmglobal_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count tmsessionpolicy_tmglobal_binding resources configued on NetScaler.
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of tmsessionpolicy_tmglobal_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class tmsessionpolicy_tmglobal_binding_response(base_response) :
def __init__(self, length=1) :
self.tmsessionpolicy_tmglobal_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.tmsessionpolicy_tmglobal_binding = [tmsessionpolicy_tmglobal_binding() for _ in range(length)]
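
# Illustrative usage sketch (added; not part of the generated SDK file). The
# address, credentials and policy name below are placeholders; the sketch only
# shows how a binding class like this is typically fetched through the SDK's
# nitro_service client.
#
#   from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#   client = nitro_service("10.0.0.1", "http")
#   client.login("nsroot", "nsroot")
#   bindings = tmsessionpolicy_tmglobal_binding.get(client, "mypolicy")
#   print(bindings[0].boundto if bindings else "no bindings")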
# source: seungjin/app5-seungjin-net.appspot.com | dbindexer/backends.py
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.db.models.sql.constants import JOIN_TYPE, LHS_ALIAS, LHS_JOIN_COL, \
TABLE_NAME, RHS_JOIN_COL
from django.utils.tree import Node
from djangotoolbox.fields import ListField
from .lookups import StandardLookup
OR = 'OR'
# TODO: optimize code
class BaseResolver(object):
def __init__(self):
# mapping from lookups to indexes
self.index_map = {}
# mapping from column names to field names
self.column_to_name = {}
''' API called by resolver'''
def create_index(self, lookup):
field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
# backend doesn't now how to handle this index definition
if not field_to_index:
return
index_field = lookup.get_field_to_add(field_to_index)
config_field = index_field.item_field if \
isinstance(index_field, ListField) else index_field
if hasattr(field_to_index, 'max_length') and \
isinstance(config_field, models.CharField):
config_field.max_length = field_to_index.max_length
# don't install a field if it already exists
try:
lookup.model._meta.get_field(self.index_name(lookup))
except:
lookup.model.add_to_class(self.index_name(lookup), index_field)
self.index_map[lookup] = index_field
self.add_column_to_name(lookup.model, lookup.field_name)
else:
# makes dbindexer unit test compatible
if lookup not in self.index_map:
self.index_map[lookup] = lookup.model._meta.get_field(
self.index_name(lookup))
self.add_column_to_name(lookup.model, lookup.field_name)
def convert_insert_query(self, query):
'''Converts a database saving query.'''
for lookup in self.index_map.keys():
self._convert_insert_query(query, lookup)
def _convert_insert_query(self, query, lookup):
if not lookup.model == query.model:
return
position = self.get_query_position(query, lookup)
if position is None:
return
value = self.get_value(lookup.model, lookup.field_name, query)
value = lookup.convert_value(value)
query.values[position] = (self.get_index(lookup), value)
def convert_filters(self, query):
self._convert_filters(query, query.where)
''' helper methods '''
def _convert_filters(self, query, filters):
for index, child in enumerate(filters.children[:]):
if isinstance(child, Node):
self._convert_filters(query, child)
continue
self.convert_filter(query, filters, child, index)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
if constraint.field is None:
return
field_name = self.column_to_name.get(constraint.field.column)
if field_name and constraint.alias == \
query.table_map[query.model._meta.db_table][0]:
for lookup in self.index_map.keys():
if lookup.matches_filter(query.model, field_name, lookup_type,
value):
new_lookup_type, new_value = lookup.convert_lookup(value,
lookup_type)
index_name = self.index_name(lookup)
self._convert_filter(query, filters, child, index,
new_lookup_type, new_value, index_name)
def _convert_filter(self, query, filters, child, index, new_lookup_type,
new_value, index_name):
constraint, lookup_type, annotation, value = child
lookup_type, value = new_lookup_type, new_value
constraint.field = query.get_meta().get_field(index_name)
constraint.col = constraint.field.column
child = constraint, lookup_type, annotation, value
filters.children[index] = child
def index_name(self, lookup):
return lookup.index_name
def get_field_to_index(self, model, field_name):
try:
return model._meta.get_field(field_name)
except:
return None
def get_value(self, model, field_name, query):
field_to_index = self.get_field_to_index(model, field_name)
for query_field, value in query.values[:]:
if field_to_index == query_field:
return value
raise FieldDoesNotExist('Cannot find field in query.')
def add_column_to_name(self, model, field_name):
column_name = model._meta.get_field(field_name).column
self.column_to_name[column_name] = field_name
def get_index(self, lookup):
return self.index_map[lookup]
def get_query_position(self, query, lookup):
for index, (field, query_value) in enumerate(query.values[:]):
if field is self.get_index(lookup):
return index
return None
def unref_alias(query, alias):
table_name = query.alias_map[alias][TABLE_NAME]
query.alias_refcount[alias] -= 1
if query.alias_refcount[alias] < 1:
# Remove all information about the join
del query.alias_refcount[alias]
del query.join_map[query.rev_join_map[alias]]
del query.rev_join_map[alias]
del query.alias_map[alias]
query.table_map[table_name].remove(alias)
if len(query.table_map[table_name]) == 0:
del query.table_map[table_name]
query.used_aliases.discard(alias)
class FKNullFix(BaseResolver):
'''
Django doesn't generate correct code for ForeignKey__isnull.
It becomes a JOIN with pk__isnull which won't work on nonrel DBs,
so we rewrite the JOIN here.
'''
def create_index(self, lookup):
pass
def convert_insert_query(self, query):
pass
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
if constraint.field is not None and lookup_type == 'isnull' and \
isinstance(constraint.field, models.ForeignKey):
self.fix_fk_null_filter(query, constraint)
def unref_alias(self, query, alias):
unref_alias(query, alias)
def fix_fk_null_filter(self, query, constraint):
alias = constraint.alias
table_name = query.alias_map[alias][TABLE_NAME]
lhs_join_col = query.alias_map[alias][LHS_JOIN_COL]
rhs_join_col = query.alias_map[alias][RHS_JOIN_COL]
if table_name != constraint.field.rel.to._meta.db_table or \
rhs_join_col != constraint.field.rel.to._meta.pk.column or \
lhs_join_col != constraint.field.column:
return
next_alias = query.alias_map[alias][LHS_ALIAS]
if not next_alias:
return
self.unref_alias(query, alias)
alias = next_alias
constraint.col = constraint.field.column
constraint.alias = alias
class ConstantFieldJOINResolver(BaseResolver):
def create_index(self, lookup):
if '__' in lookup.field_name:
super(ConstantFieldJOINResolver, self).create_index(lookup)
def convert_insert_query(self, query):
        '''Converts a database insert (save) query.'''
for lookup in self.index_map.keys():
if '__' in lookup.field_name:
self._convert_insert_query(query, lookup)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
field_chain = self.get_field_chain(query, constraint)
if field_chain is None:
return
for lookup in self.index_map.keys():
if lookup.matches_filter(query.model, field_chain, lookup_type,
value):
self.resolve_join(query, child)
new_lookup_type, new_value = lookup.convert_lookup(value,
lookup_type)
index_name = self.index_name(lookup)
self._convert_filter(query, filters, child, index,
new_lookup_type, new_value, index_name)
def get_field_to_index(self, model, field_name):
model = self.get_model_chain(model, field_name)[-1]
field_name = field_name.split('__')[-1]
return super(ConstantFieldJOINResolver, self).get_field_to_index(model,
field_name)
def get_value(self, model, field_name, query):
value = super(ConstantFieldJOINResolver, self).get_value(model,
field_name.split('__')[0],
query)
if value is not None:
value = self.get_target_value(model, field_name, value)
return value
def get_field_chain(self, query, constraint):
if constraint.field is None:
return
column_index = self.get_column_index(query, constraint)
return self.column_to_name.get(column_index)
def get_model_chain(self, model, field_chain):
model_chain = [model, ]
for value in field_chain.split('__')[:-1]:
model = model._meta.get_field(value).rel.to
model_chain.append(model)
return model_chain
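    # Example (sketch): with hypothetical models Book and Author, where Book
    # has a ForeignKey 'author', get_model_chain(Book, 'author__name') follows
    # the relation and returns [Book, Author]; the final segment 'name' is a
    # plain field and is not traversed.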
def get_target_value(self, start_model, field_chain, pk):
fields = field_chain.split('__')
foreign_key = start_model._meta.get_field(fields[0])
if not foreign_key.rel:
# field isn't a related one, so return the value itself
return pk
target_model = foreign_key.rel.to
foreignkey = target_model.objects.all().get(pk=pk)
for value in fields[1:-1]:
foreignkey = getattr(foreignkey, value)
if isinstance(foreignkey._meta.get_field(fields[-1]), models.ForeignKey):
return getattr(foreignkey, '%s_id' % fields[-1])
else:
return getattr(foreignkey, fields[-1])
def add_column_to_name(self, model, field_name):
model_chain = self.get_model_chain(model, field_name)
column_chain = ''
field_names = field_name.split('__')
for model, name in zip(model_chain, field_names):
column_chain += model._meta.get_field(name).column + '__'
self.column_to_name[column_chain[:-2]] = field_name
def unref_alias(self, query, alias):
unref_alias(query, alias)
def get_column_index(self, query, constraint):
if constraint.field:
column_chain = constraint.field.column
alias = constraint.alias
while alias:
join = query.alias_map.get(alias)
if join and join[JOIN_TYPE] == 'INNER JOIN':
column_chain += '__' + join[LHS_JOIN_COL]
alias = query.alias_map[alias][LHS_ALIAS]
else:
alias = None
return '__'.join(reversed(column_chain.split('__')))
def resolve_join(self, query, child):
constraint, lookup_type, annotation, value = child
if not constraint.field:
return
alias = constraint.alias
while True:
next_alias = query.alias_map[alias][LHS_ALIAS]
if not next_alias:
break
self.unref_alias(query, alias)
alias = next_alias
constraint.alias = alias
# TODO: distinguish in memory joins from standard joins somehow
class InMemoryJOINResolver(ConstantFieldJOINResolver):
def __init__(self):
self.field_chains = []
super(InMemoryJOINResolver, self).__init__()
def create_index(self, lookup):
if '__' in lookup.field_name:
field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
if not field_to_index:
return
# save old column_to_name so we can make in memory queries later on
self.add_column_to_name(lookup.model, lookup.field_name)
# don't add an extra field for standard lookups!
if isinstance(lookup, StandardLookup):
return
# install lookup on target model
model = self.get_model_chain(lookup.model, lookup.field_name)[-1]
lookup.model = model
lookup.field_name = lookup.field_name.split('__')[-1]
super(ConstantFieldJOINResolver, self).create_index(lookup)
def convert_insert_query(self, query):
super(ConstantFieldJOINResolver, self).convert_insert_query(query)
def _convert_filters(self, query, filters):
# or queries are not supported for in-memory-JOINs
if self.contains_OR(query.where, OR):
return
# start with the deepest JOIN level filter!
all_filters = self.get_all_filters(filters)
all_filters.sort(key=lambda item: self.get_field_chain(query, item[1][0]) and \
-len(self.get_field_chain(query, item[1][0])) or 0)
for filters, child, index in all_filters:
# check if convert_filter removed a given child from the where-tree
if not self.contains_child(query.where, child):
continue
self.convert_filter(query, filters, child, index)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
field_chain = self.get_field_chain(query, constraint)
if field_chain is None:
return
if '__' not in field_chain:
return super(ConstantFieldJOINResolver, self).convert_filter(query,
filters, child, index)
pks = self.get_pks(query, field_chain, lookup_type, value)
self.resolve_join(query, child)
self._convert_filter(query, filters, child, index, 'in',
(pk for pk in pks), field_chain.split('__')[0])
def tree_contains(self, filters, to_find, func):
result = False
for child in filters.children[:]:
if func(child, to_find):
result = True
break
if isinstance(child, Node):
result = self.tree_contains(child, to_find, func)
if result:
break
return result
def contains_OR(self, filters, or_):
return self.tree_contains(filters, or_,
lambda c, f: isinstance(c, Node) and c.connector == f)
def contains_child(self, filters, to_find):
return self.tree_contains(filters, to_find, lambda c, f: c is f)
def get_all_filters(self, filters):
all_filters = []
for index, child in enumerate(filters.children[:]):
if isinstance(child, Node):
all_filters.extend(self.get_all_filters(child))
continue
all_filters.append((filters, child, index))
return all_filters
def index_name(self, lookup):
# use another index_name to avoid conflicts with lookups defined on the
# target model which are handled by the BaseBackend
return lookup.index_name + '_in_memory_join'
def get_pks(self, query, field_chain, lookup_type, value):
model_chain = self.get_model_chain(query.model, field_chain)
first_lookup = {'%s__%s' %(field_chain.rsplit('__', 1)[-1],
lookup_type): value}
self.combine_with_same_level_filter(first_lookup, query, field_chain)
pks = model_chain[-1].objects.all().filter(**first_lookup).values_list(
'id', flat=True)
chains = [field_chain.rsplit('__', i+1)[0]
for i in range(field_chain.count('__'))]
lookup = {}
for model, chain in reversed(zip(model_chain[1:-1], chains[:-1])):
lookup.update({'%s__%s' %(chain.rsplit('__', 1)[-1], 'in'):
(pk for pk in pks)})
self.combine_with_same_level_filter(lookup, query, chain)
pks = model.objects.all().filter(**lookup).values_list('id', flat=True)
return pks
def combine_with_same_level_filter(self, lookup, query, field_chain):
lookup_updates = {}
field_chains = self.get_all_field_chains(query, query.where)
for chain, child in field_chains.items():
if chain == field_chain:
continue
if field_chain.rsplit('__', 1)[0] == chain.rsplit('__', 1)[0]:
                lookup_updates['%s__%s' % (chain.rsplit('__', 1)[1], child[1])] \
                    = child[3]
self.remove_child(query.where, child)
self.resolve_join(query, child)
# TODO: update query.alias_refcount correctly!
lookup.update(lookup_updates)
def remove_child(self, filters, to_remove):
''' Removes a child object from filters. If filters doesn't contain
        children afterwards, filters will be removed from its parent. '''
for child in filters.children[:]:
if child is to_remove:
self._remove_child(filters, to_remove)
return
elif isinstance(child, Node):
self.remove_child(child, to_remove)
if hasattr(child, 'children') and not child.children:
self.remove_child(filters, child)
def _remove_child(self, filters, to_remove):
result = []
for child in filters.children[:]:
if child is to_remove:
continue
result.append(child)
filters.children = result
def get_all_field_chains(self, query, filters):
''' Returns a dict mapping from field_chains to the corresponding child.'''
field_chains = {}
all_filters = self.get_all_filters(filters)
for filters, child, index in all_filters:
field_chain = self.get_field_chain(query, child[0])
            # field_chain can be None if the user didn't specify an index for it
if field_chain:
field_chains[field_chain] = child
return field_chains | bsd-3-clause | 5,055,951,464,444,808,000 | 39.55914 | 87 | 0.570709 | false |
KaranToor/MA450 | google-cloud-sdk/platform/gsutil/third_party/boto/boto/ec2/instancestatus.py | 181 | 6854 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Details(dict):
"""
A dict object that contains name/value pairs which provide
more detailed information about the status of the system
or the instance.
"""
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'name':
self._name = value
elif name == 'status':
self[self._name] = value
else:
setattr(self, name, value)
class Event(object):
"""
A status event for an instance.
:ivar code: A string indicating the event type.
:ivar description: A string describing the reason for the event.
:ivar not_before: A datestring describing the earliest time for
the event.
:ivar not_after: A datestring describing the latest time for
the event.
"""
def __init__(self, code=None, description=None,
not_before=None, not_after=None):
self.code = code
self.description = description
self.not_before = not_before
self.not_after = not_after
def __repr__(self):
return 'Event:%s' % self.code
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'code':
self.code = value
elif name == 'description':
self.description = value
elif name == 'notBefore':
self.not_before = value
elif name == 'notAfter':
self.not_after = value
else:
setattr(self, name, value)
class Status(object):
"""
A generic Status object used for system status and instance status.
:ivar status: A string indicating overall status.
:ivar details: A dict containing name-value pairs which provide
more details about the current status.
"""
def __init__(self, status=None, details=None):
self.status = status
if not details:
details = Details()
self.details = details
def __repr__(self):
return 'Status:%s' % self.status
def startElement(self, name, attrs, connection):
if name == 'details':
return self.details
return None
def endElement(self, name, value, connection):
if name == 'status':
self.status = value
else:
setattr(self, name, value)
class EventSet(list):
def startElement(self, name, attrs, connection):
if name == 'item':
event = Event()
self.append(event)
return event
else:
return None
def endElement(self, name, value, connection):
setattr(self, name, value)
class InstanceStatus(object):
"""
Represents an EC2 Instance status as reported by
DescribeInstanceStatus request.
:ivar id: The instance identifier.
:ivar zone: The availability zone of the instance.
:ivar events: A list of events relevant to the instance.
:ivar state_code: An integer representing the current state
of the instance.
:ivar state_name: A string describing the current state
of the instance.
:ivar system_status: A Status object that reports impaired
functionality that stems from issues related to the systems
        that support an instance, such as hardware failures
and network connectivity problems.
:ivar instance_status: A Status object that reports impaired
functionality that arises from problems internal to the instance.
"""
def __init__(self, id=None, zone=None, events=None,
state_code=None, state_name=None):
self.id = id
self.zone = zone
self.events = events
self.state_code = state_code
self.state_name = state_name
self.system_status = Status()
self.instance_status = Status()
def __repr__(self):
return 'InstanceStatus:%s' % self.id
def startElement(self, name, attrs, connection):
if name == 'eventsSet':
self.events = EventSet()
return self.events
elif name == 'systemStatus':
return self.system_status
elif name == 'instanceStatus':
return self.instance_status
else:
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.id = value
elif name == 'availabilityZone':
self.zone = value
elif name == 'code':
self.state_code = int(value)
elif name == 'name':
self.state_name = value
else:
setattr(self, name, value)
class InstanceStatusSet(list):
"""
A list object that contains the results of a call to
DescribeInstanceStatus request. Each element of the
list will be an InstanceStatus object.
:ivar next_token: If the response was truncated by
the EC2 service, the next_token attribute of the
object will contain the string that needs to be
passed in to the next request to retrieve the next
set of results.
"""
def __init__(self, connection=None):
list.__init__(self)
self.connection = connection
self.next_token = None
def startElement(self, name, attrs, connection):
if name == 'item':
status = InstanceStatus()
self.append(status)
return status
else:
return None
def endElement(self, name, value, connection):
if name == 'nextToken':
self.next_token = value
setattr(self, name, value)
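# Example (sketch): these classes are normally filled in by boto's XML parser
# rather than constructed by hand. `conn` below stands for an already-open
# EC2 connection and is illustrative only.
#
#     statuses = conn.get_all_instance_status()
#     for s in statuses:
#         print s.id, s.state_name, s.system_status, s.instance_status
#     if statuses.next_token:
#         statuses = conn.get_all_instance_status(next_token=statuses.next_token)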
| apache-2.0 | 8,925,168,710,787,599,000 | 31.330189 | 74 | 0.628976 | false |
mach6/selenium | py/test/unit/selenium/webdriver/remote/test_new_session.py | 9 | 2867 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from copy import deepcopy
from importlib import import_module
import pytest
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.remote.command import Command
from selenium.webdriver.remote.webdriver import WebDriver
def test_converts_oss_capabilities_to_w3c(mocker):
mock = mocker.patch('selenium.webdriver.remote.webdriver.WebDriver.execute')
oss_caps = {'platform': 'WINDOWS', 'version': '11', 'acceptSslCerts': True}
w3c_caps = {'platformName': 'windows', 'browserVersion': '11', 'acceptInsecureCerts': True}
WebDriver(desired_capabilities=deepcopy(oss_caps))
expected_params = {'capabilities': {'firstMatch': [{}], 'alwaysMatch': w3c_caps},
'desiredCapabilities': oss_caps}
mock.assert_called_with(Command.NEW_SESSION, expected_params)
def test_converts_proxy_type_value_to_lowercase_for_w3c(mocker):
mock = mocker.patch('selenium.webdriver.remote.webdriver.WebDriver.execute')
oss_caps = {'proxy': {'proxyType': 'MANUAL', 'httpProxy': 'foo'}}
w3c_caps = {'proxy': {'proxyType': 'manual', 'httpProxy': 'foo'}}
WebDriver(desired_capabilities=deepcopy(oss_caps))
expected_params = {'capabilities': {'firstMatch': [{}], 'alwaysMatch': w3c_caps},
'desiredCapabilities': oss_caps}
mock.assert_called_with(Command.NEW_SESSION, expected_params)
@pytest.mark.parametrize('browser_name', ['firefox', 'chrome', 'ie', 'opera'])
def test_accepts_firefox_options_to_remote_driver(mocker, browser_name):
options = import_module('selenium.webdriver.{}.options'.format(browser_name))
caps_name = browser_name.upper() if browser_name != 'ie' else 'INTERNETEXPLORER'
mock = mocker.patch('selenium.webdriver.remote.webdriver.WebDriver.start_session')
opts = options.Options()
opts.add_argument('foo')
expected_caps = getattr(DesiredCapabilities, caps_name)
caps = expected_caps.copy()
expected_caps.update(opts.to_capabilities())
WebDriver(desired_capabilities=caps, options=opts)
mock.assert_called_with(expected_caps, None)
| apache-2.0 | 377,334,481,453,727,170 | 45.241935 | 95 | 0.729683 | false |
SaganBolliger/nupic | src/nupic/swarming/hypersearch/experimentutils.py | 40 | 9312 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This file contains utility functions that are used
# internally by the prediction framework. It should not be
# imported by description files. (see helpers.py)
from nupic.support.enum import Enum
# TODO: This file contains duplicates of 'InferenceElement', 'InferenceType',
# and 'ModelResult' copied from nupic.frameworks.opf
# Will want to change this in the future!
class InferenceElement(Enum(
prediction="prediction",
encodings="encodings",
classification="classification",
anomalyScore="anomalyScore",
anomalyLabel="anomalyLabel",
classConfidences="classConfidences",
multiStepPredictions="multiStepPredictions",
multiStepBestPredictions="multiStepBestPredictions",
multiStepBucketLikelihoods="multiStepBucketLikelihoods",
multiStepBucketValues="multiStepBucketValues",
)):
__inferenceInputMap = {
"prediction": "dataRow",
"encodings": "dataEncodings",
"classification": "category",
"classConfidences": "category",
"multiStepPredictions": "dataDict",
"multiStepBestPredictions": "dataDict",
}
__temporalInferenceElements = None
@staticmethod
def getInputElement(inferenceElement):
""" Get the sensor input element that corresponds to the given inference
element. This is mainly used for metrics and prediction logging
"""
return InferenceElement.__inferenceInputMap.get(inferenceElement, None)
@staticmethod
def isTemporal(inferenceElement):
""" Returns True if the inference from this timestep is predicted the input
for the NEXT timestep.
NOTE: This should only be checked IF THE MODEL'S INFERENCE TYPE IS ALSO
TEMPORAL. That is, a temporal model CAN have non-temporal inference elements,
but a non-temporal model CANNOT have temporal inference elements
"""
if InferenceElement.__temporalInferenceElements is None:
InferenceElement.__temporalInferenceElements = \
set([InferenceElement.prediction])
return inferenceElement in InferenceElement.__temporalInferenceElements
@staticmethod
def getTemporalDelay(inferenceElement, key=None):
""" Returns the number of records that elapse between when an inference is
made and when the corresponding input record will appear. For example, a
multistep prediction for 3 timesteps out will have a delay of 3
Parameters:
-----------------------------------------------------------------------
inferenceElement: The InferenceElement value being delayed
key: If the inference is a dictionary type, this specifies
key for the sub-inference that is being delayed
"""
# -----------------------------------------------------------------------
# For next step prediction, we shift by 1
if inferenceElement in (InferenceElement.prediction,
InferenceElement.encodings):
return 1
# -----------------------------------------------------------------------
# For classification, anomaly scores, the inferences immediately succeed the
# inputs
if inferenceElement in (InferenceElement.anomalyScore,
InferenceElement.anomalyLabel,
InferenceElement.classification,
InferenceElement.classConfidences):
return 0
# -----------------------------------------------------------------------
# For multistep prediction, the delay is based on the key in the inference
# dictionary
if inferenceElement in (InferenceElement.multiStepPredictions,
InferenceElement.multiStepBestPredictions):
return int(key)
# -----------------------------------------------------------------------
# default: return 0
return 0
@staticmethod
def getMaxDelay(inferences):
"""
Returns the maximum delay for the InferenceElements in the inference
dictionary
Parameters:
-----------------------------------------------------------------------
inferences: A dictionary where the keys are InferenceElements
"""
maxDelay = 0
for inferenceElement, inference in inferences.iteritems():
if isinstance(inference, dict):
for key in inference.iterkeys():
maxDelay = max(InferenceElement.getTemporalDelay(inferenceElement,
key),
maxDelay)
else:
maxDelay = max(InferenceElement.getTemporalDelay(inferenceElement),
maxDelay)
return maxDelay
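  # Example (sketch): for a mixed inference dict, 'prediction' contributes a
  # delay of 1 and each multi-step key contributes int(key), so the result
  # below is 3.
  #
  #   inferences = {InferenceElement.prediction: None,
  #                 InferenceElement.multiStepPredictions: {'1': 0.2, '3': 0.5}}
  #   InferenceElement.getMaxDelay(inferences)  # -> 3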
class InferenceType(Enum("TemporalNextStep",
"TemporalClassification",
"NontemporalClassification",
"TemporalAnomaly",
"NontemporalAnomaly",
"TemporalMultiStep",
"NontemporalMultiStep")):
__temporalInferenceTypes = None
@staticmethod
def isTemporal(inferenceType):
""" Returns True if the inference type is 'temporal', i.e. requires a
temporal pooler in the network.
"""
if InferenceType.__temporalInferenceTypes is None:
InferenceType.__temporalInferenceTypes = \
set([InferenceType.TemporalNextStep,
InferenceType.TemporalClassification,
InferenceType.TemporalAnomaly,
InferenceType.TemporalMultiStep,
InferenceType.NontemporalMultiStep])
return inferenceType in InferenceType.__temporalInferenceTypes
# ModelResult - A structure that contains the input to a model and the resulting
# predictions as well as any related information related to the predictions.
#
# predictionNumber: The prediction number. This should start at 0 and increase
# with each new ModelResult.
#
# rawInput: The input record, as input by the user. This is a dictionary-like
# object which has attributes whose names are the same as the input
# field names
#
# sensorInput: A SensorInput object that represents the input record, as it
# appears right before it is encoded. This may differ from the raw
# input in that certain input fields (such as DateTime fields) may
# be split into multiple encoded fields
#
# inferences: A dictionary of inferences. Each key is an InferenceType constant
# which corresponds to the type of prediction being made. Each value
# is a ___ element that corresponds to the actual prediction by the
#              model, including auxiliary information; TODO: fix description.
#
# metrics: The metrics corresponding to the most-recent prediction/ground
# truth pair
class ModelResult(object):
__slots__= ("predictionNumber", "rawInput", "sensorInput", "inferences",
"metrics", "predictedFieldIdx", "predictedFieldName")
def __init__(self,
predictionNumber=None,
rawInput=None,
sensorInput=None,
inferences=None,
metrics=None,
predictedFieldIdx=None,
predictedFieldName=None):
self.predictionNumber = predictionNumber
self.rawInput = rawInput
self.sensorInput = sensorInput
self.inferences = inferences
self.metrics = metrics
self.predictedFieldIdx = predictedFieldIdx
self.predictedFieldName = predictedFieldName
def __repr__(self):
return ("ModelResult("
"\tpredictionNumber={0}\n"
"\trawInput={1}\n"
"\tsensorInput={2}\n"
"\tinferences={3}\n"
"\tmetrics={4}\n"
"\tpredictedFieldIdx={5}\n"
"\tpredictedFieldName={6}\n"
")").format(self.predictionNumber,
self.rawInput,
self.sensorInput,
self.inferences,
self.metrics,
self.predictedFieldIdx,
self.predictedFieldName)
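# Example (sketch): a minimal ModelResult for a single record; the field and
# inference values are illustrative only.
#
#   result = ModelResult(predictionNumber=0,
#                        rawInput={'consumption': 42.0},
#                        inferences={InferenceElement.prediction: 43.5})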
| agpl-3.0 | 7,031,728,772,097,014,000 | 39.486957 | 81 | 0.602234 | false |
ak2703/edx-platform | lms/djangoapps/certificates/tests/test_create_fake_cert.py | 107 | 2003 | """Tests for the create_fake_certs management command. """
from django.test import TestCase
from django.core.management.base import CommandError
from nose.plugins.attrib import attr
from opaque_keys.edx.locator import CourseLocator
from student.tests.factories import UserFactory
from certificates.management.commands import create_fake_cert
from certificates.models import GeneratedCertificate
@attr('shard_1')
class CreateFakeCertTest(TestCase):
"""Tests for the create_fake_certs management command. """
USERNAME = "test"
COURSE_KEY = CourseLocator(org='edX', course='DemoX', run='Demo_Course')
def setUp(self):
super(CreateFakeCertTest, self).setUp()
self.user = UserFactory.create(username=self.USERNAME)
def test_create_fake_cert(self):
# No existing cert, so create it
self._run_command(
self.USERNAME,
unicode(self.COURSE_KEY),
cert_mode='verified',
grade='0.89'
)
cert = GeneratedCertificate.objects.get(user=self.user, course_id=self.COURSE_KEY)
self.assertEqual(cert.status, 'downloadable')
self.assertEqual(cert.mode, 'verified')
self.assertEqual(cert.grade, '0.89')
self.assertEqual(cert.download_uuid, 'test')
self.assertEqual(cert.download_url, 'http://www.example.com')
# Cert already exists; modify it
self._run_command(
self.USERNAME,
unicode(self.COURSE_KEY),
cert_mode='honor'
)
cert = GeneratedCertificate.objects.get(user=self.user, course_id=self.COURSE_KEY)
self.assertEqual(cert.mode, 'honor')
def test_too_few_args(self):
with self.assertRaisesRegexp(CommandError, 'Usage'):
self._run_command(self.USERNAME)
def _run_command(self, *args, **kwargs):
"""Run the management command to generate a fake cert. """
command = create_fake_cert.Command()
return command.handle(*args, **kwargs)
| agpl-3.0 | -1,804,695,863,457,671,400 | 36.092593 | 90 | 0.664004 | false |
SohKai/ChronoLogger | web/flask/lib/python2.7/site-packages/whoosh/spelling.py | 39 | 12750 | # Copyright 2007 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
"""This module contains helper functions for correcting typos in user queries.
"""
from collections import defaultdict
from heapq import heappush, heapreplace
from whoosh import analysis, fields, highlight, query, scoring
from whoosh.automata import fst
from whoosh.compat import xrange, string_type
from whoosh.support.levenshtein import distance
from whoosh.util.text import utf8encode
# Corrector objects
class Corrector(object):
"""Base class for spelling correction objects. Concrete sub-classes should
implement the ``_suggestions`` method.
"""
def suggest(self, text, limit=5, maxdist=2, prefix=0):
"""
:param text: the text to check. This word will **not** be added to the
suggestions, even if it appears in the word graph.
:param limit: only return up to this many suggestions. If there are not
enough terms in the field within ``maxdist`` of the given word, the
returned list will be shorter than this number.
:param maxdist: the largest edit distance from the given word to look
at. Values higher than 2 are not very effective or efficient.
:param prefix: require suggestions to share a prefix of this length
with the given word. This is often justifiable since most
misspellings do not involve the first letter of the word. Using a
prefix dramatically decreases the time it takes to generate the
list of words.
"""
_suggestions = self._suggestions
heap = []
seen = set([text])
for k in xrange(1, maxdist + 1):
for item in _suggestions(text, k, prefix):
if item[1] in seen:
continue
seen.add(item[1])
# Note that the *higher* scores (item[0]) are better!
if len(heap) < limit:
heappush(heap, item)
elif item > heap[0]:
heapreplace(heap, item)
# If the heap is already at the required length, don't bother going
# to a higher edit distance
if len(heap) >= limit:
break
sugs = sorted(heap, key=lambda item: (0 - item[0], item[1]))
return [sug for _, sug in sugs]
def _suggestions(self, text, maxdist, prefix):
"""Low-level method that yields a series of (score, "suggestion")
tuples.
:param text: the text to check.
:param maxdist: the maximum edit distance.
:param prefix: require suggestions to share a prefix of this length
with the given word.
"""
raise NotImplementedError
class ReaderCorrector(Corrector):
"""Suggests corrections based on the content of a field in a reader.
Ranks suggestions by the edit distance, then by highest to lowest
frequency.
"""
def __init__(self, reader, fieldname):
self.reader = reader
self.fieldname = fieldname
def _suggestions(self, text, maxdist, prefix):
fieldname = self.fieldname
freq = self.reader.frequency
for sug in self.reader.terms_within(fieldname, text, maxdist,
prefix=prefix):
# Higher scores are better, so negate the distance and frequency
# TODO: store spelling frequencies in the graph
f = freq(fieldname, sug) or 1
score = 0 - (maxdist + (1.0 / f * 0.5))
yield (score, sug)
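# Example (sketch): suggesting corrections from an index. Assumes an open
# searcher on an index whose "content" field was indexed with spelling
# support; the names are illustrative.
#
#     corrector = ReaderCorrector(searcher.reader(), "content")
#     corrector.suggest("speling", limit=3, maxdist=2, prefix=1)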
class GraphCorrector(Corrector):
"""Suggests corrections based on the content of a raw
:class:`whoosh.automata.fst.GraphReader` object.
By default ranks suggestions based on the edit distance.
"""
def __init__(self, graph):
self.graph = graph
def _suggestions(self, text, maxdist, prefix):
for sug in fst.within(self.graph, text, k=maxdist, prefix=prefix):
# Higher scores are better, so negate the edit distance
yield (0 - maxdist, sug)
class MultiCorrector(Corrector):
"""Merges suggestions from a list of sub-correctors.
"""
def __init__(self, correctors):
self.correctors = correctors
def _suggestions(self, text, maxdist, prefix):
for corr in self.correctors:
for item in corr._suggestions(text, maxdist, prefix):
yield item
def wordlist_to_graph_file(wordlist, dbfile, fieldname="_", strip=True):
"""Writes a word graph file from a list of words.
>>> # Open a word list file with one word on each line, and write the
>>> # word graph to a graph file
    >>> wordlist_to_graph_file(open("mywords.txt"), "mywords.dawg")
:param wordlist: an iterable containing the words for the graph. The words
must be in sorted order.
:param dbfile: a filename string or file-like object to write the word
graph to. This function will close the file.
"""
from whoosh.filedb.structfile import StructFile
if isinstance(dbfile, string_type):
dbfile = open(dbfile, "wb")
if not isinstance(dbfile, StructFile):
dbfile = StructFile(dbfile)
gw = fst.GraphWriter(dbfile)
gw.start_field(fieldname)
for word in wordlist:
if strip:
word = word.strip()
gw.insert(word)
gw.finish_field()
gw.close()
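# Example (sketch): loading a graph file written by wordlist_to_graph_file()
# into a GraphCorrector. The file name is illustrative.
#
#     from whoosh.filedb.structfile import StructFile
#     graph = fst.GraphReader(StructFile(open("mywords.dawg", "rb")))
#     corrector = GraphCorrector(graph)
#     corrector.suggest("speling", maxdist=1)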
# Query correction
class Correction(object):
"""Represents the corrected version of a user query string. Has the
following attributes:
``query``
The corrected :class:`whoosh.query.Query` object.
``string``
The corrected user query string.
``original_query``
The original :class:`whoosh.query.Query` object that was corrected.
``original_string``
The original user query string.
``tokens``
A list of token objects representing the corrected words.
You can also use the :meth:`Correction.format_string` method to reformat the
corrected query string using a :class:`whoosh.highlight.Formatter` class.
For example, to display the corrected query string as HTML with the
changed words emphasized::
from whoosh import highlight
correction = mysearcher.correct_query(q, qstring)
hf = highlight.HtmlFormatter(classname="change")
html = correction.format_string(hf)
"""
def __init__(self, q, qstring, corr_q, tokens):
self.original_query = q
self.query = corr_q
self.original_string = qstring
self.tokens = tokens
if self.original_string:
self.string = self.format_string(highlight.NullFormatter())
else:
self.string = ''
def __repr__(self):
return "%s(%r, %r)" % (self.__class__.__name__, self.query,
self.string)
def format_string(self, formatter):
"""
Highlights the corrected words in the original query string using the
given :class:`~whoosh.highlight.Formatter`.
:param formatter: A :class:`whoosh.highlight.Formatter` instance.
:return: the output of the formatter (usually a string).
"""
if not self.original_string:
return ''
if isinstance(formatter, type):
formatter = formatter()
fragment = highlight.Fragment(self.original_string, self.tokens)
return formatter.format_fragment(fragment, replace=True)
# QueryCorrector objects
class QueryCorrector(object):
"""Base class for objects that correct words in a user query.
"""
def correct_query(self, q, qstring):
"""Returns a :class:`Correction` object representing the corrected
form of the given query.
:param q: the original :class:`whoosh.query.Query` tree to be
corrected.
:param qstring: the original user query. This may be None if the
original query string is not available, in which case the
``Correction.string`` attribute will also be None.
:rtype: :class:`Correction`
"""
raise NotImplementedError
class SimpleQueryCorrector(QueryCorrector):
"""A simple query corrector based on a mapping of field names to
:class:`Corrector` objects, and a list of ``("fieldname", "text")`` tuples
to correct. And terms in the query that appear in list of term tuples are
corrected using the appropriate corrector.
"""
def __init__(self, correctors, terms, prefix=0, maxdist=2):
"""
:param correctors: a dictionary mapping field names to
:class:`Corrector` objects.
:param terms: a sequence of ``("fieldname", "text")`` tuples
representing terms to be corrected.
:param prefix: suggested replacement words must share this number of
initial characters with the original word. Increasing this even to
just ``1`` can dramatically speed up suggestions, and may be
            justifiable since spelling mistakes rarely involve the first
letter of a word.
:param maxdist: the maximum number of "edits" (insertions, deletions,
            substitutions, or transpositions of letters) allowed between the
original word and any suggestion. Values higher than ``2`` may be
slow.
"""
self.correctors = correctors
self.termset = frozenset(terms)
self.prefix = prefix
self.maxdist = maxdist
def correct_query(self, q, qstring):
correctors = self.correctors
termset = self.termset
prefix = self.prefix
maxdist = self.maxdist
# A list of tokens that were changed by a corrector
corrected_tokens = []
# The corrected query tree. We don't need to deepcopy the original
# because we use Query.replace() to find-and-replace the corrected
# words and it returns a copy of the query tree.
corrected_q = q
# For every word in the original query...
# Note we can't put these in a set, because we must preserve WHERE
        # in the query each token occurred so we can format them later
for token in q.all_tokens():
fname = token.fieldname
# If this is one of the words we're supposed to correct...
if (fname, token.text) in termset:
sugs = correctors[fname].suggest(token.text, prefix=prefix,
maxdist=maxdist)
if sugs:
# This is a "simple" corrector, so we just pick the first
# suggestion :/
sug = sugs[0]
# Return a new copy of the original query with this word
# replaced by the correction
corrected_q = corrected_q.replace(token.fieldname,
token.text, sug)
# Add the token to the list of corrected tokens (for the
# formatter to use later)
token.original = token.text
token.text = sug
corrected_tokens.append(token)
return Correction(q, qstring, corrected_q, corrected_tokens)
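# Example (sketch): wiring a SimpleQueryCorrector by hand. In normal use
# Searcher.correct_query() assembles these pieces; the names below are
# illustrative.
#
#     correctors = {"content": ReaderCorrector(searcher.reader(), "content")}
#     terms = [("content", "speling")]
#     sqc = SimpleQueryCorrector(correctors, terms)
#     correction = sqc.correct_query(q, qstring)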
| mit | 6,058,262,299,961,608,000 | 36.946429 | 80 | 0.634902 | false |
TeamExodus/external_chromium_org | third_party/tlslite/tlslite/utils/openssl_rsakey.py | 200 | 4670 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""OpenSSL/M2Crypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
# copied from M2Crypto.util.py, so when we load the local copy of m2
# we can still use it
def password_callback(v, prompt1='Enter private key passphrase:',
prompt2='Verify passphrase:'):
from getpass import getpass
while 1:
try:
p1=getpass(prompt1)
if v:
p2=getpass(prompt2)
if p1==p2:
break
else:
break
except KeyboardInterrupt:
return None
return p1
if m2cryptoLoaded:
class OpenSSL_RSAKey(RSAKey):
def __init__(self, n=0, e=0):
self.rsa = None
self._hasPrivateKey = False
if (n and not e) or (e and not n):
raise AssertionError()
if n and e:
self.rsa = m2.rsa_new()
m2.rsa_set_n(self.rsa, numberToMPI(n))
m2.rsa_set_e(self.rsa, numberToMPI(e))
def __del__(self):
if self.rsa:
m2.rsa_free(self.rsa)
def __getattr__(self, name):
if name == 'e':
if not self.rsa:
return 0
return mpiToNumber(m2.rsa_get_e(self.rsa))
elif name == 'n':
if not self.rsa:
return 0
return mpiToNumber(m2.rsa_get_n(self.rsa))
else:
raise AttributeError
def hasPrivateKey(self):
return self._hasPrivateKey
def _rawPrivateKeyOp(self, m):
b = numberToByteArray(m, numBytes(self.n))
s = m2.rsa_private_encrypt(self.rsa, bytes(b), m2.no_padding)
c = bytesToNumber(bytearray(s))
return c
def _rawPublicKeyOp(self, c):
b = numberToByteArray(c, numBytes(self.n))
s = m2.rsa_public_decrypt(self.rsa, bytes(b), m2.no_padding)
m = bytesToNumber(bytearray(s))
return m
def acceptsPassword(self): return True
def write(self, password=None):
bio = m2.bio_new(m2.bio_s_mem())
if self._hasPrivateKey:
if password:
def f(v): return password
m2.rsa_write_key(self.rsa, bio, m2.des_ede_cbc(), f)
else:
def f(): pass
m2.rsa_write_key_no_cipher(self.rsa, bio, f)
else:
if password:
raise AssertionError()
m2.rsa_write_pub_key(self.rsa, bio)
s = m2.bio_read(bio, m2.bio_ctrl_pending(bio))
m2.bio_free(bio)
return s
def generate(bits):
key = OpenSSL_RSAKey()
def f():pass
key.rsa = m2.rsa_generate_key(bits, 3, f)
key._hasPrivateKey = True
return key
generate = staticmethod(generate)
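        # Example (sketch): round-tripping a generated key through PEM; only
        # meaningful when m2cryptoLoaded is True.
        #
        #     key = OpenSSL_RSAKey.generate(2048)
        #     pem = key.write()                # PEM private key, no passphrase
        #     key2 = OpenSSL_RSAKey.parse(pem)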
def parse(s, passwordCallback=None):
# Skip forward to the first PEM header
start = s.find("-----BEGIN ")
if start == -1:
raise SyntaxError()
s = s[start:]
if s.startswith("-----BEGIN "):
if passwordCallback==None:
callback = password_callback
else:
def f(v, prompt1=None, prompt2=None):
return passwordCallback()
callback = f
bio = m2.bio_new(m2.bio_s_mem())
try:
m2.bio_write(bio, s)
key = OpenSSL_RSAKey()
if s.startswith("-----BEGIN RSA PRIVATE KEY-----"):
def f():pass
key.rsa = m2.rsa_read_key(bio, callback)
if key.rsa == None:
raise SyntaxError()
key._hasPrivateKey = True
elif s.startswith("-----BEGIN PUBLIC KEY-----"):
key.rsa = m2.rsa_read_pub_key(bio)
if key.rsa == None:
raise SyntaxError()
key._hasPrivateKey = False
else:
raise SyntaxError()
return key
finally:
m2.bio_free(bio)
else:
raise SyntaxError()
parse = staticmethod(parse)
| bsd-3-clause | 4,369,961,042,199,362,000 | 33.338235 | 73 | 0.466809 | false |
rldhont/Quantum-GIS | tests/src/python/test_qgsserver_projectutils.py | 27 | 2853 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServerProject.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Paul Blottiere'
__date__ = '26/12/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import os
from qgis.server import QgsServerProjectUtils
from qgis.core import QgsProject
from qgis.testing import unittest
from utilities import unitTestDataPath
class TestQgsServerProjectUtils(unittest.TestCase):
def setUp(self):
self.testdata_path = unitTestDataPath('qgis_server_project') + '/'
self.prj = QgsProject()
self.prjPath = os.path.join(self.testdata_path, "project.qgs")
self.prj.read(self.prjPath)
self.prj2 = QgsProject()
self.prj2Path = os.path.join(self.testdata_path, "project2.qgs")
self.prj2.read(self.prj2Path)
def tearDown(self):
pass
def test_size(self):
self.assertEqual(QgsServerProjectUtils.wmsMaxWidth(self.prj), 400)
self.assertEqual(QgsServerProjectUtils.wmsMaxHeight(self.prj), 500)
def test_url(self):
self.assertEqual(QgsServerProjectUtils.wmsServiceUrl(self.prj), "my_wms_advertised_url")
self.assertEqual(QgsServerProjectUtils.wcsServiceUrl(self.prj), "my_wcs_advertised_url")
self.assertEqual(QgsServerProjectUtils.wfsServiceUrl(self.prj), "my_wfs_advertised_url")
def test_wmsuselayerids(self):
self.assertEqual(QgsServerProjectUtils.wmsUseLayerIds(self.prj), False)
self.assertEqual(QgsServerProjectUtils.wmsUseLayerIds(self.prj2), True)
def test_wmsrestrictedlayers(self):
# retrieve entry from project
result = QgsServerProjectUtils.wmsRestrictedLayers(self.prj)
expected = []
expected.append('points') # layer
expected.append('group1') # local group
expected.append('groupEmbedded') # embedded group
self.assertListEqual(sorted(expected), sorted(result))
def test_wfslayersids(self):
# retrieve entry from project
result = QgsServerProjectUtils.wfsLayerIds(self.prj)
expected = []
expected.append('multipoint20170309173637804') # from embedded group
expected.append('points20170309173738552') # local layer
expected.append('polys20170309173913723') # from local group
self.assertEqual(expected, result)
def test_wcslayersids(self):
# retrieve entry from project
result = QgsServerProjectUtils.wcsLayerIds(self.prj)
expected = []
expected.append('landsat20170313142548073')
self.assertEqual(expected, result)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 1,455,714,124,086,651,100 | 33.373494 | 96 | 0.698212 | false |
DBrianKimmel/PyHouse | Project/src/Modules/House/Family/Hue/hue_hub.py | 1 | 25036 | """
@name: Modules/House/Family/hue/hue_hub.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2017-2020 by D. Brian Kimmel
@note: Created on Dec 19, 2017
@license: MIT License
@summary:
/config
/lights
/groups
/schedules
/scenes
/sensors
/rules
Read the hub info and populate parts of pyhouse_obj.
Send hub commands to do things like turn on/off/dim of lights.
The Hue Hub is a network device so we need to know which PyHouse instance is going to be in control.
http://192.168.1.131/debug/clip.html
"""
__updated__ = '2020-02-09'
# Import system type stuff
from zope.interface import implementer
import datetime
import jsonpickle
from queue import Queue
import time
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from twisted.internet.defer import Deferred, succeed
from twisted.internet.protocol import Protocol
from twisted.web.iweb import IBodyProducer
# Import PyMh files
from Modules.Core.Utilities.convert import long_to_str
from Modules.Core.Utilities.json_tools import encode_json
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
from Modules.House.Family.Hue.hue_data import HueLightData
from Modules.House.Lighting.utility import lightingUtility as lightingUtility
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.Hue_Hub ')
SEND_TIMEOUT = 0.8
mac = [ '00', '17', '88', '10', '22', '01' ]
uid = '2f402f80-da50-11e1-9b23-%s' % ''.join(mac)
icon = 'hue.png'
description_xml = 'description.xml'
lights = []
username = "9nR8rIGRYNKBlROabMWuAlhGfAgSjBS2EWHoFYy3"
devicetype = "something"
portalservices = False
def generate_timestamp():
return time.strftime('%Y-%m-%dT%H:%M:%S')
def put_config_json(p_json):
    l_entry = jsonpickle.decode(p_json)  # parse the JSON body sent by the client
if 'devicetype' in l_entry:
global devicetype
devicetype = l_entry['devicetype']
elif 'portalservices' in l_entry:
global portalservices
portalservices = l_entry['portalservices']
def json_dumps(what):
    return jsonpickle.encode(what)
def gen_config_json(full):
pass
# return json_dumps(gen_config(full))
def gen_sensors_json():
return json_dumps(dict())
def set_light_state(_nr, state):
    _entry = jsonpickle.decode(state)  # the PUT body arrives as JSON text
# return json_dumps(json_obj)
def set_group_state(_nr, state):
# only 1 group in the current version
for i in range(0, len(lights)):
set_light_state(i, state)
def get_light_state(nr):
pass
def gen_ind_light_json(_nr):
return
def gen_lights(which):
global lights
if which == None:
json_obj = dict()
t = []
n = 0
for _l in lights:
            th = gilj(n)  # 'gilj' worker thread (assumed defined elsewhere) builds the JSON for light n
n += 1
th.start()
t.append(th)
for nr in range(0, n):
t[nr].join()
json_obj['%d' % (nr + 1)] = t[nr].get_result()
return json_obj
return gen_ind_light_json(which)
def gen_groups(which):
#### a light group
action = {
'on' : True,
'bri' : 254,
'hue' : 10000,
'sat' : 254,
'effect' : 'none',
'xy' : [],
'ct' : 250,
'alert' : 'select',
'colormode' : 'ct'
}
action['xy'].append(0.5)
action['xy'].append(0.5)
g_lights = []
nOn = 0
for i in range(0, len(lights)):
g_lights.append('%d' % (i + 1))
if lights[i]['state'] == True:
nOn += 1
state = {
'all_on' : nOn == len(lights),
'any_on' : nOn > 0
}
g = {
'action' : action,
'lights' : g_lights,
'state' : state,
'type' : 'Room',
'class' : 'Living room',
'name' : 'Group 1'
}
if which == None:
answer = { '1': g }
return answer
return g
def gen_groups_json(which):
return json_dumps(gen_groups(which))
def gen_scenes():
scene = {
'name': 'Kathy on 1449133269486',
'lights': [],
'owner': 'ffffffffe0341b1b376a2389376a2389',
'recycle': True,
'locked': False,
'appdata': dict(),
'picture': '',
'lastupdated': '2015-12-03T08:57:13',
'version': 1
}
for i in range(0, len(lights)):
scene['lights'].append('%d' % (i + 1))
answer = { '123123123-on-0': scene }
return answer
def gen_scenes_json():
return json_dumps(gen_scenes())
def gen_light_json(which):
return json_dumps(gen_lights(which))
def gen_dump_json():
answer = {
'lights': gen_lights(None),
'groups': gen_groups(None),
# 'config': gen_config(True),
'sensors': {},
'swupdate2': {},
'schedules': {},
'scenes': {}
}
return json_dumps(answer)
def gen_description_xml(addr):
reply = [
'<root xmlns="urn:schemas-upnp-org:device-1-0">',
' <specVersion>',
' <major>1</major>',
' <minor>0</minor>',
' </specVersion>',
' <URLBase>http://%s/</URLBase>' % addr,
' <device>',
' <deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>',
' <friendlyName>Virtual hue</friendlyName>',
' <manufacturer>vanheusden.com</manufacturer>',
' <manufacturerURL>http://www.vanheusden.com</manufacturerURL>',
' <modelDescription>Virtual Philips hue bridge</modelDescription>',
' <modelName>Virtual hue</modelName>',
' <modelNumber>1</modelNumber>',
' <modelURL>https://github.com/flok99/virtual-hue</modelURL>',
' <serialNumber>%s</serialNumber>' % ''.join(mac),
        ' <UDN>uuid:%s</UDN>' % uid,
' <presentationURL>index.html</presentationURL>',
' <iconList>',
' <icon>',
' <mimetype>image/png</mimetype>',
' <height>48</height>',
' <width>48</width>',
' <depth>24</depth>',
' <url>%s</url>' % icon,
' </icon>',
' </iconList>',
' </device>',
'</root>'
]
return '\r\n'.join(reply)
def generate_light_body_json(p_light_control):
""" Convert internal data to hue control data and format
@param p_light_control: ==> Light Data() in Housing.Lighting.lighting_lights
@returns: json body to control lights
{
"on": true,
"bri": 254
}
"""
if p_light_control.BrightnessPct == 0:
l_body = {
'on' : 'false'
}
else:
l_bright = int(p_light_control.BrightnessPct * 254 / 100)
l_body = {
'on' : 'true',
'bri' : '{}'.format(l_bright)
}
return encode_json(l_body)
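# Example (sketch): a control object at 50% brightness maps to bri 127
# (int(50 * 254 / 100)); the key order of the emitted JSON may vary.
# HueLightData here stands in for any object carrying BrightnessPct.
#
#     l_control = HueLightData()
#     l_control.BrightnessPct = 50
#     generate_light_body_json(l_control)  # -> '{"on": "true", "bri": "127"}'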
@implementer(IBodyProducer)
class BytesProducer(object):
"""
Generate the messages to send in the web requests.
"""
def __init__(self, body):
self.m_body = body
self.length = len(body)
def startProducing(self, consumer):
consumer.write(self.m_body)
return succeed(None)
def pauseProducing(self):
pass
def stopProducing(self):
pass
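# Example (sketch): PUTting a light-state body to a hub with twisted's Agent.
# The hub address and username segment are illustrative; a real hub expects
# /api/<username>/lights/<id>/state.
#
#     from twisted.internet import reactor
#     l_agent = Agent(reactor)
#     l_producer = BytesProducer(b'{"on": "true", "bri": "127"}')
#     l_defer = l_agent.request(
#         b'PUT', b'http://192.168.1.131/api/username/lights/1/state',
#         Headers({b'Content-Type': [b'application/json']}), l_producer)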
# class server(BaseHTTPRequestHandler):
class Server:
"""
"""
m_client_address = None
m_path = '/'
def _set_headers(self, mime_type):
self.send_response(200)
self.send_header('Content-type', mime_type)
self.end_headers()
def do_GET(self):
LOG.debug('GET', self.m_client_address, self.m_path)
parts = self.m_path.split('/')
if self.m_path == '/{}'.format(description_xml):
self._set_headers("text/xml")
LOG.debug('get {}'.format(description_xml))
            h = self.server.server_address[0]
if 'Host' in self.headers:
h = self.headers['Host']
self.wfile.write(gen_description_xml(h))
elif self.m_path == '/%s' % icon:
self._set_headers("image/png")
LOG.debug('get %s' % parts[1])
try:
fh = open(icon, 'r')
self.wfile.write(fh.read())
fh.close()
except Exception as e:
LOG.warning('Cannot access %s' % icon, e)
elif self.m_path == '/api/' or self.m_path == '/api/%s' % username or self.m_path == '/api/%s/' % username:
self._set_headers("application/json")
LOG.debug('get all state')
self.wfile.write(gen_dump_json())
elif self.m_path == '/api/config' or self.m_path == '/api/config/':
self._set_headers("application/json")
LOG.debug('get basic configuration short (2)')
self.wfile.write(gen_config_json(False))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'lights':
self._set_headers("application/json")
LOG.debug('enumerate list of lights')
if len(parts) == 4 or parts[4] == '':
LOG.debug(' ...all')
self.wfile.write(gen_light_json(None))
else:
LOG.debug(' ...single (%s)' % parts[4])
self.wfile.write(gen_light_json(int(parts[4]) - 1))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'groups':
self._set_headers("application/json")
LOG.debug('enumerate list of groups')
if len(parts) == 4 or parts[4] == '':
LOG.debug(' ...all')
self.wfile.write(gen_groups_json(None))
else:
LOG.debug(' ...single (%s)' % parts[4])
self.wfile.write(gen_groups_json(int(parts[4]) - 1))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'scenes':
self._set_headers("application/json")
LOG.debug('enumerate list of scenes')
self.wfile.write(gen_scenes_json())
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'sensors':
self._set_headers("application/json")
LOG.debug('enumerate list of sensors')
self.wfile.write(gen_sensors_json())
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'light':
self._set_headers("application/json")
LOG.debug('get individual light state')
self.wfile.write(gen_ind_light_json(int(parts[4]) - 1))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'config':
self._set_headers("application/json")
if parts[2] == username:
LOG.debug('get basic configuration full')
self.wfile.write(gen_config_json(True))
else:
LOG.debug('get basic configuration short (1)')
self.wfile.write(gen_config_json(False))
else:
self._set_headers("application/json")
LOG.debug('[G] unknown get request', self.m_path, self.headers)
self.wfile.write('unreg()')
# self.wfile.write('[{"error":{"type":1,"address":"/","description":"unauthorized user"}}]')
def do_HEAD(self):
LOG.debug('HEAD')
self._set_headers("text/html")
def do_POST(self):
LOG.debug('POST', self.m_path)
parts = self.m_path.split('/')
# simpler registration; always return the same key
# should keep track in e.g. an sqlite3 database and then do whitelisting etc
if len(parts) >= 2 and parts[1] == 'api':
self._set_headers("application/json")
data_len = int(self.headers['Content-Length'])
LOG.debug(self.rfile.read(data_len))
self.wfile.write('[{"success":{"username": "%s"}}]' % username)
        elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'groups':
self._set_headers("application/json")
self.wfile.write('[{"success":{"id": "1"}}]')
else:
LOG.debug('unknown post request', self.m_path)
def do_PUT(self):
LOG.debug('PUT', self.m_path)
data_len = int(self.headers['Content-Length'])
content = self.rfile.read(data_len)
parts = self.m_path.split('/')
if len(parts) >= 6 and parts[1] == 'api' and parts[3] == 'lights' and parts[5] == 'state':
self._set_headers("application/json")
LOG.debug('set individual light state')
self.wfile.write(set_light_state(int(parts[4]) - 1, content))
elif len(parts) >= 6 and parts[1] == 'api' and parts[3] == 'groups' and parts[5] == 'action':
self._set_headers("application/json")
LOG.debug('set individual group state')
self.wfile.write(set_group_state(int(parts[4]) - 1, content))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'config':
self._set_headers("application/json")
LOG.debug('put config')
put_config_json(content)
self.wfile.write('[{"success":"Updated."}]')
elif len(parts) >= 3 and parts[1] == 'api' and parts[2] == 'config':
self._set_headers("application/json")
LOG.debug('put config (2)')
LOG.debug(content)
else:
self._set_headers("text/html")
LOG.debug('unknown put request', self.m_path, content)
def add_light(name, id_, command, command_get):
global lights
row = {
'name': name,
'id': id_,
'cmd': command,
'cmd_get': command_get,
'state': False
}
lights.append(row)
class HueProtocol(Protocol):
""" A minimal protocol for the Hue Hub.
"""
m_finished = None
m_remaining = 0
def __init__(self, p_pyhouse_obj, p_finished, p_command, p_response_code):
"""
@param p_finished: is a deferred that ????
"""
self.m_finished = p_finished
self.m_command = p_command
self.m_code = p_response_code
self.m_pyhouse_obj = p_pyhouse_obj
self.m_body = ''
self.m_remaining = 1024 * 10 # Allow for 10kb response
LOG.debug('Hue Protocol Init')
def dataReceived(self, p_bytes):
if self.m_remaining > 0:
l_display = p_bytes[:self.m_remaining].decode("utf8") # Get the string
# l_json = jsonpickle.decode(l_display)
# LOG.debug('\n\tCommand: {}\n===== Body =====\n{}\n'.format(self.m_command, l_json))
self.m_body = l_display
self.m_remaining -= len(l_display)
def connectionLost(self, p_reason):
""" This gets called when the web page has all been received in its entirety.
GET
        Now we have the page (and the command we used to get it), so we can deal with the server's reply.
POST
?
"""
def cb_log(self, p_command, p_code, p_body, p_finished, p_pyhouse_obj):
""" Log the response to our command and dispatch the message
"""
# LOG.debug('\n\tCommand: {}\n\tCode: {}\n\tBody: {}'.format(p_command, p_code, p_body))
if p_command == '/config':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_config(p_body)
elif p_command == '/lights':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_lights(p_body)
elif p_command == '/rules':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_rules(p_body)
elif p_command == '/scenes':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_scenes(p_body)
elif p_command == '/schedules':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_schedules(p_body)
elif p_command == '/sensors':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_sensors(p_body)
def eb_failed(fail_reason):
LOG.warning("initial Hue Hub connection failed: {}".format(fail_reason))
# l_ReconnectingService.stopService()
l_msg = p_reason.getErrorMessage() # this gives a tuple of messages (I think)
if l_msg == '':
self.m_finished.addCallback(cb_log, self.m_command, self.m_code, self.m_body, self.m_finished, self.m_pyhouse_obj)
self.m_finished.addErrback(eb_failed, p_reason)
self.m_finished.callback(None)
return
LOG.debug('Finished receiving body: {}'.format(PrettyFormatAny.form(l_msg, 'Reason', 190)))
LOG.debug('Finished receiving body: {}'.format("\t".join(str(x) for x in l_msg)))
self.m_finished.callback(None)
return
class HueDecode(object):
"""
"""
def decode_get(self):
"""
"""
LOG.info('Decode_Get')
def decode_post(self):
"""
"""
LOG.info('Decode_Post')
class HueDispatch(HueProtocol):
"""
"""
def _add_light(self, p_light_obj):
l_objs = self.m_pyhouse_obj.House.Lighting.Lights
_l_light_obj = lightingUtility().get_object_type_by_id(l_objs, name=p_light_obj.Name)
pass
def get_config(self, p_body):
# l_msg = jsonpickle.decode(p_body)
# LOG.debug('Got Config {}'.format(PrettyFormatAny.form(l_msg, 'Config', 190)))
pass
def get_lights(self, p_body):
"""
See Docs/Design.md for the JSON returned.
"""
        LOG.debug('{}'.format(p_body))
        return  # NOTE: the parsing code below is currently disabled while debugging.
try:
# l_json = jsonpickle.decode(p_body)
l_json = p_body
except Exception as e_err:
LOG.error('Error - {}\n{}'.format(e_err, PrettyFormatAny.form(l_json, "HUE ERROR", 190)))
# LOG.debug('Got Lights {}'.format(PrettyFormatAny.form(l_json, 'Lights', 190)))
for l_light_obj in l_json.items():
l_light = HueLightData()
LOG.debug('Light: {}'.format(PrettyFormatAny.form(l_light_obj, 'Light', 190)))
for l_key, l_value in l_light_obj[1].items():
l_light.HueLightIndex = l_light_obj[0]
l_light.Key = l_light_obj[0]
# l_light.Active = True
l_light.Family.Name = 'Hue'
l_light.DeviceType = 'Lighting' # Lighting
l_light.DeviceSubType = 'Light'
l_light.ControllerName = 'Hue Hub'
l_light.LastUpdate = datetime.datetime.now()
l_light.IsDimmable = True
# LOG.debug('Add Light: {} {}'.format(l_key, PrettyFormatAny.form(l_value, 'Light', 190)))
if l_key == 'name':
l_light.Name = l_value
# LOG.debug('Add Light {}'.format(PrettyFormatAny.form(l_light, 'Light', 190)))
if l_key == 'type':
l_light.Comment = l_value
if l_key == 'uniqueid':
l_light.HueUniqueId = l_value
                if l_key == 'state':
                    l_state = False
                    l_bri = 0  # Default so BrightnessPct is safe if 'bri' is absent.
for l_st_key, l_st_val in l_value.items():
if l_st_key == 'on':
l_state = l_st_val
if l_st_key == 'bri':
l_bri = l_st_val
                    if l_state:
l_light.BrightnessPct = int(l_bri / 2.54)
else:
l_light.BrightnessPct = 0
LOG.debug('Add Light {}'.format(PrettyFormatAny.form(l_light, 'Light', 190)))
self._add_light(l_light)
def get_rules(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Rules {}'.format(PrettyFormatAny.form(l_msg, 'Rules', 190)))
def get_scenes(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Scenes {}'.format(PrettyFormatAny.form(l_msg, 'Scenes', 190)))
def get_schedules(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Schedules {}'.format(PrettyFormatAny.form(l_msg, 'Schedules', 190)))
def get_sensors(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Sensors {}'.format(PrettyFormatAny.form(l_msg, 'Sensors', 190)))
class HueHub:
"""
"""
m_bridge_obj = None
m_command = b'/config'
m_headers = None
m_hue_agent = None
m_pyhouse_obj = None
def __init__(self, p_pyhouse_obj):
"""
Agent is a very basic HTTP client. It supports I{HTTP} and I{HTTPS} scheme URIs.
"""
self.m_pyhouse_obj = p_pyhouse_obj
self.m_headers = Headers({'User-Agent': ['Hue Hub Web Client']})
self.m_hue_agent = Agent(p_pyhouse_obj._Twisted.Reactor)
LOG.info('Initialized')
def _build_uri(self, p_command=b'/config'):
"""
URI: b'http://192.168.1.131/api/MBFBC-agf6rq5bsWcxLngYZoClGr2pw2oKEMLZgs/config'
"""
l_uri = b'http://'
try:
l_uri += self.m_bridge_obj.IPv4Address
except TypeError:
l_uri += long_to_str(self.m_bridge_obj.IPv4Address).encode("utf8")
l_uri += b'/api/'
try:
l_uri += self.m_bridge_obj.ApiKey
except TypeError:
l_uri += self.m_bridge_obj.ApiKey.encode("utf8")
        try:
            l_uri += p_command.encode("utf8")
        except AttributeError:  # already bytes
            l_uri += p_command
LOG.info('URI: {}'.format(l_uri))
return l_uri
def _build_command(self, p_command):
        try:
            l_command = p_command.encode("utf8")
        except AttributeError:  # already bytes
            l_command = p_command
return l_command
def _get_all_config(self):
"""
/config
/lights
/groups
/schedules
/scenes
/sensors
/rules
"""
        return  # NOTE: the scheduling calls below are currently disabled.
_l_agent_d = self.HubGet('/config')
_l_agent_d = self.HubGet('/lights')
# _l_agent_d = self.HubGet('/groups')
# _l_agent_d = self.HubGet('/schedules')
# _l_agent_d = self.HubGet('/scenes')
# _l_agent_d = self.HubGet('/sensors')
# _l_agent_d = self.HubGet('/rules')
# Server().do_GET()
LOG.info('Scheduled All config')
def HubGet(self, p_command):
""" Issue a request for information. It will arrive later via a deferred.
"""
def cb_Response(p_response, p_command):
"""
"""
# LOG.debug('Command: {}'.format(p_command))
# LOG.debug('Response Code: {} {}'.format(p_response.code, p_response.phrase))
d_finished = Deferred()
p_response.deliverBody(HueProtocol(self.m_pyhouse_obj, d_finished, p_command, p_response.code))
return d_finished
d_agent = self.m_hue_agent.request(
b'GET',
self._build_uri(p_command),
self.m_headers,
None)
d_agent.addCallback(cb_Response, p_command)
HueDecode().decode_get()
return d_agent
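    # Illustrative usage, not in the original source: a caller can chain
    # further processing onto the Deferred returned by HubGet, e.g.:
    #
    #     l_defer = l_hub.HubGet('/lights')
    #     l_defer.addCallback(lambda _ignored: LOG.info('lights request done'))
    #
    # 'l_hub' and 'l_defer' are hypothetical names used only for illustration.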
def HubPostCommand(self, p_command, p_body):
"""
@param p_command: is the Hue command we will be using
@param p_body: is the body producer function.
"""
        def cb_response(p_response):
            LOG.debug('Response Code: {} {}'.format(p_response.code, p_response.phrase))
            LOG.debug('Response Headers: {}'.format(p_response.headers))
            l_finished = Deferred()
            p_response.deliverBody(HueProtocol(self.m_pyhouse_obj, l_finished, p_command, p_response.code))
            return l_finished
return l_finished
l_agent_d = self.m_hue_agent.request(b'POST',
self._build_uri(p_command),
self.m_headers,
p_body)
l_agent_d.addCallback(cb_response)
HueDecode().decode_post()
return l_agent_d
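    # Illustrative sketch (an assumption, not original code): Twisted's Agent
    # expects p_body to be an IBodyProducer; an in-memory one can be built
    # with FileBodyProducer, e.g.:
    #
    #     from io import BytesIO
    #     from twisted.web.client import FileBodyProducer
    #     l_body = FileBodyProducer(BytesIO(b'{"devicetype":"pyhouse"}'))
    #     l_hub.HubPostCommand('/api', l_body)
    #
    # 'l_hub' and the JSON payload are hypothetical.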
def HubStart(self, p_bridge_obj):
""" Start the hub(bridge) and then get the hub data
@param p_bridge_obj: is PyHouse_Obj.Computers.Bridges.xxx with xxx being a HueHub
"""
p_bridge_obj._Queue = Queue(32)
self.m_bridge_obj = p_bridge_obj
self._get_all_config()
LOG.info('Started')
def Start(self):
""" Start the hub(bridge) and then get the hub data
@param p_bridge_obj: is PyHouse_Obj.Computers.Bridges.xxx with xxx being a HueHub
"""
# LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj, 'PyHouse'))
# LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj.Computer, 'Computer'))
# LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj.House, 'House'))
for l_bridge_obj in self.m_pyhouse_obj.Computer.Bridges.values():
LOG.debug(PrettyFormatAny.form(l_bridge_obj, 'Bridge'))
l_bridge_obj._Queue = Queue(32)
self.m_bridge_obj = l_bridge_obj
self._get_all_config()
LOG.debug('Started')
# ## END DBK
| mit | -2,782,662,431,639,316,500 | 32.560322 | 126 | 0.543378 | false |
lgscofield/odoo | addons/l10n_be_hr_payroll/__init__.py | 438 | 1072 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import l10n_be_hr_payroll
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,699,206,533,591,164,000 | 45.608696 | 78 | 0.61847 | false |
mapclient-plugins/trcsourcestep | mapclientplugins/trcsourcestep/ui_configuredialog.py | 1 | 3575 | # -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'configuredialog.ui'
##
## Created by: Qt User Interface Compiler version 5.15.2
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
class Ui_ConfigureDialog(object):
def setupUi(self, ConfigureDialog):
if not ConfigureDialog.objectName():
ConfigureDialog.setObjectName(u"ConfigureDialog")
ConfigureDialog.resize(562, 238)
self.gridLayout = QGridLayout(ConfigureDialog)
self.gridLayout.setObjectName(u"gridLayout")
self.configGroupBox = QGroupBox(ConfigureDialog)
self.configGroupBox.setObjectName(u"configGroupBox")
self.gridLayout_2 = QGridLayout(self.configGroupBox)
self.gridLayout_2.setObjectName(u"gridLayout_2")
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.locLineEdit = QLineEdit(self.configGroupBox)
self.locLineEdit.setObjectName(u"locLineEdit")
self.horizontalLayout.addWidget(self.locLineEdit)
self.locButton = QPushButton(self.configGroupBox)
self.locButton.setObjectName(u"locButton")
self.horizontalLayout.addWidget(self.locButton)
self.gridLayout_2.addLayout(self.horizontalLayout, 1, 1, 1, 1)
self.idLineEdit = QLineEdit(self.configGroupBox)
self.idLineEdit.setObjectName(u"idLineEdit")
self.gridLayout_2.addWidget(self.idLineEdit, 0, 1, 1, 1)
self.locLabel = QLabel(self.configGroupBox)
self.locLabel.setObjectName(u"locLabel")
self.gridLayout_2.addWidget(self.locLabel, 1, 0, 1, 1)
self.idLabel = QLabel(self.configGroupBox)
self.idLabel.setObjectName(u"idLabel")
self.gridLayout_2.addWidget(self.idLabel, 0, 0, 1, 1)
self.verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
self.gridLayout_2.addItem(self.verticalSpacer, 2, 1, 1, 1)
self.gridLayout.addWidget(self.configGroupBox, 0, 0, 1, 1)
self.buttonBox = QDialogButtonBox(ConfigureDialog)
self.buttonBox.setObjectName(u"buttonBox")
self.buttonBox.setOrientation(Qt.Horizontal)
self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 1)
QWidget.setTabOrder(self.idLineEdit, self.locLineEdit)
QWidget.setTabOrder(self.locLineEdit, self.locButton)
QWidget.setTabOrder(self.locButton, self.buttonBox)
self.retranslateUi(ConfigureDialog)
self.buttonBox.accepted.connect(ConfigureDialog.accept)
self.buttonBox.rejected.connect(ConfigureDialog.reject)
QMetaObject.connectSlotsByName(ConfigureDialog)
# setupUi
def retranslateUi(self, ConfigureDialog):
ConfigureDialog.setWindowTitle(QCoreApplication.translate("ConfigureDialog", u"Configure TRC Source Step", None))
self.configGroupBox.setTitle("")
self.locButton.setText(QCoreApplication.translate("ConfigureDialog", u"...", None))
self.locLabel.setText(QCoreApplication.translate("ConfigureDialog", u"Location: ", None))
self.idLabel.setText(QCoreApplication.translate("ConfigureDialog", u"identifier: ", None))
# retranslateUi
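# Note (not part of the generated file): modules like this are typically
# regenerated from the Qt Designer source rather than edited by hand, e.g.:
#     pyside2-uic configuredialog.ui -o ui_configuredialog.py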
| apache-2.0 | -2,478,812,506,755,879,000 | 39.168539 | 121 | 0.682797 | false |
landscapeio/astroid | brain/py2stdlib.py | 1 | 5939 | """Astroid hooks for the Python 2 standard library.
Currently helps understanding of:

* hashlib.md5 and hashlib.sha1 (and the other hashlib constructors)
* collections.deque and collections.defaultdict
* pkg_resources, urlparse and subprocess
* collections.namedtuple inference
"""
from astroid import MANAGER, AsStringRegexpPredicate, UseInferenceDefault, inference_tip
from astroid import nodes
from astroid.builder import AstroidBuilder
MODULE_TRANSFORMS = {}
# module specific transformation functions #####################################
def transform(module):
try:
tr = MODULE_TRANSFORMS[module.name]
except KeyError:
pass
else:
tr(module)
MANAGER.register_transform(nodes.Module, transform)
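# Illustrative example, not part of this module: third-party code could hook a
# module of its own the same way; 'mymath' and 'hypot' below are hypothetical.
#
#     def mymath_transform(module):
#         fake = AstroidBuilder(MANAGER).string_build('''
#     def hypot(x, y):
#         return 0.0
#     ''')
#         module.locals['hypot'] = fake.locals['hypot']
#
#     MODULE_TRANSFORMS['mymath'] = mymath_transform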
# module specific transformation functions #####################################
def hashlib_transform(module):
template = '''
class %s(object):
def __init__(self, value=''): pass
def digest(self):
return u''
def update(self, value): pass
def hexdigest(self):
return u''
'''
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
classes = "".join(template % hashfunc for hashfunc in algorithms)
fake = AstroidBuilder(MANAGER).string_build(classes)
for hashfunc in algorithms:
module.locals[hashfunc] = fake.locals[hashfunc]
def collections_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
class defaultdict(dict):
default_factory = None
def __missing__(self, key): pass
class deque(object):
maxlen = 0
def __init__(iterable=None, maxlen=None): pass
def append(self, x): pass
def appendleft(self, x): pass
def clear(self): pass
def count(self, x): return 0
def extend(self, iterable): pass
def extendleft(self, iterable): pass
def pop(self): pass
def popleft(self): pass
def remove(self, value): pass
def reverse(self): pass
def rotate(self, n): pass
''')
for klass in ('deque', 'defaultdict'):
module.locals[klass] = fake.locals[klass]
def pkg_resources_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def resource_exists(package_or_requirement, resource_name):
pass
def resource_isdir(package_or_requirement, resource_name):
pass
def resource_filename(package_or_requirement, resource_name):
pass
def resource_stream(package_or_requirement, resource_name):
pass
def resource_string(package_or_requirement, resource_name):
pass
def resource_listdir(package_or_requirement, resource_name):
pass
def extraction_error():
pass
def get_cache_path(archive_name, names=()):
pass
def postprocess(tempname, filename):
pass
def set_extraction_path(path):
pass
def cleanup_resources(force=False):
pass
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def urlparse_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def urlparse(url, scheme='', allow_fragments=True):
return ParseResult()
class ParseResult(object):
def __init__(self):
self.scheme = ''
self.netloc = ''
self.path = ''
self.params = ''
self.query = ''
self.fragment = ''
self.username = None
self.password = None
self.hostname = None
self.port = None
def geturl(self):
return ''
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def subprocess_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
class Popen(object):
returncode = pid = 0
stdin = stdout = stderr = file()
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
pass
def communicate(self, input=None):
return ('string', 'string')
def wait(self):
return self.returncode
def poll(self):
return self.returncode
def send_signal(self, signal):
pass
def terminate(self):
pass
def kill(self):
pass
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['urlparse'] = urlparse_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform
# namedtuple support ###########################################################
def infer_named_tuple(node, context=None):
"""Specific inference function for namedtuple CallFunc node"""
# node is a CallFunc node, class name as first argument and generated class
# attributes as second argument
if len(node.args) != 2:
# something weird here, go back to class implementation
raise UseInferenceDefault()
    # namedtuple's list of attributes can be a list of strings or a
    # whitespace-separated string
try:
name = node.args[0].value
try:
attributes = node.args[1].value.split()
except AttributeError:
attributes = [const.value for const in node.args[1].elts]
except AttributeError:
raise UseInferenceDefault()
# we want to return a Class node instance with proper attributes set
class_node = nodes.Class(name, 'docstring')
# set base class=tuple
class_node.bases.append(nodes.Tuple._proxied)
# XXX add __init__(*attributes) method
for attr in attributes:
fake_node = nodes.EmptyNode()
fake_node.parent = class_node
class_node.instance_attrs[attr] = [fake_node]
    # since we may raise UseInferenceDefault, we can't be a generator, so return an iterator
return iter([class_node])
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func'))
| gpl-2.0 | -367,528,999,626,220,400 | 26.623256 | 88 | 0.64691 | false |
nikolas/lettuce | tests/integration/lib/Django-1.3/django/core/serializers/base.py | 202 | 5487 | """
Module for abstract serializer/unserializer base classes.
"""
from StringIO import StringIO
from django.db import models
from django.utils.encoding import smart_str, smart_unicode
from django.utils import datetime_safe
class SerializationError(Exception):
"""Something bad happened during serialization."""
pass
class DeserializationError(Exception):
"""Something bad happened during deserialization."""
pass
class Serializer(object):
"""
Abstract serializer base class.
"""
# Indicates if the implemented serializer is only available for
# internal Django use.
internal_use_only = False
def serialize(self, queryset, **options):
"""
Serialize a queryset.
"""
self.options = options
self.stream = options.pop("stream", StringIO())
self.selected_fields = options.pop("fields", None)
self.use_natural_keys = options.pop("use_natural_keys", False)
self.start_serialization()
for obj in queryset:
self.start_object(obj)
for field in obj._meta.local_fields:
if field.serialize:
if field.rel is None:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_field(obj, field)
else:
if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
self.handle_fk_field(obj, field)
for field in obj._meta.many_to_many:
if field.serialize:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_m2m_field(obj, field)
self.end_object(obj)
self.end_serialization()
return self.getvalue()
def get_string_value(self, obj, field):
"""
Convert a field's value to a string.
"""
return smart_unicode(field.value_to_string(obj))
def start_serialization(self):
"""
Called when serializing of the queryset starts.
"""
raise NotImplementedError
def end_serialization(self):
"""
Called when serializing of the queryset ends.
"""
pass
def start_object(self, obj):
"""
Called when serializing of an object starts.
"""
raise NotImplementedError
def end_object(self, obj):
"""
Called when serializing of an object ends.
"""
pass
def handle_field(self, obj, field):
"""
Called to handle each individual (non-relational) field on an object.
"""
raise NotImplementedError
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey field.
"""
raise NotImplementedError
def handle_m2m_field(self, obj, field):
"""
Called to handle a ManyToManyField.
"""
raise NotImplementedError
def getvalue(self):
"""
Return the fully serialized queryset (or None if the output stream is
not seekable).
"""
if callable(getattr(self.stream, 'getvalue', None)):
return self.stream.getvalue()
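# --- Illustrative sketch, an assumption and not part of Django ---------------
# A minimal concrete subclass only needs to fill in the abstract hooks above;
# the class name and its toy output format are hypothetical.
class _ExampleNameSerializer(Serializer):
    """Writes one 'app_label.ModelName(pk)' line per serialized object."""

    def start_serialization(self):
        self.objects = []

    def start_object(self, obj):
        self.objects.append("%s.%s(%s)" % (
            obj._meta.app_label, obj._meta.object_name, obj.pk))

    def handle_field(self, obj, field):
        pass  # field values are ignored in this toy format

    def handle_fk_field(self, obj, field):
        pass

    def handle_m2m_field(self, obj, field):
        pass

    def end_serialization(self):
        self.stream.write("\n".join(self.objects))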
class Deserializer(object):
"""
Abstract base deserializer class.
"""
def __init__(self, stream_or_string, **options):
"""
Init this serializer given a stream or a string
"""
self.options = options
if isinstance(stream_or_string, basestring):
self.stream = StringIO(stream_or_string)
else:
self.stream = stream_or_string
# hack to make sure that the models have all been loaded before
# deserialization starts (otherwise subclass calls to get_model()
# and friends might fail...)
models.get_apps()
def __iter__(self):
return self
def next(self):
"""Iteration iterface -- return the next item in the stream"""
raise NotImplementedError
class DeserializedObject(object):
"""
A deserialized model.
Basically a container for holding the pre-saved deserialized data along
with the many-to-many data saved with the object.
Call ``save()`` to save the object (with the many-to-many data) to the
database; call ``save(save_m2m=False)`` to save just the object fields
(and not touch the many-to-many stuff.)
"""
def __init__(self, obj, m2m_data=None):
self.object = obj
self.m2m_data = m2m_data
def __repr__(self):
return "<DeserializedObject: %s.%s(pk=%s)>" % (
self.object._meta.app_label, self.object._meta.object_name, self.object.pk)
def save(self, save_m2m=True, using=None):
# Call save on the Model baseclass directly. This bypasses any
# model-defined save. The save is also forced to be raw.
# This ensures that the data that is deserialized is literally
# what came from the file, not post-processed by pre_save/save
# methods.
models.Model.save_base(self.object, using=using, raw=True)
if self.m2m_data and save_m2m:
for accessor_name, object_list in self.m2m_data.items():
setattr(self.object, accessor_name, object_list)
# prevent a second (possibly accidental) call to save() from saving
# the m2m data twice.
self.m2m_data = None
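# Illustrative usage, not part of the original module: a loader built on a
# concrete Deserializer subclass typically iterates and saves, e.g.:
#
#     for deserialized in SomeDeserializer(stream):
#         deserialized.save()
#
# 'SomeDeserializer' is a hypothetical concrete subclass.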
| gpl-3.0 | -2,018,896,434,435,930,000 | 30.901163 | 102 | 0.602515 | false |
arcivanov/unittest-xml-reporting | setup.py | 4 | 1728 | #!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.util import convert_path
import codecs
# Load version information
main_ns = {}
ver_path = convert_path('xmlrunner/version.py')
with codecs.open(ver_path, 'rb', 'utf8') as ver_file:
exec(ver_file.read(), main_ns)
install_requires = ['six>=1.4.0']
# this is for sdist to work.
import sys
if sys.version_info < (2, 7):
install_requires += ['unittest2']
setup(
name = 'unittest-xml-reporting',
version = main_ns['__version__'],
author = 'Daniel Fernandes Martins',
author_email = '[email protected]',
description = 'unittest-based test runner with Ant/JUnit like XML reporting.',
license = 'BSD',
platforms = ['Any'],
keywords = [
'pyunit', 'unittest', 'junit xml', 'report', 'testrunner', 'xmlrunner'
],
url = 'http://github.com/xmlrunner/unittest-xml-reporting/tree/master/',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing'
],
packages = ['xmlrunner', 'xmlrunner.extra'],
zip_safe = False,
include_package_data = True,
install_requires = install_requires,
extras_require={
# this is for wheels to work
':python_version=="2.6"': ['unittest2'],
},
test_suite = 'tests'
)
| bsd-2-clause | -6,121,670,633,159,836,000 | 31.603774 | 82 | 0.627315 | false |
adambrenecki/django | django/contrib/gis/tests/geoapp/tests.py | 4 | 37417 | from __future__ import unicode_literals
import re
import unittest
from unittest import skipUnless
from django.db import connection
from django.contrib.gis import gdal
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.tests.utils import (
HAS_SPATIAL_DB, no_mysql, no_oracle, no_spatialite,
mysql, oracle, postgis, spatialite)
from django.test import TestCase
from django.utils import six
if HAS_GEOS:
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
Point, LineString, LinearRing, Polygon, GeometryCollection)
from .models import Country, City, PennsylvaniaCity, State, Track
if HAS_GEOS and not spatialite:
from .models import Feature, MinusOneSRID
def postgis_bug_version():
    spatial_version = getattr(connection.ops, "spatial_version", (0, 0, 0))
return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
class GeoModelTest(TestCase):
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
## Testing on a Point
pnt = Point(0, 0)
nullcity = City(name='NullCity', point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
try:
nullcity.point = bad
except TypeError:
pass
else:
self.fail('Should throw a TypeError')
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name='NullCity').point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.save()
self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.delete()
## Testing on a Polygon
shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name='NullState', poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name='NullState')
self.assertEqual(ply, ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
if gdal.HAS_GDAL:
self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertEqual(True, isinstance(ns.poly.srs, gdal.SpatialReference))
self.assertEqual('WGS 84', ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(ply, State.objects.get(name='NullState').poly)
ns.delete()
@no_mysql
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = 'POINT (-98.493183 29.424170)'
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# Oracle doesn't have SRID 3084, using 41157.
if oracle:
# San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
# Used the following Oracle SQL to get this value:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)) FROM DUAL;
nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
nad_srid = 41157
else:
# San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)' # Used ogr.py in gdal 1.4.1 for this transform
nad_srid = 3084
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
nad_pnt = fromstr(nad_wkt, srid=nad_srid)
if oracle:
tx = Country.objects.get(mpoly__contains=nad_pnt)
else:
tx = Country.objects.get(mpoly__intersects=nad_pnt)
self.assertEqual('Texas', tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name='San Antonio', point=nad_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name='San Antonio')
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
# SpatiaLite does not support missing SRID values.
if not spatialite:
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertEqual(c.point, None)
@no_spatialite # SpatiaLite does not support abstract geometry columns
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name='Point', geom=Point(1, 1)).save()
Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
Feature(name='GeometryCollection',
geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
f_1 = Feature.objects.get(name='Point')
self.assertEqual(True, isinstance(f_1.geom, Point))
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name='LineString')
self.assertEqual(True, isinstance(f_2.geom, LineString))
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name='Polygon')
self.assertEqual(True, isinstance(f_3.geom, Polygon))
f_4 = Feature.objects.get(name='GeometryCollection')
self.assertEqual(True, isinstance(f_4.geom, GeometryCollection))
self.assertEqual(f_3.geom, f_4.geom[2])
@no_mysql
def test_inherited_geofields(self):
"Test GeoQuerySet methods on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.transform(32128)
self.assertEqual(1, qs.count())
for pc in qs: self.assertEqual(32128, pc.point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
# Only PostGIS would support a 'select *' query because of its recognized
# HEXEWKB format for geometry fields
as_text = 'ST_AsText' if postgis else 'asText'
cities2 = City.objects.raw('select id, name, %s(point) from geoapp_city' % as_text)
self.assertEqual(len(cities1), len(list(cities2)))
self.assertTrue(isinstance(cities2[0].point, Point))
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
class GeoLookupTest(TestCase):
@no_mysql
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name='Pueblo')
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual('Kansas', qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name='Texas')
# Seeing what cities are in Texas, should get Houston and Dallas,
# and Oklahoma City because 'contained' only checks on the
# _bounding box_ of the Geometries.
if not oracle:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ['Houston', 'Dallas', 'Oklahoma City']
for c in qs: self.assertEqual(True, c.name in cities)
# Pulling out some cities.
houston = City.objects.get(name='Houston')
wellington = City.objects.get(name='Wellington')
pueblo = City.objects.get(name='Pueblo')
okcity = City.objects.get(name='Oklahoma City')
lawrence = City.objects.get(name='Lawrence')
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX
self.assertEqual('Texas', tx.name)
self.assertEqual('New Zealand', nz.name)
# Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry).
if not spatialite:
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual('Kansas', ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
# are not contained in Texas or New Zealand.
self.assertEqual(0, len(Country.objects.filter(mpoly__contains=pueblo.point))) # Query w/GEOSGeometry object
self.assertEqual((mysql and 1) or 0,
                         len(Country.objects.filter(mpoly__contains=okcity.point.wkt)))  # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if not oracle:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual('Texas', qs[0].name)
# Only PostGIS has `left` and `right` lookup types.
@no_mysql
@no_oracle
@no_spatialite
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name='Colorado').poly
ks_border = State.objects.get(name='Kansas').poly
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
# These cities should be strictly to the right of the CO border.
cities = ['Houston', 'Dallas', 'Oklahoma City',
'Lawrence', 'Chicago', 'Wellington']
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs: self.assertEqual(True, c.name in cities)
# These cities should be strictly to the right of the KS border.
cities = ['Chicago', 'Wellington']
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs: self.assertEqual(True, c.name in cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual('Victoria', vic.name)
cities = ['Pueblo', 'Victoria']
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs: self.assertEqual(True, c.name in cities)
# The left/right lookup tests are known failures on PostGIS 2.0/2.0.1
# http://trac.osgeo.org/postgis/ticket/2035
if postgis_bug_version():
test_left_right_lookups = unittest.expectedFailure(test_left_right_lookups)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]: self.assertEqual('Houston', c.name)
@no_mysql
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name='Puerto Rico')
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual('Puerto Rico', nullqs[0].name)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertEqual(True, 'Colorado' in state_names)
self.assertEqual(True, 'Kansas' in state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
self.assertEqual(nmi.poly, None)
        # Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
nmi.save()
State.objects.filter(name='Northern Mariana Islands').update(poly=None)
self.assertEqual(None, State.objects.get(name='Northern Mariana Islands').poly)
@no_mysql
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
        # Not passing in a geometry as the first param should
        # raise a ValueError when initializing the GeoQuerySet.
self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
qs = Country.objects.filter(mpoly__relate=bad_args)
self.assertRaises(e, qs.count)
# Relate works differently for the different backends.
if postgis or spatialite:
contains_mask = 'T*T***FF*'
within_mask = 'T*F**F***'
intersects_mask = 'T********'
elif oracle:
contains_mask = 'contains'
within_mask = 'inside'
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = 'overlapbdyintersect'
# Testing contains relation mask.
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)
# Testing within relation mask.
ks = State.objects.get(name='Kansas')
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)
# Testing intersection relation mask.
if not oracle:
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
class GeoQuerySetTest(TestCase):
# Please keep the tests in GeoQuerySet method's alphabetic order
@no_mysql
def test_centroid(self):
"Testing the `centroid` GeoQuerySet method."
qs = State.objects.exclude(poly__isnull=True).centroid()
if oracle:
tol = 0.1
elif spatialite:
tol = 0.000001
else:
tol = 0.000000001
for s in qs:
self.assertEqual(True, s.poly.centroid.equals_exact(s.centroid, tol))
@no_mysql
def test_diff_intersection_union(self):
"Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods."
geom = Point(5, 23)
qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom)
        # XXX For some reason SpatiaLite does something screwy with the Texas geometry here. Also,
# XXX it doesn't like the null intersection.
if spatialite:
qs = qs.exclude(name='Texas')
else:
qs = qs.intersection(geom)
for c in qs:
if oracle:
# Should be able to execute the queries; however, they won't be the same
# as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
# SpatiaLite).
pass
else:
self.assertEqual(c.mpoly.difference(geom), c.difference)
if not spatialite:
self.assertEqual(c.mpoly.intersection(geom), c.intersection)
self.assertEqual(c.mpoly.sym_difference(geom), c.sym_difference)
self.assertEqual(c.mpoly.union(geom), c.union)
@skipUnless(getattr(connection.ops, 'envelope', False), 'Database does not support envelope operation')
def test_envelope(self):
"Testing the `envelope` GeoQuerySet method."
countries = Country.objects.all().envelope()
for country in countries:
self.assertIsInstance(country.envelope, Polygon)
@no_mysql
@no_spatialite # SpatiaLite does not have an Extent function
def test_extent(self):
"Testing the `extent` GeoQuerySet method."
# Reference query:
# `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
qs = City.objects.filter(name__in=('Houston', 'Dallas'))
extent = qs.extent()
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
@no_mysql
@no_oracle
@no_spatialite
def test_force_rhr(self):
"Testing GeoQuerySet.force_rhr()."
rings = ( ( (0, 0), (5, 0), (0, 5), (0, 0) ),
( (1, 1), (1, 3), (3, 1), (1, 1) ),
)
rhr_rings = ( ( (0, 0), (0, 5), (5, 0), (0, 0) ),
( (1, 1), (3, 1), (1, 3), (1, 1) ),
)
State.objects.create(name='Foo', poly=Polygon(*rings))
s = State.objects.force_rhr().get(name='Foo')
self.assertEqual(rhr_rings, s.force_rhr.coords)
@no_mysql
@no_oracle
@no_spatialite
def test_geohash(self):
"Testing GeoQuerySet.geohash()."
if not connection.ops.geohash: return
# Reference query:
# SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
# SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
ref_hash = '9vk1mfq8jx0c8e0386z6'
h1 = City.objects.geohash().get(name='Houston')
h2 = City.objects.geohash(precision=5).get(name='Houston')
self.assertEqual(ref_hash, h1.geohash)
self.assertEqual(ref_hash[:5], h2.geohash)
def test_geojson(self):
"Testing GeoJSON output from the database using GeoQuerySet.geojson()."
# Only PostGIS 1.3.4+ and SpatiaLite 3.0+ support GeoJSON.
if not connection.ops.geojson:
self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly')
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.305196,48.462611]}'
chicago_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
if postgis and connection.ops.spatial_version < (1, 4, 0):
pueblo_json = '{"type":"Point","coordinates":[-104.60925200,38.25500100]}'
houston_json = '{"type":"Point","crs":{"type":"EPSG","properties":{"EPSG":4326}},"coordinates":[-95.36315100,29.76337400]}'
victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.30519600,48.46261100]}'
elif spatialite:
victoria_json = '{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],"coordinates":[-123.305196,48.462611]}'
# Precision argument should only be an integer
self.assertRaises(TypeError, City.objects.geojson, precision='foo')
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)
# 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)
# 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Victoria';
# 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)
# 1.(3|4).x: SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
self.assertEqual(chicago_json, City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson)
def test_gml(self):
"Testing GML output from the database using GeoQuerySet.gml()."
        if mysql or (spatialite and not connection.ops.gml):
self.assertRaises(NotImplementedError, Country.objects.all().gml, field_name='mpoly')
return
# Should throw a TypeError when tyring to obtain GML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.gml, field_name='name')
ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.gml(precision=9).get(name='Pueblo')
if oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml"><gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ </gml:coordinates></gml:Point>')
elif spatialite and connection.ops.spatial_version < (3, 0, 0):
# Spatialite before 3.0 has extra colon in SrsName
gml_regex = re.compile(r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>')
else:
gml_regex = re.compile(r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>')
for ptown in [ptown1, ptown2]:
self.assertTrue(gml_regex.match(ptown.gml))
# PostGIS < 1.5 doesn't include dimension im GMLv3 output.
if postgis and connection.ops.spatial_version >= (1, 5, 0):
self.assertIn('<gml:pos srsDimension="2">',
City.objects.gml(version=3).get(name='Pueblo').gml)
def test_kml(self):
"Testing KML output from the database using GeoQuerySet.kml()."
# Only PostGIS and Spatialite (>=2.4.0-RC4) support KML serialization
if not (postgis or (spatialite and connection.ops.kml)):
self.assertRaises(NotImplementedError, State.objects.all().kml, field_name='poly')
return
# Should throw a TypeError when trying to obtain KML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.kml, 'name')
# The reference KML depends on the version of PostGIS used
# (the output stopped including altitude in 1.3.3).
if connection.ops.spatial_version >= (1, 3, 3):
ref_kml = '<Point><coordinates>-104.609252,38.255001</coordinates></Point>'
else:
ref_kml = '<Point><coordinates>-104.609252,38.255001,0</coordinates></Point>'
# Ensuring the KML is as expected.
ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.kml(precision=9).get(name='Pueblo')
for ptown in [ptown1, ptown2]:
self.assertEqual(ref_kml, ptown.kml)
# Only PostGIS has support for the MakeLine aggregate.
@no_mysql
@no_oracle
@no_spatialite
def test_make_line(self):
"Testing the `make_line` GeoQuerySet method."
# Ensuring that a `TypeError` is raised on models without PointFields.
self.assertRaises(TypeError, State.objects.make_line)
self.assertRaises(TypeError, Country.objects.make_line)
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
ref_line = GEOSGeometry('LINESTRING(-95.363151 29.763374,-96.801611 32.782057,-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326)
self.assertEqual(ref_line, City.objects.make_line())
@no_mysql
def test_num_geom(self):
"Testing the `num_geom` GeoQuerySet method."
# Both 'countries' only have two geometries.
for c in Country.objects.num_geom():
self.assertEqual(2, c.num_geom)
for c in City.objects.filter(point__isnull=False).num_geom():
# Oracle and PostGIS 2.0+ will return 1 for the number of
# geometries on non-collections, whereas PostGIS < 2.0.0
# will return None.
if postgis and connection.ops.spatial_version < (2, 0, 0):
self.assertIsNone(c.num_geom)
else:
self.assertEqual(1, c.num_geom)
@no_mysql
@no_spatialite # SpatiaLite can only count vertices in LineStrings
def test_num_points(self):
"Testing the `num_points` GeoQuerySet method."
for c in Country.objects.num_points():
self.assertEqual(c.mpoly.num_points, c.num_points)
if not oracle:
# Oracle cannot count vertices in Point geometries.
for c in City.objects.num_points(): self.assertEqual(1, c.num_points)
@no_mysql
def test_point_on_surface(self):
"Testing the `point_on_surface` GeoQuerySet method."
# Reference values.
if oracle:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05)) FROM GEOAPP_COUNTRY;
ref = {'New Zealand' : fromstr('POINT (174.616364 -36.100861)', srid=4326),
'Texas' : fromstr('POINT (-103.002434 36.500397)', srid=4326),
}
elif postgis or spatialite:
# Using GEOSGeometry to compute the reference point on surface values
# -- since PostGIS also uses GEOS these should be the same.
ref = {'New Zealand' : Country.objects.get(name='New Zealand').mpoly.point_on_surface,
'Texas' : Country.objects.get(name='Texas').mpoly.point_on_surface
}
for c in Country.objects.point_on_surface():
if spatialite:
# XXX This seems to be a WKT-translation-related precision issue?
tol = 0.00001
else:
tol = 0.000000001
self.assertEqual(True, ref[c.name].equals_exact(c.point_on_surface, tol))
@no_mysql
@no_spatialite
def test_reverse_geom(self):
"Testing GeoQuerySet.reverse_geom()."
coords = [ (-95.363151, 29.763374), (-95.448601, 29.713803) ]
Track.objects.create(name='Foo', line=LineString(coords))
t = Track.objects.reverse_geom().get(name='Foo')
coords.reverse()
self.assertEqual(tuple(coords), t.reverse_geom.coords)
if oracle:
self.assertRaises(TypeError, State.objects.reverse_geom)
@no_mysql
@no_oracle
def test_scale(self):
"Testing the `scale` GeoQuerySet method."
xfac, yfac = 2, 3
tol = 5 # XXX The low precision tolerance is for SpatiaLite
qs = Country.objects.scale(xfac, yfac, model_att='scaled')
for c in qs:
for p1, p2 in zip(c.mpoly, c.scaled):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
@no_mysql
@no_oracle
@no_spatialite
def test_snap_to_grid(self):
"Testing GeoQuerySet.snap_to_grid()."
# Let's try and break snap_to_grid() with bad combinations of arguments.
for bad_args in ((), range(3), range(5)):
self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args)
for bad_args in (('1.0',), (1.0, None), tuple(map(six.text_type, range(4)))):
self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args)
# Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
# from the world borders dataset he provides.
wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
'12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
'12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
'12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
'12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
'12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
'12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
'12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
# Because floating-point arithmetic isn't exact, we set a tolerance
# to pass into GEOS `equals_exact`.
tol = 0.000000001
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid, tol))
def test_svg(self):
"Testing SVG output using GeoQuerySet.svg()."
if mysql or oracle:
self.assertRaises(NotImplementedError, City.objects.svg)
return
self.assertRaises(TypeError, City.objects.svg, precision='foo')
# SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
svg1 = 'cx="-104.609252" cy="-38.255001"'
# Even though relative, only one point so it's practically the same except for
# the 'c' letter prefix on the x,y values.
svg2 = svg1.replace('c', '')
self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg)
self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg)
@no_mysql
def test_transform(self):
"Testing the transform() GeoQuerySet method."
# Pre-transformed points for Houston and Pueblo.
htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084)
ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
prec = 3 # Precision is low due to version variations in PROJ and GDAL.
# Asserting the result of the transform operation with the values in
# the pre-transformed points. Oracle does not have the 3084 SRID.
if not oracle:
h = City.objects.transform(htown.srid).get(name='Houston')
self.assertEqual(3084, h.point.srid)
self.assertAlmostEqual(htown.x, h.point.x, prec)
self.assertAlmostEqual(htown.y, h.point.y, prec)
p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo')
p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo')
for p in [p1, p2]:
self.assertEqual(2774, p.point.srid)
self.assertAlmostEqual(ptown.x, p.point.x, prec)
self.assertAlmostEqual(ptown.y, p.point.y, prec)
@no_mysql
@no_oracle
def test_translate(self):
"Testing the `translate` GeoQuerySet method."
xfac, yfac = 5, -23
qs = Country.objects.translate(xfac, yfac, model_att='translated')
for c in qs:
for p1, p2 in zip(c.mpoly, c.translated):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
# XXX The low precision is for SpatiaLite
self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
@no_mysql
def test_unionagg(self):
"Testing the `unionagg` (aggregate union) GeoQuerySet method."
tx = Country.objects.get(name='Texas').mpoly
        # Houston, Dallas -- Oracle has different order.
        union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
        union2 = fromstr('MULTIPOINT(-95.363151 29.763374,-96.801611 32.782057)')
qs = City.objects.filter(point__within=tx)
self.assertRaises(TypeError, qs.unionagg, 'name')
# Using `field_name` keyword argument in one query and specifying an
# order in the other (which should not be used because this is
# an aggregate method on a spatial column)
u1 = qs.unionagg(field_name='point')
u2 = qs.order_by('name').unionagg()
tol = 0.00001
if oracle:
union = union2
else:
union = union1
self.assertEqual(True, union.equals_exact(u1, tol))
self.assertEqual(True, union.equals_exact(u2, tol))
qs = City.objects.filter(name='NotACity')
self.assertEqual(None, qs.unionagg(field_name='point'))
| bsd-3-clause | -5,255,689,310,111,231,000 | 46.604326 | 227 | 0.62215 | false |
BrandonY/python-docs-samples | tasks/pull_queue_snippets_test.py | 1 | 1231 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pull_queue_snippets
TEST_PROJECT_ID = os.getenv('GCLOUD_PROJECT')
TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1')
TEST_QUEUE_NAME = os.getenv('TEST_QUEUE_NAME', 'my-pull-queue')
def test_create_task():
result = pull_queue_snippets.create_task(
TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION)
assert TEST_QUEUE_NAME in result['name']
def test_pull_and_ack_task():
pull_queue_snippets.create_task(
TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION)
task = pull_queue_snippets.pull_task(
TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION)
pull_queue_snippets.acknowledge_task(task)
| apache-2.0 | 5,580,290,444,639,018,000 | 34.171429 | 74 | 0.73355 | false |
shashank971/edx-platform | common/djangoapps/terrain/stubs/http.py | 139 | 8556 | """
Stub implementation of an HTTP service.
"""
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import urlparse
import threading
import json
from functools import wraps
from lazy import lazy
from logging import getLogger
LOGGER = getLogger(__name__)
def require_params(method, *required_keys):
"""
Decorator to ensure that the method has all the required parameters.
Example:
@require_params('GET', 'id', 'state')
def handle_request(self):
# ....
would send a 400 response if no GET parameters were specified
for 'id' or 'state' (or if those parameters had empty values).
The wrapped function should be a method of a `StubHttpRequestHandler`
subclass.
Currently, "GET" and "POST" are the only supported methods.
"""
def decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
# Read either GET querystring params or POST dict params
if method == "GET":
params = self.get_params
elif method == "POST":
params = self.post_dict
else:
raise ValueError("Unsupported method '{method}'".format(method=method))
# Check for required values
missing = []
for key in required_keys:
if params.get(key) is None:
missing.append(key)
if len(missing) > 0:
msg = "Missing required key(s) {keys}".format(keys=",".join(missing))
self.send_response(400, content=msg, headers={'Content-type': 'text/plain'})
# If nothing is missing, execute the function as usual
else:
return func(self, *args, **kwargs)
return wrapper
return decorator
class StubHttpRequestHandler(BaseHTTPRequestHandler, object):
"""
Handler for the stub HTTP service.
"""
protocol = "HTTP/1.0"
def log_message(self, format_str, *args):
"""
Redirect messages to keep the test console clean.
"""
LOGGER.debug(self._format_msg(format_str, *args))
def log_error(self, format_str, *args):
"""
Helper to log a server error.
"""
LOGGER.error(self._format_msg(format_str, *args))
@lazy
def request_content(self):
"""
Retrieve the content of the request.
"""
try:
length = int(self.headers.getheader('content-length'))
except (TypeError, ValueError):
return ""
else:
return self.rfile.read(length)
@lazy
def post_dict(self):
"""
Retrieve the request POST parameters from the client as a dictionary.
If no POST parameters can be interpreted, return an empty dict.
"""
contents = self.request_content
# The POST dict will contain a list of values for each key.
# None of our parameters are lists, however, so we map [val] --> val
# If the list contains multiple entries, we pick the first one
try:
post_dict = urlparse.parse_qs(contents, keep_blank_values=True)
return {
key: list_val[0]
for key, list_val in post_dict.items()
}
        except Exception:
return dict()
@lazy
def get_params(self):
"""
Return the GET parameters (querystring in the URL).
"""
query = urlparse.urlparse(self.path).query
# By default, `parse_qs` returns a list of values for each param
# For convenience, we replace lists of 1 element with just the element
return {
key: value[0] if len(value) == 1 else value
for key, value in urlparse.parse_qs(query).items()
}
@lazy
def path_only(self):
"""
Return the URL path without GET parameters.
Removes the trailing slash if there is one.
"""
path = urlparse.urlparse(self.path).path
if path.endswith('/'):
return path[:-1]
else:
return path
def do_PUT(self):
"""
Allow callers to configure the stub server using the /set_config URL.
The request should have POST data, such that:
Each POST parameter is the configuration key.
Each POST value is a JSON-encoded string value for the configuration.
"""
if self.path == "/set_config" or self.path == "/set_config/":
if len(self.post_dict) > 0:
for key, value in self.post_dict.iteritems():
# Decode the params as UTF-8
try:
key = unicode(key, 'utf-8')
value = unicode(value, 'utf-8')
except UnicodeDecodeError:
self.log_message("Could not decode request params as UTF-8")
self.log_message(u"Set config '{0}' to '{1}'".format(key, value))
try:
value = json.loads(value)
except ValueError:
self.log_message(u"Could not parse JSON: {0}".format(value))
self.send_response(400)
else:
self.server.config[key] = value
self.send_response(200)
# No parameters sent to configure, so return success by default
else:
self.send_response(200)
else:
self.send_response(404)
def send_response(self, status_code, content=None, headers=None):
"""
Send a response back to the client with the HTTP `status_code` (int),
`content` (str) and `headers` (dict).
"""
self.log_message(
"Sent HTTP response: {0} with content '{1}' and headers {2}".format(status_code, content, headers)
)
if headers is None:
headers = {
'Access-Control-Allow-Origin': "*",
}
BaseHTTPRequestHandler.send_response(self, status_code)
for (key, value) in headers.items():
self.send_header(key, value)
if len(headers) > 0:
self.end_headers()
if content is not None:
self.wfile.write(content)
def send_json_response(self, content):
"""
Send a response with status code 200, the given content serialized as
JSON, and the Content-Type header set appropriately
"""
self.send_response(200, json.dumps(content), {"Content-Type": "application/json"})
def _format_msg(self, format_str, *args):
"""
Format message for logging.
`format_str` is a string with old-style Python format escaping;
`args` is an array of values to fill into the string.
"""
return u"{0} - - [{1}] {2}\n".format(
self.client_address[0],
self.log_date_time_string(),
format_str % args
)
def do_HEAD(self):
"""
Respond to an HTTP HEAD request
"""
self.send_response(200)
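# Illustrative sketch (added for this edit, not part of the original module):
# a handler whose GET endpoint is guarded by `require_params`. The '/echo'
# path and the 'id'/'state' parameter names are assumptions made up for the
# example.
class _ExampleEchoHandler(StubHttpRequestHandler):
    """
    Example handler: echo back two required GET parameters as JSON.
    """
    @require_params('GET', 'id', 'state')
    def do_GET(self):
        if self.path_only == '/echo':
            self.send_json_response({
                'id': self.get_params['id'],
                'state': self.get_params['state'],
            })
        else:
            self.send_response(404)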
class StubHttpService(HTTPServer, object):
"""
Stub HTTP service implementation.
"""
# Subclasses override this to provide the handler class to use.
# Should be a subclass of `StubHttpRequestHandler`
HANDLER_CLASS = StubHttpRequestHandler
def __init__(self, port_num=0):
"""
        Configure the server to listen on all network interfaces (0.0.0.0).
Default is to choose an arbitrary open port.
"""
address = ('0.0.0.0', port_num)
HTTPServer.__init__(self, address, self.HANDLER_CLASS)
# Create a dict to store configuration values set by the client
self.config = dict()
# Start the server in a separate thread
server_thread = threading.Thread(target=self.serve_forever)
server_thread.daemon = True
server_thread.start()
# Log the port we're using to help identify port conflict errors
LOGGER.debug('Starting service on port {0}'.format(self.port))
def shutdown(self):
"""
Stop the server and free up the port
"""
# First call superclass shutdown()
HTTPServer.shutdown(self)
# We also need to manually close the socket
self.socket.close()
@property
def port(self):
"""
Return the port that the service is listening on.
"""
_, port = self.server_address
return port
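def _example_configure_stub():
    """
    Illustrative only (added for this edit, not part of the original
    module): start a stub service and push one configuration value to it,
    using the /set_config protocol documented in
    `StubHttpRequestHandler.do_PUT`. The 'delay' key is an arbitrary
    example; any JSON-encodable value works.
    """
    import httplib
    import json
    import urllib
    server = StubHttpService()
    body = urllib.urlencode({'delay': json.dumps(2.5)})
    conn = httplib.HTTPConnection('127.0.0.1', server.port)
    conn.request('PUT', '/set_config', body)
    status = conn.getresponse().status
    conn.close()
    server.shutdown()
    return status == 200 and server.config.get('delay') == 2.5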
| agpl-3.0 | -9,055,320,953,270,088,000 | 29.888087 | 110 | 0.563114 | false |
espadrine/opera | chromium/src/v8/tools/testrunner/local/utils.py | 21 | 3476 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
from os.path import exists
from os.path import isdir
from os.path import join
import platform
import re
def GetSuitePaths(test_root):
def IsSuite(path):
return isdir(path) and exists(join(path, 'testcfg.py'))
return [ f for f in os.listdir(test_root) if IsSuite(join(test_root, f)) ]
# Reads a file into an array of strings
def ReadLinesFrom(name):
lines = []
with open(name) as f:
for line in f:
if line.startswith('#'): continue
if '#' in line:
line = line[:line.find('#')]
line = line.strip()
if not line: continue
lines.append(line)
return lines
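# For example (illustrative), given a status file containing
#   # a comment line
#   test-a # trailing comment
#   test-b
# ReadLinesFrom() returns ['test-a', 'test-b'].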
def GuessOS():
system = platform.system()
if system == 'Linux':
return 'linux'
elif system == 'Darwin':
return 'macos'
elif system.find('CYGWIN') >= 0:
return 'cygwin'
elif system == 'Windows' or system == 'Microsoft':
# On Windows Vista platform.system() can return 'Microsoft' with some
# versions of Python, see http://bugs.python.org/issue1082
return 'win32'
elif system == 'FreeBSD':
return 'freebsd'
elif system == 'OpenBSD':
return 'openbsd'
elif system == 'SunOS':
return 'solaris'
elif system == 'NetBSD':
return 'netbsd'
else:
return None
# This will default to building the 32 bit VM even on machines that are
# capable of running the 64 bit VM.
def DefaultArch():
machine = platform.machine()
machine = machine.lower() # Windows 7 capitalizes 'AMD64'.
if machine.startswith('arm'):
return 'arm'
  elif (not machine) or (re.match('(x|i[3-6])86$', machine) is not None):
return 'ia32'
elif machine == 'i86pc':
return 'ia32'
elif machine == 'x86_64':
return 'ia32'
elif machine == 'amd64':
return 'ia32'
else:
return None
def GuessWordsize():
if '64' in platform.machine():
return '64'
else:
return '32'
def IsWindows():
return GuessOS() == 'win32'
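def _example_platform_string():
  # Illustrative only (added for this edit, not part of the original
  # module): a compact "<os>.<arch> (<bits>-bit)" summary built from the
  # helpers above; the format itself is an arbitrary choice.
  return '%s.%s (%s-bit)' % (GuessOS(), DefaultArch(), GuessWordsize())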
| bsd-3-clause | -2,962,663,490,647,458,000 | 31.185185 | 76 | 0.698504 | false |
klonage/nlt-gcs | packages/IronPython.StdLib.2.7.4/content/Lib/encodings/hex_codec.py | 88 | 2388 | """ Python 'hex_codec' Codec - 2-digit hex content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg ([email protected]).
"""
import codecs, binascii
### Codec APIs
def hex_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.b2a_hex(input)
return (output, len(input))
def hex_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.a2b_hex(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input,errors='strict'):
return hex_encode(input,errors)
def decode(self, input,errors='strict'):
return hex_decode(input,errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
assert self.errors == 'strict'
return binascii.b2a_hex(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
assert self.errors == 'strict'
return binascii.a2b_hex(input)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='hex',
encode=hex_encode,
decode=hex_decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
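def _example_roundtrip():
    # Illustrative only (added for this edit, not part of the original
    # module): exercising the module-level helpers directly.
    encoded, consumed = hex_encode('hello')
    assert (encoded, consumed) == ('68656c6c6f', 5)
    decoded, consumed = hex_decode(encoded)
    assert (decoded, consumed) == ('hello', 10)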
| gpl-3.0 | -4,965,252,707,678,812,000 | 28.227848 | 68 | 0.663735 | false |
zaina/nova | tools/install_venv_common.py | 333 | 5959 | # Copyright 2013 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Provides methods needed by installation script for OpenStack development
virtual environments.
Since this script is used to bootstrap a virtualenv from the system's Python
environment, it should be kept strictly compatible with Python 2.6.
Synced in from openstack-common
"""
from __future__ import print_function
import optparse
import os
import subprocess
import sys
class InstallVenv(object):
def __init__(self, root, venv, requirements,
test_requirements, py_version,
project):
self.root = root
self.venv = venv
self.requirements = requirements
self.test_requirements = test_requirements
self.py_version = py_version
self.project = project
def die(self, message, *args):
print(message % args, file=sys.stderr)
sys.exit(1)
def check_python_version(self):
if sys.version_info < (2, 6):
self.die("Need Python Version >= 2.6")
def run_command_with_code(self, cmd, redirect_output=True,
check_exit_code=True):
"""Runs a command in an out-of-process shell.
Returns the output of that command. Working directory is self.root.
"""
if redirect_output:
stdout = subprocess.PIPE
else:
stdout = None
proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
output = proc.communicate()[0]
if check_exit_code and proc.returncode != 0:
self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
return (output, proc.returncode)
def run_command(self, cmd, redirect_output=True, check_exit_code=True):
return self.run_command_with_code(cmd, redirect_output,
check_exit_code)[0]
def get_distro(self):
if (os.path.exists('/etc/fedora-release') or
os.path.exists('/etc/redhat-release')):
return Fedora(
self.root, self.venv, self.requirements,
self.test_requirements, self.py_version, self.project)
else:
return Distro(
self.root, self.venv, self.requirements,
self.test_requirements, self.py_version, self.project)
def check_dependencies(self):
self.get_distro().install_virtualenv()
def create_virtualenv(self, no_site_packages=True):
"""Creates the virtual environment and installs PIP.
Creates the virtual environment and installs PIP only into the
virtual environment.
"""
if not os.path.isdir(self.venv):
print('Creating venv...', end=' ')
if no_site_packages:
self.run_command(['virtualenv', '-q', '--no-site-packages',
self.venv])
else:
self.run_command(['virtualenv', '-q', self.venv])
print('done.')
else:
print("venv already exists...")
def pip_install(self, *args):
self.run_command(['tools/with_venv.sh',
'pip', 'install', '--upgrade'] + list(args),
redirect_output=False)
def install_dependencies(self):
print('Installing dependencies with pip (this can take a while)...')
# First things first, make sure our venv has the latest pip and
# setuptools and pbr
self.pip_install('pip>=1.4')
self.pip_install('setuptools')
self.pip_install('pbr')
self.pip_install('-r', self.requirements, '-r', self.test_requirements)
def parse_args(self, argv):
"""Parses command-line arguments."""
parser = optparse.OptionParser()
parser.add_option('-n', '--no-site-packages',
action='store_true',
help="Do not inherit packages from global Python "
"install.")
return parser.parse_args(argv[1:])[0]
class Distro(InstallVenv):
def check_cmd(self, cmd):
return bool(self.run_command(['which', cmd],
check_exit_code=False).strip())
def install_virtualenv(self):
if self.check_cmd('virtualenv'):
return
if self.check_cmd('easy_install'):
print('Installing virtualenv via easy_install...', end=' ')
if self.run_command(['easy_install', 'virtualenv']):
print('Succeeded')
return
else:
print('Failed')
self.die('ERROR: virtualenv not found.\n\n%s development'
' requires virtualenv, please install it using your'
' favorite package management tool' % self.project)
class Fedora(Distro):
"""This covers all Fedora-based distributions.
Includes: Fedora, RHEL, CentOS, Scientific Linux
"""
def check_pkg(self, pkg):
return self.run_command_with_code(['rpm', '-q', pkg],
check_exit_code=False)[1] == 0
def install_virtualenv(self):
if self.check_cmd('virtualenv'):
return
if not self.check_pkg('python-virtualenv'):
self.die("Please install 'python-virtualenv'.")
super(Fedora, self).install_virtualenv()
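def _example_bootstrap(root):
    """Illustrative only (added for this edit, not part of the original
    module): the typical wiring a project's tools/install_venv.py uses to
    drive this module. The venv path, requirements file names and project
    name are assumptions made up for the example.
    """
    venv = os.path.join(root, '.venv')
    install = InstallVenv(root, venv,
                          os.path.join(root, 'requirements.txt'),
                          os.path.join(root, 'test-requirements.txt'),
                          sys.version_info[:2], 'example-project')
    options = install.parse_args(sys.argv)
    install.check_python_version()
    install.check_dependencies()
    install.create_virtualenv(no_site_packages=options.no_site_packages)
    install.install_dependencies()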
| apache-2.0 | -6,384,204,946,865,733,000 | 33.645349 | 79 | 0.589025 | false |
charbeljc/OCB | openerp/tools/convert.py | 205 | 41282 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import cStringIO
import csv
import logging
import os.path
import pickle
import re
import sys
# for eval context:
import time
import openerp
import openerp.release
import openerp.workflow
from yaml_import import convert_yaml_import
import assertion_report
_logger = logging.getLogger(__name__)
try:
import pytz
except ImportError:
_logger.warning('could not find pytz library, please install it')
class pytzclass(object):
all_timezones=[]
pytz=pytzclass()
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from lxml import etree, builder
import misc
from config import config
from translate import _
# List of etree._Element subclasses that we choose to ignore when parsing XML.
from misc import SKIPPED_ELEMENT_TYPES
from misc import unquote
from openerp import SUPERUSER_ID
# Import of XML records requires the unsafe eval as well,
# almost everywhere, which is ok because it supposedly comes
# from trusted data, but at least we make it obvious now.
unsafe_eval = eval
from safe_eval import safe_eval as eval
class ParseError(Exception):
def __init__(self, msg, text, filename, lineno):
self.msg = msg
self.text = text
self.filename = filename
self.lineno = lineno
def __str__(self):
return '"%s" while parsing %s:%s, near\n%s' \
% (self.msg, self.filename, self.lineno, self.text)
def _ref(self, cr):
return lambda x: self.id_get(cr, x)
def _obj(pool, cr, uid, model_str, context=None):
model = pool[model_str]
return lambda x: model.browse(cr, uid, x, context=context)
def _get_idref(self, cr, uid, model_str, context, idref):
idref2 = dict(idref,
time=time,
DateTime=datetime,
datetime=datetime,
timedelta=timedelta,
relativedelta=relativedelta,
version=openerp.release.major_version,
ref=_ref(self, cr),
pytz=pytz)
if len(model_str):
idref2['obj'] = _obj(self.pool, cr, uid, model_str, context=context)
return idref2
def _fix_multiple_roots(node):
"""
Surround the children of the ``node`` element of an XML field with a
single root "data" element, to prevent having a document with multiple
roots once parsed separately.
XML nodes should have one root only, but we'd like to support
direct multiple roots in our partial documents (like inherited view architectures).
As a convention we'll surround multiple root with a container "data" element, to be
ignored later when parsing.
"""
real_nodes = [x for x in node if not isinstance(x, SKIPPED_ELEMENT_TYPES)]
if len(real_nodes) > 1:
data_node = etree.Element("data")
for child in node:
data_node.append(child)
node.append(data_node)
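# For example (illustrative): the two roots in
#   <field name="arch" type="xml"><p/><p/></field>
# end up wrapped as
#   <field name="arch" type="xml"><data><p/><p/></data></field>
# and the <data> container is ignored again when the field is parsed.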
def _eval_xml(self, node, pool, cr, uid, idref, context=None):
if context is None:
context = {}
if node.tag in ('field','value'):
t = node.get('type','char')
f_model = node.get('model', '').encode('utf-8')
if node.get('search'):
f_search = node.get("search",'').encode('utf-8')
f_use = node.get("use",'id').encode('utf-8')
f_name = node.get("name",'').encode('utf-8')
idref2 = {}
if f_search:
idref2 = _get_idref(self, cr, uid, f_model, context, idref)
q = unsafe_eval(f_search, idref2)
ids = pool[f_model].search(cr, uid, q)
if f_use != 'id':
ids = map(lambda x: x[f_use], pool[f_model].read(cr, uid, ids, [f_use]))
_cols = pool[f_model]._columns
if (f_name in _cols) and _cols[f_name]._type=='many2many':
return ids
f_val = False
if len(ids):
f_val = ids[0]
if isinstance(f_val, tuple):
f_val = f_val[0]
return f_val
a_eval = node.get('eval','')
if a_eval:
idref2 = _get_idref(self, cr, uid, f_model, context, idref)
try:
return unsafe_eval(a_eval, idref2)
except Exception:
logging.getLogger('openerp.tools.convert.init').error(
'Could not eval(%s) for %s in %s', a_eval, node.get('name'), context)
raise
def _process(s, idref):
matches = re.finditer('[^%]%\((.*?)\)[ds]', s)
done = []
for m in matches:
found = m.group()[1:]
if found in done:
continue
done.append(found)
id = m.groups()[0]
if not id in idref:
idref[id] = self.id_get(cr, id)
s = s.replace(found, str(idref[id]))
        s = s.replace('%%', '%') # Quite weird, but it's for (somewhat) backward compatibility's sake
return s
if t == 'xml':
_fix_multiple_roots(node)
return '<?xml version="1.0"?>\n'\
+_process("".join([etree.tostring(n, encoding='utf-8')
for n in node]), idref)
if t == 'html':
return _process("".join([etree.tostring(n, encoding='utf-8')
for n in node]), idref)
data = node.text
if node.get('file'):
with openerp.tools.file_open(node.get('file'), 'rb') as f:
data = f.read()
if t == 'file':
from ..modules import module
path = data.strip()
if not module.get_module_resource(self.module, path):
raise IOError("No such file or directory: '%s' in %s" % (
path, self.module))
return '%s,%s' % (self.module, path)
if t == 'char':
return data
if t == 'base64':
return data.encode('base64')
if t == 'int':
d = data.strip()
if d == 'None':
return None
return int(d)
if t == 'float':
return float(data.strip())
if t in ('list','tuple'):
res=[]
for n in node.iterchildren(tag='value'):
res.append(_eval_xml(self,n,pool,cr,uid,idref))
if t=='tuple':
return tuple(res)
return res
elif node.tag == "function":
args = []
a_eval = node.get('eval','')
# FIXME: should probably be exclusive
if a_eval:
idref['ref'] = lambda x: self.id_get(cr, x)
args = unsafe_eval(a_eval, idref)
for n in node:
return_val = _eval_xml(self,n, pool, cr, uid, idref, context)
if return_val is not None:
args.append(return_val)
model = pool[node.get('model', '')]
method = node.get('name')
res = getattr(model, method)(cr, uid, *args)
return res
elif node.tag == "test":
return node.text
escape_re = re.compile(r'(?<!\\)/')
def escape(x):
return x.replace('\\/', '/')
class xml_import(object):
@staticmethod
def nodeattr2bool(node, attr, default=False):
if not node.get(attr):
return default
val = node.get(attr).strip()
if not val:
return default
return val.lower() not in ('0', 'false', 'off')
def isnoupdate(self, data_node=None):
return self.noupdate or (len(data_node) and self.nodeattr2bool(data_node, 'noupdate', False))
def get_context(self, data_node, node, eval_dict):
data_node_context = (len(data_node) and data_node.get('context','').encode('utf8'))
node_context = node.get("context",'').encode('utf8')
context = {}
for ctx in (data_node_context, node_context):
if ctx:
try:
ctx_res = unsafe_eval(ctx, eval_dict)
if isinstance(context, dict):
context.update(ctx_res)
else:
context = ctx_res
except NameError:
# Some contexts contain references that are only valid at runtime at
# client-side, so in that case we keep the original context string
# as it is. We also log it, just in case.
context = ctx
_logger.debug('Context value (%s) for element with id "%s" or its data node does not parse '\
'at server-side, keeping original string, in case it\'s meant for client side only',
ctx, node.get('id','n/a'), exc_info=True)
return context
def get_uid(self, cr, uid, data_node, node):
node_uid = node.get('uid','') or (len(data_node) and data_node.get('uid',''))
if node_uid:
return self.id_get(cr, node_uid)
return uid
def _test_xml_id(self, xml_id):
id = xml_id
if '.' in xml_id:
module, id = xml_id.split('.', 1)
            assert '.' not in id, """The ID reference "%s" must contain
at most one dot. Dots are used to refer to IDs of other modules, in the
form: module.record_id""" % (xml_id,)
if module != self.module:
modcnt = self.pool['ir.module.module'].search_count(self.cr, self.uid, ['&', ('name', '=', module), ('state', 'in', ['installed'])])
assert modcnt == 1, """The ID "%s" refers to an uninstalled module""" % (xml_id,)
if len(id) > 64:
            _logger.error('id: %s is too long (max: 64)', id)
def _tag_delete(self, cr, rec, data_node=None, mode=None):
d_model = rec.get("model")
d_search = rec.get("search",'').encode('utf-8')
d_id = rec.get("id")
ids = []
if d_search:
idref = _get_idref(self, cr, self.uid, d_model, context={}, idref={})
try:
ids = self.pool[d_model].search(cr, self.uid, unsafe_eval(d_search, idref))
except ValueError:
_logger.warning('Skipping deletion for failed search `%r`', d_search, exc_info=True)
pass
if d_id:
try:
ids.append(self.id_get(cr, d_id))
except ValueError:
# d_id cannot be found. doesn't matter in this case
_logger.warning('Skipping deletion for missing XML ID `%r`', d_id, exc_info=True)
pass
if ids:
self.pool[d_model].unlink(cr, self.uid, ids)
def _remove_ir_values(self, cr, name, value, model):
ir_values_obj = self.pool['ir.values']
ir_value_ids = ir_values_obj.search(cr, self.uid, [('name','=',name),('value','=',value),('model','=',model)])
if ir_value_ids:
ir_values_obj.unlink(cr, self.uid, ir_value_ids)
return True
def _tag_report(self, cr, rec, data_node=None, mode=None):
res = {}
for dest,f in (('name','string'),('model','model'),('report_name','name')):
res[dest] = rec.get(f,'').encode('utf8')
assert res[dest], "Attribute %s of report is empty !" % (f,)
for field,dest in (('rml','report_rml'),('file','report_rml'),('xml','report_xml'),('xsl','report_xsl'),
('attachment','attachment'),('attachment_use','attachment_use'), ('usage','usage'),
('report_type', 'report_type'), ('parser', 'parser')):
if rec.get(field):
res[dest] = rec.get(field).encode('utf8')
if rec.get('auto'):
res['auto'] = eval(rec.get('auto','False'))
if rec.get('sxw'):
sxw_content = misc.file_open(rec.get('sxw')).read()
res['report_sxw_content'] = sxw_content
if rec.get('header'):
res['header'] = eval(rec.get('header','False'))
res['multi'] = rec.get('multi') and eval(rec.get('multi','False'))
xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id)
if rec.get('groups'):
g_names = rec.get('groups','').split(',')
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
id = self.pool['ir.model.data']._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
if not rec.get('menu') or eval(rec.get('menu','False')):
keyword = str(rec.get('keyword', 'client_print_multi'))
value = 'ir.actions.report.xml,'+str(id)
replace = rec.get('replace', True)
self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', keyword, res['name'], [res['model']], value, replace=replace, isobject=True, xml_id=xml_id)
elif self.mode=='update' and eval(rec.get('menu','False'))==False:
# Special check for report having attribute menu=False on update
value = 'ir.actions.report.xml,'+str(id)
self._remove_ir_values(cr, res['name'], value, res['model'])
return id
def _tag_function(self, cr, rec, data_node=None, mode=None):
if self.isnoupdate(data_node) and self.mode != 'init':
return
context = self.get_context(data_node, rec, {'ref': _ref(self, cr)})
uid = self.get_uid(cr, self.uid, data_node, rec)
_eval_xml(self,rec, self.pool, cr, uid, self.idref, context=context)
return
def _tag_url(self, cr, rec, data_node=None, mode=None):
url = rec.get("url",'').encode('utf8')
target = rec.get("target",'').encode('utf8')
name = rec.get("name",'').encode('utf8')
xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id)
res = {'name': name, 'url': url, 'target':target}
id = self.pool['ir.model.data']._update(cr, self.uid, "ir.actions.act_url", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
def _tag_act_window(self, cr, rec, data_node=None, mode=None):
name = rec.get('name','').encode('utf-8')
xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id)
type = rec.get('type','').encode('utf-8') or 'ir.actions.act_window'
view_id = False
if rec.get('view_id'):
view_id = self.id_get(cr, rec.get('view_id','').encode('utf-8'))
domain = rec.get('domain','').encode('utf-8') or '[]'
res_model = rec.get('res_model','').encode('utf-8')
src_model = rec.get('src_model','').encode('utf-8')
view_type = rec.get('view_type','').encode('utf-8') or 'form'
view_mode = rec.get('view_mode','').encode('utf-8') or 'tree,form'
usage = rec.get('usage','').encode('utf-8')
limit = rec.get('limit','').encode('utf-8')
auto_refresh = rec.get('auto_refresh','').encode('utf-8')
uid = self.uid
# Act_window's 'domain' and 'context' contain mostly literals
# but they can also refer to the variables provided below
# in eval_context, so we need to eval() them before storing.
# Among the context variables, 'active_id' refers to
# the currently selected items in a list view, and only
# takes meaning at runtime on the client side. For this
# reason it must remain a bare variable in domain and context,
# even after eval() at server-side. We use the special 'unquote'
# class to achieve this effect: a string which has itself, unquoted,
# as representation.
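        # For instance (illustrative), repr(unquote("active_id")) is just
        # active_id, so a domain like [('id', '=', active_id)] survives the
        # eval()/str() round trip with the variable left symbolic.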
active_id = unquote("active_id")
active_ids = unquote("active_ids")
active_model = unquote("active_model")
def ref(str_id):
return self.id_get(cr, str_id)
# Include all locals() in eval_context, for backwards compatibility
eval_context = {
'name': name,
'xml_id': xml_id,
'type': type,
'view_id': view_id,
'domain': domain,
'res_model': res_model,
'src_model': src_model,
'view_type': view_type,
'view_mode': view_mode,
'usage': usage,
'limit': limit,
'auto_refresh': auto_refresh,
'uid' : uid,
'active_id': active_id,
'active_ids': active_ids,
'active_model': active_model,
'ref' : ref,
}
context = self.get_context(data_node, rec, eval_context)
try:
domain = unsafe_eval(domain, eval_context)
except NameError:
# Some domains contain references that are only valid at runtime at
# client-side, so in that case we keep the original domain string
# as it is. We also log it, just in case.
_logger.debug('Domain value (%s) for element with id "%s" does not parse '\
'at server-side, keeping original string, in case it\'s meant for client side only',
domain, xml_id or 'n/a', exc_info=True)
res = {
'name': name,
'type': type,
'view_id': view_id,
'domain': domain,
'context': context,
'res_model': res_model,
'src_model': src_model,
'view_type': view_type,
'view_mode': view_mode,
'usage': usage,
'limit': limit,
'auto_refresh': auto_refresh,
}
if rec.get('groups'):
g_names = rec.get('groups','').split(',')
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
if rec.get('target'):
res['target'] = rec.get('target','')
if rec.get('multi'):
res['multi'] = eval(rec.get('multi', 'False'))
id = self.pool['ir.model.data']._update(cr, self.uid, 'ir.actions.act_window', self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
if src_model:
#keyword = 'client_action_relate'
keyword = rec.get('key2','').encode('utf-8') or 'client_action_relate'
value = 'ir.actions.act_window,'+str(id)
replace = rec.get('replace','') or True
self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', keyword, xml_id, [src_model], value, replace=replace, isobject=True, xml_id=xml_id)
# TODO add remove ir.model.data
def _tag_ir_set(self, cr, rec, data_node=None, mode=None):
if self.mode != 'init':
return
res = {}
for field in rec.findall('./field'):
f_name = field.get("name",'').encode('utf-8')
f_val = _eval_xml(self,field,self.pool, cr, self.uid, self.idref)
res[f_name] = f_val
self.pool['ir.model.data'].ir_set(cr, self.uid, res['key'], res['key2'], res['name'], res['models'], res['value'], replace=res.get('replace',True), isobject=res.get('isobject', False), meta=res.get('meta',None))
def _tag_workflow(self, cr, rec, data_node=None, mode=None):
if self.isnoupdate(data_node) and self.mode != 'init':
return
model = rec.get('model').encode('ascii')
w_ref = rec.get('ref')
if w_ref:
id = self.id_get(cr, w_ref)
else:
number_children = len(rec)
assert number_children > 0,\
                "You must define a child node if you don't give a ref"
assert number_children == 1,\
'Only one child node is accepted (%d given)' % number_children
id = _eval_xml(self, rec[0], self.pool, cr, self.uid, self.idref)
uid = self.get_uid(cr, self.uid, data_node, rec)
openerp.workflow.trg_validate(
uid, model, id, rec.get('action').encode('ascii'), cr)
#
# Support two types of notation:
# name="Inventory Control/Sending Goods"
# or
# action="action_id"
# parent="parent_id"
#
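    # For example (illustrative):
    #   <menuitem id="menu_send_goods" name="Inventory Control/Sending Goods"/>
    # or, with an explicit parent and action:
    #   <menuitem id="menu_send_goods" name="Sending Goods"
    #             parent="menu_inventory_control" action="action_send_goods"/>
    #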
def _tag_menuitem(self, cr, rec, data_node=None, mode=None):
rec_id = rec.get("id",'').encode('ascii')
self._test_xml_id(rec_id)
m_l = map(escape, escape_re.split(rec.get("name",'').encode('utf8')))
values = {'parent_id': False}
if rec.get('parent', False) is False and len(m_l) > 1:
# No parent attribute specified and the menu name has several menu components,
# try to determine the ID of the parent according to menu path
pid = False
res = None
values['name'] = m_l[-1]
m_l = m_l[:-1] # last part is our name, not a parent
for idx, menu_elem in enumerate(m_l):
if pid:
cr.execute('select id from ir_ui_menu where parent_id=%s and name=%s', (pid, menu_elem))
else:
cr.execute('select id from ir_ui_menu where parent_id is null and name=%s', (menu_elem,))
res = cr.fetchone()
if res:
pid = res[0]
else:
                    # the menuitem doesn't exist but we are in a branch (not a leaf)
_logger.warning('Warning no ID for submenu %s of menu %s !', menu_elem, str(m_l))
pid = self.pool['ir.ui.menu'].create(cr, self.uid, {'parent_id' : pid, 'name' : menu_elem})
values['parent_id'] = pid
else:
# The parent attribute was specified, if non-empty determine its ID, otherwise
# explicitly make a top-level menu
if rec.get('parent'):
menu_parent_id = self.id_get(cr, rec.get('parent',''))
else:
# we get here with <menuitem parent="">, explicit clear of parent, or
# if no parent attribute at all but menu name is not a menu path
menu_parent_id = False
values = {'parent_id': menu_parent_id}
if rec.get('name'):
values['name'] = rec.get('name')
try:
res = [ self.id_get(cr, rec.get('id','')) ]
        except Exception:
res = None
if rec.get('action'):
a_action = rec.get('action','').encode('utf8')
# determine the type of action
action_type, action_id = self.model_id_get(cr, a_action)
action_type = action_type.split('.')[-1] # keep only type part
if not values.get('name') and action_type in ('act_window', 'wizard', 'url', 'client', 'server'):
a_table = 'ir_act_%s' % action_type.replace('act_', '')
cr.execute('select name from "%s" where id=%%s' % a_table, (int(action_id),))
resw = cr.fetchone()
if resw:
values['name'] = resw[0]
if not values.get('name'):
# ensure menu has a name
values['name'] = rec_id or '?'
if rec.get('sequence'):
values['sequence'] = int(rec.get('sequence'))
if rec.get('groups'):
g_names = rec.get('groups','').split(',')
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
values['groups_id'] = groups_value
pid = self.pool['ir.model.data']._update(cr, self.uid, 'ir.ui.menu', self.module, values, rec_id, noupdate=self.isnoupdate(data_node), mode=self.mode, res_id=res and res[0] or False)
if rec_id and pid:
self.idref[rec_id] = int(pid)
if rec.get('action') and pid:
action = "ir.actions.%s,%d" % (action_type, action_id)
self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(pid))], action, True, True, xml_id=rec_id)
return 'ir.ui.menu', pid
def _assert_equals(self, f1, f2, prec=4):
return not round(f1 - f2, prec)
def _tag_assert(self, cr, rec, data_node=None, mode=None):
if self.isnoupdate(data_node) and self.mode != 'init':
return
rec_model = rec.get("model",'').encode('ascii')
model = self.pool[rec_model]
rec_id = rec.get("id",'').encode('ascii')
self._test_xml_id(rec_id)
rec_src = rec.get("search",'').encode('utf8')
rec_src_count = rec.get("count")
rec_string = rec.get("string",'').encode('utf8') or 'unknown'
ids = None
eval_dict = {'ref': _ref(self, cr)}
context = self.get_context(data_node, rec, eval_dict)
uid = self.get_uid(cr, self.uid, data_node, rec)
if rec_id:
ids = [self.id_get(cr, rec_id)]
elif rec_src:
q = unsafe_eval(rec_src, eval_dict)
ids = self.pool[rec_model].search(cr, uid, q, context=context)
if rec_src_count:
count = int(rec_src_count)
if len(ids) != count:
self.assertion_report.record_failure()
msg = 'assertion "%s" failed!\n' \
' Incorrect search count:\n' \
' expected count: %d\n' \
' obtained count: %d\n' \
% (rec_string, count, len(ids))
_logger.error(msg)
return
assert ids is not None,\
'You must give either an id or a search criteria'
ref = _ref(self, cr)
for id in ids:
brrec = model.browse(cr, uid, id, context)
class d(dict):
def __getitem__(self2, key):
if key in brrec:
return brrec[key]
return dict.__getitem__(self2, key)
globals_dict = d()
globals_dict['floatEqual'] = self._assert_equals
globals_dict['ref'] = ref
globals_dict['_ref'] = ref
for test in rec.findall('./test'):
f_expr = test.get("expr",'').encode('utf-8')
expected_value = _eval_xml(self, test, self.pool, cr, uid, self.idref, context=context) or True
expression_value = unsafe_eval(f_expr, globals_dict)
if expression_value != expected_value: # assertion failed
self.assertion_report.record_failure()
msg = 'assertion "%s" failed!\n' \
' xmltag: %s\n' \
' expected value: %r\n' \
' obtained value: %r\n' \
% (rec_string, etree.tostring(test), expected_value, expression_value)
_logger.error(msg)
return
else: # all tests were successful for this assertion tag (no break)
self.assertion_report.record_success()
def _tag_record(self, cr, rec, data_node=None, mode=None):
rec_model = rec.get("model").encode('ascii')
model = self.pool[rec_model]
rec_id = rec.get("id",'').encode('ascii')
rec_context = rec.get("context", None)
if rec_context:
rec_context = unsafe_eval(rec_context)
self._test_xml_id(rec_id)
        # in update mode, the record won't be updated if the data node explicitly
        # opts out using @noupdate="1". A second check will be performed in
# ir.model.data#_update() using the record's ir.model.data `noupdate` field.
if self.isnoupdate(data_node) and self.mode != 'init':
# check if the xml record has no id, skip
if not rec_id:
return None
if '.' in rec_id:
module,rec_id2 = rec_id.split('.')
else:
module = self.module
rec_id2 = rec_id
id = self.pool['ir.model.data']._update_dummy(cr, self.uid, rec_model, module, rec_id2)
if id:
# if the resource already exists, don't update it but store
# its database id (can be useful)
self.idref[rec_id] = int(id)
return None
elif not self.nodeattr2bool(rec, 'forcecreate', True):
# if it doesn't exist and we shouldn't create it, skip it
return None
# else create it normally
res = {}
for field in rec.findall('./field'):
#TODO: most of this code is duplicated above (in _eval_xml)...
f_name = field.get("name").encode('utf-8')
f_ref = field.get("ref",'').encode('utf-8')
f_search = field.get("search",'').encode('utf-8')
f_model = field.get("model",'').encode('utf-8')
if not f_model and f_name in model._fields:
f_model = model._fields[f_name].comodel_name
f_use = field.get("use",'').encode('utf-8') or 'id'
f_val = False
if f_search:
q = unsafe_eval(f_search, self.idref)
assert f_model, 'Define an attribute model="..." in your .XML file !'
f_obj = self.pool[f_model]
# browse the objects searched
s = f_obj.browse(cr, self.uid, f_obj.search(cr, self.uid, q))
# column definitions of the "local" object
_fields = self.pool[rec_model]._fields
# if the current field is many2many
if (f_name in _fields) and _fields[f_name].type == 'many2many':
f_val = [(6, 0, map(lambda x: x[f_use], s))]
elif len(s):
# otherwise (we are probably in a many2one field),
# take the first element of the search
f_val = s[0][f_use]
elif f_ref:
if f_name in model._fields and model._fields[f_name].type == 'reference':
val = self.model_id_get(cr, f_ref)
f_val = val[0] + ',' + str(val[1])
else:
f_val = self.id_get(cr, f_ref)
else:
f_val = _eval_xml(self,field, self.pool, cr, self.uid, self.idref)
if f_name in model._fields:
if model._fields[f_name].type == 'integer':
f_val = int(f_val)
res[f_name] = f_val
id = self.pool['ir.model.data']._update(cr, self.uid, rec_model, self.module, res, rec_id or False, not self.isnoupdate(data_node), noupdate=self.isnoupdate(data_node), mode=self.mode, context=rec_context )
if rec_id:
self.idref[rec_id] = int(id)
if config.get('import_partial'):
cr.commit()
return rec_model, id
def _tag_template(self, cr, el, data_node=None, mode=None):
# This helper transforms a <template> element into a <record> and forwards it
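        # e.g. (illustrative) <template id="foo" name="Foo">...</template>
        # becomes <record id="foo" model="ir.ui.view"> with the name, type,
        # priority and arch fields filled in below.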
tpl_id = el.get('id', el.get('t-name', '')).encode('ascii')
full_tpl_id = tpl_id
if '.' not in full_tpl_id:
full_tpl_id = '%s.%s' % (self.module, tpl_id)
# set the full template name for qweb <module>.<id>
if not el.get('inherit_id'):
el.set('t-name', full_tpl_id)
el.tag = 't'
else:
el.tag = 'data'
el.attrib.pop('id', None)
record_attrs = {
'id': tpl_id,
'model': 'ir.ui.view',
}
for att in ['forcecreate', 'context']:
if att in el.keys():
record_attrs[att] = el.attrib.pop(att)
Field = builder.E.field
name = el.get('name', tpl_id)
record = etree.Element('record', attrib=record_attrs)
record.append(Field(name, name='name'))
record.append(Field("qweb", name='type'))
record.append(Field(el.get('priority', "16"), name='priority'))
if 'inherit_id' in el.attrib:
record.append(Field(name='inherit_id', ref=el.get('inherit_id')))
if el.get('active') in ("True", "False"):
view_id = self.id_get(cr, tpl_id, raise_if_not_found=False)
if mode != "update" or not view_id:
record.append(Field(name='active', eval=el.get('active')))
if el.get('customize_show') in ("True", "False"):
record.append(Field(name='customize_show', eval=el.get('customize_show')))
groups = el.attrib.pop('groups', None)
if groups:
grp_lst = map(lambda x: "ref('%s')" % x, groups.split(','))
record.append(Field(name="groups_id", eval="[(6, 0, ["+', '.join(grp_lst)+"])]"))
if el.attrib.pop('page', None) == 'True':
record.append(Field(name="page", eval="True"))
if el.get('primary') == 'True':
# Pseudo clone mode, we'll set the t-name to the full canonical xmlid
el.append(
builder.E.xpath(
builder.E.attribute(full_tpl_id, name='t-name'),
expr=".",
position="attributes",
)
)
record.append(Field('primary', name='mode'))
# inject complete <template> element (after changing node name) into
# the ``arch`` field
record.append(Field(el, name="arch", type="xml"))
return self._tag_record(cr, record, data_node)
def id_get(self, cr, id_str, raise_if_not_found=True):
if id_str in self.idref:
return self.idref[id_str]
res = self.model_id_get(cr, id_str, raise_if_not_found)
if res and len(res)>1: res = res[1]
return res
def model_id_get(self, cr, id_str, raise_if_not_found=True):
model_data_obj = self.pool['ir.model.data']
mod = self.module
if '.' not in id_str:
id_str = '%s.%s' % (mod, id_str)
return model_data_obj.xmlid_to_res_model_res_id(
cr, self.uid, id_str,
raise_if_not_found=raise_if_not_found)
def parse(self, de, mode=None):
if de.tag != 'openerp':
raise Exception("Mismatch xml format: root tag must be `openerp`.")
for n in de.findall('./data'):
for rec in n:
if rec.tag in self._tags:
try:
self._tags[rec.tag](self.cr, rec, n, mode=mode)
except Exception, e:
self.cr.rollback()
exc_info = sys.exc_info()
raise ParseError, (misc.ustr(e), etree.tostring(rec).rstrip(), rec.getroottree().docinfo.URL, rec.sourceline), exc_info[2]
return True
def __init__(self, cr, module, idref, mode, report=None, noupdate=False):
self.mode = mode
self.module = module
self.cr = cr
self.idref = idref
self.pool = openerp.registry(cr.dbname)
self.uid = 1
if report is None:
report = assertion_report.assertion_report()
self.assertion_report = report
self.noupdate = noupdate
self._tags = {
'record': self._tag_record,
'delete': self._tag_delete,
'function': self._tag_function,
'menuitem': self._tag_menuitem,
'template': self._tag_template,
'workflow': self._tag_workflow,
'report': self._tag_report,
'ir_set': self._tag_ir_set,
'act_window': self._tag_act_window,
'url': self._tag_url,
'assert': self._tag_assert,
}
def convert_file(cr, module, filename, idref, mode='update', noupdate=False, kind=None, report=None, pathname=None):
if pathname is None:
pathname = os.path.join(module, filename)
fp = misc.file_open(pathname)
ext = os.path.splitext(filename)[1].lower()
try:
if ext == '.csv':
convert_csv_import(cr, module, pathname, fp.read(), idref, mode, noupdate)
elif ext == '.sql':
convert_sql_import(cr, fp)
elif ext == '.yml':
convert_yaml_import(cr, module, fp, kind, idref, mode, noupdate, report)
elif ext == '.xml':
convert_xml_import(cr, module, fp, idref, mode, noupdate, report)
elif ext == '.js':
pass # .js files are valid but ignored here.
else:
_logger.warning("Can't load unknown file type %s.", filename)
finally:
fp.close()
def convert_sql_import(cr, fp):
queries = fp.read().split(';')
for query in queries:
new_query = ' '.join(query.split())
if new_query:
cr.execute(new_query)
def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
noupdate=False):
'''Import csv file :
quote: "
delimiter: ,
encoding: utf-8'''
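    # e.g. (illustrative) a file named res.partner.csv maps to the model
    # res.partner and could contain:
    #   "id","name"
    #   "base.partner_demo","Demo Partner"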
if not idref:
idref={}
model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]
#remove folder path from model
head, model = os.path.split(model)
input = cStringIO.StringIO(csvcontent) #FIXME
reader = csv.reader(input, quotechar='"', delimiter=',')
fields = reader.next()
fname_partial = ""
if config.get('import_partial'):
fname_partial = module + '/'+ fname
if not os.path.isfile(config.get('import_partial')):
pickle.dump({}, file(config.get('import_partial'),'w+'))
else:
data = pickle.load(file(config.get('import_partial')))
if fname_partial in data:
if not data[fname_partial]:
return
else:
for i in range(data[fname_partial]):
reader.next()
if not (mode == 'init' or 'id' in fields):
_logger.error("Import specification does not contain 'id' and we are in init mode, Cannot continue.")
return
uid = 1
datas = []
for line in reader:
if not (line and any(line)):
continue
try:
datas.append(map(misc.ustr, line))
        except Exception:
_logger.error("Cannot import the line: %s", line)
registry = openerp.registry(cr.dbname)
result, rows, warning_msg, dummy = registry[model].import_data(cr, uid, fields, datas,mode, module, noupdate, filename=fname_partial)
if result < 0:
# Report failed import and abort module install
raise Exception(_('Module loading %s failed: file %s could not be processed:\n %s') % (module, fname, warning_msg))
if config.get('import_partial'):
data = pickle.load(file(config.get('import_partial')))
data[fname_partial] = 0
pickle.dump(data, file(config.get('import_partial'),'wb'))
cr.commit()
#
# xml import/export
#
def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate=False, report=None):
doc = etree.parse(xmlfile)
relaxng = etree.RelaxNG(
etree.parse(os.path.join(config['root_path'],'import_xml.rng' )))
try:
relaxng.assert_(doc)
except Exception:
_logger.error('The XML file does not fit the required schema !')
_logger.error(misc.ustr(relaxng.error_log.last_error))
raise
if idref is None:
idref={}
obj = xml_import(cr, module, idref, mode, report=report, noupdate=noupdate)
obj.parse(doc.getroot(), mode=mode)
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,073,025,872,213,796,600 | 40.656912 | 219 | 0.529626 | false |
nagyistoce/kaggle-galaxies | try_convnet_cc_multirotflip_3x69r45_maxout2048_extradense_pysexgen1_dup.py | 7 | 17744 | import numpy as np
# import pandas as pd
import theano
import theano.tensor as T
import layers
import cc_layers
import custom
import load_data
import realtime_augmentation as ra
import time
import csv
import os
import cPickle as pickle
from datetime import datetime, timedelta
# import matplotlib.pyplot as plt
# plt.ion()
# import utils
BATCH_SIZE = 16
NUM_INPUT_FEATURES = 3
LEARNING_RATE_SCHEDULE = {
0: 0.04,
1800: 0.004,
2300: 0.0004,
}
MOMENTUM = 0.9
WEIGHT_DECAY = 0.0
CHUNK_SIZE = 10000 # 30000 # this should be a multiple of the batch size, ideally.
NUM_CHUNKS = 2500 # 3000 # 1500 # 600 # 600 # 600 # 500
VALIDATE_EVERY = 20 # 12 # 6 # 6 # 6 # 5 # validate only every 20 chunks. MUST BE A DIVISOR OF NUM_CHUNKS!!!
# else computing the analysis data does not work correctly, since it assumes that the validation set is still loaded.
NUM_CHUNKS_NONORM = 1 # train without normalisation for this many chunks, to get the weights in the right 'zone'.
# this should be only a few, just 1 hopefully suffices.
GEN_BUFFER_SIZE = 1
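# Sanity checks (added for illustration): make the divisibility constraints
# stated in the comments above explicit.
assert CHUNK_SIZE % BATCH_SIZE == 0
assert NUM_CHUNKS % VALIDATE_EVERY == 0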
# # need to load the full training data anyway to extract the validation set from it.
# # alternatively we could create separate validation set files.
# DATA_TRAIN_PATH = "data/images_train_color_cropped33_singletf.npy.gz"
# DATA2_TRAIN_PATH = "data/images_train_color_8x_singletf.npy.gz"
# DATA_VALIDONLY_PATH = "data/images_validonly_color_cropped33_singletf.npy.gz"
# DATA2_VALIDONLY_PATH = "data/images_validonly_color_8x_singletf.npy.gz"
# DATA_TEST_PATH = "data/images_test_color_cropped33_singletf.npy.gz"
# DATA2_TEST_PATH = "data/images_test_color_8x_singletf.npy.gz"
TARGET_PATH = "predictions/final/try_convnet_cc_multirotflip_3x69r45_maxout2048_extradense_pysexgen1_dup.csv"
ANALYSIS_PATH = "analysis/final/try_convnet_cc_multirotflip_3x69r45_maxout2048_extradense_pysexgen1_dup.pkl"
# FEATURES_PATTERN = "features/try_convnet_chunked_ra_b3sched.%s.npy"
print "Set up data loading"
# TODO: adapt this so it loads the validation data from JPEGs and does the processing realtime
input_sizes = [(69, 69), (69, 69)]
ds_transforms = [
ra.build_ds_transform(3.0, target_size=input_sizes[0]),
ra.build_ds_transform(3.0, target_size=input_sizes[1]) + ra.build_augmentation_transform(rotation=45)
]
num_input_representations = len(ds_transforms)
augmentation_params = {
'zoom_range': (1.0 / 1.3, 1.3),
'rotation_range': (0, 360),
'shear_range': (0, 0),
'translation_range': (-4, 4),
'do_flip': True,
}
augmented_data_gen = ra.realtime_augmented_data_gen(num_chunks=NUM_CHUNKS, chunk_size=CHUNK_SIZE,
augmentation_params=augmentation_params, ds_transforms=ds_transforms,
target_sizes=input_sizes, processor_class=ra.LoadAndProcessPysexGen1CenteringRescaling)
post_augmented_data_gen = ra.post_augment_brightness_gen(augmented_data_gen, std=0.5)
train_gen = load_data.buffered_gen_mp(post_augmented_data_gen, buffer_size=GEN_BUFFER_SIZE)
y_train = np.load("data/solutions_train.npy")
train_ids = load_data.train_ids
test_ids = load_data.test_ids
# split training data into training + a small validation set
num_train = len(train_ids)
num_test = len(test_ids)
num_valid = num_train // 10 # integer division
num_train -= num_valid
y_valid = y_train[num_train:]
y_train = y_train[:num_train]
valid_ids = train_ids[num_train:]
train_ids = train_ids[:num_train]
train_indices = np.arange(num_train)
valid_indices = np.arange(num_train, num_train + num_valid)
test_indices = np.arange(num_test)
def create_train_gen():
"""
this generates the training data in order, for postprocessing. Do not use this for actual training.
"""
data_gen_train = ra.realtime_fixed_augmented_data_gen(train_indices, 'train',
ds_transforms=ds_transforms, chunk_size=CHUNK_SIZE, target_sizes=input_sizes,
processor_class=ra.LoadAndProcessFixedPysexGen1CenteringRescaling)
return load_data.buffered_gen_mp(data_gen_train, buffer_size=GEN_BUFFER_SIZE)
def create_valid_gen():
data_gen_valid = ra.realtime_fixed_augmented_data_gen(valid_indices, 'train',
ds_transforms=ds_transforms, chunk_size=CHUNK_SIZE, target_sizes=input_sizes,
processor_class=ra.LoadAndProcessFixedPysexGen1CenteringRescaling)
return load_data.buffered_gen_mp(data_gen_valid, buffer_size=GEN_BUFFER_SIZE)
def create_test_gen():
data_gen_test = ra.realtime_fixed_augmented_data_gen(test_indices, 'test',
ds_transforms=ds_transforms, chunk_size=CHUNK_SIZE, target_sizes=input_sizes,
processor_class=ra.LoadAndProcessFixedPysexGen1CenteringRescaling)
return load_data.buffered_gen_mp(data_gen_test, buffer_size=GEN_BUFFER_SIZE)
print "Preprocess validation data upfront"
start_time = time.time()
xs_valid = [[] for _ in xrange(num_input_representations)]
for data, length in create_valid_gen():
for x_valid_list, x_chunk in zip(xs_valid, data):
x_valid_list.append(x_chunk[:length])
xs_valid = [np.vstack(x_valid) for x_valid in xs_valid]
xs_valid = [x_valid.transpose(0, 3, 1, 2) for x_valid in xs_valid] # move the colour dimension up
print " took %.2f seconds" % (time.time() - start_time)
print "Build model"
l0 = layers.Input2DLayer(BATCH_SIZE, NUM_INPUT_FEATURES, input_sizes[0][0], input_sizes[0][1])
l0_45 = layers.Input2DLayer(BATCH_SIZE, NUM_INPUT_FEATURES, input_sizes[1][0], input_sizes[1][1])
l0r = layers.MultiRotSliceLayer([l0, l0_45], part_size=45, include_flip=True)
l0s = cc_layers.ShuffleBC01ToC01BLayer(l0r)
l1a = cc_layers.CudaConvnetConv2DLayer(l0s, n_filters=32, filter_size=6, weights_std=0.01, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l1 = cc_layers.CudaConvnetPooling2DLayer(l1a, pool_size=2)
l2a = cc_layers.CudaConvnetConv2DLayer(l1, n_filters=64, filter_size=5, weights_std=0.01, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l2 = cc_layers.CudaConvnetPooling2DLayer(l2a, pool_size=2)
l3a = cc_layers.CudaConvnetConv2DLayer(l2, n_filters=128, filter_size=3, weights_std=0.01, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l3b = cc_layers.CudaConvnetConv2DLayer(l3a, n_filters=128, filter_size=3, pad=0, weights_std=0.1, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l3 = cc_layers.CudaConvnetPooling2DLayer(l3b, pool_size=2)
l3s = cc_layers.ShuffleC01BToBC01Layer(l3)
j3 = layers.MultiRotMergeLayer(l3s, num_views=4) # 2) # merge convolutional parts
l4a = layers.DenseLayer(j3, n_outputs=4096, weights_std=0.001, init_bias_value=0.01, dropout=0.5, nonlinearity=layers.identity)
l4b = layers.FeatureMaxPoolingLayer(l4a, pool_size=2, feature_dim=1, implementation='reshape')
l4c = layers.DenseLayer(l4b, n_outputs=4096, weights_std=0.001, init_bias_value=0.01, dropout=0.5, nonlinearity=layers.identity)
l4 = layers.FeatureMaxPoolingLayer(l4c, pool_size=2, feature_dim=1, implementation='reshape')
# l5 = layers.DenseLayer(l4, n_outputs=37, weights_std=0.01, init_bias_value=0.0, dropout=0.5, nonlinearity=custom.clip_01) # nonlinearity=layers.identity)
l5 = layers.DenseLayer(l4, n_outputs=37, weights_std=0.01, init_bias_value=0.1, dropout=0.5, nonlinearity=layers.identity)
# l6 = layers.OutputLayer(l5, error_measure='mse')
l6 = custom.OptimisedDivGalaxyOutputLayer(l5) # this incorporates the constraints on the output (probabilities sum to one, weighting, etc.)
train_loss_nonorm = l6.error(normalisation=False)
train_loss = l6.error() # but compute and print this!
valid_loss = l6.error(dropout_active=False)
all_parameters = layers.all_parameters(l6)
all_bias_parameters = layers.all_bias_parameters(l6)
xs_shared = [theano.shared(np.zeros((1,1,1,1), dtype=theano.config.floatX)) for _ in xrange(num_input_representations)]
y_shared = theano.shared(np.zeros((1,1), dtype=theano.config.floatX))
learning_rate = theano.shared(np.array(LEARNING_RATE_SCHEDULE[0], dtype=theano.config.floatX))
idx = T.lscalar('idx')
givens = {
l0.input_var: xs_shared[0][idx*BATCH_SIZE:(idx+1)*BATCH_SIZE],
l0_45.input_var: xs_shared[1][idx*BATCH_SIZE:(idx+1)*BATCH_SIZE],
l6.target_var: y_shared[idx*BATCH_SIZE:(idx+1)*BATCH_SIZE],
}
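# The `givens` mapping substitutes slices of the GPU-resident shared arrays
# for the symbolic input variables, so each call like train(idx) only has to
# transfer the batch index, not the batch data itself.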
# updates = layers.gen_updates(train_loss, all_parameters, learning_rate=LEARNING_RATE, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY)
updates_nonorm = layers.gen_updates_nesterov_momentum_no_bias_decay(train_loss_nonorm, all_parameters, all_bias_parameters, learning_rate=learning_rate, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY)
updates = layers.gen_updates_nesterov_momentum_no_bias_decay(train_loss, all_parameters, all_bias_parameters, learning_rate=learning_rate, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY)
train_nonorm = theano.function([idx], train_loss_nonorm, givens=givens, updates=updates_nonorm)
train_norm = theano.function([idx], train_loss, givens=givens, updates=updates)
compute_loss = theano.function([idx], valid_loss, givens=givens) # dropout_active=False
compute_output = theano.function([idx], l6.predictions(dropout_active=False), givens=givens, on_unused_input='ignore') # not using the labels, so theano complains
compute_features = theano.function([idx], l4.output(dropout_active=False), givens=givens, on_unused_input='ignore')
print "Train model"
start_time = time.time()
prev_time = start_time
num_batches_valid = x_valid.shape[0] // BATCH_SIZE  # x_valid leaks out of the list comprehension above (Python 2 scoping)
losses_train = []
losses_valid = []
param_stds = []
for e in xrange(NUM_CHUNKS):
print "Chunk %d/%d" % (e + 1, NUM_CHUNKS)
chunk_data, chunk_length = train_gen.next()
y_chunk = chunk_data.pop() # last element is labels.
xs_chunk = chunk_data
# need to transpose the chunks to move the 'channels' dimension up
xs_chunk = [x_chunk.transpose(0, 3, 1, 2) for x_chunk in xs_chunk]
if e in LEARNING_RATE_SCHEDULE:
current_lr = LEARNING_RATE_SCHEDULE[e]
learning_rate.set_value(LEARNING_RATE_SCHEDULE[e])
print " setting learning rate to %.6f" % current_lr
    # train without normalisation for the first NUM_CHUNKS_NONORM chunks.
if e >= NUM_CHUNKS_NONORM:
train = train_norm
else:
train = train_nonorm
print " load training data onto GPU"
for x_shared, x_chunk in zip(xs_shared, xs_chunk):
x_shared.set_value(x_chunk)
y_shared.set_value(y_chunk)
num_batches_chunk = x_chunk.shape[0] // BATCH_SIZE
# import pdb; pdb.set_trace()
print " batch SGD"
losses = []
for b in xrange(num_batches_chunk):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_chunk)
loss = train(b)
losses.append(loss)
# print " loss: %.6f" % loss
mean_train_loss = np.sqrt(np.mean(losses))
print " mean training loss (RMSE):\t\t%.6f" % mean_train_loss
losses_train.append(mean_train_loss)
# store param stds during training
param_stds.append([p.std() for p in layers.get_param_values(l6)])
if ((e + 1) % VALIDATE_EVERY) == 0:
print
print "VALIDATING"
print " load validation data onto GPU"
for x_shared, x_valid in zip(xs_shared, xs_valid):
x_shared.set_value(x_valid)
y_shared.set_value(y_valid)
print " compute losses"
losses = []
for b in xrange(num_batches_valid):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_valid)
loss = compute_loss(b)
losses.append(loss)
mean_valid_loss = np.sqrt(np.mean(losses))
print " mean validation loss (RMSE):\t\t%.6f" % mean_valid_loss
losses_valid.append(mean_valid_loss)
layers.dump_params(l6, e=e)
now = time.time()
time_since_start = now - start_time
time_since_prev = now - prev_time
prev_time = now
est_time_left = time_since_start * (float(NUM_CHUNKS - (e + 1)) / float(e + 1))
eta = datetime.now() + timedelta(seconds=est_time_left)
eta_str = eta.strftime("%c")
print " %s since start (%.2f s)" % (load_data.hms(time_since_start), time_since_prev)
print " estimated %s to go (ETA: %s)" % (load_data.hms(est_time_left), eta_str)
print
del chunk_data, xs_chunk, x_chunk, y_chunk, xs_valid, x_valid # memory cleanup
print "Compute predictions on validation set for analysis in batches"
predictions_list = []
for b in xrange(num_batches_valid):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_valid)
predictions = compute_output(b)
predictions_list.append(predictions)
all_predictions = np.vstack(predictions_list)
# postprocessing: clip all predictions to 0-1
all_predictions[all_predictions > 1] = 1.0
all_predictions[all_predictions < 0] = 0.0
print "Write validation set predictions to %s" % ANALYSIS_PATH
with open(ANALYSIS_PATH, 'w') as f:
pickle.dump({
'ids': valid_ids[:num_batches_valid * BATCH_SIZE], # note that we need to truncate the ids to a multiple of the batch size.
'predictions': all_predictions,
'targets': y_valid,
'mean_train_loss': mean_train_loss,
'mean_valid_loss': mean_valid_loss,
'time_since_start': time_since_start,
'losses_train': losses_train,
'losses_valid': losses_valid,
'param_values': layers.get_param_values(l6),
'param_stds': param_stds,
}, f, pickle.HIGHEST_PROTOCOL)
del predictions_list, all_predictions # memory cleanup
# print "Loading test data"
# x_test = load_data.load_gz(DATA_TEST_PATH)
# x2_test = load_data.load_gz(DATA2_TEST_PATH)
# test_ids = np.load("data/test_ids.npy")
# num_test = x_test.shape[0]
# x_test = x_test.transpose(0, 3, 1, 2) # move the colour dimension up.
# x2_test = x2_test.transpose(0, 3, 1, 2)
# create_test_gen = lambda: load_data.array_chunker_gen([x_test, x2_test], chunk_size=CHUNK_SIZE, loop=False, truncate=False, shuffle=False)
print "Computing predictions on test data"
predictions_list = []
for e, (xs_chunk, chunk_length) in enumerate(create_test_gen()):
print "Chunk %d" % (e + 1)
xs_chunk = [x_chunk.transpose(0, 3, 1, 2) for x_chunk in xs_chunk] # move the colour dimension up.
for x_shared, x_chunk in zip(xs_shared, xs_chunk):
x_shared.set_value(x_chunk)
num_batches_chunk = int(np.ceil(chunk_length / float(BATCH_SIZE))) # need to round UP this time to account for all data
    # make predictions for the test set; don't forget to cut off the padding zeros at the end
for b in xrange(num_batches_chunk):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_chunk)
predictions = compute_output(b)
predictions_list.append(predictions)
all_predictions = np.vstack(predictions_list)
all_predictions = all_predictions[:num_test] # truncate back to the correct length
# postprocessing: clip all predictions to 0-1
all_predictions[all_predictions > 1] = 1.0
all_predictions[all_predictions < 0] = 0.0
print "Write predictions to %s" % TARGET_PATH
# test_ids = np.load("data/test_ids.npy")
with open(TARGET_PATH, 'wb') as csvfile:
writer = csv.writer(csvfile) # , delimiter=',', quoting=csv.QUOTE_MINIMAL)
# write header
writer.writerow(['GalaxyID', 'Class1.1', 'Class1.2', 'Class1.3', 'Class2.1', 'Class2.2', 'Class3.1', 'Class3.2', 'Class4.1', 'Class4.2', 'Class5.1', 'Class5.2', 'Class5.3', 'Class5.4', 'Class6.1', 'Class6.2', 'Class7.1', 'Class7.2', 'Class7.3', 'Class8.1', 'Class8.2', 'Class8.3', 'Class8.4', 'Class8.5', 'Class8.6', 'Class8.7', 'Class9.1', 'Class9.2', 'Class9.3', 'Class10.1', 'Class10.2', 'Class10.3', 'Class11.1', 'Class11.2', 'Class11.3', 'Class11.4', 'Class11.5', 'Class11.6'])
# write data
for k in xrange(test_ids.shape[0]):
row = [test_ids[k]] + all_predictions[k].tolist()
writer.writerow(row)
print "Gzipping..."
os.system("gzip -c %s > %s.gz" % (TARGET_PATH, TARGET_PATH))
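# Optional sanity check (a hypothetical addition, assuming the gzip call above
# succeeded): re-read the compressed submission and verify the header has the
# GalaxyID column plus the 37 class columns.
import gzip
with gzip.open(TARGET_PATH + ".gz") as f:
    header = f.readline().strip().split(',')
assert len(header) == 38, "unexpected submission header: %r" % header
print "submission header OK (%d columns)" % len(header)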
del all_predictions, predictions_list, xs_chunk, x_chunk # memory cleanup
# # need to reload training data because it has been split and shuffled.
# # don't need to reload test data
# x_train = load_data.load_gz(DATA_TRAIN_PATH)
# x2_train = load_data.load_gz(DATA2_TRAIN_PATH)
# x_train = x_train.transpose(0, 3, 1, 2) # move the colour dimension up
# x2_train = x2_train.transpose(0, 3, 1, 2)
# train_gen_features = load_data.array_chunker_gen([x_train, x2_train], chunk_size=CHUNK_SIZE, loop=False, truncate=False, shuffle=False)
# test_gen_features = load_data.array_chunker_gen([x_test, x2_test], chunk_size=CHUNK_SIZE, loop=False, truncate=False, shuffle=False)
# for name, gen, num in zip(['train', 'test'], [train_gen_features, test_gen_features], [x_train.shape[0], x_test.shape[0]]):
# print "Extracting feature representations for all galaxies: %s" % name
# features_list = []
# for e, (xs_chunk, chunk_length) in enumerate(gen):
# print "Chunk %d" % (e + 1)
# x_chunk, x2_chunk = xs_chunk
# x_shared.set_value(x_chunk)
# x2_shared.set_value(x2_chunk)
# num_batches_chunk = int(np.ceil(chunk_length / float(BATCH_SIZE))) # need to round UP this time to account for all data
# # compute features for the set; don't forget to cut off the padding zeros at the end
# for b in xrange(num_batches_chunk):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_chunk)
# features = compute_features(b)
# features_list.append(features)
# all_features = np.vstack(features_list)
# all_features = all_features[:num] # truncate back to the correct length
# features_path = FEATURES_PATTERN % name
# print " write features to %s" % features_path
# np.save(features_path, all_features)
print "Done!"
| bsd-3-clause | 7,384,490,626,677,839,000 | 40.074074 | 486 | 0.686147 | false |
jspan/Open-Knesset | apis/tests.py | 14 | 6111 | import datetime, json, csv
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Group, Permission
from tagging.models import Tag, TaggedItem
from laws.models import Vote, VoteAction, Bill, Law
from mks.models import Member, Party, Knesset
from agendas.models import Agenda
from committees.models import Committee
from events.models import Event
from django.core import cache
from voting.models import Vote as UserVote
import apis
class TestAPIV2(TestCase):
"""
General tests for the API V2, not specific to any app (app-specific tests
are located in the app directories).
"""
def setUp(self):
pass
    def test_empty_cache_bug(self):
        """Tastypie has a bug when the cache returns None. This test verifies
        that our fork of Tastypie doesn't have it. It should be run with
        DummyCache settings."""
res = self.client.get('/api/v2/vote/?format=json')
self.assertEqual(res.status_code, 200)
class MeetingApiTest(TestCase):
def setUp(self):
self.knesset = Knesset.objects.create(number=1,
start_date=datetime.date.today()-datetime.timedelta(days=1))
self.committee_1 = Committee.objects.create(name='c1')
self.committee_2 = Committee.objects.create(name='c2')
self.meeting_1 = self.committee_1.meetings.create(date=datetime.datetime.now(),
protocol_text='''jacob:
I am a perfectionist
adrian:
I have a deadline''')
self.meeting_1.create_protocol_parts()
self.meeting_2 = self.committee_1.meetings.create(date=datetime.datetime.now(),
protocol_text='m2')
self.meeting_2.create_protocol_parts()
self.jacob = User.objects.create_user('jacob', '[email protected]',
'JKM')
self.adrian = User.objects.create_user('adrian', '[email protected]',
'ADRIAN')
(self.group, created) = Group.objects.get_or_create(name='Valid Email')
if created:
self.group.save()
self.group.permissions.add(Permission.objects.get(name='Can add annotation'))
self.jacob.groups.add(self.group)
ct = ContentType.objects.get_for_model(Tag)
self.adrian.user_permissions.add(Permission.objects.get(codename='add_tag', content_type=ct))
self.bill_1 = Bill.objects.create(stage='1', title='bill 1')
self.mk_1 = Member.objects.create(name='mk 1')
self.topic = self.committee_1.topic_set.create(creator=self.jacob,
title="hello", description="hello world")
self.tag_1 = Tag.objects.create(name='tag1')
def testCommitteeMeetingV2(self):
url = reverse('api_dispatch_list', kwargs={'resource_name': 'committeemeeting', 'api_name': 'v2'})
url = url + str(self.meeting_1.id) + '/?format=json'
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
committee_url = reverse('api_dispatch_list', kwargs={'resource_name': 'committee', 'api_name': 'v2'})
committee_url = committee_url + str(self.committee_1.id) + '/'
self.assertEqual(res_json['committee'], committee_url)
self.assertEqual(res_json['absolute_url'], self.meeting_1.get_absolute_url())
def testCommitteeMeetingListV2(self):
url = reverse('api_dispatch_list', kwargs={'resource_name': 'committeemeeting', 'api_name': 'v2'})
url = url + '?format=json'
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
self.assertEqual(len(res_json['objects']), 2)
self.assertTrue(
res_json['objects'][0]['absolute_url'] == self.meeting_1.get_absolute_url()
or
res_json['objects'][0]['absolute_url'] == self.meeting_2.get_absolute_url()
)
def testCommitteeMeetingV2CSV(self):
url = reverse('api_dispatch_list', kwargs={'resource_name': 'committeemeeting', 'api_name': 'v2'})
url = url + '?format=csv'
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
for row in csv.DictReader(res.content.split('\n'), delimiter=','):
if row.has_key('absolute_url'):
absurl = row['absolute_url']
else:
# \ufeff is the BOM - which is required for excel compatibility
absurl = row[u'\ufeff'.encode('utf8')+'absolute_url']
self.assertTrue(
absurl == self.meeting_1.get_absolute_url()
or
absurl == self.meeting_2.get_absolute_url()
)
class SwaggerTest(TestCase):
def testSwaggerUI(self):
"Swagger UI static resources should be properly mounted and served"
res = self.client.get(reverse('tastypie_swagger:index'))
self.assertEqual(res.status_code, 200)
self.assertIn("<title>Swagger UI</title>", res.content)
def testSwaggerResources(self):
"Swagger should find all the apis and list them as resources"
res = self.client.get(reverse('tastypie_swagger:resources'))
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
self.assertEqual(res_json["swaggerVersion"], "1.1")
rendered_apis = [api_obj_path['path'].lstrip('/') for api_obj_path in res_json["apis"]]
for api in apis.resources.v2_api._canonicals:
self.assertIn(api, rendered_apis)
def testSwaggerSchema(self):
"The schema for swagger should be generated properly for at least one controller"
res = self.client.get('/api/v2/doc/schema/agenda/')
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
self.assertEqual(res_json["swaggerVersion"], "1.1")
| bsd-3-clause | 2,665,480,174,076,305,400 | 45.295455 | 109 | 0.624775 | false |
liyu1990/sklearn | examples/ensemble/plot_gradient_boosting_oob.py | 50 | 4764 | """
======================================
Gradient Boosting Out-of-Bag estimates
======================================
Out-of-bag (OOB) estimates can be a useful heuristic to estimate
the "optimal" number of boosting iterations.
OOB estimates are almost identical to cross-validation estimates but
they can be computed on-the-fly without the need for repeated model
fitting.
OOB estimates are only available for Stochastic Gradient Boosting
(i.e. ``subsample < 1.0``), the estimates are derived from the improvement
in loss based on the examples not included in the bootstrap sample
(the so-called out-of-bag examples).
The OOB estimator is a pessimistic estimator of the true
test loss, but remains a fairly good approximation for a small number of trees.
The figure shows the cumulative sum of the negative OOB improvements
as a function of the boosting iteration. As you can see, it tracks the test
loss for the first hundred iterations but then diverges in a
pessimistic way.
The figure also shows the performance of 3-fold cross validation which
usually gives a better estimate of the test loss
but is computationally more demanding.
"""
print(__doc__)
# Author: Peter Prettenhofer <[email protected]>
#
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn import ensemble
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
# Generate data (adapted from G. Ridgeway's gbm example)
n_samples = 1000
random_state = np.random.RandomState(13)
x1 = random_state.uniform(size=n_samples)
x2 = random_state.uniform(size=n_samples)
x3 = random_state.randint(0, 4, size=n_samples)
p = 1 / (1.0 + np.exp(-(np.sin(3 * x1) - 4 * x2 + x3)))
y = random_state.binomial(1, p, size=n_samples)
X = np.c_[x1, x2, x3]
X = X.astype(np.float32)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5,
random_state=9)
# Fit classifier with out-of-bag estimates
params = {'n_estimators': 1200, 'max_depth': 3, 'subsample': 0.5,
'learning_rate': 0.01, 'min_samples_leaf': 1, 'random_state': 3}
clf = ensemble.GradientBoostingClassifier(**params)
clf.fit(X_train, y_train)
acc = clf.score(X_test, y_test)
print("Accuracy: {:.4f}".format(acc))
n_estimators = params['n_estimators']
x = np.arange(n_estimators) + 1
def heldout_score(clf, X_test, y_test):
    """Compute deviance scores on ``X_test`` and ``y_test``."""
score = np.zeros((n_estimators,), dtype=np.float64)
for i, y_pred in enumerate(clf.staged_decision_function(X_test)):
score[i] = clf.loss_(y_test, y_pred)
return score
def cv_estimate(n_folds=3):
cv = KFold(n_folds=n_folds)
cv_clf = ensemble.GradientBoostingClassifier(**params)
val_scores = np.zeros((n_estimators,), dtype=np.float64)
for train, test in cv.split(X_train, y_train):
cv_clf.fit(X_train[train], y_train[train])
val_scores += heldout_score(cv_clf, X_train[test], y_train[test])
val_scores /= n_folds
return val_scores
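# A small convenience wrapper (hypothetical; it mirrors the computation done
# below): the OOB-optimal iteration is the argmin of the cumulative negative
# OOB improvement, plus one because iteration numbers are 1-based.
def oob_best_iteration(clf):
    """Best boosting iteration according to the out-of-bag estimates."""
    return np.argmin(-np.cumsum(clf.oob_improvement_)) + 1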
# Estimate best n_estimator using cross-validation
cv_score = cv_estimate(3)
# Compute best n_estimator for test data
test_score = heldout_score(clf, X_test, y_test)
# negative cumulative sum of oob improvements
cumsum = -np.cumsum(clf.oob_improvement_)
# min loss according to OOB
oob_best_iter = x[np.argmin(cumsum)]
# min loss according to test (normalize such that first loss is 0)
test_score -= test_score[0]
test_best_iter = x[np.argmin(test_score)]
# min loss according to cv (normalize such that first loss is 0)
cv_score -= cv_score[0]
cv_best_iter = x[np.argmin(cv_score)]
# color brew for the three curves
oob_color = list(map(lambda x: x / 256.0, (190, 174, 212)))
test_color = list(map(lambda x: x / 256.0, (127, 201, 127)))
cv_color = list(map(lambda x: x / 256.0, (253, 192, 134)))
# plot curves and vertical lines for best iterations
plt.plot(x, cumsum, label='OOB loss', color=oob_color)
plt.plot(x, test_score, label='Test loss', color=test_color)
plt.plot(x, cv_score, label='CV loss', color=cv_color)
plt.axvline(x=oob_best_iter, color=oob_color)
plt.axvline(x=test_best_iter, color=test_color)
plt.axvline(x=cv_best_iter, color=cv_color)
# add three vertical lines to xticks
xticks = plt.xticks()
xticks_pos = np.array(xticks[0].tolist() +
[oob_best_iter, cv_best_iter, test_best_iter])
xticks_label = np.array(list(map(lambda t: int(t), xticks[0])) +
['OOB', 'CV', 'Test'])
ind = np.argsort(xticks_pos)
xticks_pos = xticks_pos[ind]
xticks_label = xticks_label[ind]
plt.xticks(xticks_pos, xticks_label)
plt.legend(loc='upper right')
plt.ylabel('normalized loss')
plt.xlabel('number of iterations')
plt.show()
| bsd-3-clause | -6,497,814,080,348,959,000 | 34.029412 | 79 | 0.691646 | false |
DennisDenuto/puppet-commonscripts | files/aws_cli/AWS-ElasticBeanstalk-CLI-2.6.3/eb/macosx/python3/lib/aws/exception.py | 8 | 3871 | #!/usr/bin/env python
#==============================================================================
# Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions
# and limitations under the License.
#==============================================================================
class AwsErrorCode(object):
'''AWS common error code'''
AccessDenied = 'AccessDenied'
InsufficientPrivileges = 'InsufficientPrivileges'
InvalidClientTokenId = 'InvalidClientTokenId'
InvalidParameterCombination = 'InvalidParameterCombination'
InvalidParameterValue = 'InvalidParameterValue'
InvalidQueryParameter = 'InvalidQueryParameter'
MalformedQueryString = 'MalformedQueryString'
MissingParameter = 'MissingParameter'
OptInRequired = 'OptInRequired'
RequestExpired = 'RequestExpired'
Throttling = 'Throttling'
class AwsServiceException(Exception):
def __init__(self, msg, code, http_code):
self._msg = msg
self._code = code
self._http_code = http_code
@property
def message(self):
return self._msg
@property
def code(self):
return self._code
@property
def http_code(self):
return self._http_code
def __str__(self):
return '{0}. {1}'.format(self._code, self._msg)
def __repr__(self):
return 'HTTP {0}:{1}. {2}'.format(self._http_code, self._code, self._msg)
class UnknownHttpCodeException(AwsServiceException):
''' Exception of receiving http code other than 200'''
def __init__(self, message, code, http_code):
super(UnknownHttpCodeException, self).__init__(message, code, http_code)
class MissingParameterException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(MissingParameterException, self).__init__(ex.message, ex.code, ex.http_code)
class InsufficientPrivilegesException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(InsufficientPrivilegesException, self).__init__(ex.message, ex.code, ex.http_code)
class InvalidParameterValueException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(InvalidParameterValueException, self).__init__(ex.message, ex.code, ex.http_code)
class OptInRequiredException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(OptInRequiredException, self).__init__(ex.message, ex.code, ex.http_code)
class AccessDeniedException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(AccessDeniedException, self).__init__(ex.message, ex.code, ex.http_code)
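# A minimal dispatch sketch (hypothetical helper, not part of the original
# module): narrow a generic AwsServiceException to the matching subclass
# based on the error code reported by the service.
def specialize_exception(ex):
    mapping = {
        AwsErrorCode.AccessDenied: AccessDeniedException,
        AwsErrorCode.InsufficientPrivileges: InsufficientPrivilegesException,
        AwsErrorCode.InvalidParameterValue: InvalidParameterValueException,
        AwsErrorCode.MissingParameter: MissingParameterException,
        AwsErrorCode.OptInRequired: OptInRequiredException,
    }
    wrapper = mapping.get(ex.code)
    return wrapper(ex) if wrapper else ex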
| mit | 5,605,147,423,601,906,000 | 40.634409 | 98 | 0.658486 | false |
les69/calvin-base | calvin/actorstore/systemactors/std/Alternate3.py | 2 | 1901 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, condition, guard, manage
class Alternate3(Actor):
"""
Alternating between three streams of tokens
Inputs:
token_1 : first token stream
token_2 : second token stream
token_3 : third token stream
Outputs:
token : resulting token stream
"""
@manage(['next_port'])
def init(self):
self.next_port = 1
@condition(['token_1'], ['token'])
@guard(lambda self, _: self.next_port == 1)
def port_1(self, data):
self.next_port = 2
return ActionResult(production=(data, ))
@condition(['token_2'], ['token'])
@guard(lambda self, _: self.next_port == 2)
def port_2(self, data):
self.next_port = 3
return ActionResult(production=(data, ))
@condition(['token_3'], ['token'])
@guard(lambda self, _: self.next_port == 3)
def port_3(self, data):
self.next_port = 1
return ActionResult(production=(data, ))
action_priority = (port_1, port_2, port_3)
test_set = [
{
'in': {'token_1': [1], 'token_2': ['a'], 'token_3': ['alpha']},
'out': {'token': [1, 'a', 'alpha']}
},
{
'in': {'token_1': [1]},
'out': {'token': [1]}
}
]
| apache-2.0 | 6,361,832,392,234,150,000 | 28.703125 | 76 | 0.598106 | false |
Srogozins/aiohttp | tests/test_py35/test_resp.py | 3 | 1443 | import pytest
import aiohttp
from aiohttp import web
from aiohttp.client import _RequestContextManager
from collections.abc import Coroutine
@pytest.mark.run_loop
async def test_await(create_server, loop):
async def handler(request):
return web.HTTPOk()
app, url = await create_server()
app.router.add_route('GET', '/', handler)
resp = await aiohttp.get(url+'/', loop=loop)
assert resp.status == 200
assert resp.connection is not None
await resp.release()
assert resp.connection is None
@pytest.mark.run_loop
async def test_response_context_manager(create_server, loop):
async def handler(request):
return web.HTTPOk()
app, url = await create_server()
app.router.add_route('GET', '/', handler)
resp = await aiohttp.get(url+'/', loop=loop)
async with resp:
assert resp.status == 200
assert resp.connection is not None
assert resp.connection is None
@pytest.mark.run_loop
async def test_client_api_context_manager(create_server, loop):
async def handler(request):
return web.HTTPOk()
app, url = await create_server()
app.router.add_route('GET', '/', handler)
async with aiohttp.get(url+'/', loop=loop) as resp:
assert resp.status == 200
assert resp.connection is not None
assert resp.connection is None
def test_ctx_manager_is_coroutine():
assert issubclass(_RequestContextManager, Coroutine)
| apache-2.0 | 2,736,350,668,307,375,600 | 25.236364 | 63 | 0.685378 | false |
smurfix/p2pool | p2pool/bitcoin/script.py | 2 | 1585 | from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import bases
def reads_nothing(f):
return '', f
def protoPUSH(length):
return lambda f: bitcoin_data.read(f, length)
def protoPUSHDATA(size_len):
def _(f):
length_str, f = bitcoin_data.read(f, size_len)
length = bases.string_to_natural(length_str[::-1].lstrip(chr(0)))
data, f = bitcoin_data.read(f, length)
return data, f
return _
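# Each proto* helper above returns a small reader closure; the table below
# maps every opcode byte to a (name, reader) pair so that parse() can stay
# completely generic.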
opcodes = {}
for i in xrange(256):
opcodes[i] = 'UNK_' + str(i), reads_nothing
opcodes[0] = '0', reads_nothing
for i in xrange(1, 76):
opcodes[i] = 'PUSH%i' % i, protoPUSH(i)
opcodes[76] = 'PUSHDATA1', protoPUSHDATA(1)
opcodes[77] = 'PUSHDATA2', protoPUSHDATA(2)
opcodes[78] = 'PUSHDATA4', protoPUSHDATA(4)
opcodes[79] = '-1', reads_nothing
for i in xrange(81, 97):
opcodes[i] = str(i - 80), reads_nothing
opcodes[172] = 'CHECKSIG', reads_nothing
opcodes[173] = 'CHECKSIGVERIFY', reads_nothing
opcodes[174] = 'CHECKMULTISIG', reads_nothing
opcodes[175] = 'CHECKMULTISIGVERIFY', reads_nothing
def parse(script):
f = script, 0
while bitcoin_data.size(f):
opcode_str, f = bitcoin_data.read(f, 1)
opcode = ord(opcode_str)
opcode_name, read_func = opcodes[opcode]
opcode_arg, f = read_func(f)
yield opcode_name, opcode_arg
def get_sigop_count(script):
weights = {
'CHECKSIG': 1,
'CHECKSIGVERIFY': 1,
'CHECKMULTISIG': 20,
'CHECKMULTISIGVERIFY': 20,
}
return sum(weights.get(opcode_name, 0) for opcode_name, opcode_arg in parse(script))
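# Minimal usage sketch (the 25-byte pay-to-pubkey-hash script below is a
# made-up example; it assumes p2pool's bitcoin_data read/size helpers are
# importable as at the top of this module):
if __name__ == '__main__':
    p2pkh = '\x76\xa9\x14' + '\x00' * 20 + '\x88\xac'
    for opcode_name, opcode_arg in parse(p2pkh):
        print opcode_name, opcode_arg.encode('hex')
    print 'sigop count:', get_sigop_count(p2pkh)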
| gpl-3.0 | 1,570,432,070,719,183,600 | 30.078431 | 88 | 0.642271 | false |
lgarren/spack | var/spack/repos/builtin/packages/libelf/package.py | 3 | 2051 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libelf(AutotoolsPackage):
"""libelf lets you read, modify or create ELF object files in an
architecture-independent way. The library takes care of size
and endian issues, e.g. you can process a file for SPARC
processors on an Intel-based system."""
homepage = "http://www.mr511.de/software/english.html"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7')
provides('elf@0')
def configure_args(self):
args = ["--enable-shared",
"--disable-dependency-tracking",
"--disable-debug"]
return args
def install(self, spec, prefix):
make('install', parallel=False)
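# Typical usage (hypothetical, for illustration): `spack install libelf@0.8.13`
# builds with the configure flags above, and a dependent spec can request this
# package as the `elf` provider via `^libelf`.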
| lgpl-2.1 | -3,376,593,994,144,040,400 | 40.857143 | 78 | 0.660166 | false |
domob1812/bitcoin | test/functional/feature_rbf.py | 2 | 25198 | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RBF code."""
from decimal import Decimal
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, BIP125_SEQUENCE_NUMBER
from test_framework.script import CScript, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round
from test_framework.script_util import DUMMY_P2WPKH_SCRIPT, DUMMY_2_P2WPKH_SCRIPT
from test_framework.wallet import MiniWallet
MAX_REPLACEMENT_LIMIT = 100
def txToHex(tx):
return tx.serialize().hex()
def make_utxo(node, amount, confirmed=True, scriptPubKey=DUMMY_P2WPKH_SCRIPT):
"""Create a txout with a given amount and scriptPubKey
Mines coins as needed.
confirmed - txouts created will be confirmed in the blockchain;
unconfirmed otherwise.
"""
fee = 1 * COIN
while node.getbalance() < satoshi_round((amount + fee) / COIN):
node.generate(COINBASE_MATURITY)
new_addr = node.getnewaddress()
txid = node.sendtoaddress(new_addr, satoshi_round((amount + fee) / COIN))
tx1 = node.getrawtransaction(txid, 1)
txid = int(txid, 16)
i, _ = next(filter(lambda vout: new_addr == vout[1]['scriptPubKey']['address'], enumerate(tx1['vout'])))
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(txid, i))]
tx2.vout = [CTxOut(amount, scriptPubKey)]
tx2.rehash()
signed_tx = node.signrawtransactionwithwallet(txToHex(tx2))
txid = node.sendrawtransaction(signed_tx['hex'], 0)
# If requested, ensure txouts are confirmed.
if confirmed:
mempool_size = len(node.getrawmempool())
while mempool_size > 0:
node.generate(1)
new_size = len(node.getrawmempool())
# Error out if we have something stuck in the mempool, as this
# would likely be a bug.
assert new_size < mempool_size
mempool_size = new_size
return COutPoint(int(txid, 16), 0)
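# A minimal sketch of the transaction-construction pattern repeated throughout
# the tests below (hypothetical helper; the tests themselves build their
# transactions inline):
def make_opt_in_spend(outpoint, value, script=DUMMY_P2WPKH_SCRIPT):
    """Build a bare, BIP125-signalling spend of `outpoint` paying `value` to `script`."""
    tx = CTransaction()
    tx.vin = [CTxIn(outpoint, nSequence=BIP125_SEQUENCE_NUMBER)]
    tx.vout = [CTxOut(value, script)]
    tx.rehash()
    return tx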
class ReplaceByFeeTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [
[
"-acceptnonstdtxn=1",
"-maxorphantx=1000",
"-limitancestorcount=50",
"-limitancestorsize=101",
"-limitdescendantcount=200",
"-limitdescendantsize=101",
],
]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
make_utxo(self.nodes[0], 1 * COIN)
# Ensure nodes are synced
self.sync_all()
self.log.info("Running test simple doublespend...")
self.test_simple_doublespend()
self.log.info("Running test doublespend chain...")
self.test_doublespend_chain()
self.log.info("Running test doublespend tree...")
self.test_doublespend_tree()
self.log.info("Running test replacement feeperkb...")
self.test_replacement_feeperkb()
self.log.info("Running test spends of conflicting outputs...")
self.test_spends_of_conflicting_outputs()
self.log.info("Running test new unconfirmed inputs...")
self.test_new_unconfirmed_inputs()
self.log.info("Running test too many replacements...")
self.test_too_many_replacements()
self.log.info("Running test opt-in...")
self.test_opt_in()
self.log.info("Running test RPC...")
self.test_rpc()
self.log.info("Running test prioritised transactions...")
self.test_prioritised_transactions()
self.log.info("Running test no inherited signaling...")
self.test_no_inherited_signaling()
self.log.info("Passed")
def test_simple_doublespend(self):
"""Simple doublespend"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
# make_utxo may have generated a bunch of blocks, so we need to sync
# before we can spend the coins generated, or else the resulting
# transactions might not be accepted by our peers.
self.sync_all()
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
self.sync_all()
# Should fail because we haven't changed the fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(1 * COIN, DUMMY_2_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
# Extra 0.1 BTC fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# Works when enabled
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
mempool = self.nodes[0].getrawmempool()
assert tx1a_txid not in mempool
assert tx1b_txid in mempool
assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
def test_doublespend_chain(self):
"""Doublespend of a long chain"""
initial_nValue = 50 * COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
prevout = tx0_outpoint
remaining_value = initial_nValue
chain_txids = []
while remaining_value > 10 * COIN:
remaining_value -= 1 * COIN
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = [CTxOut(remaining_value, CScript([1, OP_DROP] * 15 + [1]))]
tx_hex = txToHex(tx)
txid = self.nodes[0].sendrawtransaction(tx_hex, 0)
chain_txids.append(txid)
prevout = COutPoint(int(txid, 16), 0)
# Whether the double-spend is allowed is evaluated by including all
# child fees - 40 BTC - so this attempt is rejected.
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - 30 * COIN, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
# Accepted with sufficient fee
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)
mempool = self.nodes[0].getrawmempool()
for doublespent_txid in chain_txids:
assert doublespent_txid not in mempool
def test_doublespend_tree(self):
"""Doublespend of a big tree of transactions"""
initial_nValue = 50 * COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001 * COIN, _total_txs=None):
if _total_txs is None:
_total_txs = [0]
if _total_txs[0] >= max_txs:
return
txout_value = (initial_value - fee) // tree_width
if txout_value < fee:
return
vout = [CTxOut(txout_value, CScript([i+1]))
for i in range(tree_width)]
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = vout
tx_hex = txToHex(tx)
assert len(tx.serialize()) < 100000
txid = self.nodes[0].sendrawtransaction(tx_hex, 0)
yield tx
_total_txs[0] += 1
txid = int(txid, 16)
for i, txout in enumerate(tx.vout):
for x in branch(COutPoint(txid, i), txout_value,
max_txs,
tree_width=tree_width, fee=fee,
_total_txs=_total_txs):
yield x
fee = int(0.0001 * COIN)
n = MAX_REPLACEMENT_LIMIT
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
# Attempt double-spend, will fail because too little fee paid
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - fee * n, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
# 1 BTC fee is enough
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - fee * n - 1 * COIN, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)
mempool = self.nodes[0].getrawmempool()
for tx in tree_txs:
tx.rehash()
assert tx.hash not in mempool
# Try again, but with more total transactions than the "max txs
# double-spent at once" anti-DoS limit.
for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2):
fee = int(0.0001 * COIN)
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - 2 * fee * n, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception
assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
for tx in tree_txs:
tx.rehash()
self.nodes[0].getrawtransaction(tx.hash)
def test_replacement_feeperkb(self):
"""Replacement requires fee-per-KB to be higher"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
self.nodes[0].sendrawtransaction(tx1a_hex, 0)
# Higher fee, but the fee per KB is much lower, so the replacement is
# rejected.
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 999000]))]
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
def test_spends_of_conflicting_outputs(self):
"""Replacements that spend conflicting tx outputs are rejected"""
utxo1 = make_utxo(self.nodes[0], int(1.2 * COIN))
utxo2 = make_utxo(self.nodes[0], 3 * COIN)
tx1a = CTransaction()
tx1a.vin = [CTxIn(utxo1, nSequence=0)]
tx1a.vout = [CTxOut(int(1.1 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
tx1a_txid = int(tx1a_txid, 16)
        # Directly spend an output of the transaction we're replacing.
tx2 = CTransaction()
tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)]
tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0))
tx2.vout = tx1a.vout
tx2_hex = txToHex(tx2)
# This will raise an exception
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, 0)
# Spend tx1a's output to test the indirect case.
tx1b = CTransaction()
tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
tx1b.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
tx1b_txid = int(tx1b_txid, 16)
tx2 = CTransaction()
tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0),
CTxIn(COutPoint(tx1b_txid, 0))]
tx2.vout = tx1a.vout
tx2_hex = txToHex(tx2)
# This will raise an exception
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, 0)
def test_new_unconfirmed_inputs(self):
"""Replacements that add new unconfirmed inputs are rejected"""
confirmed_utxo = make_utxo(self.nodes[0], int(1.1 * COIN))
unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1 * COIN), False)
tx1 = CTransaction()
tx1.vin = [CTxIn(confirmed_utxo)]
tx1.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1_hex = txToHex(tx1)
self.nodes[0].sendrawtransaction(tx1_hex, 0)
tx2 = CTransaction()
tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
tx2.vout = tx1.vout
tx2_hex = txToHex(tx2)
# This will raise an exception
assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, 0)
def test_too_many_replacements(self):
"""Replacements that evict too many transactions are rejected"""
# Try directly replacing more than MAX_REPLACEMENT_LIMIT
# transactions
# Start by creating a single transaction with many outputs
initial_nValue = 10 * COIN
utxo = make_utxo(self.nodes[0], initial_nValue)
fee = int(0.0001 * COIN)
split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1))
outputs = []
for _ in range(MAX_REPLACEMENT_LIMIT + 1):
outputs.append(CTxOut(split_value, CScript([1])))
splitting_tx = CTransaction()
splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
splitting_tx.vout = outputs
splitting_tx_hex = txToHex(splitting_tx)
txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, 0)
txid = int(txid, 16)
# Now spend each of those outputs individually
for i in range(MAX_REPLACEMENT_LIMIT + 1):
tx_i = CTransaction()
tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
tx_i.vout = [CTxOut(split_value - fee, DUMMY_P2WPKH_SCRIPT)]
tx_i_hex = txToHex(tx_i)
self.nodes[0].sendrawtransaction(tx_i_hex, 0)
# Now create doublespend of the whole lot; should fail.
# Need a big enough fee to cover all spending transactions and have
# a higher fee rate
double_spend_value = (split_value - 100 * fee) * (MAX_REPLACEMENT_LIMIT + 1)
inputs = []
for i in range(MAX_REPLACEMENT_LIMIT + 1):
inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
double_tx = CTransaction()
double_tx.vin = inputs
double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
double_tx_hex = txToHex(double_tx)
# This will raise an exception
assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, 0)
# If we remove an input, it should pass
double_tx = CTransaction()
double_tx.vin = inputs[0:-1]
double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
double_tx_hex = txToHex(double_tx)
self.nodes[0].sendrawtransaction(double_tx_hex, 0)
def test_opt_in(self):
"""Replacing should only work if orig tx opted in"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
# Create a non-opting in transaction
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
# This transaction isn't shown as replaceable
assert_equal(self.nodes[0].getmempoolentry(tx1a_txid)['bip125-replaceable'], False)
# Shouldn't be able to double-spend
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# This will raise an exception
assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
# Create a different non-opting in transaction
tx2a = CTransaction()
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx2a_hex = txToHex(tx2a)
tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, 0)
# Still shouldn't be able to double-spend
tx2b = CTransaction()
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx2b_hex = txToHex(tx2b)
# This will raise an exception
assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, 0)
# Now create a new transaction that spends from tx1a and tx2a
# opt-in on one of the inputs
# Transaction should be replaceable on either input
tx1a_txid = int(tx1a_txid, 16)
tx2a_txid = int(tx2a_txid, 16)
tx3a = CTransaction()
tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)]
tx3a.vout = [CTxOut(int(0.9 * COIN), CScript([b'c'])), CTxOut(int(0.9 * COIN), CScript([b'd']))]
tx3a_hex = txToHex(tx3a)
tx3a_txid = self.nodes[0].sendrawtransaction(tx3a_hex, 0)
# This transaction is shown as replaceable
assert_equal(self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'], True)
tx3b = CTransaction()
tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
tx3b.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx3b_hex = txToHex(tx3b)
tx3c = CTransaction()
tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
tx3c.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx3c_hex = txToHex(tx3c)
self.nodes[0].sendrawtransaction(tx3b_hex, 0)
# If tx3b was accepted, tx3c won't look like a replacement,
# but make sure it is accepted anyway
self.nodes[0].sendrawtransaction(tx3c_hex, 0)
def test_prioritised_transactions(self):
# Ensure that fee deltas used via prioritisetransaction are
# correctly used by replacement logic
# 1. Check that feeperkb uses modified fees
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
# Higher fee, but the actual fee per KB is much lower.
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 740000]))]
tx1b_hex = txToHex(tx1b)
# Verify tx1b cannot replace tx1a.
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
# Use prioritisetransaction to set tx1a's fee to 0.
self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1 * COIN))
# Now tx1b should be able to replace tx1a
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
assert tx1b_txid in self.nodes[0].getrawmempool()
# 2. Check that absolute fee checks use modified fee.
tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
tx2a = CTransaction()
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx2a_hex = txToHex(tx2a)
self.nodes[0].sendrawtransaction(tx2a_hex, 0)
# Lower fee, but we'll prioritise it
tx2b = CTransaction()
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2b.vout = [CTxOut(int(1.01 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx2b.rehash()
tx2b_hex = txToHex(tx2b)
# Verify tx2b cannot replace tx2a.
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, 0)
# Now prioritise tx2b to have a higher modified fee
self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1 * COIN))
# tx2b should now be accepted
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, 0)
assert tx2b_txid in self.nodes[0].getrawmempool()
def test_rpc(self):
us0 = self.nodes[0].listunspent()[0]
ins = [us0]
outs = {self.nodes[0].getnewaddress(): Decimal(1.0000000)}
rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True)
rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False)
json0 = self.nodes[0].decoderawtransaction(rawtx0)
json1 = self.nodes[0].decoderawtransaction(rawtx1)
        assert_equal(json0["vin"][0]["sequence"], 4294967293)  # 0xfffffffd, opt-in RBF
        assert_equal(json1["vin"][0]["sequence"], 4294967295)  # 0xffffffff, final
rawtx2 = self.nodes[0].createrawtransaction([], outs)
frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True})
frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False})
json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
        assert_equal(json0["vin"][0]["sequence"], 4294967293)  # 0xfffffffd, opt-in RBF
        assert_equal(json1["vin"][0]["sequence"], 4294967294)  # 0xfffffffe, not replaceable
def test_no_inherited_signaling(self):
wallet = MiniWallet(self.nodes[0])
wallet.scan_blocks(start=76, num=1)
confirmed_utxo = wallet.get_utxo()
# Create an explicitly opt-in parent transaction
optin_parent_tx = wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=BIP125_SEQUENCE_NUMBER,
fee_rate=Decimal('0.01'),
)
assert_equal(True, self.nodes[0].getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
replacement_parent_tx = wallet.create_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=BIP125_SEQUENCE_NUMBER,
fee_rate=Decimal('0.02'),
)
# Test if parent tx can be replaced.
res = self.nodes[0].testmempoolaccept(rawtxs=[replacement_parent_tx['hex']])[0]
# Parent can be replaced.
assert_equal(res['allowed'], True)
# Create an opt-out child tx spending the opt-in parent
parent_utxo = wallet.get_utxo(txid=optin_parent_tx['txid'])
optout_child_tx = wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=parent_utxo,
sequence=0xffffffff,
fee_rate=Decimal('0.01'),
)
# Reports true due to inheritance
assert_equal(True, self.nodes[0].getmempoolentry(optout_child_tx['txid'])['bip125-replaceable'])
replacement_child_tx = wallet.create_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=parent_utxo,
sequence=0xffffffff,
fee_rate=Decimal('0.02'),
mempool_valid=False,
)
# Broadcast replacement child tx
# BIP 125 :
# 1. The original transactions signal replaceability explicitly or through inheritance as described in the above
# Summary section.
# The original transaction (`optout_child_tx`) doesn't signal RBF but its parent (`optin_parent_tx`) does.
# The replacement transaction (`replacement_child_tx`) should be able to replace the original transaction.
# See CVE-2021-31876 for further explanations.
assert_equal(True, self.nodes[0].getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
assert_raises_rpc_error(-26, 'txn-mempool-conflict', self.nodes[0].sendrawtransaction, replacement_child_tx["hex"], 0)
if __name__ == '__main__':
ReplaceByFeeTest().main()
| mit | 489,323,382,248,004,300 | 39.060413 | 126 | 0.614057 | false |
bob-the-hamster/commandergenius | project/jni/python/src/Lib/plat-mac/lib-scriptpackages/Finder/Files.py | 80 | 6439 | """Suite Files: Classes representing files
Level 1, version 1
Generated from /System/Library/CoreServices/Finder.app
AETE/AEUT resource version 0/144, language 0, script 0
"""
import aetools
import MacOS
_code = 'fndr'
class Files_Events:
pass
class alias_file(aetools.ComponentItem):
"""alias file - An alias file (created with \xd2Make Alias\xd3) """
want = 'alia'
class _Prop__3c_Inheritance_3e_(aetools.NProperty):
"""<Inheritance> - inherits some of its properties from the file class """
which = 'c@#^'
want = 'file'
class _Prop_original_item(aetools.NProperty):
"""original item - the original item pointed to by the alias """
which = 'orig'
want = 'obj '
alias_files = alias_file
class application_file(aetools.ComponentItem):
"""application file - An application's file on disk """
want = 'appf'
class _Prop_accepts_high_level_events(aetools.NProperty):
"""accepts high level events - Is the application high-level event aware? (OBSOLETE: always returns true) """
which = 'isab'
want = 'bool'
class _Prop_has_scripting_terminology(aetools.NProperty):
"""has scripting terminology - Does the process have a scripting terminology, i.e., can it be scripted? """
which = 'hscr'
want = 'bool'
class _Prop_minimum_size(aetools.NProperty):
"""minimum size - the smallest memory size with which the application can be launched """
which = 'mprt'
want = 'long'
class _Prop_opens_in_Classic(aetools.NProperty):
"""opens in Classic - Should the application launch in the Classic environment? """
which = 'Clsc'
want = 'bool'
class _Prop_preferred_size(aetools.NProperty):
"""preferred size - the memory size with which the application will be launched """
which = 'appt'
want = 'long'
class _Prop_suggested_size(aetools.NProperty):
"""suggested size - the memory size with which the developer recommends the application be launched """
which = 'sprt'
want = 'long'
application_files = application_file
class clipping(aetools.ComponentItem):
"""clipping - A clipping """
want = 'clpf'
class _Prop_clipping_window(aetools.NProperty):
"""clipping window - (NOT AVAILABLE YET) the clipping window for this clipping """
which = 'lwnd'
want = 'obj '
clippings = clipping
class document_file(aetools.ComponentItem):
"""document file - A document file """
want = 'docf'
document_files = document_file
class file(aetools.ComponentItem):
"""file - A file """
want = 'file'
class _Prop_creator_type(aetools.NProperty):
"""creator type - the OSType identifying the application that created the item """
which = 'fcrt'
want = 'type'
class _Prop_file_type(aetools.NProperty):
"""file type - the OSType identifying the type of data contained in the item """
which = 'asty'
want = 'type'
class _Prop_product_version(aetools.NProperty):
"""product version - the version of the product (visible at the top of the \xd2Get Info\xd3 window) """
which = 'ver2'
want = 'utxt'
class _Prop_stationery(aetools.NProperty):
"""stationery - Is the file a stationery pad? """
which = 'pspd'
want = 'bool'
class _Prop_version(aetools.NProperty):
"""version - the version of the file (visible at the bottom of the \xd2Get Info\xd3 window) """
which = 'vers'
want = 'utxt'
files = file
class internet_location_file(aetools.ComponentItem):
"""internet location file - An file containing an internet location """
want = 'inlf'
class _Prop_location(aetools.NProperty):
"""location - the internet location """
which = 'iloc'
want = 'utxt'
internet_location_files = internet_location_file
class package(aetools.ComponentItem):
"""package - A package """
want = 'pack'
packages = package
alias_file._superclassnames = ['file']
alias_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'original_item' : _Prop_original_item,
}
alias_file._privelemdict = {
}
application_file._superclassnames = ['file']
application_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'accepts_high_level_events' : _Prop_accepts_high_level_events,
'has_scripting_terminology' : _Prop_has_scripting_terminology,
'minimum_size' : _Prop_minimum_size,
'opens_in_Classic' : _Prop_opens_in_Classic,
'preferred_size' : _Prop_preferred_size,
'suggested_size' : _Prop_suggested_size,
}
application_file._privelemdict = {
}
clipping._superclassnames = ['file']
clipping._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'clipping_window' : _Prop_clipping_window,
}
clipping._privelemdict = {
}
document_file._superclassnames = ['file']
document_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
}
document_file._privelemdict = {
}
import Finder_items
file._superclassnames = ['item']
file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'creator_type' : _Prop_creator_type,
'file_type' : _Prop_file_type,
'product_version' : _Prop_product_version,
'stationery' : _Prop_stationery,
'version' : _Prop_version,
}
file._privelemdict = {
}
internet_location_file._superclassnames = ['file']
internet_location_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'location' : _Prop_location,
}
internet_location_file._privelemdict = {
}
package._superclassnames = ['item']
package._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
}
package._privelemdict = {
}
#
# Indices of types declared in this module
#
_classdeclarations = {
'alia' : alias_file,
'appf' : application_file,
'clpf' : clipping,
'docf' : document_file,
'file' : file,
'inlf' : internet_location_file,
'pack' : package,
}
_propdeclarations = {
'Clsc' : _Prop_opens_in_Classic,
'appt' : _Prop_preferred_size,
'asty' : _Prop_file_type,
'c@#^' : _Prop__3c_Inheritance_3e_,
'fcrt' : _Prop_creator_type,
'hscr' : _Prop_has_scripting_terminology,
'iloc' : _Prop_location,
'isab' : _Prop_accepts_high_level_events,
'lwnd' : _Prop_clipping_window,
'mprt' : _Prop_minimum_size,
'orig' : _Prop_original_item,
'pspd' : _Prop_stationery,
'sprt' : _Prop_suggested_size,
'ver2' : _Prop_product_version,
'vers' : _Prop_version,
}
_compdeclarations = {
}
_enumdeclarations = {
}
| lgpl-2.1 | -3,083,165,114,349,103,600 | 29.372642 | 113 | 0.669514 | false |
imply/chuu | tools/vim/chromium.ycm_extra_conf.py | 47 | 6223 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Autocompletion config for YouCompleteMe in Chromium.
#
# USAGE:
#
# 1. Install YCM [https://github.com/Valloric/YouCompleteMe]
# (Googlers should check out [go/ycm])
#
# 2. Point to this config file in your .vimrc:
# let g:ycm_global_ycm_extra_conf =
# '<chrome_depot>/src/tools/vim/chromium.ycm_extra_conf.py'
#
# 3. Profit
#
#
# Usage notes:
#
# * You must use ninja & clang to build Chromium.
#
# * You must have run gyp_chromium and built Chromium recently.
#
#
# Hacking notes:
#
# * The purpose of this script is to construct an accurate enough command line
# for YCM to pass to clang so it can build and extract the symbols.
#
# * Right now, we only pull the -I and -D flags. That seems to be sufficient
# for everything I've used it for.
#
# * That whole ninja & clang thing? We could support other configs if someone
# were willing to write the correct commands and a parser.
#
# * This has only been tested on gPrecise.
import os
import subprocess
# Flags from YCM's default config.
flags = [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x',
'c++',
]
def PathExists(*args):
return os.path.exists(os.path.join(*args))
def FindChromeSrcFromFilename(filename):
"""Searches for the root of the Chromium checkout.
Simply checks parent directories until it finds .gclient and src/.
Args:
filename: (String) Path to source file being edited.
Returns:
(String) Path of 'src/', or None if unable to find.
"""
curdir = os.path.normpath(os.path.dirname(filename))
while not (PathExists(curdir, 'src') and PathExists(curdir, 'src', 'DEPS')
and (PathExists(curdir, '.gclient')
or PathExists(curdir, 'src', '.git'))):
nextdir = os.path.normpath(os.path.join(curdir, '..'))
if nextdir == curdir:
return None
curdir = nextdir
return os.path.join(curdir, 'src')
# Largely copied from ninja-build.vim (guess_configuration)
def GetNinjaOutputDirectory(chrome_root):
"""Returns either <chrome_root>/out/Release or <chrome_root>/out/Debug.
The configuration chosen is the one most recently generated/built."""
root = os.path.join(chrome_root, 'out')
debug_path = os.path.join(root, 'Debug')
release_path = os.path.join(root, 'Release')
def is_release_15s_newer(test_path):
try:
debug_mtime = os.path.getmtime(os.path.join(debug_path, test_path))
except os.error:
debug_mtime = 0
try:
rel_mtime = os.path.getmtime(os.path.join(release_path, test_path))
except os.error:
rel_mtime = 0
return rel_mtime - debug_mtime >= 15
if is_release_15s_newer('build.ninja') or is_release_15s_newer('protoc'):
return release_path
return debug_path
def GetClangCommandFromNinjaForFilename(chrome_root, filename):
"""Returns the command line to build |filename|.
Asks ninja how it would build the source file. If the specified file is a
header, tries to find its companion source file first.
Args:
chrome_root: (String) Path to src/.
filename: (String) Path to source file being edited.
Returns:
(List of Strings) Command line arguments for clang.
"""
if not chrome_root:
return []
# Generally, everyone benefits from including Chromium's src/, because all of
# Chromium's includes are relative to that.
chrome_flags = ['-I' + os.path.join(chrome_root)]
# Header files can't be built. Instead, try to match a header file to its
# corresponding source file.
if filename.endswith('.h'):
alternates = ['.cc', '.cpp']
for alt_extension in alternates:
alt_name = filename[:-2] + alt_extension
if os.path.exists(alt_name):
filename = alt_name
break
else:
# If this is a standalone .h file with no source, the best we can do is
# try to use the default flags.
return chrome_flags
# Ninja needs the path to the source file from the output build directory.
# Cut off the common part and /.
subdir_filename = filename[len(chrome_root)+1:]
rel_filename = os.path.join('..', '..', subdir_filename)
out_dir = GetNinjaOutputDirectory(chrome_root)
# Ask ninja how it would build our source file.
p = subprocess.Popen(['ninja', '-v', '-C', out_dir, '-t',
'commands', rel_filename + '^'],
stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode:
return chrome_flags
# Ninja might execute several commands to build something. We want the last
# clang command.
clang_line = None
for line in reversed(stdout.split('\n')):
if 'clang' in line:
clang_line = line
break
else:
return chrome_flags
# Parse out the -I and -D flags. These seem to be the only ones that are
# important for YCM's purposes.
for flag in clang_line.split(' '):
if flag.startswith('-I'):
# Relative paths need to be resolved, because they're relative to the
# output dir, not the source.
if flag[2] == '/':
chrome_flags.append(flag)
else:
abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
chrome_flags.append('-I' + abs_path)
elif flag.startswith('-') and flag[1] in 'DWFfmO':
if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
        # These flags cause libclang (3.3) to crash. Remove them until things
        # are fixed.
continue
chrome_flags.append(flag)
return chrome_flags
def FlagsForFile(filename):
"""This is the main entry point for YCM. Its interface is fixed.
Args:
filename: (String) Path to source file being edited.
Returns:
(Dictionary)
'flags': (List of Strings) Command line flags.
'do_cache': (Boolean) True if the result should be cached.
"""
chrome_root = FindChromeSrcFromFilename(filename)
chrome_flags = GetClangCommandFromNinjaForFilename(chrome_root,
filename)
final_flags = flags + chrome_flags
return {
'flags': final_flags,
'do_cache': True
}
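# Illustrative manual invocation (an addition for this write-up, not part of
# the original config; the fallback path below is hypothetical). Inside a real
# Chromium checkout with a recent ninja build this prints the flags YCM would
# hand to libclang; elsewhere it falls back to the default flags.
if __name__ == '__main__':
  import pprint
  import sys
  target = sys.argv[1] if len(sys.argv) > 1 else '/path/to/chromium/src/base/logging.cc'
  pprint.pprint(FlagsForFile(target))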
| bsd-3-clause | -5,269,646,192,685,857,000 | 29.655172 | 80 | 0.657561 | false |
matbra/bokeh | examples/interactions/interactive_bubble/data.py | 49 | 1265 | import numpy as np
from bokeh.palettes import Spectral6
def process_data():
from bokeh.sampledata.gapminder import fertility, life_expectancy, population, regions
# Make the column names ints not strings for handling
columns = list(fertility.columns)
years = list(range(int(columns[0]), int(columns[-1])))
rename_dict = dict(zip(columns, years))
fertility = fertility.rename(columns=rename_dict)
life_expectancy = life_expectancy.rename(columns=rename_dict)
population = population.rename(columns=rename_dict)
regions = regions.rename(columns=rename_dict)
# Turn population into bubble sizes. Use min_size and factor to tweak.
scale_factor = 200
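    # sqrt(population / pi) converts population (treated as a circle's area)
    # into a radius, so bubble area, not diameter, scales with population.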
population_size = np.sqrt(population / np.pi) / scale_factor
min_size = 3
population_size = population_size.where(population_size >= min_size).fillna(min_size)
# Use pandas categories and categorize & color the regions
regions.Group = regions.Group.astype('category')
regions_list = list(regions.Group.cat.categories)
def get_color(r):
return Spectral6[regions_list.index(r.Group)]
regions['region_color'] = regions.apply(get_color, axis=1)
return fertility, life_expectancy, population_size, regions, years, regions_list
| bsd-3-clause | 5,114,000,577,044,321,000 | 37.333333 | 90 | 0.72253 | false |
tehpug/TehPUG-flask | wsgi/app/forms.py | 1 | 1502 | from flask.ext.wtf import Form
from wtforms import TextField, BooleanField, PasswordField, TextAreaField, SelectField
from wtforms.validators import Required, Length, email, url, Optional
import os
class RegisterForm(Form):
username = TextField('username', validators = [Required(), Length(min = 4, max = 50)])
password = PasswordField('password', validators = [Required(), Length(min = 4, max = 50)])
email = TextField('email', validators = [Required(), Length(min = 6, max = 50), email()])
    admin = SelectField('admin', choices = [('No','No'), ('Yes','Yes')])
class LoginForm(Form):
username = TextField('username', validators = [Required(), Length(min = 4, max = 50)])
password = PasswordField('password', validators = [Required()])
remember_me = BooleanField('remember_me', default = False)
class AddSessionForm(Form):
title = TextField('title', validators = [Required(), Length(min = 5, max = 100)])
description = TextAreaField('description', validators = [Length(min = 0, max = 4000)])
sound = SelectField('sound', validators = [Optional()])
class AddNewsForm(Form):
title = TextField('title', validators = [Required(), Length(min = 5, max= 100)])
description = TextAreaField('description', validators = [Length(min = 0, max = 4000)])
class EditProfileForm(Form):
email = TextField('email', validators = [Required(), Length(min = 6, max = 50), email()])
    website = TextField('website', validators = [url()])
bio = TextAreaField('bio', validators = [Length(max = 256)]) | gpl-2.0 | -2,651,351,099,410,685,400 | 50.827586 | 91 | 0.691079 | false |
shawnadelic/shuup | shuup/core/utils/vat.py | 2 | 8413 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import re
import six
from django.core.exceptions import ValidationError
# Patterns from
# http://www.vero.fi/fi-FI/Syventavat_veroohjeet/Arvonlisaverotus/Kansainvalinen_kauppa/EUmaiden_arvonlisaverotunnisteet(14426)
PATTERNS = {
"AT": {
"country": "Austria",
"iso3166": "AT",
"pattern": "U99999999", # Initial always U, then 8 numbers
},
"BE": {
"country": "Belgium",
"iso3166": "BE",
"pattern": "9999999999", # 1 block of 10 digits
},
"BG": {
"country": "Bulgaria",
"iso3166": "BG",
"pattern": [
"999999999", # 1 block of 9 digits
"9999999999", # 1 block of 10 digits
]
},
"CY": {
"country": "Cyprus",
"iso3166": "CY",
"pattern": "99999999L", # 1 block of 9 characters
},
"CZ": {
"country": "Czech Republic",
"iso3166": "CZ",
"pattern": [
"99999999",
"999999999",
"9999999999"
]
},
"DE": {
"country": "Germany",
"iso3166": "DE",
"pattern": "999999999", # 1 block of 9 digits
},
"DK": {
"country": "Denmark",
"iso3166": "DK",
"pattern": "99999999", # 4 blocks of 2 digits
},
"EE": {
"country": "Estonia",
"iso3166": "EE",
"pattern": "999999999", # 1 block of 9 digits
},
"EL": {
"iso3166": "GR",
"country": "Greece",
"pattern": "999999999", # 1 block of 9 digits
},
"ES": {
"country": "Spain",
"iso3166": "ES",
"pattern": [
"X9999999X4", # 1 block of 9 characters
"X99999999",
"99999999X",
"X9999999X"
]
# CIF (Certificado de Identificación Fiscal): This is the tax ID number for all companies.
# It consists of a letter followed by 8 digits. The letter represents the type of company,
# the most common being an 'A' for Sociedad Anónima or a 'B' for Sociedad Limitada.
# For companies nonresident in Spain, the letter is 'N'.
# VAT number (Número IVA): This is 'ES' followed by the CIF.
# From vero.fi. 9 characters where first or last can be chars or number, but can not be
# numbers.
},
"FI": {
"country": "Finland",
"iso3166": "FI",
"pattern": "99999999", # 1 block of 8 digits
},
"FR": {
"country": "France",
"iso3166": "FR",
"pattern": "XX999999999", # 1 block of 2 characters, 1 block of 9 digits
},
"GB": {
"country": "United Kingdom",
"iso3166": "GB",
"pattern": [
"999999999", # 1 block of 9 or 12 digits
"999999999999",
"GD999",
"HA999"
]
},
"HU": {
"iso3166": "HU",
"country": "Hungary",
"pattern": "99999999", # 1 block of 8 digits
},
"HR": {
"iso3166": "HR",
"country": "Croatia",
"pattern": "99999999999", # 1 block of 11 digits
},
"IE": {
"iso3166": "IE",
"country": "Ireland",
"pattern": [
"9S99999L", # 1 block of 8 or 9 characters
"9999999LL"
]
},
"IT": {
"iso3166": "IT",
"country": "Italy",
"pattern": "99999999999", # 1 block of 11 digits
},
"LT": {
"iso3166": "LT",
"country": "Lithuania",
"pattern": [
"999999999",
"999999999999", # 1 block of 9 digits, or 1 block of 12 digits
]
},
"LU": {
"iso3166": "LU",
"country": "Luxembourg",
"pattern": "99999999", # 1 block of 8 digits
},
"LV": {
"country": "Latvia",
"iso3166": "LV",
"pattern": "99999999999", # 1 block of 11 digits
},
"MT": {
"country": "Malta",
"iso3166": "MT",
"pattern": "99999999", # 1 block of 8 digits
},
"NL": {
"country": "The Netherlands",
"iso3166": "NL",
"pattern": "999999999B99", # 1 block of 12 characters. From vero.fi tenth char after country code is allways B
},
"PL": {
"country": "Poland",
"iso3166": "PL",
"pattern": "9999999999", # 1 block of 10 digits
},
"PT": {
"country": "Portugal",
"iso3166": "PT",
"pattern": "999999999", # 1 block of 9 digits
},
"RO": {
"country": "Romania",
"iso3166": "RO",
"pattern": "99R", # 1 block of minimum 2 digits and maximum 10 digits
},
"SE": {
"country": "Sweden",
"iso3166": "SE",
"pattern": "999999999901", # 1 block of 12 digits. From vero.fi 2 last digits is allways 01
},
"SI": {
"country": "Slovenia",
"iso3166": "SI",
"pattern": "99999999", # 1 block of 8 digits
},
"SK": {
"country": "Slovakia",
"iso3166": "SK",
"pattern": "9999999999", # 1 block of 10 digits
},
}
# *: Format excludes 2 letter alpha prefix
# 9: A digit
# X: A letter or a digit
# S: A letter; a digit; "+" or "*"
# L: A letter
def compile_pattern(prefix, pattern):
r = pattern.replace(" ", "")
for gf, gt in (
("9", "[0-9]"),
("R", "[0-9]*"),
("X", "[a-z0-9]"),
("S", "[a-z0-9+*]"),
("L", "[a-z]"),
):
regex_frag = "(%s{%%d})" % gt
def gt(m):
return (regex_frag % len(m.group(0)))
r = re.sub(gf + "+", gt, r)
return re.compile("^" + prefix + r + "$", re.I)
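# Illustrative note: compile_pattern("FI", "99999999") yields a case-insensitive
# regex equivalent to ^FI([0-9]{8})$, and compile_pattern("FR", "XX999999999")
# yields ^FR([a-z0-9]{2})([0-9]{9})$.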
class VatValidationError(ValidationError):
code = None
def __init__(self, *args, **kwargs):
code = kwargs.pop("code", self.code)
super(VatValidationError, self).__init__(*args, code=code, **kwargs)
class VatCannotIdentifyValidationError(VatValidationError):
code = "vat_cannot_identify"
class VatInvalidValidationError(VatValidationError):
code = "vat_invalid"
def verify_vat(vat_id, default_prefix=""):
""" Verify an EU VAT ID.
Returns a tuple (prefix, code_parts) -- if both are truthy, the validation succeeded.
If the prefix part is falsy, then the prefix was unknown and no validation was even attempted.
If the prefix part is truthy, then it will contain the country prefix used for validation.
The code_parts part can still be falsy, if the validation for the country's VAT number pattern failed.
:param vat_id: The VAT ID string to validate.
:type vat_id: str
:param default_prefix: The default prefix to assume if none can be parsed.
:type default_prefix: str
:return: Tuple of (prefix, code_parts)
"""
# Normalize the VAT ID a little bit...
vat_id = re.sub(r"\s+", "", vat_id.upper())
vat_id = vat_id.replace("-", "") # TODO: Not sure if this is a good idea
prefix = vat_id[:2]
if prefix not in PATTERNS: # Okay, it's unknown thus far, so try again with the default prefix if any
prefix = default_prefix
# Then see if we know about this prefix.
spec = PATTERNS.get(prefix)
if not spec or not prefix: # Sorry, no dice. :/
raise VatCannotIdentifyValidationError("VAT ID could not be identified")
if not vat_id.startswith(prefix): # Add the prefix back into the VAT if required
vat_id = prefix + vat_id
    # Get the relevant pattern(s) (one or more) from the spec
patterns = (spec.get("pattern") or [])
if isinstance(patterns, six.string_types):
patterns = [patterns]
for pat in patterns:
        regexp = compile_pattern(prefix, pat)  # Prefix will be added to the resulting regex.
match = regexp.match(vat_id)
if match:
return (prefix, match.groups())
raise VatInvalidValidationError(
"VAT ID for %(country)s could not be validated" % spec)
def get_vat_prefix_for_country(iso3166):
iso3166 = six.text_type(iso3166).upper()
for prefix, data in six.iteritems(PATTERNS): # pragma: no branch
if data.get("iso3166") == iso3166:
return prefix
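if __name__ == "__main__":  # pragma: no cover
    # Smoke check added for illustration. "FI01234567" is a made-up value that
    # merely matches the Finnish pattern declared above; it is not a real VAT
    # registration number.
    print(verify_vat("FI01234567"))
    print(get_vat_prefix_for_country("FI"))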
| agpl-3.0 | -144,633,080,479,738,750 | 28.612676 | 127 | 0.537337 | false |
sekikn/incubator-airflow | airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py | 8 | 2916 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime, timedelta
from six.moves.urllib.request import Request
from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.jenkins.hooks.jenkins import JenkinsHook
from airflow.providers.jenkins.operators.jenkins_job_trigger import JenkinsJobTriggerOperator
default_args = {
"owner": "airflow",
"retries": 1,
"retry_delay": timedelta(minutes=5),
"depends_on_past": False,
"concurrency": 8,
"max_active_runs": 8,
}
with DAG(
"test_jenkins", default_args=default_args, start_date=datetime(2017, 6, 1), schedule_interval=None
) as dag:
job_trigger = JenkinsJobTriggerOperator(
task_id="trigger_job",
job_name="generate-merlin-config",
parameters={"first_parameter": "a_value", "second_parameter": "18"},
# parameters="resources/parameter.json", You can also pass a path to a json file containing your param
jenkins_connection_id="your_jenkins_connection", # T he connection must be configured first
)
def grab_artifact_from_jenkins(**context):
"""
Grab an artifact from the previous job
The python-jenkins library doesn't expose a method for that
But it's totally possible to build manually the request for that
"""
hook = JenkinsHook("your_jenkins_connection")
jenkins_server = hook.get_jenkins_server()
url = context['task_instance'].xcom_pull(task_ids='trigger_job')
# The JenkinsJobTriggerOperator store the job url in the xcom variable corresponding to the task
# You can then use it to access things or to get the job number
# This url looks like : http://jenkins_url/job/job_name/job_number/
url += "artifact/myartifact.xml" # Or any other artifact name
request = Request(url)
response = jenkins_server.jenkins_open(request)
return response # We store the artifact content in a xcom variable for later use
artifact_grabber = PythonOperator(task_id='artifact_grabber', python_callable=grab_artifact_from_jenkins)
job_trigger >> artifact_grabber
| apache-2.0 | 1,314,985,646,564,380,200 | 42.522388 | 110 | 0.718793 | false |
sebastic/QGIS | python/plugins/processing/algs/gdal/ogr2ogrclipextent.py | 6 | 3700 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ogr2ogrclipextent.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterExtent
from processing.core.outputs import OutputVector
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName
class Ogr2OgrClipExtent(GdalAlgorithm):
OUTPUT_LAYER = 'OUTPUT_LAYER'
INPUT_LAYER = 'INPUT_LAYER'
CLIP_EXTENT = 'CLIP_EXTENT'
OPTIONS = 'OPTIONS'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Clip vectors by extent')
self.group, self.i18n_group = self.trAlgorithm('[OGR] Geoprocessing')
self.addParameter(ParameterVector(self.INPUT_LAYER,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY], False))
self.addParameter(ParameterExtent(self.CLIP_EXTENT,
self.tr('Clip extent')))
self.addParameter(ParameterString(self.OPTIONS,
self.tr('Additional creation options'), '', optional=True))
self.addOutput(OutputVector(self.OUTPUT_LAYER, self.tr('Clipped (extent)')))
def getConsoleCommands(self):
inLayer = self.getParameterValue(self.INPUT_LAYER)
ogrLayer = ogrConnectionString(inLayer)[1:-1]
clipExtent = self.getParameterValue(self.CLIP_EXTENT)
output = self.getOutputFromName(self.OUTPUT_LAYER)
outFile = output.value
output = ogrConnectionString(outFile)
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
regionCoords = clipExtent.split(',')
arguments.append('-spat')
arguments.append(regionCoords[0])
arguments.append(regionCoords[2])
arguments.append(regionCoords[1])
arguments.append(regionCoords[3])
arguments.append('-clipsrc spat_extent')
if len(options) > 0:
arguments.append(options)
arguments.append(output)
arguments.append(ogrLayer)
arguments.append(ogrLayerName(inLayer))
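        # The extent string is "xmin,xmax,ymin,ymax", hence the index reordering
        # above; the assembled invocation looks roughly like (placeholders):
        #   ogr2ogr -spat <xmin> <ymin> <xmax> <ymax> -clipsrc spat_extent <output> <input> <layer>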
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
return commands
def commandName(self):
return "ogr2ogr"
| gpl-2.0 | 5,683,246,203,710,712,000 | 37.14433 | 108 | 0.572432 | false |
Audacity-Team/Audacity | lib-src/lv2/lv2/plugins/eg02-midigate.lv2/waflib/Tools/msvc.py | 70 | 27831 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,re,tempfile
from waflib import Utils,Task,Logs,Options,Errors
from waflib.Logs import debug,warn
from waflib.TaskGen import after_method,feature
from waflib.Configure import conf
from waflib.Tools import ccroot,c,cxx,ar,winres
g_msvc_systemlibs='''
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
'''.split()
all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm')]
all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
def options(opt):
opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='')
opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='')
def setup_msvc(conf,versions,arch=False):
platforms=getattr(Options.options,'msvc_targets','').split(',')
if platforms==['']:
platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
desired_versions=getattr(Options.options,'msvc_version','').split(',')
if desired_versions==['']:
desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
versiondict=dict(versions)
for version in desired_versions:
try:
targets=dict(versiondict[version])
for target in platforms:
try:
arch,(p1,p2,p3)=targets[target]
compiler,revision=version.rsplit(' ',1)
if arch:
return compiler,revision,p1,p2,p3,arch
else:
return compiler,revision,p1,p2,p3
except KeyError:continue
except KeyError:continue
conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
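# get_msvc_version below probes a toolchain by generating a throwaway batch
# file that calls the vendor's vcvars script and echoes the PATH/INCLUDE/LIB
# it sets; parsing that output recovers the compiler environment without
# polluting the current process.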
@conf
def get_msvc_version(conf,compiler,version,target,vcvars):
debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
batfile=conf.bldnode.make_node('waf-print-msvc.bat')
batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
"""%(vcvars,target))
sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()])
lines=sout.splitlines()
if not lines[0]:
lines.pop(0)
MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
for line in lines:
if line.startswith('PATH='):
path=line[5:]
MSVC_PATH=path.split(';')
elif line.startswith('INCLUDE='):
MSVC_INCDIR=[i for i in line[8:].split(';')if i]
elif line.startswith('LIB='):
MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
env=dict(os.environ)
env.update(PATH=path)
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
cxx=conf.cmd_to_list(cxx)
if'CL'in env:
del(env['CL'])
try:
try:
conf.cmd_and_log(cxx+['/help'],env=env)
		except Exception as e:
debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target))
debug(str(e))
conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
else:
debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
finally:
conf.env[compiler_name]=''
return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf,versions):
version_pattern=re.compile('^v..?.?\...?.?')
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
except WindowsError:
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
except WindowsError:
return
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
if not version_pattern.match(version):
continue
try:
msvc_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
except WindowsError:
continue
if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
targets=[]
for target,arch in all_msvc_platforms:
try:
targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')))))
except conf.errors.ConfigurationError:
pass
versions.append(('wsdk '+version[1:],targets))
def gather_wince_supported_platforms():
supported_wince_platforms=[]
try:
ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
except WindowsError:
try:
ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
except WindowsError:
ce_sdk=''
if not ce_sdk:
return supported_wince_platforms
ce_index=0
while 1:
try:
sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index)
except WindowsError:
break
ce_index=ce_index+1
sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device)
try:
path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir')
except WindowsError:
try:
path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation')
path,xml=os.path.split(path)
except WindowsError:
continue
path=str(path)
path,device=os.path.split(path)
if not device:
path,device=os.path.split(path)
for arch,compiler in all_wince_platforms:
platforms=[]
if os.path.isdir(os.path.join(path,device,'Lib',arch)):
platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
if platforms:
supported_wince_platforms.append((device,platforms))
return supported_wince_platforms
def gather_msvc_detected_versions():
version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')
detected_versions=[]
for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]:
try:
prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
except WindowsError:
try:
prefix='SOFTWARE\\Microsoft\\'+vcver
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
except WindowsError:
continue
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
match=version_pattern.match(version)
if not match:
continue
else:
versionnumber=float(match.group(1))
detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version))
def fun(tup):
return tup[0]
detected_versions.sort(key=fun)
return detected_versions
@conf
def gather_msvc_targets(conf,versions,version,vc_path):
targets=[]
if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
for target,realtarget in all_msvc_platforms[::-1]:
try:
targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat')))))
except conf.errors.ConfigurationError:
pass
elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')):
try:
targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat')))))
except conf.errors.ConfigurationError:
pass
elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')):
try:
targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat')))))
except conf.errors.ConfigurationError:
pass
if targets:
versions.append(('msvc '+version,targets))
@conf
def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms):
for device,platforms in supported_platforms:
cetargets=[]
for platform,compiler,include,lib in platforms:
winCEpath=os.path.join(vc_path,'ce')
if not os.path.isdir(winCEpath):
continue
try:
common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars)
except conf.errors.ConfigurationError:
continue
if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs
incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include]
libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib]
cetargets.append((platform,(platform,(bindirs,incdirs,libdirs))))
if cetargets:
versions.append((device+' '+version,cetargets))
@conf
def gather_winphone_targets(conf,versions,version,vc_path,vsvars):
targets=[]
for target,realtarget in all_msvc_platforms[::-1]:
try:
targets.append((target,(realtarget,conf.get_msvc_version('winphone',version,target,vsvars))))
		except conf.errors.ConfigurationError as e:
pass
if targets:
versions.append(('winphone '+version,targets))
@conf
def gather_msvc_versions(conf,versions):
vc_paths=[]
for(v,version,reg)in gather_msvc_detected_versions():
try:
try:
msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
except WindowsError:
msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir')
vc_paths.append((version,os.path.abspath(str(path))))
except WindowsError:
continue
wince_supported_platforms=gather_wince_supported_platforms()
for version,vc_path in vc_paths:
vs_path=os.path.dirname(vc_path)
vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat')
if wince_supported_platforms and os.path.isfile(vsvars):
conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms)
vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat')
if os.path.isfile(vsvars):
conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars)
for version,vc_path in vc_paths:
vs_path=os.path.dirname(vc_path)
conf.gather_msvc_targets(versions,version,vc_path)
@conf
def gather_icl_versions(conf,versions):
version_pattern=re.compile('^...?.?\....?.?')
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
except WindowsError:
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
except WindowsError:
return
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
if not version_pattern.match(version):
continue
targets=[]
for target,arch in all_icl_platforms:
try:
if target=='intel64':targetDir='EM64T_NATIVE'
else:targetDir=target
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
except conf.errors.ConfigurationError:
pass
except WindowsError:
pass
for target,arch in all_icl_platforms:
try:
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
except conf.errors.ConfigurationError:
pass
except WindowsError:
continue
major=version[0:2]
versions.append(('intel '+major,targets))
@conf
def gather_intel_composer_versions(conf,versions):
version_pattern=re.compile('^...?.?\...?.?.?')
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
except WindowsError:
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
except WindowsError:
return
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
if not version_pattern.match(version):
continue
targets=[]
for target,arch in all_icl_platforms:
try:
if target=='intel64':targetDir='EM64T_NATIVE'
else:targetDir=target
try:
defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
except WindowsError:
if targetDir=='EM64T_NATIVE':
defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
else:
raise WindowsError
uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
					except conf.errors.ConfigurationError as e:
pass
compilervars_warning_attr='_compilervars_warning_key'
if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
setattr(conf,compilervars_warning_attr,False)
patch_url='http://software.intel.com/en-us/forums/topic/328487'
compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
for vscomntools in['VS110COMNTOOLS','VS100COMNTOOLS']:
						if vscomntools in os.environ:
vs_express_path=os.environ[vscomntools]+r'..\IDE\VSWinExpress.exe'
dev_env_path=os.environ[vscomntools]+r'..\IDE\devenv.exe'
if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
except WindowsError:
pass
major=version[0:2]
versions.append(('intel '+major,targets))
@conf
def get_msvc_versions(conf):
if not conf.env['MSVC_INSTALLED_VERSIONS']:
lst=[]
conf.gather_icl_versions(lst)
conf.gather_intel_composer_versions(lst)
conf.gather_wsdk_versions(lst)
conf.gather_msvc_versions(lst)
conf.env['MSVC_INSTALLED_VERSIONS']=lst
return conf.env['MSVC_INSTALLED_VERSIONS']
@conf
def print_all_msvc_detected(conf):
for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
Logs.info(version)
for target,l in targets:
Logs.info("\t"+target)
@conf
def detect_msvc(conf,arch=False):
versions=get_msvc_versions(conf)
return setup_msvc(conf,versions,arch)
@conf
def find_lt_names_msvc(self,libname,is_static=False):
lt_names=['lib%s.la'%libname,'%s.la'%libname,]
for path in self.env['LIBPATH']:
for la in lt_names:
laf=os.path.join(path,la)
dll=None
if os.path.exists(laf):
ltdict=Utils.read_la_file(laf)
lt_libdir=None
if ltdict.get('libdir',''):
lt_libdir=ltdict['libdir']
if not is_static and ltdict.get('library_names',''):
dllnames=ltdict['library_names'].split()
dll=dllnames[0].lower()
dll=re.sub('\.dll$','',dll)
return(lt_libdir,dll,False)
elif ltdict.get('old_library',''):
olib=ltdict['old_library']
if os.path.exists(os.path.join(path,olib)):
return(path,olib,True)
elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)):
return(lt_libdir,olib,True)
else:
return(None,olib,True)
else:
raise self.errors.WafError('invalid libtool object file: %s'%laf)
return(None,None,None)
@conf
def libname_msvc(self,libname,is_static=False):
lib=libname.lower()
lib=re.sub('\.lib$','',lib)
if lib in g_msvc_systemlibs:
return lib
lib=re.sub('^lib','',lib)
if lib=='m':
return None
(lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
if lt_path!=None and lt_libname!=None:
if lt_static==True:
return os.path.join(lt_path,lt_libname)
if lt_path!=None:
_libpaths=[lt_path]+self.env['LIBPATH']
else:
_libpaths=self.env['LIBPATH']
static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
libnames=static_libs
if not is_static:
libnames=dynamic_libs+static_libs
for path in _libpaths:
for libn in libnames:
if os.path.exists(os.path.join(path,libn)):
debug('msvc: lib found: %s'%os.path.join(path,libn))
return re.sub('\.lib$','',libn)
self.fatal("The library %r could not be found"%libname)
return re.sub('\.lib$','',libname)
@conf
def check_lib_msvc(self,libname,is_static=False,uselib_store=None):
libn=self.libname_msvc(libname,is_static)
if not uselib_store:
uselib_store=libname.upper()
if False and is_static:
self.env['STLIB_'+uselib_store]=[libn]
else:
self.env['LIB_'+uselib_store]=[libn]
@conf
def check_libs_msvc(self,libnames,is_static=False):
for libname in Utils.to_list(libnames):
self.check_lib_msvc(libname,is_static)
def configure(conf):
conf.autodetect(True)
conf.find_msvc()
conf.msvc_common_flags()
conf.cc_load_tools()
conf.cxx_load_tools()
conf.cc_add_flags()
conf.cxx_add_flags()
conf.link_add_flags()
conf.visual_studio_add_flags()
@conf
def no_autodetect(conf):
conf.env.NO_MSVC_DETECT=1
configure(conf)
@conf
def autodetect(conf,arch=False):
v=conf.env
if v.NO_MSVC_DETECT:
return
if arch:
compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True)
v['DEST_CPU']=arch
else:
compiler,version,path,includes,libdirs=conf.detect_msvc()
v['PATH']=path
v['INCLUDES']=includes
v['LIBPATH']=libdirs
v['MSVC_COMPILER']=compiler
try:
v['MSVC_VERSION']=float(version)
except Exception:
v['MSVC_VERSION']=float(version[:-3])
def _get_prog_names(conf,compiler):
if compiler=='intel':
compiler_name='ICL'
linker_name='XILINK'
lib_name='XILIB'
else:
compiler_name='CL'
linker_name='LINK'
lib_name='LIB'
return compiler_name,linker_name,lib_name
@conf
def find_msvc(conf):
if sys.platform=='cygwin':
conf.fatal('MSVC module does not work under cygwin Python!')
v=conf.env
path=v['PATH']
compiler=v['MSVC_COMPILER']
version=v['MSVC_VERSION']
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
cxx=None
if v['CXX']:cxx=v['CXX']
elif'CXX'in conf.environ:cxx=conf.environ['CXX']
cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
cxx=conf.cmd_to_list(cxx)
env=dict(conf.environ)
if path:env.update(PATH=';'.join(path))
if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
conf.fatal('the msvc compiler could not be identified')
v['CC']=v['CXX']=cxx
v['CC_NAME']=v['CXX_NAME']='msvc'
if not v['LINK_CXX']:
link=conf.find_program(linker_name,path_list=path)
if link:v['LINK_CXX']=link
else:conf.fatal('%s was not found (linker)'%linker_name)
v['LINK']=link
if not v['LINK_CC']:
v['LINK_CC']=v['LINK_CXX']
if not v['AR']:
stliblink=conf.find_program(lib_name,path_list=path,var='AR')
if not stliblink:return
v['ARFLAGS']=['/NOLOGO']
if v.MSVC_MANIFEST:
conf.find_program('MT',path_list=path,var='MT')
v['MTFLAGS']=['/NOLOGO']
try:
conf.load('winres')
except Errors.WafError:
warn('Resource compiler not found. Compiling resource file is disabled')
@conf
def visual_studio_add_flags(self):
v=self.env
try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x])
except Exception:pass
try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x])
except Exception:pass
@conf
def msvc_common_flags(conf):
v=conf.env
v['DEST_BINFMT']='pe'
v.append_value('CFLAGS',['/nologo'])
v.append_value('CXXFLAGS',['/nologo'])
v['DEFINES_ST']='/D%s'
v['CC_SRC_F']=''
v['CC_TGT_F']=['/c','/Fo']
if v['MSVC_VERSION']>=8:
v['CC_TGT_F']=['/FC']+v['CC_TGT_F']
v['CXX_SRC_F']=''
v['CXX_TGT_F']=['/c','/Fo']
if v['MSVC_VERSION']>=8:
v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F']
v['CPPPATH_ST']='/I%s'
v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:'
v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE']
v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE']
v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX']
v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS']
v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE']
v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT']
v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD']
v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd']
v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd']
v['LIB_ST']='%s.lib'
v['LIBPATH_ST']='/LIBPATH:%s'
v['STLIB_ST']='%s.lib'
v['STLIBPATH_ST']='/LIBPATH:%s'
v.append_value('LINKFLAGS',['/NOLOGO'])
if v['MSVC_MANIFEST']:
v.append_value('LINKFLAGS',['/MANIFEST'])
v['CFLAGS_cshlib']=[]
v['CXXFLAGS_cxxshlib']=[]
v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL']
v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll'
v['implib_PATTERN']='%s.lib'
v['IMPLIB_ST']='/IMPLIB:%s'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib'
v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe'
@after_method('apply_link')
@feature('c','cxx')
def apply_flags_msvc(self):
if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None):
return
is_static=isinstance(self.link_task,ccroot.stlink_task)
subsystem=getattr(self,'subsystem','')
if subsystem:
subsystem='/subsystem:%s'%subsystem
flags=is_static and'ARFLAGS'or'LINKFLAGS'
self.env.append_value(flags,subsystem)
if not is_static:
for f in self.env.LINKFLAGS:
d=f.lower()
if d[1:]=='debug':
pdbnode=self.link_task.outputs[0].change_ext('.pdb')
self.link_task.outputs.append(pdbnode)
try:
self.install_task.source.append(pdbnode)
except AttributeError:
pass
break
@feature('cprogram','cshlib','cxxprogram','cxxshlib')
@after_method('apply_link')
def apply_manifest(self):
if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None):
out_node=self.link_task.outputs[0]
man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
self.link_task.outputs.append(man_node)
self.link_task.do_manifest=True
def exec_mf(self):
env=self.env
mtool=env['MT']
if not mtool:
return 0
self.do_manifest=False
outfile=self.outputs[0].abspath()
manifest=None
for out_node in self.outputs:
if out_node.name.endswith('.manifest'):
manifest=out_node.abspath()
break
if manifest is None:
return 0
mode=''
if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features:
mode='1'
elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features:
mode='2'
debug('msvc: embedding manifest in mode %r'%mode)
lst=[]
lst.append(env['MT'])
lst.extend(Utils.to_list(env['MTFLAGS']))
lst.extend(['-manifest',manifest])
lst.append('-outputresource:%s;%s'%(outfile,mode))
lst=[lst]
return self.exec_command(*lst)
def quote_response_command(self,flag):
if flag.find(' ')>-1:
for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'):
if flag.startswith(x):
flag='%s"%s"'%(x,flag[len(x):])
break
else:
flag='"%s"'%flag
return flag
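# Windows caps command-line length (roughly 8k characters for cmd.exe), so
# exec_response_command below rewrites overly long invocations to pass their
# arguments through a temporary "@file" response file, which the MSVC tools
# read in place of the command line.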
def exec_response_command(self,cmd,**kw):
try:
tmp=None
if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192:
program=cmd[0]
cmd=[self.quote_response_command(x)for x in cmd]
(fd,tmp)=tempfile.mkstemp()
os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
os.close(fd)
cmd=[program,'@'+tmp]
ret=self.generator.bld.exec_command(cmd,**kw)
finally:
if tmp:
try:
os.remove(tmp)
except OSError:
pass
return ret
def exec_command_msvc(self,*k,**kw):
if isinstance(k[0],list):
lst=[]
carry=''
for a in k[0]:
if a=='/Fo'or a=='/doc'or a[-1]==':':
carry=a
else:
lst.append(carry+a)
carry=''
k=[lst]
if self.env['PATH']:
env=dict(self.env.env or os.environ)
env.update(PATH=';'.join(self.env['PATH']))
kw['env']=env
bld=self.generator.bld
try:
if not kw.get('cwd',None):
kw['cwd']=bld.cwd
except AttributeError:
bld.cwd=kw['cwd']=bld.variant_dir
ret=self.exec_response_command(k[0],**kw)
if not ret and getattr(self,'do_manifest',None):
ret=self.exec_mf()
return ret
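# wrap_class below replaces each compile/link task class with a dynamically
# created subclass whose exec_command dispatches to the MSVC-specific path
# (response files, manifest embedding) only when the configured compiler is
# msvc, leaving other toolchains on the original behaviour.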
def wrap_class(class_name):
cls=Task.classes.get(class_name,None)
if not cls:
return None
derived_class=type(class_name,(cls,),{})
def exec_command(self,*k,**kw):
if self.env['CC_NAME']=='msvc':
return self.exec_command_msvc(*k,**kw)
else:
return super(derived_class,self).exec_command(*k,**kw)
derived_class.exec_command=exec_command
derived_class.exec_response_command=exec_response_command
derived_class.quote_response_command=quote_response_command
derived_class.exec_command_msvc=exec_command_msvc
derived_class.exec_mf=exec_mf
return derived_class
for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
wrap_class(k)
def make_winapp(self,family):
append=self.env.append_unique
append('DEFINES','WINAPI_FAMILY=%s'%family)
append('CXXFLAGS','/ZW')
append('CXXFLAGS','/TP')
for lib_path in self.env.LIBPATH:
append('CXXFLAGS','/AI%s'%lib_path)
@feature('winphoneapp')
@after_method('process_use')
@after_method('propagate_uselib_vars')
def make_winphone_app(self):
make_winapp(self,'WINAPI_FAMILY_PHONE_APP')
	self.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib')
	self.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib')
@feature('winapp')
@after_method('process_use')
@after_method('propagate_uselib_vars')
def make_windows_app(self):
make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP')
| mit | 8,346,765,446,274,844,000 | 36.108 | 297 | 0.708239 | false |
logicus4078/vertx-web | src/test/sockjs-protocol/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/mbcharsetprober.py | 2924 | 3268 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
class MultiByteCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mDistributionAnalyzer = None
self._mCodingSM = None
self._mLastChar = [0, 0]
def reset(self):
CharSetProber.reset(self)
if self._mCodingSM:
self._mCodingSM.reset()
if self._mDistributionAnalyzer:
self._mDistributionAnalyzer.reset()
self._mLastChar = [0, 0]
def get_charset_name(self):
pass
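    # feed() below drives two detectors in parallel: a byte-level coding state
    # machine that rejects sequences illegal in the candidate encoding, and a
    # character-distribution analyzer that scores how typical the decoded
    # characters are for the target language.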
def feed(self, aBuf):
aLen = len(aBuf)
for i in range(0, aLen):
codingState = self._mCodingSM.next_state(aBuf[i])
if codingState == constants.eError:
if constants._debug:
sys.stderr.write(self.get_charset_name()
+ ' prober hit error at byte ' + str(i)
+ '\n')
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
charLen = self._mCodingSM.get_current_charlen()
if i == 0:
self._mLastChar[1] = aBuf[0]
self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
else:
self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
charLen)
self._mLastChar[0] = aBuf[aLen - 1]
if self.get_state() == constants.eDetecting:
if (self._mDistributionAnalyzer.got_enough_data() and
(self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
return self._mDistributionAnalyzer.get_confidence()
| apache-2.0 | 3,449,815,437,914,763,300 | 37 | 78 | 0.594859 | false |
longmen21/edx-platform | lms/djangoapps/verify_student/tests/test_views.py | 3 | 106566 | # encoding: utf-8
"""
Tests of verify_student views.
"""
import json
import urllib
from datetime import timedelta, datetime
from uuid import uuid4
import ddt
import httpretty
import mock
from nose.plugins.attrib import attr
import boto
import moto
import pytz
from bs4 import BeautifulSoup
from mock import patch, Mock, ANY
import requests
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from django.test import TestCase
from django.test.client import Client, RequestFactory
from django.test.utils import override_settings
from django.utils import timezone
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from opaque_keys.edx.keys import UsageKey
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from courseware.url_helpers import get_redirect_url
from common.test.utils import XssTestMixin
from commerce.models import CommerceConfiguration
from commerce.tests import TEST_PAYMENT_DATA, TEST_API_URL, TEST_API_SIGNING_KEY, TEST_PUBLIC_URL_ROOT
from embargo.test_utils import restrict_course
from openedx.core.djangoapps.user_api.accounts.api import get_account_settings
from openedx.core.djangoapps.theming.tests.test_util import with_comprehensive_theme
from shoppingcart.models import Order, CertificateItem
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from student.models import CourseEnrollment
from util.date_utils import get_default_time_display
from util.testing import UrlResetMixin
from lms.djangoapps.verify_student.views import (
checkout_with_ecommerce_service, render_to_response, PayAndVerifyView,
_compose_message_reverification_email
)
from lms.djangoapps.verify_student.models import (
VerificationDeadline, SoftwareSecurePhotoVerification,
VerificationCheckpoint, VerificationStatus,
IcrvStatusEmailsConfiguration,
)
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import check_mongo_calls
def mock_render_to_response(*args, **kwargs):
return render_to_response(*args, **kwargs)
render_mock = Mock(side_effect=mock_render_to_response)
PAYMENT_DATA_KEYS = {'payment_processor_name', 'payment_page_url', 'payment_form_data'}
@attr(shard=2)
class StartView(TestCase):
"""
This view is for the first time student is
attempting a Photo Verification.
"""
def start_url(self, course_id=""):
return "/verify_student/{0}".format(urllib.quote(course_id))
def test_start_new_verification(self):
"""
Test the case where the user has no pending `PhotoVerificationAttempts`,
but is just starting their first.
"""
UserFactory.create(username="rusty", password="test")
self.client.login(username="rusty", password="test")
def must_be_logged_in(self):
self.assertHttpForbidden(self.client.get(self.start_url()))
@attr(shard=2)
@ddt.ddt
class TestPayAndVerifyView(UrlResetMixin, ModuleStoreTestCase, XssTestMixin):
"""
Tests for the payment and verification flow views.
"""
MIN_PRICE = 12
USERNAME = "test_user"
PASSWORD = "test_password"
NOW = datetime.now(pytz.UTC)
YESTERDAY = NOW - timedelta(days=1)
TOMORROW = NOW + timedelta(days=1)
URLCONF_MODULES = ['embargo']
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
def setUp(self):
super(TestPayAndVerifyView, self).setUp()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(result, msg="Could not log in")
@ddt.data(
("verified", "verify_student_start_flow"),
("professional", "verify_student_start_flow"),
("verified", "verify_student_begin_flow"),
("professional", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_not_verified(self, course_mode, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
self._assert_upgrade_session_flag(False)
@httpretty.activate
@override_settings(
ECOMMERCE_API_URL=TEST_API_URL,
ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY,
ECOMMERCE_PUBLIC_URL_ROOT=TEST_PUBLIC_URL_ROOT
)
def test_start_flow_with_ecommerce(self):
"""Verify user gets redirected to ecommerce checkout when ecommerce checkout is enabled."""
checkout_page = '/test_basket/'
sku = 'TESTSKU'
# When passing a SKU ecommerce api gets called.
httpretty.register_uri(
httpretty.GET,
"{}/payment/processors/".format(TEST_API_URL),
body=json.dumps(['foo', 'bar']),
content_type="application/json",
)
httpretty.register_uri(httpretty.GET, "{}{}".format(TEST_PUBLIC_URL_ROOT, checkout_page))
CommerceConfiguration.objects.create(
checkout_on_ecommerce_service=True,
single_course_checkout_page=checkout_page
)
course = self._create_course('verified', sku=sku)
self._enroll(course.id)
response = self._get_page('verify_student_start_flow', course.id, expected_status_code=302)
expected_page = '{}{}?sku={}'.format(TEST_PUBLIC_URL_ROOT, checkout_page, sku)
self.assertRedirects(response, expected_page, fetch_redirect_response=False)
@ddt.data(
("no-id-professional", "verify_student_start_flow"),
("no-id-professional", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_with_no_id_professional(self, course_mode, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [])
def test_ab_testing_page(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page("verify_student_begin_flow", course.id)
self._assert_displayed_mode(response, "verified")
self.assertContains(response, "Upgrade to a Verified Certificate")
self.assertContains(response, "Before you upgrade to a certificate track,")
self.assertContains(response, "To receive a certificate, you must also verify your identity")
self.assertContains(response, "You will use your webcam to take a picture of")
@ddt.data(
("expired", "verify_student_start_flow"),
("denied", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_expired_or_denied_verification(self, verification_status, payment_flow):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status(verification_status)
response = self._get_page(payment_flow, course.id)
# Expect the same content as when the user has not verified
self._assert_steps_displayed(
response,
[PayAndVerifyView.INTRO_STEP] + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.INTRO_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data(
("verified", "submitted", "verify_student_start_flow"),
("verified", "approved", "verify_student_start_flow"),
("verified", "error", "verify_student_start_flow"),
("professional", "submitted", "verify_student_start_flow"),
("no-id-professional", None, "verify_student_start_flow"),
("verified", "submitted", "verify_student_begin_flow"),
("verified", "approved", "verify_student_begin_flow"),
("verified", "error", "verify_student_begin_flow"),
("professional", "submitted", "verify_student_begin_flow"),
("no-id-professional", None, "verify_student_begin_flow"),
)
@ddt.unpack
def test_start_flow_already_verified(self, course_mode, verification_status, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id)
self._set_verification_status(verification_status)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [])
@ddt.data(
("verified", "verify_student_start_flow"),
("professional", "verify_student_start_flow"),
("verified", "verify_student_begin_flow"),
("professional", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_already_paid(self, course_mode, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id, course_mode)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
[PayAndVerifyView.INTRO_STEP] + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.INTRO_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_start_flow_not_enrolled(self, payment_flow):
course = self._create_course("verified")
self._set_verification_status("submitted")
        # This shouldn't happen if the student has been auto-enrolled,
        # but if they somehow end up on this page without enrolling,
        # treat them as if they need to pay.
        response = self._get_page(payment_flow, course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_requirements_displayed(response, [])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_start_flow_unenrolled(self, payment_flow):
course = self._create_course("verified")
self._set_verification_status("submitted")
self._enroll(course.id, "verified")
self._unenroll(course.id)
# If unenrolled, treat them like they haven't paid at all
# (we assume that they've gotten a refund or didn't pay initially)
response = self._get_page(payment_flow, course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_requirements_displayed(response, [])
@ddt.data(
("verified", "submitted", "verify_student_start_flow"),
("verified", "approved", "verify_student_start_flow"),
("professional", "submitted", "verify_student_start_flow"),
("verified", "submitted", "verify_student_begin_flow"),
("verified", "approved", "verify_student_begin_flow"),
("professional", "submitted", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_already_verified_and_paid(self, course_mode, verification_status, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id, course_mode)
self._set_verification_status(verification_status)
response = self._get_page(
payment_flow,
course.id,
expected_status_code=302
)
self._assert_redirects_to_dashboard(response)
@with_comprehensive_theme("edx.org")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_pay_and_verify_hides_header_nav(self, payment_flow):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page(payment_flow, course.id)
# Verify that the header navigation links are hidden for the edx.org version
self.assertNotContains(response, "How it Works")
self.assertNotContains(response, "Find courses")
self.assertNotContains(response, "Schools & Partners")
def test_verify_now(self):
# We've already paid, and now we're trying to verify
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page('verify_student_verify_now', course.id)
self._assert_messaging(response, PayAndVerifyView.VERIFY_NOW_MSG)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
# Expect that *all* steps are displayed,
# but we start after the payment step (because it's already completed).
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.FACE_PHOTO_STEP
)
# These will be hidden from the user anyway since they're starting
# after the payment step.
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
def test_verify_now_already_verified(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
# Already verified, so if we somehow end up here,
# redirect immediately to the dashboard
response = self._get_page(
'verify_student_verify_now',
course.id,
expected_status_code=302
)
self._assert_redirects_to_dashboard(response)
def test_verify_now_user_details(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page('verify_student_verify_now', course.id)
self._assert_user_details(response, self.user.profile.name)
@ddt.data(
"verify_student_verify_now",
"verify_student_payment_confirmation"
)
def test_verify_now_not_enrolled(self, page_name):
course = self._create_course("verified")
response = self._get_page(page_name, course.id, expected_status_code=302)
self._assert_redirects_to_start_flow(response, course.id)
@ddt.data(
"verify_student_verify_now",
"verify_student_payment_confirmation"
)
def test_verify_now_unenrolled(self, page_name):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._unenroll(course.id)
response = self._get_page(page_name, course.id, expected_status_code=302)
self._assert_redirects_to_start_flow(response, course.id)
@ddt.data(
"verify_student_verify_now",
"verify_student_payment_confirmation"
)
def test_verify_now_not_paid(self, page_name):
course = self._create_course("verified")
self._enroll(course.id)
response = self._get_page(page_name, course.id, expected_status_code=302)
self._assert_redirects_to_upgrade(response, course.id)
def test_payment_confirmation(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page('verify_student_payment_confirmation', course.id)
self._assert_messaging(response, PayAndVerifyView.PAYMENT_CONFIRMATION_MSG)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
# Expect that *all* steps are displayed,
# but we start at the payment confirmation step
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.PAYMENT_CONFIRMATION_STEP,
)
# These will be hidden from the user anyway since they're starting
# after the payment step. We're already including the payment
# steps, so it's easier to include these as well.
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_payment_cannot_skip(self, payment_flow):
"""
Simple test to verify that certain steps cannot be skipped. This test sets up
a scenario where the user should be on the MAKE_PAYMENT_STEP, but is trying to
skip it. Despite setting the parameter, the current step should still be
MAKE_PAYMENT_STEP.
"""
course = self._create_course("verified")
response = self._get_page(
payment_flow,
course.id,
skip_first_step=True
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
# Expect that *all* steps are displayed,
# but we start on the first verify step
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP,
)
def test_payment_confirmation_already_verified(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
response = self._get_page('verify_student_payment_confirmation', course.id)
# Other pages would redirect to the dashboard at this point,
# because the user has paid and verified. However, we want
# the user to see the confirmation page even if there
# isn't anything for them to do here except return
# to the dashboard.
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.PAYMENT_CONFIRMATION_STEP,
)
def test_payment_confirmation_already_verified_skip_first_step(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
response = self._get_page(
'verify_student_payment_confirmation',
course.id,
skip_first_step=True
)
# There are no other steps, so stay on the
# payment confirmation step
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.PAYMENT_CONFIRMATION_STEP,
)
@ddt.data(
(YESTERDAY, True),
(TOMORROW, False)
)
@ddt.unpack
def test_payment_confirmation_course_details(self, course_start, show_courseware_url):
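        # The courseware link should be shown only once the course has started.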
course = self._create_course("verified", course_start=course_start)
self._enroll(course.id, "verified")
response = self._get_page('verify_student_payment_confirmation', course.id)
courseware_url = (
reverse("course_root", kwargs={'course_id': unicode(course.id)})
if show_courseware_url else ""
)
self._assert_course_details(
response,
unicode(course.id),
course.display_name,
course.start_datetime_text(),
courseware_url
)
@ddt.data("verified", "professional")
def test_upgrade(self, course_mode):
course = self._create_course(course_mode)
self._enroll(course.id)
response = self._get_page('verify_student_upgrade_and_verify', course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.UPGRADE_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
self._assert_upgrade_session_flag(True)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
def test_upgrade_already_verified(self):
course = self._create_course("verified")
self._enroll(course.id)
self._set_verification_status("submitted")
response = self._get_page('verify_student_upgrade_and_verify', course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.UPGRADE_MSG)
self._assert_requirements_displayed(response, [])
def test_upgrade_already_paid(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
# If we've already paid, then the upgrade messaging
# won't make much sense. Redirect them to the
# "verify later" page instead.
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_verify_start(response, course.id)
def test_upgrade_already_verified_and_paid(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
# Already verified and paid, so redirect to the dashboard
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_dashboard(response)
def test_upgrade_not_enrolled(self):
course = self._create_course("verified")
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_start_flow(response, course.id)
def test_upgrade_unenrolled(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._unenroll(course.id)
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_start_flow(response, course.id)
@ddt.data([], ["honor"], ["honor", "audit"])
def test_no_verified_mode_for_course(self, modes_available):
course = self._create_course(*modes_available)
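        # None of these modes is paid/verifiable, so every page in the
        # verification flow should return a 404.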
pages = [
'verify_student_start_flow',
'verify_student_begin_flow',
'verify_student_verify_now',
'verify_student_upgrade_and_verify',
]
for page_name in pages:
self._get_page(
page_name,
course.id,
expected_status_code=404
)
@ddt.data(
([], "verify_student_start_flow"),
(["no-id-professional", "professional"], "verify_student_start_flow"),
(["honor", "audit"], "verify_student_start_flow"),
([], "verify_student_begin_flow"),
(["no-id-professional", "professional"], "verify_student_begin_flow"),
(["honor", "audit"], "verify_student_begin_flow"),
)
@ddt.unpack
def test_no_id_professional_entry_point(self, modes_available, payment_flow):
course = self._create_course(*modes_available)
if "no-id-professional" in modes_available or "professional" in modes_available:
self._get_page(payment_flow, course.id, expected_status_code=200)
else:
self._get_page(payment_flow, course.id, expected_status_code=404)
@ddt.data(
"verify_student_start_flow",
"verify_student_begin_flow",
"verify_student_verify_now",
"verify_student_upgrade_and_verify",
)
def test_require_login(self, url_name):
self.client.logout()
course = self._create_course("verified")
response = self._get_page(url_name, course.id, expected_status_code=302)
original_url = reverse(url_name, kwargs={'course_id': unicode(course.id)})
login_url = u"{login_url}?next={original_url}".format(
login_url=reverse('signin_user'),
original_url=original_url
)
self.assertRedirects(response, login_url)
@ddt.data(
"verify_student_start_flow",
"verify_student_begin_flow",
"verify_student_verify_now",
"verify_student_upgrade_and_verify",
)
def test_no_such_course(self, url_name):
non_existent_course = CourseLocator(course="test", org="test", run="test")
self._get_page(
url_name,
non_existent_course,
expected_status_code=404
)
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_account_not_active(self, payment_flow):
self.user.is_active = False
self.user.save()
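        # An inactive account adds the account-activation requirement
        # on top of the usual photo ID and webcam requirements.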
course = self._create_course("verified")
response = self._get_page(payment_flow, course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_requirements_displayed(response, [
PayAndVerifyView.ACCOUNT_ACTIVATION_REQ,
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_no_contribution(self, payment_flow):
# Do NOT specify a contribution for the course in a session var.
course = self._create_course("verified")
response = self._get_page(payment_flow, course.id)
self._assert_contribution_amount(response, "")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_contribution_other_course(self, payment_flow):
# Specify a contribution amount for another course in the session
course = self._create_course("verified")
other_course_id = CourseLocator(org="other", run="test", course="test")
self._set_contribution("12.34", other_course_id)
        # Expect that the contribution amount is NOT pre-filled.
response = self._get_page(payment_flow, course.id)
self._assert_contribution_amount(response, "")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_contribution(self, payment_flow):
# Specify a contribution amount for this course in the session
course = self._create_course("verified")
self._set_contribution("12.34", course.id)
        # Expect that the contribution amount is pre-filled.
response = self._get_page(payment_flow, course.id)
self._assert_contribution_amount(response, "12.34")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_verification_deadline(self, payment_flow):
deadline = datetime.now(tz=pytz.UTC) + timedelta(days=360)
course = self._create_course("verified")
# Set a deadline on the course mode AND on the verification deadline model.
# This simulates the common case in which the upgrade deadline (course mode expiration)
# and the verification deadline are the same.
# NOTE: we used to use the course mode expiration datetime for BOTH of these deadlines,
# before the VerificationDeadline model was introduced.
self._set_deadlines(course.id, upgrade_deadline=deadline, verification_deadline=deadline)
# Expect that the expiration date is set
response = self._get_page(payment_flow, course.id)
data = self._get_page_data(response)
self.assertEqual(data['verification_deadline'], deadline.strftime("%b %d, %Y at %H:%M UTC"))
def test_course_mode_expired(self):
deadline = datetime.now(tz=pytz.UTC) + timedelta(days=-360)
course = self._create_course("verified")
# Set the upgrade deadline (course mode expiration) and verification deadline
# to the same value. This used to be the default when we used the expiration datetime
# for BOTH values.
self._set_deadlines(course.id, upgrade_deadline=deadline, verification_deadline=deadline)
# Need to be enrolled
self._enroll(course.id, "verified")
# The course mode has expired, so expect an explanation
# to the student that the deadline has passed
response = self._get_page("verify_student_verify_now", course.id)
self.assertContains(response, "verification deadline")
self.assertContains(response, deadline.strftime("%b %d, %Y at %H:%M UTC"))
@ddt.data(datetime.now(tz=pytz.UTC) + timedelta(days=360), None)
def test_course_mode_expired_verification_deadline_in_future(self, verification_deadline):
"""Verify that student can not upgrade in expired course mode."""
course_modes = ("verified", "credit")
course = self._create_course(*course_modes)
# Set the upgrade deadline of verified mode in the past, but the verification
# deadline in the future.
self._set_deadlines(
course.id,
upgrade_deadline=datetime.now(tz=pytz.UTC) + timedelta(days=-360),
verification_deadline=verification_deadline,
)
# Set the upgrade deadline for credit mode in future.
self._set_deadlines(
course.id,
upgrade_deadline=datetime.now(tz=pytz.UTC) + timedelta(days=360),
verification_deadline=verification_deadline,
mode_slug="credit"
)
        # Try to pay or upgrade.
        # We should get an error message, since the deadline has passed
        # and direct purchase of the credit mode is not allowed.
for page_name in ["verify_student_start_flow",
"verify_student_begin_flow",
"verify_student_upgrade_and_verify"]:
response = self._get_page(page_name, course.id)
self.assertContains(response, "Upgrade Deadline Has Passed")
# Simulate paying for the course and enrolling
self._enroll(course.id, "verified")
# Enter the verification part of the flow
# Expect that we are able to verify
response = self._get_page("verify_student_verify_now", course.id)
self.assertNotContains(response, "Verification is no longer available")
data = self._get_page_data(response)
self.assertEqual(data['message_key'], PayAndVerifyView.VERIFY_NOW_MSG)
        # Check that the selected mode is the expired verified mode, not the
        # credit mode, because direct enrollment in the credit mode is not allowed.
self.assertEqual(data['course_mode_slug'], "verified")
# Check that the verification deadline (rather than the upgrade deadline) is displayed
if verification_deadline is not None:
self.assertEqual(data["verification_deadline"], verification_deadline.strftime("%b %d, %Y at %H:%M UTC"))
else:
self.assertEqual(data["verification_deadline"], "")
def test_course_mode_not_expired_verification_deadline_passed(self):
course = self._create_course("verified")
# Set the upgrade deadline in the future
# and the verification deadline in the past
        # We try to discourage this with validation rules,
        # since it's a bad user experience
        # to purchase a verified track and then not be able to verify,
        # but if it happens we need to handle it gracefully.
upgrade_deadline_in_future = datetime.now(tz=pytz.UTC) + timedelta(days=360)
verification_deadline_in_past = datetime.now(tz=pytz.UTC) + timedelta(days=-360)
self._set_deadlines(
course.id,
upgrade_deadline=upgrade_deadline_in_future,
verification_deadline=verification_deadline_in_past,
)
# Enroll as verified (simulate purchasing the verified enrollment)
self._enroll(course.id, "verified")
# Even though the upgrade deadline is in the future,
# the verification deadline has passed, so we should see an error
# message when we go to verify.
response = self._get_page("verify_student_verify_now", course.id)
self.assertContains(response, "verification deadline")
self.assertContains(response, verification_deadline_in_past.strftime("%b %d, %Y at %H:%M UTC"))
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_embargo_restrict(self, payment_flow):
course = self._create_course("verified")
with restrict_course(course.id) as redirect_url:
# Simulate that we're embargoed from accessing this
# course based on our IP address.
response = self._get_page(payment_flow, course.id, expected_status_code=302)
self.assertRedirects(response, redirect_url)
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_embargo_allow(self, payment_flow):
course = self._create_course("verified")
self._get_page(payment_flow, course.id)
def _create_course(self, *course_modes, **kwargs):
"""Create a new course with the specified course modes. """
course = CourseFactory.create(display_name='<script>alert("XSS")</script>')
if kwargs.get('course_start'):
course.start = kwargs.get('course_start')
modulestore().update_item(course, ModuleStoreEnum.UserID.test)
mode_kwargs = {}
if kwargs.get('sku'):
mode_kwargs['sku'] = kwargs['sku']
for course_mode in course_modes:
min_price = (0 if course_mode in ["honor", "audit"] else self.MIN_PRICE)
CourseModeFactory.create(
course_id=course.id,
mode_slug=course_mode,
mode_display_name=course_mode,
min_price=min_price,
**mode_kwargs
)
return course
def _enroll(self, course_key, mode=CourseMode.DEFAULT_MODE_SLUG):
"""Enroll the user in a course. """
CourseEnrollmentFactory.create(
user=self.user,
course_id=course_key,
mode=mode
)
def _unenroll(self, course_key):
"""Unenroll the user from a course. """
CourseEnrollment.unenroll(self.user, course_key)
def _set_verification_status(self, status):
"""Set the user's photo verification status. """
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
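        # Walk the attempt through the Software Secure workflow: every
        # non-None status implies the photos were submitted, and "expired"
        # is modeled as an approved attempt older than the DAYS_GOOD_FOR window.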
if status in ["submitted", "approved", "expired", "denied", "error"]:
attempt.mark_ready()
attempt.submit()
if status in ["approved", "expired"]:
attempt.approve()
elif status == "denied":
attempt.deny("Denied!")
elif status == "error":
attempt.system_error("Error!")
if status == "expired":
days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
attempt.created_at = datetime.now(pytz.UTC) - timedelta(days=(days_good_for + 1))
attempt.save()
def _set_deadlines(self, course_key, upgrade_deadline=None, verification_deadline=None, mode_slug="verified"):
"""
Set the upgrade and verification deadlines.
Arguments:
course_key (CourseKey): Identifier for the course.
Keyword Arguments:
upgrade_deadline (datetime): Datetime after which a user cannot
upgrade to a verified mode.
verification_deadline (datetime): Datetime after which a user cannot
submit an initial verification attempt.
"""
# Set the course mode expiration (same as the "upgrade" deadline)
mode = CourseMode.objects.get(course_id=course_key, mode_slug=mode_slug)
mode.expiration_datetime = upgrade_deadline
mode.save()
# Set the verification deadline
VerificationDeadline.set_deadline(course_key, verification_deadline)
def _set_contribution(self, amount, course_id):
"""Set the contribution amount pre-filled in a session var. """
session = self.client.session
session["donation_for_course"] = {
unicode(course_id): amount
}
session.save()
def _get_page(self, url_name, course_key, expected_status_code=200, skip_first_step=False):
"""Retrieve one of the verification pages. """
url = reverse(url_name, kwargs={"course_id": unicode(course_key)})
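        # The "skip-first-step" query param asks the view to advance past the
        # first step in the flow; tests assert required steps cannot be skipped.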
if skip_first_step:
url += "?skip-first-step=1"
response = self.client.get(url)
self.assertEqual(response.status_code, expected_status_code)
return response
def _assert_displayed_mode(self, response, expected_mode):
"""Check whether a course mode is displayed. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['course_mode_slug'], expected_mode)
def _assert_steps_displayed(self, response, expected_steps, expected_current_step):
"""Check whether steps in the flow are displayed to the user. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['current_step'], expected_current_step)
self.assertEqual(expected_steps, [
step['name'] for step in
response_dict['display_steps']
])
def _assert_messaging(self, response, expected_message):
"""Check the messaging on the page. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['message_key'], expected_message)
def _assert_requirements_displayed(self, response, requirements):
"""Check that requirements are displayed on the page. """
response_dict = self._get_page_data(response)
for req, displayed in response_dict['requirements'].iteritems():
if req in requirements:
self.assertTrue(displayed, msg="Expected '{req}' requirement to be displayed".format(req=req))
else:
self.assertFalse(displayed, msg="Expected '{req}' requirement to be hidden".format(req=req))
def _assert_course_details(self, response, course_key, display_name, start_text, url):
"""Check the course information on the page. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['course_key'], course_key)
self.assertEqual(response_dict['course_name'], display_name)
self.assertEqual(response_dict['course_start_date'], start_text)
self.assertEqual(response_dict['courseware_url'], url)
def _assert_user_details(self, response, full_name):
"""Check the user detail information on the page. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['full_name'], full_name)
def _assert_contribution_amount(self, response, expected_amount):
"""Check the pre-filled contribution amount. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['contribution_amount'], expected_amount)
def _get_page_data(self, response):
"""Retrieve the data attributes rendered on the page. """
soup = BeautifulSoup(response.content)
pay_and_verify_div = soup.find(id="pay-and-verify-container")
self.assertIsNot(
pay_and_verify_div, None,
msg=(
"Could not load pay and verify flow data. "
"Maybe this isn't the pay and verify page?"
)
)
return {
'full_name': pay_and_verify_div['data-full-name'],
'course_key': pay_and_verify_div['data-course-key'],
'course_name': pay_and_verify_div['data-course-name'],
'course_start_date': pay_and_verify_div['data-course-start-date'],
'courseware_url': pay_and_verify_div['data-courseware-url'],
'course_mode_name': pay_and_verify_div['data-course-mode-name'],
'course_mode_slug': pay_and_verify_div['data-course-mode-slug'],
'display_steps': json.loads(pay_and_verify_div['data-display-steps']),
'current_step': pay_and_verify_div['data-current-step'],
'requirements': json.loads(pay_and_verify_div['data-requirements']),
'message_key': pay_and_verify_div['data-msg-key'],
'contribution_amount': pay_and_verify_div['data-contribution-amount'],
'verification_deadline': pay_and_verify_div['data-verification-deadline']
}
def _assert_upgrade_session_flag(self, is_upgrade):
"""Check that the session flag for attempting an upgrade is set. """
self.assertEqual(self.client.session.get('attempting_upgrade'), is_upgrade)
def _assert_redirects_to_dashboard(self, response):
"""Check that the page redirects to the student dashboard. """
self.assertRedirects(response, reverse('dashboard'))
def _assert_redirects_to_start_flow(self, response, course_id):
"""Check that the page redirects to the start of the payment/verification flow. """
url = reverse('verify_student_start_flow', kwargs={'course_id': unicode(course_id)})
self.assertRedirects(response, url)
def _assert_redirects_to_verify_start(self, response, course_id, status_code=302):
"""Check that the page redirects to the "verify later" part of the flow. """
url = reverse('verify_student_verify_now', kwargs={'course_id': unicode(course_id)})
self.assertRedirects(response, url, status_code)
def _assert_redirects_to_upgrade(self, response, course_id):
"""Check that the page redirects to the "upgrade" part of the flow. """
url = reverse('verify_student_upgrade_and_verify', kwargs={'course_id': unicode(course_id)})
self.assertRedirects(response, url)
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_course_upgrade_page_with_unicode_and_special_values_in_display_name(self, payment_flow):
"""Check the course information on the page. """
mode_display_name = u"Introduction à l'astrophysique"
course = CourseFactory.create(display_name=mode_display_name)
for course_mode in [CourseMode.DEFAULT_MODE_SLUG, "verified"]:
min_price = (self.MIN_PRICE if course_mode != CourseMode.DEFAULT_MODE_SLUG else 0)
CourseModeFactory.create(
course_id=course.id,
mode_slug=course_mode,
mode_display_name=mode_display_name,
min_price=min_price
)
self._enroll(course.id)
response_dict = self._get_page_data(self._get_page(payment_flow, course.id))
self.assertEqual(response_dict['course_name'], mode_display_name)
@httpretty.activate
@override_settings(ECOMMERCE_API_URL=TEST_API_URL, ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY)
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_processors_api(self, payment_flow):
"""
Check that when working with a product being processed by the
ecommerce api, we correctly call to that api for the list of
available payment processors.
"""
        # Setting a nonempty SKU on the course will trigger calls to
        # the ecommerce API to get payment processors.
course = self._create_course("verified", sku='nonempty-sku')
self._enroll(course.id)
# mock out the payment processors endpoint
httpretty.register_uri(
httpretty.GET,
"{}/payment/processors/".format(TEST_API_URL),
body=json.dumps(['foo', 'bar']),
content_type="application/json",
)
# make the server request
response = self._get_page(payment_flow, course.id)
self.assertEqual(response.status_code, 200)
        # Ensure the mock API call was made. NOTE: the following line
        # approximates the check: if the headers are empty, it means
        # there was no last request.
self.assertNotEqual(httpretty.last_request().headers, {})
class CheckoutTestMixin(object):
"""
Mixin implementing test methods that should behave identically regardless
of which backend is used (shoppingcart or ecommerce service). Subclasses
immediately follow for each backend, which inherit from TestCase and
define methods needed to customize test parameters, and patch the
appropriate checkout method.
    Though the view endpoint under test is named 'create_order' for
    backward-compatibility, the effect of using this endpoint is to choose a
    specific product (i.e. course mode) and trigger immediate checkout.
"""
def setUp(self):
""" Create a user and course. """
super(CheckoutTestMixin, self).setUp()
self.user = UserFactory.create(username="test", password="test")
self.course = CourseFactory.create()
for mode, min_price in (('audit', 0), ('honor', 0), ('verified', 100)):
CourseModeFactory.create(mode_slug=mode, course_id=self.course.id, min_price=min_price, sku=self.make_sku())
self.client.login(username="test", password="test")
def _assert_checked_out(
self,
post_params,
patched_create_order,
expected_course_key,
expected_mode_slug,
expected_status_code=200
):
"""
DRY helper.
Ensures that checkout functions were invoked as
expected during execution of the create_order endpoint.
"""
post_params.setdefault('processor', None)
response = self.client.post(reverse('verify_student_create_order'), post_params)
self.assertEqual(response.status_code, expected_status_code)
if expected_status_code == 200:
# ensure we called checkout at all
self.assertTrue(patched_create_order.called)
# ensure checkout args were correct
args = self._get_checkout_args(patched_create_order)
self.assertEqual(args['user'], self.user)
self.assertEqual(args['course_key'], expected_course_key)
self.assertEqual(args['course_mode'].slug, expected_mode_slug)
# ensure response data was correct
data = json.loads(response.content)
self.assertEqual(set(data.keys()), PAYMENT_DATA_KEYS)
else:
self.assertFalse(patched_create_order.called)
def test_create_order(self, patched_create_order):
# Create an order
params = {
'course_id': unicode(self.course.id),
'contribution': 100,
}
self._assert_checked_out(params, patched_create_order, self.course.id, 'verified')
def test_create_order_prof_ed(self, patched_create_order):
# Create a prof ed course
course = CourseFactory.create()
CourseModeFactory.create(mode_slug="professional", course_id=course.id, min_price=10, sku=self.make_sku())
# Create an order for a prof ed course
params = {'course_id': unicode(course.id)}
self._assert_checked_out(params, patched_create_order, course.id, 'professional')
def test_create_order_no_id_professional(self, patched_create_order):
# Create a no-id-professional ed course
course = CourseFactory.create()
CourseModeFactory.create(mode_slug="no-id-professional", course_id=course.id, min_price=10, sku=self.make_sku())
# Create an order for a prof ed course
params = {'course_id': unicode(course.id)}
self._assert_checked_out(params, patched_create_order, course.id, 'no-id-professional')
def test_create_order_for_multiple_paid_modes(self, patched_create_order):
# Create a no-id-professional ed course
course = CourseFactory.create()
CourseModeFactory.create(mode_slug="no-id-professional", course_id=course.id, min_price=10, sku=self.make_sku())
CourseModeFactory.create(mode_slug="professional", course_id=course.id, min_price=10, sku=self.make_sku())
# Create an order for a prof ed course
params = {'course_id': unicode(course.id)}
# TODO jsa - is this the intended behavior?
self._assert_checked_out(params, patched_create_order, course.id, 'no-id-professional')
def test_create_order_bad_donation_amount(self, patched_create_order):
# Create an order
params = {
'course_id': unicode(self.course.id),
'contribution': '99.9'
}
self._assert_checked_out(params, patched_create_order, None, None, expected_status_code=400)
def test_create_order_good_donation_amount(self, patched_create_order):
# Create an order
params = {
'course_id': unicode(self.course.id),
'contribution': '100.0'
}
self._assert_checked_out(params, patched_create_order, self.course.id, 'verified')
def test_old_clients(self, patched_create_order):
        # Ensure the response to a request from a stale JS client is modified
        # so as not to break behavior in the browser.
        # (XCOM-214) Remove after release.
expected_payment_data = TEST_PAYMENT_DATA.copy()
expected_payment_data['payment_form_data'].update({'foo': 'bar'})
patched_create_order.return_value = expected_payment_data
# there is no 'processor' parameter in the post payload, so the response should only contain payment form data.
params = {'course_id': unicode(self.course.id), 'contribution': 100}
response = self.client.post(reverse('verify_student_create_order'), params)
self.assertEqual(response.status_code, 200)
self.assertTrue(patched_create_order.called)
# ensure checkout args were correct
args = self._get_checkout_args(patched_create_order)
self.assertEqual(args['user'], self.user)
self.assertEqual(args['course_key'], self.course.id)
self.assertEqual(args['course_mode'].slug, 'verified')
# ensure response data was correct
data = json.loads(response.content)
self.assertEqual(data, {'foo': 'bar'})
@attr(shard=2)
@patch('lms.djangoapps.verify_student.views.checkout_with_shoppingcart', return_value=TEST_PAYMENT_DATA, autospec=True)
class TestCreateOrderShoppingCart(CheckoutTestMixin, ModuleStoreTestCase):
""" Test view behavior when the shoppingcart is used. """
def make_sku(self):
""" Checkout is handled by shoppingcart when the course mode's sku is empty. """
return ''
def _get_checkout_args(self, patched_create_order):
""" Assuming patched_create_order was called, return a mapping containing the call arguments."""
return dict(zip(('request', 'user', 'course_key', 'course_mode', 'amount'), patched_create_order.call_args[0]))
@attr(shard=2)
@override_settings(ECOMMERCE_API_URL=TEST_API_URL, ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY)
@patch(
'lms.djangoapps.verify_student.views.checkout_with_ecommerce_service',
return_value=TEST_PAYMENT_DATA,
autospec=True,
)
class TestCreateOrderEcommerceService(CheckoutTestMixin, ModuleStoreTestCase):
""" Test view behavior when the ecommerce service is used. """
def make_sku(self):
""" Checkout is handled by the ecommerce service when the course mode's sku is nonempty. """
return uuid4().hex.decode('ascii')
def _get_checkout_args(self, patched_create_order):
""" Assuming patched_create_order was called, return a mapping containing the call arguments."""
return dict(zip(('user', 'course_key', 'course_mode', 'processor'), patched_create_order.call_args[0]))
@attr(shard=2)
class TestCheckoutWithEcommerceService(ModuleStoreTestCase):
"""
Ensures correct behavior in the function `checkout_with_ecommerce_service`.
"""
@httpretty.activate
@override_settings(ECOMMERCE_API_URL=TEST_API_URL, ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY)
def test_create_basket(self):
"""
Check that when working with a product being processed by the
ecommerce api, we correctly call to that api to create a basket.
"""
user = UserFactory.create(username="test-username")
course_mode = CourseModeFactory.create(sku="test-sku").to_tuple() # pylint: disable=no-member
expected_payment_data = {'foo': 'bar'}
# mock out the payment processors endpoint
httpretty.register_uri(
httpretty.POST,
"{}/baskets/".format(TEST_API_URL),
body=json.dumps({'payment_data': expected_payment_data}),
content_type="application/json",
)
with mock.patch('lms.djangoapps.verify_student.views.audit_log') as mock_audit_log:
# Call the function
actual_payment_data = checkout_with_ecommerce_service(
user,
'dummy-course-key',
course_mode,
'test-processor'
)
# Verify that an audit message was logged
self.assertTrue(mock_audit_log.called)
# Check the api call
self.assertEqual(json.loads(httpretty.last_request().body), {
'products': [{'sku': 'test-sku'}],
'checkout': True,
'payment_processor_name': 'test-processor',
})
# Check the response
self.assertEqual(actual_payment_data, expected_payment_data)
@attr(shard=2)
class TestCreateOrderView(ModuleStoreTestCase):
"""
Tests for the create_order view of verified course enrollment process.
"""
def setUp(self):
super(TestCreateOrderView, self).setUp()
self.user = UserFactory.create(username="rusty", password="test")
self.client.login(username="rusty", password="test")
self.course_id = 'Robot/999/Test_Course'
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
verified_mode = CourseMode(
course_id=SlashSeparatedCourseKey("Robot", "999", 'Test_Course'),
mode_slug="verified",
mode_display_name="Verified Certificate",
min_price=50
)
verified_mode.save()
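        # Simulate the user choosing the verified track on the track-selection
        # page; this seeds the contribution amount used later by create_order.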
course_mode_post_data = {
'certificate_mode': 'Select Certificate',
'contribution': 50,
'contribution-other-amt': '',
'explain': ''
}
self.client.post(
reverse("course_modes_choose", kwargs={'course_id': self.course_id}),
course_mode_post_data
)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_invalid_amount(self):
response = self._create_order('1.a', self.course_id, expect_status_code=400)
self.assertIn('Selected price is not valid number.', response.content)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_invalid_mode(self):
# Create a course that does not have a verified mode
course_id = 'Fake/999/Test_Course'
CourseFactory.create(org='Fake', number='999', display_name='Test Course')
response = self._create_order('50', course_id, expect_status_code=400)
self.assertIn('This course doesn\'t support paid certificates', response.content)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_create_order_fail_with_get(self):
create_order_post_data = {
'contribution': 50,
'course_id': self.course_id,
}
# Use the wrong HTTP method
response = self.client.get(reverse('verify_student_create_order'), create_order_post_data)
self.assertEqual(response.status_code, 405)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_create_order_success(self):
response = self._create_order(50, self.course_id)
json_response = json.loads(response.content)
self.assertIsNotNone(json_response['payment_form_data'].get('orderNumber')) # TODO not canonical
# Verify that the order exists and is configured correctly
order = Order.objects.get(user=self.user)
self.assertEqual(order.status, 'paying')
item = CertificateItem.objects.get(order=order)
self.assertEqual(item.status, 'paying')
self.assertEqual(item.course_id, self.course.id)
self.assertEqual(item.mode, 'verified')
def _create_order(self, contribution, course_id, expect_success=True, expect_status_code=200):
"""Create a new order.
Arguments:
contribution (int): The contribution amount.
course_id (CourseKey): The course to purchase.
Keyword Arguments:
expect_success (bool): If True, verify that the response was successful.
expect_status_code (int): The expected HTTP status code
Returns:
HttpResponse
"""
url = reverse('verify_student_create_order')
data = {
'contribution': contribution,
'course_id': course_id,
'processor': None,
}
response = self.client.post(url, data)
self.assertEqual(response.status_code, expect_status_code)
if expect_status_code == 200:
json_response = json.loads(response.content)
if expect_success:
self.assertEqual(set(json_response.keys()), PAYMENT_DATA_KEYS)
else:
self.assertFalse(json_response['success'])
return response
@attr(shard=2)
@ddt.ddt
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
class TestSubmitPhotosForVerification(TestCase):
"""
Tests for submitting photos for verification.
"""
USERNAME = "test_user"
PASSWORD = "test_password"
IMAGE_DATA = "abcd,1234"
FULL_NAME = u"Ḟüḷḷ Ṅäṁë"
def setUp(self):
super(TestSubmitPhotosForVerification, self).setUp()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(result, msg="Could not log in")
def test_submit_photos(self):
# Submit the photos
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA
)
# Verify that the attempt is created in the database
attempt = SoftwareSecurePhotoVerification.objects.get(user=self.user)
self.assertEqual(attempt.status, "submitted")
# Verify that the user's name wasn't changed
self._assert_user_name(self.user.profile.name)
def test_submit_photos_and_change_name(self):
# Submit the photos, along with a name change
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA,
full_name=self.FULL_NAME
)
# Check that the user's name was changed in the database
self._assert_user_name(self.FULL_NAME)
def test_submit_photos_sends_confirmation_email(self):
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA
)
self._assert_confirmation_email(True)
def test_submit_photos_error_does_not_send_email(self):
# Error because invalid parameters, so no confirmation email
# should be sent.
self._submit_photos(expected_status_code=400)
self._assert_confirmation_email(False)
# Disable auto-auth since we will be intercepting POST requests
# to the verification service ourselves in this test.
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': False})
@override_settings(VERIFY_STUDENT={
"SOFTWARE_SECURE": {
"API_URL": "https://verify.example.com/submit/",
"API_ACCESS_KEY": "dcf291b5572942f99adaab4c2090c006",
"API_SECRET_KEY": "c392efdcc0354c5f922dc39844ec0dc7",
"FACE_IMAGE_AES_KEY": "f82400259e3b4f88821cd89838758292",
"RSA_PUBLIC_KEY": (
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDkgtz3fQdiXshy/RfOHkoHlhx/"
"SSPZ+nNyE9JZXtwhlzsXjnu+e9GOuJzgh4kUqo73ePIG5FxVU+mnacvufq2cu1SOx"
"lRYGyBK7qDf9Ym67I5gmmcNhbzdKcluAuDCPmQ4ecKpICQQldrDQ9HWDxwjbbcqpVB"
"PYWkE1KrtypGThmcehLmabf6SPq1CTAGlXsHgUtbWCwV6mqR8yScV0nRLln0djLDm9d"
"L8tIVFFVpAfBaYYh2Cm5EExQZjxyfjWd8P5H+8/l0pmK2jP7Hc0wuXJemIZbsdm+DSD"
"FhCGY3AILGkMwr068dGRxfBtBy/U9U5W+nStvkDdMrSgQezS5+V [email protected]"
),
"AWS_ACCESS_KEY": "c987c7efe35c403caa821f7328febfa1",
"AWS_SECRET_KEY": "fc595fc657c04437bb23495d8fe64881",
"S3_BUCKET": "test.example.com",
},
"DAYS_GOOD_FOR": 10,
})
@httpretty.activate
@moto.mock_s3
def test_submit_photos_for_reverification(self):
# Create the S3 bucket for photo upload
conn = boto.connect_s3()
conn.create_bucket("test.example.com")
# Mock the POST to Software Secure
httpretty.register_uri(httpretty.POST, "https://verify.example.com/submit/")
# Submit an initial verification attempt
self._submit_photos(
face_image=self.IMAGE_DATA + "4567",
photo_id_image=self.IMAGE_DATA + "8910",
)
initial_data = self._get_post_data()
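        # _get_post_data returns the JSON body of the mocked Software Secure
        # POST, including the uploaded photo URLs and the PhotoIDKey.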
# Submit a face photo for re-verification
self._submit_photos(face_image=self.IMAGE_DATA + "1112")
reverification_data = self._get_post_data()
# Verify that the initial attempt sent the same ID photo as the reverification attempt
self.assertEqual(initial_data["PhotoIDKey"], reverification_data["PhotoIDKey"])
initial_photo_response = requests.get(initial_data["PhotoID"])
self.assertEqual(initial_photo_response.status_code, 200)
reverification_photo_response = requests.get(reverification_data["PhotoID"])
self.assertEqual(reverification_photo_response.status_code, 200)
self.assertEqual(initial_photo_response.content, reverification_photo_response.content)
# Verify that the second attempt sent the updated face photo
initial_photo_response = requests.get(initial_data["UserPhoto"])
self.assertEqual(initial_photo_response.status_code, 200)
reverification_photo_response = requests.get(reverification_data["UserPhoto"])
self.assertEqual(reverification_photo_response.status_code, 200)
self.assertNotEqual(initial_photo_response.content, reverification_photo_response.content)
# Submit a new face photo and photo id for verification
self._submit_photos(
face_image=self.IMAGE_DATA + "9999",
photo_id_image=self.IMAGE_DATA + "1111",
)
two_photo_reverification_data = self._get_post_data()
# Verify that the initial attempt sent a new ID photo for the reverification attempt
self.assertNotEqual(initial_data["PhotoIDKey"], two_photo_reverification_data["PhotoIDKey"])
@ddt.data('face_image', 'photo_id_image')
def test_invalid_image_data(self, invalid_param):
params = {
'face_image': self.IMAGE_DATA,
'photo_id_image': self.IMAGE_DATA
}
params[invalid_param] = ""
response = self._submit_photos(expected_status_code=400, **params)
self.assertEqual(response.content, "Image data is not valid.")
def test_invalid_name(self):
response = self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA,
full_name="a",
expected_status_code=400
)
self.assertEqual(response.content, "Name must be at least 2 characters long.")
def test_missing_required_param(self):
# Missing face image parameter
params = {
'photo_id_image': self.IMAGE_DATA
}
response = self._submit_photos(expected_status_code=400, **params)
self.assertEqual(response.content, "Missing required parameter face_image")
def test_no_photo_id_and_no_initial_verification(self):
# Submit face image data, but not photo ID data.
# Since the user doesn't have an initial verification attempt, this should fail
response = self._submit_photos(expected_status_code=400, face_image=self.IMAGE_DATA)
self.assertEqual(
response.content,
"Photo ID image is required if the user does not have an initial verification attempt."
)
# Create the initial verification attempt with some dummy
# value set for field 'photo_id_key'
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA,
)
attempt = SoftwareSecurePhotoVerification.objects.get(user=self.user)
attempt.photo_id_key = "dummy_photo_id_key"
attempt.save()
# Now the request should succeed
self._submit_photos(face_image=self.IMAGE_DATA)
def _submit_photos(self, face_image=None, photo_id_image=None, full_name=None, expected_status_code=200):
"""Submit photos for verification.
Keyword Arguments:
face_image (str): The base-64 encoded face image data.
photo_id_image (str): The base-64 encoded ID image data.
full_name (unicode): The full name of the user, if the user is changing it.
expected_status_code (int): The expected response status code.
Returns:
HttpResponse
"""
url = reverse("verify_student_submit_photos")
params = {}
if face_image is not None:
params['face_image'] = face_image
if photo_id_image is not None:
params['photo_id_image'] = photo_id_image
if full_name is not None:
params['full_name'] = full_name
response = self.client.post(url, params)
self.assertEqual(response.status_code, expected_status_code)
return response
def _assert_confirmation_email(self, expect_email):
"""
Check that a confirmation email was or was not sent.
"""
if expect_email:
# Verify that photo submission confirmation email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("Verification photos received", mail.outbox[0].subject)
else:
# Verify that photo submission confirmation email was not sent
self.assertEqual(len(mail.outbox), 0)
def _assert_user_name(self, full_name):
"""Check the user's name.
Arguments:
full_name (unicode): The user's full name.
Raises:
AssertionError
"""
request = RequestFactory().get('/url')
request.user = self.user
account_settings = get_account_settings(request)[0]
self.assertEqual(account_settings['name'], full_name)
def _get_post_data(self):
"""Retrieve POST data from the last request. """
last_request = httpretty.last_request()
return json.loads(last_request.body)
@attr(shard=2)
class TestPhotoVerificationResultsCallback(ModuleStoreTestCase):
"""
Tests for the results_callback view.
"""
def setUp(self):
super(TestPhotoVerificationResultsCallback, self).setUp()
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
self.course_id = self.course.id
self.user = UserFactory.create()
self.attempt = SoftwareSecurePhotoVerification(
status="submitted",
user=self.user
)
self.attempt.save()
self.receipt_id = self.attempt.receipt_id
self.client = Client()
def mocked_has_valid_signature(method, headers_dict, body_dict, access_key, secret_key): # pylint: disable=no-self-argument, unused-argument
"""
Used as a side effect when mocking `verify_student.ssencrypt.has_valid_signature`.
"""
return True
def test_invalid_json(self):
"""
        Test for invalid JSON being posted by Software Secure.
"""
data = {"Testing invalid"}
response = self.client.post(
reverse('verify_student_results_callback'),
data=data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB: testing',
HTTP_DATE='testdate'
)
self.assertIn('Invalid JSON', response.content)
self.assertEqual(response.status_code, 400)
def test_invalid_dict(self):
"""
        Test for an invalid dictionary being posted by Software Secure.
"""
data = '"\\"Test\\tTesting"'
response = self.client.post(
reverse('verify_student_results_callback'),
data=data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertIn('JSON should be dict', response.content)
self.assertEqual(response.status_code, 400)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_invalid_access_key(self):
"""
Test for invalid access key.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": "Testing",
"Reason": "Testing",
"MessageType": "Testing"
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test testing:testing',
HTTP_DATE='testdate'
)
self.assertIn('Access key invalid', response.content)
self.assertEqual(response.status_code, 400)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_wrong_edx_id(self):
"""
        Test for a wrong ID on a Software Secure verification attempt.
"""
data = {
"EdX-ID": "Invalid-Id",
"Result": "Testing",
"Reason": "Testing",
"MessageType": "Testing"
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertIn('edX ID Invalid-Id not found', response.content)
self.assertEqual(response.status_code, 400)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_pass_result(self):
"""
Test for verification passed.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
        self.assertEqual(response.content, 'OK!')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_fail_result(self):
"""
Test for failed verification.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": 'FAIL',
"Reason": 'Invalid photo',
"MessageType": 'Your photo doesn\'t meet standards.'
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'denied')
self.assertEqual(attempt.error_code, u'Your photo doesn\'t meet standards.')
self.assertEqual(attempt.error_msg, u'"Invalid photo"')
        self.assertEqual(response.content, 'OK!')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_system_fail_result(self):
"""
Test for software secure result system failure.
"""
data = {"EdX-ID": self.receipt_id,
"Result": 'SYSTEM FAIL',
"Reason": 'Memory overflow',
"MessageType": 'You must retry the verification.'}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'must_retry')
self.assertEqual(attempt.error_code, u'You must retry the verification.')
self.assertEqual(attempt.error_msg, u'"Memory overflow"')
self.assertEquals(response.content, 'OK!')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_unknown_result(self):
"""
        Test for an unknown Software Secure result.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": 'Unknown',
"Reason": 'Unknown reason',
"MessageType": 'Unknown message'
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertIn('Result Unknown not understood', response.content)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_in_course_reverify_disabled(self):
"""
        Test that a passing result is recorded but no status email or
        verification status is created when in-course reverification is disabled.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
        # Verify that no photo submission confirmation email was sent
self.assertEqual(len(mail.outbox), 0)
user_status = VerificationStatus.objects.filter(user=self.user).count()
self.assertEqual(user_status, 0)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_pass_in_course_reverify_result(self):
"""
        Test that a passing in-course reverification result sends the status email.
"""
# Verify that ICRV status email was sent when config is enabled
IcrvStatusEmailsConfiguration.objects.create(enabled=True)
self.create_reverification_xblock()
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("Re-verification Status", mail.outbox[0].subject)
@mock.patch('verify_student.ssencrypt.has_valid_signature', mock.Mock(side_effect=mocked_has_valid_signature))
def test_icrv_status_email_with_disable_config(self):
"""
        Verify that the photo re-verification status email is not sent when the config is disabled
"""
IcrvStatusEmailsConfiguration.objects.create(enabled=False)
self.create_reverification_xblock()
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
self.assertEqual(len(mail.outbox), 0)
@mock.patch('lms.djangoapps.verify_student.views._send_email')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_reverification_on_callback(self, mock_send_email):
"""
Test software secure callback flow for re-verification.
"""
IcrvStatusEmailsConfiguration.objects.create(enabled=True)
# Create the 'edx-reverification-block' in course tree
self.create_reverification_xblock()
# create dummy data for software secure photo verification result callback
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertEqual(response.content, 'OK!')
# now check that '_send_email' method is called on result callback
# with required parameters
subject = "Re-verification Status"
mock_send_email.assert_called_once_with(self.user.id, subject, ANY)
def create_reverification_xblock(self):
"""
Create the reverification XBlock.
"""
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block'
)
# Create checkpoint
checkpoint = VerificationCheckpoint(course_id=self.course_id, checkpoint_location=reverification.location)
checkpoint.save()
# Add a re-verification attempt
checkpoint.add_verification_attempt(self.attempt)
# Add a re-verification attempt status for the user
VerificationStatus.add_verification_status(checkpoint, self.user, "submitted")
@attr(shard=2)
class TestReverifyView(TestCase):
"""
Tests for the reverification view.
Reverification occurs when a verification attempt is denied or expired,
and the student is given the option to resubmit.
"""
USERNAME = "shaftoe"
PASSWORD = "detachment-2702"
def setUp(self):
super(TestReverifyView, self).setUp()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
success = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(success, msg="Could not log in")
def test_reverify_view_can_do_initial_verification(self):
"""
        Test that a user can use the reverify link for initial verification.
"""
self._assert_can_reverify()
def test_reverify_view_can_reverify_denied(self):
# User has a denied attempt, so can reverify
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.deny("error")
self._assert_can_reverify()
def test_reverify_view_can_reverify_expired(self):
# User has a verification attempt, but it's expired
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.approve()
days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
attempt.created_at = datetime.now(pytz.UTC) - timedelta(days=(days_good_for + 1))
attempt.save()
# Allow the student to reverify
self._assert_can_reverify()
def test_reverify_view_can_reverify_pending(self):
""" Test that the user can still re-verify even if the previous photo
verification is in pending state.
A photo verification is considered in pending state when the user has
either submitted the photo verification (status in database: 'submitted')
or photo verification submission failed (status in database: 'must_retry').
"""
# User has submitted a verification attempt, but Software Secure has not yet responded
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
# Can re-verify because an attempt has already been submitted.
self._assert_can_reverify()
def test_reverify_view_cannot_reverify_approved(self):
# Submitted attempt has been approved
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.approve()
# Cannot reverify because the user is already verified.
self._assert_cannot_reverify()
def _get_reverify_page(self):
"""
Retrieve the reverification page and return the response.
"""
url = reverse("verify_student_reverify")
return self.client.get(url)
def _assert_can_reverify(self):
"""
Check that the reverification flow is rendered.
"""
response = self._get_reverify_page()
self.assertContains(response, "reverify-container")
def _assert_cannot_reverify(self):
"""
Check that the user is blocked from reverifying.
"""
response = self._get_reverify_page()
self.assertContains(response, "reverify-blocked")
@attr(shard=2)
class TestInCourseReverifyView(ModuleStoreTestCase):
"""
Tests for the incourse reverification views.
"""
IMAGE_DATA = "abcd,1234"
def build_course(self):
"""
        Build up a course tree with a Reverification XBlock.
"""
self.course_key = SlashSeparatedCourseKey("Robot", "999", "Test_Course")
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
# Create the course modes
for mode in ('audit', 'honor', 'verified'):
min_price = 0 if mode in ["honor", "audit"] else 1
CourseModeFactory.create(mode_slug=mode, course_id=self.course_key, min_price=min_price)
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
self.reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block'
)
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.reverification_location = unicode(self.reverification.location)
self.reverification_assessment = self.reverification.related_assessment
def setUp(self):
super(TestInCourseReverifyView, self).setUp()
self.build_course()
self.user = UserFactory.create(username="rusty", password="test")
self.client.login(username="rusty", password="test")
        # Enroll the user in the verified mode
CourseEnrollment.enroll(self.user, self.course_key, mode="verified")
# mocking and patching for bi events
analytics_patcher = patch('lms.djangoapps.verify_student.views.analytics')
self.mock_tracker = analytics_patcher.start()
self.addCleanup(analytics_patcher.stop)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_invalid_checkpoint_get(self):
# Retrieve a checkpoint that doesn't yet exist
response = self.client.get(self._get_url(self.course_key, "invalid_checkpoint"))
self.assertEqual(response.status_code, 404)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_initial_redirect_get(self):
self._create_checkpoint()
response = self.client.get(self._get_url(self.course_key, self.reverification_location))
url = reverse('verify_student_verify_now', kwargs={"course_id": unicode(self.course_key)})
url += u"?{params}".format(params=urllib.urlencode({"checkpoint": self.reverification_location}))
self.assertRedirects(response, url)
@override_settings(LMS_SEGMENT_KEY="foobar")
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_get(self):
"""
Test incourse reverification.
"""
self._create_checkpoint()
self._create_initial_verification()
response = self.client.get(self._get_url(self.course_key, self.reverification_location))
self.assertEquals(response.status_code, 200)
        # verify that the analytics event fires when the reverification
        # flow is started
self.mock_tracker.track.assert_called_once_with( # pylint: disable=no-member
self.user.id,
'edx.bi.reverify.started',
{
'category': "verification",
'label': unicode(self.course_key),
'checkpoint': self.reverification_assessment
},
context={
'ip': '127.0.0.1',
'Google Analytics':
{'clientId': None}
}
)
self.mock_tracker.reset_mock()
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_checkpoint_post(self):
"""Verify that POST requests including an invalid checkpoint location
results in a 400 response.
"""
response = self._submit_photos(self.course_key, self.reverification_location, self.IMAGE_DATA)
self.assertEquals(response.status_code, 400)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_id_required_if_no_initial_verification(self):
self._create_checkpoint()
# Since the user has no initial verification and we're not sending the ID photo,
# we should expect a 400 bad request
response = self._submit_photos(self.course_key, self.reverification_location, self.IMAGE_DATA)
self.assertEqual(response.status_code, 400)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_index_error_post(self):
self._create_checkpoint()
self._create_initial_verification()
response = self._submit_photos(self.course_key, self.reverification_location, "")
self.assertEqual(response.status_code, 400)
@override_settings(LMS_SEGMENT_KEY="foobar")
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_post(self):
self._create_checkpoint()
self._create_initial_verification()
response = self._submit_photos(self.course_key, self.reverification_location, self.IMAGE_DATA)
self.assertEqual(response.status_code, 200)
# Check that the checkpoint status has been updated
status = VerificationStatus.get_user_status_at_checkpoint(
self.user, self.course_key, self.reverification_location
)
self.assertEqual(status, "submitted")
        # Verify that the analytics event fires after successfully submitting
        # the photo verification
self.mock_tracker.track.assert_called_once_with( # pylint: disable=no-member
self.user.id,
'edx.bi.reverify.submitted',
{
'category': "verification",
'label': unicode(self.course_key),
'checkpoint': self.reverification_assessment
},
context={
'ip': '127.0.0.1',
'Google Analytics':
{'clientId': None}
}
)
self.mock_tracker.reset_mock()
def _create_checkpoint(self):
"""
Helper method for creating a reverification checkpoint.
"""
checkpoint = VerificationCheckpoint(course_id=self.course_key, checkpoint_location=self.reverification_location)
checkpoint.save()
def _create_initial_verification(self):
"""
Helper method for initial verification.
"""
attempt = SoftwareSecurePhotoVerification(user=self.user, photo_id_key="dummy_photo_id_key")
attempt.mark_ready()
attempt.save()
attempt.submit()
def _get_url(self, course_key, checkpoint_location):
"""
Construct the reverification url.
Arguments:
course_key (unicode): The ID of the course
checkpoint_location (str): Location of verification checkpoint
Returns:
url
"""
return reverse(
'verify_student_incourse_reverify',
kwargs={
"course_id": unicode(course_key),
"usage_id": checkpoint_location
}
)
def _submit_photos(self, course_key, checkpoint_location, face_image_data):
""" Submit photos for verification. """
url = reverse("verify_student_submit_photos")
data = {
"course_key": unicode(course_key),
"checkpoint": checkpoint_location,
"face_image": face_image_data,
}
return self.client.post(url, data)
@attr(shard=2)
class TestEmailMessageWithCustomICRVBlock(ModuleStoreTestCase):
"""
Test email sending on re-verification
"""
def build_course(self):
"""
        Build up a course tree with a Reverification XBlock.
"""
self.course_key = SlashSeparatedCourseKey("Robot", "999", "Test_Course")
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
self.due_date = datetime.now(pytz.UTC) + timedelta(days=20)
self.allowed_attempts = 1
# Create the course modes
for mode in ('audit', 'honor', 'verified'):
min_price = 0 if mode in ["honor", "audit"] else 1
CourseModeFactory.create(mode_slug=mode, course_id=self.course_key, min_price=min_price)
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
self.reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block',
metadata={'attempts': self.allowed_attempts, 'due': self.due_date}
)
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.reverification_location = unicode(self.reverification.location)
self.assessment = self.reverification.related_assessment
self.re_verification_link = reverse(
'verify_student_incourse_reverify',
args=(
unicode(self.course_key),
self.reverification_location
)
)
def setUp(self):
"""
Setup method for testing photo verification email messages.
"""
super(TestEmailMessageWithCustomICRVBlock, self).setUp()
self.build_course()
self.check_point = VerificationCheckpoint.objects.create(
course_id=self.course.id, checkpoint_location=self.reverification_location
)
self.check_point.add_verification_attempt(SoftwareSecurePhotoVerification.objects.create(user=self.user))
VerificationStatus.add_verification_status(
checkpoint=self.check_point,
user=self.user,
status='submitted'
)
self.attempt = SoftwareSecurePhotoVerification.objects.filter(user=self.user)
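        # Resolve the checkpoint's location back to a courseware URL; the
        # re-verification emails are expected to embed this absolute link.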
location_id = VerificationStatus.get_location_id(self.attempt)
usage_key = UsageKey.from_string(location_id)
redirect_url = get_redirect_url(self.course_key, usage_key.replace(course_key=self.course_key))
self.request = RequestFactory().get('/url')
self.course_link = self.request.build_absolute_uri(redirect_url)
def test_approved_email_message(self):
subject, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "approved", self.request
)
self.assertIn(
"We have successfully verified your identity for the {assessment} "
"assessment in the {course_name} course.".format(
assessment=self.assessment,
course_name=self.course.display_name_with_default_escaped
),
body
)
self.check_courseware_link_exists(body)
self.assertIn("Re-verification Status", subject)
def test_denied_email_message_with_valid_due_date_and_attempts_allowed(self):
subject, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=1,
allowed_attempts=self.allowed_attempts + 1
),
body
)
self.assertIn(
"You must verify your identity before the assessment "
"closes on {due_date}".format(
due_date=get_default_time_display(self.due_date)
),
body
)
reverify_link = self.request.build_absolute_uri(self.re_verification_link)
self.assertIn(
"To try to verify your identity again, select the following link:",
body
)
self.assertIn(reverify_link, body)
self.assertIn("Re-verification Status", subject)
def test_denied_email_message_with_due_date_and_no_attempts(self):
""" Denied email message if due date is still open but user has no
attempts available.
"""
VerificationStatus.add_verification_status(
checkpoint=self.check_point,
user=self.user,
status='submitted'
)
__, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity, and verification is no longer "
"possible".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=2,
allowed_attempts=self.allowed_attempts + 1
),
body
)
self.check_courseware_link_exists(body)
def test_denied_email_message_with_close_verification_dates(self):
# Due date given and expired
return_value = datetime.now(tz=pytz.UTC) + timedelta(days=22)
with patch.object(timezone, 'now', return_value=return_value):
__, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity, and verification is no longer "
"possible".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=1,
allowed_attempts=self.allowed_attempts + 1
),
body
)
def test_check_num_queries(self):
        # Fetch the re-verification block, checking the number of mongo calls made
with check_mongo_calls(1):
ver_block = modulestore().get_item(self.reverification.location)
# Expect that the verification block is fetched
self.assertIsNotNone(ver_block)
def check_courseware_link_exists(self, body):
"""Checking courseware url and signature information of EDX"""
self.assertIn(
"To go to the courseware, select the following link:",
body
)
self.assertIn(
"{course_link}".format(
course_link=self.course_link
),
body
)
self.assertIn("Thanks,", body)
self.assertIn(
"The {platform_name} team".format(
platform_name=settings.PLATFORM_NAME
),
body
)
@attr(shard=2)
class TestEmailMessageWithDefaultICRVBlock(ModuleStoreTestCase):
"""
Test for In-course Re-verification
"""
def build_course(self):
"""
        Build up a course tree with a Reverification XBlock.
"""
self.course_key = SlashSeparatedCourseKey("Robot", "999", "Test_Course")
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
# Create the course modes
for mode in ('audit', 'honor', 'verified'):
min_price = 0 if mode in ["honor", "audit"] else 1
CourseModeFactory.create(mode_slug=mode, course_id=self.course_key, min_price=min_price)
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
self.reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block'
)
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.reverification_location = unicode(self.reverification.location)
self.assessment = self.reverification.related_assessment
self.re_verification_link = reverse(
'verify_student_incourse_reverify',
args=(
unicode(self.course_key),
self.reverification_location
)
)
def setUp(self):
super(TestEmailMessageWithDefaultICRVBlock, self).setUp()
self.build_course()
self.check_point = VerificationCheckpoint.objects.create(
course_id=self.course.id, checkpoint_location=self.reverification_location
)
self.check_point.add_verification_attempt(SoftwareSecurePhotoVerification.objects.create(user=self.user))
self.attempt = SoftwareSecurePhotoVerification.objects.filter(user=self.user)
self.request = RequestFactory().get('/url')
def test_denied_email_message_with_no_attempt_allowed(self):
VerificationStatus.add_verification_status(
checkpoint=self.check_point,
user=self.user,
status='submitted'
)
__, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity, and verification is no longer "
"possible".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=1,
allowed_attempts=1
),
body
)
def test_error_on_compose_email(self):
resp = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", True
)
self.assertIsNone(resp)
| agpl-3.0 | -1,914,971,750,501,064,400 | 40.219342 | 145 | 0.636853 | false |
MIPS/external-chromium_org-tools-gyp | test/copies/gyptest-slash.py | 249 | 1433 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies with a trailing slash in the destination directory.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('copies-slash.gyp', chdir='src')
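# Relocate the sources to verify that the generated build files do not
# depend on absolute paths.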
test.relocate('src', 'relocate/src')
test.build('copies-slash.gyp', chdir='relocate/src')
test.built_file_must_match('copies-out-slash/directory/file3',
'file3 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash/directory/file4',
'file4 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash/directory/subdir/file5',
'file5 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash-2/directory/file3',
'file3 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash-2/directory/file4',
'file4 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash-2/directory/subdir/file5',
'file5 contents\n',
chdir='relocate/src')
test.pass_test()
| bsd-3-clause | 8,556,398,514,487,747,000 | 36.710526 | 72 | 0.577809 | false |
dolphin-emu/dolphin | docs/DSP/prefix_replace.py | 164 | 1049 | # this can be used to upgrade disassemblies that aren't too annotated.
# won't do very well on the current zelda disasm.
import os
import sys
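# Helpers below: GetPrefixLine(l, a) returns the first line in a that starts
# with the prefix l (or "" if none matches); GetComment(l) returns the trailing
# "//" or "->" comment of a line, including the whitespace just before it.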
def GetPrefixLine(l, a):
for s in a:
if s[0:len(l)] == l:
return s
return ""
def GetComment(l):
comment_start = l.find("//")
if comment_start < 0:
comment_start = l.find("->")
if comment_start < 0:
return ""
while (l[comment_start-1] == ' ') or (l[comment_start-1] == '\t'):
comment_start -= 1
return l[comment_start:]
def main():
old_lines = open("DSP_UC_Zelda.txt", "r").readlines()
# for l in old_lines:
# print l
new_lines = open("zeldanew.txt", "r").readlines()
for i in range(0, len(old_lines)):
prefix = old_lines[i][0:14]
comment = GetComment(old_lines[i])
new_line = GetPrefixLine(prefix, new_lines)
if new_line:
old_lines[i] = new_line[:-1] + comment[:-1] + "\n"
for i in range(0, len(old_lines)):
print old_lines[i],
new_file = open("output.txt", "w")
new_file.writelines(old_lines)
main() | gpl-2.0 | -6,106,910,879,947,386,000 | 22.333333 | 70 | 0.597712 | false |
nash-x/hws | neutron/db/migration/alembic_migrations/versions/16c8803e1cf_trunk_port.py | 1 | 1549 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""trunk_port
Revision ID: 16c8803e1cf
Revises: 544673ac99ab
Create Date: 2014-09-01 18:06:15.722787
"""
# revision identifiers, used by Alembic.
revision = '16c8803e1cf'
down_revision = '42f49dd148cd'
from alembic import op
import sqlalchemy as sa
def upgrade(active_plugins=None, options=None):
op.create_table(
'trunkports',
sa.Column('port_id', sa.String(length=36), nullable=False),
sa.Column('trunk_type', sa.String(length=16), nullable=True),
sa.Column('parent_id', sa.String(length=36), nullable=True),
sa.Column('vid', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['port_id'], ['ports.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('port_id'))
if op.get_bind().dialect.name == 'mysql':
op.execute("ALTER TABLE %s ENGINE=InnoDB" % 'trunkports')
def downgrade(active_plugins=None, options=None):
op.drop_table('trunkports')
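# Illustrative usage (not part of this migration): the revision above is
# applied through alembic via the neutron-db-manage wrapper, e.g.
#   neutron-db-manage upgrade 16c8803e1cf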
| apache-2.0 | 4,849,109,674,138,148,000 | 31.957447 | 79 | 0.693996 | false |
shingonoide/odoo | addons/procurement_jit_stock/procurement_jit_stock.py | 64 | 2047 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class procurement_order(osv.osv):
_inherit = "procurement.order"
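    # Run the given procurements with automatic chaining deferred, then
    # recursively run the child procurements (those whose destination move
    # originates from one of the procurements just run) so the whole
    # just-in-time chain is processed.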
def run(self, cr, uid, ids, autocommit=False, context=None):
context = dict(context or {}, procurement_autorun_defer=True)
res = super(procurement_order, self).run(cr, uid, ids, autocommit=autocommit, context=context)
procurement_ids = self.search(cr, uid, [('move_dest_id.procurement_id', 'in', ids), ('state', 'not in', ['exception', 'cancel'])], order='id', context=context)
if procurement_ids:
return self.run(cr, uid, procurement_ids, autocommit=autocommit, context=context)
return res
class stock_move(osv.osv):
_inherit = "stock.move"
def _create_procurements(self, cr, uid, moves, context=None):
res = super(stock_move, self)._create_procurements(cr, uid, moves, context=dict(context or {}, procurement_autorun_defer=True))
self.pool['procurement.order'].run(cr, uid, res, context=context)
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -8,509,350,440,075,620,000 | 43.5 | 167 | 0.638007 | false |
jmcanterafonseca/fiware-orion | test/acceptance/behave/components/common_steps/entities/delete_steps.py | 5 | 3670 | # -*- coding: utf-8 -*-
"""
Copyright 2015 Telefonica Investigacion y Desarrollo, S.A.U
This file is part of Orion Context Broker.
Orion Context Broker is free software: you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Orion Context Broker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
For those usages not covered by this license please contact with
iot_support at tid dot es
"""
__author__ = 'Iván Arias León (ivan dot ariasleon at telefonica dot com)'
import behave
from behave import step
from iotqatools.helpers_utils import *
from iotqatools.mongo_utils import Mongo
from tools.NGSI_v2 import NGSI
from tools.properties_config import Properties
# constants
CONTEXT_BROKER_ENV = u'context_broker_env'
MONGO_ENV = u'mongo_env'
properties_class = Properties()
behave.use_step_matcher("re")
__logger__ = logging.getLogger("steps")
# ------------------------- delete steps ----------------------------
@step(u'delete an entity with id "([^"]*)"')
def delete_an_entity_by_id(context, entity_id):
"""
delete an entity by id
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param entity_id: entity id name
"""
__logger__.debug("Deleting entity: %s..." % entity_id)
context.resp = context.cb.delete_entities_by_id(context, entity_id)
__logger__.info("...\"%s\" entity has been deleted" % entity_id)
@step(u'delete an attribute "([^"]*)" in the entity with id "([^"]*)"')
def delete_an_attribute_in_entity_with_id(context, attribute_name, entity_id):
"""
delete an attribute in an entity
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param entity_id: entity id name
:param attribute_name: attribute name to delete
"""
__logger__.debug("Deleting an attribute in an entity defined by ID...")
    # If a single attribute is deleted in several entities, a list of responses
    # is returned; otherwise a single response is returned.
context.resp = context.cb.delete_entities_by_id(context, entity_id, attribute_name)
__logger__.info("... an attribute is deleted in an entity defined by ID")
# ------------------------- verification steps ----------------------------
@step(u'verify that the attribute is deleted into mongo in the defined entity')
@step(u'verify that the attribute is deleted into mongo')
def verify_that_the_attribute_is_deleted_into_mongo(context):
"""
verify that the attribute is deleted into mongo
"""
props_mongo = properties_class.read_properties()[MONGO_ENV] # mongo properties dict
__logger__.debug("Verifying if the atribute is deleted...")
mongo = Mongo(host=props_mongo["MONGO_HOST"], port=props_mongo["MONGO_PORT"], user=props_mongo["MONGO_USER"],
password=props_mongo["MONGO_PASS"])
ngsi = NGSI()
ngsi.verify_attribute_is_deleted(mongo, context.cb.get_entity_context(), context.cb.get_headers(), context.cb.get_entities_parameters())
__logger__.info("...verified that the attribute is deleted")
| agpl-3.0 | 8,758,400,781,217,366,000 | 42.105882 | 157 | 0.701692 | false |
jaimahajan1997/sympy | sympy/series/tests/test_limitseq.py | 62 | 3139 | from sympy import symbols, oo, Sum, harmonic, Add, S, binomial, factorial
from sympy.series.limitseq import limit_seq
from sympy.series.limitseq import difference_delta as dd
from sympy.utilities.pytest import raises, XFAIL
n, m, k = symbols('n m k', integer=True)
def test_difference_delta():
e = n*(n + 1)
e2 = e * k
assert dd(e) == 2*n + 2
assert dd(e2, n, 2) == k*(4*n + 6)
raises(ValueError, lambda: dd(e2))
raises(ValueError, lambda: dd(e2, n, oo))
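# Note: as the assertions above illustrate, dd(e, n, step) effectively
# computes the forward difference e.subs(n, n + step) - e.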
def test_difference_delta__Sum():
e = Sum(1/k, (k, 1, n))
assert dd(e, n) == 1/(n + 1)
assert dd(e, n, 5) == Add(*[1/(i + n + 1) for i in range(5)])
e = Sum(1/k, (k, 1, 3*n))
assert dd(e, n) == Add(*[1/(i + 3*n + 1) for i in range(3)])
e = n * Sum(1/k, (k, 1, n))
assert dd(e, n) == 1 + Sum(1/k, (k, 1, n))
e = Sum(1/k, (k, 1, n), (m, 1, n))
assert dd(e, n) == harmonic(n)
def test_difference_delta__Add():
e = n + n*(n + 1)
assert dd(e, n) == 2*n + 3
assert dd(e, n, 2) == 4*n + 8
e = n + Sum(1/k, (k, 1, n))
assert dd(e, n) == 1 + 1/(n + 1)
assert dd(e, n, 5) == 5 + Add(*[1/(i + n + 1) for i in range(5)])
def test_difference_delta__Pow():
e = 4**n
assert dd(e, n) == 3*4**n
assert dd(e, n, 2) == 15*4**n
e = 4**(2*n)
assert dd(e, n) == 15*4**(2*n)
assert dd(e, n, 2) == 255*4**(2*n)
e = n**4
assert dd(e, n) == (n + 1)**4 - n**4
e = n**n
assert dd(e, n) == (n + 1)**(n + 1) - n**n
def test_limit_seq():
e = binomial(2*n, n) / Sum(binomial(2*k, k), (k, 1, n))
assert limit_seq(e) == S(3) / 4
assert limit_seq(e, m) == e
e = (5*n**3 + 3*n**2 + 4) / (3*n**3 + 4*n - 5)
assert limit_seq(e, n) == S(5) / 3
e = (harmonic(n) * Sum(harmonic(k), (k, 1, n))) / (n * harmonic(2*n)**2)
assert limit_seq(e, n) == 1
e = Sum(k**2 * Sum(2**m/m, (m, 1, k)), (k, 1, n)) / (2**n*n)
assert limit_seq(e, n) == 4
e = (Sum(binomial(3*k, k) * binomial(5*k, k), (k, 1, n)) /
(binomial(3*n, n) * binomial(5*n, n)))
assert limit_seq(e, n) == S(84375) / 83351
e = Sum(harmonic(k)**2/k, (k, 1, 2*n)) / harmonic(n)**3
assert limit_seq(e, n) == S(1) / 3
raises(ValueError, lambda: limit_seq(e * m))
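# limit_seq(e, n) computes the limit of e as n -> oo through the integers;
# when e does not depend on the given symbol the sequence is constant, so
# the expression itself is returned (see limit_seq(e, m) == e above).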
@XFAIL
def test_limit_seq_fail():
# improve Summation algorithm or add ad-hoc criteria
e = (harmonic(n)**3 * Sum(1/harmonic(k), (k, 1, n)) /
(n * Sum(harmonic(k)/k, (k, 1, n))))
assert limit_seq(e, n) == 2
# No unique dominant term
e = (Sum(2**k * binomial(2*k, k) / k**2, (k, 1, n)) /
(Sum(2**k/k*2, (k, 1, n)) * Sum(binomial(2*k, k), (k, 1, n))))
assert limit_seq(e, n) == S(3) / 7
# Simplifications of summations needs to be improved.
e = n**3*Sum(2**k/k**2, (k, 1, n))**2 / (2**n * Sum(2**k/k, (k, 1, n)))
assert limit_seq(e, n) == 2
e = (harmonic(n) * Sum(2**k/k, (k, 1, n)) /
(n * Sum(2**k*harmonic(k)/k**2, (k, 1, n))))
assert limit_seq(e, n) == 1
e = (Sum(2**k*factorial(k) / k**2, (k, 1, 2*n)) /
(Sum(4**k/k**2, (k, 1, n)) * Sum(factorial(k), (k, 1, 2*n))))
assert limit_seq(e, n) == S(3) / 16
| bsd-3-clause | 4,658,436,460,113,190,000 | 28.336449 | 76 | 0.486142 | false |
b-jesch/service.fritzbox.callmonitor | resources/lib/PhoneBooks/pyicloud/vendorlibs/requests/packages/urllib3/exceptions.py | 515 | 5599 | from __future__ import absolute_import
# Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
class HTTPWarning(Warning):
"Base warning used by this module."
pass
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, None)
class RequestError(PoolError):
"Base exception for PoolErrors that have associated URLs."
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url, None)
class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
class ProxyError(HTTPError):
"Raised when the connection to a proxy fails."
pass
class DecodeError(HTTPError):
"Raised when automatic decoding based on Content-Type fails."
pass
class ProtocolError(HTTPError):
"Raised when something unexpected happens mid-request/response."
pass
#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
:param pool: The connection pool
:type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested URL
:param exceptions.Exception reason: The underlying error
"""
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s (Caused by %r)" % (
url, reason)
RequestError.__init__(self, pool, url, message)
class HostChangedError(RequestError):
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
RequestError.__init__(self, pool, url, message)
self.retries = retries
class TimeoutStateError(HTTPError):
""" Raised when passing an invalid state to a timeout """
pass
class TimeoutError(HTTPError):
""" Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
pass
class ReadTimeoutError(TimeoutError, RequestError):
"Raised when a socket timeout occurs while receiving data from a server"
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"Raised when a socket timeout occurs while connecting to a server"
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
pass
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationValueError(ValueError, HTTPError):
"Raised when there is something wrong with a given URL input."
pass
class LocationParseError(LocationValueError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
class ResponseError(HTTPError):
"Used as a container for an error reason supplied in a MaxRetryError."
GENERIC_ERROR = 'too many error responses'
SPECIFIC_ERROR = 'too many {status_code} error responses'
class SecurityWarning(HTTPWarning):
"Warned when perfoming security reducing actions"
pass
class SubjectAltNameWarning(SecurityWarning):
"Warned when connecting to a host with a certificate missing a SAN."
pass
class InsecureRequestWarning(SecurityWarning):
"Warned when making an unverified HTTPS request."
pass
class SystemTimeWarning(SecurityWarning):
"Warned when system time is suspected to be wrong"
pass
class InsecurePlatformWarning(SecurityWarning):
"Warned when certain SSL configuration is not available on a platform."
pass
class SNIMissingWarning(HTTPWarning):
"Warned when making a HTTPS request without SNI available."
pass
class DependencyWarning(HTTPWarning):
"""
Warned when an attempt is made to import a module with missing optional
dependencies.
"""
pass
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass
class ProxySchemeUnknown(AssertionError, ValueError):
"ProxyManager does not support the supplied scheme"
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
message = "Not supported proxy scheme %s" % scheme
super(ProxySchemeUnknown, self).__init__(message)
class HeaderParsingError(HTTPError):
"Raised by assert_header_parsing, but we convert it to a log.warning statement."
def __init__(self, defects, unparsed_data):
message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
super(HeaderParsingError, self).__init__(message)
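# Illustrative only (not part of urllib3): since everything above derives
# from HTTPError, callers can catch the whole family at once, e.g.
#
#   try:
#       http.request('GET', url)
#   except MaxRetryError as err:  # also a RequestError/PoolError/HTTPError
#       log.warning('gave up on %s: %s', err.url, err.reason)
#   except HTTPError:
#       raise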
| gpl-2.0 | -9,071,719,050,610,124,000 | 25.789474 | 84 | 0.701732 | false |
avanov/django | tests/generic_views/urls.py | 194 | 14571 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from . import models, views
urlpatterns = [
# TemplateView
url(r'^template/no_template/$',
TemplateView.as_view()),
url(r'^template/login_required/$',
login_required(TemplateView.as_view())),
url(r'^template/simple/(?P<foo>\w+)/$',
TemplateView.as_view(template_name='generic_views/about.html')),
url(r'^template/custom/(?P<foo>\w+)/$',
views.CustomTemplateView.as_view(template_name='generic_views/about.html')),
url(r'^template/content_type/$',
TemplateView.as_view(template_name='generic_views/robots.txt', content_type='text/plain')),
url(r'^template/cached/(?P<foo>\w+)/$',
cache_page(2.0)(TemplateView.as_view(template_name='generic_views/about.html'))),
# DetailView
url(r'^detail/obj/$',
views.ObjectDetail.as_view()),
url(r'^detail/artist/(?P<pk>[0-9]+)/$',
views.ArtistDetail.as_view(),
name="artist_detail"),
url(r'^detail/author/(?P<pk>[0-9]+)/$',
views.AuthorDetail.as_view(),
name="author_detail"),
url(r'^detail/author/bycustompk/(?P<foo>[0-9]+)/$',
views.AuthorDetail.as_view(pk_url_kwarg='foo')),
url(r'^detail/author/byslug/(?P<slug>[\w-]+)/$',
views.AuthorDetail.as_view()),
url(r'^detail/author/bycustomslug/(?P<foo>[\w-]+)/$',
views.AuthorDetail.as_view(slug_url_kwarg='foo')),
url(r'^detail/author/bypkignoreslug/(?P<pk>[0-9]+)-(?P<slug>[\w-]+)/$',
views.AuthorDetail.as_view()),
url(r'^detail/author/bypkandslug/(?P<pk>[0-9]+)-(?P<slug>[\w-]+)/$',
views.AuthorDetail.as_view(query_pk_and_slug=True)),
url(r'^detail/author/(?P<pk>[0-9]+)/template_name_suffix/$',
views.AuthorDetail.as_view(template_name_suffix='_view')),
url(r'^detail/author/(?P<pk>[0-9]+)/template_name/$',
views.AuthorDetail.as_view(template_name='generic_views/about.html')),
url(r'^detail/author/(?P<pk>[0-9]+)/context_object_name/$',
views.AuthorDetail.as_view(context_object_name='thingy')),
url(r'^detail/author/(?P<pk>[0-9]+)/dupe_context_object_name/$',
views.AuthorDetail.as_view(context_object_name='object')),
url(r'^detail/page/(?P<pk>[0-9]+)/field/$',
views.PageDetail.as_view()),
url(r'^detail/author/invalid/url/$',
views.AuthorDetail.as_view()),
url(r'^detail/author/invalid/qs/$',
views.AuthorDetail.as_view(queryset=None)),
url(r'^detail/nonmodel/1/$',
views.NonModelDetail.as_view()),
url(r'^detail/doesnotexist/(?P<pk>[0-9]+)/$',
views.ObjectDoesNotExistDetail.as_view()),
# FormView
url(r'^contact/$',
views.ContactView.as_view()),
# Create/UpdateView
url(r'^edit/artists/create/$',
views.ArtistCreate.as_view()),
url(r'^edit/artists/(?P<pk>[0-9]+)/update/$',
views.ArtistUpdate.as_view()),
url(r'^edit/authors/create/naive/$',
views.NaiveAuthorCreate.as_view()),
url(r'^edit/authors/create/redirect/$',
views.NaiveAuthorCreate.as_view(success_url='/edit/authors/create/')),
url(r'^edit/authors/create/interpolate_redirect/$',
views.NaiveAuthorCreate.as_view(success_url='/edit/author/%(id)d/update/')),
url(r'^edit/authors/create/interpolate_redirect_nonascii/$',
views.NaiveAuthorCreate.as_view(success_url='/%C3%A9dit/author/{id}/update/')),
url(r'^edit/authors/create/restricted/$',
views.AuthorCreateRestricted.as_view()),
url(r'^[eé]dit/authors/create/$',
views.AuthorCreate.as_view()),
url(r'^edit/authors/create/special/$',
views.SpecializedAuthorCreate.as_view()),
url(r'^edit/author/(?P<pk>[0-9]+)/update/naive/$',
views.NaiveAuthorUpdate.as_view()),
url(r'^edit/author/(?P<pk>[0-9]+)/update/redirect/$',
views.NaiveAuthorUpdate.as_view(success_url='/edit/authors/create/')),
url(r'^edit/author/(?P<pk>[0-9]+)/update/interpolate_redirect/$',
views.NaiveAuthorUpdate.as_view(success_url='/edit/author/%(id)d/update/')),
url(r'^edit/author/(?P<pk>[0-9]+)/update/interpolate_redirect_nonascii/$',
views.NaiveAuthorUpdate.as_view(success_url='/%C3%A9dit/author/{id}/update/')),
url(r'^[eé]dit/author/(?P<pk>[0-9]+)/update/$',
views.AuthorUpdate.as_view()),
url(r'^edit/author/update/$',
views.OneAuthorUpdate.as_view()),
url(r'^edit/author/(?P<pk>[0-9]+)/update/special/$',
views.SpecializedAuthorUpdate.as_view()),
url(r'^edit/author/(?P<pk>[0-9]+)/delete/naive/$',
views.NaiveAuthorDelete.as_view()),
url(r'^edit/author/(?P<pk>[0-9]+)/delete/redirect/$',
views.NaiveAuthorDelete.as_view(success_url='/edit/authors/create/')),
url(r'^edit/author/(?P<pk>[0-9]+)/delete/interpolate_redirect/$',
views.NaiveAuthorDelete.as_view(success_url='/edit/authors/create/?deleted=%(id)s')),
url(r'^edit/author/(?P<pk>[0-9]+)/delete/interpolate_redirect_nonascii/$',
views.NaiveAuthorDelete.as_view(success_url='/%C3%A9dit/authors/create/?deleted={id}')),
url(r'^edit/author/(?P<pk>[0-9]+)/delete/$',
views.AuthorDelete.as_view()),
url(r'^edit/author/(?P<pk>[0-9]+)/delete/special/$',
views.SpecializedAuthorDelete.as_view()),
# ArchiveIndexView
url(r'^dates/books/$',
views.BookArchive.as_view()),
url(r'^dates/books/context_object_name/$',
views.BookArchive.as_view(context_object_name='thingies')),
url(r'^dates/books/allow_empty/$',
views.BookArchive.as_view(allow_empty=True)),
url(r'^dates/books/template_name/$',
views.BookArchive.as_view(template_name='generic_views/list.html')),
url(r'^dates/books/template_name_suffix/$',
views.BookArchive.as_view(template_name_suffix='_detail')),
url(r'^dates/books/invalid/$',
views.BookArchive.as_view(queryset=None)),
url(r'^dates/books/paginated/$',
views.BookArchive.as_view(paginate_by=10)),
url(r'^dates/books/reverse/$',
views.BookArchive.as_view(queryset=models.Book.objects.order_by('pubdate'))),
url(r'^dates/books/by_month/$',
views.BookArchive.as_view(date_list_period='month')),
url(r'^dates/booksignings/$',
views.BookSigningArchive.as_view()),
url(r'^dates/books/sortedbyname/$',
views.BookArchive.as_view(ordering='name')),
url(r'^dates/books/sortedbynamedec/$',
views.BookArchive.as_view(ordering='-name')),
# ListView
url(r'^list/dict/$',
views.DictList.as_view()),
url(r'^list/dict/paginated/$',
views.DictList.as_view(paginate_by=1)),
url(r'^list/artists/$',
views.ArtistList.as_view(),
name="artists_list"),
url(r'^list/authors/$',
views.AuthorList.as_view(),
name="authors_list"),
url(r'^list/authors/paginated/$',
views.AuthorList.as_view(paginate_by=30)),
url(r'^list/authors/paginated/(?P<page>[0-9]+)/$',
views.AuthorList.as_view(paginate_by=30)),
url(r'^list/authors/paginated-orphaned/$',
views.AuthorList.as_view(paginate_by=30, paginate_orphans=2)),
url(r'^list/authors/notempty/$',
views.AuthorList.as_view(allow_empty=False)),
url(r'^list/authors/notempty/paginated/$',
views.AuthorList.as_view(allow_empty=False, paginate_by=2)),
url(r'^list/authors/template_name/$',
views.AuthorList.as_view(template_name='generic_views/list.html')),
url(r'^list/authors/template_name_suffix/$',
views.AuthorList.as_view(template_name_suffix='_objects')),
url(r'^list/authors/context_object_name/$',
views.AuthorList.as_view(context_object_name='author_list')),
url(r'^list/authors/dupe_context_object_name/$',
views.AuthorList.as_view(context_object_name='object_list')),
url(r'^list/authors/invalid/$',
views.AuthorList.as_view(queryset=None)),
url(r'^list/authors/paginated/custom_class/$',
views.AuthorList.as_view(paginate_by=5, paginator_class=views.CustomPaginator)),
url(r'^list/authors/paginated/custom_page_kwarg/$',
views.AuthorList.as_view(paginate_by=30, page_kwarg='pagina')),
url(r'^list/authors/paginated/custom_constructor/$',
views.AuthorListCustomPaginator.as_view()),
url(r'^list/books/sorted/$',
views.BookList.as_view(ordering='name')),
url(r'^list/books/sortedbypagesandnamedec/$',
views.BookList.as_view(ordering=('pages', '-name'))),
# YearArchiveView
# Mixing keyword and positional captures below is intentional; the views
# ought to be able to accept either.
url(r'^dates/books/(?P<year>[0-9]{4})/$',
views.BookYearArchive.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/make_object_list/$',
views.BookYearArchive.as_view(make_object_list=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/allow_empty/$',
views.BookYearArchive.as_view(allow_empty=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/allow_future/$',
views.BookYearArchive.as_view(allow_future=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/paginated/$',
views.BookYearArchive.as_view(make_object_list=True, paginate_by=30)),
url(r'^dates/books/(?P<year>\d{4})/sortedbyname/$',
views.BookYearArchive.as_view(make_object_list=True, ordering='name')),
url(r'^dates/books/(?P<year>\d{4})/sortedbypageandnamedec/$',
views.BookYearArchive.as_view(make_object_list=True, ordering=('pages', '-name'))),
url(r'^dates/books/no_year/$',
views.BookYearArchive.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/reverse/$',
views.BookYearArchive.as_view(queryset=models.Book.objects.order_by('pubdate'))),
url(r'^dates/booksignings/(?P<year>[0-9]{4})/$',
views.BookSigningYearArchive.as_view()),
# MonthArchiveView
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/$',
views.BookMonthArchive.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[0-9]{1,2})/$',
views.BookMonthArchive.as_view(month_format='%m')),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/allow_empty/$',
views.BookMonthArchive.as_view(allow_empty=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/allow_future/$',
views.BookMonthArchive.as_view(allow_future=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/paginated/$',
views.BookMonthArchive.as_view(paginate_by=30)),
url(r'^dates/books/(?P<year>[0-9]{4})/no_month/$',
views.BookMonthArchive.as_view()),
url(r'^dates/booksignings/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/$',
views.BookSigningMonthArchive.as_view()),
# WeekArchiveView
url(r'^dates/books/(?P<year>[0-9]{4})/week/(?P<week>[0-9]{1,2})/$',
views.BookWeekArchive.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/week/(?P<week>[0-9]{1,2})/allow_empty/$',
views.BookWeekArchive.as_view(allow_empty=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/week/(?P<week>[0-9]{1,2})/allow_future/$',
views.BookWeekArchive.as_view(allow_future=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/week/(?P<week>[0-9]{1,2})/paginated/$',
views.BookWeekArchive.as_view(paginate_by=30)),
url(r'^dates/books/(?P<year>[0-9]{4})/week/no_week/$',
views.BookWeekArchive.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/week/(?P<week>[0-9]{1,2})/monday/$',
views.BookWeekArchive.as_view(week_format='%W')),
url(r'^dates/booksignings/(?P<year>[0-9]{4})/week/(?P<week>[0-9]{1,2})/$',
views.BookSigningWeekArchive.as_view()),
# DayArchiveView
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/$',
views.BookDayArchive.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[0-9]{1,2})/(?P<day>[0-9]{1,2})/$',
views.BookDayArchive.as_view(month_format='%m')),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/allow_empty/$',
views.BookDayArchive.as_view(allow_empty=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/allow_future/$',
views.BookDayArchive.as_view(allow_future=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/allow_empty_and_future/$',
views.BookDayArchive.as_view(allow_empty=True, allow_future=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/paginated/$',
views.BookDayArchive.as_view(paginate_by=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/no_day/$',
views.BookDayArchive.as_view()),
url(r'^dates/booksignings/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/$',
views.BookSigningDayArchive.as_view()),
# TodayArchiveView
url(r'^dates/books/today/$',
views.BookTodayArchive.as_view()),
url(r'^dates/books/today/allow_empty/$',
views.BookTodayArchive.as_view(allow_empty=True)),
url(r'^dates/booksignings/today/$',
views.BookSigningTodayArchive.as_view()),
# DateDetailView
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/(?P<pk>[0-9]+)/$',
views.BookDetail.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[0-9]{1,2})/(?P<day>[0-9]{1,2})/(?P<pk>[0-9]+)/$',
views.BookDetail.as_view(month_format='%m')),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/(?P<pk>[0-9]+)/allow_future/$',
views.BookDetail.as_view(allow_future=True)),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/nopk/$',
views.BookDetail.as_view()),
url(r'^dates/books/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/byslug/(?P<slug>[\w-]+)/$',
views.BookDetail.as_view()),
url(r'^dates/books/get_object_custom_queryset/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/(?P<pk>[0-9]+)/$',
views.BookDetailGetObjectCustomQueryset.as_view()),
url(r'^dates/booksignings/(?P<year>[0-9]{4})/(?P<month>[a-z]{3})/(?P<day>[0-9]{1,2})/(?P<pk>[0-9]+)/$',
views.BookSigningDetail.as_view()),
# Useful for testing redirects
url(r'^accounts/login/$', auth_views.login)
]
| bsd-3-clause | -5,119,776,499,333,351,000 | 49.411765 | 127 | 0.616515 | false |
kyrias/cjdns | node_build/dependencies/libuv/build/gyp/test/escaping/gyptest-colon.py | 216 | 1428 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tests that filenames that contain colons are handled correctly.
(This is important for absolute paths on Windows.)
"""
import os
import sys
import TestGyp
# TODO: Make colons in filenames work with make, if required.
test = TestGyp.TestGyp(formats=['!make', '!android'])
CHDIR = 'colon'
source_name = 'colon/a:b.c'
copies_name = 'colon/a:b.c-d'
if sys.platform == 'win32':
# Windows uses : as drive separator and doesn't allow it in regular filenames.
# Use abspath() to create a path that contains a colon instead.
abs_source = os.path.abspath('colon/file.c')
test.write('colon/test.gyp',
test.read('colon/test.gyp').replace("'a:b.c'", repr(abs_source)))
source_name = abs_source
abs_copies = os.path.abspath('colon/file.txt')
test.write('colon/test.gyp',
test.read('colon/test.gyp').replace("'a:b.c-d'", repr(abs_copies)))
copies_name = abs_copies
# Create the file dynamically, Windows is unhappy if a file with a colon in
# its name is checked in.
test.write(source_name, 'int main() {}')
test.write(copies_name, 'foo')
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', test.ALL, chdir=CHDIR)
test.built_file_must_exist(os.path.basename(copies_name), chdir=CHDIR)
test.pass_test()
| gpl-3.0 | 6,800,756,538,011,165,000 | 32.209302 | 80 | 0.69958 | false |
paulsoh/moxie | moxie/social/strategies/django_strategy.py | 52 | 5052 | from django.conf import settings
from django.http import HttpResponse
from django.db.models import Model
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth import authenticate
from django.shortcuts import redirect
from django.template import TemplateDoesNotExist, RequestContext, loader
from django.utils.encoding import force_text
from django.utils.functional import Promise
from django.utils.translation import get_language
from social.strategies.base import BaseStrategy, BaseTemplateStrategy
class DjangoTemplateStrategy(BaseTemplateStrategy):
def render_template(self, tpl, context):
template = loader.get_template(tpl)
return template.render(RequestContext(self.strategy.request, context))
def render_string(self, html, context):
template = loader.get_template_from_string(html)
return template.render(RequestContext(self.strategy.request, context))
class DjangoStrategy(BaseStrategy):
DEFAULT_TEMPLATE_STRATEGY = DjangoTemplateStrategy
def __init__(self, storage, request=None, tpl=None):
self.request = request
self.session = request.session if request else {}
super(DjangoStrategy, self).__init__(storage, tpl)
def get_setting(self, name):
value = getattr(settings, name)
# Force text on URL named settings that are instance of Promise
if name.endswith('_URL') and isinstance(value, Promise):
value = force_text(value)
return value
def request_data(self, merge=True):
if not self.request:
return {}
if merge:
data = self.request.GET.copy()
data.update(self.request.POST)
elif self.request.method == 'POST':
data = self.request.POST
else:
data = self.request.GET
return data
def request_host(self):
if self.request:
return self.request.get_host()
def request_is_secure(self):
"""Is the request using HTTPS?"""
return self.request.is_secure()
def request_path(self):
"""path of the current request"""
return self.request.path
def request_port(self):
"""Port in use for this request"""
return self.request.META['SERVER_PORT']
def request_get(self):
"""Request GET data"""
return self.request.GET.copy()
def request_post(self):
"""Request POST data"""
return self.request.POST.copy()
def redirect(self, url):
return redirect(url)
def html(self, content):
return HttpResponse(content, content_type='text/html;charset=UTF-8')
def render_html(self, tpl=None, html=None, context=None):
if not tpl and not html:
raise ValueError('Missing template or html parameters')
context = context or {}
try:
template = loader.get_template(tpl)
except TemplateDoesNotExist:
template = loader.get_template_from_string(html)
return template.render(RequestContext(self.request, context))
def authenticate(self, backend, *args, **kwargs):
kwargs['strategy'] = self
kwargs['storage'] = self.storage
kwargs['backend'] = backend
return authenticate(*args, **kwargs)
def session_get(self, name, default=None):
return self.session.get(name, default)
def session_set(self, name, value):
self.session[name] = value
if hasattr(self.session, 'modified'):
self.session.modified = True
def session_pop(self, name):
return self.session.pop(name, None)
def session_setdefault(self, name, value):
return self.session.setdefault(name, value)
def build_absolute_uri(self, path=None):
if self.request:
return self.request.build_absolute_uri(path)
else:
return path
def random_string(self, length=12, chars=BaseStrategy.ALLOWED_CHARS):
try:
from django.utils.crypto import get_random_string
except ImportError: # django < 1.4
return super(DjangoStrategy, self).random_string(length, chars)
else:
return get_random_string(length, chars)
def to_session_value(self, val):
"""Converts values that are instance of Model to a dictionary
with enough information to retrieve the instance back later."""
if isinstance(val, Model):
val = {
'pk': val.pk,
'ctype': ContentType.objects.get_for_model(val).pk
}
return val
def from_session_value(self, val):
"""Converts back the instance saved by self._ctype function."""
if isinstance(val, dict) and 'pk' in val and 'ctype' in val:
ctype = ContentType.objects.get_for_id(val['ctype'])
ModelClass = ctype.model_class()
val = ModelClass.objects.get(pk=val['pk'])
return val
def get_language(self):
"""Return current language"""
return get_language()
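    # Example (sketch): round-tripping a model instance through the session
    # store; to_session_value() keeps only the primary key and content type,
    # and from_session_value() resolves them back to the instance.
    #
    #   data = strategy.to_session_value(user)   # {'pk': ..., 'ctype': ...}
    #   user = strategy.from_session_value(data)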
| mit | 9,086,650,016,513,033,000 | 33.60274 | 78 | 0.642122 | false |
ropik/androguard | androguard/core/bytecodes/apk.py | 1 | 58292 | # This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from androguard.core import bytecode
from androguard.core import androconf
from androguard.core.bytecodes.dvm_permissions import DVM_PERMISSIONS
from androguard.util import read
import StringIO
from struct import pack, unpack
from xml.sax.saxutils import escape
from zlib import crc32
import re
from xml.dom import minidom
NS_ANDROID_URI = 'http://schemas.android.com/apk/res/android'
# 0: chilkat
# 1: default python zipfile module
# 2: patch zipfile module
ZIPMODULE = 1
import sys
if sys.hexversion < 0x2070000:
try:
import chilkat
ZIPMODULE = 0
# UNLOCK : change it with your valid key !
try:
CHILKAT_KEY = read("key.txt")
except Exception:
CHILKAT_KEY = "testme"
except ImportError:
ZIPMODULE = 1
else:
ZIPMODULE = 1
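# Example (sketch): the backend selected above is only a default; it can be
# overridden per-file through the `zipmodule` parameter of the APK class
# defined below, e.g. APK("myfile.apk", zipmodule=2) to force the patched
# zipfile module.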
################################################### CHILKAT ZIP FORMAT #####################################################
class ChilkatZip(object):
def __init__(self, raw):
self.files = []
self.zip = chilkat.CkZip()
self.zip.UnlockComponent( CHILKAT_KEY )
self.zip.OpenFromMemory( raw, len(raw) )
filename = chilkat.CkString()
e = self.zip.FirstEntry()
while e != None:
e.get_FileName(filename)
self.files.append( filename.getString() )
e = e.NextEntry()
def delete(self, patterns):
el = []
filename = chilkat.CkString()
e = self.zip.FirstEntry()
while e != None:
e.get_FileName(filename)
if re.match(patterns, filename.getString()) != None:
el.append( e )
e = e.NextEntry()
for i in el:
self.zip.DeleteEntry( i )
def remplace_file(self, filename, buff):
entry = self.zip.GetEntryByName(filename)
if entry != None:
obj = chilkat.CkByteData()
obj.append2( buff, len(buff) )
return entry.ReplaceData( obj )
return False
def write(self):
obj = chilkat.CkByteData()
self.zip.WriteToMemory( obj )
return obj.getBytes()
def namelist(self):
return self.files
def read(self, elem):
e = self.zip.GetEntryByName( elem )
s = chilkat.CkByteData()
e.Inflate( s )
return s.getBytes()
def sign_apk(filename, keystore, storepass):
from subprocess import Popen, PIPE, STDOUT
compile = Popen([androconf.CONF["PATH_JARSIGNER"],
"-sigalg",
"MD5withRSA",
"-digestalg",
"SHA1",
"-storepass",
storepass,
"-keystore",
keystore,
filename,
"alias_name"],
stdout=PIPE, stderr=STDOUT)
stdout, stderr = compile.communicate()
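# Example usage (sketch): this assumes androconf.CONF["PATH_JARSIGNER"]
# points to a working jarsigner binary and that "release.keystore" is an
# existing keystore unlocked by `storepass`.
#
#   sign_apk("myfile.apk", "release.keystore", "storepass")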
######################################################## APK FORMAT ########################################################
class APK(object):
"""
    This class provides access to all elements in an APK file
:param filename: specify the path of the file, or raw data
:param raw: specify if the filename is a path or raw data (optional)
:param mode: specify the mode to open the file (optional)
:param magic_file: specify the magic file (optional)
:param zipmodule: specify the type of zip module to use (0:chilkat, 1:zipfile, 2:patch zipfile)
:type filename: string
:type raw: boolean
:type mode: string
:type magic_file: string
:type zipmodule: int
:Example:
APK("myfile.apk")
APK(read("myfile.apk"), raw=True)
"""
def __init__(self, filename, raw=False, mode="r", magic_file=None, zipmodule=ZIPMODULE):
self.filename = filename
self.xml = {}
self.axml = {}
self.arsc = {}
self.package = ""
self.androidversion = {}
self.permissions = []
self.valid_apk = False
self.files = {}
self.files_crc32 = {}
self.magic_file = magic_file
if raw == True:
self.__raw = filename
else:
self.__raw = read(filename)
self.zipmodule = zipmodule
if zipmodule == 0:
self.zip = ChilkatZip(self.__raw)
elif zipmodule == 2:
from androguard.patch import zipfile
self.zip = zipfile.ZipFile(StringIO.StringIO(self.__raw), mode=mode)
else:
import zipfile
self.zip = zipfile.ZipFile(StringIO.StringIO(self.__raw), mode=mode)
for i in self.zip.namelist():
if i == "AndroidManifest.xml":
self.axml[i] = AXMLPrinter(self.zip.read(i))
try:
self.xml[i] = minidom.parseString(self.axml[i].get_buff())
except:
self.xml[i] = None
if self.xml[i] != None:
self.package = self.xml[i].documentElement.getAttribute("package")
self.androidversion["Code"] = self.xml[i].documentElement.getAttributeNS(NS_ANDROID_URI, "versionCode")
self.androidversion["Name"] = self.xml[i].documentElement.getAttributeNS(NS_ANDROID_URI, "versionName")
for item in self.xml[i].getElementsByTagName('uses-permission'):
self.permissions.append(str(item.getAttributeNS(NS_ANDROID_URI, "name")))
self.valid_apk = True
self.get_files_types()
def get_AndroidManifest(self):
"""
Return the Android Manifest XML file
:rtype: xml object
"""
return self.xml["AndroidManifest.xml"]
def is_valid_APK(self):
"""
Return true if the APK is valid, false otherwise
:rtype: boolean
"""
return self.valid_apk
def get_filename(self):
"""
Return the filename of the APK
:rtype: string
"""
return self.filename
def get_package(self):
"""
Return the name of the package
:rtype: string
"""
return self.package
def get_androidversion_code(self):
"""
Return the android version code
:rtype: string
"""
return self.androidversion["Code"]
def get_androidversion_name(self):
"""
Return the android version name
:rtype: string
"""
return self.androidversion["Name"]
def get_files(self):
"""
Return the files inside the APK
:rtype: a list of strings
"""
return self.zip.namelist()
def get_files_types(self):
"""
Return the files inside the APK with their associated types (by using python-magic)
        :rtype: a dictionary
"""
try:
import magic
except ImportError:
# no lib magic !
for i in self.get_files():
buffer = self.zip.read(i)
self.files_crc32[i] = crc32(buffer)
self.files[i] = "Unknown"
return self.files
if self.files != {}:
return self.files
builtin_magic = 0
try:
getattr(magic, "MagicException")
except AttributeError:
builtin_magic = 1
if builtin_magic:
ms = magic.open(magic.MAGIC_NONE)
ms.load()
for i in self.get_files():
buffer = self.zip.read(i)
self.files[i] = ms.buffer(buffer)
self.files[i] = self._patch_magic(buffer, self.files[i])
self.files_crc32[i] = crc32(buffer)
else:
m = magic.Magic(magic_file=self.magic_file)
for i in self.get_files():
buffer = self.zip.read(i)
self.files[i] = m.from_buffer(buffer)
self.files[i] = self._patch_magic(buffer, self.files[i])
self.files_crc32[i] = crc32(buffer)
return self.files
def _patch_magic(self, buffer, orig):
if ("Zip" in orig) or ("DBase" in orig):
val = androconf.is_android_raw(buffer)
if val == "APK":
if androconf.is_valid_android_raw(buffer):
return "Android application package file"
elif val == "AXML":
return "Android's binary XML"
return orig
def get_files_crc32(self):
if self.files_crc32 == {}:
self.get_files_types()
return self.files_crc32
def get_files_information(self):
"""
Return the files inside the APK with their associated types and crc32
:rtype: string, string, int
"""
if self.files == {}:
self.get_files_types()
for i in self.get_files():
try:
yield i, self.files[i], self.files_crc32[i]
except KeyError:
yield i, "", ""
def get_raw(self):
"""
Return raw bytes of the APK
:rtype: string
"""
return self.__raw
def get_file(self, filename):
"""
Return the raw data of the specified filename
:rtype: string
"""
try:
return self.zip.read(filename)
except KeyError:
return ""
def get_dex(self):
"""
Return the raw data of the classes dex file
:rtype: string
"""
return self.get_file("classes.dex")
def get_elements(self, tag_name, attribute):
"""
        Return elements in xml files that match the tag name and the specific attribute
:param tag_name: a string which specify the tag name
:param attribute: a string which specify the attribute
"""
l = []
for i in self.xml:
for item in self.xml[i].getElementsByTagName(tag_name):
value = item.getAttributeNS(NS_ANDROID_URI, attribute)
value = self.format_value( value )
l.append( str( value ) )
return l
def format_value(self, value):
if len(value) > 0:
if value[0] == ".":
value = self.package + value
else:
v_dot = value.find(".")
if v_dot == 0:
value = self.package + "." + value
elif v_dot == -1:
value = self.package + "." + value
return value
def get_element(self, tag_name, attribute):
"""
        Return the element in xml files that matches the tag name and the specific attribute
:param tag_name: specify the tag name
:type tag_name: string
:param attribute: specify the attribute
:type attribute: string
:rtype: string
"""
for i in self.xml:
for item in self.xml[i].getElementsByTagName(tag_name):
value = item.getAttributeNS(NS_ANDROID_URI, attribute)
if len(value) > 0:
return value
return None
def get_main_activity(self):
"""
Return the name of the main activity
:rtype: string
"""
x = set()
y = set()
for i in self.xml:
for item in self.xml[i].getElementsByTagName("activity"):
for sitem in item.getElementsByTagName( "action" ):
val = sitem.getAttributeNS(NS_ANDROID_URI, "name" )
if val == "android.intent.action.MAIN":
x.add( item.getAttributeNS(NS_ANDROID_URI, "name" ) )
for sitem in item.getElementsByTagName( "category" ):
val = sitem.getAttributeNS(NS_ANDROID_URI, "name" )
if val == "android.intent.category.LAUNCHER":
y.add( item.getAttributeNS(NS_ANDROID_URI, "name" ) )
z = x.intersection(y)
if len(z) > 0:
return self.format_value(z.pop())
return None
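    # Example (sketch): resolving the launcher activity of an application.
    #
    #   a = APK("myfile.apk")
    #   print a.get_main_activity()   # e.g. u'com.example.app.MainActivity'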
def get_activities(self):
"""
Return the android:name attribute of all activities
:rtype: a list of string
"""
return self.get_elements("activity", "name")
def get_services(self):
"""
Return the android:name attribute of all services
:rtype: a list of string
"""
return self.get_elements("service", "name")
def get_receivers(self):
"""
Return the android:name attribute of all receivers
:rtype: a list of string
"""
return self.get_elements("receiver", "name")
def get_providers(self):
"""
Return the android:name attribute of all providers
:rtype: a list of string
"""
return self.get_elements("provider", "name")
def get_intent_filters(self, category, name):
d = {}
d["action"] = []
d["category"] = []
for i in self.xml:
for item in self.xml[i].getElementsByTagName(category):
if self.format_value(item.getAttributeNS(NS_ANDROID_URI, "name")) == name:
for sitem in item.getElementsByTagName("intent-filter"):
for ssitem in sitem.getElementsByTagName("action"):
if ssitem.getAttributeNS(NS_ANDROID_URI, "name") not in d["action"]:
d["action"].append(ssitem.getAttributeNS(NS_ANDROID_URI, "name"))
for ssitem in sitem.getElementsByTagName("category"):
if ssitem.getAttributeNS(NS_ANDROID_URI, "name") not in d["category"]:
d["category"].append(ssitem.getAttributeNS(NS_ANDROID_URI, "name"))
if not d["action"]:
del d["action"]
if not d["category"]:
del d["category"]
return d
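    # Example (sketch): listing the intent filters of the main activity; a
    # typical launcher activity would yield:
    #
    #   a.get_intent_filters("activity", a.get_main_activity())
    #   # -> {'action': ['android.intent.action.MAIN'],
    #   #     'category': ['android.intent.category.LAUNCHER']}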
def get_permissions(self):
"""
Return permissions
:rtype: list of string
"""
return self.permissions
def get_details_permissions(self):
"""
Return permissions with details
:rtype: list of string
"""
l = {}
for i in self.permissions:
perm = i
pos = i.rfind(".")
if pos != -1:
perm = i[pos+1:]
try:
l[ i ] = DVM_PERMISSIONS["MANIFEST_PERMISSION"][ perm ]
except KeyError:
l[ i ] = [ "normal", "Unknown permission from android reference", "Unknown permission from android reference" ]
return l
def get_max_sdk_version(self):
"""
Return the android:maxSdkVersion attribute
:rtype: string
"""
return self.get_element("uses-sdk", "maxSdkVersion")
def get_min_sdk_version(self):
"""
Return the android:minSdkVersion attribute
:rtype: string
"""
return self.get_element("uses-sdk", "minSdkVersion")
def get_target_sdk_version(self):
"""
Return the android:targetSdkVersion attribute
:rtype: string
"""
return self.get_element( "uses-sdk", "targetSdkVersion" )
def get_libraries(self):
"""
Return the android:name attributes for libraries
:rtype: list
"""
return self.get_elements( "uses-library", "name" )
def get_certificate(self, filename):
"""
Return a certificate object by giving the name in the apk file
"""
import chilkat
cert = chilkat.CkCert()
f = self.get_file(filename)
data = chilkat.CkByteData()
data.append2(f, len(f))
success = cert.LoadFromBinary(data)
return success, cert
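    # Example (sketch, requires the chilkat module and a valid unlock key):
    #
    #   success, cert = a.get_certificate(a.get_signature_name())
    #   if success:
    #       show_Certificate(cert)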
def new_zip(self, filename, deleted_files=None, new_files={}):
"""
Create a new zip file
:param filename: the output filename of the zip
:param deleted_files: a regex pattern to remove specific file
        :param new_files: a dictionary of new files
:type filename: string
:type deleted_files: None or a string
        :type new_files: a dictionary (key:filename, value:content of the file)
"""
if self.zipmodule == 2:
from androguard.patch import zipfile
zout = zipfile.ZipFile(filename, 'w')
else:
import zipfile
zout = zipfile.ZipFile(filename, 'w')
for item in self.zip.infolist():
if deleted_files != None:
if re.match(deleted_files, item.filename) == None:
if item.filename in new_files:
zout.writestr(item, new_files[item.filename])
else:
buffer = self.zip.read(item.filename)
zout.writestr(item, buffer)
zout.close()
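    # Example (sketch): writing a copy of the APK without its signature files
    # while replacing the manifest; `new_manifest_data` is a hypothetical
    # string holding the new binary manifest.
    #
    #   a.new_zip("out.apk",
    #             deleted_files="META-INF/.*",
    #             new_files={"AndroidManifest.xml": new_manifest_data})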
def get_android_manifest_axml(self):
"""
Return the :class:`AXMLPrinter` object which corresponds to the AndroidManifest.xml file
:rtype: :class:`AXMLPrinter`
"""
try:
return self.axml["AndroidManifest.xml"]
except KeyError:
return None
def get_android_manifest_xml(self):
"""
Return the xml object which corresponds to the AndroidManifest.xml file
:rtype: object
"""
try:
return self.xml["AndroidManifest.xml"]
except KeyError:
return None
def get_android_resources(self):
"""
Return the :class:`ARSCParser` object which corresponds to the resources.arsc file
:rtype: :class:`ARSCParser`
"""
try:
return self.arsc["resources.arsc"]
except KeyError:
try:
self.arsc["resources.arsc"] = ARSCParser(self.zip.read("resources.arsc"))
return self.arsc["resources.arsc"]
except KeyError:
return None
def get_signature_name(self):
signature_expr = re.compile("^(META-INF/)(.*)(\.RSA|\.DSA)$")
for i in self.get_files():
if signature_expr.search(i):
return i
return None
def get_signature(self):
signature_expr = re.compile("^(META-INF/)(.*)(\.RSA|\.DSA)$")
for i in self.get_files():
if signature_expr.search(i):
return self.get_file(i)
return None
def show(self):
self.get_files_types()
print "FILES: "
for i in self.get_files():
try:
print "\t", i, self.files[i], "%x" % self.files_crc32[i]
except KeyError:
print "\t", i, "%x" % self.files_crc32[i]
print "PERMISSIONS: "
details_permissions = self.get_details_permissions()
for i in details_permissions:
print "\t", i, details_permissions[i]
print "MAIN ACTIVITY: ", self.get_main_activity()
print "ACTIVITIES: "
activities = self.get_activities()
for i in activities:
filters = self.get_intent_filters("activity", i)
print "\t", i, filters or ""
print "SERVICES: "
services = self.get_services()
for i in services:
filters = self.get_intent_filters("service", i)
print "\t", i, filters or ""
print "RECEIVERS: "
receivers = self.get_receivers()
for i in receivers:
filters = self.get_intent_filters("receiver", i)
print "\t", i, filters or ""
print "PROVIDERS: ", self.get_providers()
def show_Certificate(cert):
print "Issuer: C=%s, CN=%s, DN=%s, E=%s, L=%s, O=%s, OU=%s, S=%s" % (cert.issuerC(), cert.issuerCN(), cert.issuerDN(), cert.issuerE(), cert.issuerL(), cert.issuerO(), cert.issuerOU(), cert.issuerS())
print "Subject: C=%s, CN=%s, DN=%s, E=%s, L=%s, O=%s, OU=%s, S=%s" % (cert.subjectC(), cert.subjectCN(), cert.subjectDN(), cert.subjectE(), cert.subjectL(), cert.subjectO(), cert.subjectOU(), cert.subjectS())
######################################################## AXML FORMAT ########################################################
# Translated from http://code.google.com/p/android4me/source/browse/src/android/content/res/AXmlResourceParser.java
UTF8_FLAG = 0x00000100
class StringBlock(object):
def __init__(self, buff):
self.start = buff.get_idx()
self._cache = {}
self.header = unpack('<h', buff.read(2))[0]
self.header_size = unpack('<h', buff.read(2))[0]
self.chunkSize = unpack('<i', buff.read(4))[0]
self.stringCount = unpack('<i', buff.read(4))[0]
self.styleOffsetCount = unpack('<i', buff.read(4))[0]
self.flags = unpack('<i', buff.read(4))[0]
self.m_isUTF8 = ((self.flags & UTF8_FLAG) != 0)
self.stringsOffset = unpack('<i', buff.read(4))[0]
self.stylesOffset = unpack('<i', buff.read(4))[0]
self.m_stringOffsets = []
self.m_styleOffsets = []
self.m_strings = []
self.m_styles = []
for i in range(0, self.stringCount):
self.m_stringOffsets.append(unpack('<i', buff.read(4))[0])
for i in range(0, self.styleOffsetCount):
self.m_styleOffsets.append(unpack('<i', buff.read(4))[0])
size = self.chunkSize - self.stringsOffset
if self.stylesOffset != 0:
size = self.stylesOffset - self.stringsOffset
# FIXME
if (size % 4) != 0:
androconf.warning("ooo")
for i in range(0, size):
self.m_strings.append(unpack('=b', buff.read(1))[0])
if self.stylesOffset != 0:
size = self.chunkSize - self.stylesOffset
# FIXME
if (size % 4) != 0:
androconf.warning("ooo")
for i in range(0, size / 4):
self.m_styles.append(unpack('<i', buff.read(4))[0])
def getString(self, idx):
if idx in self._cache:
return self._cache[idx]
if idx < 0 or not self.m_stringOffsets or idx >= len(self.m_stringOffsets):
return ""
offset = self.m_stringOffsets[idx]
if not self.m_isUTF8:
length = self.getShort2(self.m_strings, offset)
offset += 2
self._cache[idx] = self.decode(self.m_strings, offset, length)
else:
offset += self.getVarint(self.m_strings, offset)[1]
varint = self.getVarint(self.m_strings, offset)
offset += varint[1]
length = varint[0]
self._cache[idx] = self.decode2(self.m_strings, offset, length)
return self._cache[idx]
def getStyle(self, idx):
print idx
print idx in self.m_styleOffsets, self.m_styleOffsets[idx]
print self.m_styles[0]
def decode(self, array, offset, length):
length = length * 2
length = length + length % 2
data = ""
for i in range(0, length):
t_data = pack("=b", self.m_strings[offset + i])
data += unicode(t_data, errors='ignore')
if data[-2:] == "\x00\x00":
break
end_zero = data.find("\x00\x00")
if end_zero != -1:
data = data[:end_zero]
return data.decode("utf-16", 'replace')
def decode2(self, array, offset, length):
data = ""
for i in range(0, length):
t_data = pack("=b", self.m_strings[offset + i])
data += unicode(t_data, errors='ignore')
return data.decode("utf-8", 'replace')
def getVarint(self, array, offset):
val = array[offset]
more = (val & 0x80) != 0
val &= 0x7f
if not more:
return val, 1
return val << 8 | array[offset + 1] & 0xff, 2
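    # Worked example (sketch) for the varint length prefix used by UTF-8
    # string pools: [0x05] decodes to (5, 1); [0x81, 0x05] has the
    # continuation bit set and decodes to ((0x81 & 0x7f) << 8) | 0x05 =
    # 0x105, i.e. (261, 2).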
def getShort(self, array, offset):
value = array[offset / 4]
if ((offset % 4) / 2) == 0:
return value & 0xFFFF
else:
return value >> 16
def getShort2(self, array, offset):
return (array[offset + 1] & 0xff) << 8 | array[offset] & 0xff
def show(self):
print "StringBlock", hex(self.start), hex(self.header), hex(self.header_size), hex(self.chunkSize), hex(self.stringsOffset), self.m_stringOffsets
for i in range(0, len(self.m_stringOffsets)):
print i, repr(self.getString(i))
ATTRIBUTE_IX_NAMESPACE_URI = 0
ATTRIBUTE_IX_NAME = 1
ATTRIBUTE_IX_VALUE_STRING = 2
ATTRIBUTE_IX_VALUE_TYPE = 3
ATTRIBUTE_IX_VALUE_DATA = 4
ATTRIBUTE_LENGHT = 5
CHUNK_AXML_FILE = 0x00080003
CHUNK_RESOURCEIDS = 0x00080180
CHUNK_XML_FIRST = 0x00100100
CHUNK_XML_START_NAMESPACE = 0x00100100
CHUNK_XML_END_NAMESPACE = 0x00100101
CHUNK_XML_START_TAG = 0x00100102
CHUNK_XML_END_TAG = 0x00100103
CHUNK_XML_TEXT = 0x00100104
CHUNK_XML_LAST = 0x00100104
START_DOCUMENT = 0
END_DOCUMENT = 1
START_TAG = 2
END_TAG = 3
TEXT = 4
class AXMLParser(object):
def __init__(self, raw_buff):
self.reset()
self.valid_axml = True
self.buff = bytecode.BuffHandle(raw_buff)
axml_file = unpack('<L', self.buff.read(4))[0]
if axml_file == CHUNK_AXML_FILE:
self.buff.read(4)
self.sb = StringBlock(self.buff)
self.m_resourceIDs = []
self.m_prefixuri = {}
self.m_uriprefix = {}
self.m_prefixuriL = []
self.visited_ns = []
else:
self.valid_axml = False
androconf.warning("Not a valid xml file")
def is_valid(self):
return self.valid_axml
def reset(self):
self.m_event = -1
self.m_lineNumber = -1
self.m_name = -1
self.m_namespaceUri = -1
self.m_attributes = []
self.m_idAttribute = -1
self.m_classAttribute = -1
self.m_styleAttribute = -1
def next(self):
self.doNext()
return self.m_event
def doNext(self):
if self.m_event == END_DOCUMENT:
return
event = self.m_event
self.reset()
while True:
chunkType = -1
# Fake END_DOCUMENT event.
if event == END_TAG:
pass
# START_DOCUMENT
if event == START_DOCUMENT:
chunkType = CHUNK_XML_START_TAG
else:
if self.buff.end():
self.m_event = END_DOCUMENT
break
chunkType = unpack('<L', self.buff.read(4))[0]
if chunkType == CHUNK_RESOURCEIDS:
chunkSize = unpack('<L', self.buff.read(4))[0]
# FIXME
if chunkSize < 8 or chunkSize % 4 != 0:
androconf.warning("Invalid chunk size")
for i in range(0, chunkSize / 4 - 2):
self.m_resourceIDs.append(unpack('<L', self.buff.read(4))[0])
continue
# FIXME
if chunkType < CHUNK_XML_FIRST or chunkType > CHUNK_XML_LAST:
androconf.warning("invalid chunk type")
# Fake START_DOCUMENT event.
if chunkType == CHUNK_XML_START_TAG and event == -1:
self.m_event = START_DOCUMENT
break
self.buff.read(4) # /*chunkSize*/
lineNumber = unpack('<L', self.buff.read(4))[0]
self.buff.read(4) # 0xFFFFFFFF
if chunkType == CHUNK_XML_START_NAMESPACE or chunkType == CHUNK_XML_END_NAMESPACE:
if chunkType == CHUNK_XML_START_NAMESPACE:
prefix = unpack('<L', self.buff.read(4))[0]
uri = unpack('<L', self.buff.read(4))[0]
self.m_prefixuri[prefix] = uri
self.m_uriprefix[uri] = prefix
self.m_prefixuriL.append((prefix, uri))
self.ns = uri
else:
self.ns = -1
self.buff.read(4)
self.buff.read(4)
(prefix, uri) = self.m_prefixuriL.pop()
#del self.m_prefixuri[ prefix ]
#del self.m_uriprefix[ uri ]
continue
self.m_lineNumber = lineNumber
if chunkType == CHUNK_XML_START_TAG:
self.m_namespaceUri = unpack('<L', self.buff.read(4))[0]
self.m_name = unpack('<L', self.buff.read(4))[0]
# FIXME
self.buff.read(4) # flags
attributeCount = unpack('<L', self.buff.read(4))[0]
self.m_idAttribute = (attributeCount >> 16) - 1
attributeCount = attributeCount & 0xFFFF
self.m_classAttribute = unpack('<L', self.buff.read(4))[0]
self.m_styleAttribute = (self.m_classAttribute >> 16) - 1
self.m_classAttribute = (self.m_classAttribute & 0xFFFF) - 1
for i in range(0, attributeCount * ATTRIBUTE_LENGHT):
self.m_attributes.append(unpack('<L', self.buff.read(4))[0])
for i in range(ATTRIBUTE_IX_VALUE_TYPE, len(self.m_attributes), ATTRIBUTE_LENGHT):
self.m_attributes[i] = self.m_attributes[i] >> 24
self.m_event = START_TAG
break
if chunkType == CHUNK_XML_END_TAG:
self.m_namespaceUri = unpack('<L', self.buff.read(4))[0]
self.m_name = unpack('<L', self.buff.read(4))[0]
self.m_event = END_TAG
break
if chunkType == CHUNK_XML_TEXT:
self.m_name = unpack('<L', self.buff.read(4))[0]
# FIXME
self.buff.read(4)
self.buff.read(4)
self.m_event = TEXT
break
def getPrefixByUri(self, uri):
try:
return self.m_uriprefix[uri]
except KeyError:
return -1
def getPrefix(self):
try:
return self.sb.getString(self.m_uriprefix[self.m_namespaceUri])
except KeyError:
return u''
def getName(self):
if self.m_name == -1 or (self.m_event != START_TAG and self.m_event != END_TAG):
return u''
return self.sb.getString(self.m_name)
def getText(self):
if self.m_name == -1 or self.m_event != TEXT:
return u''
return self.sb.getString(self.m_name)
def getNamespacePrefix(self, pos):
prefix = self.m_prefixuriL[pos][0]
return self.sb.getString(prefix)
def getNamespaceUri(self, pos):
uri = self.m_prefixuriL[pos][1]
return self.sb.getString(uri)
def getXMLNS(self):
buff = ""
for i in self.m_uriprefix:
if i not in self.visited_ns:
buff += "xmlns:%s=\"%s\"\n" % (self.sb.getString(self.m_uriprefix[i]), self.sb.getString(self.m_prefixuri[self.m_uriprefix[i]]))
self.visited_ns.append(i)
return buff
def getNamespaceCount(self, pos):
pass
def getAttributeOffset(self, index):
# FIXME
if self.m_event != START_TAG:
androconf.warning("Current event is not START_TAG.")
offset = index * 5
# FIXME
if offset >= len(self.m_attributes):
androconf.warning("Invalid attribute index")
return offset
def getAttributeCount(self):
if self.m_event != START_TAG:
return -1
return len(self.m_attributes) / ATTRIBUTE_LENGHT
def getAttributePrefix(self, index):
offset = self.getAttributeOffset(index)
uri = self.m_attributes[offset + ATTRIBUTE_IX_NAMESPACE_URI]
prefix = self.getPrefixByUri(uri)
if prefix == -1:
return ""
return self.sb.getString(prefix)
def getAttributeName(self, index):
offset = self.getAttributeOffset(index)
name = self.m_attributes[offset+ATTRIBUTE_IX_NAME]
if name == -1:
return ""
return self.sb.getString( name )
def getAttributeValueType(self, index):
offset = self.getAttributeOffset(index)
return self.m_attributes[offset+ATTRIBUTE_IX_VALUE_TYPE]
def getAttributeValueData(self, index):
offset = self.getAttributeOffset(index)
return self.m_attributes[offset+ATTRIBUTE_IX_VALUE_DATA]
def getAttributeValue(self, index):
offset = self.getAttributeOffset(index)
valueType = self.m_attributes[offset+ATTRIBUTE_IX_VALUE_TYPE]
if valueType == TYPE_STRING:
valueString = self.m_attributes[offset+ATTRIBUTE_IX_VALUE_STRING]
return self.sb.getString( valueString )
# WIP
return ""
#int valueData=m_attributes[offset+ATTRIBUTE_IX_VALUE_DATA];
#return TypedValue.coerceToString(valueType,valueData);
TYPE_ATTRIBUTE = 2
TYPE_DIMENSION = 5
TYPE_FIRST_COLOR_INT = 28
TYPE_FIRST_INT = 16
TYPE_FLOAT = 4
TYPE_FRACTION = 6
TYPE_INT_BOOLEAN = 18
TYPE_INT_COLOR_ARGB4 = 30
TYPE_INT_COLOR_ARGB8 = 28
TYPE_INT_COLOR_RGB4 = 31
TYPE_INT_COLOR_RGB8 = 29
TYPE_INT_DEC = 16
TYPE_INT_HEX = 17
TYPE_LAST_COLOR_INT = 31
TYPE_LAST_INT = 31
TYPE_NULL = 0
TYPE_REFERENCE = 1
TYPE_STRING = 3
RADIX_MULTS = [ 0.00390625, 3.051758E-005, 1.192093E-007, 4.656613E-010 ]
DIMENSION_UNITS = [ "px","dip","sp","pt","in","mm" ]
FRACTION_UNITS = [ "%", "%p" ]
COMPLEX_UNIT_MASK = 15
def complexToFloat(xcomplex):
return (float)(xcomplex & 0xFFFFFF00) * RADIX_MULTS[(xcomplex >> 4) & 3]
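# Worked example (sketch): a TYPE_DIMENSION word of 0x1401 keeps 0x1400 as
# its mantissa, selects RADIX_MULTS[(0x1401 >> 4) & 3] = RADIX_MULTS[0]
# (1/256) as the radix and DIMENSION_UNITS[0x1401 & COMPLEX_UNIT_MASK] =
# DIMENSION_UNITS[1] = "dip" as the unit, so it decodes to
# 0x1400 * (1/256) = 20.0 and prints as "20.000000dip".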
class AXMLPrinter(object):
def __init__(self, raw_buff):
self.axml = AXMLParser(raw_buff)
self.xmlns = False
self.buff = u''
        while self.axml.is_valid():
_type = self.axml.next()
# print "tagtype = ", _type
if _type == START_DOCUMENT:
self.buff += u'<?xml version="1.0" encoding="utf-8"?>\n'
elif _type == START_TAG:
self.buff += u'<' + self.getPrefix(self.axml.getPrefix()) + self.axml.getName() + u'\n'
self.buff += self.axml.getXMLNS()
for i in range(0, self.axml.getAttributeCount()):
self.buff += "%s%s=\"%s\"\n" % (self.getPrefix(
self.axml.getAttributePrefix(i)), self.axml.getAttributeName(i), self._escape(self.getAttributeValue(i)))
self.buff += u'>\n'
elif _type == END_TAG:
self.buff += "</%s%s>\n" % (self.getPrefix(self.axml.getPrefix()), self.axml.getName())
elif _type == TEXT:
self.buff += "%s\n" % self.axml.getText()
elif _type == END_DOCUMENT:
break
# pleed patch
    def _escape(self, s):
        s = s.replace("&", "&amp;")
        s = s.replace('"', "&quot;")
        s = s.replace("'", "&apos;")
        s = s.replace("<", "&lt;")
        s = s.replace(">", "&gt;")
        # escape() from xml.sax.saxutils would re-encode the '&' of the
        # entities built above, so the string is returned directly.
        return s
def get_buff(self):
return self.buff.encode('utf-8')
def get_xml(self):
return minidom.parseString(self.get_buff()).toprettyxml(encoding="utf-8")
def get_xml_obj(self):
return minidom.parseString(self.get_buff())
def getPrefix(self, prefix):
if prefix == None or len(prefix) == 0:
return u''
return prefix + u':'
def getAttributeValue(self, index):
_type = self.axml.getAttributeValueType(index)
_data = self.axml.getAttributeValueData(index)
if _type == TYPE_STRING:
return self.axml.getAttributeValue(index)
elif _type == TYPE_ATTRIBUTE:
return "?%s%08X" % (self.getPackage(_data), _data)
elif _type == TYPE_REFERENCE:
return "@%s%08X" % (self.getPackage(_data), _data)
elif _type == TYPE_FLOAT:
return "%f" % unpack("=f", pack("=L", _data))[0]
elif _type == TYPE_INT_HEX:
return "0x%08X" % _data
elif _type == TYPE_INT_BOOLEAN:
if _data == 0:
return "false"
return "true"
elif _type == TYPE_DIMENSION:
return "%f%s" % (complexToFloat(_data), DIMENSION_UNITS[_data & COMPLEX_UNIT_MASK])
elif _type == TYPE_FRACTION:
return "%f%s" % (complexToFloat(_data) * 100, FRACTION_UNITS[_data & COMPLEX_UNIT_MASK])
elif _type >= TYPE_FIRST_COLOR_INT and _type <= TYPE_LAST_COLOR_INT:
return "#%08X" % _data
elif _type >= TYPE_FIRST_INT and _type <= TYPE_LAST_INT:
return "%d" % androconf.long2int(_data)
return "<0x%X, type 0x%02X>" % (_data, _type)
def getPackage(self, id):
if id >> 24 == 1:
return "android:"
return ""
RES_NULL_TYPE = 0x0000
RES_STRING_POOL_TYPE = 0x0001
RES_TABLE_TYPE = 0x0002
RES_XML_TYPE = 0x0003
# Chunk types in RES_XML_TYPE
RES_XML_FIRST_CHUNK_TYPE = 0x0100
RES_XML_START_NAMESPACE_TYPE= 0x0100
RES_XML_END_NAMESPACE_TYPE = 0x0101
RES_XML_START_ELEMENT_TYPE = 0x0102
RES_XML_END_ELEMENT_TYPE = 0x0103
RES_XML_CDATA_TYPE = 0x0104
RES_XML_LAST_CHUNK_TYPE = 0x017f
# This contains a uint32_t array mapping strings in the string
# pool back to resource identifiers. It is optional.
RES_XML_RESOURCE_MAP_TYPE = 0x0180
# Chunk types in RES_TABLE_TYPE
RES_TABLE_PACKAGE_TYPE = 0x0200
RES_TABLE_TYPE_TYPE = 0x0201
RES_TABLE_TYPE_SPEC_TYPE = 0x0202
class ARSCParser(object):
def __init__(self, raw_buff):
self.analyzed = False
self.buff = bytecode.BuffHandle(raw_buff)
#print "SIZE", hex(self.buff.size())
self.header = ARSCHeader(self.buff)
self.packageCount = unpack('<i', self.buff.read(4))[0]
#print hex(self.packageCount)
self.stringpool_main = StringBlock(self.buff)
self.next_header = ARSCHeader(self.buff)
self.packages = {}
self.values = {}
for i in range(0, self.packageCount):
current_package = ARSCResTablePackage(self.buff)
package_name = current_package.get_name()
self.packages[package_name] = []
mTableStrings = StringBlock(self.buff)
mKeyStrings = StringBlock(self.buff)
#self.stringpool_main.show()
#self.mTableStrings.show()
#self.mKeyStrings.show()
self.packages[package_name].append(current_package)
self.packages[package_name].append(mTableStrings)
self.packages[package_name].append(mKeyStrings)
pc = PackageContext(current_package, self.stringpool_main, mTableStrings, mKeyStrings)
current = self.buff.get_idx()
while not self.buff.end():
header = ARSCHeader(self.buff)
self.packages[package_name].append(header)
if header.type == RES_TABLE_TYPE_SPEC_TYPE:
self.packages[package_name].append(ARSCResTypeSpec(self.buff, pc))
elif header.type == RES_TABLE_TYPE_TYPE:
a_res_type = ARSCResType(self.buff, pc)
self.packages[package_name].append(a_res_type)
entries = []
for i in range(0, a_res_type.entryCount):
current_package.mResId = current_package.mResId & 0xffff0000 | i
entries.append((unpack('<i', self.buff.read(4))[0], current_package.mResId))
self.packages[package_name].append(entries)
for entry, res_id in entries:
if self.buff.end():
break
if entry != -1:
ate = ARSCResTableEntry(self.buff, res_id, pc)
self.packages[package_name].append(ate)
elif header.type == RES_TABLE_PACKAGE_TYPE:
break
else:
androconf.warning("unknown type")
break
current += header.size
self.buff.set_idx(current)
def _analyse(self):
if self.analyzed:
return
self.analyzed = True
for package_name in self.packages:
self.values[package_name] = {}
nb = 3
for header in self.packages[package_name][nb:]:
if isinstance(header, ARSCHeader):
if header.type == RES_TABLE_TYPE_TYPE:
a_res_type = self.packages[package_name][nb + 1]
if a_res_type.config.get_language() not in self.values[package_name]:
self.values[package_name][a_res_type.config.get_language()] = {}
self.values[package_name][a_res_type.config.get_language()]["public"] = []
c_value = self.values[package_name][a_res_type.config.get_language()]
entries = self.packages[package_name][nb + 2]
nb_i = 0
for entry, res_id in entries:
if entry != -1:
ate = self.packages[package_name][nb + 3 + nb_i]
#print ate.is_public(), a_res_type.get_type(), ate.get_value(), hex(ate.mResId)
if ate.get_index() != -1:
c_value["public"].append((a_res_type.get_type(), ate.get_value(), ate.mResId))
if a_res_type.get_type() not in c_value:
c_value[a_res_type.get_type()] = []
if a_res_type.get_type() == "string":
c_value["string"].append(self.get_resource_string(ate))
elif a_res_type.get_type() == "id":
if not ate.is_complex():
c_value["id"].append(self.get_resource_id(ate))
elif a_res_type.get_type() == "bool":
if not ate.is_complex():
c_value["bool"].append(self.get_resource_bool(ate))
elif a_res_type.get_type() == "integer":
c_value["integer"].append(self.get_resource_integer(ate))
elif a_res_type.get_type() == "color":
c_value["color"].append(self.get_resource_color(ate))
elif a_res_type.get_type() == "dimen":
c_value["dimen"].append(self.get_resource_dimen(ate))
#elif a_res_type.get_type() == "style":
# c_value["style"].append(self.get_resource_style(ate))
nb_i += 1
nb += 1
def get_resource_string(self, ate):
return [ate.get_value(), ate.get_key_data()]
def get_resource_id(self, ate):
x = [ate.get_value()]
if ate.key.get_data() == 0:
x.append("false")
elif ate.key.get_data() == 1:
x.append("true")
return x
def get_resource_bool(self, ate):
x = [ate.get_value()]
if ate.key.get_data() == 0:
x.append("false")
elif ate.key.get_data() == -1:
x.append("true")
return x
def get_resource_integer(self, ate):
return [ate.get_value(), ate.key.get_data()]
def get_resource_color(self, ate):
entry_data = ate.key.get_data()
return [ate.get_value(), "#%02x%02x%02x%02x" % (((entry_data >> 24) & 0xFF), ((entry_data >> 16) & 0xFF), ((entry_data >> 8) & 0xFF), (entry_data & 0xFF))]
def get_resource_dimen(self, ate):
try:
return [ate.get_value(), "%s%s" % (complexToFloat(ate.key.get_data()), DIMENSION_UNITS[ate.key.get_data() & COMPLEX_UNIT_MASK])]
except Exception, why:
androconf.warning(why.__str__())
return [ate.get_value(), ate.key.get_data()]
# FIXME
def get_resource_style(self, ate):
return ["", ""]
def get_packages_names(self):
return self.packages.keys()
def get_locales(self, package_name):
self._analyse()
return self.values[package_name].keys()
def get_types(self, package_name, locale):
self._analyse()
return self.values[package_name][locale].keys()
def get_public_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["public"]:
buff += '<public type="%s" name="%s" id="0x%08x" />\n' % (i[0], i[1], i[2])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_string_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["string"]:
buff += '<string name="%s">%s</string>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_strings_resources(self):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += "<packages>\n"
for package_name in self.get_packages_names():
buff += "<package name=\"%s\">\n" % package_name
for locale in self.get_locales(package_name):
buff += "<locale value=%s>\n" % repr(locale)
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["string"]:
buff += '<string name="%s">%s</string>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
buff += '</locale>\n'
buff += "</package>\n"
buff += "</packages>\n"
return buff.encode('utf-8')
def get_id_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["id"]:
if len(i) == 1:
buff += '<item type="id" name="%s"/>\n' % (i[0])
else:
buff += '<item type="id" name="%s">%s</item>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_bool_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["bool"]:
buff += '<bool name="%s">%s</bool>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_integer_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["integer"]:
buff += '<integer name="%s">%s</integer>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_color_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["color"]:
buff += '<color name="%s">%s</color>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_dimen_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["dimen"]:
buff += '<dimen name="%s">%s</dimen>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_id(self, package_name, rid, locale='\x00\x00'):
self._analyse()
try:
for i in self.values[package_name][locale]["public"]:
if i[2] == rid:
return i
except KeyError:
return None
def get_string(self, package_name, name, locale='\x00\x00'):
self._analyse()
try:
for i in self.values[package_name][locale]["string"]:
if i[0] == name:
return i
except KeyError:
return None
def get_items(self, package_name):
self._analyse()
return self.packages[package_name]
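# Example (sketch): parsing a resources.arsc file directly and dumping the
# default-locale strings of its first package.
#
#   arsc = ARSCParser(read("resources.arsc"))
#   package = arsc.get_packages_names()[0]
#   print arsc.get_string_resources(package)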
class PackageContext(object):
def __init__(self, current_package, stringpool_main, mTableStrings, mKeyStrings):
self.stringpool_main = stringpool_main
self.mTableStrings = mTableStrings
self.mKeyStrings = mKeyStrings
self.current_package = current_package
def get_mResId(self):
return self.current_package.mResId
def set_mResId(self, mResId):
self.current_package.mResId = mResId
class ARSCHeader(object):
def __init__(self, buff):
self.start = buff.get_idx()
self.type = unpack('<h', buff.read(2))[0]
self.header_size = unpack('<h', buff.read(2))[0]
self.size = unpack('<i', buff.read(4))[0]
#print "ARSCHeader", hex(self.start), hex(self.type), hex(self.header_size), hex(self.size)
class ARSCResTablePackage(object):
def __init__(self, buff):
self.start = buff.get_idx()
self.id = unpack('<i', buff.read(4))[0]
self.name = buff.readNullString(256)
self.typeStrings = unpack('<i', buff.read(4))[0]
self.lastPublicType = unpack('<i', buff.read(4))[0]
self.keyStrings = unpack('<i', buff.read(4))[0]
self.lastPublicKey = unpack('<i', buff.read(4))[0]
self.mResId = self.id << 24
#print "ARSCResTablePackage", hex(self.start), hex(self.id), hex(self.mResId), repr(self.name.decode("utf-16", errors='replace')), hex(self.typeStrings), hex(self.lastPublicType), hex(self.keyStrings), hex(self.lastPublicKey)
def get_name(self):
name = self.name.decode("utf-16", 'replace')
name = name[:name.find("\x00")]
return name
class ARSCResTypeSpec(object):
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.id = unpack('<b', buff.read(1))[0]
self.res0 = unpack('<b', buff.read(1))[0]
self.res1 = unpack('<h', buff.read(2))[0]
self.entryCount = unpack('<i', buff.read(4))[0]
#print "ARSCResTypeSpec", hex(self.start), hex(self.id), hex(self.res0), hex(self.res1), hex(self.entryCount), "table:" + self.parent.mTableStrings.getString(self.id - 1)
self.typespec_entries = []
for i in range(0, self.entryCount):
self.typespec_entries.append(unpack('<i', buff.read(4))[0])
class ARSCResType(object):
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.id = unpack('<b', buff.read(1))[0]
self.res0 = unpack('<b', buff.read(1))[0]
self.res1 = unpack('<h', buff.read(2))[0]
self.entryCount = unpack('<i', buff.read(4))[0]
self.entriesStart = unpack('<i', buff.read(4))[0]
self.mResId = (0xff000000 & self.parent.get_mResId()) | self.id << 16
self.parent.set_mResId(self.mResId)
#print "ARSCResType", hex(self.start), hex(self.id), hex(self.res0), hex(self.res1), hex(self.entryCount), hex(self.entriesStart), hex(self.mResId), "table:" + self.parent.mTableStrings.getString(self.id - 1)
self.config = ARSCResTableConfig(buff)
def get_type(self):
return self.parent.mTableStrings.getString(self.id - 1)
class ARSCResTableConfig(object):
def __init__(self, buff):
self.start = buff.get_idx()
self.size = unpack('<i', buff.read(4))[0]
self.imsi = unpack('<i', buff.read(4))[0]
self.locale = unpack('<i', buff.read(4))[0]
self.screenType = unpack('<i', buff.read(4))[0]
self.input = unpack('<i', buff.read(4))[0]
self.screenSize = unpack('<i', buff.read(4))[0]
self.version = unpack('<i', buff.read(4))[0]
self.screenConfig = 0
self.screenSizeDp = 0
if self.size >= 32:
self.screenConfig = unpack('<i', buff.read(4))[0]
if self.size >= 36:
self.screenSizeDp = unpack('<i', buff.read(4))[0]
self.exceedingSize = self.size - 36
if self.exceedingSize > 0:
androconf.warning("too much bytes !")
self.padding = buff.read(self.exceedingSize)
#print "ARSCResTableConfig", hex(self.start), hex(self.size), hex(self.imsi), hex(self.locale), repr(self.get_language()), repr(self.get_country()), hex(self.screenType), hex(self.input), hex(self.screenSize), hex(self.version), hex(self.screenConfig), hex(self.screenSizeDp)
def get_language(self):
x = self.locale & 0x0000ffff
return chr(x & 0x00ff) + chr((x & 0xff00) >> 8)
def get_country(self):
x = (self.locale & 0xffff0000) >> 16
return chr(x & 0x00ff) + chr((x & 0xff00) >> 8)
class ARSCResTableEntry(object):
def __init__(self, buff, mResId, parent=None):
self.start = buff.get_idx()
self.mResId = mResId
self.parent = parent
self.size = unpack('<h', buff.read(2))[0]
self.flags = unpack('<h', buff.read(2))[0]
self.index = unpack('<i', buff.read(4))[0]
#print "ARSCResTableEntry", hex(self.start), hex(self.mResId), hex(self.size), hex(self.flags), hex(self.index), self.is_complex()#, hex(self.mResId)
if self.flags & 1:
self.item = ARSCComplex(buff, parent)
else:
self.key = ARSCResStringPoolRef(buff, self.parent)
def get_index(self):
return self.index
def get_value(self):
return self.parent.mKeyStrings.getString(self.index)
def get_key_data(self):
return self.key.get_data_value()
def is_public(self):
return self.flags == 0 or self.flags == 2
def is_complex(self):
return (self.flags & 1) == 1
class ARSCComplex(object):
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.id_parent = unpack('<i', buff.read(4))[0]
self.count = unpack('<i', buff.read(4))[0]
self.items = []
for i in range(0, self.count):
self.items.append((unpack('<i', buff.read(4))[0], ARSCResStringPoolRef(buff, self.parent)))
#print "ARSCComplex", hex(self.start), self.id_parent, self.count, repr(self.parent.mKeyStrings.getString(self.id_parent))
class ARSCResStringPoolRef(object):
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.skip_bytes = buff.read(3)
self.data_type = unpack('<b', buff.read(1))[0]
self.data = unpack('<i', buff.read(4))[0]
#print "ARSCResStringPoolRef", hex(self.start), hex(self.data_type), hex(self.data)#, "key:" + self.parent.mKeyStrings.getString(self.index), self.parent.stringpool_main.getString(self.data)
def get_data_value(self):
return self.parent.stringpool_main.getString(self.data)
def get_data(self):
return self.data
def get_data_type(self):
return self.data_type
def get_arsc_info(arscobj):
buff = ""
for package in arscobj.get_packages_names():
buff += package + ":\n"
for locale in arscobj.get_locales(package):
buff += "\t" + repr(locale) + ":\n"
for ttype in arscobj.get_types(package, locale):
buff += "\t\t" + ttype + ":\n"
try:
tmp_buff = getattr(arscobj, "get_" + ttype + "_resources")(package, locale).decode("utf-8", 'replace').split("\n")
for i in tmp_buff:
buff += "\t\t\t" + i + "\n"
except AttributeError:
pass
return buff
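# Example (sketch): combining APK and ARSC parsing to dump every resource
# declared by an application.
#
#   a = APK("myfile.apk")
#   print get_arsc_info(a.get_android_resources())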
| apache-2.0 | 3,609,272,022,803,543,600 | 31.366463 | 283 | 0.530605 | false |
Limags/MissionPlanner | Lib/site-packages/numpy/distutils/tests/test_misc_util.py | 51 | 2430 | #!"C:\Users\hog\Documents\Visual Studio 2010\Projects\ArdupilotMega\ArdupilotMega\bin\Debug\ipy.exe"
from numpy.testing import *
from numpy.distutils.misc_util import appendpath, minrelpath, gpaths, rel_path
from os.path import join, sep, dirname
ajoin = lambda *paths: join(*((sep,)+paths))
class TestAppendpath(TestCase):
def test_1(self):
assert_equal(appendpath('prefix','name'),join('prefix','name'))
assert_equal(appendpath('/prefix','name'),ajoin('prefix','name'))
assert_equal(appendpath('/prefix','/name'),ajoin('prefix','name'))
assert_equal(appendpath('prefix','/name'),join('prefix','name'))
def test_2(self):
assert_equal(appendpath('prefix/sub','name'),
join('prefix','sub','name'))
assert_equal(appendpath('prefix/sub','sup/name'),
join('prefix','sub','sup','name'))
assert_equal(appendpath('/prefix/sub','/prefix/name'),
ajoin('prefix','sub','name'))
def test_3(self):
assert_equal(appendpath('/prefix/sub','/prefix/sup/name'),
ajoin('prefix','sub','sup','name'))
assert_equal(appendpath('/prefix/sub/sub2','/prefix/sup/sup2/name'),
ajoin('prefix','sub','sub2','sup','sup2','name'))
assert_equal(appendpath('/prefix/sub/sub2','/prefix/sub/sup/name'),
ajoin('prefix','sub','sub2','sup','name'))
class TestMinrelpath(TestCase):
def test_1(self):
n = lambda path: path.replace('/',sep)
assert_equal(minrelpath(n('aa/bb')),n('aa/bb'))
assert_equal(minrelpath('..'),'..')
assert_equal(minrelpath(n('aa/..')),'')
assert_equal(minrelpath(n('aa/../bb')),'bb')
assert_equal(minrelpath(n('aa/bb/..')),'aa')
assert_equal(minrelpath(n('aa/bb/../..')),'')
assert_equal(minrelpath(n('aa/bb/../cc/../dd')),n('aa/dd'))
assert_equal(minrelpath(n('.././..')),n('../..'))
assert_equal(minrelpath(n('aa/bb/.././../dd')),n('dd'))
class TestGpaths(TestCase):
def test_gpaths(self):
local_path = minrelpath(join(dirname(__file__),'..'))
ls = gpaths('command/*.py', local_path)
assert join(local_path,'command','build_src.py') in ls,`ls`
f = gpaths('system_info.py', local_path)
assert join(local_path,'system_info.py')==f[0],`f`
if __name__ == "__main__":
run_module_suite()
| gpl-3.0 | 8,683,271,359,492,046,000 | 40.896552 | 100 | 0.574486 | false |
MathieuDuponchelle/gobject-introspection | giscanner/girwriter.py | 1 | 26509 | # -*- Mode: Python -*-
# GObject-Introspection - a framework for introspecting GObject libraries
# Copyright (C) 2008 Johan Dahlin
# Copyright (C) 2008, 2009 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
from __future__ import with_statement
from . import ast
from .xmlwriter import XMLWriter
# Bump this for *incompatible* changes to the .gir.
# Compatible changes we just make inline
COMPATIBLE_GIR_VERSION = '1.2'
class GIRWriter(XMLWriter):
def __init__(self, namespace):
super(GIRWriter, self).__init__()
self.write_comment(
'This file was automatically generated from C sources - DO NOT EDIT!\n'
'To affect the contents of this file, edit the original C definitions,\n'
'and/or use gtk-doc annotations. ')
self._write_repository(namespace)
def _write_repository(self, namespace):
attrs = [
('version', COMPATIBLE_GIR_VERSION),
('xmlns', 'http://www.gtk.org/introspection/core/1.0'),
('xmlns:c', 'http://www.gtk.org/introspection/c/1.0'),
('xmlns:glib', 'http://www.gtk.org/introspection/glib/1.0')]
with self.tagcontext('repository', attrs):
for include in sorted(namespace.includes):
self._write_include(include)
for pkg in sorted(set(namespace.exported_packages)):
self._write_pkgconfig_pkg(pkg)
for c_include in sorted(set(namespace.c_includes)):
self._write_c_include(c_include)
self._namespace = namespace
self._write_namespace(namespace)
self._namespace = None
def _write_include(self, include):
attrs = [('name', include.name), ('version', include.version)]
self.write_tag('include', attrs)
def _write_pkgconfig_pkg(self, package):
attrs = [('name', package)]
self.write_tag('package', attrs)
def _write_c_include(self, c_include):
attrs = [('name', c_include)]
self.write_tag('c:include', attrs)
def _write_namespace(self, namespace):
attrs = [('name', namespace.name),
('version', namespace.version),
('shared-library', ','.join(namespace.shared_libraries)),
('c:identifier-prefixes', ','.join(namespace.identifier_prefixes)),
('c:symbol-prefixes', ','.join(namespace.symbol_prefixes))]
with self.tagcontext('namespace', attrs):
# We define a custom sorting function here because
# we want aliases to be first. They're a bit
# special because the typelib compiler expands them.
def nscmp(a, b):
if isinstance(a, ast.Alias):
if isinstance(b, ast.Alias):
return cmp(a.name, b.name)
else:
return -1
elif isinstance(b, ast.Alias):
return 1
else:
return cmp(a, b)
for node in sorted(namespace.itervalues(), cmp=nscmp):
self._write_node(node)
def _write_node(self, node):
if isinstance(node, ast.Function):
self._write_function(node)
elif isinstance(node, ast.Enum):
self._write_enum(node)
elif isinstance(node, ast.Bitfield):
self._write_bitfield(node)
elif isinstance(node, (ast.Class, ast.Interface)):
self._write_class(node)
elif isinstance(node, ast.Callback):
self._write_callback(node)
elif isinstance(node, ast.Record):
self._write_record(node)
elif isinstance(node, ast.Union):
self._write_union(node)
elif isinstance(node, ast.Boxed):
self._write_boxed(node)
elif isinstance(node, ast.Member):
# FIXME: atk_misc_instance singleton
pass
elif isinstance(node, ast.Alias):
self._write_alias(node)
elif isinstance(node, ast.Constant):
self._write_constant(node)
elif isinstance(node, ast.DocSection):
self._write_doc_section(node)
else:
print 'WRITER: Unhandled node', node
def _append_version(self, node, attrs):
if node.version:
attrs.append(('version', node.version))
def _write_generic(self, node):
for key, value in node.attributes.items():
self.write_tag('attribute', [('name', key), ('value', value)])
if hasattr(node, 'doc') and node.doc:
self.write_tag('doc', [('xml:space', 'preserve')],
node.doc)
if hasattr(node, 'version_doc') and node.version_doc:
self.write_tag('doc-version', [('xml:space', 'preserve')],
node.version_doc)
if hasattr(node, 'deprecated_doc') and node.deprecated_doc:
self.write_tag('doc-deprecated', [('xml:space', 'preserve')],
node.deprecated_doc)
if hasattr(node, 'stability_doc') and node.stability_doc:
self.write_tag('doc-stability', [('xml:space', 'preserve')],
node.stability_doc)
def _append_node_generic(self, node, attrs):
if node.skip or not node.introspectable:
attrs.append(('introspectable', '0'))
if node.deprecated or node.deprecated_doc:
# The deprecated attribute used to contain node.deprecated_doc as an attribute. As
# an xml attribute cannot preserve whitespace, deprecated_doc has been moved into
# it's own tag, written in _write_generic() above. We continue to write the deprecated
# attribute for backwards compatibility
attrs.append(('deprecated', '1'))
if node.deprecated:
attrs.append(('deprecated-version', node.deprecated))
if node.stability:
attrs.append(('stability', node.stability))
def _append_throws(self, func, attrs):
if func.throws:
attrs.append(('throws', '1'))
def _write_alias(self, alias):
attrs = [('name', alias.name)]
if alias.ctype is not None:
attrs.append(('c:type', alias.ctype))
self._append_node_generic(alias, attrs)
with self.tagcontext('alias', attrs):
self._write_generic(alias)
self._write_type_ref(alias.target)
def _write_callable(self, callable, tag_name, extra_attrs):
attrs = [('name', callable.name)]
attrs.extend(extra_attrs)
self._append_version(callable, attrs)
self._append_node_generic(callable, attrs)
self._append_throws(callable, attrs)
with self.tagcontext(tag_name, attrs):
self._write_generic(callable)
self._write_return_type(callable.retval, parent=callable)
self._write_parameters(callable)
def _write_function(self, func, tag_name='function'):
if func.internal_skipped:
return
attrs = []
if hasattr(func, 'symbol'):
attrs.append(('c:identifier', func.symbol))
if func.shadowed_by:
attrs.append(('shadowed-by', func.shadowed_by))
elif func.shadows:
attrs.append(('shadows', func.shadows))
if func.moved_to is not None:
attrs.append(('moved-to', func.moved_to))
self._write_callable(func, tag_name, attrs)
def _write_method(self, method):
self._write_function(method, tag_name='method')
def _write_static_method(self, method):
self._write_function(method, tag_name='function')
def _write_constructor(self, method):
self._write_function(method, tag_name='constructor')
def _write_return_type(self, return_, parent=None):
if not return_:
return
attrs = []
if return_.transfer:
attrs.append(('transfer-ownership', return_.transfer))
if return_.skip:
attrs.append(('skip', '1'))
if return_.nullable:
attrs.append(('nullable', '1'))
with self.tagcontext('return-value', attrs):
self._write_generic(return_)
self._write_type(return_.type, parent=parent)
def _write_parameters(self, callable):
if not callable.parameters and callable.instance_parameter is None:
return
with self.tagcontext('parameters'):
if callable.instance_parameter:
self._write_parameter(callable, callable.instance_parameter, 'instance-parameter')
for parameter in callable.parameters:
self._write_parameter(callable, parameter)
def _write_parameter(self, parent, parameter, nodename='parameter'):
attrs = []
if parameter.argname is not None:
attrs.append(('name', parameter.argname))
if (parameter.direction is not None) and (parameter.direction != 'in'):
attrs.append(('direction', parameter.direction))
attrs.append(('caller-allocates',
'1' if parameter.caller_allocates else '0'))
if parameter.transfer:
attrs.append(('transfer-ownership',
parameter.transfer))
if parameter.nullable:
attrs.append(('nullable', '1'))
if parameter.direction != ast.PARAM_DIRECTION_OUT:
attrs.append(('allow-none', '1'))
if parameter.optional:
attrs.append(('optional', '1'))
if parameter.direction == ast.PARAM_DIRECTION_OUT:
attrs.append(('allow-none', '1'))
if parameter.scope:
attrs.append(('scope', parameter.scope))
if parameter.closure_name is not None:
idx = parent.get_parameter_index(parameter.closure_name)
attrs.append(('closure', '%d' % (idx, )))
if parameter.destroy_name is not None:
idx = parent.get_parameter_index(parameter.destroy_name)
attrs.append(('destroy', '%d' % (idx, )))
if parameter.skip:
attrs.append(('skip', '1'))
with self.tagcontext(nodename, attrs):
self._write_generic(parameter)
self._write_type(parameter.type, parent=parent)
def _type_to_name(self, typeval):
if not typeval.resolved:
raise AssertionError("Caught unresolved type %r (ctype=%r)" % (typeval, typeval.ctype))
assert typeval.target_giname is not None
prefix = self._namespace.name + '.'
if typeval.target_giname.startswith(prefix):
return typeval.target_giname[len(prefix):]
return typeval.target_giname
def _write_type_ref(self, ntype):
""" Like _write_type, but only writes the type name rather than the full details """
assert isinstance(ntype, ast.Type), ntype
attrs = []
if ntype.ctype:
attrs.append(('c:type', ntype.ctype))
if isinstance(ntype, ast.Array):
if ntype.array_type != ast.Array.C:
attrs.insert(0, ('name', ntype.array_type))
elif isinstance(ntype, ast.List):
if ntype.name:
attrs.insert(0, ('name', ntype.name))
elif isinstance(ntype, ast.Map):
attrs.insert(0, ('name', 'GLib.HashTable'))
else:
if ntype.target_giname:
attrs.insert(0, ('name', self._type_to_name(ntype)))
elif ntype.target_fundamental:
attrs.insert(0, ('name', ntype.target_fundamental))
self.write_tag('type', attrs)
def _write_type(self, ntype, relation=None, parent=None):
assert isinstance(ntype, ast.Type), ntype
attrs = []
if ntype.complete_ctype:
attrs.append(('c:type', ntype.complete_ctype))
elif ntype.ctype:
attrs.append(('c:type', ntype.ctype))
if isinstance(ntype, ast.Varargs):
self.write_tag('varargs', [])
elif isinstance(ntype, ast.Array):
if ntype.array_type != ast.Array.C:
attrs.insert(0, ('name', ntype.array_type))
# we insert an explicit 'zero-terminated' attribute
# when it is false, or when it would not be implied
# by the absence of length and fixed-size
if not ntype.zeroterminated:
attrs.insert(0, ('zero-terminated', '0'))
elif (ntype.zeroterminated
and (ntype.size is not None or ntype.length_param_name is not None)):
attrs.insert(0, ('zero-terminated', '1'))
if ntype.size is not None:
attrs.append(('fixed-size', '%d' % (ntype.size, )))
if ntype.length_param_name is not None:
if isinstance(parent, ast.Callable):
length = parent.get_parameter_index(ntype.length_param_name)
elif isinstance(parent, ast.Compound):
length = parent.get_field_index(ntype.length_param_name)
else:
assert False, "parent not a callable or compound: %r" % parent
attrs.insert(0, ('length', '%d' % (length, )))
with self.tagcontext('array', attrs):
self._write_type(ntype.element_type)
elif isinstance(ntype, ast.List):
if ntype.name:
attrs.insert(0, ('name', ntype.name))
with self.tagcontext('type', attrs):
self._write_type(ntype.element_type)
elif isinstance(ntype, ast.Map):
attrs.insert(0, ('name', 'GLib.HashTable'))
with self.tagcontext('type', attrs):
self._write_type(ntype.key_type)
self._write_type(ntype.value_type)
else:
# REWRITEFIXME - enable this for 1.2
if ntype.target_giname:
attrs.insert(0, ('name', self._type_to_name(ntype)))
elif ntype.target_fundamental:
# attrs = [('fundamental', ntype.target_fundamental)]
attrs.insert(0, ('name', ntype.target_fundamental))
elif ntype.target_foreign:
attrs.insert(0, ('foreign', '1'))
self.write_tag('type', attrs)
def _append_registered(self, node, attrs):
assert isinstance(node, ast.Registered)
if node.get_type:
attrs.extend([('glib:type-name', node.gtype_name),
('glib:get-type', node.get_type)])
def _write_enum(self, enum):
attrs = [('name', enum.name)]
self._append_version(enum, attrs)
self._append_node_generic(enum, attrs)
self._append_registered(enum, attrs)
attrs.append(('c:type', enum.ctype))
if enum.error_domain:
attrs.append(('glib:error-domain', enum.error_domain))
with self.tagcontext('enumeration', attrs):
self._write_generic(enum)
for member in enum.members:
self._write_member(member)
for method in sorted(enum.static_methods):
self._write_static_method(method)
def _write_bitfield(self, bitfield):
attrs = [('name', bitfield.name)]
self._append_version(bitfield, attrs)
self._append_node_generic(bitfield, attrs)
self._append_registered(bitfield, attrs)
attrs.append(('c:type', bitfield.ctype))
with self.tagcontext('bitfield', attrs):
self._write_generic(bitfield)
for member in bitfield.members:
self._write_member(member)
for method in sorted(bitfield.static_methods):
self._write_static_method(method)
def _write_member(self, member):
attrs = [('name', member.name),
('value', str(member.value)),
('c:identifier', member.symbol)]
if member.nick is not None:
attrs.append(('glib:nick', member.nick))
with self.tagcontext('member', attrs):
self._write_generic(member)
def _write_doc_section(self, doc_section):
attrs = [('name', doc_section.name)]
with self.tagcontext('docsection', attrs):
self._write_generic(doc_section)
def _write_constant(self, constant):
attrs = [('name', constant.name),
('value', constant.value),
('c:type', constant.ctype)]
self._append_version(constant, attrs)
self._append_node_generic(constant, attrs)
with self.tagcontext('constant', attrs):
self._write_generic(constant)
self._write_type(constant.value_type)
def _write_class(self, node):
attrs = [('name', node.name),
('c:symbol-prefix', node.c_symbol_prefix),
('c:type', node.ctype)]
self._append_version(node, attrs)
self._append_node_generic(node, attrs)
if isinstance(node, ast.Class):
tag_name = 'class'
if node.parent_type is not None:
attrs.append(('parent',
self._type_to_name(node.parent_type)))
if node.is_abstract:
attrs.append(('abstract', '1'))
else:
assert isinstance(node, ast.Interface)
tag_name = 'interface'
attrs.append(('glib:type-name', node.gtype_name))
if node.get_type is not None:
attrs.append(('glib:get-type', node.get_type))
if node.glib_type_struct is not None:
attrs.append(('glib:type-struct',
self._type_to_name(node.glib_type_struct)))
if isinstance(node, ast.Class):
if node.fundamental:
attrs.append(('glib:fundamental', '1'))
if node.ref_func:
attrs.append(('glib:ref-func', node.ref_func))
if node.unref_func:
attrs.append(('glib:unref-func', node.unref_func))
if node.set_value_func:
attrs.append(('glib:set-value-func', node.set_value_func))
if node.get_value_func:
attrs.append(('glib:get-value-func', node.get_value_func))
with self.tagcontext(tag_name, attrs):
self._write_generic(node)
if isinstance(node, ast.Class):
for iface in sorted(node.interfaces):
self.write_tag('implements',
[('name', self._type_to_name(iface))])
if isinstance(node, ast.Interface):
for iface in sorted(node.prerequisites):
self.write_tag('prerequisite',
[('name', self._type_to_name(iface))])
if isinstance(node, ast.Class):
for method in sorted(node.constructors):
self._write_constructor(method)
for method in sorted(node.static_methods):
self._write_static_method(method)
for vfunc in sorted(node.virtual_methods):
self._write_vfunc(vfunc)
for method in sorted(node.methods):
self._write_method(method)
for prop in sorted(node.properties):
self._write_property(prop)
for field in node.fields:
self._write_field(field, node)
for signal in sorted(node.signals):
self._write_signal(signal)
def _write_boxed(self, boxed):
attrs = [('glib:name', boxed.name)]
if boxed.c_symbol_prefix is not None:
attrs.append(('c:symbol-prefix', boxed.c_symbol_prefix))
self._append_registered(boxed, attrs)
with self.tagcontext('glib:boxed', attrs):
self._write_generic(boxed)
for method in sorted(boxed.constructors):
self._write_constructor(method)
for method in sorted(boxed.methods):
self._write_method(method)
for method in sorted(boxed.static_methods):
self._write_static_method(method)
def _write_property(self, prop):
attrs = [('name', prop.name)]
self._append_version(prop, attrs)
self._append_node_generic(prop, attrs)
# Properties are assumed to be readable (see also generate.c)
if not prop.readable:
attrs.append(('readable', '0'))
if prop.writable:
attrs.append(('writable', '1'))
if prop.construct:
attrs.append(('construct', '1'))
if prop.construct_only:
attrs.append(('construct-only', '1'))
if prop.transfer:
attrs.append(('transfer-ownership', prop.transfer))
with self.tagcontext('property', attrs):
self._write_generic(prop)
self._write_type(prop.type)
def _write_vfunc(self, vf):
attrs = []
if vf.invoker:
attrs.append(('invoker', vf.invoker))
self._write_callable(vf, 'virtual-method', attrs)
def _write_callback(self, callback):
attrs = []
if callback.ctype != callback.name:
attrs.append(('c:type', callback.ctype))
self._write_callable(callback, 'callback', attrs)
def _write_record(self, record, extra_attrs=[]):
is_gtype_struct = False
attrs = list(extra_attrs)
if record.name is not None:
attrs.append(('name', record.name))
if record.ctype is not None: # the record might be anonymous
attrs.append(('c:type', record.ctype))
if record.disguised:
attrs.append(('disguised', '1'))
if record.foreign:
attrs.append(('foreign', '1'))
if record.is_gtype_struct_for is not None:
is_gtype_struct = True
attrs.append(('glib:is-gtype-struct-for',
self._type_to_name(record.is_gtype_struct_for)))
self._append_version(record, attrs)
self._append_node_generic(record, attrs)
self._append_registered(record, attrs)
if record.c_symbol_prefix:
attrs.append(('c:symbol-prefix', record.c_symbol_prefix))
with self.tagcontext('record', attrs):
self._write_generic(record)
if record.fields:
for field in record.fields:
self._write_field(field, record, is_gtype_struct)
for method in sorted(record.constructors):
self._write_constructor(method)
for method in sorted(record.methods):
self._write_method(method)
for method in sorted(record.static_methods):
self._write_static_method(method)
def _write_union(self, union):
attrs = []
if union.name is not None:
attrs.append(('name', union.name))
if union.ctype is not None: # the union might be anonymous
attrs.append(('c:type', union.ctype))
self._append_version(union, attrs)
self._append_node_generic(union, attrs)
self._append_registered(union, attrs)
if union.c_symbol_prefix:
attrs.append(('c:symbol-prefix', union.c_symbol_prefix))
with self.tagcontext('union', attrs):
self._write_generic(union)
if union.fields:
for field in union.fields:
self._write_field(field, union)
for method in sorted(union.constructors):
self._write_constructor(method)
for method in sorted(union.methods):
self._write_method(method)
for method in sorted(union.static_methods):
self._write_static_method(method)
def _write_field(self, field, parent, is_gtype_struct=False):
if field.anonymous_node:
if isinstance(field.anonymous_node, ast.Callback):
attrs = [('name', field.name)]
self._append_node_generic(field, attrs)
with self.tagcontext('field', attrs):
self._write_callback(field.anonymous_node)
elif isinstance(field.anonymous_node, ast.Record):
self._write_record(field.anonymous_node)
elif isinstance(field.anonymous_node, ast.Union):
self._write_union(field.anonymous_node)
else:
raise AssertionError("Unknown field anonymous: %r" % (field.anonymous_node, ))
else:
attrs = [('name', field.name)]
self._append_node_generic(field, attrs)
# Fields are assumed to be read-only
# (see also girparser.c and generate.c)
if not field.readable:
attrs.append(('readable', '0'))
if field.writable:
attrs.append(('writable', '1'))
if field.bits:
attrs.append(('bits', str(field.bits)))
if field.private:
attrs.append(('private', '1'))
with self.tagcontext('field', attrs):
self._write_generic(field)
self._write_type(field.type, parent=parent)
def _write_signal(self, signal):
attrs = [('name', signal.name)]
if signal.when:
attrs.append(('when', signal.when))
if signal.no_recurse:
attrs.append(('no-recurse', '1'))
if signal.detailed:
attrs.append(('detailed', '1'))
if signal.action:
attrs.append(('action', '1'))
if signal.no_hooks:
attrs.append(('no-hooks', '1'))
self._append_version(signal, attrs)
self._append_node_generic(signal, attrs)
with self.tagcontext('glib:signal', attrs):
self._write_generic(signal)
self._write_return_type(signal.retval)
self._write_parameters(signal)
| gpl-2.0 | -1,240,516,225,499,027,000 | 41.825525 | 99 | 0.569052 | false |
Serag8/Bachelor | google_appengine/google/appengine/ext/admin_redirect/main.py | 8 | 2616 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Main module for admin redirect.
To use, add this to app.yaml:
builtins:
- admin_redirect: on
"""
import logging
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
GOOGLE_SUFFIX = '.google.com'
CONSOLE_SUFFIX = '/dashboard?app_id='
APPENGINE_URL = 'https://appengine.google.com'
ADMIN_CONSOLE_NAME = 'admin-console'
APPLICATION_ID_PARAM = 'APPLICATION_ID'
SERVER_NAME_PARAM = 'SERVER_NAME'
class RedirectToAdminConsole(webapp.RequestHandler):
"""Used to redirect the user to the appropriate Admin Console URL."""
def get(self):
"""Handler to redirect all /_ah/admin.* requests to Admin Console."""
app_id = self.request.environ.get(APPLICATION_ID_PARAM)
if not app_id:
logging.error('Could not get application id; generic redirect.')
self.redirect(APPENGINE_URL)
return
server = self.request.environ.get(SERVER_NAME_PARAM)
if not server:
logging.warning('Server parameter not present; appengine.com redirect.')
self.redirect('%s%s%s' % (APPENGINE_URL, CONSOLE_SUFFIX, app_id))
return
if server.endswith(GOOGLE_SUFFIX):
if server.find(app_id) == 0:
new_server = server.replace(app_id, ADMIN_CONSOLE_NAME)
self.redirect('http://%s%s%s' % (new_server,
CONSOLE_SUFFIX,
app_id))
else:
self.response.out.write("""
Could not determine admin console location from server name.""")
else:
self.redirect('%s%s%s' % (APPENGINE_URL, CONSOLE_SUFFIX, app_id))
def CreateApplication():
"""Create new WSGIApplication and register all handlers.
Returns:
an instance of webapp.WSGIApplication with all mapreduce handlers
registered.
"""
return webapp.WSGIApplication([(r'.*', RedirectToAdminConsole)],
debug=True)
APP = CreateApplication()
def main():
util.run_wsgi_app(APP)
if __name__ == '__main__':
main()
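# Example of the mapping above (hypothetical values): with APPLICATION_ID
# "myapp" and SERVER_NAME "myapp.corp.google.com", the handler redirects to
# http://admin-console.corp.google.com/dashboard?app_id=myapp; a host outside
# *.google.com falls back to https://appengine.google.com/dashboard?app_id=myapp.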
| mit | -8,541,275,971,834,269,000 | 25.424242 | 78 | 0.667813 | false |
helium/helium-client-python | setup.py | 1 | 2330 | #!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from setuptools import setup, find_packages
from setuptools.extension import Extension
from setuptools.command.build_ext import build_ext
import codecs
import versioneer
def get_ext_modules():
local_inc = 'helium_client/helium-client'
local_sources = ['helium_client/_helium.c',
'helium_client/_serial.c',
'helium_client/helium-client/helium-client.c',
'helium_client/helium-client/cauterize/atom_api.c',
'helium_client/helium-client/cauterize/config_api.c',
'helium_client/helium-client/cauterize/cauterize.c']
extra_compile_args = ['-std=gnu99', '-Werror']
return [
Extension(name="helium_client._helium",
sources=local_sources,
include_dirs=[local_inc],
extra_compile_args=extra_compile_args),
]
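# A typical local build of the C extension (assumed invocation, not part of
# this file) is:
#   python setup.py build_ext --inplace
# which compiles the bundled helium-client sources into helium_client._helium.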
cmdclass = {'build_ext' : build_ext}
cmdclass.update(versioneer.get_cmdclass())
setup(
name='helium_client',
version=versioneer.get_version(),
author='Helium',
author_email='[email protected]',
packages=find_packages(),
license='LICENSE.txt',
url="http://github.com/helium/helium-client-python",
description='A Python interface to the Helium Atom.',
long_description=codecs.open('README.md',
mode='r', encoding='utf-8').read(),
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Development Status :: 3 - Alpha',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'],
extras_require={'dev': ['cython']},
include_package_data=True,
ext_modules=get_ext_modules(),
cmdclass=cmdclass,
)
| bsd-3-clause | -794,048,752,122,516,400 | 36.580645 | 74 | 0.57897 | false |
xeoron/namebench | nb_third_party/jinja2/tests.py | 285 | 3313 | # -*- coding: utf-8 -*-
"""
jinja2.tests
~~~~~~~~~~~~
Jinja test functions. Used with the "is" operator.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
from jinja2.runtime import Undefined
# nose, nothing here to test
__test__ = False
number_re = re.compile(r'^-?\d+(\.\d+)?$')
regex_type = type(number_re)
try:
test_callable = callable
except NameError:
def test_callable(x):
return hasattr(x, '__call__')
def test_odd(value):
"""Return true if the variable is odd."""
return value % 2 == 1
def test_even(value):
"""Return true if the variable is even."""
return value % 2 == 0
def test_divisibleby(value, num):
"""Check if a variable is divisible by a number."""
return value % num == 0
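# Usage sketch: the functions in this module back Jinja's "is" operator, so
# with a default Environment a template such as {{ 4 is divisibleby 2 }}
# renders True, as does {{ 3 is odd }}.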
def test_defined(value):
"""Return true if the variable is defined:
.. sourcecode:: jinja
{% if variable is defined %}
value of variable: {{ variable }}
{% else %}
variable is not defined
{% endif %}
See the :func:`default` filter for a simple way to set undefined
variables.
"""
return not isinstance(value, Undefined)
def test_undefined(value):
"""Like :func:`defined` but the other way round."""
return isinstance(value, Undefined)
def test_none(value):
"""Return true if the variable is none."""
return value is None
def test_lower(value):
"""Return true if the variable is lowercased."""
return unicode(value).islower()
def test_upper(value):
"""Return true if the variable is uppercased."""
return unicode(value).isupper()
def test_string(value):
"""Return true if the object is a string."""
return isinstance(value, basestring)
def test_number(value):
"""Return true if the variable is a number."""
return isinstance(value, (int, long, float, complex))
def test_sequence(value):
"""Return true if the variable is a sequence. Sequences are variables
that are iterable.
"""
try:
len(value)
value.__getitem__
except:
return False
return True
def test_sameas(value, other):
"""Check if an object points to the same memory address than another
object:
.. sourcecode:: jinja
{% if foo.attribute is sameas false %}
the foo attribute really is the `False` singleton
{% endif %}
"""
return value is other
def test_iterable(value):
"""Check if it's possible to iterate over an object."""
try:
iter(value)
except TypeError:
return False
return True
def test_escaped(value):
"""Check if the value is escaped."""
return hasattr(value, '__html__')
TESTS = {
'odd': test_odd,
'even': test_even,
'divisibleby': test_divisibleby,
'defined': test_defined,
'undefined': test_undefined,
'none': test_none,
'lower': test_lower,
'upper': test_upper,
'string': test_string,
'number': test_number,
'sequence': test_sequence,
'iterable': test_iterable,
'callable': test_callable,
'sameas': test_sameas,
'escaped': test_escaped
}
| apache-2.0 | 2,176,545,024,465,804,300 | 21.691781 | 73 | 0.589798 | false |
bukepo/openthread | tests/scripts/thread-cert/pktverify/verify.py | 7 | 3121 | #!/usr/bin/env python3
#
# Copyright (c) 2020, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import importlib.util
import inspect
import json
import logging
import os
import sys
THREAD_CERT_DIR = './tests/scripts/thread-cert'
sys.path.append(THREAD_CERT_DIR)
import thread_cert
from pktverify.packet_verifier import PacketVerifier
logging.basicConfig(level=logging.INFO,
format='File "%(pathname)s", line %(lineno)d, in %(funcName)s\n'
'%(asctime)s - %(levelname)s - %(message)s')
def main():
json_file = sys.argv[1]
with open(json_file, 'rt') as fp:
test_info = json.load(fp)
script = test_info['script']
script = os.path.relpath(script, THREAD_CERT_DIR)
module_name = os.path.splitext(script)[0].replace('/', '.')
logging.info("Loading %s as module %s ...", script, module_name)
spec = importlib.util.spec_from_file_location(module_name, os.path.join(THREAD_CERT_DIR, script))
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
test_class = None
for name, member in inspect.getmembers(mod):
if isinstance(member, type) and issubclass(member, thread_cert.TestCase):
assert test_class is None, (test_class, member)
test_class = member
    assert test_class is not None, "cannot find a test class in %s" % script
test_instance = test_class()
pv = PacketVerifier(json_file)
pv.add_common_vars()
test_instance.verify(pv)
print("Packet verification passed: %s" % json_file, file=sys.stderr)
if __name__ == '__main__':
main()
| bsd-3-clause | 6,312,570,464,396,989,000 | 36.60241 | 101 | 0.713553 | false |
KousikaGanesh/purchaseandInventory | openerp/addons/mrp_repair/__openerp__.py | 35 | 2497 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Repairs Management',
'version': '1.0',
'category': 'Manufacturing',
'description': """
The aim is to have a complete module to manage all product repairs.
===================================================================
The following topics should be covered by this module:
------------------------------------------------------
* Add/remove products in the reparation
* Impact for stocks
* Invoicing (products and/or services)
* Warranty concept
* Repair quotation report
* Notes for the technician and for the final customer
""",
'author': 'OpenERP SA',
'images': ['images/repair_order.jpeg'],
'depends': ['mrp', 'sale', 'account'],
'data': [
'security/ir.model.access.csv',
'security/mrp_repair_security.xml',
'mrp_repair_data.xml',
'mrp_repair_sequence.xml',
'wizard/mrp_repair_cancel_view.xml',
'wizard/mrp_repair_make_invoice_view.xml',
'mrp_repair_view.xml',
'mrp_repair_workflow.xml',
'mrp_repair_report.xml',
],
'demo': ['mrp_repair_demo.yml'],
'test': ['test/test_mrp_repair_noneinv.yml',
'test/test_mrp_repair_b4inv.yml',
'test/test_mrp_repair_afterinv.yml',
'test/test_mrp_repair_cancel.yml',
'test/mrp_repair_report.yml',
'test/test_mrp_repair_fee.yml',
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -769,582,949,480,938,500 | 37.415385 | 78 | 0.573889 | false |
datamade/openelections-core | openelex/us/ia/transform.py | 2 | 1161 | from openelex.base.transform import Transform, registry
from openelex.models import RawResult
class FixVanBurenTransform(Transform):
"""
s/VanBuren/Van Buren in RawResults from the source file
20001107__ia__general__state_senate__county.csv
"""
name = 'fix_van_buren'
def __call__(self):
results = RawResult.objects.filter(
source="20001107__ia__general__state_senate__county.csv",
jurisdiction="VanBuren")
msg = "Changing 'VanBuren' to 'Van Buren' in {} raw results.".format(
results.count())
print(msg)
results.update(set__jurisdiction="Van Buren",
set__ocd_id="ocd-division/country:us/state:ia/county:van_buren")
def reverse(self):
results = RawResult.objects.filter(
source="20001107__ia__general__state_senate__county.csv",
jurisdiction="Van Buren")
msg = "Reverting 'Van Buren' to 'VanBuren' in {} raw results".format(
results.count())
print(msg)
results.update(set__jurisdiction="VanBuren", set__ocd_id="")
registry.register('ia', FixVanBurenTransform, raw=True)
| mit | -3,220,123,441,521,502,000 | 37.7 | 77 | 0.62963 | false |
sumanthns/flask-project | flask_project/templates/factory.py | 1 | 2162 | from flask import Flask
from werkzeug.utils import import_string
class NoBlueprintException(Exception):
pass
class NoRouteModuleException(Exception):
pass
def _get_imported_stuff_by_path(path):
module_name, object_name = path.rsplit('.', 1)
module = import_string(module_name)
return module, object_name
class AppFactory(object):
def __init__(self, config, name):
self.config = config
self.name = name
def _build_app(self):
app = Flask(self.name)
self._add_config(app)
self._init_db(app)
self._register_blueprints(app)
self._register_routes(app)
return app
def _add_config(self, app):
app.config.from_object(self.config)
def _init_db(self, app):
from app import db
db.init_app(app)
def get_app(self):
app = self._build_app()
return app
def _register_blueprints(self, app):
self._bp = {}
for blueprint_path in app.config.get('BLUEPRINTS', []):
module, b_name = \
_get_imported_stuff_by_path(blueprint_path)
if hasattr(module, b_name):
app.register_blueprint(getattr(module, b_name))
else:
raise NoBlueprintException(
'No {bp_name} blueprint found'.format(bp_name=b_name))
def _register_routes(self, app):
for url_module in app.config.get('URL_MODULES', []):
module, r_name = _get_imported_stuff_by_path(url_module)
if hasattr(module, r_name):
self._setup_routes(getattr(module, r_name), app)
else:
raise NoRouteModuleException('No {r_name} url module found'.format(r_name=r_name))
def _setup_routes(self, routes, app):
for route in routes:
blueprint, rules = route[0], route[1:]
for pattern, view in rules:
if isinstance(blueprint, tuple):
blueprint = blueprint[0]
blueprint.add_url_rule(pattern, view_func=view)
if blueprint not in app.blueprints:
app.register_blueprint(blueprint)
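# Minimal usage sketch (the dotted paths are assumptions; BLUEPRINTS and
# URL_MODULES are the config keys this factory reads):
#
#   class Config(object):
#       BLUEPRINTS = ['myapp.views.bp']        # dotted paths to Blueprint objects
#       URL_MODULES = ['myapp.urls.routes']    # dotted paths to route lists
#
#   app = AppFactory(Config, __name__).get_app()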
| mit | 1,485,140,431,797,699,300 | 29.450704 | 98 | 0.582331 | false |
cloudmesh/cmd3light | cloudmesh_cmd3light/console.py | 1 | 3669 | import textwrap
from colorama import Fore, Back, Style
import colorama
colorama.init()
class Console(object):
"""
    A simple way to print in a console terminal in color. Instead of simply
    using the print statement, you can use special methods to indicate
warnings, errors, ok and regular messages.
Example Usage::
Console.warning("Warning")
Console.error("Error")
Console.info("Info")
Console.msg("msg")
Console.ok("Success")
    One can switch the color mode off with::
Console.color = False
Console.error("Error")
The color will be switched on by default.
"""
#
    # TODO: It would be good if the Console used the borg pattern to have a
# global switch for the console color mode. Currently each import
# switches it back to color.
#
color = True
theme_color = {
'HEADER': Fore.MAGENTA,
'BLACK': Fore.RED,
'CYAN': Fore.CYAN,
'WHITE': Fore.WHITE,
'BLUE': Fore.BLUE,
'OKBLUE': Fore.BLUE,
'OKGREEN': Fore.GREEN,
'FAIL': Fore.RED,
'WARNING': Fore.MAGENTA,
'RED': Fore.RED,
'ENDC': '\033[0m',
'BOLD': "\033[1m",
'OK': Fore.GREEN,
}
theme_bw = {
'HEADER': '',
'BLACK': '',
'CYAN': '',
'WHITE': '',
'BLUE': '',
'OKBLUE': '',
'OKGREEN': '',
'FAIL': '',
'WARNING': '',
'RED': '',
'ENDC': '',
'BOLD': "",
'OK': "",
}
theme = theme_color
@staticmethod
def set_theme(color=True):
if color:
Console.theme = Console.theme_color
else:
Console.theme = Console.theme_bw
Console.color = color
@staticmethod
def get(name):
if name in Console.theme:
return Console.theme[name]
else:
return Console.theme['BLACK']
@staticmethod
def _msg(message, width=90):
return textwrap.fill(message, width=width)
@staticmethod
def msg(message):
print (message)
@staticmethod
def error(message, prefix=True):
if prefix:
text = "ERROR: "
else:
text = ""
if Console.color:
Console._print('FAIL', text, message)
else:
print Console._msg(text + message)
@staticmethod
def info(message):
if Console.color:
Console._print('OKBLUE', "INFO: ", message)
else:
print Console._msg("INFO: " + message)
@staticmethod
def warning(message):
if Console.color:
Console._print('WARNING', "WARNING: ", message)
else:
print Console._msg("WARNING: " + message)
@staticmethod
def ok(message):
if Console.color:
Console._print('OKGREEN', "", message)
else:
print Console._msg(message)
@staticmethod
def _print(color, prefix, message):
print (Console.theme[color] +
prefix +
Console._msg(message) +
Console.theme['ENDC'])
#
# Example
#
if __name__ == "__main__":
print Console.color
print Console.theme
Console.warning("Warning")
Console.error("Error")
Console.info("Info")
Console.msg("msg")
Console.ok("Success")
Console.color = False
print Console.color
Console.error("Error")
print(Fore.RED + 'some red text')
print(Back.GREEN + 'and with a green background')
print(Style.DIM + 'and in dim text')
print(Fore.RESET + Back.RESET + Style.RESET_ALL)
print('back to normal now')
| apache-2.0 | 1,963,679,051,398,093,800 | 22.075472 | 75 | 0.541292 | false |
jemofthewest/mykoans | python2/koans/about_asserts.py | 10 | 2289 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutAsserts(Koan):
def test_assert_truth(self):
"""
We shall contemplate truth by testing reality, via asserts.
"""
# Confused? This video should help:
#
# http://bit.ly/about_asserts
self.assertTrue(False) # This should be true
def test_assert_with_message(self):
"""
Enlightenment may be more easily achieved with appropriate messages.
"""
self.assertTrue(False, "This should be true -- Please fix this")
def test_fill_in_values(self):
"""
Sometimes we will ask you to fill in the values
"""
self.assertEqual(__, 1 + 1)
def test_assert_equality(self):
"""
To understand reality, we must compare our expectations against
reality.
"""
expected_value = __
actual_value = 1 + 1
self.assertTrue(expected_value == actual_value)
def test_a_better_way_of_asserting_equality(self):
"""
Some ways of asserting equality are better than others.
"""
expected_value = __
actual_value = 1 + 1
self.assertEqual(expected_value, actual_value)
def test_that_unittest_asserts_work_the_same_way_as_python_asserts(self):
"""
Understand what lies within.
"""
# This throws an AssertionError exception
assert False
def test_that_sometimes_we_need_to_know_the_class_type(self):
"""
What is in a class name?
"""
# Sometimes we will ask you what the class type of an object is.
#
        # For example, contemplate the text string "naval". What is its class type?
# The koans runner will include this feedback for this koan:
#
# AssertionError: '-=> FILL ME IN! <=-' != <type 'str'>
#
# So "naval".__class__ is equal to <type 'str'>? No not quite. This
# is just what it displays. The answer is simply str.
#
# See for yourself:
self.assertEqual(__, "naval".__class__) # It's str, not <type 'str'>
# Need an illustration? More reading can be found here:
#
# http://bit.ly/__class__
| mit | 6,201,426,503,639,093,000 | 27.974684 | 84 | 0.570555 | false |
darkleons/odoo | addons/hr_expense/report/hr_expense_report.py | 287 | 5652 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
from openerp.addons.decimal_precision import decimal_precision as dp
class hr_expense_report(osv.osv):
_name = "hr.expense.report"
_description = "Expenses Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.date('Date ', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True),
'product_id':fields.many2one('product.product', 'Product', readonly=True),
'journal_id': fields.many2one('account.journal', 'Force Journal', readonly=True),
'product_qty':fields.float('Product Quantity', readonly=True),
'employee_id': fields.many2one('hr.employee', "Employee's Name", readonly=True),
'date_confirm': fields.date('Confirmation Date', readonly=True),
'date_valid': fields.date('Validation Date', readonly=True),
'department_id':fields.many2one('hr.department','Department', readonly=True),
'company_id':fields.many2one('res.company', 'Company', readonly=True),
'user_id':fields.many2one('res.users', 'Validation User', readonly=True),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
'price_total':fields.float('Total Price', readonly=True, digits_compute=dp.get_precision('Account')),
'delay_valid':fields.float('Delay to Valid', readonly=True),
'delay_confirm':fields.float('Delay to Confirm', readonly=True),
'analytic_account': fields.many2one('account.analytic.account','Analytic account',readonly=True),
'price_average':fields.float('Average Price', readonly=True, digits_compute=dp.get_precision('Account')),
'nbr':fields.integer('# of Lines', readonly=True), # TDE FIXME master: rename into nbr_lines
'no_of_products':fields.integer('# of Products', readonly=True),
'no_of_account':fields.integer('# of Accounts', readonly=True),
'state': fields.selection([
('draft', 'Draft'),
('confirm', 'Waiting confirmation'),
('accepted', 'Accepted'),
('done', 'Done'),
('cancelled', 'Cancelled')],
'Status', readonly=True),
}
_order = 'date desc'
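    # With _auto = False this model is backed by the SQL view created in
    # init() below, so every column above is a read-only aggregate computed
    # from hr_expense_line joined with hr_expense_expense.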
def init(self, cr):
tools.drop_view_if_exists(cr, 'hr_expense_report')
cr.execute("""
create or replace view hr_expense_report as (
select
min(l.id) as id,
s.date as date,
s.create_date as create_date,
s.employee_id,
s.journal_id,
s.currency_id,
s.date_confirm as date_confirm,
s.date_valid as date_valid,
s.user_valid as user_id,
s.department_id,
avg(extract('epoch' from age(s.date_valid,s.date)))/(3600*24) as delay_valid,
avg(extract('epoch' from age(s.date_valid,s.date_confirm)))/(3600*24) as delay_confirm,
l.product_id as product_id,
l.analytic_account as analytic_account,
sum(l.unit_quantity * u.factor) as product_qty,
s.company_id as company_id,
sum(l.unit_quantity*l.unit_amount) as price_total,
(sum(l.unit_quantity*l.unit_amount)/sum(case when l.unit_quantity=0 or u.factor=0 then 1 else l.unit_quantity * u.factor end))::decimal(16,2) as price_average,
count(*) as nbr,
(select unit_quantity from hr_expense_line where id=l.id and product_id is not null) as no_of_products,
(select analytic_account from hr_expense_line where id=l.id and analytic_account is not null) as no_of_account,
s.state
from hr_expense_line l
left join hr_expense_expense s on (s.id=l.expense_id)
left join product_uom u on (u.id=l.uom_id)
group by
s.date,
s.create_date,
s.date_confirm,
s.date_valid,
l.product_id,
l.analytic_account,
s.currency_id,
s.user_valid,
s.department_id,
l.uom_id,
l.id,
s.state,
s.journal_id,
s.company_id,
s.employee_id
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,320,746,796,010,623,000 | 49.017699 | 180 | 0.557325 | false |
thnee/ansible | lib/ansible/modules/cloud/vultr/vultr_dns_record.py | 21 | 10067 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2017, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vultr_dns_record
short_description: Manages DNS records on Vultr.
description:
- Create, update and remove DNS records.
version_added: "2.5"
author: "René Moser (@resmo)"
options:
name:
description:
- The record name (subrecord).
default: ""
aliases: [ subrecord ]
domain:
description:
- The domain the record is related to.
required: true
record_type:
description:
- Type of the record.
default: A
choices:
- A
- AAAA
- CNAME
- MX
- SRV
- CAA
- TXT
- NS
- SSHFP
aliases: [ type ]
data:
description:
- Data of the record.
- Required if C(state=present) or C(multiple=yes).
ttl:
description:
- TTL of the record.
default: 300
multiple:
description:
      - Whether to allow more than one record with the same C(name) (including an empty name) and C(record_type).
      - Only allowed for a few record types, e.g. C(record_type=A), C(record_type=NS) or C(record_type=MX).
      - C(data) will not be updated; instead it is used as a key to find existing records.
default: no
type: bool
priority:
description:
- Priority of the record.
default: 0
state:
description:
- State of the DNS record.
default: present
choices: [ present, absent ]
extends_documentation_fragment: vultr
'''
EXAMPLES = '''
- name: Ensure an A record exists
vultr_dns_record:
name: www
domain: example.com
data: 10.10.10.10
ttl: 3600
- name: Ensure a second A record exists for round robin LB
vultr_dns_record:
name: www
domain: example.com
data: 10.10.10.11
ttl: 60
multiple: yes
- name: Ensure a CNAME record exists
vultr_dns_record:
name: web
record_type: CNAME
domain: example.com
data: www.example.com
- name: Ensure MX record exists
vultr_dns_record:
record_type: MX
domain: example.com
data: "{{ item.data }}"
priority: "{{ item.priority }}"
multiple: yes
with_items:
- { data: mx1.example.com, priority: 10 }
- { data: mx2.example.com, priority: 10 }
- { data: mx3.example.com, priority: 20 }
- name: Ensure a record is absent
local_action:
module: vultr_dns_record
name: www
domain: example.com
state: absent
- name: Ensure MX record is absent in case multiple exists
vultr_dns_record:
record_type: MX
domain: example.com
data: mx1.example.com
multiple: yes
state: absent
'''
RETURN = '''
---
vultr_api:
description: Response from Vultr API with a few additions/modification
returned: success
type: complex
contains:
api_account:
description: Account used in the ini file to select the key
returned: success
type: str
sample: default
api_timeout:
description: Timeout used for the API requests
returned: success
type: int
sample: 60
api_retries:
description: Amount of max retries for the API requests
returned: success
type: int
sample: 5
api_retry_max_delay:
description: Exponential backoff delay in seconds between retries up to this max delay value.
returned: success
type: int
sample: 12
version_added: '2.9'
api_endpoint:
description: Endpoint used for the API requests
returned: success
type: str
sample: "https://api.vultr.com"
vultr_dns_record:
description: Response from Vultr API
returned: success
type: complex
contains:
id:
description: The ID of the DNS record.
returned: success
type: int
sample: 1265277
name:
description: The name of the DNS record.
returned: success
type: str
sample: web
record_type:
description: The name of the DNS record.
returned: success
type: str
sample: web
data:
description: Data of the DNS record.
returned: success
type: str
sample: 10.10.10.10
domain:
description: Domain the DNS record is related to.
returned: success
type: str
sample: example.com
priority:
description: Priority of the DNS record.
returned: success
type: int
sample: 10
ttl:
description: Time to live of the DNS record.
returned: success
type: int
sample: 300
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vultr import (
Vultr,
vultr_argument_spec,
)
RECORD_TYPES = [
'A',
'AAAA',
'CNAME',
'MX',
'TXT',
'NS',
'SRV',
'CAA',
'SSHFP'
]
class AnsibleVultrDnsRecord(Vultr):
def __init__(self, module):
super(AnsibleVultrDnsRecord, self).__init__(module, "vultr_dns_record")
self.returns = {
'RECORDID': dict(key='id'),
'name': dict(),
'record': dict(),
'priority': dict(),
'data': dict(),
'type': dict(key='record_type'),
'ttl': dict(),
}
def get_record(self):
records = self.api_query(path="/v1/dns/records?domain=%s" % self.module.params.get('domain'))
multiple = self.module.params.get('multiple')
data = self.module.params.get('data')
name = self.module.params.get('name')
record_type = self.module.params.get('record_type')
result = {}
for record in records or []:
if record.get('type') != record_type:
continue
if record.get('name') == name:
if not multiple:
if result:
self.module.fail_json(msg="More than one record with record_type=%s and name=%s params. "
"Use multiple=yes for more than one record." % (record_type, name))
else:
result = record
elif record.get('data') == data:
return record
return result
def present_record(self):
record = self.get_record()
if not record:
record = self._create_record(record)
else:
record = self._update_record(record)
return record
def _create_record(self, record):
self.result['changed'] = True
data = {
'name': self.module.params.get('name'),
'domain': self.module.params.get('domain'),
'data': self.module.params.get('data'),
'type': self.module.params.get('record_type'),
'priority': self.module.params.get('priority'),
'ttl': self.module.params.get('ttl'),
}
self.result['diff']['before'] = {}
self.result['diff']['after'] = data
if not self.module.check_mode:
self.api_query(
path="/v1/dns/create_record",
method="POST",
data=data
)
record = self.get_record()
return record
def _update_record(self, record):
data = {
'RECORDID': record['RECORDID'],
'name': self.module.params.get('name'),
'domain': self.module.params.get('domain'),
'data': self.module.params.get('data'),
'type': self.module.params.get('record_type'),
'priority': self.module.params.get('priority'),
'ttl': self.module.params.get('ttl'),
}
has_changed = [k for k in data if k in record and data[k] != record[k]]
if has_changed:
self.result['changed'] = True
self.result['diff']['before'] = record
self.result['diff']['after'] = record.copy()
self.result['diff']['after'].update(data)
if not self.module.check_mode:
self.api_query(
path="/v1/dns/update_record",
method="POST",
data=data
)
record = self.get_record()
return record
def absent_record(self):
record = self.get_record()
if record:
self.result['changed'] = True
data = {
'RECORDID': record['RECORDID'],
'domain': self.module.params.get('domain'),
}
self.result['diff']['before'] = record
self.result['diff']['after'] = {}
if not self.module.check_mode:
self.api_query(
path="/v1/dns/delete_record",
method="POST",
data=data
)
return record
def main():
argument_spec = vultr_argument_spec()
argument_spec.update(dict(
domain=dict(required=True),
name=dict(default="", aliases=['subrecord']),
state=dict(choices=['present', 'absent'], default='present'),
ttl=dict(type='int', default=300),
record_type=dict(choices=RECORD_TYPES, default='A', aliases=['type']),
multiple=dict(type='bool', default=False),
priority=dict(type='int', default=0),
data=dict()
))
module = AnsibleModule(
argument_spec=argument_spec,
required_if=[
('state', 'present', ['data']),
('multiple', True, ['data']),
],
supports_check_mode=True,
)
vultr_record = AnsibleVultrDnsRecord(module)
if module.params.get('state') == "absent":
record = vultr_record.absent_record()
else:
record = vultr_record.present_record()
result = vultr_record.get_result(record)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | 5,818,126,169,287,994,000 | 26.056452 | 117 | 0.563537 | false |
dhomeier/astropy | astropy/convolution/tests/test_convolve_nddata.py | 12 | 1761 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import numpy as np
from astropy.convolution.convolve import convolve, convolve_fft
from astropy.convolution.kernels import Gaussian2DKernel
from astropy.nddata import NDData
def test_basic_nddata():
arr = np.zeros((11, 11))
arr[5, 5] = 1
ndd = NDData(arr)
test_kernel = Gaussian2DKernel(1)
result = convolve(ndd, test_kernel)
x, y = np.mgrid[:11, :11]
expected = result[5, 5] * np.exp(-0.5 * ((x - 5)**2 + (y - 5)**2))
np.testing.assert_allclose(result, expected, atol=1e-6)
resultf = convolve_fft(ndd, test_kernel)
np.testing.assert_allclose(resultf, expected, atol=1e-6)
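# The checks above rely on convolution of a unit impulse with a Gaussian kernel
# reproducing the kernel itself, so the output is compared against
# result[5, 5] * exp(-r**2 / 2) with sigma = 1.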
@pytest.mark.parametrize('convfunc',
[lambda *args: convolve(*args, nan_treatment='interpolate', normalize_kernel=True),
lambda *args: convolve_fft(*args, nan_treatment='interpolate', normalize_kernel=True)])
def test_masked_nddata(convfunc):
arr = np.zeros((11, 11))
arr[4, 5] = arr[6, 5] = arr[5, 4] = arr[5, 6] = 0.2
arr[5, 5] = 1.5
ndd_base = NDData(arr)
mask = arr < 0 # this is all False
mask[5, 5] = True
ndd_mask = NDData(arr, mask=mask)
arrnan = arr.copy()
arrnan[5, 5] = np.nan
ndd_nan = NDData(arrnan)
test_kernel = Gaussian2DKernel(1)
result_base = convfunc(ndd_base, test_kernel)
result_nan = convfunc(ndd_nan, test_kernel)
result_mask = convfunc(ndd_mask, test_kernel)
assert np.allclose(result_nan, result_mask)
assert not np.allclose(result_base, result_mask)
assert not np.allclose(result_base, result_nan)
# check to make sure the mask run doesn't talk back to the initial array
assert np.sum(np.isnan(ndd_base.data)) != np.sum(np.isnan(ndd_nan.data))
| bsd-3-clause | -2,447,203,834,322,577,000 | 30.446429 | 91 | 0.665531 | false |
47lining/ansible | lib/ansible/runner/lookup_plugins/csvfile.py | 121 | 2645 | # (c) 2013, Jan-Piet Mens <jpmens(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible import utils, errors
import os
import codecs
import csv
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def read_csv(self, filename, key, delimiter, dflt=None, col=1):
try:
f = codecs.open(filename, 'r', encoding='utf-8')
creader = csv.reader(f, delimiter=delimiter)
for row in creader:
if row[0] == key:
return row[int(col)]
except Exception, e:
raise errors.AnsibleError("csvfile: %s" % str(e))
return dflt
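    # Lookup string format handled by run() below (values are illustrative):
    #   {{ lookup('csvfile', 'web1 file=hosts.csv delimiter=, col=2') }}
    # finds the row whose first column equals "web1" and returns the field at
    # index 2 (the third column).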
def run(self, terms, inject=None, **kwargs):
terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
if isinstance(terms, basestring):
terms = [ terms ]
ret = []
for term in terms:
params = term.split()
key = params[0]
paramvals = {
'file' : 'ansible.csv',
'default' : None,
'delimiter' : "TAB",
'col' : "1", # column to return
}
# parameters specified?
try:
for param in params[1:]:
name, value = param.split('=')
assert(name in paramvals)
paramvals[name] = value
except (ValueError, AssertionError), e:
raise errors.AnsibleError(e)
if paramvals['delimiter'] == 'TAB':
paramvals['delimiter'] = "\t"
path = utils.path_dwim(self.basedir, paramvals['file'])
var = self.read_csv(path, key, paramvals['delimiter'], paramvals['default'], paramvals['col'])
if var is not None:
if type(var) is list:
for v in var:
ret.append(v)
else:
ret.append(var)
return ret
| gpl-3.0 | 7,255,945,061,523,529,000 | 31.256098 | 106 | 0.558034 | false |
froch/kubernetes-py | kubernetes_py/models/v1/LoadBalancerStatus.py | 3 | 1579 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
from kubernetes_py.models.v1.LoadBalancerIngress import LoadBalancerIngress
from kubernetes_py.utils import is_valid_list
class LoadBalancerStatus(object):
"""
http://kubernetes.io/docs/api-reference/v1/definitions/#_v1_loadbalancerstatus
"""
def __init__(self, model=None):
super(LoadBalancerStatus, self).__init__()
self._ingress = None
if model is not None:
self._build_with_model(model)
def _build_with_model(self, model=None):
if "ingress" in model:
statuses = []
for i in model["ingress"]:
status = LoadBalancerIngress(i)
statuses.append(status)
self.ingress = statuses
# ------------------------------------------------------------------------------------- ingress
@property
def ingress(self):
return self._ingress
@ingress.setter
def ingress(self, ingress=None):
if not is_valid_list(ingress, LoadBalancerIngress):
raise SyntaxError("LoadBalancerStatus: ingress: [ {0} ] is invalid.".format(ingress))
self._ingress = ingress
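    # Usage sketch (the model shape is assumed to mirror the Kubernetes v1 API):
    #   status = LoadBalancerStatus({'ingress': [{'ip': '10.0.0.1'}]})
    #   status.serialize()  # -> {'ingress': [{'ip': '10.0.0.1'}]}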
# ------------------------------------------------------------------------------------- serialize
def serialize(self):
data = {}
if self.ingress is not None:
data["ingress"] = [x.serialize() for x in self.ingress]
return data
| apache-2.0 | -7,630,295,677,407,446,000 | 29.960784 | 101 | 0.550982 | false |
Johnzero/OE7 | openerp/addons-modules/base_gengo/res_company.py | 34 | 1601 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class res_company(osv.Model):
_name = "res.company"
_inherit = "res.company"
_columns = {
"gengo_private_key": fields.text("Gengo Private Key"),
"gengo_public_key": fields.text("Gengo Public Key"),
"gengo_comment": fields.text("Comments", help="This comment will be automatically be enclosed in each an every request sent to Gengo"),
"gengo_auto_approve": fields.boolean("Auto Approve Translation ?", help="Jobs are Automatically Approved by Gengo."),
}
_defaults = {
"gengo_auto_approve": True,
}
| agpl-3.0 | 4,168,023,327,768,589,300 | 42.27027 | 146 | 0.613991 | false |
phase4ground/DVB-receiver | modem/python/library/demodulator.py | 1 | 2190 | import numpy as np
import iir_filter
import pi_filter
class demodulator:
'General purpose demodulator that supports BPSK, QPSK and OQPSK'
def __init__(self, modulation_type, samples_per_symbol):
""" Create the classical Costas loop carrier recovery object """
# store the parameteers internally - important for stability analysis later
self.modulation_type = modulation_type
self.samples_per_symbol = samples_per_symbol
# create the sample counter
self.count = 0
# I and Q channel sum variables
self.I_sum = 0.0
self.Q_sum = 0.0
def update(self, input_sample, input_tick):
""" process a new sample, estimate a new demodulated bit if the correct time """
# # if the previous block wants to delay sampling it will supply an empty list
# # therefore we want to skip any operation and hold back on advancing the count
# if input_sample != []:
# # new bit transition, return demodulated bits depending on modulation type
# if self.count == 0:
# self.count += 1
# if self.modulation_type == "BPSK":
# return [np.real(input_sample)]
# elif self.modulation_type == "QPSK":
# return [np.real(input_sample), np.imag(input_sample)]
# elif self.modulation_type == "OQPSK":
# return [np.real(input_sample)]
# # offset bit, return demodulated bit for the offset bit in OQPSK
# elif self.count == self.samples_per_symbol/2:
# self.count += 1
# if self.modulation_type == "OQPSK":
# return [np.imag(input_sample)]
# # not the correct time demodulate, return nothing
# # callign function should be used with the extend function rather than append so a zero length list is added
# else:
# self.count += 1
# return []
# else:
# return []
        # NOTE: the original file is truncated at this point; the lines
        # below are a minimal completion based on the commented-out
        # reference implementation above (integrate-and-dump, assuming
        # input_tick is truthy at symbol boundaries).
        self.I_sum += np.real(input_sample)
        self.Q_sum += np.imag(input_sample)
        if input_tick:
            I_sample = self.I_sum
            self.I_sum = 0.0
            Q_sample = self.Q_sum
            self.Q_sum = 0.0
            if self.modulation_type == "BPSK":
                return [I_sample]
            elif self.modulation_type == "QPSK":
                return [I_sample, Q_sample]
            elif self.modulation_type == "OQPSK":
                return [I_sample]
        return []
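

# --- Usage sketch (illustrative; not part of the original module) ---
# A minimal demonstration of how this class might be driven, assuming one
# complex baseband sample per call and a tick of 1 at symbol boundaries;
# the alternating-symbol input below is made up for the example.
if __name__ == "__main__":
    demod = demodulator("BPSK", samples_per_symbol=8)
    soft_bits = []
    for n in range(32):
        # four symbols of 8 samples each, alternating +1/-1 on the I channel
        sample = complex(1.0 if (n // 8) % 2 == 0 else -1.0, 0.0)
        tick = 1 if n % 8 == 7 else 0
        soft_bits.extend(demod.update(sample, tick))
    # expect alternating-sign integrate-and-dump outputs, e.g. [8.0, -8.0, ...]
    print(soft_bits)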
| gpl-3.0 | -1,065,047,188,241,007,700 | 29.84507 | 122 | 0.556164 | false |
drowningchild/lgog_old | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
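
# Illustrative test-specification snippet (inferred from the parser below;
# the thread id and priority used here are hypothetical examples). Each
# line is "command: opcode: threadid: data", where commands are
# C (command), T (test) and W (wait):
#
#   C: schedfifo:   0:  80
#   C: lock:        0:  0
#   W: locked:      0:  0
#   C: unlock:      0:  0
#   T: unlocked:    0:  0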
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 | 3,594,935,474,822,208,500 | 23.122727 | 70 | 0.49463 | false |
psgganesh/sparkplug | packages/Sparkplug/Admin/src/node_modules/laravel-elixir/node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py | 2736 | 6387 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import gyp.common
import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
class Tool(object):
"""Visual Studio tool."""
def __init__(self, name, attrs=None):
"""Initializes the tool.
Args:
name: Tool name.
attrs: Dict of tool attributes; may be None.
"""
self._attrs = attrs or {}
self._attrs['Name'] = name
def _GetSpecification(self):
"""Creates an element for the tool.
Returns:
      The specification for the tool, as an easy_xml list.
"""
return ['Tool', self._attrs]
class Filter(object):
"""Visual Studio filter - that is, a virtual folder."""
def __init__(self, name, contents=None):
"""Initializes the folder.
Args:
name: Filter (folder) name.
contents: List of filenames and/or Filter objects contained.
"""
self.name = name
self.contents = list(contents or [])
#------------------------------------------------------------------------------
class Writer(object):
"""Visual Studio XML project writer."""
def __init__(self, project_path, version, name, guid=None, platforms=None):
"""Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
name: Name of the project.
guid: GUID to use for project, if not None.
platforms: Array of string, the supported platforms. If null, ['Win32']
"""
self.project_path = project_path
self.version = version
self.name = name
self.guid = guid
# Default to Win32 for platforms.
if not platforms:
platforms = ['Win32']
# Initialize the specifications of the various sections.
self.platform_section = ['Platforms']
for platform in platforms:
self.platform_section.append(['Platform', {'Name': platform}])
self.tool_files_section = ['ToolFiles']
self.configurations_section = ['Configurations']
self.files_section = ['Files']
# Keep a dict keyed on filename to speed up access.
self.files_dict = dict()
def AddToolFile(self, path):
"""Adds a tool file to the project.
Args:
path: Relative path from project to tool file.
"""
self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
"""Returns the specification for a configuration.
Args:
config_type: Type of configuration node.
config_name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
    Returns:
      The specification for the configuration, as an easy_xml list.
    """
# Handle defaults
if not attrs:
attrs = {}
if not tools:
tools = []
# Add configuration node and its attributes
node_attrs = attrs.copy()
node_attrs['Name'] = config_name
specification = [config_type, node_attrs]
# Add tool nodes and their attributes
if tools:
for t in tools:
if isinstance(t, Tool):
specification.append(t._GetSpecification())
else:
specification.append(Tool(t)._GetSpecification())
return specification
def AddConfig(self, name, attrs=None, tools=None):
"""Adds a configuration to the project.
Args:
name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
"""
spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
self.configurations_section.append(spec)
def _AddFilesToNode(self, parent, files):
"""Adds files and/or filters to the parent node.
Args:
parent: Destination node
files: A list of Filter objects and/or relative paths to files.
Will call itself recursively, if the files list contains Filter objects.
"""
for f in files:
if isinstance(f, Filter):
node = ['Filter', {'Name': f.name}]
self._AddFilesToNode(node, f.contents)
else:
node = ['File', {'RelativePath': f}]
self.files_dict[f] = node
parent.append(node)
def AddFiles(self, files):
"""Adds files to the project.
Args:
files: A list of Filter objects and/or relative paths to files.
This makes a copy of the file/filter tree at the time of this call. If you
later add files to a Filter object which was passed into a previous call
to AddFiles(), it will not be reflected in this project.
"""
self._AddFilesToNode(self.files_section, files)
# TODO(rspangler) This also doesn't handle adding files to an existing
# filter. That is, it doesn't merge the trees.
def AddFileConfig(self, path, config, attrs=None, tools=None):
"""Adds a configuration to a file.
Args:
path: Relative path to the file.
config: Name of configuration to add.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Raises:
ValueError: Relative path does not match any file added via AddFiles().
"""
# Find the file node with the right relative path
parent = self.files_dict.get(path)
if not parent:
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
# Add the config to the file node
spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
tools)
parent.append(spec)
def WriteIfChanged(self):
"""Writes the project file."""
# First create XML content definition
content = [
'VisualStudioProject',
{'ProjectType': 'Visual C++',
'Version': self.version.ProjectVersion(),
'Name': self.name,
'ProjectGUID': self.guid,
'RootNamespace': self.name,
'Keyword': 'Win32Proj'
},
self.platform_section,
self.tool_files_section,
self.configurations_section,
['References'], # empty section
self.files_section,
['Globals'] # empty section
]
easy_xml.WriteXmlIfChanged(content, self.project_path,
encoding="Windows-1252")
| mit | -21,774,723,299,539,844 | 29.706731 | 79 | 0.62361 | false |
usirin/koding | go/src/vendor/github.com/caglar10ur/lxc/config/apparmor/lxc-generate-aa-rules.py | 34 | 3770 | #!/usr/bin/env python3
import sys
blocks = []
#
# blocks is an array of paths under which we want to block by
# default.
#
# blocks[0] = ['path' = '/sys', 'children' = [A,B] ]
# blocks[1] = ['path' = '/proc/sys', 'children' = [ E ] ]
# A = [ 'path' = 'fs', children = [C] ]
# C = [ 'path' = 'cgroup', children = [F] ]
# B = [ 'path' = 'class', children = [D] ]
# D = [ 'path' = 'net', children = [F] ]
# E = [ 'path' = 'shm*' ]
# F = [ 'path' = '**' ]
def add_block(path):
for b in blocks:
if b['path'] == path:
# duplicate
return
blocks.append({'path': path.strip(), 'children': []})
# @prev is an array of dicts, each containing 'path' and
# 'children'. @path is a string. We are looking for an entry
# in @prev which contains @path, and will return its
# children array.
def child_get(prev, path):
for p in prev:
if p['path'] == path:
return p['children']
return None
def add_allow(path):
# find which block we belong to
found = None
for b in blocks:
l = len(b['path'])
if len(path) <= l:
continue
# TODO - should we find the longest match?
if path[0:l] == b['path']:
found = b
break
if found is None:
print("allow with no previous block at %s" % path)
sys.exit(1)
p = path[l:].strip()
while p[:1] == "/":
p = p[1:]
prev = b['children']
for s in p.split('/'):
n = {'path': s.strip(), 'children': []}
tmp = child_get(prev, n['path'])
if tmp is not None:
prev = tmp
else:
prev.append(n)
prev = n['children']
config = "config"
if len(sys.argv) > 1:
config = sys.argv[1]
with open(config) as f:
for x in f.readlines():
x.strip()
if x[:1] == '#':
continue
try:
(cmd, path) = x.split(' ')
except: # blank line
continue
if cmd == "block":
add_block(path)
elif cmd == "allow":
add_allow(path)
else:
print("Unknown command: %s" % cmd)
sys.exit(1)
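
# Illustrative config snippet consumed by the loop above (the paths mirror
# the example in the header comment; a real config file ships with lxc):
#
#   block /sys
#   allow /sys/fs/cgroup/**
#   allow /sys/class/net/**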
denies = []
def collect_chars(children, ref, index):
r = ""
for c in children:
if index >= len(c['path']):
continue
if ref[0:index] != c['path'][0:index]:
continue
if c['path'][index] not in r:
r = r + c['path'][index]
return r
def append_deny(s):
s = "%s wklx," % s
if s not in denies:
denies.append(s)
def gen_denies(pathsofar, children):
for c in children:
for char in range(len(c['path'])):
if char == len(c['path'])-1 and c['path'][char] == '*':
continue
if char == len(c['path'])-2:
if c['path'][char:char+2] == '**':
continue
x = collect_chars(children, c['path'], char)
newdeny = "deny %s/%s[^%s]*{,/**}" % (pathsofar,
c['path'][0:char], x)
append_deny(newdeny)
if c['path'] != '**' and c['path'][len(c['path'])-1] != '*':
newdeny = "deny %s/%s?*{,/**}" % (pathsofar, c['path'])
append_deny(newdeny)
elif c['path'] != '**':
newdeny = "deny %s/%s/**" % (pathsofar, c['path'])
append_deny(newdeny)
if len(c['children']) != 0:
newpath = "%s/%s" % (pathsofar, c['path'])
gen_denies(newpath, c['children'])
for b in blocks:
gen_denies(b['path'], b['children'])
denies.sort()
genby = " # generated by: lxc-generate-aa-rules.py"
for a in sys.argv[1:]:
genby += " %s" % a
print(genby)
for d in denies:
print(" %s" % d)
| apache-2.0 | 5,589,174,180,813,709,000 | 26.122302 | 71 | 0.471353 | false |
jaxxstorm/fullerite | src/diamond/collectors/portstat/portstat.py | 51 | 1984 | """
The PortStatCollector collects metrics about ports listed in config file.
##### Dependencies
* psutil
"""
from collections import defaultdict
import diamond.collector
try:
import psutil
except ImportError:
psutil = None
def get_port_stats(port):
"""
Iterate over connections and count states for specified port
:param port: port for which stats are collected
:return: Counter with port states
"""
cnts = defaultdict(int)
for c in psutil.net_connections():
c_port = c.laddr[1]
if c_port != port:
continue
status = c.status.lower()
cnts[status] += 1
return cnts
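
# Usage sketch (illustrative; port 8080 and the counts are made up):
#
#   >>> get_port_stats(8080)
#   defaultdict(<type 'int'>, {'listen': 1, 'established': 3})
#
# Keys are psutil connection status strings, lower-cased.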
class PortStatCollector(diamond.collector.Collector):
def __init__(self, *args, **kwargs):
super(PortStatCollector, self).__init__(*args, **kwargs)
self.ports = {}
for port_name, cfg in self.config['port'].items():
port_cfg = {}
for key in ('number',):
port_cfg[key] = cfg.get(key, [])
self.ports[port_name] = port_cfg
def get_default_config_help(self):
config_help = super(PortStatCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
config = super(PortStatCollector, self).get_default_config()
config.update({
'path': 'port',
'port': {},
})
return config
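
    # Illustrative shape of self.config['port'] consumed by __init__ above
    # (a Python view, not a literal config-file syntax; names are examples):
    #
    #   {'http': {'number': 80}}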
def collect(self):
"""
Overrides the Collector.collect method
"""
if psutil is None:
self.log.error('Unable to import module psutil')
return {}
for port_name, port_cfg in self.ports.iteritems():
port = int(port_cfg['number'])
stats = get_port_stats(port)
for stat_name, stat_value in stats.iteritems():
metric_name = '%s.%s' % (port_name, stat_name)
self.publish(metric_name, stat_value)
| apache-2.0 | 7,592,469,356,822,668,000 | 25.453333 | 78 | 0.576613 | false |
jruiperezv/ANALYSE | lms/djangoapps/shoppingcart/processors/helpers.py | 169 | 1025 | """
Helper methods for credit card processing modules.
These methods should be shared among all processor implementations,
but should NOT be imported by modules outside this package.
"""
from django.conf import settings
from microsite_configuration import microsite
def get_processor_config():
"""
Return a dictionary of configuration settings for the active credit card processor.
If we're in a microsite and overrides are available, return those instead.
Returns:
dict
"""
# Retrieve the configuration settings for the active credit card processor
config = settings.CC_PROCESSOR.get(
settings.CC_PROCESSOR_NAME, {}
)
# Check whether we're in a microsite that overrides our configuration
# If so, find the microsite-specific configuration in the 'microsites'
# sub-key of the normal processor configuration.
config_key = microsite.get_value('cybersource_config_key')
if config_key:
config = config['microsites'][config_key]
return config
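
# Illustrative shape of the CC_PROCESSOR setting consumed above (processor
# name, keys and microsite key are hypothetical examples):
#
#   CC_PROCESSOR = {
#       'MyProcessor': {
#           'ACCESS_KEY': '...',
#           'microsites': {
#               'my_microsite': {'ACCESS_KEY': '...'},
#           },
#       },
#   }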
| agpl-3.0 | 4,789,410,187,150,981,000 | 32.064516 | 87 | 0.729756 | false |
kenrachynski/powerline | powerline/renderers/vim.py | 32 | 5785 | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import vim
from powerline.bindings.vim import vim_get_func, vim_getoption, environ, current_tabpage, get_vim_encoding
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from powerline.theme import Theme
from powerline.lib.unicode import unichr, register_strwidth_error
vim_mode = vim_get_func('mode', rettype='unicode')
if int(vim.eval('v:version')) >= 702:
_vim_mode = vim_mode
vim_mode = lambda: _vim_mode(1)
mode_translations = {
unichr(ord('V') - 0x40): '^V',
unichr(ord('S') - 0x40): '^S',
}
class VimRenderer(Renderer):
'''Powerline vim segment renderer.'''
character_translations = Renderer.character_translations.copy()
character_translations[ord('%')] = '%%'
segment_info = Renderer.segment_info.copy()
segment_info.update(environ=environ)
def __init__(self, *args, **kwargs):
if not hasattr(vim, 'strwidth'):
			# Hope nobody wants to change this at runtime
if vim.eval('&ambiwidth') == 'double':
kwargs = dict(**kwargs)
kwargs['ambigious'] = 2
super(VimRenderer, self).__init__(*args, **kwargs)
self.hl_groups = {}
self.prev_highlight = None
self.strwidth_error_name = register_strwidth_error(self.strwidth)
self.encoding = get_vim_encoding()
def shutdown(self):
self.theme.shutdown()
for match in self.local_themes.values():
if 'theme' in match:
match['theme'].shutdown()
def add_local_theme(self, matcher, theme):
if matcher in self.local_themes:
raise KeyError('There is already a local theme with given matcher')
self.local_themes[matcher] = theme
def get_matched_theme(self, match):
try:
return match['theme']
except KeyError:
match['theme'] = Theme(theme_config=match['config'], main_theme_config=self.theme_config, **self.theme_kwargs)
return match['theme']
def get_theme(self, matcher_info):
if matcher_info is None:
return self.get_matched_theme(self.local_themes[None])
for matcher in self.local_themes.keys():
if matcher and matcher(matcher_info):
return self.get_matched_theme(self.local_themes[matcher])
else:
return self.theme
if hasattr(vim, 'strwidth'):
if sys.version_info < (3,):
def strwidth(self, string):
				# Does not work with tabs, but neither does strwidth from the
				# default renderer
return vim.strwidth(string.encode(self.encoding, 'replace'))
else:
@staticmethod
def strwidth(string):
return vim.strwidth(string)
def get_segment_info(self, segment_info, mode):
return segment_info or self.segment_info
def render(self, window=None, window_id=None, winnr=None, is_tabline=False):
'''Render all segments.'''
segment_info = self.segment_info.copy()
if window is vim.current.window:
mode = vim_mode()
mode = mode_translations.get(mode, mode)
else:
mode = 'nc'
segment_info.update(
window=window,
mode=mode,
window_id=window_id,
winnr=winnr,
buffer=window.buffer,
tabpage=current_tabpage(),
encoding=self.encoding,
)
segment_info['tabnr'] = segment_info['tabpage'].number
segment_info['bufnr'] = segment_info['buffer'].number
if is_tabline:
winwidth = int(vim_getoption('columns'))
else:
winwidth = segment_info['window'].width
statusline = super(VimRenderer, self).render(
mode=mode,
width=winwidth,
segment_info=segment_info,
matcher_info=(None if is_tabline else segment_info),
)
statusline = statusline.encode(self.encoding, self.strwidth_error_name)
return statusline
def reset_highlight(self):
self.hl_groups.clear()
def hlstyle(self, fg=None, bg=None, attrs=None):
'''Highlight a segment.
If an argument is None, the argument is ignored. If an argument is
False, the argument is reset to the terminal defaults. If an argument
is a valid color or attribute, it’s added to the vim highlight group.
'''
# In order not to hit E541 two consequent identical highlighting
# specifiers may be squashed into one.
attrs = attrs or 0 # Normalize `attrs`
if (fg, bg, attrs) == self.prev_highlight:
return ''
self.prev_highlight = (fg, bg, attrs)
# We don’t need to explicitly reset attributes in vim, so skip those
# calls
if not attrs and not bg and not fg:
return ''
if not (fg, bg, attrs) in self.hl_groups:
hl_group = {
'ctermfg': 'NONE',
'guifg': None,
'ctermbg': 'NONE',
'guibg': None,
'attrs': ['NONE'],
'name': '',
}
if fg is not None and fg is not False:
hl_group['ctermfg'] = fg[0]
hl_group['guifg'] = fg[1]
if bg is not None and bg is not False:
hl_group['ctermbg'] = bg[0]
hl_group['guibg'] = bg[1]
if attrs:
hl_group['attrs'] = []
if attrs & ATTR_BOLD:
hl_group['attrs'].append('bold')
if attrs & ATTR_ITALIC:
hl_group['attrs'].append('italic')
if attrs & ATTR_UNDERLINE:
hl_group['attrs'].append('underline')
hl_group['name'] = (
'Pl_'
+ str(hl_group['ctermfg']) + '_'
+ str(hl_group['guifg']) + '_'
+ str(hl_group['ctermbg']) + '_'
+ str(hl_group['guibg']) + '_'
+ ''.join(hl_group['attrs'])
)
self.hl_groups[(fg, bg, attrs)] = hl_group
vim.command('hi {group} ctermfg={ctermfg} guifg={guifg} guibg={guibg} ctermbg={ctermbg} cterm={attrs} gui={attrs}'.format(
group=hl_group['name'],
ctermfg=hl_group['ctermfg'],
guifg='#{0:06x}'.format(hl_group['guifg']) if hl_group['guifg'] is not None else 'NONE',
ctermbg=hl_group['ctermbg'],
guibg='#{0:06x}'.format(hl_group['guibg']) if hl_group['guibg'] is not None else 'NONE',
attrs=','.join(hl_group['attrs']),
))
return '%#' + self.hl_groups[(fg, bg, attrs)]['name'] + '#'
renderer = VimRenderer
| mit | -7,180,432,641,266,247,000 | 29.75 | 125 | 0.667878 | false |
saurabhbajaj207/CarpeDiem | venv/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py | 117 | 15558 | # -*- coding: utf-8 -*-
#
# SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Cipher.DES3"""
__revision__ = "$Id$"
from common import dict # For compatibility with Python 2.1 and 2.2
from Crypto.Util.py3compat import *
from binascii import hexlify
# This is a list of (plaintext, ciphertext, key, description) tuples.
SP800_20_A1_KEY = '01' * 24
SP800_20_A2_PT = '00' * 8
test_data = [
# Test vector from Appendix B of NIST SP 800-67
# "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block
# Cipher"
# http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf
('54686520717566636b2062726f776e20666f78206a756d70',
'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900',
'0123456789abcdef23456789abcdef01456789abcdef0123',
'NIST SP800-67 B.1'),
# Test vectors "The Multi-block Message Test (MMT) for DES and TDES"
# http://csrc.nist.gov/groups/STM/cavp/documents/des/DESMMT.pdf
('326a494cd33fe756', 'b22b8d66de970692',
'627f460e08104a1043cd265d5840eaf1313edf97df2a8a8c',
'DESMMT #1', dict(mode='CBC', iv='8e29f75ea77e5475')),
('84401f78fe6c10876d8ea23094ea5309', '7b1f7c7e3b1c948ebd04a75ffba7d2f5',
'37ae5ebf46dff2dc0754b94f31cbb3855e7fd36dc870bfae',
'DESMMT #2', dict(mode='CBC', iv='3d1de3cc132e3b65')),
# Test vectors from Appendix A of NIST SP 800-20
# "Modes of Operation Validation System for the Triple Data Encryption
# Algorithm (TMOVS): Requirements and Procedures"
# http://csrc.nist.gov/publications/nistpubs/800-20/800-20.pdf
# Table A.1 - Variable Plaintext Known Answer Test
('8000000000000000', '95f8a5e5dd31d900', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #0'),
('4000000000000000', 'dd7f121ca5015619', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #1'),
('2000000000000000', '2e8653104f3834ea', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #2'),
('1000000000000000', '4bd388ff6cd81d4f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #3'),
('0800000000000000', '20b9e767b2fb1456', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #4'),
('0400000000000000', '55579380d77138ef', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #5'),
('0200000000000000', '6cc5defaaf04512f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #6'),
('0100000000000000', '0d9f279ba5d87260', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #7'),
('0080000000000000', 'd9031b0271bd5a0a', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #8'),
('0040000000000000', '424250b37c3dd951', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #9'),
('0020000000000000', 'b8061b7ecd9a21e5', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #10'),
('0010000000000000', 'f15d0f286b65bd28', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #11'),
('0008000000000000', 'add0cc8d6e5deba1', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #12'),
('0004000000000000', 'e6d5f82752ad63d1', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #13'),
('0002000000000000', 'ecbfe3bd3f591a5e', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #14'),
('0001000000000000', 'f356834379d165cd', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #15'),
('0000800000000000', '2b9f982f20037fa9', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #16'),
('0000400000000000', '889de068a16f0be6', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #17'),
('0000200000000000', 'e19e275d846a1298', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #18'),
('0000100000000000', '329a8ed523d71aec', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #19'),
('0000080000000000', 'e7fce22557d23c97', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #20'),
('0000040000000000', '12a9f5817ff2d65d', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #21'),
('0000020000000000', 'a484c3ad38dc9c19', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #22'),
('0000010000000000', 'fbe00a8a1ef8ad72', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #23'),
('0000008000000000', '750d079407521363', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #24'),
('0000004000000000', '64feed9c724c2faf', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #25'),
('0000002000000000', 'f02b263b328e2b60', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #26'),
('0000001000000000', '9d64555a9a10b852', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #27'),
('0000000800000000', 'd106ff0bed5255d7', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #28'),
('0000000400000000', 'e1652c6b138c64a5', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #29'),
('0000000200000000', 'e428581186ec8f46', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #30'),
('0000000100000000', 'aeb5f5ede22d1a36', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #31'),
('0000000080000000', 'e943d7568aec0c5c', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #32'),
('0000000040000000', 'df98c8276f54b04b', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #33'),
('0000000020000000', 'b160e4680f6c696f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #34'),
('0000000010000000', 'fa0752b07d9c4ab8', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #35'),
('0000000008000000', 'ca3a2b036dbc8502', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #36'),
('0000000004000000', '5e0905517bb59bcf', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #37'),
('0000000002000000', '814eeb3b91d90726', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #38'),
('0000000001000000', '4d49db1532919c9f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #39'),
('0000000000800000', '25eb5fc3f8cf0621', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #40'),
('0000000000400000', 'ab6a20c0620d1c6f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #41'),
('0000000000200000', '79e90dbc98f92cca', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #42'),
('0000000000100000', '866ecedd8072bb0e', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #43'),
('0000000000080000', '8b54536f2f3e64a8', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #44'),
('0000000000040000', 'ea51d3975595b86b', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #45'),
('0000000000020000', 'caffc6ac4542de31', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #46'),
('0000000000010000', '8dd45a2ddf90796c', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #47'),
('0000000000008000', '1029d55e880ec2d0', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #48'),
('0000000000004000', '5d86cb23639dbea9', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #49'),
('0000000000002000', '1d1ca853ae7c0c5f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #50'),
('0000000000001000', 'ce332329248f3228', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #51'),
('0000000000000800', '8405d1abe24fb942', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #52'),
('0000000000000400', 'e643d78090ca4207', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #53'),
('0000000000000200', '48221b9937748a23', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #54'),
('0000000000000100', 'dd7c0bbd61fafd54', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #55'),
('0000000000000080', '2fbc291a570db5c4', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #56'),
('0000000000000040', 'e07c30d7e4e26e12', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #57'),
('0000000000000020', '0953e2258e8e90a1', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #58'),
('0000000000000010', '5b711bc4ceebf2ee', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #59'),
('0000000000000008', 'cc083f1e6d9e85f6', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #60'),
('0000000000000004', 'd2fd8867d50d2dfe', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #61'),
('0000000000000002', '06e7ea22ce92708f', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #62'),
('0000000000000001', '166b40b44aba4bd6', SP800_20_A1_KEY,
'NIST SP800-20 A.1 #63'),
# Table A.2 - Variable Key Known Answer Test
(SP800_20_A2_PT, '95a8d72813daa94d', '8001010101010101'*3,
'NIST SP800-20 A.2 #0'),
(SP800_20_A2_PT, '0eec1487dd8c26d5', '4001010101010101'*3,
'NIST SP800-20 A.2 #1'),
(SP800_20_A2_PT, '7ad16ffb79c45926', '2001010101010101'*3,
'NIST SP800-20 A.2 #2'),
(SP800_20_A2_PT, 'd3746294ca6a6cf3', '1001010101010101'*3,
'NIST SP800-20 A.2 #3'),
(SP800_20_A2_PT, '809f5f873c1fd761', '0801010101010101'*3,
'NIST SP800-20 A.2 #4'),
(SP800_20_A2_PT, 'c02faffec989d1fc', '0401010101010101'*3,
'NIST SP800-20 A.2 #5'),
(SP800_20_A2_PT, '4615aa1d33e72f10', '0201010101010101'*3,
'NIST SP800-20 A.2 #6'),
(SP800_20_A2_PT, '2055123350c00858', '0180010101010101'*3,
'NIST SP800-20 A.2 #7'),
(SP800_20_A2_PT, 'df3b99d6577397c8', '0140010101010101'*3,
'NIST SP800-20 A.2 #8'),
(SP800_20_A2_PT, '31fe17369b5288c9', '0120010101010101'*3,
'NIST SP800-20 A.2 #9'),
(SP800_20_A2_PT, 'dfdd3cc64dae1642', '0110010101010101'*3,
'NIST SP800-20 A.2 #10'),
(SP800_20_A2_PT, '178c83ce2b399d94', '0108010101010101'*3,
'NIST SP800-20 A.2 #11'),
(SP800_20_A2_PT, '50f636324a9b7f80', '0104010101010101'*3,
'NIST SP800-20 A.2 #12'),
(SP800_20_A2_PT, 'a8468ee3bc18f06d', '0102010101010101'*3,
'NIST SP800-20 A.2 #13'),
(SP800_20_A2_PT, 'a2dc9e92fd3cde92', '0101800101010101'*3,
'NIST SP800-20 A.2 #14'),
(SP800_20_A2_PT, 'cac09f797d031287', '0101400101010101'*3,
'NIST SP800-20 A.2 #15'),
(SP800_20_A2_PT, '90ba680b22aeb525', '0101200101010101'*3,
'NIST SP800-20 A.2 #16'),
(SP800_20_A2_PT, 'ce7a24f350e280b6', '0101100101010101'*3,
'NIST SP800-20 A.2 #17'),
(SP800_20_A2_PT, '882bff0aa01a0b87', '0101080101010101'*3,
'NIST SP800-20 A.2 #18'),
(SP800_20_A2_PT, '25610288924511c2', '0101040101010101'*3,
'NIST SP800-20 A.2 #19'),
(SP800_20_A2_PT, 'c71516c29c75d170', '0101020101010101'*3,
'NIST SP800-20 A.2 #20'),
(SP800_20_A2_PT, '5199c29a52c9f059', '0101018001010101'*3,
'NIST SP800-20 A.2 #21'),
(SP800_20_A2_PT, 'c22f0a294a71f29f', '0101014001010101'*3,
'NIST SP800-20 A.2 #22'),
(SP800_20_A2_PT, 'ee371483714c02ea', '0101012001010101'*3,
'NIST SP800-20 A.2 #23'),
(SP800_20_A2_PT, 'a81fbd448f9e522f', '0101011001010101'*3,
'NIST SP800-20 A.2 #24'),
(SP800_20_A2_PT, '4f644c92e192dfed', '0101010801010101'*3,
'NIST SP800-20 A.2 #25'),
(SP800_20_A2_PT, '1afa9a66a6df92ae', '0101010401010101'*3,
'NIST SP800-20 A.2 #26'),
(SP800_20_A2_PT, 'b3c1cc715cb879d8', '0101010201010101'*3,
'NIST SP800-20 A.2 #27'),
(SP800_20_A2_PT, '19d032e64ab0bd8b', '0101010180010101'*3,
'NIST SP800-20 A.2 #28'),
(SP800_20_A2_PT, '3cfaa7a7dc8720dc', '0101010140010101'*3,
'NIST SP800-20 A.2 #29'),
(SP800_20_A2_PT, 'b7265f7f447ac6f3', '0101010120010101'*3,
'NIST SP800-20 A.2 #30'),
(SP800_20_A2_PT, '9db73b3c0d163f54', '0101010110010101'*3,
'NIST SP800-20 A.2 #31'),
(SP800_20_A2_PT, '8181b65babf4a975', '0101010108010101'*3,
'NIST SP800-20 A.2 #32'),
(SP800_20_A2_PT, '93c9b64042eaa240', '0101010104010101'*3,
'NIST SP800-20 A.2 #33'),
(SP800_20_A2_PT, '5570530829705592', '0101010102010101'*3,
'NIST SP800-20 A.2 #34'),
(SP800_20_A2_PT, '8638809e878787a0', '0101010101800101'*3,
'NIST SP800-20 A.2 #35'),
(SP800_20_A2_PT, '41b9a79af79ac208', '0101010101400101'*3,
'NIST SP800-20 A.2 #36'),
(SP800_20_A2_PT, '7a9be42f2009a892', '0101010101200101'*3,
'NIST SP800-20 A.2 #37'),
(SP800_20_A2_PT, '29038d56ba6d2745', '0101010101100101'*3,
'NIST SP800-20 A.2 #38'),
(SP800_20_A2_PT, '5495c6abf1e5df51', '0101010101080101'*3,
'NIST SP800-20 A.2 #39'),
(SP800_20_A2_PT, 'ae13dbd561488933', '0101010101040101'*3,
'NIST SP800-20 A.2 #40'),
(SP800_20_A2_PT, '024d1ffa8904e389', '0101010101020101'*3,
'NIST SP800-20 A.2 #41'),
(SP800_20_A2_PT, 'd1399712f99bf02e', '0101010101018001'*3,
'NIST SP800-20 A.2 #42'),
(SP800_20_A2_PT, '14c1d7c1cffec79e', '0101010101014001'*3,
'NIST SP800-20 A.2 #43'),
(SP800_20_A2_PT, '1de5279dae3bed6f', '0101010101012001'*3,
'NIST SP800-20 A.2 #44'),
(SP800_20_A2_PT, 'e941a33f85501303', '0101010101011001'*3,
'NIST SP800-20 A.2 #45'),
(SP800_20_A2_PT, 'da99dbbc9a03f379', '0101010101010801'*3,
'NIST SP800-20 A.2 #46'),
(SP800_20_A2_PT, 'b7fc92f91d8e92e9', '0101010101010401'*3,
'NIST SP800-20 A.2 #47'),
(SP800_20_A2_PT, 'ae8e5caa3ca04e85', '0101010101010201'*3,
'NIST SP800-20 A.2 #48'),
(SP800_20_A2_PT, '9cc62df43b6eed74', '0101010101010180'*3,
'NIST SP800-20 A.2 #49'),
(SP800_20_A2_PT, 'd863dbb5c59a91a0', '0101010101010140'*3,
'NIST SP800-20 A.2 #50'),
(SP800_20_A2_PT, 'a1ab2190545b91d7', '0101010101010120'*3,
'NIST SP800-20 A.2 #51'),
(SP800_20_A2_PT, '0875041e64c570f7', '0101010101010110'*3,
'NIST SP800-20 A.2 #52'),
(SP800_20_A2_PT, '5a594528bebef1cc', '0101010101010108'*3,
'NIST SP800-20 A.2 #53'),
(SP800_20_A2_PT, 'fcdb3291de21f0c0', '0101010101010104'*3,
'NIST SP800-20 A.2 #54'),
(SP800_20_A2_PT, '869efd7f9f265a09', '0101010101010102'*3,
'NIST SP800-20 A.2 #55'),
# "Two-key 3DES". Test vector generated using PyCrypto 2.0.1.
# This test is designed to test the DES3 API, not the correctness of the
# output.
('21e81b7ade88a259', '5c577d4d9b20c0f8',
'9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'),
# The following test vectors have been generated with gpg v1.4.0.
# The command line used was:
# gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \
# --disable-mdc --s2k-mode 0 --output ct pt
# For an explanation, see test_AES.py .
( 'ac1762037074324fb53ba3596f73656d69746556616c6c6579', # Plaintext, 'YosemiteValley'
'9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d', # Ciphertext
'7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2', # Key (hash of 'BearsAhead')
'GPG Test Vector #1',
dict(mode='OPENPGP', iv='cd47e2afb8b7e4b0', encrypted_iv='6a7eef0b58050e8b904a' ) ),
]
def get_tests(config={}):
from Crypto.Cipher import DES3
from common import make_block_tests
return make_block_tests(DES3, "DES3", test_data)
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit | -4,086,194,489,357,829,600 | 45.720721 | 93 | 0.631508 | false |
MSusik/invenio | invenio/legacy/webstyle/templates.py | 3 | 29772 | ## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebStyle templates. Customize the look of pages of Invenio
"""
__revision__ = \
"$Id$"
import time
import cgi
import traceback
import urllib
import sys
import string
from bs4 import BeautifulSoup
from invenio.ext.template import render_template_to_string
from invenio.config import \
CFG_SITE_RECORD, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_NAME_INTL, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_SECURE_URL, \
CFG_BASE_URL, \
CFG_SITE_URL, \
CFG_VERSION, \
CFG_WEBSTYLE_TEMPLATE_SKIN, \
CFG_INSPIRE_SITE, \
CFG_WEBLINKBACK_TRACKBACK_ENABLED
from invenio.base.i18n import gettext_set_language, language_list_long, is_language_rtl
from invenio.utils.url import make_canonical_urlargd, create_html_link, \
get_canonical_and_alternates_urls
from invenio.utils.date import convert_datecvs_to_datestruct, \
convert_datestruct_to_dategui
from invenio.modules.formatter import format_record
from invenio.utils.html import get_mathjax_header
import invenio.legacy.template
websearch_templates = invenio.legacy.template.load('websearch')
class Template:
def tmpl_navtrailbox_body(self, ln, title, previous_links, separator,
prolog, epilog):
"""Bootstrap friendly-Create navigation trail box body
Parameters:
- 'ln' *string* - The language to display
- 'title' *string* - page title;
- 'previous_links' *string* - the trail content from site title until current page (both ends exclusive)
- 'prolog' *string* - HTML code to prefix the navtrail item with
- 'epilog' *string* - HTML code to suffix the navtrail item with
- 'separator' *string* - HTML code that separates two navtrail items
Output:
- text containing the navtrail
        Note: returns an empty string for the Home page (guessed by title).
"""
# load the right message language
_ = gettext_set_language(ln)
if title == CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME):
return ""
# Breadcrumbs
# breadcrumb objects should provide properties 'text' and 'url'
# First element
breadcrumbs = [dict(text=_("Home"), url=CFG_SITE_URL), ]
# Decode previous elements
if previous_links:
soup = BeautifulSoup(previous_links)
for link in soup.find_all('a'):
breadcrumbs.append(dict(
text=unicode(' '.join(link.contents)),
url=link.get('href')))
# Add head
if title:
breadcrumbs.append(dict(text=title, url='#'))
return render_template_to_string("breadcrumbs.html",
breadcrumbs=breadcrumbs).encode('utf8')
def tmpl_page(self, req, **kwargs):
"""Creates a complete page
Parameters:
- 'ln' *string* - The language to display
- 'description' *string* - description goes to the metadata in the header of the HTML page,
not yet escaped for HTML
- 'keywords' *string* - keywords goes to the metadata in the header of the HTML page,
not yet escaped for HTML
- 'userinfobox' *string* - the HTML code for the user information box
- 'useractivities_menu' *string* - the HTML code for the user activities menu
- 'adminactivities_menu' *string* - the HTML code for the admin activities menu
- 'navtrailbox' *string* - the HTML code for the navigation trail box
- 'pageheaderadd' *string* - additional page header HTML code
- 'boxlefttop' *string* - left-top box HTML code
- 'boxlefttopadd' *string* - additional left-top box HTML code
- 'boxleftbottom' *string* - left-bottom box HTML code
- 'boxleftbottomadd' *string* - additional left-bottom box HTML code
- 'boxrighttop' *string* - right-top box HTML code
- 'boxrighttopadd' *string* - additional right-top box HTML code
- 'boxrightbottom' *string* - right-bottom box HTML code
- 'boxrightbottomadd' *string* - additional right-bottom box HTML code
- 'title' *string* - the title of the page, not yet escaped for HTML
- 'titleprologue' *string* - what to print before page title
- 'titleepilogue' *string* - what to print after page title
- 'body' *string* - the body of the page
- 'lastupdated' *string* - when the page was last updated
- 'uid' *int* - user ID
- 'pagefooteradd' *string* - additional page footer HTML code
- 'secure_page_p' *int* (0 or 1) - are we to use HTTPS friendly page elements or not?
- 'navmenuid' *string* - the id of the navigation item to highlight for this page
- 'metaheaderadd' *string* - list of further tags to add to the <HEAD></HEAD> part of the page
- 'rssurl' *string* - the url of the RSS feed for this page
- 'show_title_p' *int* (0 or 1) - do we display the page title in the body of the page?
- 'body_css_classes' *list* - list of classes to add to the body tag
- 'show_header' *boolean* - tells whether page header should be displayed or not
- 'show_footer' *boolean* - tells whether page footer should be displayed or not
Output:
- HTML code of the page
"""
ctx = dict(ln=CFG_SITE_LANG, description="",
keywords="", userinfobox="", useractivities_menu="",
adminactivities_menu="", navtrailbox="",
pageheaderadd="", boxlefttop="", boxlefttopadd="",
boxleftbottom="", boxleftbottomadd="",
boxrighttop="", boxrighttopadd="",
boxrightbottom="", boxrightbottomadd="",
titleprologue="", title="", titleepilogue="",
body="", lastupdated=None, pagefooteradd="", uid=0,
secure_page_p=0, navmenuid="", metaheaderadd="",
rssurl=CFG_SITE_URL+"/rss",
show_title_p=True, body_css_classes=None,
show_header=True, show_footer=True)
ctx.update(kwargs)
return render_template_to_string("legacy_page.html", **ctx).encode('utf8')
def tmpl_pageheader(self, req, **kwargs):
"""Creates a page header
Parameters:
- 'ln' *string* - The language to display
- 'headertitle' *string* - the title of the HTML page, not yet escaped for HTML
- 'description' *string* - description goes to the metadata in the header of the HTML page,
not yet escaped for HTML
- 'keywords' *string* - keywords goes to the metadata in the header of the HTML page,
not yet escaped for HTML
- 'userinfobox' *string* - the HTML code for the user information box
- 'useractivities_menu' *string* - the HTML code for the user activities menu
- 'adminactivities_menu' *string* - the HTML code for the admin activities menu
- 'navtrailbox' *string* - the HTML code for the navigation trail box
- 'pageheaderadd' *string* - additional page header HTML code
- 'uid' *int* - user ID
- 'secure_page_p' *int* (0 or 1) - are we to use HTTPS friendly page elements or not?
- 'navmenuid' *string* - the id of the navigation item to highlight for this page
- 'metaheaderadd' *string* - list of further tags to add to the <HEAD></HEAD> part of the page
- 'rssurl' *string* - the url of the RSS feed for this page
- 'body_css_classes' *list* - list of classes to add to the body tag
Output:
- HTML code of the page headers
"""
ctx = dict(ln=CFG_SITE_LANG, headertitle="",
description="", keywords="", userinfobox="",
useractivities_menu="", adminactivities_menu="",
navtrailbox="", pageheaderadd="", uid=0,
secure_page_p=0, navmenuid="admin", metaheaderadd="",
rssurl=CFG_SITE_URL+"/rss", body_css_classes=None)
ctx.update(kwargs)
if ctx['body_css_classes'] is None:
ctx['body_css_classes'] = [ctx.get('navmenuid', '')]
else:
ctx['body_css_classes'].append([ctx.get('navmenuid', '')])
return render_template_to_string(
"legacy_page.html",
no_pagebody=True,
no_pagefooter=True,
**ctx
).encode('utf8')
def tmpl_pagefooter(self, req, **kwargs):
"""Creates a page footer
Parameters:
- 'ln' *string* - The language to display
- 'lastupdated' *string* - when the page was last updated
- 'pagefooteradd' *string* - additional page footer HTML code
Output:
- HTML code of the page headers
"""
ctx = dict(ln=CFG_SITE_LANG, lastupdated=None, pagefooteradd=None)
ctx.update(kwargs)
lastupdated = ctx.get('lastupdated')
if lastupdated and lastupdated != '$Date$':
if lastupdated.startswith("$Date: ") or lastupdated.startswith("$Id: "):
ctx['lastupdated'] = convert_datecvs_to_datestruct(lastupdated)
return render_template_to_string(
"legacy_page.html",
no_pagebody=True,
no_pageheader=True,
**ctx
).encode('utf8')
def tmpl_language_selection_box(self, req, language=CFG_SITE_LANG):
"""Take URLARGS and LANGUAGE and return textual language
selection box for the given page.
Parameters:
- 'req' - The mod_python request object
- 'language' *string* - The selected language
"""
# load the right message language
_ = gettext_set_language(language)
# Work on a copy in order not to bork the arguments of the caller
argd = {}
if req and req.args:
argd.update(cgi.parse_qs(req.args))
parts = []
for (lang, lang_namelong) in language_list_long():
if lang == language:
parts.append('<span class="langinfo">%s</span>' % lang_namelong)
else:
# Update the 'ln' argument in the initial request
argd['ln'] = lang
if req and req.uri:
args = urllib.quote(req.uri, '/:?') + make_canonical_urlargd(argd, {})
else:
args = ""
parts.append(create_html_link(args,
{}, lang_namelong,
{'class': "langinfo"}))
if len(parts) > 1:
return _("This site is also available in the following languages:") + \
"<br />" + ' '.join(parts)
else:
            ## There is only one (or zero) language configured,
            ## so there is no need to display language alternatives.
return ""
def tmpl_error_box(self, ln, title, verbose, req, errors):
"""Produces an error box.
Parameters:
- 'title' *string* - The title of the error box
- 'ln' *string* - The selected language
- 'verbose' *bool* - If lots of information should be displayed
- 'req' *object* - the request object
- 'errors' list of tuples (error_code, error_message)
"""
# load the right message language
_ = gettext_set_language(ln)
info_not_available = _("N/A")
if title is None:
if errors:
title = _("Error") + ': %s' % errors[0][1]
else:
title = _("Internal Error")
browser_s = _("Browser")
if req:
try:
if 'User-Agent' in req.headers_in:
browser_s += ': ' + req.headers_in['User-Agent']
else:
browser_s += ': ' + info_not_available
host_s = req.hostname
page_s = req.unparsed_uri
client_s = req.remote_ip
except: # FIXME: bad except
browser_s += ': ' + info_not_available
host_s = page_s = client_s = info_not_available
else:
browser_s += ': ' + info_not_available
host_s = page_s = client_s = info_not_available
error_s = ''
sys_error_s = ''
traceback_s = ''
if verbose >= 1:
if sys.exc_info()[0]:
sys_error_s = '\n' + _("System Error") + ': %s %s\n' % \
(sys.exc_info()[0], sys.exc_info()[1])
if errors:
errs = ''
for error_tuple in errors:
try:
errs += "%s%s : %s\n " % (' '*6, error_tuple[0],
error_tuple[1])
except:
errs += "%s%s\n" % (' '*6, error_tuple)
                errs = errs[6:-2] # get rid of trailing ','
                error_s = _("Error") + ': %s' % errs + "\n"
else:
error_s = _("Error") + ': ' + info_not_available
if verbose >= 9:
traceback_s = '\n' + _("Traceback") + ': \n%s' % \
string.join(traceback.format_tb(sys.exc_info()[2]),
"\n")
out = """
<table class="errorbox">
<thead>
<tr>
<th class="errorboxheader">
<p> %(title)s %(sys1)s %(sys2)s</p>
</th>
</tr>
</thead>
<tbody>
<tr>
<td class="errorboxbody">
<p>%(contact)s</p>
<blockquote><pre>
URI: http://%(host)s%(page)s
%(time_label)s: %(time)s
%(browser)s
%(client_label)s: %(client)s
%(error)s%(sys_error)s%(traceback)s
</pre></blockquote>
</td>
</tr>
<tr>
<td>
<form action="%(siteurl)s/error/send" method="post">
%(send_error_label)s
<input class="adminbutton" type="submit" value="%(send_label)s" />
<input type="hidden" name="header" value="%(title)s %(sys1)s %(sys2)s" />
<input type="hidden" name="url" value="URI: http://%(host)s%(page)s" />
<input type="hidden" name="time" value="Time: %(time)s" />
<input type="hidden" name="browser" value="%(browser)s" />
<input type="hidden" name="client" value="Client: %(client)s" />
<input type="hidden" name="error" value="%(error)s" />
<input type="hidden" name="sys_error" value="%(sys_error)s" />
<input type="hidden" name="traceback" value="%(traceback)s" />
<input type="hidden" name="referer" value="%(referer)s" />
</form>
</td>
</tr>
</tbody>
</table>
""" % {
            'title' : cgi.escape(title).replace('"', '&quot;'),
'time_label': _("Time"),
'client_label': _("Client"),
'send_error_label': \
_("Please send an error report to the administrator."),
'send_label': _("Send error report"),
            'sys1' : cgi.escape(str((sys.exc_info()[0] or ''))).replace('"', '&quot;'),
            'sys2' : cgi.escape(str((sys.exc_info()[1] or ''))).replace('"', '&quot;'),
'contact' : \
_("Please contact %(x_name)s quoting the following information:",
x_name=('<a href="mailto:' + urllib.quote(CFG_SITE_SUPPORT_EMAIL) +'">' + CFG_SITE_SUPPORT_EMAIL + '</a>')),
'host' : cgi.escape(host_s),
'page' : cgi.escape(page_s),
'time' : time.strftime("%d/%b/%Y:%H:%M:%S %z"),
            'browser' : cgi.escape(browser_s).replace('"', '&quot;'),
            'client' : cgi.escape(client_s).replace('"', '&quot;'),
            'error' : cgi.escape(error_s).replace('"', '&quot;'),
            'traceback' : cgi.escape(traceback_s).replace('"', '&quot;'),
            'sys_error' : cgi.escape(sys_error_s).replace('"', '&quot;'),
'siteurl' : CFG_BASE_URL,
'referer' : page_s!=info_not_available and \
("http://" + host_s + page_s) or \
info_not_available
}
return out
def detailed_record_container_top(self, recid, tabs, ln=CFG_SITE_LANG,
show_similar_rec_p=True,
creationdate=None,
modificationdate=None, show_short_rec_p=True,
citationnum=-1, referencenum=-1, discussionnum=-1,
include_jquery = False, include_mathjax = False):
"""Prints the box displayed in detailed records pages, with tabs at the top.
Returns content as it is if the number of tabs for this record
is smaller than 2
Parameters:
@param recid: int - the id of the displayed record
@param tabs: ** - the tabs displayed at the top of the box.
@param ln: *string* - the language of the page in which the box is displayed
@param show_similar_rec_p: *bool* print 'similar records' link in the box
@param creationdate: *string* - the creation date of the displayed record
@param modificationdate: *string* - the last modification date of the displayed record
@param show_short_rec_p: *boolean* - prints a very short version of the record as reminder.
@param citationnum: show (this) number of citations in the citations tab
@param referencenum: show (this) number of references in the references tab
@param discussionnum: show (this) number of comments/reviews in the discussion tab
"""
from invenio.legacy.search_engine import \
get_restricted_collections_for_recid, \
is_record_in_any_collection
# load the right message language
_ = gettext_set_language(ln)
# Prepare restriction flag
restriction_flag = ''
if get_restricted_collections_for_recid(recid, recreate_cache_if_needed=False):
restriction_flag = '<div class="restrictedflag"><span>%s</span></div>' % _("Restricted")
elif not is_record_in_any_collection(recid, recreate_cache_if_needed=False):
restriction_flag = '<div class="restrictedflag restrictedflag-pending"><span>%s</span></div>' % _("Restricted (Processing Record)")
# If no tabs, returns nothing (excepted if restricted)
if len(tabs) <= 1:
return restriction_flag
# Build the tabs at the top of the page
out_tabs = ''
if len(tabs) > 1:
first_tab = True
for (label, url, selected, enabled) in tabs:
addnum = ""
if (citationnum > -1) and url.count("/citation") == 1:
addnum = "(" + str(citationnum) + ")"
if (referencenum > -1) and url.count("/references") == 1:
addnum = "(" + str(referencenum) + ")"
if (discussionnum > -1) and url.count("/comments") == 1:
addnum = "(" + str(discussionnum) + ")"
css_class = []
if selected:
css_class.append('on')
if first_tab:
css_class.append('first')
first_tab = False
if not enabled:
css_class.append('disabled')
css_class = ' class="%s"' % ' '.join(css_class)
if not enabled:
out_tabs += '<li%(class)s><a>%(label)s %(addnum)s</a></li>' % \
{'class':css_class,
'label':label,
'addnum':addnum}
else:
out_tabs += '<li%(class)s><a href="%(url)s">%(label)s %(addnum)s </a></li>' % \
{'class':css_class,
'url':url,
'label':label,
'addnum':addnum}
if out_tabs != '':
out_tabs = ''' <div class="detailedrecordtabs">
<div>
<ul class="detailedrecordtabs">%s</ul>
<div id="tabsSpacer" style="clear:both;height:0px"> </div></div>
</div>''' % out_tabs
# Add the clip icon and the brief record reminder if necessary
record_brief = ''
if show_short_rec_p:
record_brief = format_record(recID=recid, of='hs', ln=ln)
record_brief = '''<div id="detailedrecordshortreminder">
<div id="clip"> </div>
<div id="HB">
%(record_brief)s
</div>
</div>
<div style="clear:both;height:1px"> </div>
''' % {'record_brief': record_brief}
additional_scripts = ""
if include_jquery:
additional_scripts += """<script type="text/javascript" src="%s/js/jquery.min.js">' \
'</script>\n""" % (CFG_BASE_URL, )
if include_mathjax:
additional_scripts += get_mathjax_header()
# Print the content
out = """
%(additional_scripts)s<div class="detailedrecordbox">
%(tabs)s
<div class="detailedrecordboxcontent">
<div class="top-left-folded"></div>
<div class="top-right-folded"></div>
<div class="inside">
<!--<div style="height:0.1em;"> </div>
<p class="notopgap"> </p>-->
%(record_brief)s
""" % {'additional_scripts': additional_scripts,
'tabs':out_tabs,
'record_brief':record_brief}
out = restriction_flag + out
return out
def detailed_record_container_bottom(self, recid, tabs, ln=CFG_SITE_LANG,
show_similar_rec_p=True,
creationdate=None,
modificationdate=None, show_short_rec_p=True):
"""Prints the box displayed in detailed records pages, with tabs at the top.
Returns content as it is if the number of tabs for this record
is smaller than 2
Parameters:
- recid *int* - the id of the displayed record
- tabs ** - the tabs displayed at the top of the box.
- ln *string* - the language of the page in which the box is displayed
- show_similar_rec_p *bool* print 'similar records' link in the box
- creationdate *string* - the creation date of the displayed record
- modificationdate *string* - the last modification date of the displayed record
- show_short_rec_p *boolean* - prints a very short version of the record as reminder.
"""
# If no tabs, returns nothing
if len(tabs) <= 1:
return ''
# load the right message language
_ = gettext_set_language(ln)
similar = ""
if show_similar_rec_p and not CFG_INSPIRE_SITE:
similar = create_html_link(
websearch_templates.build_search_url(p='recid:%d' % \
recid,
rm='wrd',
ln=ln),
{}, _("Similar records"),{'class': "moreinfo"})
out = """
<div class="bottom-left-folded">%(dates)s</div>
<div class="bottom-right-folded" style="text-align:right;padding-bottom:2px;">
<span class="moreinfo" style="margin-right:10px;">%(similar)s</span></div>
</div>
</div>
</div>
<br/>
""" % {'similar' : similar,
'dates' : creationdate and '<div class="recordlastmodifiedbox" style="position:relative;margin-left:1px"> %(dates)s</div>' % {
'dates': _("Record created %(x_date_creation)s, last modified %(x_date_modification)s",
x_date_creation=creationdate,
x_date_modification=modificationdate),
} or ''
}
return out
def detailed_record_mini_panel(self, recid, ln=CFG_SITE_LANG,
format='hd',
files='',
reviews='',
actions=''):
"""Displays the actions dock at the bottom of the detailed record
pages.
Parameters:
- recid *int* - the id of the displayed record
- ln *string* - interface language code
- format *string* - the format used to display the record
- files *string* - the small panel representing the attached files
- reviews *string* - the small panel representing the reviews
- actions *string* - the small panel representing the possible user's action
"""
# load the right message language
_ = gettext_set_language(ln)
out = """
<br />
<div class="detailedrecordminipanel">
<div class="top-left"></div><div class="top-right"></div>
<div class="inside">
<div id="detailedrecordminipanelfile" style="width:33%%;float:left;text-align:center;margin-top:0">
%(files)s
</div>
<div id="detailedrecordminipanelreview" style="width:30%%;float:left;text-align:center">
%(reviews)s
</div>
<div id="detailedrecordminipanelactions" style="width:36%%;float:right;text-align:right;">
%(actions)s
</div>
<div style="clear:both;margin-bottom: 0;"></div>
</div>
<div class="bottom-left"></div><div class="bottom-right"></div>
</div>
""" % {
'siteurl': CFG_BASE_URL,
'ln':ln,
'recid':recid,
'files': files,
'reviews':reviews,
'actions': actions,
}
return out
def tmpl_error_page(self, ln=CFG_SITE_LANG, status="", admin_was_alerted=True):
"""
Display an error page.
- status *string* - the HTTP status.
"""
_ = gettext_set_language(ln)
out = """
<p>%(message)s</p>
<p>%(alerted)s</p>
<p>%(doubts)s</p>""" % {
'status' : status,
'message' : _("The server encountered an error while dealing with your request."),
'alerted' : admin_was_alerted and _("The system administrators have been alerted.") or '',
'doubts' : _("In case of doubt, please contact %(x_admin_email)s.",
x_admin_email='<a href="mailto:%(admin)s">%(admin)s</a>' % {'admin' : CFG_SITE_SUPPORT_EMAIL})
}
return out
def tmpl_warning_message(self, ln, msg):
"""
Produces a warning message for the specified text
Parameters:
- 'ln' *string* - The language to display the interface in
- 'msg' *string* - The message to display
"""
# load the right message language
_ = gettext_set_language(ln)
return """<center><font color="red">%s</font></center>""" % msg
def tmpl_write_warning(self, msg, type='', prologue='', epilogue=''):
"""
Returns formatted warning message.
Parameters:
- 'msg' *string* - The message string
- 'type' *string* - the warning type
- 'prologue' *string* - HTML code to display before the warning
- 'epilogue' *string* - HTML code to display after the warning
"""
out = '\n%s<span class="quicknote">' % (prologue)
if type:
out += '%s: ' % type
out += '%s</span>%s' % (msg, epilogue)
return out
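    # Illustrative usage sketch (added for clarity, not part of the original
    # Invenio source; the message text is hypothetical):
    #
    #     warning_html = self.tmpl_write_warning("Record is under embargo",
    #                                            type="Note",
    #                                            prologue="<div>",
    #                                            epilogue="</div>")
    #     # -> '\n<div><span class="quicknote">Note: Record is under embargo</span></div>'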
| gpl-2.0 | -3,385,006,754,124,190,700 | 38.748999 | 143 | 0.51891 | false |
davgibbs/django | django/forms/widgets.py | 184 | 37166 | """
HTML Widget classes
"""
from __future__ import unicode_literals
import copy
import datetime
import re
from itertools import chain
from django.conf import settings
from django.forms.utils import flatatt, to_current_timezone
from django.utils import datetime_safe, formats, six
from django.utils.datastructures import MultiValueDict
from django.utils.dates import MONTHS
from django.utils.encoding import (
force_str, force_text, python_2_unicode_compatible,
)
from django.utils.formats import get_format
from django.utils.html import conditional_escape, format_html, html_safe
from django.utils.safestring import mark_safe
from django.utils.six.moves import range
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.translation import ugettext_lazy
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'NumberInput',
'EmailInput', 'URLInput', 'PasswordInput', 'HiddenInput',
'MultipleHiddenInput', 'FileInput', 'ClearableFileInput', 'Textarea',
'DateInput', 'DateTimeInput', 'TimeInput', 'CheckboxInput', 'Select',
'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget',
'SplitHiddenDateTimeWidget', 'SelectDateWidget',
)
MEDIA_TYPES = ('css', 'js')
@html_safe
@python_2_unicode_compatible
class Media(object):
def __init__(self, media=None, **kwargs):
if media:
media_attrs = media.__dict__
else:
media_attrs = kwargs
self._css = {}
self._js = []
for name in MEDIA_TYPES:
getattr(self, 'add_' + name)(media_attrs.get(name))
def __str__(self):
return self.render()
def render(self):
return mark_safe('\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
def render_js(self):
return [
format_html(
'<script type="text/javascript" src="{}"></script>',
self.absolute_path(path)
) for path in self._js
]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css.keys())
return chain(*[[
format_html(
'<link href="{}" type="text/css" media="{}" rel="stylesheet" />',
self.absolute_path(path), medium
) for path in self._css[medium]
] for medium in media])
def absolute_path(self, path, prefix=None):
if path.startswith(('http://', 'https://', '/')):
return path
if prefix is None:
if settings.STATIC_URL is None:
# backwards compatibility
prefix = settings.MEDIA_URL
else:
prefix = settings.STATIC_URL
return urljoin(prefix, path)
def __getitem__(self, name):
"Returns a Media object that only contains media of the given type"
if name in MEDIA_TYPES:
return Media(**{str(name): getattr(self, '_' + name)})
raise KeyError('Unknown media type "%s"' % name)
def add_js(self, data):
if data:
for path in data:
if path not in self._js:
self._js.append(path)
def add_css(self, data):
if data:
for medium, paths in data.items():
for path in paths:
if not self._css.get(medium) or path not in self._css[medium]:
self._css.setdefault(medium, []).append(path)
def __add__(self, other):
combined = Media()
for name in MEDIA_TYPES:
getattr(combined, 'add_' + name)(getattr(self, '_' + name, None))
getattr(combined, 'add_' + name)(getattr(other, '_' + name, None))
return combined
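# Illustrative sketch (added for clarity, not part of the original module):
# Media objects combine with "+", de-duplicating paths while keeping order.
#
#     m1 = Media(css={'all': ('base.css',)}, js=('core.js',))
#     m2 = Media(js=('core.js', 'extra.js'))
#     (m1 + m2)._js == ['core.js', 'extra.js']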
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
sup_cls = super(cls, self)
try:
base = sup_cls.media
except AttributeError:
base = Media()
# Get the media definition for this class
definition = getattr(cls, 'Media', None)
if definition:
extend = getattr(definition, 'extend', True)
if extend:
if extend is True:
m = base
else:
m = Media()
for medium in extend:
m = m + base[medium]
return m + Media(definition)
else:
return Media(definition)
else:
return base
return property(_media)
class MediaDefiningClass(type):
"""
Metaclass for classes that can have media definitions.
"""
def __new__(mcs, name, bases, attrs):
new_class = (super(MediaDefiningClass, mcs)
.__new__(mcs, name, bases, attrs))
if 'media' not in attrs:
new_class.media = media_property(new_class)
return new_class
@html_safe
@python_2_unicode_compatible
class SubWidget(object):
"""
Some widgets are made of multiple HTML elements -- namely, RadioSelect.
This is a class that represents the "inner" HTML element of a widget.
"""
def __init__(self, parent_widget, name, value, attrs, choices):
self.parent_widget = parent_widget
self.name, self.value = name, value
self.attrs, self.choices = attrs, choices
def __str__(self):
args = [self.name, self.value, self.attrs]
if self.choices:
args.append(self.choices)
return self.parent_widget.render(*args)
class Widget(six.with_metaclass(MediaDefiningClass)):
    needs_multipart_form = False  # Determines whether this widget needs a multipart form
is_localized = False
is_required = False
supports_microseconds = True
def __init__(self, attrs=None):
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
@property
def is_hidden(self):
return self.input_type == 'hidden' if hasattr(self, 'input_type') else False
def subwidgets(self, name, value, attrs=None, choices=()):
"""
Yields all "subwidgets" of this widget. Used only by RadioSelect to
allow template access to individual <input type="radio"> buttons.
Arguments are the same as for render().
"""
yield SubWidget(self, name, value, attrs, choices)
def render(self, name, value, attrs=None):
"""
Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively.
"""
raise NotImplementedError('subclasses of Widget must provide a render() method')
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
attrs = dict(self.attrs, **kwargs)
if extra_attrs:
attrs.update(extra_attrs)
return attrs
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, returns the value
of this widget. Returns None if it's not provided.
"""
return data.get(name)
def id_for_label(self, id_):
"""
Returns the HTML ID attribute of this Widget for use by a <label>,
given the ID of the field. Returns None if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
class Input(Widget):
"""
Base class for all <input> widgets (except type='checkbox' and
type='radio', which are special).
"""
input_type = None # Subclasses must define this.
def _format_value(self, value):
if self.is_localized:
return formats.localize_input(value)
return value
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_text(self._format_value(value))
return format_html('<input{} />', flatatt(final_attrs))
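# Illustrative sketch (added for clarity, not part of the original module;
# the field name and attrs are hypothetical, and the exact attribute order
# is determined by flatatt()):
#
#     TextInput(attrs={'class': 'name'}).render('first_name', 'Ada')
#     # -> '<input class="name" name="first_name" type="text" value="Ada" />'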
class TextInput(Input):
input_type = 'text'
def __init__(self, attrs=None):
if attrs is not None:
self.input_type = attrs.pop('type', self.input_type)
super(TextInput, self).__init__(attrs)
class NumberInput(TextInput):
input_type = 'number'
class EmailInput(TextInput):
input_type = 'email'
class URLInput(TextInput):
input_type = 'url'
class PasswordInput(TextInput):
input_type = 'password'
def __init__(self, attrs=None, render_value=False):
super(PasswordInput, self).__init__(attrs)
self.render_value = render_value
def render(self, name, value, attrs=None):
if not self.render_value:
value = None
return super(PasswordInput, self).render(name, value, attrs)
class HiddenInput(Input):
input_type = 'hidden'
class MultipleHiddenInput(HiddenInput):
"""
A widget that handles <input type="hidden"> for fields that have a list
of values.
"""
def __init__(self, attrs=None, choices=()):
super(MultipleHiddenInput, self).__init__(attrs)
# choices can be any iterable
self.choices = choices
def render(self, name, value, attrs=None, choices=()):
if value is None:
value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
id_ = final_attrs.get('id')
inputs = []
for i, v in enumerate(value):
input_attrs = dict(value=force_text(v), **final_attrs)
if id_:
# An ID attribute was given. Add a numeric index as a suffix
# so that the inputs don't all have the same ID attribute.
input_attrs['id'] = '%s_%s' % (id_, i)
inputs.append(format_html('<input{} />', flatatt(input_attrs)))
return mark_safe('\n'.join(inputs))
def value_from_datadict(self, data, files, name):
if isinstance(data, MultiValueDict):
return data.getlist(name)
return data.get(name)
class FileInput(Input):
input_type = 'file'
needs_multipart_form = True
def render(self, name, value, attrs=None):
return super(FileInput, self).render(name, None, attrs=attrs)
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name)
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
initial_text = ugettext_lazy('Currently')
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
template_with_initial = (
'%(initial_text)s: <a href="%(initial_url)s">%(initial)s</a> '
'%(clear_template)s<br />%(input_text)s: %(input)s'
)
template_with_clear = '%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
def clear_checkbox_name(self, name):
"""
Given the name of the file input, return the name of the clear checkbox
input.
"""
return name + '-clear'
def clear_checkbox_id(self, name):
"""
Given the name of the clear checkbox input, return the HTML id for it.
"""
return name + '_id'
def is_initial(self, value):
"""
Return whether value is considered to be initial value.
"""
# hasattr() masks exceptions on Python 2.
if six.PY2:
try:
getattr(value, 'url')
except AttributeError:
return False
else:
return bool(value)
return bool(value and hasattr(value, 'url'))
def get_template_substitution_values(self, value):
"""
Return value-related substitutions.
"""
return {
'initial': conditional_escape(value),
'initial_url': conditional_escape(value.url),
}
def render(self, name, value, attrs=None):
substitutions = {
'initial_text': self.initial_text,
'input_text': self.input_text,
'clear_template': '',
'clear_checkbox_label': self.clear_checkbox_label,
}
template = '%(input)s'
substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs)
if self.is_initial(value):
template = self.template_with_initial
substitutions.update(self.get_template_substitution_values(value))
if not self.is_required:
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
substitutions['clear_checkbox_name'] = conditional_escape(checkbox_name)
substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id)
substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
substitutions['clear_template'] = self.template_with_clear % substitutions
return mark_safe(template % substitutions)
def value_from_datadict(self, data, files, name):
upload = super(ClearableFileInput, self).value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
# False signals to clear any existing value, as opposed to just None
return False
return upload
class Textarea(Widget):
def __init__(self, attrs=None):
# Use slightly better defaults than HTML's 20x2 box
default_attrs = {'cols': '40', 'rows': '10'}
if attrs:
default_attrs.update(attrs)
super(Textarea, self).__init__(default_attrs)
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
return format_html('<textarea{}>\r\n{}</textarea>',
flatatt(final_attrs),
force_text(value))
class DateTimeBaseInput(TextInput):
format_key = ''
supports_microseconds = False
def __init__(self, attrs=None, format=None):
super(DateTimeBaseInput, self).__init__(attrs)
self.format = format if format else None
def _format_value(self, value):
return formats.localize_input(value,
self.format or formats.get_format(self.format_key)[0])
class DateInput(DateTimeBaseInput):
format_key = 'DATE_INPUT_FORMATS'
class DateTimeInput(DateTimeBaseInput):
format_key = 'DATETIME_INPUT_FORMATS'
class TimeInput(DateTimeBaseInput):
format_key = 'TIME_INPUT_FORMATS'
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
return not (v is False or v is None or v == '')
class CheckboxInput(Widget):
def __init__(self, attrs=None, check_test=None):
super(CheckboxInput, self).__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = boolean_check if check_test is None else check_test
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
if self.check_test(value):
final_attrs['checked'] = 'checked'
if not (value is True or value is False or value is None or value == ''):
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_text(value)
return format_html('<input{} />', flatatt(final_attrs))
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
value = data.get(name)
# Translate true and false strings to boolean values.
values = {'true': True, 'false': False}
if isinstance(value, six.string_types):
value = values.get(value.lower(), value)
return bool(value)
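# Illustrative sketch (added for clarity, not part of the original module;
# field names and data are hypothetical):
#
#     CheckboxInput().value_from_datadict({'agree': 'true'}, {}, 'agree')   # True
#     CheckboxInput().value_from_datadict({'agree': 'false'}, {}, 'agree')  # False
#     CheckboxInput().value_from_datadict({}, {}, 'agree')                  # False (missing)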
class Select(Widget):
allow_multiple_selected = False
def __init__(self, attrs=None, choices=()):
super(Select, self).__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
obj.choices = copy.copy(self.choices)
memo[id(self)] = obj
return obj
def render(self, name, value, attrs=None, choices=()):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
output = [format_html('<select{}>', flatatt(final_attrs))]
options = self.render_options(choices, [value])
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
def render_option(self, selected_choices, option_value, option_label):
if option_value is None:
option_value = ''
option_value = force_text(option_value)
if option_value in selected_choices:
selected_html = mark_safe(' selected="selected"')
if not self.allow_multiple_selected:
# Only allow for a single selection.
selected_choices.remove(option_value)
else:
selected_html = ''
return format_html('<option value="{}"{}>{}</option>',
option_value,
selected_html,
force_text(option_label))
def render_options(self, choices, selected_choices):
# Normalize to strings.
selected_choices = set(force_text(v) for v in selected_choices)
output = []
for option_value, option_label in chain(self.choices, choices):
if isinstance(option_label, (list, tuple)):
output.append(format_html('<optgroup label="{}">', force_text(option_value)))
for option in option_label:
output.append(self.render_option(selected_choices, *option))
output.append('</optgroup>')
else:
output.append(self.render_option(selected_choices, option_value, option_label))
return '\n'.join(output)
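# Illustrative sketch (added for clarity, not part of the original module;
# the choices are hypothetical): a (label, [(value, text), ...]) pair is
# rendered as an <optgroup>:
#
#     Select(choices=[('audio', [('mp3', 'MP3'), ('ogg', 'OGG')])])
#     # render_options() wraps the two <option> tags in
#     # '<optgroup label="audio">...</optgroup>'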
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = (('1', ugettext_lazy('Unknown')),
('2', ugettext_lazy('Yes')),
('3', ugettext_lazy('No')))
super(NullBooleanSelect, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
try:
value = {True: '2', False: '3', '2': '2', '3': '3'}[value]
except KeyError:
value = '1'
return super(NullBooleanSelect, self).render(name, value, attrs, choices)
def value_from_datadict(self, data, files, name):
value = data.get(name)
return {'2': True,
True: True,
'True': True,
'3': False,
'False': False,
False: False}.get(value)
class SelectMultiple(Select):
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
if value is None:
value = []
final_attrs = self.build_attrs(attrs, name=name)
output = [format_html('<select multiple="multiple"{}>', flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
def value_from_datadict(self, data, files, name):
if isinstance(data, MultiValueDict):
return data.getlist(name)
return data.get(name)
@html_safe
@python_2_unicode_compatible
class ChoiceInput(SubWidget):
"""
An object used by ChoiceFieldRenderer that represents a single
<input type='$input_type'>.
"""
input_type = None # Subclasses must define this
def __init__(self, name, value, attrs, choice, index):
self.name = name
self.value = value
self.attrs = attrs
self.choice_value = force_text(choice[0])
self.choice_label = force_text(choice[1])
self.index = index
if 'id' in self.attrs:
self.attrs['id'] += "_%d" % self.index
def __str__(self):
return self.render()
def render(self, name=None, value=None, attrs=None, choices=()):
if self.id_for_label:
label_for = format_html(' for="{}"', self.id_for_label)
else:
label_for = ''
attrs = dict(self.attrs, **attrs) if attrs else self.attrs
return format_html(
'<label{}>{} {}</label>', label_for, self.tag(attrs), self.choice_label
)
def is_checked(self):
return self.value == self.choice_value
def tag(self, attrs=None):
attrs = attrs or self.attrs
final_attrs = dict(attrs, type=self.input_type, name=self.name, value=self.choice_value)
if self.is_checked():
final_attrs['checked'] = 'checked'
return format_html('<input{} />', flatatt(final_attrs))
@property
def id_for_label(self):
return self.attrs.get('id', '')
class RadioChoiceInput(ChoiceInput):
input_type = 'radio'
def __init__(self, *args, **kwargs):
super(RadioChoiceInput, self).__init__(*args, **kwargs)
self.value = force_text(self.value)
class CheckboxChoiceInput(ChoiceInput):
input_type = 'checkbox'
def __init__(self, *args, **kwargs):
super(CheckboxChoiceInput, self).__init__(*args, **kwargs)
self.value = set(force_text(v) for v in self.value)
def is_checked(self):
return self.choice_value in self.value
@html_safe
@python_2_unicode_compatible
class ChoiceFieldRenderer(object):
"""
An object used by RadioSelect to enable customization of radio widgets.
"""
choice_input_class = None
outer_html = '<ul{id_attr}>{content}</ul>'
inner_html = '<li>{choice_value}{sub_widgets}</li>'
def __init__(self, name, value, attrs, choices):
self.name = name
self.value = value
self.attrs = attrs
self.choices = choices
def __getitem__(self, idx):
choice = self.choices[idx] # Let the IndexError propagate
return self.choice_input_class(self.name, self.value, self.attrs.copy(), choice, idx)
def __str__(self):
return self.render()
def render(self):
"""
Outputs a <ul> for this set of choice fields.
If an id was given to the field, it is applied to the <ul> (each
item in the list will get an id of `$id_$i`).
"""
id_ = self.attrs.get('id')
output = []
for i, choice in enumerate(self.choices):
choice_value, choice_label = choice
if isinstance(choice_label, (tuple, list)):
attrs_plus = self.attrs.copy()
if id_:
attrs_plus['id'] += '_{}'.format(i)
sub_ul_renderer = self.__class__(
name=self.name,
value=self.value,
attrs=attrs_plus,
choices=choice_label,
)
sub_ul_renderer.choice_input_class = self.choice_input_class
output.append(format_html(self.inner_html, choice_value=choice_value,
sub_widgets=sub_ul_renderer.render()))
else:
w = self.choice_input_class(self.name, self.value,
self.attrs.copy(), choice, i)
output.append(format_html(self.inner_html,
choice_value=force_text(w), sub_widgets=''))
return format_html(self.outer_html,
id_attr=format_html(' id="{}"', id_) if id_ else '',
content=mark_safe('\n'.join(output)))
class RadioFieldRenderer(ChoiceFieldRenderer):
choice_input_class = RadioChoiceInput
class CheckboxFieldRenderer(ChoiceFieldRenderer):
choice_input_class = CheckboxChoiceInput
class RendererMixin(object):
renderer = None # subclasses must define this
_empty_value = None
def __init__(self, *args, **kwargs):
# Override the default renderer if we were passed one.
renderer = kwargs.pop('renderer', None)
if renderer:
self.renderer = renderer
super(RendererMixin, self).__init__(*args, **kwargs)
def subwidgets(self, name, value, attrs=None, choices=()):
for widget in self.get_renderer(name, value, attrs, choices):
yield widget
def get_renderer(self, name, value, attrs=None, choices=()):
"""Returns an instance of the renderer."""
if value is None:
value = self._empty_value
final_attrs = self.build_attrs(attrs)
choices = list(chain(self.choices, choices))
return self.renderer(name, value, final_attrs, choices)
def render(self, name, value, attrs=None, choices=()):
return self.get_renderer(name, value, attrs, choices).render()
def id_for_label(self, id_):
# Widgets using this RendererMixin are made of a collection of
# subwidgets, each with their own <label>, and distinct ID.
        # The IDs are made distinct by a "_X" suffix, where X is the zero-based
# index of the choice field. Thus, the label for the main widget should
# reference the first subwidget, hence the "_0" suffix.
if id_:
id_ += '_0'
return id_
class RadioSelect(RendererMixin, Select):
renderer = RadioFieldRenderer
_empty_value = ''
class CheckboxSelectMultiple(RendererMixin, SelectMultiple):
renderer = CheckboxFieldRenderer
_empty_value = []
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
    Its render() method is different from other widgets', because it has to
figure out how to split a single value for display in multiple widgets.
The ``value`` argument can be one of two things:
* A list.
* A normal value (e.g., a string) that has been "compressed" from
a list of values.
In the second case -- i.e., if the value is NOT a list -- render() will
first "decompress" the value into a list before rendering it. It does so by
calling the decompress() method, which MultiWidget subclasses must
implement. This method takes a single "compressed" value and returns a
list.
When render() does its HTML rendering, each value in the list is rendered
with the corresponding widget -- the first value is rendered in the first
widget, the second value is rendered in the second widget, etc.
Subclasses may implement format_output(), which takes the list of rendered
widgets and returns a string of HTML that formats them any way you'd like.
You'll probably want to use this class with MultiValueField.
"""
def __init__(self, widgets, attrs=None):
self.widgets = [w() if isinstance(w, type) else w for w in widgets]
super(MultiWidget, self).__init__(attrs)
@property
def is_hidden(self):
return all(w.is_hidden for w in self.widgets)
def render(self, name, value, attrs=None):
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
# value is a list of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, list):
value = self.decompress(value)
output = []
final_attrs = self.build_attrs(attrs)
id_ = final_attrs.get('id')
for i, widget in enumerate(self.widgets):
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
return mark_safe(self.format_output(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
def value_from_datadict(self, data, files, name):
return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]
def format_output(self, rendered_widgets):
"""
Given a list of rendered widgets (as strings), returns a Unicode string
representing the HTML for the whole lot.
This hook allows you to format the HTML design of the widgets, if
needed.
"""
return ''.join(rendered_widgets)
def decompress(self, value):
"""
Returns a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError('Subclasses must implement this method.')
def _get_media(self):
"Media for a multiwidget is the combination of all media of the subwidgets"
media = Media()
for w in self.widgets:
media = media + w.media
return media
media = property(_get_media)
def __deepcopy__(self, memo):
obj = super(MultiWidget, self).__deepcopy__(memo)
obj.widgets = copy.deepcopy(self.widgets)
return obj
@property
def needs_multipart_form(self):
return any(w.needs_multipart_form for w in self.widgets)
class SplitDateTimeWidget(MultiWidget):
"""
A Widget that splits datetime input into two <input type="text"> boxes.
"""
supports_microseconds = False
def __init__(self, attrs=None, date_format=None, time_format=None):
widgets = (DateInput(attrs=attrs, format=date_format),
TimeInput(attrs=attrs, format=time_format))
super(SplitDateTimeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
value = to_current_timezone(value)
return [value.date(), value.time().replace(microsecond=0)]
return [None, None]
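# Illustrative sketch (added for clarity, not part of the original module;
# assumes a naive datetime, i.e. USE_TZ=False so to_current_timezone() is a
# no-op):
#
#     SplitDateTimeWidget().decompress(datetime.datetime(2015, 1, 2, 3, 4, 5))
#     # -> [datetime.date(2015, 1, 2), datetime.time(3, 4, 5)]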
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A Widget that splits datetime input into two <input type="hidden"> inputs.
"""
def __init__(self, attrs=None, date_format=None, time_format=None):
super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format, time_format)
for widget in self.widgets:
widget.input_type = 'hidden'
class SelectDateWidget(Widget):
"""
A Widget that splits date input into three <select> boxes.
This also serves as an example of a Widget that has more than one HTML
element and hence implements value_from_datadict.
"""
none_value = (0, '---')
month_field = '%s_month'
day_field = '%s_day'
year_field = '%s_year'
select_widget = Select
date_re = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$')
def __init__(self, attrs=None, years=None, months=None, empty_label=None):
self.attrs = attrs or {}
# Optional list or tuple of years to use in the "year" select box.
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year + 10)
# Optional dict of months to use in the "month" select box.
if months:
self.months = months
else:
self.months = MONTHS
# Optional string, list, or tuple to use as empty_label.
if isinstance(empty_label, (list, tuple)):
if not len(empty_label) == 3:
raise ValueError('empty_label list/tuple must have 3 elements.')
self.year_none_value = (0, empty_label[0])
self.month_none_value = (0, empty_label[1])
self.day_none_value = (0, empty_label[2])
else:
if empty_label is not None:
self.none_value = (0, empty_label)
self.year_none_value = self.none_value
self.month_none_value = self.none_value
self.day_none_value = self.none_value
@staticmethod
def _parse_date_fmt():
fmt = get_format('DATE_FORMAT')
escaped = False
for char in fmt:
if escaped:
escaped = False
elif char == '\\':
escaped = True
elif char in 'Yy':
yield 'year'
elif char in 'bEFMmNn':
yield 'month'
elif char in 'dj':
yield 'day'
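    # Illustrative note (added for clarity, not part of the original module):
    # with get_format('DATE_FORMAT') == 'N j, Y', _parse_date_fmt() yields
    # 'month', 'day', 'year', so the <select> boxes follow the locale's order.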
def render(self, name, value, attrs=None):
try:
year_val, month_val, day_val = value.year, value.month, value.day
except AttributeError:
year_val = month_val = day_val = None
if isinstance(value, six.string_types):
if settings.USE_L10N:
try:
input_format = get_format('DATE_INPUT_FORMATS')[0]
v = datetime.datetime.strptime(force_str(value), input_format)
year_val, month_val, day_val = v.year, v.month, v.day
except ValueError:
pass
if year_val is None:
match = self.date_re.match(value)
if match:
year_val, month_val, day_val = [int(val) for val in match.groups()]
html = {}
choices = [(i, i) for i in self.years]
html['year'] = self.create_select(name, self.year_field, value, year_val, choices, self.year_none_value)
choices = list(self.months.items())
html['month'] = self.create_select(name, self.month_field, value, month_val, choices, self.month_none_value)
choices = [(i, i) for i in range(1, 32)]
html['day'] = self.create_select(name, self.day_field, value, day_val, choices, self.day_none_value)
output = []
for field in self._parse_date_fmt():
output.append(html[field])
return mark_safe('\n'.join(output))
def id_for_label(self, id_):
for first_select in self._parse_date_fmt():
return '%s_%s' % (id_, first_select)
else:
return '%s_month' % id_
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
d = data.get(self.day_field % name)
if y == m == d == "0":
return None
if y and m and d:
if settings.USE_L10N:
input_format = get_format('DATE_INPUT_FORMATS')[0]
try:
date_value = datetime.date(int(y), int(m), int(d))
except ValueError:
return '%s-%s-%s' % (y, m, d)
else:
date_value = datetime_safe.new_date(date_value)
return date_value.strftime(input_format)
else:
return '%s-%s-%s' % (y, m, d)
return data.get(name)
def create_select(self, name, field, value, val, choices, none_value):
if 'id' in self.attrs:
id_ = self.attrs['id']
else:
id_ = 'id_%s' % name
if not self.is_required:
choices.insert(0, none_value)
local_attrs = self.build_attrs(id=field % id_)
s = self.select_widget(choices=choices)
select_html = s.render(field % name, val, local_attrs)
return select_html
| bsd-3-clause | -2,982,774,737,967,685,000 | 34.195076 | 116 | 0.586396 | false |
project-magpie/enigma2-openpli | lib/python/Components/PluginList.py | 6 | 1926 | from MenuList import MenuList
from Tools.Directories import resolveFilename, SCOPE_SKIN_IMAGE
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from enigma import eListboxPythonMultiContent, gFont
from Tools.LoadPixmap import LoadPixmap
def PluginEntryComponent(plugin, width=440):
if plugin.icon is None:
png = LoadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/icons/plugin.png"))
else:
png = plugin.icon
return [
plugin,
MultiContentEntryText(pos=(120, 5), size=(width-120, 25), font=0, text=plugin.name),
MultiContentEntryText(pos=(120, 26), size=(width-120, 17), font=1, text=plugin.description),
MultiContentEntryPixmapAlphaTest(pos=(10, 5), size=(100, 40), png = png)
]
def PluginCategoryComponent(name, png, width=440):
return [
name,
MultiContentEntryText(pos=(80, 5), size=(width-80, 25), font=0, text=name),
MultiContentEntryPixmapAlphaTest(pos=(10, 0), size=(60, 50), png = png)
]
def PluginDownloadComponent(plugin, name, version=None, width=440):
if plugin.icon is None:
png = LoadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/icons/plugin.png"))
else:
png = plugin.icon
if version:
if "+git" in version:
# remove git "hash"
version = "+".join(version.split("+")[:2])
elif version.startswith('experimental-'):
version = version[13:]
name += " (" + version + ")"
return [
plugin,
MultiContentEntryText(pos=(80, 5), size=(width-80, 25), font=0, text=name),
MultiContentEntryText(pos=(80, 26), size=(width-80, 17), font=1, text=plugin.description),
MultiContentEntryPixmapAlphaTest(pos=(10, 0), size=(60, 50), png = png)
]
class PluginList(MenuList):
def __init__(self, list, enableWrapAround=True):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", 20))
self.l.setFont(1, gFont("Regular", 14))
self.l.setItemHeight(50)
| gpl-2.0 | 8,180,996,352,548,178,000 | 34.666667 | 94 | 0.723261 | false |
paolodedios/shift-detect | build.py | 1 | 6581 | # -*- c-file-style: "sourcery" -*-
#
# Use and distribution of this software and its source code is governed
# by the terms and conditions defined in the "LICENSE" file that is part
# of this source code package.
#
from pybuilder.core import use_bldsup
from pybuilder.core import use_plugin
from pybuilder.core import init
from pybuilder.core import task
from pybuilder.core import Author
from pybuilder.utils import assert_can_execute
import glob
import os
import shutil
use_plugin("python.core")
use_plugin("python.flake8")
use_plugin("python.unittest")
use_plugin("python.integrationtest")
use_plugin("python.install_dependencies")
# Import local build support plugins
use_bldsup(build_support_dir="support/build")
use_plugin("copy_files")
use_plugin("clean_project")
use_plugin("distribute")
use_plugin("devpi")
use_plugin("exec")
# Declare default build phase tasks to execute
default_task = [ "clean_project", "analyze", "install_dependencies", "publish" ]
# Declare top level project properties
authors = [Author("Paolo de Dios", "[email protected]")]
name = "shift-detect"
url = "http://paolodedios.com"
summary = "Covariate shift detector."
version = "0.1.0"
license = "MPL"
@init
def set_properties(project) :
# Generate build and runtime dependency specs
project.set_property("dir_deps_requirements", "support/deps")
project.set_property("analyze_command", "support/deps/pip-compile-deps.sh {}".format(project.get_property("dir_deps_requirements")))
project.set_property("analyze_propagate_stdout", True)
project.set_property("analyze_propagate_stderr", True)
# Declare project build dependencies
project.build_depends_on_requirements("{}/requirements-build.txt".format(project.get_property("dir_deps_requirements")))
# Declare project runtime dependencies
project.depends_on_requirements("{}/requirements.txt".format(project.get_property("dir_deps_requirements")))
# Declare the location of all unit tests
project.set_property("dir_source_unittest_python", "src/test/unit/python")
project.set_property("unittest_module_glob", "*_tests")
project.set_property("unittest_test_method_prefix", "test")
# Declare the location of all integration tests
project.set_property("dir_source_integrationtest_python", "src/test/integration/python")
project.set_property("integrationtest_module_glob", "*_tests")
project.set_property("integrationtest_test_method_prefix", "test")
# Disable Teamcity output during normal builds. When the TEAMCITY_VERSION
# environment variable is set (by either Teamcity or a user), teamcity
# output will be generated automatically
project.set_property("teamcity_output", False)
# Specify unit and integration test artifacts that can be removed with the
# "clean_project" task
project.get_property("clean_project_files_glob").extend([
"{}/requirements-build.txt".format(project.get_property("dir_deps_requirements")),
"{}/requirements.txt".format(project.get_property("dir_deps_requirements")),
"{}/__pycache__".format(project.get_property("dir_source_unittest_python")),
"{}/*.pyc".format(project.get_property("dir_source_unittest_python")),
"{}/__pycache__".format(project.get_property("dir_source_integrationtest_python")),
"{}/*.pyc".format(project.get_property("dir_source_integrationtest_python"))
])
# Check sources during the analyze phase, but ignore certain PEP8 error codes.
# @see http://pep8.readthedocs.org/en/latest/intro.html#error-codes
project.set_property("flake8_ignore", "E201,E202,E203,E221,E272,E302,E303,E501")
project.set_property("flake8_verbose_output", True)
project.set_property("flake8_include_test_sources", True)
project.set_property("flake8_break_build", False)
# Copy files to the top level of the distribution staging directory
project.set_property("copy_root_files_target", "$dir_dist")
project.get_property("copy_root_files_glob").extend([
"LICENSE",
"README.rst",
"support/deps/requirements.txt",
"support/dist/setup.cfg",
"support/dist/tox.ini"
])
# Declare which copied resources will be packaged for installation via
# MAINIFEST.in
project.install_file(".", "LICENSE")
project.install_file(".", "README.rst")
project.install_file(".", "requirements.txt")
project.install_file(".", "tox.ini")
# Package all scripts in the bin directory
project.set_property("dir_dist_scripts", "bin")
    # Add PyPI package metadata classifiers.
#
# Note: Invoking "setup.py release" will typically release all code to the
# wild. In order to ensure that this doesn't accidentally happen during the
# publish phase of the build, the "Private" classifier property is specified
# by default. As a result the public PyPI service will reject this package
# but a private PyPi or DevPI server will accept it.
#
# For a complete classifier list, @see http://pypi.python.org/pypi?%3Aaction=list_classifiers
project.set_property("distutils_classifiers", [
"Private :: Do Not Upload",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: Other/Proprietary License"
"Operating System :: Unix",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Utilities"
])
# Force setup.py to generate and install a shell script for the entry point
project.set_property("distutils_console_scripts", [
"shift_detect = shift_detect.__main__:main"
])
# Extend the list of setup.py commands to be executed from sdist, bdist_dumb
project.get_property("distutils_commands").extend([ "bdist_egg", "bdist_wheel" ])
# Set user name and destination index for local devpi/PyPi central
# repository
project.set_property("devpi_user", "root")
project.set_property("devpi_developer_index", "dev")
project.set_property("devpi_staging_index" , "staging")
project.set_property("devpi_release_index" , "release")
| mpl-2.0 | 9,222,085,803,752,756,000 | 42.013072 | 136 | 0.696095 | false |
zafar-hussain/or-tools | examples/python/traffic_lights.py | 32 | 3902 | # Copyright 2010 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Traffic lights problem in Google CP Solver.
CSPLib problem 16
http://www.cs.st-andrews.ac.uk/~ianm/CSPLib/prob/prob016/index.html
'''
Specification:
Consider a four way traffic junction with eight traffic lights. Four of the
traffic
lights are for the vehicles and can be represented by the variables V1 to V4
with domains
{r,ry,g,y} (for red, red-yellow, green and yellow). The other four traffic
lights are
for the pedestrians and can be represented by the variables P1 to P4 with
domains {r,g}.
The constraints on these variables can be modelled by quaternary constraints
on
(Vi, Pi, Vj, Pj ) for 1<=i<=4, j=(1+i)mod 4 which allow just the tuples
{(r,r,g,g), (ry,r,y,r), (g,g,r,r), (y,r,ry,r)}.
It would be interesting to consider other types of junction (e.g. five roads
intersecting) as well as modelling the evolution over time of the traffic
light sequence.
...
Results
Only 2^2 out of the 2^12 possible assignments are solutions.
(V1,P1,V2,P2,V3,P3,V4,P4) =
{(r,r,g,g,r,r,g,g), (ry,r,y,r,ry,r,y,r), (g,g,r,r,g,g,r,r),
(y,r,ry,r,y,r,ry,r)}
[(1,1,3,3,1,1,3,3), ( 2,1,4,1, 2,1,4,1), (3,3,1,1,3,3,1,1), (4,1, 2,1,4,1,
2,1)}
The problem has relative few constraints, but each is very tight. Local
propagation
appears to be rather ineffective on this problem.
'''
Note: In this model we use only the constraint solver.AllowedAssignments().
Compare with these models:
* MiniZinc: http://www.hakank.org/minizinc/traffic_lights.mzn
* Comet : http://www.hakank.org/comet/traffic_lights.co
* ECLiPSe : http://www.hakank.org/eclipse/traffic_lights.ecl
* Gecode : http://hakank.org/gecode/traffic_lights.cpp
* SICStus : http://hakank.org/sicstus/traffic_lights.pl
This model was created by Hakan Kjellerstrand ([email protected])
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import string
import sys
from ortools.constraint_solver import pywrapcp
def main(base=10, start=1, len1=1, len2=4):
# Create the solver.
solver = pywrapcp.Solver("Traffic lights")
#
# data
#
n = 4
r, ry, g, y = range(n)
lights = ["r", "ry", "g", "y"]
# The allowed combinations
allowed = []
allowed.extend([(r, r, g, g),
(ry, r, y, r),
(g, g, r, r),
(y, r, ry, r)])
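  # Note (added for clarity): solver.AllowedAssignments() below restricts each
  # quadruple (V[i], P[i], V[j], P[j]) to one of the four tuples above; e.g.
  # (g, g, r, r) means direction i shows green to both vehicles and
  # pedestrians while direction j shows red to both.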
#
# declare variables
#
V = [solver.IntVar(0, n - 1, "V[%i]" % i) for i in range(n)]
P = [solver.IntVar(0, n - 1, "P[%i]" % i) for i in range(n)]
#
# constraints
#
for i in range(n):
for j in range(n):
if j == (1 + i) % n:
solver.Add(solver.AllowedAssignments((V[i], P[i], V[j], P[j]), allowed))
#
# Search and result
#
db = solver.Phase(V + P,
solver.INT_VAR_SIMPLE,
solver.INT_VALUE_DEFAULT)
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
for i in range(n):
print "%+2s %+2s" % (lights[V[i].Value()], lights[P[i].Value()]),
print
num_solutions += 1
solver.EndSearch()
print
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
print
if __name__ == "__main__":
main()
| apache-2.0 | 1,034,879,588,576,957,300 | 27.071942 | 80 | 0.645566 | false |
isaac-playground/git-python | run_git_python.py | 1 | 1941 | """
"""
import os.path
import git
REPO_ROOT = os.path.abspath(os.path.dirname(__file__))
DATA_DIR = os.path.join(REPO_ROOT, 'data')
CURRENT_EXECUTION_VERSION = 16
NEW_AND_MODIFIED = '.'
REMOVED = '-A'
COMMIT_MSG='-m "Automated commit {index}. Running through."'.format(index=CURRENT_EXECUTION_VERSION)
VERSION_TAG = 'v1.0.{build}'.format(build=CURRENT_EXECUTION_VERSION)
print("Repo root: " + REPO_ROOT)
print("Data directory: " + DATA_DIR)
repo = git.Repo(REPO_ROOT)
git_driver = repo.git
# Making some changes that we can commit.
new_file = os.path.join(DATA_DIR, "created {number}.txt".format(number=CURRENT_EXECUTION_VERSION))
old_file = os.path.join(DATA_DIR, "created {number}.txt".format(number=CURRENT_EXECUTION_VERSION-1))
modifiable_file = os.path.join(DATA_DIR, "modifiable.txt".format(number=CURRENT_EXECUTION_VERSION-1))
with open(new_file, mode='w') as fout:
contents = "Created file {number}".format(number=CURRENT_EXECUTION_VERSION)
fout.write(contents)
with open(modifiable_file, mode='a') as fout:
contents = "Modified {number} times.\n".format(number=CURRENT_EXECUTION_VERSION)
fout.write(contents)
if os.path.exists(old_file):
print("Removing file: " + old_file)
os.remove(old_file)
print("Repo is dirty: " + repr(repo.is_dirty()))
# Adding new and modified, and deleting removed files from the repo.
print('Adding new and modified....')
git_driver.add(NEW_AND_MODIFIED)
print('Removing deleted from tree....')
git_driver.add(REMOVED)
print(git_driver.status())
print('Committing changes....')
print(git_driver.commit(COMMIT_MSG))
# Let's tag this version if the tag doesn't exist and push it preventing override.
if VERSION_TAG not in repo.tags:
print('Tagging repository with: {tag}....'.format(tag=VERSION_TAG))
repo.create_tag(VERSION_TAG, message='Annotated tag {version}'.format(version=VERSION_TAG))
print('Pushing changes....')
git_driver.push('--follow-tags')
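# Illustrative follow-up (added for clarity, not part of the original script):
# verify locally that the annotated tag was created before trusting the push.
#
#     print("Tags now in repo:", [tag.name for tag in repo.tags])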
| mit | 7,852,443,103,548,331,000 | 34.290909 | 101 | 0.716126 | false |
ahamilton/vigil | eris/webserver.py | 1 | 2593 | #!/usr/bin/env python3.8
# Copyright (C) 2018-2019 Andrew Hamilton. All rights reserved.
# Licensed under the Artistic License 2.0.
import gzip
import http.server
import os
import sys
import pickle
import eris.fill3 as fill3
import eris.tools as tools
USAGE = """Usage:
eris-webserver <directory>
Example:
eris-webserver my_project
"""
def make_page(body_html, title):
return (f"<html><head><title>{title}</title></head><body><style>body "
f"{{ background-color: black; }} </style>{body_html}</body></html>"
).encode("utf-8")
class Webserver(http.server.BaseHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
def do_GET(self):
self._set_headers()
if self.path == "/":
page = summary_page
elif "/" in self.path[1:]:
path, tool = os.path.split(self.path[1:])
result = index[(path, tool)]
body = fill3.appearance_as_html(
fill3.Border(result).appearance_min())
page = make_page(body, f"{tool} of {path}")
else:
return
self.wfile.write(page)
def do_HEAD(self):
self._set_headers()
def do_POST(self):
self._set_headers()
self.wfile.write("posted".encode("utf-8"))
def make_summary_page(project_name, summary):
summary_html, summary_styles = summary.as_html()
body_html = ("\n".join(style.as_html() for style in summary_styles)
+ "\n" + summary_html)
return make_page(body_html, "Summary of " + project_name)
def run(server_class=http.server.HTTPServer, handler_class=Webserver, port=80):
server_address = ("", port)
httpd = server_class(server_address, handler_class)
print("Starting httpd…")
httpd.serve_forever()
def main():
global summary_page, index
if len(sys.argv) == 1:
print(USAGE)
sys.exit(1)
project_path = os.path.abspath(sys.argv[1])
os.chdir(project_path)
project_name = os.path.basename(project_path)
pickle_path = os.path.join(project_path, tools.CACHE_PATH,
"summary.pickle")
with gzip.open(pickle_path, "rb") as file_:
screen = pickle.load(file_)
summary_page = make_summary_page(project_name, screen._summary)
index = {}
for row in screen._summary._entries:
for result in row:
index[(result.path[2:], result.tool.__name__)] = result.result
run()
if __name__ == "__main__":
main()
| artistic-2.0 | 8,779,917,104,911,399,000 | 26.273684 | 79 | 0.601698 | false |
yd0str/infernal-twin | build/pillow/PIL/FliImagePlugin.py | 26 | 4782 | #
# The Python Imaging Library.
# $Id$
#
# FLI/FLC file handling.
#
# History:
# 95-09-01 fl Created
# 97-01-03 fl Fixed parser, setup decoder tile
# 98-07-15 fl Renamed offset attribute to avoid name clash
#
# Copyright (c) Secret Labs AB 1997-98.
# Copyright (c) Fredrik Lundh 1995-97.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.2"
from PIL import Image, ImageFile, ImagePalette, _binary
i8 = _binary.i8
i16 = _binary.i16le
i32 = _binary.i32le
o8 = _binary.o8
#
# decoder
def _accept(prefix):
return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12]
##
# Image plugin for the FLI/FLC animation format. Use the <b>seek</b>
# method to load individual frames.
class FliImageFile(ImageFile.ImageFile):
format = "FLI"
format_description = "Autodesk FLI/FLC Animation"
def _open(self):
# HEAD
s = self.fp.read(128)
magic = i16(s[4:6])
if not (magic in [0xAF11, 0xAF12] and
i16(s[14:16]) in [0, 3] and # flags
s[20:22] == b"\x00\x00"): # reserved
raise SyntaxError("not an FLI/FLC file")
# image characteristics
self.mode = "P"
self.size = i16(s[8:10]), i16(s[10:12])
# animation speed
duration = i32(s[16:20])
if magic == 0xAF11:
duration = (duration * 1000) / 70
self.info["duration"] = duration
# look for palette
palette = [(a, a, a) for a in range(256)]
s = self.fp.read(16)
self.__offset = 128
if i16(s[4:6]) == 0xF100:
# prefix chunk; ignore it
self.__offset = self.__offset + i32(s)
s = self.fp.read(16)
if i16(s[4:6]) == 0xF1FA:
# look for palette chunk
s = self.fp.read(6)
if i16(s[4:6]) == 11:
self._palette(palette, 2)
elif i16(s[4:6]) == 4:
self._palette(palette, 0)
palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette]
self.palette = ImagePalette.raw("RGB", b"".join(palette))
# set things up to decode first frame
self.__frame = -1
self.__fp = self.fp
self.__rewind = self.fp.tell()
self._n_frames = None
self._is_animated = None
self.seek(0)
def _palette(self, palette, shift):
# load palette
i = 0
for e in range(i16(self.fp.read(2))):
s = self.fp.read(2)
i = i + i8(s[0])
n = i8(s[1])
if n == 0:
n = 256
s = self.fp.read(n * 3)
for n in range(0, len(s), 3):
r = i8(s[n]) << shift
g = i8(s[n+1]) << shift
b = i8(s[n+2]) << shift
palette[i] = (r, g, b)
i += 1
@property
def n_frames(self):
if self._n_frames is None:
current = self.tell()
try:
while True:
self.seek(self.tell() + 1)
except EOFError:
self._n_frames = self.tell() + 1
self.seek(current)
return self._n_frames
@property
def is_animated(self):
if self._is_animated is None:
current = self.tell()
try:
self.seek(1)
self._is_animated = True
except EOFError:
self._is_animated = False
self.seek(current)
return self._is_animated
def seek(self, frame):
if frame == self.__frame:
return
if frame < self.__frame:
self._seek(0)
last_frame = self.__frame
for f in range(self.__frame + 1, frame + 1):
try:
self._seek(f)
except EOFError:
self.seek(last_frame)
raise EOFError("no more images in FLI file")
def _seek(self, frame):
if frame == 0:
self.__frame = -1
self.__fp.seek(self.__rewind)
self.__offset = 128
if frame != self.__frame + 1:
raise ValueError("cannot seek to frame %d" % frame)
self.__frame = frame
# move to next frame
self.fp = self.__fp
self.fp.seek(self.__offset)
s = self.fp.read(4)
if not s:
raise EOFError
framesize = i32(s)
self.decodermaxblock = framesize
self.tile = [("fli", (0, 0)+self.size, self.__offset, None)]
self.__offset += framesize
def tell(self):
return self.__frame
#
# registry
Image.register_open("FLI", FliImageFile, _accept)
Image.register_extension("FLI", ".fli")
Image.register_extension("FLI", ".flc")
| gpl-3.0 | 5,004,803,420,460,204,000 | 24.43617 | 69 | 0.5 | false |
imzers/gsutil-with-php | gslib/tests/test_defacl.py | 22 | 8847 | # -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for the defacl command."""
from __future__ import absolute_import
import re
from gslib.cs_api_map import ApiSelector
import gslib.tests.testcase as case
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import ObjectToURI as suri
PUBLIC_READ_JSON_ACL_TEXT = '"entity":"allUsers","role":"READER"'
@SkipForS3('S3 does not support default object ACLs.')
class TestDefacl(case.GsUtilIntegrationTestCase):
"""Integration tests for the defacl command."""
_defacl_ch_prefix = ['defacl', 'ch']
_defacl_get_prefix = ['defacl', 'get']
_defacl_set_prefix = ['defacl', 'set']
def _MakeScopeRegex(self, role, entity_type, email_address):
template_regex = (r'\{.*"entity":\s*"%s-%s".*"role":\s*"%s".*\}' %
(entity_type, email_address, role))
return re.compile(template_regex, flags=re.DOTALL)
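  # Illustrative note (added for clarity, not part of the original test): for
  # ('READER', 'group', '[email protected]') the regex matches a JSON ACL entry
  # such as {"entity": "group-[email protected]", "role": "READER"}.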
def testChangeDefaultAcl(self):
"""Tests defacl ch."""
bucket = self.CreateBucket()
test_regex = self._MakeScopeRegex(
'OWNER', 'group', self.GROUP_TEST_ADDRESS)
test_regex2 = self._MakeScopeRegex(
'READER', 'group', self.GROUP_TEST_ADDRESS)
json_text = self.RunGsUtil(self._defacl_get_prefix +
[suri(bucket)], return_stdout=True)
self.assertNotRegexpMatches(json_text, test_regex)
self.RunGsUtil(self._defacl_ch_prefix +
['-g', self.GROUP_TEST_ADDRESS+':FC', suri(bucket)])
json_text2 = self.RunGsUtil(self._defacl_get_prefix +
[suri(bucket)], return_stdout=True)
self.assertRegexpMatches(json_text2, test_regex)
self.RunGsUtil(self._defacl_ch_prefix +
['-g', self.GROUP_TEST_ADDRESS+':READ', suri(bucket)])
json_text3 = self.RunGsUtil(self._defacl_get_prefix +
[suri(bucket)], return_stdout=True)
self.assertRegexpMatches(json_text3, test_regex2)
stderr = self.RunGsUtil(self._defacl_ch_prefix +
['-g', self.GROUP_TEST_ADDRESS+':WRITE',
suri(bucket)],
return_stderr=True, expected_status=1)
self.assertIn('WRITER cannot be set as a default object ACL', stderr)
def testChangeDefaultAclEmpty(self):
"""Tests adding and removing an entry from an empty default object ACL."""
bucket = self.CreateBucket()
# First, clear out the default object ACL on the bucket.
self.RunGsUtil(self._defacl_set_prefix + ['private', suri(bucket)])
json_text = self.RunGsUtil(self._defacl_get_prefix +
[suri(bucket)], return_stdout=True)
empty_regex = r'\[\]\s*'
self.assertRegexpMatches(json_text, empty_regex)
group_regex = self._MakeScopeRegex(
'READER', 'group', self.GROUP_TEST_ADDRESS)
self.RunGsUtil(self._defacl_ch_prefix +
['-g', self.GROUP_TEST_ADDRESS+':READ', suri(bucket)])
json_text2 = self.RunGsUtil(self._defacl_get_prefix +
[suri(bucket)], return_stdout=True)
self.assertRegexpMatches(json_text2, group_regex)
if self.test_api == ApiSelector.JSON:
# TODO: Enable when JSON service respects creating a private (no entries)
# default object ACL via PATCH. For now, only supported in XML.
return
# After adding and removing a group, the default object ACL should be empty.
self.RunGsUtil(self._defacl_ch_prefix +
['-d', self.GROUP_TEST_ADDRESS, suri(bucket)])
json_text3 = self.RunGsUtil(self._defacl_get_prefix +
[suri(bucket)], return_stdout=True)
self.assertRegexpMatches(json_text3, empty_regex)
def testChangeMultipleBuckets(self):
"""Tests defacl ch on multiple buckets."""
bucket1 = self.CreateBucket()
bucket2 = self.CreateBucket()
test_regex = self._MakeScopeRegex(
'READER', 'group', self.GROUP_TEST_ADDRESS)
json_text = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket1)],
return_stdout=True)
self.assertNotRegexpMatches(json_text, test_regex)
json_text = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket2)],
return_stdout=True)
self.assertNotRegexpMatches(json_text, test_regex)
self.RunGsUtil(self._defacl_ch_prefix +
['-g', self.GROUP_TEST_ADDRESS+':READ',
suri(bucket1), suri(bucket2)])
json_text = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket1)],
return_stdout=True)
self.assertRegexpMatches(json_text, test_regex)
json_text = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket2)],
return_stdout=True)
self.assertRegexpMatches(json_text, test_regex)
def testChangeMultipleAcls(self):
"""Tests defacl ch with multiple ACL entries."""
bucket = self.CreateBucket()
test_regex_group = self._MakeScopeRegex(
'READER', 'group', self.GROUP_TEST_ADDRESS)
test_regex_user = self._MakeScopeRegex(
'OWNER', 'user', self.USER_TEST_ADDRESS)
json_text = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket)],
return_stdout=True)
self.assertNotRegexpMatches(json_text, test_regex_group)
self.assertNotRegexpMatches(json_text, test_regex_user)
self.RunGsUtil(self._defacl_ch_prefix +
['-g', self.GROUP_TEST_ADDRESS+':READ',
'-u', self.USER_TEST_ADDRESS+':fc', suri(bucket)])
json_text = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket)],
return_stdout=True)
self.assertRegexpMatches(json_text, test_regex_group)
self.assertRegexpMatches(json_text, test_regex_user)
def testEmptyDefAcl(self):
bucket = self.CreateBucket()
self.RunGsUtil(self._defacl_set_prefix + ['private', suri(bucket)])
stdout = self.RunGsUtil(self._defacl_get_prefix + [suri(bucket)],
return_stdout=True)
self.assertEquals(stdout.rstrip(), '[]')
self.RunGsUtil(self._defacl_ch_prefix +
['-u', self.USER_TEST_ADDRESS+':fc', suri(bucket)])
def testDeletePermissionsWithCh(self):
"""Tests removing permissions with defacl ch."""
bucket = self.CreateBucket()
test_regex = self._MakeScopeRegex(
'OWNER', 'user', self.USER_TEST_ADDRESS)
json_text = self.RunGsUtil(
self._defacl_get_prefix + [suri(bucket)], return_stdout=True)
self.assertNotRegexpMatches(json_text, test_regex)
self.RunGsUtil(self._defacl_ch_prefix +
['-u', self.USER_TEST_ADDRESS+':fc', suri(bucket)])
json_text = self.RunGsUtil(
self._defacl_get_prefix + [suri(bucket)], return_stdout=True)
self.assertRegexpMatches(json_text, test_regex)
self.RunGsUtil(self._defacl_ch_prefix +
['-d', self.USER_TEST_ADDRESS, suri(bucket)])
json_text = self.RunGsUtil(
self._defacl_get_prefix + [suri(bucket)], return_stdout=True)
self.assertNotRegexpMatches(json_text, test_regex)
def testTooFewArgumentsFails(self):
"""Tests calling defacl with insufficient number of arguments."""
# No arguments for get, but valid subcommand.
stderr = self.RunGsUtil(self._defacl_get_prefix, return_stderr=True,
expected_status=1)
self.assertIn('command requires at least', stderr)
# No arguments for set, but valid subcommand.
stderr = self.RunGsUtil(self._defacl_set_prefix, return_stderr=True,
expected_status=1)
self.assertIn('command requires at least', stderr)
# No arguments for ch, but valid subcommand.
stderr = self.RunGsUtil(self._defacl_ch_prefix, return_stderr=True,
expected_status=1)
self.assertIn('command requires at least', stderr)
# Neither arguments nor subcommand.
stderr = self.RunGsUtil(['defacl'], return_stderr=True, expected_status=1)
self.assertIn('command requires at least', stderr)
class TestDefaclOldAlias(TestDefacl):
_defacl_ch_prefix = ['chdefacl']
_defacl_get_prefix = ['getdefacl']
_defacl_set_prefix = ['setdefacl']
| apache-2.0 | 5,353,667,576,438,776,000 | 42.156098 | 80 | 0.640782 | false |
ddelemeny/calligra | 3rdparty/google-breakpad/src/tools/gyp/test/small/gyptest-small.py | 89 | 1405 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Runs small tests.
"""
import imp
import os
import sys
import unittest
import TestGyp
test = TestGyp.TestGyp()
# Add pylib to the import path (so tests can import their dependencies).
# This is consistent with the path.append done in the top file "gyp".
sys.path.append(os.path.join(test._cwd, 'pylib'))
# Add new test suites here.
files_to_test = [
'pylib/gyp/MSVSSettings_test.py',
'pylib/gyp/easy_xml_test.py',
'pylib/gyp/generator/msvs_test.py',
'pylib/gyp/generator/ninja_test.py',
'pylib/gyp/common_test.py',
]
# Collect all the suites from the above files.
suites = []
for filename in files_to_test:
# Carve the module name out of the path.
name = os.path.splitext(os.path.split(filename)[1])[0]
# Find the complete module path.
full_filename = os.path.join(test._cwd, filename)
# Load the module.
module = imp.load_source(name, full_filename)
# Add it to the list of test suites.
suites.append(unittest.defaultTestLoader.loadTestsFromModule(module))
# Create combined suite.
all_tests = unittest.TestSuite(suites)
# Run all the tests.
result = unittest.TextTestRunner(verbosity=2).run(all_tests)
if result.failures or result.errors:
test.fail_test()
test.pass_test()
| gpl-2.0 | 7,685,911,221,956,526,000 | 25.509434 | 72 | 0.720996 | false |
claws/txcosm | examples/subscribe.py | 1 | 3214 | #!/usr/bin/env python
"""
Subscribe to a feed or a datastream that is visible to the supplied Cosm user API key
To use this script you must create a text file containing your API key
and pass it to this script using the --keyfile argument as follows:
Subscribe for updates to a particular feed:
$ subscribe.py --keyfile=/path/to/apikey/file --feed=XXX
Subscribe for updates to a particular datastream within a feed:
$ subscribe.py --keyfile=/path/to/apikey/file --feed=XXX --datastream=YYY
txcosm must be installed or visible on the PYTHONPATH.
"""
import logging
from optparse import OptionParser
import os
import sys
from twisted.internet import reactor
from txcosm.PAWSClient import PAWSClient
parser = OptionParser("")
parser.add_option("-k", "--keyfile", dest="keyfile", default=None, help="Path to file containing your Cosm API key")
parser.add_option("-f", "--feed", dest="feed", default=None, help="The feed to subscribe to")
parser.add_option("-d", "--datastream", dest="datastream", default=None, help="The datastream within the feed to subscribe to")
(options, args) = parser.parse_args()
#
# Set up callback handlers
#
def updateHandler(dataStructure):
"""
Handle a txcosm data structure object resulting from the receipt
of a subscription update message received from Cosm.
The data structure returned will vary depending on the particular
resource subscribed to.
If a datastream is specified the returned data structure will be
a txcosm.Datastream object. If just a feed is specified then the
returned data structure will be a txcosm.Environment object.
"""
logging.info("Subscription update message received:\n%s\n" % str(dataStructure))
def do_subscribe(connected, client, resource):
""" Subscribe to the specified resource if the connection is established """
if connected:
def handleSubscribeResponse(result):
token, response_code = result
print "Subscription token is: %s" % token
print "Subscribe response status: %s" % response_code
return result
d = client.subscribe(resource, updateHandler)
d.addCallback(handleSubscribeResponse)
else:
print "Connection failed"
reactor.callLater(0.1, reactor.stop)
return
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format="%(asctime)s : %(message)s")
# confirm keyfile is supplied and valid
if options.keyfile is None:
print parser.get_usage()
sys.exit(1)
keyfile = os.path.expanduser(options.keyfile)
if not os.path.exists(keyfile):
print "Invalid API key file path: %s" % keyfile
sys.exit(1)
fd = open(keyfile, 'r')
key = fd.read().strip()
fd.close()
if options.feed is None:
print "No feed identifier specified"
print parser.get_usage()
sys.exit(1)
if options.datastream:
resource = "/feeds/%s/datastreams/%s" % (options.feed, options.datastream)
else:
resource = "/feeds/%s" % (options.feed)
client = PAWSClient(api_key=key)
d = client.connect()
d.addCallback(do_subscribe, client, resource)
reactor.run()
| mit | -1,308,822,438,510,303,500 | 30.203883 | 127 | 0.690728 | false |
followloda/PornGuys | FlaskServer/venv/Lib/site-packages/werkzeug/exceptions.py | 176 | 18733 | # -*- coding: utf-8 -*-
"""
werkzeug.exceptions
~~~~~~~~~~~~~~~~~~~
This module implements a number of Python exceptions you can raise from
within your views to trigger a standard non-200 response.
Usage Example
-------------
::
from werkzeug.wrappers import BaseRequest
from werkzeug.wsgi import responder
from werkzeug.exceptions import HTTPException, NotFound
def view(request):
raise NotFound()
@responder
def application(environ, start_response):
request = BaseRequest(environ)
try:
return view(request)
except HTTPException as e:
return e
As you can see from this example those exceptions are callable WSGI
applications. Because of Python 2.4 compatibility those do not extend
from the response objects but only from the python exception class.
As a matter of fact they are not Werkzeug response objects. However you
can get a response object by calling ``get_response()`` on a HTTP
exception.
Keep in mind that you have to pass an environment to ``get_response()``
because some errors fetch additional information from the WSGI
environment.
If you want to hook in a different exception page to say, a 404 status
code, you can add a second except for a specific subclass of an error::
@responder
def application(environ, start_response):
request = BaseRequest(environ)
try:
return view(request)
except NotFound, e:
return not_found(request)
except HTTPException, e:
return e
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import sys
# Because of bootstrapping reasons we need to manually patch ourselves
# onto our parent module.
import werkzeug
werkzeug.exceptions = sys.modules[__name__]
from werkzeug._internal import _get_environ
from werkzeug._compat import iteritems, integer_types, text_type, \
implements_to_string
from werkzeug.wrappers import Response
@implements_to_string
class HTTPException(Exception):
"""
Baseclass for all HTTP exceptions. This exception can be called as WSGI
application to render a default error page or you can catch the subclasses
of it independently and render nicer error messages.
"""
code = None
description = None
def __init__(self, description=None, response=None):
Exception.__init__(self)
if description is not None:
self.description = description
self.response = response
@classmethod
def wrap(cls, exception, name=None):
"""This method returns a new subclass of the exception provided that
also is a subclass of `BadRequest`.
"""
class newcls(cls, exception):
def __init__(self, arg=None, *args, **kwargs):
cls.__init__(self, *args, **kwargs)
exception.__init__(self, arg)
newcls.__module__ = sys._getframe(1).f_globals.get('__name__')
newcls.__name__ = name or cls.__name__ + exception.__name__
return newcls
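# Usage note (added for illustration): this module itself relies on wrap()
# near the bottom of the file -- BadRequestKeyError = BadRequest.wrap(KeyError)
# -- producing a class that is simultaneously a 400 BadRequest and a KeyError.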
@property
def name(self):
"""The status name."""
return HTTP_STATUS_CODES.get(self.code, 'Unknown Error')
def get_description(self, environ=None):
"""Get the description."""
return u'<p>%s</p>' % escape(self.description)
def get_body(self, environ=None):
"""Get the HTML body."""
return text_type((
u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
u'<title>%(code)s %(name)s</title>\n'
u'<h1>%(name)s</h1>\n'
u'%(description)s\n'
) % {
'code': self.code,
'name': escape(self.name),
'description': self.get_description(environ)
})
def get_headers(self, environ=None):
"""Get a list of headers."""
return [('Content-Type', 'text/html')]
def get_response(self, environ=None):
"""Get a response object. If one was passed to the exception
it's returned directly.
:param environ: the optional environ for the request. This
can be used to modify the response depending
on how the request looked like.
:return: a :class:`Response` object or a subclass thereof.
"""
if self.response is not None:
return self.response
if environ is not None:
environ = _get_environ(environ)
headers = self.get_headers(environ)
return Response(self.get_body(environ), self.code, headers)
def __call__(self, environ, start_response):
"""Call the exception as WSGI application.
:param environ: the WSGI environment.
:param start_response: the response callable provided by the WSGI
server.
"""
response = self.get_response(environ)
return response(environ, start_response)
def __str__(self):
return '%d: %s' % (self.code, self.name)
def __repr__(self):
return '<%s \'%s\'>' % (self.__class__.__name__, self)
class BadRequest(HTTPException):
"""*400* `Bad Request`
Raise if the browser sends something to the application the application
or server cannot handle.
"""
code = 400
description = (
'The browser (or proxy) sent a request that this server could '
'not understand.'
)
class ClientDisconnected(BadRequest):
"""Internal exception that is raised if Werkzeug detects a disconnected
client. Since the client is already gone at that point attempting to
send the error message to the client might not work and might ultimately
result in another exception in the server. Mainly this is here so that
it is silenced by default as far as Werkzeug is concerned.
Since disconnections cannot be reliably detected and are unspecified
by WSGI to a large extent this might or might not be raised if a client
is gone.
.. versionadded:: 0.8
"""
class SecurityError(BadRequest):
"""Raised if something triggers a security error. This is otherwise
exactly like a bad request error.
.. versionadded:: 0.9
"""
class BadHost(BadRequest):
"""Raised if the submitted host is badly formatted.
.. versionadded:: 0.11.2
"""
class Unauthorized(HTTPException):
"""*401* `Unauthorized`
Raise if the user is not authorized. Also used if you want to use HTTP
basic auth.
"""
code = 401
description = (
'The server could not verify that you are authorized to access '
'the URL requested. You either supplied the wrong credentials (e.g. '
'a bad password), or your browser doesn\'t understand how to supply '
'the credentials required.'
)
class Forbidden(HTTPException):
"""*403* `Forbidden`
Raise if the user doesn't have the permission for the requested resource
but was authenticated.
"""
code = 403
description = (
'You don\'t have the permission to access the requested resource. '
'It is either read-protected or not readable by the server.'
)
class NotFound(HTTPException):
"""*404* `Not Found`
Raise if a resource does not exist and never existed.
"""
code = 404
description = (
'The requested URL was not found on the server. '
'If you entered the URL manually please check your spelling and '
'try again.'
)
class MethodNotAllowed(HTTPException):
"""*405* `Method Not Allowed`
Raise if the server used a method the resource does not handle. For
example `POST` if the resource is view only. Especially useful for REST.
The first argument for this exception should be a list of allowed methods.
Strictly speaking the response would be invalid if you don't provide valid
methods in the header which you can do with that list.
"""
code = 405
description = 'The method is not allowed for the requested URL.'
def __init__(self, valid_methods=None, description=None):
"""Takes an optional list of valid http methods
starting with werkzeug 0.3 the list will be mandatory."""
HTTPException.__init__(self, description)
self.valid_methods = valid_methods
def get_headers(self, environ):
headers = HTTPException.get_headers(self, environ)
if self.valid_methods:
headers.append(('Allow', ', '.join(self.valid_methods)))
return headers
class NotAcceptable(HTTPException):
"""*406* `Not Acceptable`
Raise if the server can't return any content conforming to the
`Accept` headers of the client.
"""
code = 406
description = (
'The resource identified by the request is only capable of '
'generating response entities which have content characteristics '
'not acceptable according to the accept headers sent in the '
'request.'
)
class RequestTimeout(HTTPException):
"""*408* `Request Timeout`
Raise to signalize a timeout.
"""
code = 408
description = (
'The server closed the network connection because the browser '
'didn\'t finish the request within the specified time.'
)
class Conflict(HTTPException):
"""*409* `Conflict`
Raise to signal that a request cannot be completed because it conflicts
with the current state on the server.
.. versionadded:: 0.7
"""
code = 409
description = (
'A conflict happened while processing the request. The resource '
'might have been modified while the request was being processed.'
)
class Gone(HTTPException):
"""*410* `Gone`
Raise if a resource existed previously and went away without new location.
"""
code = 410
description = (
'The requested URL is no longer available on this server and there '
'is no forwarding address. If you followed a link from a foreign '
'page, please contact the author of this page.'
)
class LengthRequired(HTTPException):
"""*411* `Length Required`
Raise if the browser submitted data but no ``Content-Length`` header which
is required for the kind of processing the server does.
"""
code = 411
description = (
'A request with this method requires a valid <code>Content-'
'Length</code> header.'
)
class PreconditionFailed(HTTPException):
"""*412* `Precondition Failed`
Status code used in combination with ``If-Match``, ``If-None-Match``, or
``If-Unmodified-Since``.
"""
code = 412
description = (
'The precondition on the request for the URL failed positive '
'evaluation.'
)
class RequestEntityTooLarge(HTTPException):
"""*413* `Request Entity Too Large`
The status code one should return if the data submitted exceeded a given
limit.
"""
code = 413
description = (
'The data value transmitted exceeds the capacity limit.'
)
class RequestURITooLarge(HTTPException):
"""*414* `Request URI Too Large`
Like *413* but for too long URLs.
"""
code = 414
description = (
'The length of the requested URL exceeds the capacity limit '
'for this server. The request cannot be processed.'
)
class UnsupportedMediaType(HTTPException):
"""*415* `Unsupported Media Type`
The status code returned if the server is unable to handle the media type
the client transmitted.
"""
code = 415
description = (
'The server does not support the media type transmitted in '
'the request.'
)
class RequestedRangeNotSatisfiable(HTTPException):
"""*416* `Requested Range Not Satisfiable`
The client asked for a part of the file that lies beyond the end
of the file.
.. versionadded:: 0.7
"""
code = 416
description = (
'The server cannot provide the requested range.'
)
class ExpectationFailed(HTTPException):
"""*417* `Expectation Failed`
The server cannot meet the requirements of the Expect request-header.
.. versionadded:: 0.7
"""
code = 417
description = (
'The server could not meet the requirements of the Expect header'
)
class ImATeapot(HTTPException):
"""*418* `I'm a teapot`
The server should return this if it is a teapot and someone attempted
to brew coffee with it.
.. versionadded:: 0.7
"""
code = 418
description = (
'This server is a teapot, not a coffee machine'
)
class UnprocessableEntity(HTTPException):
"""*422* `Unprocessable Entity`
Used if the request is well formed, but the instructions are otherwise
incorrect.
"""
code = 422
description = (
'The request was well-formed but was unable to be followed '
'due to semantic errors.'
)
class PreconditionRequired(HTTPException):
"""*428* `Precondition Required`
The server requires this request to be conditional, typically to prevent
the lost update problem, which is a race condition between two or more
clients attempting to update a resource through PUT or DELETE. By requiring
each client to include a conditional header ("If-Match" or "If-Unmodified-
Since") with the proper value retained from a recent GET request, the
server ensures that each client has at least seen the previous revision of
the resource.
"""
code = 428
description = (
'This request is required to be conditional; try using "If-Match" '
'or "If-Unmodified-Since".'
)
class TooManyRequests(HTTPException):
"""*429* `Too Many Requests`
The server is limiting the rate at which this user receives responses, and
this request exceeds that rate. (The server may use any convenient method
to identify users and their request rates). The server may include a
"Retry-After" header to indicate how long the user should wait before
retrying.
"""
code = 429
description = (
'This user has exceeded an allotted request count. Try again later.'
)
class RequestHeaderFieldsTooLarge(HTTPException):
"""*431* `Request Header Fields Too Large`
The server refuses to process the request because the header fields are too
large. One or more individual fields may be too large, or the set of all
headers is too large.
"""
code = 431
description = (
'One or more header fields exceeds the maximum size.'
)
class InternalServerError(HTTPException):
"""*500* `Internal Server Error`
Raise if an internal server error occurred. This is a good fallback if an
unknown error occurred in the dispatcher.
"""
code = 500
description = (
'The server encountered an internal error and was unable to '
'complete your request. Either the server is overloaded or there '
'is an error in the application.'
)
class NotImplemented(HTTPException):
"""*501* `Not Implemented`
Raise if the application does not support the action requested by the
browser.
"""
code = 501
description = (
'The server does not support the action requested by the '
'browser.'
)
class BadGateway(HTTPException):
"""*502* `Bad Gateway`
If you do proxying in your application you should return this status code
if you received an invalid response from the upstream server it accessed
in attempting to fulfill the request.
"""
code = 502
description = (
'The proxy server received an invalid response from an upstream '
'server.'
)
class ServiceUnavailable(HTTPException):
"""*503* `Service Unavailable`
Status code you should return if a service is temporarily unavailable.
"""
code = 503
description = (
'The server is temporarily unable to service your request due to '
'maintenance downtime or capacity problems. Please try again '
'later.'
)
class GatewayTimeout(HTTPException):
"""*504* `Gateway Timeout`
Status code you should return if a connection to an upstream server
times out.
"""
code = 504
description = (
'The connection to an upstream server timed out.'
)
class HTTPVersionNotSupported(HTTPException):
"""*505* `HTTP Version Not Supported`
The server does not support the HTTP protocol version used in the request.
"""
code = 505
description = (
'The server does not support the HTTP protocol version used in the '
'request.'
)
default_exceptions = {}
__all__ = ['HTTPException']
def _find_exceptions():
for name, obj in iteritems(globals()):
try:
is_http_exception = issubclass(obj, HTTPException)
except TypeError:
is_http_exception = False
if not is_http_exception or obj.code is None:
continue
__all__.append(obj.__name__)
old_obj = default_exceptions.get(obj.code, None)
if old_obj is not None and issubclass(obj, old_obj):
continue
default_exceptions[obj.code] = obj
_find_exceptions()
del _find_exceptions
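# After the pass above, default_exceptions maps status codes to exception
# classes, e.g. default_exceptions[404] is NotFound and default_exceptions[500]
# is InternalServerError (both defined earlier in this module).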
class Aborter(object):
"""
When passed a dict of code -> exception items it can be used as
callable that raises exceptions. If the first argument to the
callable is an integer it will be looked up in the mapping, if it's
a WSGI application it will be raised in a proxy exception.
The rest of the arguments are forwarded to the exception constructor.
"""
def __init__(self, mapping=None, extra=None):
if mapping is None:
mapping = default_exceptions
self.mapping = dict(mapping)
if extra is not None:
self.mapping.update(extra)
def __call__(self, code, *args, **kwargs):
if not args and not kwargs and not isinstance(code, integer_types):
raise HTTPException(response=code)
if code not in self.mapping:
raise LookupError('no exception for %r' % code)
raise self.mapping[code](*args, **kwargs)
abort = Aborter()
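# Typical usage of the module-level instance above (added note; the Response
# form assumes the standard werkzeug Response constructor):
#   abort(404)                           # raises NotFound from the mapping
#   abort(Response('denied', status=401))  # non-integer code: re-raised as
#                                          # HTTPException(response=...)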
#: an exception that is used internally to signal both a key error and a
#: bad request. Used by a lot of the datastructures.
BadRequestKeyError = BadRequest.wrap(KeyError)
# imported here because of circular dependencies of werkzeug.utils
from werkzeug.utils import escape
from werkzeug.http import HTTP_STATUS_CODES
| gpl-3.0 | -712,162,899,535,290,400 | 27.469605 | 79 | 0.645172 | false |
flit/cmdis | cmdis/formatter.py | 1 | 6910 | # Copyright (c) 2016-2019 Chris Reed
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .utilities import bytes_to_le16
from .registers import CORE_REGISTER_NAMES
from .helpers import SRType
class Operand(object):
def format(self, formatter):
raise NotImplementedError()
class RegisterOperand(Operand):
def __init__(self, reg, wback=False):
self._reg = reg
self._wback = wback
def format(self, formatter):
result = CORE_REGISTER_NAMES[self._reg]
if self._wback:
result += "!"
return result
class ReglistOperand(Operand):
def __init__(self, reglist):
self._reglist = reglist
def format(self, formatter):
regs = []
startRange = -1
endRange = -1
def add_reg_range(regs):
if startRange != -1:
if startRange == endRange:
regs.append(CORE_REGISTER_NAMES[startRange])
else:
startReg = CORE_REGISTER_NAMES[startRange]
endReg = CORE_REGISTER_NAMES[endRange]
regs.append("%s-%s" % (startReg, endReg))
for n, b in enumerate(self._reglist):
if b:
if startRange == -1:
startRange = n
endRange = n
else:
add_reg_range(regs)
startRange = -1
endRange = -1
add_reg_range(regs)
return '{' + ','.join(regs) + '}'
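# Illustrative (added, assuming CORE_REGISTER_NAMES uses the usual ARM names,
# e.g. index 14 -> 'lr'): a reglist with bits 0-2 and 14 set would format as
# '{r0-r2,lr}' -- consecutive registers collapse into a dash range.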
class ImmediateOperand(Operand):
def __init__(self, imm, hideIfZero=False):
self._imm = imm
self._hideIfZero = hideIfZero
def format(self, formatter):
if self._imm == 0 and self._hideIfZero:
return None
if self._imm > 9:
comment = "0x%x" % (self._imm)
formatter.add_comment(comment)
return "#%d" % self._imm
class LabelOperand(Operand):
def __init__(self, offset):
self._offset = offset
def format(self, formatter):
# Add a comment with the absolute address of the label.
comment = "0x%x" % (formatter.instruction.address + 4 + self._offset)
formatter.add_comment(comment)
return ".%+d" % (self._offset + 4)
class ShiftRotateOperand(Operand):
OP_NAMES = ["None",
"LSL",
"LSR",
"ASR",
"ROR",
"RRX",]
def __init__(self, type, amount):
self._type = type
self._amount = amount
def format(self, formatter):
if self._type == SRType.SRType_None:
return None
return "%s #%d" % (self.OP_NAMES[self._type.value], self._amount)
class BarrierOperand(Operand):
def __init__(self, option):
self._option = option
def format(self, formatter):
if self._option == 0b1111:
return "sy"
else:
return "#%d" % self._option
class MemoryAccessOperand(Operand):
def __init__(self, *args, **kwargs):
self._operands = args
self._wback = kwargs.get("wback", False)
def format(self, formatter):
formattedOperands = []
for o in self._operands:
formatted = o.format(formatter)
if formatted is not None:
formattedOperands.append(formatted)
result = "[" + ", ".join(formattedOperands) + "]"
if self._wback:
result += "!"
return result
class CpsOperand(Operand):
def __init__(self, affectPri, affectFault):
self._affectPri = affectPri
self._affectFault = affectFault
def format(self, formatter):
result = ""
if self._affectPri:
result += "i"
if self._affectFault:
result += "f"
return result
class SpecialRegisterOperand(Operand):
def __init__(self, spec, mask=-1):
self._spec = spec
self._mask = mask
def format(self, formatter):
result = ""
upper = self._spec[3:8]
lower = self._spec[0:3]
if upper == '00000':
if lower == '000':
result = "APSR"
elif lower == '001':
result = "IAPSR"
elif lower == '010':
result = "EAPSR"
elif lower == '011':
result = "XPSR"
elif lower == '101':
result = "IPSR"
elif lower == '110':
result = "EPSR"
elif lower == '111':
result = "IEPSR"
if lower < 4 and self._mask != -1:
if self._mask == '10':
result += '_nzcvq'
elif self._mask == '01':
result += '_g'
elif self._mask == '11':
result += '_nzcvqg'
elif upper == '00001':
if lower == '000':
result = "MSP"
elif lower == '001':
result = "PSP"
elif upper == '00010':
if lower == '000':
result = "PRIMASK"
elif lower == '001':
result = "BASEPRI"
elif lower == '010':
result = "BASEPRI_MAX"
elif lower == '011':
result = "FAULTMASK"
elif lower == '100':
result = "CONTROL"
return result
class Formatter(object):
def __init__(self, cpu):
self.instruction = None
self.cpu = cpu
self._comments = []
def format(self, instruction):
self.instruction = instruction
self._comments = []
b = instruction.bytes
hw1 = bytes_to_le16(b, 0)
byteString = "%04x" % hw1
if len(b) == 4:
hw2 = bytes_to_le16(b, 2)
byteString += " %04x" % hw2
result = "{0:<12} {1:<8}".format(byteString, self.instruction.mnemonic)
formattedOperands = []
for o in self.instruction.operands:
formatted = o.format(self)
if formatted is not None:
formattedOperands.append(formatted)
result += ", ".join(formattedOperands)
if self._comments:
result = "{0:<36} ; {1}".format(result, " ".join(self._comments))
self.instruction = None
return result
def add_comment(self, comment):
self._comments.append(comment)
| apache-2.0 | -2,868,916,636,484,870,700 | 28.279661 | 79 | 0.519392 | false |
mdblv2/joatu-django | application/site-packages/django/contrib/admindocs/utils.py | 216 | 3801 | "Misc. utility functions/classes for admin documentation generator."
import re
from email.parser import HeaderParser
from email.errors import HeaderParseError
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.utils.encoding import force_bytes
try:
import docutils.core
import docutils.nodes
import docutils.parsers.rst.roles
except ImportError:
docutils_is_available = False
else:
docutils_is_available = True
def trim_docstring(docstring):
"""
Uniformly trims leading/trailing whitespace from docstrings.
Based on http://www.python.org/peps/pep-0257.html#handling-docstring-indentation
"""
if not docstring or not docstring.strip():
return ''
# Convert tabs to spaces and split into lines
lines = docstring.expandtabs().splitlines()
indent = min([len(line) - len(line.lstrip()) for line in lines if line.lstrip()])
trimmed = [lines[0].lstrip()] + [line[indent:].rstrip() for line in lines[1:]]
return "\n".join(trimmed).strip()
def parse_docstring(docstring):
"""
Parse out the parts of a docstring. Returns (title, body, metadata).
"""
docstring = trim_docstring(docstring)
parts = re.split(r'\n{2,}', docstring)
title = parts[0]
if len(parts) == 1:
body = ''
metadata = {}
else:
parser = HeaderParser()
try:
metadata = parser.parsestr(parts[-1])
except HeaderParseError:
metadata = {}
body = "\n\n".join(parts[1:])
else:
metadata = dict(metadata.items())
if metadata:
body = "\n\n".join(parts[1:-1])
else:
body = "\n\n".join(parts[1:])
return title, body, metadata
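# Hedged example (added): parse_docstring("Title.\n\nBody paragraph.\n\nkey: value")
# returns ('Title.', 'Body paragraph.', {'key': 'value'}) -- the final block is
# only treated as metadata when HeaderParser can parse it as RFC 822 headers.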
def parse_rst(text, default_reference_context, thing_being_parsed=None):
"""
Convert the string from reST to an XHTML fragment.
"""
overrides = {
'doctitle_xform' : True,
'initial_header_level' : 3,
"default_reference_context" : default_reference_context,
"link_base" : reverse('django-admindocs-docroot').rstrip('/')
}
if thing_being_parsed:
thing_being_parsed = force_bytes("<%s>" % thing_being_parsed)
parts = docutils.core.publish_parts(text, source_path=thing_being_parsed,
destination_path=None, writer_name='html',
settings_overrides=overrides)
return mark_safe(parts['fragment'])
#
# reST roles
#
ROLES = {
'model' : '%s/models/%s/',
'view' : '%s/views/%s/',
'template' : '%s/templates/%s/',
'filter' : '%s/filters/#%s',
'tag' : '%s/tags/#%s',
}
def create_reference_role(rolename, urlbase):
def _role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None: options = {}
if content is None: content = []
node = docutils.nodes.reference(rawtext, text, refuri=(urlbase % (inliner.document.settings.link_base, text.lower())), **options)
return [node], []
docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
def default_reference_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None: options = {}
if content is None: content = []
context = inliner.document.settings.default_reference_context
node = docutils.nodes.reference(rawtext, text, refuri=(ROLES[context] % (inliner.document.settings.link_base, text.lower())), **options)
return [node], []
if docutils_is_available:
docutils.parsers.rst.roles.register_canonical_role('cmsreference', default_reference_role)
docutils.parsers.rst.roles.DEFAULT_INTERPRETED_ROLE = 'cmsreference'
for name, urlbase in ROLES.items():
create_reference_role(name, urlbase)
| apache-2.0 | 1,197,862,237,521,927,400 | 34.858491 | 140 | 0.643778 | false |
ismtabo/huffman_algorithm | main.py | 2 | 2098 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python implementation of Huffman Coding
This script tests the Huffman Coding implementation in 'main.py'.
"""
__author__ = 'Ismael Taboada'
__version__= '1.0'
from collections import defaultdict
import csv
import os.path
import time
from huffman import HuffmanCoding
from graphviz import Digraph
DEBUG = False
DIA_FILE = 'huffman.tree'
LOG_FILE = 'log.csv'
TEST = "this is an example for huffman encoding"
"""Test for Graphviz software
"""
try:
dot = Digraph()
except Exception:
print "Error: Graphviz software not found.\nPlease install Graphviz software on your computer.(http://www.graphviz.org/Download.php)"
exit(1)
"""User input
"""
txtin = raw_input("Write some symbols(blank for sample case):")
txtin = TEST if txtin=="" else txtin
txtout = txtin
"""Extract frecuency of each symbol of set
"""
symb2freq = defaultdict(int)
for ch in txtin:
symb2freq[ch] += 1
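# Illustrative note (not part of the original script): for a tiny input such
# as "abba" the loop above leaves symb2freq == {'a': 2, 'b': 2}; this
# {symbol: count} mapping is exactly what HuffmanCoding.encode consumes below.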
"""Implementation of Huffman Algorithm
"""
start = time.time()
huff = HuffmanCoding()
huff.encode(symb2freq)
end = time.time()
time_lapse = end - start
"""Conversion from Huffman Coding Tree to Coding table
"""
coding_table = huff.tree_to_table()
"""Outputs
"""
print "Codes table"
print "Symbol\tFrec\tCode"
for coding in coding_table:
print "\t".join(map(str,coding))
# Replace at the input text the symbol with the propper code
txtout = txtout.replace(coding[0],coding[2])
print "Time: ",time_lapse,"ms"
print "\nText input:",txtin
print "Text output:",txtout
"""Huffman tree Graphviz visualization
"""
dot = huff.tree_to_graph()
print "\nDiagram saved at: ",DIA_FILE+'.png'
dot.render(DIA_FILE, view=DEBUG)
"""Log of input's size and execution time
"""
log_exits = os.path.isfile(LOG_FILE)
with open(LOG_FILE, 'ab') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
if not log_exits:
spamwriter.writerow(['length', 'time'])
spamwriter.writerow([len(txtin), time_lapse])
print 'Log update at: ',LOG_FILE
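# Illustrative (added): after a few runs log.csv accumulates rows such as
#   length,time
#   40,0.0001
# where `length` is len(txtin) and `time` is the measured encoding duration.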
| gpl-2.0 | -3,842,315,511,699,367,000 | 22.840909 | 137 | 0.691611 | false |
harterj/moose | modules/tensor_mechanics/test/tests/capped_mohr_coulomb/small_deform_hard_21.py | 12 | 1567 | #!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
def expected(ini, res, ini_x, res_x):
lo2 = 0.5 * (res_x - ini_x)
alpha = (ini - res) / 4.0 / lo2**3
beta = -3.0 * alpha * lo2**2
data = [ini_x + i*(res_x - ini_x)/100 for i in range(100)]
data = [(x, alpha * (x - ini_x - lo2)**3 + beta * (x - ini_x - lo2) + (ini + res) / 2.0) for x in data]
return list(zip(*data))
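# Added sanity note on the cubic above: at x = ini_x the term is
# alpha*(-lo2)**3 + beta*(-lo2) + (ini + res)/2 = (ini - res)/2 + (ini + res)/2
# = ini, and by the same algebra it equals `res` at x = res_x; the slope
# 3*alpha*u**2 + beta vanishes at u = +/-lo2, so the hardening curve joins the
# two cohesion plateaus smoothly.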
def moose(fn):
sinphi = np.sin(30.0 * np.pi / 180.0)
cosphi = np.cos(30.0 * np.pi / 180.0)
f = open(fn)
data = [list(map(float, line.strip().split(","))) for line in f.readlines()[4:-1]]
f.close()
intnl = [d[2] for d in data]
coh = [(0.5 * (d[5] - d[7]) + 0.5 * (d[5] + d[7]) * sinphi) / cosphi for d in data]
return (intnl, coh)
plt.figure()
expect21 = expected(10.0, 20.0, 0.0, 5E-6)
m21 = moose("gold/small_deform_hard21.csv")
plt.plot(expect21[0], expect21[1], 'k-', linewidth = 3.0, label = 'expected')
plt.plot(m21[0], m21[1], 'k^', label = 'MOOSE')
plt.legend(loc = 'lower right')
plt.xlabel("internal parameter")
plt.ylabel("Cohesion")
plt.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
plt.title("Cohesion hardening")
plt.savefig("figures/small_deform_hard_21.eps")
sys.exit(0)
| lgpl-2.1 | 2,339,575,841,629,909,000 | 32.340426 | 107 | 0.620932 | false |