repo_name (string, 5-100 chars) | path (string, 4-299 chars) | copies (string, 990 classes) | size (string, 4-7 chars) | content (string, 666-1.03M chars) | license (string, 15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
atyenoria/seafile | scripts/upgrade/db_update_helper.py | 18 | 10798 | #coding: UTF-8
import sys
import os
import ConfigParser
import glob
HAS_MYSQLDB = True
try:
import MySQLdb
except ImportError:
HAS_MYSQLDB = False
HAS_SQLITE3 = True
try:
import sqlite3
except ImportError:
HAS_SQLITE3 = False
class EnvManager(object):
def __init__(self):
self.upgrade_dir = os.path.dirname(__file__)
self.install_path = os.path.dirname(self.upgrade_dir)
self.top_dir = os.path.dirname(self.install_path)
self.ccnet_dir = os.environ['CCNET_CONF_DIR']
self.seafile_dir = os.environ['SEAFILE_CONF_DIR']
env_mgr = EnvManager()
class Utils(object):
@staticmethod
def highlight(content, is_error=False):
'''Add ANSI color to content to get it highlighted on terminal'''
if is_error:
return '\x1b[1;31m%s\x1b[m' % content
else:
return '\x1b[1;32m%s\x1b[m' % content
@staticmethod
def info(msg):
print Utils.highlight('[INFO] ') + msg
@staticmethod
def error(msg):
print Utils.highlight('[ERROR] ') + msg
sys.exit(1)
@staticmethod
def read_config(config_path, defaults):
cp = ConfigParser.ConfigParser(defaults)
cp.read(config_path)
return cp
class MySQLDBInfo(object):
def __init__(self, host, port, username, password, db, unix_socket=None):
self.host = host
self.port = port
self.username = username
self.password = password
self.db = db
self.unix_socket = unix_socket
class DBUpdater(object):
def __init__(self, version, name):
self.sql_dir = os.path.join(env_mgr.upgrade_dir, 'sql', version, name)
@staticmethod
def get_instance(version):
'''Detect whether we are using mysql or sqlite3'''
ccnet_db_info = DBUpdater.get_ccnet_mysql_info()
seafile_db_info = DBUpdater.get_seafile_mysql_info()
seahub_db_info = DBUpdater.get_seahub_mysql_info()
if ccnet_db_info and seafile_db_info and seahub_db_info:
Utils.info('You are using MySQL')
if not HAS_MYSQLDB:
Utils.error('Python MySQLdb module is not found')
updater = MySQLDBUpdater(version, ccnet_db_info, seafile_db_info, seahub_db_info)
elif (ccnet_db_info is None) and (seafile_db_info is None) and (seahub_db_info is None):
Utils.info('You are using SQLite3')
if not HAS_SQLITE3:
Utils.error('Python sqlite3 module is not found')
updater = SQLiteDBUpdater(version)
else:
def to_db_string(info):
if info is None:
return 'SQLite3'
else:
return 'MySQL'
Utils.error('Error:\n ccnet is using %s\n seafile is using %s\n seahub is using %s\n'
% (to_db_string(ccnet_db_info),
to_db_string(seafile_db_info),
to_db_string(seahub_db_info)))
return updater
def update_db(self):
ccnet_sql = os.path.join(self.sql_dir, 'ccnet.sql')
seafile_sql = os.path.join(self.sql_dir, 'seafile.sql')
seahub_sql = os.path.join(self.sql_dir, 'seahub.sql')
if os.path.exists(ccnet_sql):
Utils.info('updating ccnet database...')
self.update_ccnet_sql(ccnet_sql)
if os.path.exists(seafile_sql):
Utils.info('updating seafile database...')
self.update_seafile_sql(seafile_sql)
if os.path.exists(seahub_sql):
Utils.info('updating seahub database...')
self.update_seahub_sql(seahub_sql)
@staticmethod
def get_ccnet_mysql_info():
ccnet_conf = os.path.join(env_mgr.ccnet_dir, 'ccnet.conf')
defaults = {
'HOST': '127.0.0.1',
'PORT': '3306',
'UNIX_SOCKET': '',
}
config = Utils.read_config(ccnet_conf, defaults)
db_section = 'Database'
if not config.has_section(db_section):
return None
type = config.get(db_section, 'ENGINE')
if type != 'mysql':
return None
try:
host = config.get(db_section, 'HOST')
port = config.getint(db_section, 'PORT')
username = config.get(db_section, 'USER')
password = config.get(db_section, 'PASSWD')
db = config.get(db_section, 'DB')
unix_socket = config.get(db_section, 'UNIX_SOCKET')
except ConfigParser.NoOptionError, e:
Utils.error('Database config in ccnet.conf is invalid: %s' % e)
info = MySQLDBInfo(host, port, username, password, db, unix_socket)
return info
@staticmethod
def get_seafile_mysql_info():
seafile_conf = os.path.join(env_mgr.seafile_dir, 'seafile.conf')
defaults = {
'HOST': '127.0.0.1',
'PORT': '3306',
'UNIX_SOCKET': '',
}
config = Utils.read_config(seafile_conf, defaults)
db_section = 'database'
if not config.has_section(db_section):
return None
type = config.get(db_section, 'type')
if type != 'mysql':
return None
try:
host = config.get(db_section, 'host')
port = config.getint(db_section, 'port')
username = config.get(db_section, 'user')
password = config.get(db_section, 'password')
db = config.get(db_section, 'db_name')
unix_socket = config.get(db_section, 'unix_socket')
except ConfigParser.NoOptionError, e:
Utils.error('Database config in seafile.conf is invalid: %s' % e)
info = MySQLDBInfo(host, port, username, password, db, unix_socket)
return info
@staticmethod
def get_seahub_mysql_info():
sys.path.insert(0, env_mgr.top_dir)
try:
import seahub_settings # pylint: disable=F0401
except ImportError, e:
Utils.error('Failed to import seahub_settings.py: %s' % e)
if not hasattr(seahub_settings, 'DATABASES'):
return None
try:
d = seahub_settings.DATABASES['default']
if d['ENGINE'] != 'django.db.backends.mysql':
return None
host = d.get('HOST', '127.0.0.1')
port = int(d.get('PORT', 3306))
username = d['USER']
password = d['PASSWORD']
db = d['NAME']
unix_socket = host if host.startswith('/') else None
        except KeyError, e:
            Utils.error('Database config in seahub_settings.py is invalid: %s' % e)
info = MySQLDBInfo(host, port, username, password, db, unix_socket)
return info
def update_ccnet_sql(self, ccnet_sql):
raise NotImplementedError
def update_seafile_sql(self, seafile_sql):
raise NotImplementedError
def update_seahub_sql(self, seahub_sql):
raise NotImplementedError
class CcnetSQLiteDB(object):
def __init__(self, ccnet_dir):
self.ccnet_dir = ccnet_dir
def get_db(self, dbname):
dbs = (
'ccnet.db',
'GroupMgr/groupmgr.db',
'misc/config.db',
'OrgMgr/orgmgr.db',
)
for db in dbs:
if os.path.splitext(os.path.basename(db))[0] == dbname:
return os.path.join(self.ccnet_dir, db)
class SQLiteDBUpdater(DBUpdater):
def __init__(self, version):
DBUpdater.__init__(self, version, 'sqlite3')
self.ccnet_db = CcnetSQLiteDB(env_mgr.ccnet_dir)
self.seafile_db = os.path.join(env_mgr.seafile_dir, 'seafile.db')
self.seahub_db = os.path.join(env_mgr.top_dir, 'seahub.db')
def update_db(self):
super(SQLiteDBUpdater, self).update_db()
for sql_path in glob.glob(os.path.join(self.sql_dir, 'ccnet', '*.sql')):
self.update_ccnet_sql(sql_path)
def apply_sqls(self, db_path, sql_path):
with open(sql_path, 'r') as fp:
lines = fp.read().split(';')
with sqlite3.connect(db_path) as conn:
for line in lines:
line = line.strip()
if not line:
continue
else:
conn.execute(line)
def update_ccnet_sql(self, sql_path):
dbname = os.path.splitext(os.path.basename(sql_path))[0]
self.apply_sqls(self.ccnet_db.get_db(dbname), sql_path)
def update_seafile_sql(self, sql_path):
self.apply_sqls(self.seafile_db, sql_path)
def update_seahub_sql(self, sql_path):
self.apply_sqls(self.seahub_db, sql_path)
class MySQLDBUpdater(DBUpdater):
def __init__(self, version, ccnet_db_info, seafile_db_info, seahub_db_info):
DBUpdater.__init__(self, version, 'mysql')
self.ccnet_db_info = ccnet_db_info
self.seafile_db_info = seafile_db_info
self.seahub_db_info = seahub_db_info
def update_ccnet_sql(self, ccnet_sql):
self.apply_sqls(self.ccnet_db_info, ccnet_sql)
def update_seafile_sql(self, seafile_sql):
self.apply_sqls(self.seafile_db_info, seafile_sql)
def update_seahub_sql(self, seahub_sql):
self.apply_sqls(self.seahub_db_info, seahub_sql)
def get_conn(self, info):
kw = dict(
user=info.username,
passwd=info.password,
db=info.db,
)
if info.unix_socket:
kw['unix_socket'] = info.unix_socket
else:
kw['host'] = info.host
kw['port'] = info.port
try:
conn = MySQLdb.connect(**kw)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
msg = str(e.args[1])
else:
msg = str(e)
Utils.error('Failed to connect to mysql database %s: %s' % (info.db, msg))
return conn
def execute_sql(self, conn, sql):
cursor = conn.cursor()
try:
cursor.execute(sql)
conn.commit()
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
msg = str(e.args[1])
else:
msg = str(e)
Utils.error('Failed to execute sql: %s' % msg)
def apply_sqls(self, info, sql_path):
with open(sql_path, 'r') as fp:
lines = fp.read().split(';')
conn = self.get_conn(info)
for line in lines:
line = line.strip()
if not line:
continue
else:
self.execute_sql(conn, line)
def main():
version = sys.argv[1]
db_updater = DBUpdater.get_instance(version)
db_updater.update_db()
return 0
if __name__ == '__main__':
main()
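# Usage sketch (assumed paths; the script reads these environment variables via
# EnvManager above and picks SQL scripts from sql/<version>/<engine>):
#
#   export CCNET_CONF_DIR=/opt/seafile/ccnet
#   export SEAFILE_CONF_DIR=/opt/seafile/seafile-data
#   python db_update_helper.py 4.2.0    # sys.argv[1] selects the version dir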
| gpl-2.0 | 4,865,397,214,910,636,000 | 30.573099 | 97 | 0.562697 | false |
OriHoch/Open-Knesset | persons/models.py | 3 | 8989 | from django.core.urlresolvers import reverse
from itertools import chain
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.forms.fields import IntegerField
from django.dispatch import receiver
from django.db.models.signals import post_save
from mks.models import Member, GENDER_CHOICES
from links.models import Link
from .managers import PersonManager
from django.contrib.auth.models import User
class Title(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class PersonAlias(models.Model):
name = models.CharField(max_length=64)
person = models.ForeignKey('Person', related_name='aliases')
def __unicode__(self):
return "%s -> %s" % (self.name, self.person.name)
GENDER_CHOICES = (
(u'M', _('Male')),
(u'F', _('Female')),
)
class Person(models.Model):
name = models.CharField(max_length=64)
mk = models.ForeignKey('mks.Member', blank=True, null=True, related_name='person')
titles = models.ManyToManyField(Title, blank=True, null=True, related_name='persons')
# TODO: change to an ImageField
img_url = models.URLField(blank=True)
phone = models.CharField(blank=True, null=True, max_length=20)
fax = models.CharField(blank=True, null=True, max_length=20)
email = models.EmailField(blank=True, null=True)
family_status = models.CharField(blank=True, null=True, max_length=10)
number_of_children = models.IntegerField(blank=True, null=True)
date_of_birth = models.DateField(blank=True, null=True)
place_of_birth = models.CharField(blank=True, null=True, max_length=100)
date_of_death = models.DateField(blank=True, null=True)
year_of_aliyah = models.IntegerField(blank=True, null=True)
place_of_residence = models.CharField(blank=True, null=True, max_length=100,
        help_text=_('an accurate place of residence (for example, an address)'))
area_of_residence = models.CharField(blank=True, null=True, max_length=100,
        help_text=_('a general area of residence (for example, "the negev")'))
place_of_residence_lat = models.CharField(blank=True, null=True, max_length=16)
place_of_residence_lon = models.CharField(blank=True, null=True, max_length=16)
residence_centrality = models.IntegerField(blank=True, null=True)
residence_economy = models.IntegerField(blank=True, null=True)
gender = models.CharField(max_length=1, choices=GENDER_CHOICES, blank=True, null=True)
calendar_url = models.CharField(blank=True, null=True, max_length=1024)
calendar_sync_token = models.CharField(blank=True, null=True, max_length=1024)
user = models.ForeignKey('auth.User', blank=True, null=True)
objects = PersonManager()
def __unicode__(self):
return self.name
class Meta:
ordering = ('name',)
verbose_name = _('Person')
verbose_name_plural = _('Persons')
def get_absolute_url(self):
if self.mk:
return self.mk.get_absolute_url()
else:
return reverse('person-detail', kwargs={'pk': self.id})
def number_of_meetings(self):
return self.protocol_parts.values('meeting').distinct().count()
def number_of_committees(self):
return self.protocol_parts.values('meeting__committee').distinct().count()
    def copy(self, mk):
        """ copy relevant data from the given mk to self """
        roles = mk.roles.all()
for role in roles:
role.pk = None
role.person = self
role.save()
links = Link.objects.for_model(mk)
for link in links:
link.pk = None
link.content_object = self
link.save()
for field in mk._meta.fields:
if field in ['id']:
continue
field_name = field.get_attname()
val = getattr(mk, field_name)
if val and hasattr(self, field_name):
# update only empty fields
if not getattr(self, field_name):
setattr(self, field_name, val)
def merge(self, other):
"""make other into an alias of self"""
roles = other.roles.all()
links = Link.objects.for_model(other)
parts = other.protocol_parts.all()
external_info = other.external_info.all()
external_relation = other.external_relation.all()
if other.mk:
if self.mk and self.mk != other.mk:
# something is wrong, we are trying to merge two persons with non matching MKs
raise ValidationError('Trying to merge persons with non matching MKs')
self.mk = other.mk
roles = chain(roles, other.mk.roles.all())
links = chain(links, other.mk.links.all())
for title in other.titles.all():
self.titles.add(title)
for role in roles:
role.person = self
role.save()
for link in links:
link.content_object = self
link.save()
for part in parts:
part.speaker = self
part.save()
for i in external_info:
i.person = self
i.save()
for i in external_relation:
i.person = self
i.save()
# copy all the model's fields
for field in self._meta.fields:
if field in ['id']:
continue
field_name = field.get_attname()
val = getattr(other, field_name)
if val and not getattr(self, field_name):
setattr(self, field_name, val)
if self.name != other.name:
(pa, created) = PersonAlias.objects.get_or_create(name=other.name, person=self)
other.delete()
self.save()
def create_user(self, username=None, password=None):
"""Create an Auth User for this person - so she can login to django"""
name_split = self.name.split(' ')
user = User(
username=username if username is not None else self.email.split('@')[0],
first_name=name_split[0],
last_name=' '.join(name_split[1:]) if len(name_split) > 1 else '',
email=self.email
)
if password is not None:
user.set_password(password)
user.save()
self.user = user
self.save()
return user
def add_alias(self, alias):
PersonAlias.objects.get_or_create(name=alias, person=self)
def del_alias(self, alias):
PersonAlias.objects.filter(name=alias, person=self).delete()
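# Usage sketch (hypothetical records): merging a duplicate into a canonical
# person moves its roles, links, protocol parts and external data across,
# records the old name as a PersonAlias, and deletes the duplicate:
#
#   canonical = Person.objects.get(name=u'Jane Doe')
#   duplicate = Person.objects.get(name=u'J. Doe')
#   canonical.merge(duplicate)  # 'J. Doe' becomes an alias of the canonical row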
@receiver(post_save, sender=Member)
def member_post_save(sender, **kwargs):
instance = kwargs['instance']
person = Person.objects.get_or_create(mk=instance)[0]
for field in instance._meta.fields:
if field.name != 'id' and hasattr(person, field.name):
setattr(person, field.name, getattr(instance, field.name))
person.save()
class Role(models.Model):
start_date = models.DateField(null=True)
end_date = models.DateField(blank=True, null=True)
text = models.CharField(blank=True, null=True, max_length=1024)
org = models.TextField(blank=True, null=True)
person = models.ForeignKey(Person, related_name='roles')
def __unicode__(self):
        return _('{person} served as {text} in {org} from {start_date} to {end_date}').format(
person=self.person, text=self.text, org=self.org,
start_date=self.start_date, end_date=self.end_date)
class ProcessedProtocolPart(models.Model):
"""This model is used to keep track of protocol parts already searched for creating persons.
There should be only 1 record in it, with the max id of a protocol part searched"""
protocol_part_id = models.IntegerField()
class ExternalData(models.Model):
    ''' an abstract class for external data metadata '''
source = models.CharField(max_length=64)
created = models.DateTimeField(auto_now_add=True, editable=False)
updated = models.DateTimeField(auto_now=True, editable=False)
class Meta:
abstract = True
class ExternalInfo(ExternalData):
''' a model for a text key and its value tied to a person '''
person = models.ForeignKey(Person, related_name='external_info')
key = models.CharField(max_length=64)
value = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"{} - {}: {}".format(self.person, self.key, self.value)
class ExternalRelation(ExternalData):
''' a relationship between two persons '''
person = models.ForeignKey(Person, related_name='external_relation')
relationship = models.CharField(max_length=64)
with_person = models.ForeignKey(Person, null=True, blank=True)
def __unicode__(self):
return u"{} - {}: {}".format(self.person, self.relationship,
self.with_person)
| bsd-3-clause | 2,833,651,343,132,175,400 | 37.088983 | 113 | 0.627656 | false |
ellio167/lammps | examples/SPIN/test_problems/validation_damped_precession/plot_precession.py | 9 | 1126 | #!/usr/bin/env python3
import sys

import numpy as np
import matplotlib.pyplot as plt
argv = sys.argv
if len(argv) != 3:
print("Syntax: ./plot_precession.py res_lammps.dat res_llg.dat")
sys.exit()
lammps_file = sys.argv[1]
llg_file = sys.argv[2]
t_lmp,Sx_lmp,Sy_lmp,Sz_lmp,en_lmp = np.loadtxt(lammps_file,
skiprows=0, usecols=(1,2,3,4,5),unpack=True)
t_llg,Sx_llg,Sy_llg,Sz_llg,en_llg = np.loadtxt(llg_file, skiprows=0, usecols=(0,1,2,3,4),unpack=True)
plt.figure()
plt.subplot(411)
plt.ylabel('Sx')
plt.plot(t_lmp, Sx_lmp, 'b-', label='LAMMPS')
plt.plot(t_llg, Sx_llg, 'r--', label='LLG')
plt.subplot(412)
plt.ylabel('Sy')
plt.plot(t_lmp, Sy_lmp, 'b-', label='LAMMPS')
plt.plot(t_llg, Sy_llg, 'r--', label='LLG')
plt.subplot(413)
plt.ylabel('Sz')
plt.plot(t_lmp, Sz_lmp, 'b-', label='LAMMPS')
plt.plot(t_llg, Sz_llg, 'r--', label='LLG')
plt.subplot(414)
plt.ylabel('En (eV)')
plt.plot(t_lmp, en_lmp, 'b-', label='LAMMPS')
plt.plot(t_llg, en_llg, 'r--', label='LLG')
plt.xlabel('time (in ps)')
plt.legend()
plt.show()
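# Expected input layout (whitespace-separated numeric columns, matching the
# np.loadtxt calls above):
#   res_lammps.dat: column 1 = time (ps), columns 2-4 = Sx, Sy, Sz, column 5 = energy
#   res_llg.dat:    column 0 = time (ps), columns 1-3 = Sx, Sy, Sz, column 4 = energy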
| gpl-2.0 | -3,260,827,091,300,556,300 | 24.022222 | 101 | 0.658082 | false |
jardiacaj/finem_imperii | turn/building_production.py | 1 | 2510 | from world.models.buildings import Building
from world.models.geography import World
field_input_months = (0, 1, 2, 3, 4, 5, 8, 9, 10, 11)
field_output_months = (4, 5, 6, 7, 8)
field_output_multipliers = {4: 0.5, 5: 1, 6: 0.5, 7: 1, 8: 0.5}
field_production_reset_month = 8
def worldwide_building_production(world: World):
for building in Building.objects.filter(
settlement__tile__world=world):
do_building_production(building)
def do_building_production(building: Building):
workers = building.worker
ideal_workers = min(building.max_ideal_workers(), workers.count())
surplus_workers = max(
0,
workers.count() - building.max_ideal_workers()
)
work_input = 0
if building.max_ideal_workers():
work_input += min(
(ideal_workers / building.max_ideal_workers()),
1)
if building.max_surplus_workers():
work_input += min(
(surplus_workers / building.max_surplus_workers()) * 0.5,
0.5)
if building.type == Building.GRAIN_FIELD and building.level > 0:
current_month = building.settlement.tile.world.current_turn % 12
if current_month in field_output_months and work_input > 0:
time_portion = \
field_output_multipliers[current_month] / \
sum(field_output_multipliers.values())
production_counter_remove = min(
work_input * time_portion * 1000,
building.field_production_counter
)
building.field_production_counter -= production_counter_remove
building.save()
bushel_output = (
building.quantity
* production_counter_remove / 1000
* 2.4
)
building.settlement.get_default_granary().add_bushels(
round(bushel_output)
)
if current_month == field_production_reset_month:
building.field_production_counter = 0
building.save()
if current_month in field_input_months and work_input > 0:
time_portion = 1 / len(field_input_months)
production_counter_add = work_input * time_portion * 1000
building.field_production_counter += production_counter_add
building.save()
if building.type == Building.GUILD:
building.field_production_counter *= 0.9
building.field_production_counter += workers.count()
building.save()
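# Worked example (assumed values): with full ideal staffing (work_input == 1.0)
# in month 5, time_portion = 1 / sum(field_output_multipliers.values()) = 1/3.5,
# so at most 1.0 * (1/3.5) * 1000 ~= 285.7 counter units are consumed, and the
# granary receives round(quantity * 285.7/1000 * 2.4) ~= 0.69 * quantity bushels.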
| agpl-3.0 | -6,371,737,180,009,782,000 | 35.376812 | 74 | 0.598406 | false |
gkotton/neutron | neutron/tests/unit/vmware/extensions/test_qosqueues.py | 20 | 13063 | # Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from oslo.config import cfg
import webob.exc
from neutron import context
from neutron.plugins.vmware.dbexts import qos_db
from neutron.plugins.vmware.extensions import qos as ext_qos
from neutron.plugins.vmware import nsxlib
from neutron.tests.unit import test_extensions
from neutron.tests.unit import vmware
from neutron.tests.unit.vmware import test_nsx_plugin
class QoSTestExtensionManager(object):
def get_resources(self):
return ext_qos.Qos.get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []
class TestQoSQueue(test_nsx_plugin.NsxPluginV2TestCase):
def setUp(self, plugin=None):
cfg.CONF.set_override('api_extensions_path', vmware.NSXEXT_PATH)
super(TestQoSQueue, self).setUp()
ext_mgr = QoSTestExtensionManager()
self.ext_api = test_extensions.setup_extensions_middleware(ext_mgr)
def _create_qos_queue(self, fmt, body, **kwargs):
qos_queue = self.new_create_request('qos-queues', body)
if (kwargs.get('set_context') and 'tenant_id' in kwargs):
# create a specific auth context for this request
qos_queue.environ['neutron.context'] = context.Context(
'', kwargs['tenant_id'])
return qos_queue.get_response(self.ext_api)
@contextlib.contextmanager
def qos_queue(self, name='foo', min='0', max='10',
qos_marking=None, dscp='0', default=None, do_delete=True):
body = {'qos_queue': {'tenant_id': 'tenant',
'name': name,
'min': min,
'max': max}}
if qos_marking:
body['qos_queue']['qos_marking'] = qos_marking
if dscp:
body['qos_queue']['dscp'] = dscp
if default:
body['qos_queue']['default'] = default
res = self._create_qos_queue('json', body)
qos_queue = self.deserialize('json', res)
if res.status_int >= 400:
raise webob.exc.HTTPClientError(code=res.status_int)
yield qos_queue
if do_delete:
self._delete('qos-queues',
qos_queue['qos_queue']['id'])
def test_create_qos_queue(self):
with self.qos_queue(name='fake_lqueue', min=34, max=44,
qos_marking='untrusted', default=False) as q:
self.assertEqual(q['qos_queue']['name'], 'fake_lqueue')
self.assertEqual(q['qos_queue']['min'], 34)
self.assertEqual(q['qos_queue']['max'], 44)
self.assertEqual(q['qos_queue']['qos_marking'], 'untrusted')
self.assertFalse(q['qos_queue']['default'])
def test_create_trusted_qos_queue(self):
with mock.patch.object(qos_db.LOG, 'info') as log:
with mock.patch.object(nsxlib, 'do_request',
return_value={"uuid": "fake_queue"}):
with self.qos_queue(name='fake_lqueue', min=34, max=44,
qos_marking='trusted', default=False) as q:
self.assertIsNone(q['qos_queue']['dscp'])
self.assertTrue(log.called)
def test_create_qos_queue_name_exceeds_40_chars(self):
name = 'this_is_a_queue_whose_name_is_longer_than_40_chars'
with self.qos_queue(name=name) as queue:
# Assert Neutron name is not truncated
self.assertEqual(queue['qos_queue']['name'], name)
def test_create_qos_queue_default(self):
with self.qos_queue(default=True) as q:
self.assertTrue(q['qos_queue']['default'])
def test_create_qos_queue_two_default_queues_fail(self):
with self.qos_queue(default=True):
body = {'qos_queue': {'tenant_id': 'tenant',
'name': 'second_default_queue',
'default': True}}
res = self._create_qos_queue('json', body)
self.assertEqual(res.status_int, 409)
def test_create_port_with_queue(self):
with self.qos_queue(default=True) as q1:
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
self.assertEqual(net1['network'][ext_qos.QUEUE],
q1['qos_queue']['id'])
device_id = "00fff4d0-e4a8-4a3a-8906-4c4cdafb59f1"
with self.port(device_id=device_id) as p:
self.assertEqual(len(p['port'][ext_qos.QUEUE]), 36)
def test_create_shared_queue_networks(self):
with self.qos_queue(default=True, do_delete=False) as q1:
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
self.assertEqual(net1['network'][ext_qos.QUEUE],
q1['qos_queue']['id'])
res = self._create_network('json', 'net2', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net2 = self.deserialize('json', res)
self.assertEqual(net1['network'][ext_qos.QUEUE],
q1['qos_queue']['id'])
device_id = "00fff4d0-e4a8-4a3a-8906-4c4cdafb59f1"
res = self._create_port('json', net1['network']['id'],
device_id=device_id)
port1 = self.deserialize('json', res)
res = self._create_port('json', net2['network']['id'],
device_id=device_id)
port2 = self.deserialize('json', res)
self.assertEqual(port1['port'][ext_qos.QUEUE],
port2['port'][ext_qos.QUEUE])
self._delete('ports', port1['port']['id'])
self._delete('ports', port2['port']['id'])
def test_remove_queue_in_use_fail(self):
with self.qos_queue(do_delete=False) as q1:
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
device_id = "00fff4d0-e4a8-4a3a-8906-4c4cdafb59f1"
res = self._create_port('json', net1['network']['id'],
device_id=device_id)
port = self.deserialize('json', res)
self._delete('qos-queues', port['port'][ext_qos.QUEUE], 409)
def test_update_network_new_queue(self):
with self.qos_queue() as q1:
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
with self.qos_queue() as new_q:
data = {'network': {ext_qos.QUEUE: new_q['qos_queue']['id']}}
req = self.new_update_request('networks', data,
net1['network']['id'])
res = req.get_response(self.api)
net1 = self.deserialize('json', res)
self.assertEqual(net1['network'][ext_qos.QUEUE],
new_q['qos_queue']['id'])
def test_update_port_adding_device_id(self):
with self.qos_queue(do_delete=False) as q1:
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
device_id = "00fff4d0-e4a8-4a3a-8906-4c4cdafb59f1"
res = self._create_port('json', net1['network']['id'])
port = self.deserialize('json', res)
self.assertIsNone(port['port'][ext_qos.QUEUE])
data = {'port': {'device_id': device_id}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = req.get_response(self.api)
port = self.deserialize('json', res)
self.assertEqual(len(port['port'][ext_qos.QUEUE]), 36)
def test_get_port_with_qos_not_admin(self):
body = {'qos_queue': {'tenant_id': 'not_admin',
'name': 'foo', 'min': 20, 'max': 20}}
res = self._create_qos_queue('json', body, tenant_id='not_admin')
q1 = self.deserialize('json', res)
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE, 'tenant_id',),
queue_id=q1['qos_queue']['id'],
tenant_id="not_admin")
net1 = self.deserialize('json', res)
self.assertEqual(len(net1['network'][ext_qos.QUEUE]), 36)
res = self._create_port('json', net1['network']['id'],
tenant_id='not_admin', set_context=True)
port = self.deserialize('json', res)
self.assertNotIn(ext_qos.QUEUE, port['port'])
def test_dscp_value_out_of_range(self):
body = {'qos_queue': {'tenant_id': 'admin', 'dscp': '64',
'name': 'foo', 'min': 20, 'max': 20}}
res = self._create_qos_queue('json', body)
self.assertEqual(res.status_int, 400)
def test_dscp_value_with_qos_marking_trusted_returns_400(self):
body = {'qos_queue': {'tenant_id': 'admin', 'dscp': '1',
'qos_marking': 'trusted',
'name': 'foo', 'min': 20, 'max': 20}}
res = self._create_qos_queue('json', body)
self.assertEqual(res.status_int, 400)
def test_non_admin_cannot_create_queue(self):
body = {'qos_queue': {'tenant_id': 'not_admin',
'name': 'foo', 'min': 20, 'max': 20}}
res = self._create_qos_queue('json', body, tenant_id='not_admin',
set_context=True)
self.assertEqual(res.status_int, 403)
def test_update_port_non_admin_does_not_show_queue_id(self):
body = {'qos_queue': {'tenant_id': 'not_admin',
'name': 'foo', 'min': 20, 'max': 20}}
res = self._create_qos_queue('json', body, tenant_id='not_admin')
q1 = self.deserialize('json', res)
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
tenant_id='not_admin',
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
res = self._create_port('json', net1['network']['id'],
tenant_id='not_admin', set_context=True)
port = self.deserialize('json', res)
device_id = "00fff4d0-e4a8-4a3a-8906-4c4cdafb59f1"
data = {'port': {'device_id': device_id}}
neutron_context = context.Context('', 'not_admin')
port = self._update('ports', port['port']['id'], data,
neutron_context=neutron_context)
self.assertNotIn(ext_qos.QUEUE, port['port'])
def test_rxtx_factor(self):
with self.qos_queue(max=10) as q1:
res = self._create_network('json', 'net1', True,
arg_list=(ext_qos.QUEUE,),
queue_id=q1['qos_queue']['id'])
net1 = self.deserialize('json', res)
res = self._create_port('json', net1['network']['id'],
arg_list=(ext_qos.RXTX_FACTOR,),
rxtx_factor=2, device_id='1')
port = self.deserialize('json', res)
req = self.new_show_request('qos-queues',
port['port'][ext_qos.QUEUE])
res = req.get_response(self.ext_api)
queue = self.deserialize('json', res)
self.assertEqual(queue['qos_queue']['max'], 20)
| apache-2.0 | 4,460,923,737,718,054,400 | 45.653571 | 79 | 0.5191 | false |
basmot/futsal_management | futsal_management/wsgi.py | 1 | 1159 | ##############################################################################
#
# Copyright 2015-2016 Bastien Mottiaux
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
"""
WSGI config for futsal_management project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "futsal_management.settings")
application = get_wsgi_application()
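# With this module in place any WSGI server can serve the project, e.g. a
# hypothetical gunicorn invocation:
#
#   gunicorn futsal_management.wsgi:application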
| apache-2.0 | 2,817,613,756,585,528,000 | 33.088235 | 78 | 0.661777 | false |
okuta/chainer | chainer/distributions/cauchy.py | 2 | 3110 | import warnings
import numpy
import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.math import exponential
from chainer.functions.math import trigonometric
from chainer.utils import cache
def _cauchy_icdf(x):
x = chainer.as_variable(x)
h = (x - 0.5) * numpy.pi
y = chainer.functions.tan(h)
return y
class Cauchy(distribution.Distribution):
"""Cauchy Distribution.
The probability density function of the distribution is expressed as
.. math::
p(x;x_0,\\gamma) = \\frac{1}{\\pi}\\frac{\\gamma}{(x-x_0)^2+\\gamma^2}
Args:
loc(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
            distribution representing the location :math:`x_0`.
scale(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution representing the scale :math:`\\gamma`.
"""
def __init__(self, loc, scale):
super(Cauchy, self).__init__()
self.__loc = loc
self.__scale = scale
@cache.cached_property
def loc(self):
return chainer.as_variable(self.__loc)
@cache.cached_property
def scale(self):
return chainer.as_variable(self.__scale)
@property
def batch_shape(self):
return self.loc.shape
def cdf(self, x):
return (
(1 / numpy.pi
* trigonometric.arctan((x - self.loc) / self.scale))
+ 0.5)
@cache.cached_property
def entropy(self):
return exponential.log(4 * numpy.pi * self.scale)
@property
def event_shape(self):
return ()
def icdf(self, x):
return self.loc + self.scale * _cauchy_icdf(x)
@property
def _is_gpu(self):
return isinstance(self.loc.data, cuda.ndarray)
def log_prob(self, x):
return (
- numpy.log(numpy.pi)
+ exponential.log(self.scale)
- exponential.log((x - self.loc)**2 + self.scale**2))
@cache.cached_property
def mean(self):
warnings.warn('Mean of the cauchy distribution is undefined.',
RuntimeWarning)
xp = chainer.backend.get_array_module(self.loc)
return chainer.as_variable(xp.full_like(self.loc.data, xp.nan))
@property
def params(self):
return {'loc': self.loc, 'scale': self.scale}
def sample_n(self, n):
xp = chainer.backend.get_array_module(self.loc)
if xp is cuda.cupy:
eps = xp.random.standard_cauchy(
(n,)+self.loc.shape, dtype=self.loc.dtype)
else:
eps = xp.random.standard_cauchy(
(n,)+self.loc.shape).astype(self.loc.dtype)
noise = self.scale * eps + self.loc
return noise
@property
def support(self):
return 'real'
@cache.cached_property
def variance(self):
warnings.warn('Variance of the cauchy distribution is undefined.',
RuntimeWarning)
xp = chainer.backend.get_array_module(self.loc)
return chainer.as_variable(xp.full_like(self.loc.data, xp.nan))
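# Minimal usage sketch (assumed float32 inputs; parameters broadcast over the
# batch shape and sample_n prepends the sample dimension):
#
#   import numpy as np
#   import chainer.distributions as D
#   dist = D.Cauchy(loc=np.zeros(3, np.float32), scale=np.ones(3, np.float32))
#   xs = dist.sample_n(5)      # Variable with shape (5, 3)
#   lp = dist.log_prob(xs)     # elementwise log-density, shape (5, 3)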
| mit | -7,719,349,248,181,158,000 | 26.522124 | 78 | 0.599035 | false |
coxmediagroup/googleads-python-lib | examples/dfp/v201411/workflow_request_service/get_workflow_approval_requests_by_statement.py | 4 | 2211 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets workflow approval requests.
Workflow approval requests must be approved or rejected for a workflow to
finish.
Tags: WorkflowRequestService.getWorkflowRequestsByStatement
"""
__author__ = 'Nicholas Chen'
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
workflow_request_service = client.GetService('WorkflowRequestService',
version='v201411')
# Create statement object to select all workflow approval requests.
values = [{
'key': 'type',
'value': {
'xsi_type': 'TextValue',
'value': 'WORKFLOW_APPROVAL_REQUEST'
}
}]
query = 'WHERE type = :type'
statement = dfp.FilterStatement(query, values)
# Get proposals by statement.
while True:
response = workflow_request_service.getWorkflowRequestsByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for workflow_request in response['results']:
print ('Workflow approval request with id \'%s\' for %s with id \'%s\' '
'was found.' % (workflow_request['id'],
workflow_request['entityType'],
workflow_request['entityId']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
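# Paging note: dfp.FilterStatement starts at offset 0 with a page size of
# dfp.SUGGESTED_PAGE_LIMIT; the loop above advances the offset until a page
# comes back without a 'results' key.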
| apache-2.0 | -655,117,663,579,758,700 | 31.043478 | 80 | 0.669833 | false |
partofthething/home-assistant | homeassistant/components/august/__init__.py | 1 | 14632 | """Support for August devices."""
import asyncio
import itertools
import logging
from aiohttp import ClientError, ClientResponseError
from august.authenticator import ValidationResult
from august.exceptions import AugustApiAIOHTTPError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_PASSWORD,
CONF_TIMEOUT,
CONF_USERNAME,
HTTP_UNAUTHORIZED,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
import homeassistant.helpers.config_validation as cv
from .activity import ActivityStream
from .const import (
CONF_ACCESS_TOKEN_CACHE_FILE,
CONF_INSTALL_ID,
CONF_LOGIN_METHOD,
DATA_AUGUST,
DEFAULT_AUGUST_CONFIG_FILE,
DEFAULT_NAME,
DEFAULT_TIMEOUT,
DOMAIN,
LOGIN_METHODS,
MIN_TIME_BETWEEN_DETAIL_UPDATES,
PLATFORMS,
VERIFICATION_CODE_KEY,
)
from .exceptions import CannotConnect, InvalidAuth, RequireValidation
from .gateway import AugustGateway
from .subscriber import AugustSubscriberMixin
_LOGGER = logging.getLogger(__name__)
TWO_FA_REVALIDATE = "verify_configurator"
CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_LOGIN_METHOD): vol.In(LOGIN_METHODS),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_INSTALL_ID): cv.string,
vol.Optional(
CONF_TIMEOUT, default=DEFAULT_TIMEOUT
): cv.positive_int,
}
)
},
),
extra=vol.ALLOW_EXTRA,
)
async def async_request_validation(hass, config_entry, august_gateway):
"""Request a new verification code from the user."""
#
# In the future this should start a new config flow
# instead of using the legacy configurator
#
_LOGGER.error("Access token is no longer valid")
configurator = hass.components.configurator
entry_id = config_entry.entry_id
async def async_august_configuration_validation_callback(data):
code = data.get(VERIFICATION_CODE_KEY)
result = await august_gateway.authenticator.async_validate_verification_code(
code
)
if result == ValidationResult.INVALID_VERIFICATION_CODE:
configurator.async_notify_errors(
hass.data[DOMAIN][entry_id][TWO_FA_REVALIDATE],
"Invalid verification code, please make sure you are using the latest code and try again.",
)
elif result == ValidationResult.VALIDATED:
return await async_setup_august(hass, config_entry, august_gateway)
return False
if TWO_FA_REVALIDATE not in hass.data[DOMAIN][entry_id]:
await august_gateway.authenticator.async_send_verification_code()
entry_data = config_entry.data
login_method = entry_data.get(CONF_LOGIN_METHOD)
username = entry_data.get(CONF_USERNAME)
hass.data[DOMAIN][entry_id][TWO_FA_REVALIDATE] = configurator.async_request_config(
f"{DEFAULT_NAME} ({username})",
async_august_configuration_validation_callback,
description=(
"August must be re-verified. "
f"Please check your {login_method} ({username}) "
"and enter the verification code below"
),
submit_caption="Verify",
fields=[
{"id": VERIFICATION_CODE_KEY, "name": "Verification code", "type": "string"}
],
)
return
async def async_setup_august(hass, config_entry, august_gateway):
"""Set up the August component."""
entry_id = config_entry.entry_id
hass.data[DOMAIN].setdefault(entry_id, {})
try:
await august_gateway.async_authenticate()
except RequireValidation:
await async_request_validation(hass, config_entry, august_gateway)
raise
# We still use the configurator to get a new 2fa code
# when needed since config_flow doesn't have a way
# to re-request if it expires
if TWO_FA_REVALIDATE in hass.data[DOMAIN][entry_id]:
hass.components.configurator.async_request_done(
hass.data[DOMAIN][entry_id].pop(TWO_FA_REVALIDATE)
)
hass.data[DOMAIN][entry_id][DATA_AUGUST] = AugustData(hass, august_gateway)
await hass.data[DOMAIN][entry_id][DATA_AUGUST].async_setup()
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the August component from YAML."""
conf = config.get(DOMAIN)
hass.data.setdefault(DOMAIN, {})
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_LOGIN_METHOD: conf.get(CONF_LOGIN_METHOD),
CONF_USERNAME: conf.get(CONF_USERNAME),
CONF_PASSWORD: conf.get(CONF_PASSWORD),
CONF_INSTALL_ID: conf.get(CONF_INSTALL_ID),
CONF_ACCESS_TOKEN_CACHE_FILE: DEFAULT_AUGUST_CONFIG_FILE,
},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up August from a config entry."""
august_gateway = AugustGateway(hass)
try:
await august_gateway.async_setup(entry.data)
return await async_setup_august(hass, entry, august_gateway)
except ClientResponseError as err:
if err.status == HTTP_UNAUTHORIZED:
_async_start_reauth(hass, entry)
return False
raise ConfigEntryNotReady from err
except InvalidAuth:
_async_start_reauth(hass, entry)
return False
except RequireValidation:
return False
except (CannotConnect, asyncio.TimeoutError) as err:
raise ConfigEntryNotReady from err
def _async_start_reauth(hass: HomeAssistant, entry: ConfigEntry):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth"},
data=entry.data,
)
)
_LOGGER.error("Password is no longer valid. Please reauthenticate")
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
class AugustData(AugustSubscriberMixin):
"""August data object."""
def __init__(self, hass, august_gateway):
"""Init August data object."""
super().__init__(hass, MIN_TIME_BETWEEN_DETAIL_UPDATES)
self._hass = hass
self._august_gateway = august_gateway
self.activity_stream = None
self._api = august_gateway.api
self._device_detail_by_id = {}
self._doorbells_by_id = {}
self._locks_by_id = {}
self._house_ids = set()
async def async_setup(self):
"""Async setup of august device data and activities."""
locks = (
await self._api.async_get_operable_locks(self._august_gateway.access_token)
or []
)
doorbells = (
await self._api.async_get_doorbells(self._august_gateway.access_token) or []
)
self._doorbells_by_id = {device.device_id: device for device in doorbells}
self._locks_by_id = {device.device_id: device for device in locks}
self._house_ids = {
device.house_id for device in itertools.chain(locks, doorbells)
}
await self._async_refresh_device_detail_by_ids(
[device.device_id for device in itertools.chain(locks, doorbells)]
)
# We remove all devices that we are missing
# detail as we cannot determine if they are usable.
# This also allows us to avoid checking for
# detail being None all over the place
self._remove_inoperative_locks()
self._remove_inoperative_doorbells()
self.activity_stream = ActivityStream(
self._hass, self._api, self._august_gateway, self._house_ids
)
await self.activity_stream.async_setup()
@property
def doorbells(self):
"""Return a list of py-august Doorbell objects."""
return self._doorbells_by_id.values()
@property
def locks(self):
"""Return a list of py-august Lock objects."""
return self._locks_by_id.values()
def get_device_detail(self, device_id):
"""Return the py-august LockDetail or DoorbellDetail object for a device."""
return self._device_detail_by_id[device_id]
async def _async_refresh(self, time):
await self._async_refresh_device_detail_by_ids(self._subscriptions.keys())
async def _async_refresh_device_detail_by_ids(self, device_ids_list):
for device_id in device_ids_list:
if device_id in self._locks_by_id:
await self._async_update_device_detail(
self._locks_by_id[device_id], self._api.async_get_lock_detail
)
# keypads are always attached to locks
if (
device_id in self._device_detail_by_id
and self._device_detail_by_id[device_id].keypad is not None
):
keypad = self._device_detail_by_id[device_id].keypad
self._device_detail_by_id[keypad.device_id] = keypad
elif device_id in self._doorbells_by_id:
await self._async_update_device_detail(
self._doorbells_by_id[device_id],
self._api.async_get_doorbell_detail,
)
_LOGGER.debug(
"async_signal_device_id_update (from detail updates): %s", device_id
)
self.async_signal_device_id_update(device_id)
async def _async_update_device_detail(self, device, api_call):
_LOGGER.debug(
"Started retrieving detail for %s (%s)",
device.device_name,
device.device_id,
)
try:
self._device_detail_by_id[device.device_id] = await api_call(
self._august_gateway.access_token, device.device_id
)
except ClientError as ex:
_LOGGER.error(
"Request error trying to retrieve %s details for %s. %s",
device.device_id,
device.device_name,
ex,
)
_LOGGER.debug(
"Completed retrieving detail for %s (%s)",
device.device_name,
device.device_id,
)
def _get_device_name(self, device_id):
"""Return doorbell or lock name as August has it stored."""
if self._locks_by_id.get(device_id):
return self._locks_by_id[device_id].device_name
if self._doorbells_by_id.get(device_id):
return self._doorbells_by_id[device_id].device_name
async def async_lock(self, device_id):
"""Lock the device."""
return await self._async_call_api_op_requires_bridge(
device_id,
self._api.async_lock_return_activities,
self._august_gateway.access_token,
device_id,
)
async def async_unlock(self, device_id):
"""Unlock the device."""
return await self._async_call_api_op_requires_bridge(
device_id,
self._api.async_unlock_return_activities,
self._august_gateway.access_token,
device_id,
)
async def _async_call_api_op_requires_bridge(
self, device_id, func, *args, **kwargs
):
"""Call an API that requires the bridge to be online and will change the device state."""
ret = None
try:
ret = await func(*args, **kwargs)
except AugustApiAIOHTTPError as err:
device_name = self._get_device_name(device_id)
if device_name is None:
device_name = f"DeviceID: {device_id}"
raise HomeAssistantError(f"{device_name}: {err}") from err
return ret
def _remove_inoperative_doorbells(self):
doorbells = list(self.doorbells)
for doorbell in doorbells:
device_id = doorbell.device_id
doorbell_is_operative = False
doorbell_detail = self._device_detail_by_id.get(device_id)
if doorbell_detail is None:
_LOGGER.info(
"The doorbell %s could not be setup because the system could not fetch details about the doorbell",
doorbell.device_name,
)
else:
doorbell_is_operative = True
if not doorbell_is_operative:
del self._doorbells_by_id[device_id]
del self._device_detail_by_id[device_id]
def _remove_inoperative_locks(self):
# Remove non-operative locks as there must
# be a bridge (August Connect) for them to
# be usable
locks = list(self.locks)
for lock in locks:
device_id = lock.device_id
lock_is_operative = False
lock_detail = self._device_detail_by_id.get(device_id)
if lock_detail is None:
_LOGGER.info(
"The lock %s could not be setup because the system could not fetch details about the lock",
lock.device_name,
)
elif lock_detail.bridge is None:
_LOGGER.info(
"The lock %s could not be setup because it does not have a bridge (Connect)",
lock.device_name,
)
elif not lock_detail.bridge.operative:
_LOGGER.info(
"The lock %s could not be setup because the bridge (Connect) is not operative",
lock.device_name,
)
else:
lock_is_operative = True
if not lock_is_operative:
del self._locks_by_id[device_id]
del self._device_detail_by_id[device_id]
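# Config entry data sketch (keys mirror the YAML import in async_setup above;
# values are illustrative assumptions, not real credentials):
#
#   {
#       CONF_LOGIN_METHOD: "email",
#       CONF_USERNAME: "user@example.com",
#       CONF_PASSWORD: "...",
#       CONF_INSTALL_ID: None,
#       CONF_ACCESS_TOKEN_CACHE_FILE: DEFAULT_AUGUST_CONFIG_FILE,
#   }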
| mit | -3,008,204,262,912,377,000 | 33.591017 | 119 | 0.598551 | false |
Daniel-CA/odoomrp-wip-public | stock_quant_name_search/tests/test_stock_quant_name_search.py | 11 | 2993 | # -*- coding: utf-8 -*-
# © 2016 Oihane Crucelaegui - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
import openerp.tests.common as common
class TestStockQuantNameSearch(common.TransactionCase):
def setUp(self):
super(TestStockQuantNameSearch, self).setUp()
self.lot_name = 'StockQuantTestLot'
self.product_name = 'StockQuantTestProduct'
self.uom = self.browse_ref('product.product_uom_dozen')
self.quant_model = self.env['stock.quant'].sudo()
product = self.env['product.product'].create({
'name': self.product_name,
'uom_id': self.uom.id,
})
lot_id = self.env['stock.production.lot'].create({
'name': self.lot_name,
'product_id': product.id,
})
location_id = self.ref('stock.stock_location_customers')
self.quant1 = self.quant_model.create({
'product_id': product.id,
'lot_id': lot_id.id,
'qty': 10.0,
'location_id': location_id,
})
self.quant2 = self.quant_model.create({
'product_id': product.id,
'qty': 10.0,
'location_id': location_id,
})
self.quant3 = self.quant_model.create({
'product_id': self.ref('product.product_product_3'),
'qty': 10.0,
'location_id': location_id,
})
def test_quant_name_search_lot(self):
res_search = self.quant_model.name_search(name=self.lot_name)
quant_ids = map(lambda x: x[0], res_search)
self.assertNotEqual(
len(res_search), 0, 'There must be at least one quant created.')
self.assertEqual(
len(res_search), 1, 'There must be only one quant created.')
self.assertIn(self.quant1.id, quant_ids)
self.assertNotIn(self.quant2.id, quant_ids)
self.assertNotIn(self.quant3.id, quant_ids)
def test_quant_name_search_product(self):
res_search = self.quant_model.name_search(name=self.product_name)
quant_ids = map(lambda x: x[0], res_search)
self.assertNotEqual(
len(res_search), 0, 'There must be at least one quants created.')
self.assertEqual(
len(res_search), 2, 'There must be only two quants created.')
self.assertIn(self.quant1.id, quant_ids)
self.assertIn(self.quant2.id, quant_ids)
self.assertNotIn(self.quant3.id, quant_ids)
def test_quant_name_search_uom(self):
res_search = self.quant_model.name_search(name=self.uom.name)
quant_ids = map(lambda x: x[0], res_search)
self.assertNotEqual(
len(res_search), 0, 'There must be at least one quants created.')
self.assertTrue(
len(res_search) >= 2, 'There must be at least two quants created.')
self.assertIn(self.quant1.id, quant_ids)
self.assertIn(self.quant2.id, quant_ids)
self.assertNotIn(self.quant3.id, quant_ids)
| agpl-3.0 | -7,587,487,223,193,359,000 | 40.555556 | 79 | 0.598262 | false |
willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/Utilities/Text/Lowercase.py | 5 | 2883 | # -*- coding: utf-8 -*-
###############################################################################
#
# Lowercase
# Returns the contents of the specified string converted to lowercase.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class Lowercase(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the Lowercase Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(Lowercase, self).__init__(temboo_session, '/Library/Utilities/Text/Lowercase')
def new_input_set(self):
return LowercaseInputSet()
def _make_result_set(self, result, path):
return LowercaseResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return LowercaseChoreographyExecution(session, exec_id, path)
class LowercaseInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the Lowercase
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Text(self, value):
"""
Set the value of the Text input for this Choreo. ((required, multiline) The text that should be converted to lowercase.)
"""
super(LowercaseInputSet, self)._set_input('Text', value)
class LowercaseResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the Lowercase Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((string) The converted lowercase text.)
"""
return self._output.get('Response', None)
class LowercaseChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return LowercaseResultSet(response, path)
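# Usage sketch (hypothetical Temboo credentials, following the standard SDK
# session/choreo pattern):
#
#   from temboo.core.session import TembooSession
#   session = TembooSession('ACCOUNT', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#   choreo = Lowercase(session)
#   inputs = choreo.new_input_set()
#   inputs.set_Text('HELLO World')
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())   # 'hello world'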
| gpl-2.0 | 9,085,289,131,971,519,000 | 34.158537 | 128 | 0.673604 | false |
stackdio/stackdio | stackdio/api/users/filters.py | 2 | 1651 | # -*- coding: utf-8 -*-
# Copyright 2017, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
import django_filters
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from stackdio.core.filters import OrFieldsFilter
class UserFilter(django_filters.FilterSet):
username = django_filters.CharFilter(lookup_type='icontains')
first_name = django_filters.CharFilter(lookup_type='icontains')
last_name = django_filters.CharFilter(lookup_type='icontains')
q = OrFieldsFilter(field_names=('username', 'first_name', 'last_name', 'email'),
lookup_type='icontains')
class Meta:
model = get_user_model()
fields = (
'username',
'first_name',
'last_name',
'q',
)
class GroupFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
q = OrFieldsFilter(field_names=('name',), lookup_type='icontains')
class Meta:
model = Group
fields = (
'name',
'q',
)
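# Usage sketch (assumed DRF wiring): with these filter sets a request such as
#   GET /api/users/?q=smith
# matches username, first_name, last_name or email containing 'smith'
# (case-insensitive), via the OrFieldsFilter above.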
| apache-2.0 | 2,748,405,130,019,256,300 | 30.75 | 84 | 0.670503 | false |
vbicer/RealmStore | Carthage/Checkouts/realm-cocoa/plugin/rlm_lldb.py | 6 | 10776 | #!/usr/bin/python
##############################################################################
#
# Copyright 2014 Realm Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
# In the lldb shell, load with:
# command script import [Realm path]/plugin/rlm_lldb.py --allow-reload
# To load automatically, add that line to your ~/.lldbinit file (which you will
# have to create if you have not set up any previous lldb scripts), or run this
# file as a Python script outside of Xcode to install it automatically
if __name__ == '__main__':
# Script is being run directly, so install it
import errno
import shutil
import os
source = os.path.realpath(__file__)
destination = os.path.expanduser("~/Library/Application Support/Realm")
# Copy the file into place
try:
os.makedirs(destination, 0744)
except os.error as e:
# It's fine if the directory already exists
if e.errno != errno.EEXIST:
raise
shutil.copy2(source, destination + '/rlm_lldb.py')
# Add it to ~/.lldbinit
load_line = 'command script import "~/Library/Application Support/Realm/rlm_lldb.py" --allow-reload\n'
is_installed = False
try:
with open(os.path.expanduser('~/.lldbinit')) as f:
for line in f:
if line == load_line:
is_installed = True
break
except IOError as e:
if e.errno != errno.ENOENT:
raise
# File not existing yet is fine
if not is_installed:
with open(os.path.expanduser('~/.lldbinit'), 'a') as f:
f.write('\n' + load_line)
exit(0)
import lldb
property_types = {
0: 'int64_t',
10: 'double',
1: 'bool',
9: 'float',
}
def cache_lookup(cache, key, generator):
value = cache.get(key, None)
if not value:
value = generator(key)
cache[key] = value
return value
ivar_cache = {}
def get_ivar_info(obj, ivar):
def get_offset(ivar):
class_name, ivar_name = ivar.split('.')
frame = obj.GetThread().GetSelectedFrame()
ptr = frame.EvaluateExpression("&(({} *)0)->{}".format(class_name, ivar_name))
return (ptr.GetValueAsUnsigned(), ptr.deref.type, ptr.deref.size)
return cache_lookup(ivar_cache, ivar, get_offset)
def get_ivar(obj, addr, ivar):
offset, _, size = get_ivar_info(obj, ivar)
if isinstance(addr, lldb.SBAddress):
addr = addr.GetFileAddress()
return obj.GetProcess().ReadUnsignedFromMemory(addr + offset, size, lldb.SBError())
object_table_ptr_offset = None
def is_object_deleted(obj):
addr = obj.GetAddress().GetFileAddress()
global object_table_ptr_offset
if not object_table_ptr_offset:
row, _, _ = get_ivar_info(obj, 'RLMObject._row')
table, _, _ = get_ivar_info(obj, 'realm::Row.m_table')
ptr, _, _ = get_ivar_info(obj, 'realm::TableRef.m_ptr')
object_table_ptr_offset = row + table + ptr
ptr = obj.GetProcess().ReadUnsignedFromMemory(addr + object_table_ptr_offset,
obj.target.addr_size, lldb.SBError())
return ptr == 0
class SyntheticChildrenProvider(object):
def __init__(self, class_name):
self._class_name = class_name
def _eval(self, expr):
frame = self.obj.GetThread().GetSelectedFrame()
return frame.EvaluateExpression(expr)
def _get_ivar(self, addr, ivar):
return get_ivar(self.obj, addr, ivar)
def _to_str(self, val):
return self.obj.GetProcess().ReadCStringFromMemory(val, 1024, lldb.SBError())
def _value_from_ivar(self, ivar):
offset, ivar_type, _ = get_ivar_info(self.obj, '{}._{}'.format(self._class_name, ivar))
return self.obj.CreateChildAtOffset(ivar, offset, ivar_type)
def RLMObject_SummaryProvider(obj, _):
if is_object_deleted(obj):
return '[Deleted object]'
return None
schema_cache = {}
class RLMObject_SyntheticChildrenProvider(SyntheticChildrenProvider):
def __init__(self, obj, _):
super(RLMObject_SyntheticChildrenProvider, self).__init__('RLMObject')
self.obj = obj
if not obj.GetAddress() or is_object_deleted(obj):
self.props = []
return
object_schema = self._get_ivar(self.obj.GetAddress(), 'RLMObject._objectSchema')
def get_schema(object_schema):
properties = self._get_ivar(object_schema, 'RLMObjectSchema._properties')
if not properties:
return None
count = self._eval("(NSUInteger)[((NSArray *){}) count]".format(properties)).GetValueAsUnsigned()
return [self._get_prop(properties, i) for i in range(count)]
self.props = cache_lookup(schema_cache, object_schema, get_schema)
def num_children(self):
return len(self.props) + 2
def has_children(self):
return not is_object_deleted(self.obj)
def get_child_index(self, name):
if name == 'realm':
return 0
if name == 'objectSchema':
return 1
return next(i for i, (prop_name, _) in enumerate(self.props) if prop_name == name)
def get_child_at_index(self, index):
if index == 0:
return self._value_from_ivar('realm')
if index == 1:
return self._value_from_ivar('objectSchema')
name, getter = self.props[index - 2]
value = self._eval(getter)
return self.obj.CreateValueFromData(name, value.GetData(), value.GetType())
def update(self):
pass
def _get_prop(self, props, i):
prop = self._eval("(NSUInteger)[((NSArray *){}) objectAtIndex:{}]".format(props, i)).GetValueAsUnsigned()
name = self._to_str(self._eval('[(NSString *){} UTF8String]'.format(self._get_ivar(prop, "RLMProperty._name"))).GetValueAsUnsigned())
type = self._get_ivar(prop, 'RLMProperty._type')
getter = "({})[(id){} {}]".format(property_types.get(type, 'id'), self.obj.GetAddress(), name)
return name, getter
class_name_cache = {}
def get_object_class_name(frame, obj, addr, ivar):
class_name_ptr = get_ivar(obj, addr, ivar)
def get_class_name(ptr):
utf8_addr = frame.EvaluateExpression('(const char *)[(NSString *){} UTF8String]'.format(class_name_ptr)).GetValueAsUnsigned()
return obj.GetProcess().ReadCStringFromMemory(utf8_addr, 1024, lldb.SBError())
return cache_lookup(class_name_cache, class_name_ptr, get_class_name)
def RLMArray_SummaryProvider(obj, _):
frame = obj.GetThread().GetSelectedFrame()
class_name = get_object_class_name(frame, obj, obj.GetAddress(), 'RLMArray._objectClassName')
count = frame.EvaluateExpression('(NSUInteger)[(RLMArray *){} count]'.format(obj.GetAddress())).GetValueAsUnsigned()
return "({}[{}])".format(class_name, count)
results_mode_offset = None
mode_type = None
mode_query_value = None
def is_results_evaluated(obj):
global results_mode_offset, mode_type, mode_query_value
if not results_mode_offset:
results_offset, _, _ = get_ivar_info(obj, 'RLMResults._results')
mode_offset, mode_type, _ = get_ivar_info(obj, 'Results.m_mode')
results_mode_offset = results_offset + mode_offset
mode_query_value = next(m for m in mode_type.enum_members if m.name == 'Query').GetValueAsUnsigned()
addr = obj.GetAddress().GetFileAddress()
mode = obj.GetProcess().ReadUnsignedFromMemory(addr + results_mode_offset, mode_type.size, lldb.SBError())
return mode != mode_query_value
def results_object_class_name(obj):
class_info = get_ivar(obj, obj.GetAddress(), 'RLMResults._info')
object_schema = get_ivar(obj, class_info, 'RLMClassInfo.rlmObjectSchema')
return get_object_class_name(obj.GetThread().GetSelectedFrame(), obj, object_schema, 'RLMObjectSchema._className')
def RLMResults_SummaryProvider(obj, _):
class_name = results_object_class_name(obj)
if not is_results_evaluated(obj):
return 'Unevaluated query on ' + class_name
frame = obj.GetThread().GetSelectedFrame()
count = frame.EvaluateExpression('(NSUInteger)[(RLMResults *){} count]'.format(obj.GetAddress())).GetValueAsUnsigned()
return "({}[{}])".format(class_name, count)
class RLMCollection_SyntheticChildrenProvider(SyntheticChildrenProvider):
def __init__(self, valobj, _):
super(RLMCollection_SyntheticChildrenProvider, self).__init__(valobj.deref.type.name)
self.obj = valobj
self.addr = self.obj.GetAddress()
def num_children(self):
if not self.count:
self.count = self._eval("(NSUInteger)[(id){} count]".format(self.addr)).GetValueAsUnsigned()
return self.count + 1
def has_children(self):
return True
def get_child_index(self, name):
if name == 'realm':
return 0
if not name.startswith('['):
return None
return int(name.lstrip('[').rstrip(']')) + 1
def get_child_at_index(self, index):
if index == 0:
return self._value_from_ivar('realm')
value = self._eval('(id)[(id){} objectAtIndex:{}]'.format(self.addr, index - 1))
return self.obj.CreateValueFromData('[' + str(index - 1) + ']', value.GetData(), value.GetType())
def update(self):
self.count = None
def __lldb_init_module(debugger, _):
debugger.HandleCommand('type summary add RLMArray -F rlm_lldb.RLMArray_SummaryProvider')
debugger.HandleCommand('type summary add RLMArrayLinkView -F rlm_lldb.RLMArray_SummaryProvider')
debugger.HandleCommand('type summary add RLMResults -F rlm_lldb.RLMResults_SummaryProvider')
debugger.HandleCommand('type summary add -x RLMAccessor_ -F rlm_lldb.RLMObject_SummaryProvider')
debugger.HandleCommand('type synthetic add RLMArray --python-class rlm_lldb.RLMCollection_SyntheticChildrenProvider')
debugger.HandleCommand('type synthetic add RLMArrayLinkView --python-class rlm_lldb.RLMCollection_SyntheticChildrenProvider')
debugger.HandleCommand('type synthetic add RLMResults --python-class rlm_lldb.RLMCollection_SyntheticChildrenProvider')
debugger.HandleCommand('type synthetic add -x RLMAccessor_.* --python-class rlm_lldb.RLMObject_SyntheticChildrenProvider')
| mit | 7,322,575,906,561,276,000 | 38.328467 | 141 | 0.64523 | false |
lsst-sqre/ltd-keeper | keeper/editiontracking/trackingmodes.py | 1 | 3019 | from __future__ import annotations
from typing import Union
from keeper.editiontracking.base import TrackingModeBase
from keeper.editiontracking.eupsdailymode import EupsDailyReleaseTrackingMode
from keeper.editiontracking.eupsmajormode import EupsMajorReleaseTrackingMode
from keeper.editiontracking.eupsweeklymode import EupsWeeklyReleaseTrackingMode
from keeper.editiontracking.gitrefmode import GitRefTrackingMode
from keeper.editiontracking.lsstdocmode import LsstDocTrackingMode
from keeper.editiontracking.manualmode import ManualTrackingMode
from keeper.exceptions import ValidationError
__all__ = ["EditionTrackingModes"]
class EditionTrackingModes:
"""Collection of edition tracking mode objects.
These modes determine how an edition should be updated with new builds.
"""
_modes = {
1: GitRefTrackingMode(),
2: LsstDocTrackingMode(),
3: EupsMajorReleaseTrackingMode(),
4: EupsWeeklyReleaseTrackingMode(),
5: EupsDailyReleaseTrackingMode(),
6: ManualTrackingMode(),
}
"""Map of tracking mode ID (an integer stored in the DB to the tracking
mode instance that can evaluate whether an edition should be updated
based on its own logic.
"""
_name_map = {mode.name: _id for _id, mode in _modes.items()}
"""Map of mode names to DB IDs.
This is the inverse of ``_modes``.
"""
def __getitem__(self, key: Union[int, str]) -> TrackingModeBase:
if not isinstance(key, int):
key = self.name_to_id(key)
return self._modes[key]
def name_to_id(self, mode: str) -> int:
"""Convert a mode name (string used by the web API) to a mode ID
(integer) used by the DB.
Parameters
----------
mode : `str`
Mode name.
Returns
-------
mode_id : `int`
Mode ID.
Raises
------
ValidationError
Raised if ``mode`` is unknown.
"""
try:
mode_id = self._name_map[mode]
except KeyError:
message = (
"Edition tracking mode {!r} unknown. Valid values " "are {!r}"
)
raise ValidationError(message.format(mode, self._name_map.keys()))
return mode_id
def id_to_name(self, mode_id: int) -> str:
"""Convert a mode ID (integer used by the DB) to a name used by the
web API.
Parameters
----------
mode_id : `int`
Mode ID.
Returns
-------
mode : `str`
Mode name.
Raises
------
ValidationError
Raised if ``mode`` is unknown.
"""
try:
mode = self._modes[mode_id]
except KeyError:
message = (
"Edition tracking mode ID {!r} unknown. Valid values "
"are {!r}"
)
raise ValidationError(message.format(mode_id, self._modes.keys()))
return mode.name
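# A minimal usage sketch (not part of the original module): the two lookup
# tables are inverses of one another, so converting a mode name to its
# database ID and back is lossless. The helper below is hypothetical and
# reaches into the private maps purely for illustration.
def _example_mode_round_trip() -> None:
    modes = EditionTrackingModes()
    for name, mode_id in modes._name_map.items():  # private access, illustration only
        assert modes.name_to_id(name) == mode_id
        assert modes.id_to_name(mode_id) == name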
| mit | 3,918,184,076,186,657,000 | 28.598039 | 79 | 0.595561 | false |
aurix/lammps-induced-dipole-polarization-pair-style | tools/i-pi/ipi/utils/prng.py | 33 | 4174 | """Contains the classes used to generate pseudo-random numbers.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Allows the user to specify a seed for the random number generator.
These are used in initialising the velocities and in stochastic thermostats.
The state of the random number generator is kept track of, so that if the
simulation is restarted from a checkpoint, we will see the same dynamics as if
it had not been stopped.
Classes:
Random: An interface between the numpy.random module and the user.
"""
__all__ = ['Random']
import numpy as np
import math
class Random(object):
"""Class to interface with the standard pseudo-random number generator.
Initialises the standard numpy pseudo-random number generator from a seed
at the beginning of the simulation, and keeps track of the state so that
it can be output to the checkpoint files throughout the simulation.
Attributes:
rng: The random number generator to be used.
seed: The seed number to start the generator.
state: A tuple of five objects giving the current state of the random
number generator. The first is the type of random number generator,
here 'MT19937', the second is an array of 624 integers, the third
is the current position in the array that is being read from, the
fourth gives whether it has a gaussian random number stored, and
the fifth is this stored Gaussian random number, or else the last
Gaussian random number returned.
"""
def __init__(self, seed=12345, state=None):
"""Initialises Random.
Args:
seed: An optional seed giving an integer to initialise the state with.
state: An optional state tuple to initialise the state with.
"""
self.rng = np.random.mtrand.RandomState(seed=seed)
self.seed = seed
if state is None:
self.rng.seed(seed)
else:
self.state = state
def get_state(self):
"""Interface to the standard get_state() function."""
return self.rng.get_state()
def set_state(self, value):
"""Interface to the standard set_state() function.
Should only be used with states generated from another similar random
number generator, such as one from a previous run.
"""
return self.rng.set_state(value)
state=property(get_state, set_state)
@property
def u(self):
"""Interface to the standard random_sample() function.
Returns:
A pseudo-random number from a uniform distribution from 0-1.
"""
return self.rng.random_sample()
@property
def g(self):
"""Interface to the standard standard_normal() function.
Returns:
A pseudo-random number from a normal Gaussian distribution.
"""
return self.rng.standard_normal()
def gamma(self, k, theta=1.0):
"""Interface to the standard gamma() function.
Args:
k: Shape parameter for the gamma distribution.
         theta: Scale parameter for the gamma distribution.
Returns:
         A random number from a gamma distribution with shape k and
         scale theta, i.e. with mean value k*theta.
"""
return self.rng.gamma(k,theta)
def gvec(self, shape):
"""Interface to the standard_normal array function.
Args:
shape: The shape of the array to be returned.
Returns:
An array with the required shape where each element is taken from
a normal Gaussian distribution.
"""
return self.rng.standard_normal(shape)
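# Usage sketch (illustrative only, not part of the original module): seeding
# makes the stream reproducible, and saving/restoring the state property is
# what lets a restarted simulation continue with identical dynamics.
def _example_checkpoint_restore():
    """Hypothetical helper demonstrating state save/restore."""
    prng = Random(seed=12345)
    saved = prng.state      # checkpoint the generator state
    first = prng.g          # draw a Gaussian random number
    prng.state = saved      # restore, as a restart from checkpoint would
    second = prng.g         # the same number is produced again
    return first == second  # True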
| gpl-2.0 | -7,860,324,772,582,543,000 | 31.356589 | 79 | 0.688548 | false |
cysuncn/python | spark/crm/PROC_O_LNA_XDXT_VILLAGE_INFO.py | 1 | 4073 | #coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_LNA_XDXT_VILLAGE_INFO').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# dates used during processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# first day of the month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
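# For example, a hypothetical run with etl_date = '20170315' yields
# V_DT='20170315', V_DT_LD='20170314', V_DT_FMD='20170301',
# V_DT_LMD='20170228' and V_DT10='2017-03-15'.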
V_STEP = 0
O_CI_XDXT_VILLAGE_INFO = sqlContext.read.parquet(hdfs+'/O_CI_XDXT_VILLAGE_INFO/*')
O_CI_XDXT_VILLAGE_INFO.registerTempTable("O_CI_XDXT_VILLAGE_INFO")
# task [11] 001-01::
V_STEP = V_STEP + 1
sql = """
SELECT A.EVALUATERESULT AS EVALUATERESULT
,A.SERIALNO AS SERIALNO
,A.VILLAGETYPE AS VILLAGETYPE
,A.REGIONALISMCODE AS REGIONALISMCODE
,A.REGIONALISMNAME AS REGIONALISMNAME
,A.FINANCEBELONG AS FINANCEBELONG
,A.GROUPNUMBER AS GROUPNUMBER
,A.VILLAGENUMBER AS VILLAGENUMBER
,A.EMPLOYEENUMBER AS EMPLOYEENUMBER
,A.FARMERNUMBER AS FARMERNUMBER
,A.LOANFARMERNUMBER AS LOANFARMERNUMBER
,A.ACCORDFARMERNUMBER AS ACCORDFARMERNUMBER
,A.FINANCEINFO AS FINANCEINFO
,A.MOSTLYINDUSTRY AS MOSTLYINDUSTRY
,A.OFFICETEL AS OFFICETEL
,A.FIXTEL AS FIXTEL
,A.INPUTORGID AS INPUTORGID
,A.RELATIVESERIALNO AS RELATIVESERIALNO
,A.INPUTUSERID AS INPUTUSERID
,A.INPUTDATE AS INPUTDATE
,A.UPDATEDATE AS UPDATEDATE
,A.REMARK AS REMARK
,A.RELATIVENO AS RELATIVENO
,A.VALIDFARMERNUMBER AS VALIDFARMERNUMBER
,A.RELATIVENAME AS RELATIVENAME
,A.BELONGORGID AS BELONGORGID
,A.CHECKSTATUS AS CHECKSTATUS
,A.SMALLVILAGENUMBER AS SMALLVILAGENUMBER
,A.FAMILYNUMBER AS FAMILYNUMBER
,A.FARMERPERSONNUMBER AS FARMERPERSONNUMBER
,A.FARMERLOANNUMBER AS FARMERLOANNUMBER
,A.CREDITNUMBER AS CREDITNUMBER
,A.FIELDAREA AS FIELDAREA
,A.REGISTERFIELDAREA AS REGISTERFIELDAREA
,A.GLEBEFIELDAREA AS GLEBEFIELDAREA
,A.PADDYFIELDAREA AS PADDYFIELDAREA
,A.EVALUATEOVERDATE AS EVALUATEOVERDATE
,A.CREDITFLAG AS CREDITFLAG
,A.CORPORATEORGID AS CORPORATEORGID
,A.FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'LNA' AS ODS_SYS_ID
       FROM O_CI_XDXT_VILLAGE_INFO A -- village/town information
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CI_XDXT_VILLAGE_INFO = sqlContext.sql(sql)
F_CI_XDXT_VILLAGE_INFO.registerTempTable("F_CI_XDXT_VILLAGE_INFO")
dfn="F_CI_XDXT_VILLAGE_INFO/"+V_DT+".parquet"
F_CI_XDXT_VILLAGE_INFO.cache()
nrows = F_CI_XDXT_VILLAGE_INFO.count()
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CI_XDXT_VILLAGE_INFO/*.parquet")
F_CI_XDXT_VILLAGE_INFO.write.save(path=hdfs + '/' + dfn, mode='overwrite')
F_CI_XDXT_VILLAGE_INFO.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_CI_XDXT_VILLAGE_INFO lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrows)
| gpl-3.0 | 1,761,726,224,240,423,700 | 41.136842 | 174 | 0.601049 | false |
Maronato/aosalunos | misago/models/checkpointmodel.py | 3 | 2728 | from django.db import models
from misago.signals import (merge_post, merge_thread, move_forum_content,
move_post, move_thread, rename_forum, rename_user)
class Checkpoint(models.Model):
forum = models.ForeignKey('Forum')
thread = models.ForeignKey('Thread')
action = models.CharField(max_length=255)
extra = models.CharField(max_length=255, null=True, blank=True)
user = models.ForeignKey('User', null=True, blank=True, on_delete=models.SET_NULL)
user_name = models.CharField(max_length=255)
user_slug = models.CharField(max_length=255)
target_user = models.ForeignKey('User', null=True, blank=True, on_delete=models.SET_NULL, related_name='+')
target_user_name = models.CharField(max_length=255, null=True, blank=True)
target_user_slug = models.CharField(max_length=255, null=True, blank=True)
old_forum = models.ForeignKey('Forum', null=True, blank=True, related_name='+')
old_forum_name = models.CharField(max_length=255, null=True, blank=True)
old_forum_slug = models.CharField(max_length=255, null=True, blank=True)
date = models.DateTimeField()
ip = models.GenericIPAddressField()
agent = models.CharField(max_length=255)
deleted = models.BooleanField(default=False)
class Meta:
app_label = 'misago'
def rename_forum_handler(sender, **kwargs):
Checkpoint.objects.filter(old_forum=sender).update(
old_forum_name=sender.name,
old_forum_slug=sender.slug,
)
rename_forum.connect(rename_forum_handler, dispatch_uid="rename_forum_checkpoints")
def rename_user_handler(sender, **kwargs):
Checkpoint.objects.filter(user=sender).update(
user_name=sender.username,
user_slug=sender.username_slug,
)
rename_user.connect(rename_user_handler, dispatch_uid="rename_user_checkpoints")
def move_forum_content_handler(sender, **kwargs):
Checkpoint.objects.filter(forum=sender).update(forum=kwargs['move_to'])
move_forum_content.connect(move_forum_content_handler, dispatch_uid="move_forum_checkpoints")
def move_thread_handler(sender, **kwargs):
Checkpoint.objects.filter(thread=sender).update(forum=kwargs['move_to'])
move_thread.connect(move_thread_handler, dispatch_uid="move_thread_checkpoints")
def merge_thread_handler(sender, **kwargs):
Checkpoint.objects.filter(thread=sender).delete()
merge_thread.connect(merge_thread_handler, dispatch_uid="merge_threads_checkpoints")
| gpl-2.0 | 5,986,445,095,160,315,000 | 43.721311 | 111 | 0.648827 | false |
blrm/openshift-tools | ansible/roles/lib_openshift_3.2/library/oadm_router.py | 6 | 79435 | #!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = kubeconfig
self.all_namespaces = all_namespaces
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = '/tmp/%s' % rname
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
        '''force replace a resource from a file '''
cmd = ['-n', self.namespace, 'replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
        '''create a resource from the given content '''
fname = '/tmp/%s' % rname
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
        '''create a resource from a file '''
return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname, selector=None):
        '''delete the specified resource '''
cmd = ['delete', resource, rname, '-n', self.namespace]
if selector:
cmd.append('--selector=%s' % selector)
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None):
        '''process a template, optionally creating the resulting resources '''
cmd = ['process', '-n', self.namespace]
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = '/tmp/%s' % template_name
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
if self.all_namespaces:
cmd.extend(['--all-namespaces'])
elif self.namespace:
cmd.extend(['-n', self.namespace])
cmd.extend(['-o', 'json'])
if rname:
cmd.append(rname)
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if rval.has_key('items'):
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node --schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
cmd.append('--schedulable=%s' % schedulable)
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm manage-node --list-pods '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
#pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
if grace_period:
cmd.append('--grace-period=%s' % int(grace_period))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
#pylint: disable=too-many-arguments
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = []
if oadm:
cmds = ['/usr/bin/oc', 'adm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
stdout, stderr = proc.communicate(input_data)
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print stdout
print stderr
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds
})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {},
})
return rval
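# Usage sketch (illustrative, not part of the generated module): callers
# normally subclass OpenShiftCLI, but the verb helpers can also be driven
# directly. The namespace and resource names below are placeholders.
def _example_get_resource():
    '''hypothetical helper: fetch a deploymentconfig as parsed json'''
    occmd = OpenShiftCLI('default')
    result = occmd._get('dc', rname='router')  # runs: oc get dc router -o json
    if result['returncode'] == 0:
        return result['results']  # a list of resource dicts
    return None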
class Utils(object):
''' utilities for openshiftcli modules '''
@staticmethod
def create_file(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
path = os.path.join('/tmp', rname)
with open(path, 'w') as fds:
if ftype == 'yaml':
fds.write(yaml.dump(data, Dumper=yaml.RoundTripDumper))
elif ftype == 'json':
fds.write(json.dumps(data))
else:
fds.write(data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [path])
return path
@staticmethod
def create_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_file(item['path'], item['data'], ftype=content_type)
files.append({'name': os.path.basename(path), 'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if result.has_key('metadata') and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
contents = yaml.load(contents, yaml.RoundTripLoader)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if not user_def.has_key(key):
if debug:
print 'User data does not have key [%s]' % key
print 'User data: %s' % user_def
return False
if not isinstance(user_def[key], list):
if debug:
print 'user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key])
return False
if len(user_def[key]) != len(value):
if debug:
print "List lengths are not equal."
print "key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value))
print "user_def: %s" % user_def[key]
print "value: %s" % value
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print 'sending list - list'
print type(values[0])
print type(values[1])
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print 'list compare returned false'
return False
elif value != user_def[key]:
if debug:
print 'value should be identical'
print value
print user_def[key]
return False
# recurse on a dictionary
elif isinstance(value, dict):
if not user_def.has_key(key):
if debug:
print "user_def does not have key [%s]" % key
return False
if not isinstance(user_def[key], dict):
if debug:
print "dict returned false: not instance of dict"
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print "keys are not equal in dict"
print api_values
print user_values
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print "dict returned false"
print result
return False
# Verify each key, value pair is the same
else:
if not user_def.has_key(key) or value != user_def[key]:
if debug:
print "value not equal; user_def does not have key"
print key
print value
if user_def.has_key(key):
print user_def[key]
return False
if debug:
print 'returning true'
return True
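# Usage sketch: check_def_equal compares a user-supplied definition against
# the live object returned by oc, skipping autogenerated sections such as
# 'metadata' and 'status'. The definitions below are placeholders.
def _example_check_def_equal():
    '''hypothetical helper demonstrating the comparison semantics'''
    user_def = {'spec': {'replicas': 2}}
    live_def = {'spec': {'replicas': 2},
                'metadata': {'uid': 'abc123'},
                'status': {'phase': 'Active'}}
    return Utils.check_def_equal(user_def, live_def)  # True: skipped keys are ignored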
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self):
'''return all options as a string'''
return self.stringify()
def stringify(self):
''' return the options hash as cli params in a string '''
rval = []
for key, data in self.config_options.items():
if data['include'] \
and (data['value'] or isinstance(data['value'], int)):
rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
return rval
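# Usage sketch: each option is a dict carrying 'value' and 'include'; the
# stringify helper turns included options into CLI flags, mapping underscores
# to dashes. All names below are placeholders, and flag ordering follows
# dictionary iteration order.
def _example_option_list():
    '''hypothetical helper showing option-to-flag conversion'''
    options = {'replicas': {'value': 2, 'include': True},
               'service_account': {'value': 'router', 'include': True},
               'ports': {'value': None, 'include': True}}  # skipped: no value
    config = OpenShiftCLIConfig('router', 'default',
                                '/etc/origin/master/admin.kubeconfig', options)
    return config.to_option_list()  # e.g. ['--replicas=2', '--service-account=router']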
class YeditException(Exception):
''' Exception class for Yedit '''
pass
class Yedit(object):
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self, filename=None, content=None, content_type='yaml', separator='.', backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict == None:
self.__yaml_dict = {}
@property
def separator(self):
        ''' getter method for separator '''
return self._separator
    @separator.setter
    def separator(self, value):
        ''' setter method for separator '''
        self._separator = value
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key % ''.join(common_separators), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add an entry to a dictionary using key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a#b
            item = 'd' results in d = {'a': {'b': 'd'}}
'''
if key == '':
pass
elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and data.has_key(dict_key) and data[dict_key]:
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding data to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
return data
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
tmp_filename = self.filename + '.yedit'
try:
with open(tmp_filename, 'w') as yfd:
# pylint: disable=no-member,maybe-no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except Exception as err:
raise YeditException(err.message)
os.rename(tmp_filename, self.filename)
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename == None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
# pylint: disable=no-member,maybe-no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. %s' % err)
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError as _:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# pylint: disable=no-member,maybe-no-member
if entry.has_key(key_or_item):
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# pylint: disable=no-member,maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# pylint: disable=no-member,maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if isinstance(entry, dict):
# pylint: disable=no-member,maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type.' \
' value=[%s] [%s]' % (value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# pylint: disable=no-member,maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index != None:
ind = index
if ind != None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
#already exists, return
if ind != None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
tmp_copy.fa.set_block_style()
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
tmp_copy.fa.set_block_style()
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
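# Usage sketch: Yedit addresses nested yaml/json content with separator-based
# key paths such as 'a.b[0].c'. The content below is a placeholder.
def _example_yedit_roundtrip():
    '''hypothetical helper showing put/get on in-memory content'''
    yed = Yedit(content={'a': {'b': [{'c': 'd'}]}})
    yed.put('a.b[0].c', 'changed')  # set a nested value through the key path
    return yed.get('a.b[0].c')      # -> 'changed'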
# pylint: disable=too-many-instance-attributes
class ServiceConfig(object):
''' Handle service options '''
# pylint: disable=too-many-arguments
def __init__(self,
sname,
namespace,
ports,
selector=None,
labels=None,
cluster_ip=None,
portal_ip=None,
session_affinity=None,
service_type=None):
''' constructor for handling service options '''
self.name = sname
self.namespace = namespace
self.ports = ports
self.selector = selector
self.labels = labels
self.cluster_ip = cluster_ip
self.portal_ip = portal_ip
self.session_affinity = session_affinity
self.service_type = service_type
self.data = {}
self.create_dict()
def create_dict(self):
''' return a service as a dict '''
self.data['apiVersion'] = 'v1'
self.data['kind'] = 'Service'
self.data['metadata'] = {}
self.data['metadata']['name'] = self.name
self.data['metadata']['namespace'] = self.namespace
if self.labels:
for lab, lab_value in self.labels.items():
self.data['metadata'][lab] = lab_value
self.data['spec'] = {}
if self.ports:
self.data['spec']['ports'] = self.ports
else:
self.data['spec']['ports'] = []
if self.selector:
self.data['spec']['selector'] = self.selector
self.data['spec']['sessionAffinity'] = self.session_affinity or 'None'
if self.cluster_ip:
self.data['spec']['clusterIP'] = self.cluster_ip
if self.portal_ip:
self.data['spec']['portalIP'] = self.portal_ip
if self.service_type:
self.data['spec']['type'] = self.service_type
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class Service(Yedit):
''' Class to wrap the oc command line tools '''
port_path = "spec.ports"
portal_ip = "spec.portalIP"
cluster_ip = "spec.clusterIP"
kind = 'Service'
def __init__(self, content):
'''Service constructor'''
super(Service, self).__init__(content=content)
def get_ports(self):
''' get a list of ports '''
return self.get(Service.port_path) or []
def add_ports(self, inc_ports):
''' add a port object to the ports list '''
if not isinstance(inc_ports, list):
inc_ports = [inc_ports]
ports = self.get_ports()
if not ports:
self.put(Service.port_path, inc_ports)
else:
ports.extend(inc_ports)
return True
def find_ports(self, inc_port):
''' find a specific port '''
for port in self.get_ports():
if port['port'] == inc_port['port']:
return port
return None
def delete_ports(self, inc_ports):
''' remove a port from a service '''
if not isinstance(inc_ports, list):
inc_ports = [inc_ports]
ports = self.get(Service.port_path) or []
if not ports:
return True
removed = False
for inc_port in inc_ports:
port = self.find_ports(inc_port)
if port:
ports.remove(port)
removed = True
return removed
def add_cluster_ip(self, sip):
'''add cluster ip'''
self.put(Service.cluster_ip, sip)
def add_portal_ip(self, pip):
        '''add portal ip'''
self.put(Service.portal_ip, pip)
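# Usage sketch: Service wraps a service definition and edits its port list in
# place. The definition below is a minimal placeholder.
def _example_service_ports():
    '''hypothetical helper showing port manipulation'''
    svc = Service(content={'apiVersion': 'v1', 'kind': 'Service',
                           'metadata': {'name': 'router'},
                           'spec': {'ports': []}})
    svc.add_ports({'name': '80-tcp', 'port': 80, 'protocol': 'TCP', 'targetPort': 80})
    return svc.find_ports({'port': 80})  # -> the matching port dict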
# pylint: disable=too-many-public-methods
class DeploymentConfig(Yedit):
''' Class to wrap the oc command line tools '''
default_deployment_config = '''
apiVersion: v1
kind: DeploymentConfig
metadata:
name: default_dc
namespace: default
spec:
replicas: 0
selector:
default_dc: default_dc
strategy:
resources: {}
rollingParams:
intervalSeconds: 1
maxSurge: 0
maxUnavailable: 25%
timeoutSeconds: 600
updatePercent: -25
updatePeriodSeconds: 1
type: Rolling
template:
metadata:
spec:
containers:
- env:
- name: default
value: default
image: default
imagePullPolicy: IfNotPresent
name: default_dc
ports:
- containerPort: 8000
hostPort: 8000
protocol: TCP
name: default_port
resources: {}
terminationMessagePath: /dev/termination-log
dnsPolicy: ClusterFirst
hostNetwork: true
nodeSelector:
type: compute
restartPolicy: Always
securityContext: {}
serviceAccount: default
serviceAccountName: default
terminationGracePeriodSeconds: 30
triggers:
- type: ConfigChange
'''
replicas_path = "spec.replicas"
env_path = "spec.template.spec.containers[0].env"
volumes_path = "spec.template.spec.volumes"
container_path = "spec.template.spec.containers"
volume_mounts_path = "spec.template.spec.containers[0].volumeMounts"
def __init__(self, content=None):
        ''' Constructor for DeploymentConfig '''
if not content:
content = DeploymentConfig.default_deployment_config
super(DeploymentConfig, self).__init__(content=content)
# pylint: disable=no-member
def add_env_value(self, key, value):
''' add key, value pair to env array '''
rval = False
env = self.get_env_vars()
if env:
env.append({'name': key, 'value': value})
rval = True
else:
result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value})
rval = result[0]
return rval
def exists_env_value(self, key, value):
''' return whether a key, value pair exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key and result['value'] == value:
return True
return False
def exists_env_key(self, key):
''' return whether a key, value pair exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key:
return True
return False
def get_env_vars(self):
        '''return the list of environment variables '''
return self.get(DeploymentConfig.env_path) or []
def delete_env_var(self, keys):
'''delete a list of keys '''
if not isinstance(keys, list):
keys = [keys]
env_vars_array = self.get_env_vars()
modified = False
        for key in keys:
            idx = None
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
            if idx is not None:
modified = True
del env_vars_array[idx]
if modified:
return True
return False
def update_env_var(self, key, value):
'''place an env in the env var list'''
env_vars_array = self.get_env_vars()
idx = None
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
        if idx is not None:
env_vars_array[idx]['value'] = value
else:
self.add_env_value(key, value)
return True
def exists_volume_mount(self, volume_mount):
''' return whether a volume mount exists '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts:
return False
volume_mount_found = False
for exist_volume_mount in exist_volume_mounts:
if exist_volume_mount['name'] == volume_mount['name']:
volume_mount_found = True
break
return volume_mount_found
def exists_volume(self, volume):
''' return whether a volume exists '''
exist_volumes = self.get_volumes()
volume_found = False
for exist_volume in exist_volumes:
if exist_volume['name'] == volume['name']:
volume_found = True
break
return volume_found
def find_volume_by_name(self, volume, mounts=False):
''' return the index of a volume '''
volumes = []
if mounts:
volumes = self.get_volume_mounts()
else:
volumes = self.get_volumes()
for exist_volume in volumes:
if exist_volume['name'] == volume['name']:
return exist_volume
return None
def get_replicas(self):
''' return replicas setting '''
return self.get(DeploymentConfig.replicas_path)
def get_volume_mounts(self):
'''return volume mount information '''
return self.get_volumes(mounts=True)
def get_volumes(self, mounts=False):
'''return volume mount information '''
if mounts:
return self.get(DeploymentConfig.volume_mounts_path) or []
return self.get(DeploymentConfig.volumes_path) or []
def delete_volume_by_name(self, volume):
'''delete a volume '''
modified = False
exist_volume_mounts = self.get_volume_mounts()
exist_volumes = self.get_volumes()
del_idx = None
for idx, exist_volume in enumerate(exist_volumes):
if exist_volume.has_key('name') and exist_volume['name'] == volume['name']:
del_idx = idx
break
if del_idx != None:
del exist_volumes[del_idx]
modified = True
del_idx = None
for idx, exist_volume_mount in enumerate(exist_volume_mounts):
if exist_volume_mount.has_key('name') and exist_volume_mount['name'] == volume['name']:
del_idx = idx
break
if del_idx != None:
            del exist_volume_mounts[del_idx]
modified = True
return modified
def add_volume_mount(self, volume_mount):
''' add a volume or volume mount to the proper location '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts and volume_mount:
self.put(DeploymentConfig.volume_mounts_path, [volume_mount])
else:
exist_volume_mounts.append(volume_mount)
def add_volume(self, volume):
''' add a volume or volume mount to the proper location '''
exist_volumes = self.get_volumes()
if not volume:
return
if not exist_volumes:
self.put(DeploymentConfig.volumes_path, [volume])
else:
exist_volumes.append(volume)
def update_replicas(self, replicas):
''' update replicas value '''
self.put(DeploymentConfig.replicas_path, replicas)
def update_volume(self, volume):
        '''update (or add) a volume in the volumes list'''
exist_volumes = self.get_volumes()
if not volume:
return False
# update the volume
update_idx = None
for idx, exist_vol in enumerate(exist_volumes):
if exist_vol['name'] == volume['name']:
update_idx = idx
break
if update_idx != None:
exist_volumes[update_idx] = volume
else:
self.add_volume(volume)
return True
def update_volume_mount(self, volume_mount):
        '''update (or add) a volume mount'''
modified = False
exist_volume_mounts = self.get_volume_mounts()
if not volume_mount:
return False
# update the volume mount
for exist_vol_mount in exist_volume_mounts:
if exist_vol_mount['name'] == volume_mount['name']:
if exist_vol_mount.has_key('mountPath') and \
str(exist_vol_mount['mountPath']) != str(volume_mount['mountPath']):
exist_vol_mount['mountPath'] = volume_mount['mountPath']
modified = True
break
if not modified:
self.add_volume_mount(volume_mount)
modified = True
return modified
def needs_update_volume(self, volume, volume_mount):
''' verify a volume update is needed '''
exist_volume = self.find_volume_by_name(volume)
exist_volume_mount = self.find_volume_by_name(volume, mounts=True)
results = []
results.append(exist_volume['name'] == volume['name'])
if volume.has_key('secret'):
results.append(exist_volume.has_key('secret'))
results.append(exist_volume['secret']['secretName'] == volume['secret']['secretName'])
results.append(exist_volume_mount['name'] == volume_mount['name'])
results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
elif volume.has_key('emptyDir'):
results.append(exist_volume_mount['name'] == volume['name'])
results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
elif volume.has_key('persistentVolumeClaim'):
pvc = 'persistentVolumeClaim'
results.append(exist_volume.has_key(pvc))
if results[-1]:
results.append(exist_volume[pvc]['claimName'] == volume[pvc]['claimName'])
if volume[pvc].has_key('claimSize'):
results.append(exist_volume[pvc]['claimSize'] == volume[pvc]['claimSize'])
elif volume.has_key('hostpath'):
results.append(exist_volume.has_key('hostPath'))
results.append(exist_volume['hostPath']['path'] == volume_mount['mountPath'])
return not all(results)
def needs_update_replicas(self, replicas):
''' verify whether a replica update is needed '''
current_reps = self.get(DeploymentConfig.replicas_path)
return not current_reps == replicas
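# Usage sketch: DeploymentConfig starts from default_deployment_config when no
# content is supplied and exposes helpers for env vars, volumes and replicas.
def _example_dc_env_vars():
    '''hypothetical helper showing env var management'''
    dcfg = DeploymentConfig()                     # uses the default definition
    dcfg.update_env_var('ROUTER_NAME', 'router')  # adds the var if missing
    return dcfg.exists_env_key('ROUTER_NAME')     # -> True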
# pylint: disable=too-many-instance-attributes
class SecretConfig(object):
''' Handle secret options '''
# pylint: disable=too-many-arguments
def __init__(self,
sname,
namespace,
kubeconfig,
secrets=None):
''' constructor for handling secret options '''
self.kubeconfig = kubeconfig
self.name = sname
self.namespace = namespace
self.secrets = secrets
self.data = {}
self.create_dict()
def create_dict(self):
''' return a secret as a dict '''
self.data['apiVersion'] = 'v1'
self.data['kind'] = 'Secret'
self.data['metadata'] = {}
self.data['metadata']['name'] = self.name
self.data['metadata']['namespace'] = self.namespace
self.data['data'] = {}
if self.secrets:
for key, value in self.secrets.items():
self.data['data'][key] = value
# pylint: disable=too-many-instance-attributes
class Secret(Yedit):
''' Class to wrap the oc command line tools '''
secret_path = "data"
kind = 'secret'
def __init__(self, content):
'''secret constructor'''
super(Secret, self).__init__(content=content)
self._secrets = None
@property
def secrets(self):
'''secret property'''
if self._secrets == None:
self._secrets = self.get_secrets()
return self._secrets
    @secrets.setter
    def secrets(self, value):
        '''secret property setter'''
        self._secrets = value
def get_secrets(self):
''' return cert '''
return self.get(Secret.secret_path) or {}
def add_secret(self, key, value):
''' return cert '''
if self.secrets:
self.secrets[key] = value
else:
self.put(Secret.secret_path, {key: value})
return True
def delete_secret(self, key):
''' delete secret'''
try:
del self.secrets[key]
except KeyError as _:
return False
return True
def find_secret(self, key):
''' find secret'''
rval = None
try:
rval = self.secrets[key]
except KeyError as _:
return None
return {'key': key, 'value': rval}
def update_secret(self, key, value):
''' update a secret'''
if self.secrets.has_key(key):
self.secrets[key] = value
else:
self.add_secret(key, value)
return True
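# Usage sketch: Secret edits the (base64-encoded) payload under 'data'; values
# are expected to be encoded by the caller. All values below are placeholders.
def _example_secret_edit():
    '''hypothetical helper showing secret data edits'''
    sec = Secret(content={'apiVersion': 'v1', 'kind': 'Secret',
                          'metadata': {'name': 'router-certs'},
                          'data': {'tls.key': 'cGxhY2Vob2xkZXI='}})
    sec.update_secret('tls.crt', 'cGxhY2Vob2xkZXI=')  # add or overwrite a key
    return sec.find_secret('tls.crt')  # -> {'key': 'tls.crt', 'value': ...}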
class ServiceAccountConfig(object):
'''Service account config class
This class stores the options and returns a default service account
'''
# pylint: disable=too-many-arguments
def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
self.name = sname
self.kubeconfig = kubeconfig
self.namespace = namespace
self.secrets = secrets or []
self.image_pull_secrets = image_pull_secrets or []
self.data = {}
self.create_dict()
def create_dict(self):
''' return a properly structured volume '''
self.data['apiVersion'] = 'v1'
self.data['kind'] = 'ServiceAccount'
self.data['metadata'] = {}
self.data['metadata']['name'] = self.name
self.data['metadata']['namespace'] = self.namespace
self.data['secrets'] = []
if self.secrets:
for sec in self.secrets:
self.data['secrets'].append({"name": sec})
self.data['imagePullSecrets'] = []
if self.image_pull_secrets:
for sec in self.image_pull_secrets:
self.data['imagePullSecrets'].append({"name": sec})
# pylint: disable=too-many-public-methods
class ServiceAccount(Yedit):
''' Class to wrap the oc command line tools '''
image_pull_secrets_path = "imagePullSecrets"
secrets_path = "secrets"
def __init__(self, content):
'''ServiceAccount constructor'''
super(ServiceAccount, self).__init__(content=content)
self._secrets = None
self._image_pull_secrets = None
@property
def image_pull_secrets(self):
''' property for image_pull_secrets '''
if self._image_pull_secrets == None:
self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
return self._image_pull_secrets
@image_pull_secrets.setter
def image_pull_secrets(self, secrets):
''' property for secrets '''
self._image_pull_secrets = secrets
@property
def secrets(self):
''' property for secrets '''
print "Getting secrets property"
if not self._secrets:
self._secrets = self.get(ServiceAccount.secrets_path) or []
return self._secrets
@secrets.setter
def secrets(self, secrets):
''' property for secrets '''
self._secrets = secrets
def delete_secret(self, inc_secret):
''' remove a secret '''
remove_idx = None
for idx, sec in enumerate(self.secrets):
if sec['name'] == inc_secret:
remove_idx = idx
break
        if remove_idx is not None:
del self.secrets[remove_idx]
return True
return False
def delete_image_pull_secret(self, inc_secret):
''' remove a image_pull_secret '''
remove_idx = None
for idx, sec in enumerate(self.image_pull_secrets):
if sec['name'] == inc_secret:
remove_idx = idx
break
        if remove_idx is not None:
del self.image_pull_secrets[remove_idx]
return True
return False
def find_secret(self, inc_secret):
'''find secret'''
for secret in self.secrets:
if secret['name'] == inc_secret:
return secret
return None
def find_image_pull_secret(self, inc_secret):
'''find secret'''
for secret in self.image_pull_secrets:
if secret['name'] == inc_secret:
return secret
return None
def add_secret(self, inc_secret):
'''add secret'''
if self.secrets:
self.secrets.append({"name": inc_secret})
else:
self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])
def add_image_pull_secret(self, inc_secret):
'''add image_pull_secret'''
if self.image_pull_secrets:
self.image_pull_secrets.append({"name": inc_secret})
else:
self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
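# --- Illustrative usage (editor's note): hedged sketch with a hypothetical
# serviceaccount resource dict.
#   sa = ServiceAccount(content={'apiVersion': 'v1', 'kind': 'ServiceAccount',
#                                'metadata': {'name': 'router'}})
#   sa.add_secret('router-certs')
#   sa.find_secret('router-certs')    # -> {'name': 'router-certs'}
#   sa.delete_secret('router-certs')  # -> True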
# pylint: disable=too-many-instance-attributes
class RoleBindingConfig(object):
    ''' Handle rolebinding options '''
# pylint: disable=too-many-arguments
def __init__(self,
sname,
namespace,
kubeconfig,
group_names=None,
role_ref=None,
subjects=None,
usernames=None):
        ''' constructor for handling rolebinding options '''
self.kubeconfig = kubeconfig
self.name = sname
self.namespace = namespace
self.group_names = group_names
self.role_ref = role_ref
self.subjects = subjects
self.usernames = usernames
self.data = {}
self.create_dict()
    def create_dict(self):
        ''' return a rolebinding as a dict '''
        self.data['apiVersion'] = 'v1'
        self.data['kind'] = 'RoleBinding'
        self.data['groupNames'] = self.group_names
        self.data['metadata'] = {}
        self.data['metadata']['name'] = self.name
        self.data['metadata']['namespace'] = self.namespace
        self.data['roleRef'] = self.role_ref
        self.data['subjects'] = self.subjects
        self.data['userNames'] = self.usernames
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class RoleBinding(Yedit):
''' Class to wrap the oc command line tools '''
group_names_path = "groupNames"
role_ref_path = "roleRef"
subjects_path = "subjects"
user_names_path = "userNames"
kind = 'RoleBinding'
def __init__(self, content):
'''RoleBinding constructor'''
super(RoleBinding, self).__init__(content=content)
self._subjects = None
self._role_ref = None
self._group_names = None
self._user_names = None
@property
def subjects(self):
''' subjects property '''
        if self._subjects is None:
self._subjects = self.get_subjects()
return self._subjects
@subjects.setter
def subjects(self, data):
''' subjects property setter'''
self._subjects = data
@property
def role_ref(self):
''' role_ref property '''
        if self._role_ref is None:
self._role_ref = self.get_role_ref()
return self._role_ref
@role_ref.setter
def role_ref(self, data):
''' role_ref property setter'''
self._role_ref = data
@property
def group_names(self):
''' group_names property '''
        if self._group_names is None:
self._group_names = self.get_group_names()
return self._group_names
@group_names.setter
def group_names(self, data):
''' group_names property setter'''
self._group_names = data
@property
def user_names(self):
''' user_names property '''
        if self._user_names is None:
self._user_names = self.get_user_names()
return self._user_names
@user_names.setter
def user_names(self, data):
''' user_names property setter'''
self._user_names = data
def get_group_names(self):
''' return groupNames '''
return self.get(RoleBinding.group_names_path) or []
def get_user_names(self):
''' return usernames '''
return self.get(RoleBinding.user_names_path) or []
def get_role_ref(self):
''' return role_ref '''
return self.get(RoleBinding.role_ref_path) or {}
def get_subjects(self):
''' return subjects '''
return self.get(RoleBinding.subjects_path) or []
#### ADD #####
def add_subject(self, inc_subject):
''' add a subject '''
if self.subjects:
self.subjects.append(inc_subject)
else:
self.put(RoleBinding.subjects_path, [inc_subject])
return True
def add_role_ref(self, inc_role_ref):
''' add a role_ref '''
if not self.role_ref:
self.put(RoleBinding.role_ref_path, {"name": inc_role_ref})
return True
return False
def add_group_names(self, inc_group_names):
''' add a group_names '''
if self.group_names:
self.group_names.append(inc_group_names)
else:
self.put(RoleBinding.group_names_path, [inc_group_names])
return True
def add_user_name(self, inc_user_name):
''' add a username '''
if self.user_names:
self.user_names.append(inc_user_name)
else:
self.put(RoleBinding.user_names_path, [inc_user_name])
return True
#### /ADD #####
#### Remove #####
def remove_subject(self, inc_subject):
''' remove a subject '''
try:
self.subjects.remove(inc_subject)
except ValueError as _:
return False
return True
def remove_role_ref(self, inc_role_ref):
''' remove a role_ref '''
if self.role_ref and self.role_ref['name'] == inc_role_ref:
del self.role_ref['name']
return True
return False
def remove_group_name(self, inc_group_name):
''' remove a groupname '''
try:
self.group_names.remove(inc_group_name)
except ValueError as _:
return False
return True
def remove_user_name(self, inc_user_name):
''' remove a username '''
try:
self.user_names.remove(inc_user_name)
except ValueError as _:
return False
return True
#### /REMOVE #####
#### UPDATE #####
def update_subject(self, inc_subject):
''' update a subject '''
try:
index = self.subjects.index(inc_subject)
except ValueError as _:
return self.add_subject(inc_subject)
self.subjects[index] = inc_subject
return True
def update_group_name(self, inc_group_name):
''' update a groupname '''
try:
index = self.group_names.index(inc_group_name)
except ValueError as _:
return self.add_group_names(inc_group_name)
self.group_names[index] = inc_group_name
return True
def update_user_name(self, inc_user_name):
''' update a username '''
try:
index = self.user_names.index(inc_user_name)
except ValueError as _:
return self.add_user_name(inc_user_name)
self.user_names[index] = inc_user_name
return True
def update_role_ref(self, inc_role_ref):
''' update a role_ref '''
self.role_ref['name'] = inc_role_ref
return True
#### /UPDATE #####
#### FIND ####
def find_subject(self, inc_subject):
''' find a subject '''
index = None
try:
index = self.subjects.index(inc_subject)
except ValueError as _:
return index
return index
def find_group_name(self, inc_group_name):
''' find a group_name '''
index = None
try:
index = self.group_names.index(inc_group_name)
except ValueError as _:
return index
return index
def find_user_name(self, inc_user_name):
''' find a user_name '''
index = None
try:
index = self.user_names.index(inc_user_name)
except ValueError as _:
return index
return index
def find_role_ref(self, inc_role_ref):
        ''' find a role_ref '''
if self.role_ref and self.role_ref['name'] == inc_role_ref['name']:
return self.role_ref
return None
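# --- Illustrative usage (editor's note): hedged sketch; the resource content
# and names are hypothetical.
#   rb = RoleBinding(content={'apiVersion': 'v1', 'kind': 'RoleBinding',
#                             'userNames': ['system:serviceaccount:default:router']})
#   rb.find_user_name('system:serviceaccount:default:router')  # -> 0 (list index)
#   rb.add_group_names('system:authenticated')                 # -> True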
class RouterException(Exception):
''' Router exception'''
pass
class RouterConfig(OpenShiftCLIConfig):
''' RouterConfig is a DTO for the router. '''
def __init__(self, rname, namespace, kubeconfig, router_options):
super(RouterConfig, self).__init__(rname, namespace, kubeconfig, router_options)
class Router(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
def __init__(self,
router_config,
verbose=False):
''' Constructor for OpenshiftOC
a router consists of 3 or more parts
- dc/router
- svc/router
- endpoint/router
'''
super(Router, self).__init__('default', router_config.kubeconfig, verbose)
self.config = router_config
self.verbose = verbose
self.router_parts = [{'kind': 'dc', 'name': self.config.name},
{'kind': 'svc', 'name': self.config.name},
{'kind': 'sa', 'name': self.config.config_options['service_account']['value']},
{'kind': 'secret', 'name': self.config.name + '-certs'},
{'kind': 'clusterrolebinding', 'name': 'router-' + self.config.name + '-role'},
#{'kind': 'endpoints', 'name': self.config.name},
]
self.__router_prep = None
self.dconfig = None
self.svc = None
self._secret = None
self._serviceaccount = None
self._rolebinding = None
self.get()
@property
def router_prep(self):
''' property deploymentconfig'''
        if self.__router_prep is None:
results = self.prepare_router()
if not results:
raise RouterException('Could not perform router preparation')
self.__router_prep = results
return self.__router_prep
@router_prep.setter
def router_prep(self, obj):
'''set the router prep property'''
self.__router_prep = obj
@property
def deploymentconfig(self):
''' property deploymentconfig'''
return self.dconfig
@deploymentconfig.setter
def deploymentconfig(self, config):
''' setter for property deploymentconfig '''
self.dconfig = config
@property
def service(self):
''' property service '''
return self.svc
@service.setter
def service(self, config):
''' setter for property service '''
self.svc = config
@property
def secret(self):
''' property secret '''
return self._secret
@secret.setter
def secret(self, config):
''' setter for property secret '''
self._secret = config
@property
def serviceaccount(self):
        ''' property serviceaccount '''
return self._serviceaccount
@serviceaccount.setter
def serviceaccount(self, config):
        ''' setter for property serviceaccount '''
self._serviceaccount = config
@property
def rolebinding(self):
''' property rolebinding '''
return self._rolebinding
@rolebinding.setter
def rolebinding(self, config):
''' setter for property rolebinding '''
self._rolebinding = config
def get(self):
''' return the self.router_parts '''
self.service = None
self.deploymentconfig = None
self.serviceaccount = None
self.secret = None
self.rolebinding = None
for part in self.router_parts:
result = self._get(part['kind'], rname=part['name'])
if result['returncode'] == 0 and part['kind'] == 'dc':
self.deploymentconfig = DeploymentConfig(result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'svc':
self.service = Service(content=result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'sa':
self.serviceaccount = ServiceAccount(content=result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'secret':
self.secret = Secret(content=result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'clusterrolebinding':
self.rolebinding = RoleBinding(content=result['results'][0])
return {'deploymentconfig': self.deploymentconfig,
'service': self.service,
'serviceaccount': self.serviceaccount,
'secret': self.secret,
'clusterrolebinding': self.rolebinding,
}
def exists(self):
        '''return whether the dc, svc, secret and serviceaccount all exist '''
if self.deploymentconfig and self.service and self.secret and self.serviceaccount:
return True
return False
def delete(self):
        '''delete all the router parts '''
parts = []
for part in self.router_parts:
parts.append(self._delete(part['kind'], part['name']))
return parts
def add_modifications(self, deploymentconfig):
'''modify the deployment config'''
# We want modifications in the form of edits coming in from the module.
# Let's apply these here
edit_results = []
for edit in self.config.config_options['edits'].get('value', []):
if edit['action'] == 'put':
edit_results.append(deploymentconfig.put(edit['key'],
edit['value']))
if edit['action'] == 'update':
edit_results.append(deploymentconfig.update(edit['key'],
edit['value'],
edit.get('index', None),
edit.get('curr_value', None)))
if edit['action'] == 'append':
edit_results.append(deploymentconfig.append(edit['key'],
edit['value']))
if edit_results and not any([res[0] for res in edit_results]):
return None
return deploymentconfig
def prepare_router(self):
'''prepare router for instantiation'''
# We need to create the pem file
router_pem = '/tmp/router.pem'
with open(router_pem, 'w') as rfd:
rfd.write(open(self.config.config_options['cert_file']['value']).read())
rfd.write(open(self.config.config_options['key_file']['value']).read())
if self.config.config_options['cacert_file']['value'] and \
os.path.exists(self.config.config_options['cacert_file']['value']):
rfd.write(open(self.config.config_options['cacert_file']['value']).read())
atexit.register(Utils.cleanup, [router_pem])
self.config.config_options['default_cert']['value'] = router_pem
options = self.config.to_option_list()
cmd = ['router', self.config.name, '-n', self.config.namespace]
cmd.extend(options)
cmd.extend(['--dry-run=True', '-o', 'json'])
results = self.openshift_cmd(cmd, oadm=True, output=True, output_type='json')
# pylint: disable=no-member
        if results['returncode'] != 0 and 'items' in results['results']:
return results
oc_objects = {'DeploymentConfig': {'obj': None, 'path': None},
'Secret': {'obj': None, 'path': None},
'ServiceAccount': {'obj': None, 'path': None},
'ClusterRoleBinding': {'obj': None, 'path': None},
'Service': {'obj': None, 'path': None},
}
# pylint: disable=invalid-sequence-index
for res in results['results']['items']:
if res['kind'] == 'DeploymentConfig':
oc_objects['DeploymentConfig']['obj'] = DeploymentConfig(res)
elif res['kind'] == 'Service':
oc_objects['Service']['obj'] = Service(res)
elif res['kind'] == 'ServiceAccount':
oc_objects['ServiceAccount']['obj'] = ServiceAccount(res)
elif res['kind'] == 'Secret':
oc_objects['Secret']['obj'] = Secret(res)
elif res['kind'] == 'ClusterRoleBinding':
oc_objects['ClusterRoleBinding']['obj'] = RoleBinding(res)
# Currently only deploymentconfig needs updating
# Verify we got a deploymentconfig
if not oc_objects['DeploymentConfig']['obj']:
return results
# results will need to get parsed here and modifications added
oc_objects['DeploymentConfig']['obj'] = self.add_modifications(oc_objects['DeploymentConfig']['obj'])
for oc_type in oc_objects.keys():
oc_objects[oc_type]['path'] = Utils.create_file(oc_type, oc_objects[oc_type]['obj'].yaml_dict)
return oc_objects
def create(self):
'''Create a deploymentconfig '''
# generate the objects and prepare for instantiation
self.prepare_router()
results = []
for _, oc_data in self.router_prep.items():
results.append(self._create(oc_data['path']))
rval = 0
for result in results:
            if result['returncode'] != 0 and 'already exist' not in result['stderr']:
rval = result['returncode']
return {'returncode': rval, 'results': results}
def update(self):
'''run update for the router. This performs a delete and then create '''
# generate the objects and prepare for instantiation
self.prepare_router()
results = []
for _, oc_data in self.router_prep.items():
results.append(self._replace(oc_data['path']))
rval = 0
for result in results:
            if result['returncode'] != 0 and 'already exist' not in result['stderr']:
rval = result['returncode']
return {'returncode': rval, 'results': results}
# pylint: disable=too-many-return-statements,too-many-branches
def needs_update(self, verbose=False):
''' check to see if we need to update '''
if not self.deploymentconfig or not self.service or not self.serviceaccount or not self.secret:
return True
oc_objects_prep = self.prepare_router()
# Since the output from oadm_router is returned as raw
# we need to parse it. The first line is the stats_password in 3.1
# Inside of 3.2, it is just json
# ServiceAccount:
# Need to determine the pregenerated ones from the original
# Since these are auto generated, we can skip
skip = ['secrets', 'imagePullSecrets']
if not Utils.check_def_equal(oc_objects_prep['ServiceAccount']['obj'].yaml_dict,
self.serviceaccount.yaml_dict,
skip_keys=skip,
debug=verbose):
return True
# Secret:
# In 3.2 oadm router generates a secret volume for certificates
# See if one was generated from our dry-run and verify it if needed
if oc_objects_prep['Secret']['obj']:
if not self.secret:
return True
if not Utils.check_def_equal(oc_objects_prep['Secret']['obj'].yaml_dict,
self.secret.yaml_dict,
skip_keys=skip,
debug=verbose):
return True
# Service:
# Fix the ports to have protocol=TCP
for port in oc_objects_prep['Service']['obj'].get('spec.ports'):
port['protocol'] = 'TCP'
skip = ['portalIP', 'clusterIP', 'sessionAffinity', 'type']
if not Utils.check_def_equal(oc_objects_prep['Service']['obj'].yaml_dict,
self.service.yaml_dict,
skip_keys=skip,
debug=verbose):
return True
# DeploymentConfig:
# Router needs some exceptions.
# We do not want to check the autogenerated password for stats admin
if not self.config.config_options['stats_password']['value']:
for idx, env_var in enumerate(oc_objects_prep['DeploymentConfig']['obj'].get(\
'spec.template.spec.containers[0].env') or []):
if env_var['name'] == 'STATS_PASSWORD':
env_var['value'] = \
self.deploymentconfig.get('spec.template.spec.containers[0].env[%s].value' % idx)
break
# dry-run doesn't add the protocol to the ports section. We will manually do that.
for idx, port in enumerate(oc_objects_prep['DeploymentConfig']['obj'].get(\
'spec.template.spec.containers[0].ports') or []):
            if 'protocol' not in port:
port['protocol'] = 'TCP'
# These are different when generating
skip = ['dnsPolicy',
'terminationGracePeriodSeconds',
'restartPolicy', 'timeoutSeconds',
'livenessProbe', 'readinessProbe',
'terminationMessagePath', 'hostPort',
'defaultMode',
]
return not Utils.check_def_equal(oc_objects_prep['DeploymentConfig']['obj'].yaml_dict,
self.deploymentconfig.yaml_dict,
skip_keys=skip,
debug=False)
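# --- Illustrative usage (editor's note): hedged sketch of the intended
# create/update flow; the config options are made up and a live cluster with a
# valid kubeconfig is assumed.
#   cfg = RouterConfig('router', 'default', '/etc/origin/master/admin.kubeconfig',
#                      {'service_account': {'value': 'router', 'include': True}})
#   router = Router(cfg)
#   if not router.exists():
#       router.create()
#   elif router.needs_update():
#       router.update()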
def main():
'''
ansible oc module for router
'''
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', type='str',
choices=['present', 'absent']),
debug=dict(default=False, type='bool'),
namespace=dict(default='default', type='str'),
name=dict(default='router', type='str'),
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
cert_file=dict(default=None, type='str'),
key_file=dict(default=None, type='str'),
images=dict(default=None, type='str'), #'openshift3/ose-${component}:${version}'
latest_images=dict(default=False, type='bool'),
labels=dict(default=None, type='list'),
ports=dict(default=['80:80', '443:443'], type='list'),
replicas=dict(default=1, type='int'),
selector=dict(default=None, type='str'),
service_account=dict(default='router', type='str'),
router_type=dict(default='haproxy-router', type='str'),
host_network=dict(default=True, type='bool'),
# external host options
external_host=dict(default=None, type='str'),
external_host_vserver=dict(default=None, type='str'),
external_host_insecure=dict(default=False, type='bool'),
external_host_partition_path=dict(default=None, type='str'),
external_host_username=dict(default=None, type='str'),
external_host_password=dict(default=None, type='str'),
external_host_private_key=dict(default=None, type='str'),
# Metrics
expose_metrics=dict(default=False, type='bool'),
metrics_image=dict(default=None, type='str'),
# Stats
stats_user=dict(default=None, type='str'),
stats_password=dict(default=None, type='str'),
stats_port=dict(default=1936, type='int'),
# extra
cacert_file=dict(default=None, type='str'),
# edits
edits=dict(default=[], type='list'),
),
mutually_exclusive=[["router_type", "images"]],
supports_check_mode=True,
)
rconfig = RouterConfig(module.params['name'],
module.params['namespace'],
module.params['kubeconfig'],
{'default_cert': {'value': None, 'include': True},
'cert_file': {'value': module.params['cert_file'], 'include': False},
'key_file': {'value': module.params['key_file'], 'include': False},
'images': {'value': module.params['images'], 'include': True},
'latest_images': {'value': module.params['latest_images'], 'include': True},
'labels': {'value': module.params['labels'], 'include': True},
'ports': {'value': ','.join(module.params['ports']), 'include': True},
'replicas': {'value': module.params['replicas'], 'include': True},
'selector': {'value': module.params['selector'], 'include': True},
'service_account': {'value': module.params['service_account'], 'include': True},
'router_type': {'value': module.params['router_type'], 'include': False},
'host_network': {'value': module.params['host_network'], 'include': True},
'external_host': {'value': module.params['external_host'], 'include': True},
'external_host_vserver': {'value': module.params['external_host_vserver'],
'include': True},
'external_host_insecure': {'value': module.params['external_host_insecure'],
'include': True},
'external_host_partition_path': {'value': module.params['external_host_partition_path'],
'include': True},
'external_host_username': {'value': module.params['external_host_username'],
'include': True},
'external_host_password': {'value': module.params['external_host_password'],
'include': True},
'external_host_private_key': {'value': module.params['external_host_private_key'],
'include': True},
'expose_metrics': {'value': module.params['expose_metrics'], 'include': True},
'metrics_image': {'value': module.params['metrics_image'], 'include': True},
'stats_user': {'value': module.params['stats_user'], 'include': True},
'stats_password': {'value': module.params['stats_password'], 'include': True},
'stats_port': {'value': module.params['stats_port'], 'include': True},
# extra
'cacert_file': {'value': module.params['cacert_file'], 'include': False},
# edits
'edits': {'value': module.params['edits'], 'include': False},
})
ocrouter = Router(rconfig)
state = module.params['state']
########
# Delete
########
if state == 'absent':
if not ocrouter.exists():
module.exit_json(changed=False, state="absent")
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a delete.')
api_rval = ocrouter.delete()
module.exit_json(changed=True, results=api_rval, state="absent")
if state == 'present':
########
# Create
########
if not ocrouter.exists():
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a create.')
api_rval = ocrouter.create()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
########
# Update
########
if not ocrouter.needs_update():
module.exit_json(changed=False, state="present")
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed an update.')
api_rval = ocrouter.update()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. These are required
if __name__ == '__main__':
from ansible.module_utils.basic import *
main()
| apache-2.0 | 8,668,944,091,224,195,000 | 32.759031 | 118 | 0.53314 | false |
JorisDeRieck/Flexget | flexget/components/sites/sites/wordpress.py | 4 | 3229 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.moves.urllib.parse import urlencode
import logging
import re
from flexget import plugin
from flexget.event import event
from flexget.plugin import PluginError
from requests import Request, RequestException
from requests.utils import dict_from_cookiejar, cookiejar_from_dict
log = logging.getLogger('wordpress_auth')
def construct_request(url, username='', password='', redirect='/wp-admin/'):
headers = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/50.0.2661.102 Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded',
'DNT': '1',
}
data = {
'log': username,
'pwd': password,
'wp-submit': 'Log In',
'testcookie': '1',
'redirect_to': redirect,
}
return Request(
method='POST', url=url, headers=headers, data=urlencode(data).encode('UTF-8')
).prepare()
def collect_cookies(response):
cookies = dict_from_cookiejar(response.cookies)
for h_resp in response.history:
cookies.update(dict_from_cookiejar(h_resp.cookies))
return cookiejar_from_dict(cookies)
def get_valid_cookies(cookies):
def is_wp_cookie(key):
        return re.match(r'(wordpress|wp)(?!_*test)[A-Za-z0-9]*', key, re.IGNORECASE)
valid_cookies = {key: value for key, value in cookies.items() if is_wp_cookie(key)}
return cookiejar_from_dict(valid_cookies)
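# --- Illustrative example (editor's note): hedged sketch of which cookie names
# the filter above keeps; the cookie names are hypothetical.
#   get_valid_cookies({'wordpress_logged_in_abc': '1',  # kept
#                      'wordpress_test_cookie': 'WP',   # dropped (test cookie)
#                      'PHPSESSID': 'x'})               # dropped (not a wp cookie)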
class PluginWordPress(object):
"""
Supports accessing feeds and media that require wordpress account credentials
Usage:
wordpress_auth:
url: 'your wordpress blog login page (ex http://example.org/wp-login.php)'
username: 'your username'
password: 'your password'
"""
schema = {
'type': 'object',
'properties': {
'url': {'type': 'string', 'oneOf': [{'format': 'url'}]},
'username': {'type': 'string', 'default': ''},
'password': {'type': 'string', 'default': ''},
},
'required': ['url'],
'additionalProperties': False,
}
@plugin.priority(135)
def on_task_start(self, task, config):
url = config['url']
username = config['username']
password = config['password']
try:
response = task.requests.send(
construct_request(url, username=username, password=password)
)
if not response.ok:
raise RequestException(str(response))
cookies = collect_cookies(response)
if len(get_valid_cookies(cookies)) < 1:
raise RequestException(
'No recognized WordPress cookies found. Perhaps username/password is invalid?'
)
task.requests.add_cookiejar(cookies)
except RequestException as err:
log.error('%s', err)
raise PluginError('WordPress Authentication at %s failed' % (url,))
@event('plugin.register')
def register_plugin():
plugin.register(PluginWordPress, 'wordpress_auth', api_ver=2)
| mit | -7,364,879,603,703,187,000 | 31.94898 | 111 | 0.621555 | false |
jamespcole/home-assistant | homeassistant/components/onboarding/views.py | 7 | 3642 | """Onboarding views."""
import asyncio
import voluptuous as vol
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.core import callback
from .const import DOMAIN, STEP_USER, STEPS
async def async_setup(hass, data, store):
"""Set up the onboarding view."""
hass.http.register_view(OnboardingView(data, store))
hass.http.register_view(UserOnboardingView(data, store))
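# Editor's note, summarized from the view classes below for quick reference:
#   GET  /api/onboarding       -> list of {'step': ..., 'done': ...} entries
#   POST /api/onboarding/users -> create the owner user (body: name, username,
#                                 password, client_id)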
class OnboardingView(HomeAssistantView):
"""Return the onboarding status."""
requires_auth = False
url = '/api/onboarding'
name = 'api:onboarding'
def __init__(self, data, store):
"""Initialize the onboarding view."""
self._store = store
self._data = data
async def get(self, request):
"""Return the onboarding status."""
return self.json([
{
'step': key,
'done': key in self._data['done'],
} for key in STEPS
])
class _BaseOnboardingView(HomeAssistantView):
"""Base class for onboarding."""
requires_auth = False
step = None
def __init__(self, data, store):
"""Initialize the onboarding view."""
self._store = store
self._data = data
self._lock = asyncio.Lock()
@callback
def _async_is_done(self):
"""Return if this step is done."""
return self.step in self._data['done']
async def _async_mark_done(self, hass):
"""Mark step as done."""
self._data['done'].append(self.step)
await self._store.async_save(self._data)
        hass.data[DOMAIN] = len(self._data['done']) == len(STEPS)
class UserOnboardingView(_BaseOnboardingView):
"""View to handle onboarding."""
url = '/api/onboarding/users'
name = 'api:onboarding:users'
step = STEP_USER
@RequestDataValidator(vol.Schema({
vol.Required('name'): str,
vol.Required('username'): str,
vol.Required('password'): str,
vol.Required('client_id'): str,
}))
    async def post(self, request, data):
        """Handle user creation, mark the user step as done."""
hass = request.app['hass']
async with self._lock:
if self._async_is_done():
return self.json_message('User step already done', 403)
provider = _async_get_hass_provider(hass)
await provider.async_initialize()
user = await hass.auth.async_create_user(data['name'])
await hass.async_add_executor_job(
provider.data.add_auth, data['username'], data['password'])
credentials = await provider.async_get_or_create_credentials({
'username': data['username']
})
await provider.data.async_save()
await hass.auth.async_link_user(user, credentials)
if 'person' in hass.config.components:
await hass.components.person.async_create_person(
data['name'], user_id=user.id
)
await self._async_mark_done(hass)
# Return an authorization code to allow fetching tokens.
auth_code = hass.components.auth.create_auth_code(
data['client_id'], user
)
return self.json({
'auth_code': auth_code
})
@callback
def _async_get_hass_provider(hass):
"""Get the Home Assistant auth provider."""
for prv in hass.auth.auth_providers:
if prv.type == 'homeassistant':
return prv
raise RuntimeError('No Home Assistant provider found')
| apache-2.0 | 5,741,273,418,620,851,000 | 29.099174 | 77 | 0.595003 | false |
PhilipsIII/ndnSIM | PyNDN/Face.py | 15 | 3883 | ## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
#
# Copyright (c) 2011-2013, Regents of the University of California
# Alexander Afanasyev
#
# GNU 3.0 license, See the LICENSE file for more information
#
# Author: Alexander Afanasyev <[email protected]>
#
#
# Based on PyCCN code, copyrighted and licensed as follows
#
# Copyright (c) 2011-2013, Regents of the University of California
# BSD license, See the COPYING file for more information
# Written by: Derek Kulinski <[email protected]>
# Jeff Burke <[email protected]>
#
import ns.core
import ns.network
import ns.ndnSIM
from Data import Data
from Interest import Interest
from Name import Name
import functools
import traceback
class Face (object):
deleteList = []
def __init__(self):
self.nodeId = ns.core.Simulator.GetContext ()
self.node = ns.network.NodeList.GetNode (self.nodeId)
self._face = ns.ndnSIM.ndn.ApiFace (self.node)
# super(Face, self).__init__ (self.node)
def connect (self):
pass
def disconnect (self):
self._face.Shutdown ()
def defer_verification (self, deferVerification = True):
pass
    def expressInterest (self, name, onData, onTimeout, template = None):
        """
        onData: void <interest, data>
        onTimeout: void <interest>
        """
interest = Interest (interest = template)
interest.name = name
converter = ExpressInterestConverter (onData, onTimeout)
self._face.ExpressInterest (interest._interest, converter.handleOnData, converter.handleOnTimeout)
def setInterestFilter (self, name, onInterest, flags = None):
"""
onInterest: void <name, interest>
"""
if isinstance (name, Name):
name = name._name
elif isinstance (name, ns.ndnSIM.ndn.Name):
pass
else:
raise TypeError ("Wrong type for 'name' parameter [%s]" % type (name))
self._face.SetInterestFilter (name, OnInterestConvert (onInterest))
def clearInterestFilter (self, name):
if isinstance (name, Name):
name = name._name
elif isinstance (name, ns.ndnSIM.ndn.Name):
pass
else:
raise TypeError ("Wrong type for 'name' parameter [%s]" % type (name))
# @bug: memory leak, deleteList need to remove previosly set callback... but how?
self._face.ClearInterestFilter (name)
def get (self, name, template = None, timeoutms = 3000):
        raise NotImplementedError ("NS-3 simulation cannot have synchronous operations")
def put (self, data):
if isinstance (data, Data):
self._face.Put (data._data)
elif isinstance (data, ns.ndnSIM.ndn.Data):
self._face.Put (data)
else:
raise TypeError ("Unsupported type to publish data [%s]" % type (data))
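# --- Illustrative usage (editor's note): hedged sketch; assumes a running
# ns-3 simulation context, and the callback names are made up.
#   def on_data (interest, data):
#       print "got data for", data.name
#   def on_timeout (interest):
#       print "timed out", interest.name
#   face = Face ()
#   face.expressInterest (Name ("/example/data"), on_data, on_timeout)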
class ExpressInterestConverter:
def __init__ (self, onData, onTimeout):
self.onData = onData
self.onTimeout = onTimeout
def handleOnData (self, interest, data):
try:
if self.onData:
return self.onData (Interest (interest=interest), Data (data = data))
except Exception, e:
traceback.print_exc()
def handleOnTimeout (self, interest):
try:
if self.onTimeout:
self.onTimeout (Interest (interest=interest))
except Exception, e:
traceback.print_exc()
class OnInterestConvert (object):
def __init__ (self, onInterest):
self.onInterest = onInterest
def __call__ (self, name, interest):
try:
if self.onInterest:
self.onInterest (Name (name = name), Interest (interest = interest))
except Exception, e:
traceback.print_exc()
| gpl-3.0 | -5,620,824,200,922,968,000 | 30.064 | 106 | 0.617306 | false |
MelbourneGenomics/UcscBed | ucsc_bed/__init__.py | 1 | 4224 | #!/usr/bin/env python3
import argparse
import ftplib
import gzip
import functools
import io
import pandas as pd
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('reference',
help='The version of the reference genome to use. Must start with "hg", e.g. "hg38"')
parser.add_argument('--limit', '-l', type=int, required=False,
help='The maximum number of transcripts to use as a data source (defaults to all)')
parser.add_argument('--email', '-e', required=False, help='The email address to use when logging onto the ftp site')
parser.add_argument('--method', '-m', choices=['ftp', 'sql'],
help='The method to use to obtain the transcript information', default='sql')
parser.add_argument('--strip-alt', '-a', required=False, default=False, action='store_true',
help='Strip the exons on alternative contigs (e.g. from HG38 onwards)')
return parser.parse_args()
def download_table(reference, email):
# Download the zip file into memory
file = io.BytesIO()
ftp = ftplib.FTP('hgdownload.cse.ucsc.edu', user='anonymous', passwd=email)
ftp.retrbinary(f'RETR goldenPath/{reference}/database/refFlat.txt.gz', file.write)
# Rewind the file
file.seek(0)
# Return an unzipped stream
gz = gzip.GzipFile(fileobj=file)
df = (pd.read_csv(gz, sep='\t',
names=["geneName", "name", "chrom", "strand", "txStart", "txEnd", "cdsStart", "cdsEnd",
"exonCount", "exonStarts", "exonEnds"])
.ix[:, ['chrom', 'exonStarts', 'exonEnds', 'geneName']]
)
return df
def query_table(reference, limit):
# First, read the file in as a data frame
query_str = 'SELECT chrom, exonStarts, exonEnds, geneName from refFlat'
if limit:
        query_str += f' LIMIT {limit};'
else:
query_str += ';'
df = pd.read_sql(query_str,
con=f'mysql+mysqldb://[email protected]/{reference}?charset=utf8&use_unicode=1')
df['exonStarts'] = df['exonStarts'].str.decode('utf-8')
df['exonEnds'] = df['exonEnds'].str.decode('utf-8')
return df
def convert_to_bed(left, strip_alt):
# Split the start and end of the exons into separate series each
right_components = [
left[col]
.str
.split(',', expand=True)
.stack()
.replace('', pd.np.nan)
.dropna()
.astype(pd.np.int32)
.to_frame()
.rename(columns={0: col[0:-1]})
for col in ['exonStarts', 'exonEnds']
]
# Merge these two series into one data frame based on both of their indices, then drop the index that indicates
# the index of the exon for this transcript (because we don't need it anymore)
right = (
functools
.reduce(lambda a, b: a.join(b) if a is not None else b, right_components)
.reset_index(level=1, drop=True)
)
# Merge the exon data frame with the main data frame on the index that indicates the original row number
df = (
left.
ix[:, ['chrom', 'geneName']]
.merge(right, left_index=True, right_index=True)
.sort_values(by=['chrom', 'exonStart', 'exonEnd'])
)
if strip_alt:
df = df[~df.chrom.str.contains('_alt')]
return df.to_csv(sep='\t', columns=['chrom', 'exonStart', 'exonEnd', 'geneName'], index=False, header=False)
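# --- Illustrative example (editor's note): hedged sketch of the split/stack
# step above on one made-up refFlat row.
# input row:   chrom='chr1', exonStarts='100,300,', exonEnds='200,400,', geneName='GENE1'
# output rows: chr1  100  200  GENE1
#              chr1  300  400  GENE1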
def generate_bed(reference, method, limit=None, email=None, strip_alt=False):
"""
Python entry point. Takes a reference string and an email and returns the bed file as a string
:param reference: The genome reference to use to generate a BED file. Must start with "hg", e.g. "hg38"
"""
if method == 'sql':
df = query_table(reference, limit)
else:
df = download_table(reference, email)
return convert_to_bed(df, strip_alt)
def main():
"""
Command line entry point. Has no python parameters, so parses its parameters from argv
"""
args = get_args()
bed = generate_bed(**vars(args))
print(bed)
if __name__ == '__main__':
main()
| gpl-3.0 | -1,142,844,148,849,937,200 | 33.341463 | 120 | 0.602273 | false |
mola/qgis | python/plugins/fTools/tools/doVectorGrid.py | 3 | 8539 | # -*- coding: utf-8 -*-
#-----------------------------------------------------------
#
# Generate Vector Grid
#
# A QGIS plugin for generating a line or polygon grid
#
# Copyright (C) 2008 Carson Farmer
#
# EMAIL: carson.farmer (at) gmail.com
# WEB : www.geog.uvic.ca/spar/carson
#
#-----------------------------------------------------------
#
# licensed under the terms of GNU GPL 2
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#---------------------------------------------------------------------
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import ftools_utils
from qgis.core import *
from ui_frmVectorGrid import Ui_Dialog
class Dialog(QDialog, Ui_Dialog):
def __init__(self, iface):
QDialog.__init__(self)
self.iface = iface
self.setupUi(self)
QObject.connect(self.toolOut, SIGNAL("clicked()"), self.outFile)
QObject.connect(self.spnX, SIGNAL("valueChanged(double)"), self.offset)
#QObject.connect(self.inShape, SIGNAL("currentIndexChanged(QString)"), self.updateInput)
QObject.connect(self.btnUpdate, SIGNAL("clicked()"), self.updateLayer)
QObject.connect(self.btnCanvas, SIGNAL("clicked()"), self.updateCanvas)
self.buttonOk = self.buttonBox_2.button( QDialogButtonBox.Ok )
self.setWindowTitle(self.tr("Vector grid"))
self.xMin.setValidator(QDoubleValidator(self.xMin))
self.xMax.setValidator(QDoubleValidator(self.xMax))
self.yMin.setValidator(QDoubleValidator(self.yMin))
self.yMax.setValidator(QDoubleValidator(self.yMax))
layermap = QgsMapLayerRegistry.instance().mapLayers()
for name, layer in layermap.iteritems():
self.inShape.addItem( unicode( layer.name() ) )
def offset(self, value):
if self.chkLock.isChecked():
self.spnY.setValue(value)
def updateLayer( self ):
mLayerName = self.inShape.currentText()
if not mLayerName == "":
mLayer = ftools_utils.getMapLayerByName( unicode( mLayerName ) )
boundBox = mLayer.extent()
self.updateExtents( boundBox )
def updateCanvas( self ):
canvas = self.iface.mapCanvas()
boundBox = canvas.extent()
self.updateExtents( boundBox )
def updateExtents( self, boundBox ):
self.xMin.setText( unicode( boundBox.xMinimum() ) )
self.yMin.setText( unicode( boundBox.yMinimum() ) )
self.xMax.setText( unicode( boundBox.xMaximum() ) )
self.yMax.setText( unicode( boundBox.yMaximum() ) )
def accept(self):
self.buttonOk.setEnabled( False )
if self.xMin.text() == "" or self.xMax.text() == "" or self.yMin.text() == "" or self.yMax.text() == "":
QMessageBox.information(self, self.tr("Vector grid"), self.tr("Please specify valid extent coordinates"))
elif self.outShape.text() == "":
QMessageBox.information(self, self.tr("Vector grid"), self.tr("Please specify output shapefile"))
else:
try:
boundBox = QgsRectangle(
float( self.xMin.text() ),
float( self.yMin.text() ),
float( self.xMax.text() ),
float( self.yMax.text() ) )
            except:
                QMessageBox.information(self, self.tr("Vector grid"), self.tr("Invalid extent coordinates entered"))
                self.buttonOk.setEnabled( True )
                return
xSpace = self.spnX.value()
ySpace = self.spnY.value()
if self.rdoPolygons.isChecked(): polygon = True
else: polygon = False
self.outShape.clear()
self.compute( boundBox, xSpace, ySpace, polygon )
addToTOC = QMessageBox.question(self, self.tr("Generate Vector Grid"), self.tr("Created output shapefile:\n%1\n\nWould you like to add the new layer to the TOC?").arg(unicode(self.shapefileName)), QMessageBox.Yes, QMessageBox.No, QMessageBox.NoButton)
if addToTOC == QMessageBox.Yes:
ftools_utils.addShapeToCanvas( self.shapefileName )
self.progressBar.setValue( 0 )
self.buttonOk.setEnabled( True )
def compute( self, bound, xOffset, yOffset, polygon ):
crs = self.iface.mapCanvas().mapRenderer().destinationSrs()
if not crs.isValid(): crs = None
if polygon:
fields = {0:QgsField("ID", QVariant.Int), 1:QgsField("XMIN", QVariant.Double), 2:QgsField("XMAX", QVariant.Double),
3:QgsField("YMIN", QVariant.Double), 4:QgsField("YMAX", QVariant.Double)}
check = QFile(self.shapefileName)
if check.exists():
if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
return
writer = QgsVectorFileWriter(self.shapefileName, self.encoding, fields, QGis.WKBPolygon, crs)
#writer = QgsVectorFileWriter(outPath, "CP1250", fields, QGis.WKBPolygon, None)
else:
fields = {0:QgsField("ID", QVariant.Int), 1:QgsField("COORD", QVariant.Double)}
check = QFile(self.shapefileName)
if check.exists():
if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
return
writer = QgsVectorFileWriter(self.shapefileName, self.encoding, fields, QGis.WKBLineString, crs)
#writer = QgsVectorFileWriter(unicode(outPath), "CP1250", fields, QGis.WKBLineString, None)
outFeat = QgsFeature()
outGeom = QgsGeometry()
idVar = 0
self.progressBar.setRange( 0, 0 )
if not polygon:
y = bound.yMaximum()
while y >= bound.yMinimum():
pt1 = QgsPoint(bound.xMinimum(), y)
pt2 = QgsPoint(bound.xMaximum(), y)
line = [pt1, pt2]
outFeat.setGeometry(outGeom.fromPolyline(line))
outFeat.addAttribute(0, QVariant(idVar))
outFeat.addAttribute(1, QVariant(y))
writer.addFeature(outFeat)
y = y - yOffset
idVar = idVar + 1
x = bound.xMinimum()
while x <= bound.xMaximum():
pt1 = QgsPoint(x, bound.yMaximum())
pt2 = QgsPoint(x, bound.yMinimum())
line = [pt1, pt2]
outFeat.setGeometry(outGeom.fromPolyline(line))
outFeat.addAttribute(0, QVariant(idVar))
outFeat.addAttribute(1, QVariant(x))
writer.addFeature(outFeat)
x = x + xOffset
idVar = idVar + 1
else:
y = bound.yMaximum()
while y >= bound.yMinimum():
x = bound.xMinimum()
while x <= bound.xMaximum():
pt1 = QgsPoint(x, y)
pt2 = QgsPoint(x + xOffset, y)
pt3 = QgsPoint(x + xOffset, y - yOffset)
pt4 = QgsPoint(x, y - yOffset)
pt5 = QgsPoint(x, y)
polygon = [[pt1, pt2, pt3, pt4, pt5]]
outFeat.setGeometry(outGeom.fromPolygon(polygon))
outFeat.addAttribute(0, QVariant(idVar))
outFeat.addAttribute(1, QVariant(x))
outFeat.addAttribute(2, QVariant(x + xOffset))
outFeat.addAttribute(3, QVariant(y - yOffset))
outFeat.addAttribute(4, QVariant(y))
writer.addFeature(outFeat)
idVar = idVar + 1
x = x + xOffset
y = y - yOffset
self.progressBar.setRange( 0, 100 )
del writer
def outFile(self):
self.outShape.clear()
( self.shapefileName, self.encoding ) = ftools_utils.saveDialog( self )
if self.shapefileName is None or self.encoding is None:
return
self.outShape.setText( QString( self.shapefileName ) )
| gpl-2.0 | -7,316,185,131,752,502,000 | 45.407609 | 263 | 0.585666 | false |
atsaki/ansible-modules-extras | cloud/misc/virt.py | 72 | 14049 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Virt management features
Copyright 2007, 2012 Red Hat, Inc
Michael DeHaan <[email protected]>
Seth Vidal <[email protected]>
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: virt
short_description: Manages virtual machines supported by libvirt
description:
- Manages virtual machines supported by I(libvirt).
version_added: "0.2"
options:
name:
description:
- name of the guest VM being managed. Note that VM must be previously
defined with xml.
required: true
default: null
aliases: []
state:
description:
- Note that there may be some lag for state requests like C(shutdown)
since these refer only to VM states. After starting a guest, it may not
be immediately accessible.
required: false
choices: [ "running", "shutdown", "destroyed", "paused" ]
    default: null
command:
description:
- in addition to state management, various non-idempotent commands are available. See examples
required: false
choices: ["create","status", "start", "stop", "pause", "unpause",
"shutdown", "undefine", "destroy", "get_xml", "autostart",
"freemem", "list_vms", "info", "nodeinfo", "virttype", "define"]
uri:
description:
- libvirt connection uri
required: false
    default: qemu:///system
xml:
description:
- XML document used with the define command
required: false
default: null
requirements:
- "python >= 2.6"
- "libvirt-python"
author:
- "Ansible Core Team"
- "Michael DeHaan"
- "Seth Vidal"
'''
EXAMPLES = '''
# a playbook task line:
- virt: name=alpha state=running
# /usr/bin/ansible invocations
ansible host -m virt -a "name=alpha command=status"
ansible host -m virt -a "name=alpha command=get_xml"
ansible host -m virt -a "name=alpha command=create uri=lxc:///"
# a playbook example of defining and launching an LXC guest
tasks:
- name: define vm
virt: name=foo
command=define
xml="{{ lookup('template', 'container-template.xml.j2') }}"
uri=lxc:///
- name: start vm
virt: name=foo state=running uri=lxc:///
'''
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE=2
import sys
try:
import libvirt
except ImportError:
HAS_VIRT = False
else:
HAS_VIRT = True
ALL_COMMANDS = []
VM_COMMANDS = ['create','status', 'start', 'stop', 'pause', 'unpause',
'shutdown', 'undefine', 'destroy', 'get_xml', 'autostart', 'define']
HOST_COMMANDS = ['freemem', 'list_vms', 'info', 'nodeinfo', 'virttype']
ALL_COMMANDS.extend(VM_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)
VIRT_STATE_NAME_MAP = {
0 : "running",
1 : "running",
2 : "running",
3 : "paused",
4 : "shutdown",
5 : "shutdown",
6 : "crashed"
}
class VMNotFound(Exception):
pass
class LibvirtConnection(object):
def __init__(self, uri, module):
self.module = module
cmd = "uname -r"
rc, stdout, stderr = self.module.run_command(cmd)
if "xen" in stdout:
conn = libvirt.open(None)
else:
conn = libvirt.open(uri)
if not conn:
raise Exception("hypervisor connection failure")
self.conn = conn
def find_vm(self, vmid):
"""
Extra bonus feature: vmid = -1 returns a list of everything
"""
conn = self.conn
vms = []
# this block of code borrowed from virt-manager:
# get working domain's name
ids = conn.listDomainsID()
for id in ids:
vm = conn.lookupByID(id)
vms.append(vm)
# get defined domain
names = conn.listDefinedDomains()
for name in names:
vm = conn.lookupByName(name)
vms.append(vm)
if vmid == -1:
return vms
for vm in vms:
if vm.name() == vmid:
return vm
raise VMNotFound("virtual machine %s not found" % vmid)
def shutdown(self, vmid):
return self.find_vm(vmid).shutdown()
    def pause(self, vmid):
        return self.suspend(vmid)
    def unpause(self, vmid):
        return self.resume(vmid)
def suspend(self, vmid):
return self.find_vm(vmid).suspend()
def resume(self, vmid):
return self.find_vm(vmid).resume()
def create(self, vmid):
return self.find_vm(vmid).create()
def destroy(self, vmid):
return self.find_vm(vmid).destroy()
def undefine(self, vmid):
return self.find_vm(vmid).undefine()
def get_status2(self, vm):
state = vm.info()[0]
return VIRT_STATE_NAME_MAP.get(state,"unknown")
def get_status(self, vmid):
state = self.find_vm(vmid).info()[0]
return VIRT_STATE_NAME_MAP.get(state,"unknown")
def nodeinfo(self):
return self.conn.getInfo()
def get_type(self):
return self.conn.getType()
def get_xml(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.XMLDesc(0)
def get_maxVcpus(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.maxVcpus()
def get_maxMemory(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.maxMemory()
def getFreeMemory(self):
return self.conn.getFreeMemory()
def get_autostart(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.autostart()
def set_autostart(self, vmid, val):
vm = self.conn.lookupByName(vmid)
return vm.setAutostart(val)
def define_from_xml(self, xml):
return self.conn.defineXML(xml)
class Virt(object):
def __init__(self, uri, module):
self.module = module
self.uri = uri
def __get_conn(self):
self.conn = LibvirtConnection(self.uri, self.module)
return self.conn
def get_vm(self, vmid):
self.__get_conn()
return self.conn.find_vm(vmid)
def state(self):
vms = self.list_vms()
state = []
for vm in vms:
state_blurb = self.conn.get_status(vm)
state.append("%s %s" % (vm,state_blurb))
return state
def info(self):
vms = self.list_vms()
info = dict()
for vm in vms:
data = self.conn.find_vm(vm).info()
# libvirt returns maxMem, memory, and cpuTime as long()'s, which
# xmlrpclib tries to convert to regular int's during serialization.
# This throws exceptions, so convert them to strings here and
# assume the other end of the xmlrpc connection can figure things
# out or doesn't care.
info[vm] = {
"state" : VIRT_STATE_NAME_MAP.get(data[0],"unknown"),
"maxMem" : str(data[1]),
"memory" : str(data[2]),
"nrVirtCpu" : data[3],
"cpuTime" : str(data[4]),
}
info[vm]["autostart"] = self.conn.get_autostart(vm)
return info
def nodeinfo(self):
self.__get_conn()
info = dict()
data = self.conn.nodeinfo()
info = {
"cpumodel" : str(data[0]),
"phymemory" : str(data[1]),
"cpus" : str(data[2]),
"cpumhz" : str(data[3]),
"numanodes" : str(data[4]),
"sockets" : str(data[5]),
"cpucores" : str(data[6]),
"cputhreads" : str(data[7])
}
return info
def list_vms(self, state=None):
self.conn = self.__get_conn()
vms = self.conn.find_vm(-1)
results = []
for x in vms:
try:
if state:
vmstate = self.conn.get_status2(x)
if vmstate == state:
results.append(x.name())
else:
results.append(x.name())
except:
pass
return results
def virttype(self):
return self.__get_conn().get_type()
def autostart(self, vmid):
self.conn = self.__get_conn()
return self.conn.set_autostart(vmid, True)
def freemem(self):
self.conn = self.__get_conn()
return self.conn.getFreeMemory()
def shutdown(self, vmid):
""" Make the machine with the given vmid stop running. Whatever that takes. """
self.__get_conn()
self.conn.shutdown(vmid)
return 0
def pause(self, vmid):
""" Pause the machine with the given vmid. """
self.__get_conn()
return self.conn.suspend(vmid)
def unpause(self, vmid):
""" Unpause the machine with the given vmid. """
self.__get_conn()
return self.conn.resume(vmid)
def create(self, vmid):
""" Start the machine via the given vmid """
self.__get_conn()
return self.conn.create(vmid)
def start(self, vmid):
""" Start the machine via the given id/name """
self.__get_conn()
return self.conn.create(vmid)
def destroy(self, vmid):
""" Pull the virtual power from the virtual domain, giving it virtually no time to virtually shut down. """
self.__get_conn()
return self.conn.destroy(vmid)
def undefine(self, vmid):
""" Stop a domain, and then wipe it from the face of the earth. (delete disk/config file) """
self.__get_conn()
return self.conn.undefine(vmid)
def status(self, vmid):
"""
Return a state suitable for server consumption. Aka, codes.py values, not XM output.
"""
self.__get_conn()
return self.conn.get_status(vmid)
def get_xml(self, vmid):
"""
Receive a Vm id as input
Return an xml describing vm config returned by a libvirt call
"""
self.__get_conn()
return self.conn.get_xml(vmid)
def get_maxVcpus(self, vmid):
"""
Gets the max number of VCPUs on a guest
"""
self.__get_conn()
return self.conn.get_maxVcpus(vmid)
def get_max_memory(self, vmid):
"""
Gets the max memory on a guest
"""
self.__get_conn()
        return self.conn.get_maxMemory(vmid)
def define(self, xml):
"""
Define a guest with the given xml
"""
self.__get_conn()
return self.conn.define_from_xml(xml)
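# --- Illustrative usage (editor's note): hedged sketch of driving Virt
# outside Ansible; assumes a reachable libvirt daemon and a module-like
# object that provides run_command().
#   v = Virt('qemu:///system', module)
#   v.list_vms()       # -> e.g. ['alpha', 'beta']
#   v.status('alpha')  # -> 'running'
#   v.shutdown('alpha')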
def core(module):
state = module.params.get('state', None)
guest = module.params.get('name', None)
command = module.params.get('command', None)
uri = module.params.get('uri', None)
xml = module.params.get('xml', None)
v = Virt(uri, module)
res = {}
if state and command=='list_vms':
res = v.list_vms(state=state)
if type(res) != dict:
res = { command: res }
return VIRT_SUCCESS, res
if state:
if not guest:
module.fail_json(msg = "state change requires a guest specified")
res['changed'] = False
if state == 'running':
            if v.status(guest) == 'paused':
res['changed'] = True
res['msg'] = v.unpause(guest)
            elif v.status(guest) != 'running':
res['changed'] = True
res['msg'] = v.start(guest)
elif state == 'shutdown':
            if v.status(guest) != 'shutdown':
res['changed'] = True
res['msg'] = v.shutdown(guest)
elif state == 'destroyed':
            if v.status(guest) != 'shutdown':
res['changed'] = True
res['msg'] = v.destroy(guest)
elif state == 'paused':
            if v.status(guest) == 'running':
res['changed'] = True
res['msg'] = v.pause(guest)
else:
module.fail_json(msg="unexpected state")
return VIRT_SUCCESS, res
if command:
if command in VM_COMMANDS:
if not guest:
module.fail_json(msg = "%s requires 1 argument: guest" % command)
if command == 'define':
if not xml:
module.fail_json(msg = "define requires xml argument")
try:
v.get_vm(guest)
except VMNotFound:
v.define(xml)
res = {'changed': True, 'created': guest}
return VIRT_SUCCESS, res
res = getattr(v, command)(guest)
if type(res) != dict:
res = { command: res }
return VIRT_SUCCESS, res
elif hasattr(v, command):
res = getattr(v, command)()
if type(res) != dict:
res = { command: res }
return VIRT_SUCCESS, res
else:
            module.fail_json(msg="Command %s not recognized" % command)
module.fail_json(msg="expected state or command parameter to be specified")
def main():
module = AnsibleModule(argument_spec=dict(
name = dict(aliases=['guest']),
state = dict(choices=['running', 'shutdown', 'destroyed', 'paused']),
command = dict(choices=ALL_COMMANDS),
uri = dict(default='qemu:///system'),
xml = dict(),
))
if not HAS_VIRT:
module.fail_json(
msg='The `libvirt` module is not importable. Check the requirements.'
)
rc = VIRT_SUCCESS
try:
rc, result = core(module)
except Exception, e:
module.fail_json(msg=str(e))
if rc != 0: # something went wrong emit the msg
module.fail_json(rc=rc, msg=result)
else:
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 | 1,615,391,288,458,508,300 | 26.875 | 116 | 0.556552 | false |
VinaCorp/HV | medcal/migrations/0001_initial.py | 1 | 3248 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-13 14:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Agenda',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datahora', models.DateTimeField()),
],
),
migrations.CreateModel(
name='Especialidade',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nome', models.CharField(blank=True, max_length=45, null=True)),
],
),
migrations.CreateModel(
name='Localizacao',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cep', models.CharField(blank=True, max_length=9, null=True)),
('rua', models.CharField(blank=True, max_length=45, null=True)),
('num', models.CharField(blank=True, max_length=5, null=True)),
('compl', models.CharField(blank=True, max_length=45, null=True)),
('bairro', models.CharField(blank=True, max_length=45, null=True)),
('cidade', models.CharField(blank=True, max_length=45, null=True)),
('estado', models.CharField(blank=True, max_length=2, null=True)),
],
),
migrations.CreateModel(
name='Medico',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nome', models.CharField(blank=True, max_length=45, null=True)),
('especialidade', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='medcal.Especialidade')),
],
),
migrations.CreateModel(
name='Paciente',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nome', models.CharField(blank=True, max_length=45, null=True)),
],
),
migrations.AddField(
model_name='localizacao',
name='medico',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='medcal.Medico'),
),
migrations.AddField(
model_name='agenda',
name='medico',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='medcal.Medico'),
),
migrations.AddField(
model_name='agenda',
name='paciente',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='medcal.Paciente'),
),
migrations.AlterUniqueTogether(
name='agenda',
unique_together=set([('id', 'medico', 'datahora')]),
),
]
| isc | 1,665,061,046,442,569,500 | 40.641026 | 148 | 0.563116 | false |
henrysher/yum | yum/failover.py | 12 | 5125 | #!/usr/bin/python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Copyright 2003 Jack Neely, NC State University
# Here we define a base class for failover methods. The idea here is that each
# failover method uses a class derived from the base class so yum only has to
# worry about calling get_serverurl() and server_failed() and these classes will
# figure out which URL to cough up based on the failover method.
"""Classes for handling failovers for server URLs."""
import random
class baseFailOverMethod:
"""A base class to provide a failover to switch to a new server if
the current one fails.
"""
def __init__(self, repo):
self.repo = repo
self.failures = 0
def get_serverurl(self, i=None):
"""Return a server URL based on this failover method, or None
if there is a complete failure. This method should always be
used to translate an index into a URL, as this object may
change how indexes map.
:param i: if given, this is the index of the server URL to
return, instead of using the failures counter
:return: the next server URL
"""
return None
def server_failed(self):
"""Notify the failover method that the current server has
failed.
"""
self.failures = self.failures + 1
def reset(self, i=0):
"""Reset the failures counter to the given index.
:param i: the index to reset the failures counter to
"""
self.failures = i
def get_index(self):
"""Return the current number of failures, which is also the
current index into the list of URLs that this object
        represents. :func:`get_serverurl` should always be used to
translate an index into a URL, as this object may change how
indexes map.
:return: the current number of failures, which is also the
current index
"""
return self.failures
def len(self):
"""Return the total number of URLs available to cycle through
in this object.
:return: the total number of URLs available
"""
return len(self.repo.urls)
class priority(baseFailOverMethod):
"""A class to provide a failover to switch to a new server
if the current one fails. This classes chooses the next server
based on the first success in the list of servers.
"""
def get_serverurl(self, i=None):
"""Return the next successful server URL in the list, or None
if there is a complete failure. This method should always be
used to translate an index into a URL, as this object may
change how indexes map.
:param i: if given, this is the index of the server URL to
return, instead of using the failures counter
:return: the next server URL
"""
        if i is None:
index = self.failures
else:
index = i
if index >= len(self.repo.urls):
return None
return self.repo.urls[index]
class roundRobin(baseFailOverMethod):
"""A class to provide a failover to switch to a new server
if the current one fails. When an object of this class is
created, it selects a random place in the list of URLs to begin
    with, then each time :func:`get_serverurl` is called, the next URL
in the list is returned, cycling back to the beginning of the list
after the end is reached.
"""
def __init__(self, repo):
baseFailOverMethod.__init__(self, repo)
random.seed()
self.offset = random.randint(0, 37)
def get_serverurl(self, i=None):
"""Return the next successful server URL in the list, using
the round robin scheme, or None if there is a complete
failure. This method should always be used to translate an
index into a URL, as this object may change how indexes map.
:param i: if given, this is the index of the server URL to
return, instead of using the failures counter
:return: the next server URL
"""
        if i is None:
index = self.failures
else:
index = i
if index >= len(self.repo.urls):
return None
rr = (index + self.offset) % len(self.repo.urls)
return self.repo.urls[rr]
# SDG
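# Illustrative use of a failover method (a sketch only; `repo` stands in
# for a repository object exposing a `urls` list, and `download_from` is
# a hypothetical helper, not part of yum):
#
#   fo = roundRobin(repo)
#   url = fo.get_serverurl()
#   while url is not None:
#       if download_from(url):
#           break
#       fo.server_failed()
#       url = fo.get_serverurl()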
| gpl-2.0 | 1,171,450,252,914,406,000 | 35.091549 | 81 | 0.638439 | false |
jbogaardt/chainladder-python | chainladder/development/learning.py | 1 | 8140 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import numpy as np
import pandas as pd
from sklearn.preprocessing import OneHotEncoder, StandardScaler, PolynomialFeatures
from sklearn.compose import ColumnTransformer
from chainladder.development.base import DevelopmentBase
from chainladder import ULT_VAL
class DevelopmentML(DevelopmentBase):
""" A Estimator that interfaces with machine learning (ML) tools that implement
the scikit-learn API.
The `DevelopmentML` estimator is used to generate ``ldf_`` patterns from
the data.
.. versionadded:: 0.8.1
Parameters
----------
    estimator_ml : sklearn Estimator
        Any sklearn-compatible regression estimator, including Pipelines and
        GridSearchCV.
y_ml : list or str or sklearn_transformer
The response column(s) for the machine learning algorithm. It must be
present within the Triangle.
autoregressive : tuple, (autoregressive_col_name, lag, source_col_name)
The subset of response column(s) to use as lagged features for the
Time Series aspects of the model. Predictions from one development period
        get used as features in the next development period. Lags should be negative
integers.
    fit_incrementals : bool
Whether the response variable should be converted to an incremental basis
for fitting.
Attributes
----------
estimator_ml : Estimator
An sklearn-style estimator to predict development patterns
ldf_ : Triangle
The estimated loss development patterns.
cdf_ : Triangle
The estimated cumulative development patterns.
"""
def __init__(self, estimator_ml=None, y_ml=None, autoregressive=False,
weight_ml=None, fit_incrementals=True):
self.estimator_ml=estimator_ml
self.y_ml=y_ml
self.weight_ml = weight_ml
self.autoregressive=autoregressive
self.fit_incrementals=fit_incrementals
    def _get_y_names(self):
        """ private function to get the response column name(s)"""
        if isinstance(self.y_ml, list):
            return self.y_ml
        if isinstance(self.y_ml, str):
            return [self.y_ml]
        if hasattr(self.y_ml, '_columns'):
            return self.y_ml._columns
        if isinstance(self.y_ml, ColumnTransformer):
            return self.y_ml.transformers[0][-1]
        # default: fall back to the Triangle's own columns
        return self._columns
@property
def y_ml_(self):
defaults = self._get_y_names()
transformer = self.y_ml
if not transformer:
return ColumnTransformer(
transformers=[('passthrough', 'passthrough', defaults)])
elif type(transformer) is list:
return ColumnTransformer(
transformers=[('passthrough', 'passthrough', transformer)])
elif type(transformer) is str:
return ColumnTransformer(
transformers=[('passthrough', 'passthrough', [transformer])])
else:
return transformer
def _get_triangle_ml(self, df, preds=None):
""" Create fitted Triangle """
from chainladder.core import Triangle
if preds is None:
preds = self.estimator_ml.predict(df)
X_r = [df]
y_r = [preds]
dgrain = {'Y':12, 'Q':3, 'M': 1}[self.development_grain_]
ograin = {'Y':1, 'Q':4, 'M': 12}[self.origin_grain_]
latest_filter = (df['origin']+1)*ograin+(df['development']-dgrain)/dgrain
latest_filter = latest_filter == latest_filter.max()
preds=pd.DataFrame(preds.copy())[latest_filter].values
out = df.loc[latest_filter].copy()
dev_lags = df['development'].drop_duplicates().sort_values()
for d in dev_lags[1:]:
out['development'] = out['development'] + dgrain
out['valuation'] = out['valuation'] + dgrain / 12
if len(preds.shape) == 1:
preds = preds[:, None]
if self.autoregressive:
for num, col in enumerate(self.autoregressive):
out[col[0]]=preds[:, num]
out = out[out['development']<=dev_lags.max()]
if len(out) == 0:
continue
X_r.append(out.copy())
preds = self.estimator_ml.predict(out)
y_r.append(preds.copy())
X_r = pd.concat(X_r, 0).reset_index(drop=True)
        X_r = X_r.drop(self._get_y_names(), 1)
out = pd.concat((X_r,
pd.DataFrame(np.concatenate(y_r, 0), columns=self._get_y_names())),1)
out['origin'] = out['origin'].map({v: k for k, v in self.origin_encoder_.items()})
out['valuation'] = out['valuation'].map({v: k for k, v in self.valuation_encoder_.items()})
return Triangle(
out, origin='origin', development='valuation',
index=self._key_labels, columns=self._get_y_names(),
cumulative=not self.fit_incrementals).dropna()
def _prep_X_ml(self, X):
""" Preps Triangle data ahead of the pipeline """
if self.fit_incrementals:
X_ = X.cum_to_incr()
else:
X_ = X.copy()
if self.autoregressive:
for i in self.autoregressive:
lag = X[i[2]].shift(i[1])
X_[i[0]] = lag[lag.valuation<=X.valuation_date]
df_base = X.incr_to_cum().to_frame(keepdims=True, implicit_axis=True).reset_index().iloc[:, :-1]
df = df_base.merge(
X.cum_to_incr().to_frame(keepdims=True, implicit_axis=True).reset_index(), how='left',
on=list(df_base.columns)).fillna(0)
df['origin'] = df['origin'].map(self.origin_encoder_)
df['valuation'] = df['valuation'].map(self.valuation_encoder_)
return df
def fit(self, X, y=None, sample_weight=None):
"""Fit the model with X.
Parameters
----------
        X : Triangle-like
            The Triangle on which the ML development patterns will be fit.
y : None
            Ignored, use y_ml to set a response variable for the ML algorithm
sample_weight : None
Ignored
Returns
-------
self : object
Returns the instance itself.
"""
self._columns = list(X.columns)
self._key_labels = X.key_labels
self.origin_grain_ = X.origin_grain
self.development_grain_ = X.development_grain
self.origin_encoder_ = dict(zip(
X.origin.to_timestamp(how='s'),
(pd.Series(X.origin).rank()-1)/{'Y':1, 'Q':4, 'M': 12}[X.origin_grain]))
val = X.valuation.sort_values().unique()
self.valuation_encoder_ = dict(zip(
val,
(pd.Series(val).rank()-1)/{'Y':1, 'Q':4, 'M': 12}[X.development_grain]))
df = self._prep_X_ml(X)
self.df_ = df
# Fit model
self.estimator_ml.fit(df, self.y_ml_.fit_transform(df).squeeze())
self.triangle_ml_ = self._get_triangle_ml(df)
return self
@property
def ldf_(self):
ldf = self.triangle_ml_.incr_to_cum().link_ratio
ldf.valuation_date = pd.to_datetime(ULT_VAL)
return ldf
def transform(self, X):
""" If X and self are of different shapes, align self to X, else
return self.
Parameters
----------
X : Triangle
The triangle to be transformed
Returns
-------
X_new : New triangle with transformed attributes.
"""
X_new = X.copy()
X_ml = self._prep_X_ml(X)
y_ml=self.estimator_ml.predict(X_ml)
triangle_ml = self._get_triangle_ml(X_ml, y_ml)
backend = "cupy" if X.array_backend == "cupy" else "numpy"
X_new.ldf_ = triangle_ml.incr_to_cum().link_ratio.set_backend(backend)
X_new.ldf_.valuation_date = pd.to_datetime(ULT_VAL)
X_new._set_slicers()
return X_new
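# A minimal usage sketch (assumptions flagged: `load_sample` and the sklearn
# pieces below follow their public APIs, but the feature selection is
# illustrative rather than the library's documented recipe):
#
#   import chainladder as cl
#   from sklearn.compose import ColumnTransformer
#   from sklearn.ensemble import RandomForestRegressor
#   from sklearn.pipeline import Pipeline
#
#   genins = cl.load_sample('genins')
#   feats = ColumnTransformer([('num', 'passthrough',
#                               ['origin', 'development'])])
#   model = Pipeline([('feats', feats), ('rf', RandomForestRegressor())])
#   dev = DevelopmentML(estimator_ml=model).fit(genins)
#   dev.ldf_   # fitted development patterns as a Triangle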
| mit | 6,286,292,862,475,518,000 | 37.578199 | 104 | 0.583292 | false |
gangadharkadam/vlinkfrappe | frappe/utils/bench_helper.py | 9 | 1943 | from __future__ import unicode_literals
import click
import frappe
import os
import json
import importlib
import frappe.utils
def main():
commands = get_app_groups()
commands.update({'get-frappe-commands': get_frappe_commands,
'get-frappe-help': get_frappe_help
})
click.Group(commands=commands)(prog_name='bench')
def get_app_groups():
ret = {}
for app in ["frappe"]: #get_apps():
app_group = get_app_group(app)
if app_group:
ret[app] = app_group
return ret
def get_app_group(app):
app_commands = get_app_commands(app)
if app_commands:
return click.group(name=app, commands=app_commands)(app_group)
@click.option('--site')
@click.option('--profile', is_flag=True, default=False, help='Profile')
@click.option('--verbose', is_flag=True, default=False, help='Verbose')
@click.option('--force', is_flag=True, default=False, help='Force')
@click.pass_context
def app_group(ctx, site, force, verbose, profile):
ctx.obj = {
'sites': get_sites(site),
'force': force,
'verbose': verbose,
'profile': profile
}
if ctx.info_name == 'frappe':
ctx.info_name = ''
def get_sites(site_arg):
if site_arg and site_arg == 'all':
return frappe.utils.get_sites()
else:
if site_arg:
return [site_arg]
if os.path.exists('currentsite.txt'):
with open('currentsite.txt') as f:
return [f.read().strip()]
def get_app_commands(app):
try:
app_command_module = importlib.import_module(app + '.commands')
except ImportError:
		return {}
ret = {}
for command in getattr(app_command_module, 'commands', []):
ret[command.name] = command
return ret
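# For reference, an app opts into this loader by defining, in
# <app>/commands.py, a module-level `commands` list of click commands
# (a hedged illustration of the convention, not frappe's shipped code):
#
#   import click
#
#   @click.command('hello')
#   def hello():
#       click.echo('hello from my app')
#
#   commands = [hello]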
@click.command('get-frappe-commands')
def get_frappe_commands():
print json.dumps(get_app_commands('frappe').keys())
@click.command('get-frappe-help')
def get_frappe_help():
print click.Context(get_app_group('frappe')).get_help()
def get_apps():
return frappe.get_all_apps(with_internal_apps=False, sites_path='.')
if __name__ == "__main__":
main()
| mit | 537,282,905,177,141,400 | 23.910256 | 71 | 0.683994 | false |
Sweetgrassbuffalo/ReactionSweeGrass-v2 | .meteor/local/dev_bundle/python/Lib/macpath.py | 35 | 6289 | """Pathname and path-related operations for the Macintosh."""
import os
import warnings
from stat import *
import genericpath
from genericpath import *
from genericpath import _unicode
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
"basename","dirname","commonprefix","getsize","getmtime",
"getatime","getctime", "islink","exists","lexists","isdir","isfile",
"walk","expanduser","expandvars","normpath","abspath",
"curdir","pardir","sep","pathsep","defpath","altsep","extsep",
"devnull","realpath","supports_unicode_filenames"]
# strings representing various path-related bits and pieces
curdir = ':'
pardir = '::'
extsep = '.'
sep = ':'
pathsep = '\n'
defpath = ':'
altsep = None
devnull = 'Dev:Null'
# Normalize the case of a pathname. Dummy in Posix, but <s>.lower() here.
def normcase(path):
return path.lower()
def isabs(s):
"""Return true if a path is absolute.
On the Mac, relative paths begin with a colon,
but as a special case, paths with no colons at all are also relative.
Anything else is absolute (the string up to the first colon is the
volume name)."""
return ':' in s and s[0] != ':'
def join(s, *p):
path = s
for t in p:
if (not path) or isabs(t):
path = t
continue
if t[:1] == ':':
t = t[1:]
if ':' not in path:
path = ':' + path
if path[-1:] != ':':
path = path + ':'
path = path + t
return path
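# A few illustrative cases of the colon-based semantics above (classic
# Mac OS paths; easily checked by hand against the code):
#   join('Macintosh HD:Users', 'guido')  ->  'Macintosh HD:Users:guido'
#   join('doc', 'chapter1')              ->  ':doc:chapter1'   (relative)
#   join('a:b', ':c')                    ->  'a:b:c'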
def split(s):
"""Split a pathname into two parts: the directory leading up to the final
bit, and the basename (the filename, without colons, in that directory).
The result (s, t) is such that join(s, t) yields the original argument."""
if ':' not in s: return '', s
colon = 0
for i in range(len(s)):
if s[i] == ':': colon = i + 1
path, file = s[:colon-1], s[colon:]
    if path and ':' not in path:
path = path + ':'
return path, file
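# Illustrative splits, worked by hand from the code above; each pair
# round-trips through join():
#   split('Macintosh HD:Users:guido')  ->  ('Macintosh HD:Users', 'guido')
#   split('doc:')                      ->  ('doc:', '')
#   split('abc')                       ->  ('', 'abc')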
def splitext(p):
return genericpath._splitext(p, sep, altsep, extsep)
splitext.__doc__ = genericpath._splitext.__doc__
def splitdrive(p):
"""Split a pathname into a drive specification and the rest of the
path. Useful on DOS/Windows/NT; on the Mac, the drive is always
empty (don't use the volume name -- it doesn't have the same
syntactic and semantic oddities as DOS drive letters, such as there
being a separate current directory per drive)."""
return '', p
# Short interfaces to split()
def dirname(s): return split(s)[0]
def basename(s): return split(s)[1]
def ismount(s):
if not isabs(s):
return False
components = split(s)
return len(components) == 2 and components[1] == ''
def islink(s):
"""Return true if the pathname refers to a symbolic link."""
try:
import Carbon.File
return Carbon.File.ResolveAliasFile(s, 0)[2]
except:
return False
# Is `stat`/`lstat` a meaningful difference on the Mac? This is safe in any
# case.
def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
st = os.lstat(path)
except os.error:
return False
return True
def expandvars(path):
"""Dummy to retain interface-compatibility with other operating systems."""
return path
def expanduser(path):
"""Dummy to retain interface-compatibility with other operating systems."""
return path
class norm_error(Exception):
"""Path cannot be normalized"""
def normpath(s):
"""Normalize a pathname. Will return the same result for
equivalent paths."""
if ":" not in s:
return ":"+s
comps = s.split(":")
i = 1
while i < len(comps)-1:
if comps[i] == "" and comps[i-1] != "":
if i > 1:
del comps[i-1:i+1]
i = i - 1
else:
# best way to handle this is to raise an exception
raise norm_error, 'Cannot use :: immediately after volume name'
else:
i = i + 1
s = ":".join(comps)
# remove trailing ":" except for ":" and "Volume:"
if s[-1] == ":" and len(comps) > 2 and s != ":"*len(s):
s = s[:-1]
return s
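# Illustrative normalizations (worked by hand from the rules above):
#   normpath('abc')     ->  ':abc'   (colon-less names are relative)
#   normpath('a:b::c')  ->  'a:c'    ('::' backs up one directory)
#   normpath('a:')      ->  'a:'     (volume names keep their colon)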
def walk(top, func, arg):
"""Directory tree walk with callback function.
For each directory in the directory tree rooted at top (including top
itself, but excluding '.' and '..'), call func(arg, dirname, fnames).
dirname is the name of the directory, and fnames a list of the names of
the files and subdirectories in dirname (excluding '.' and '..'). func
may modify the fnames list in-place (e.g. via del or slice assignment),
and walk will only recurse into the subdirectories whose names remain in
fnames; this can be used to implement a filter, or to impose a specific
order of visiting. No semantics are defined for, or required of, arg,
beyond that arg is always passed to func. It can be used, e.g., to pass
a filename pattern, or a mutable object designed to accumulate
statistics. Passing None for arg is common."""
warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.",
stacklevel=2)
try:
names = os.listdir(top)
except os.error:
return
func(arg, top, names)
for name in names:
name = join(top, name)
if isdir(name) and not islink(name):
walk(name, func, arg)
def abspath(path):
"""Return an absolute path."""
if not isabs(path):
if isinstance(path, _unicode):
cwd = os.getcwdu()
else:
cwd = os.getcwd()
path = join(cwd, path)
return normpath(path)
# realpath is a no-op on systems without islink support
def realpath(path):
path = abspath(path)
try:
import Carbon.File
except ImportError:
return path
if not path:
return path
components = path.split(':')
path = components[0] + ':'
for c in components[1:]:
path = join(path, c)
try:
path = Carbon.File.FSResolveAliasFile(path, 1)[0].as_pathname()
except Carbon.File.Error:
pass
return path
supports_unicode_filenames = True
| gpl-3.0 | 999,285,283,670,129,400 | 28.115741 | 79 | 0.603912 | false |
kikocorreoso/brython | www/src/Lib/_dummy_thread.py | 12 | 6027 | """Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType', 'RLock']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return 1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
def _set_sentinel():
"""Dummy implementation of _thread._set_sentinel()."""
return LockType()
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
    def __enter__(self):
        # Dispatch through self.acquire() so subclass overrides (e.g.
        # RLock) take effect when the lock is used as a context manager.
        return self.acquire()
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
def __repr__(self):
return "<%s %s.%s object at %s>" % (
"locked" if self.locked_status else "unlocked",
self.__class__.__module__,
self.__class__.__qualname__,
hex(id(self))
)
class RLock(LockType):
"""Dummy implementation of threading._RLock.
    Re-entrant lock can be acquired multiple times and needs to be released
    just as many times. This dummy implementation does not check whether the
    current thread actually owns the lock, but does accounting on the call
counts.
"""
def __init__(self):
super().__init__()
self._levels = 0
def acquire(self, waitflag=None, timeout=-1):
"""Aquire the lock, can be called multiple times in succession.
"""
locked = super().acquire(waitflag, timeout)
if locked:
self._levels += 1
return locked
def release(self):
"""Release needs to be called once for every call to acquire().
"""
if self._levels == 0:
raise error
if self._levels == 1:
super().release()
self._levels -= 1
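# A small sketch of the re-entrant accounting above (illustrative only):
#
#   lock = RLock()
#   lock.acquire()    # _levels == 1
#   lock.acquire()    # re-entrant, _levels == 2
#   lock.release()    # _levels == 1
#   lock.release()    # _levels == 0, lock fully released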
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
| bsd-3-clause | 4,998,584,670,166,008,000 | 30.227979 | 79 | 0.630164 | false |
sdlBasic/sdlbrt | win32/mingw/opt/lib/python2.7/_pyio.py | 28 | 69294 | """
Python implementation of the io module.
"""
from __future__ import (print_function, unicode_literals)
import os
import abc
import codecs
import warnings
import errno
# Import thread instead of threading to reduce startup cost
try:
from thread import allocate_lock as Lock
except ImportError:
from dummy_thread import allocate_lock as Lock
import io
from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)
from errno import EINTR
__metaclass__ = type
# open() uses st_blksize whenever we can
DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
# NOTE: Base classes defined here are registered with the "official" ABCs
# defined in io.py. We don't use real inheritance though, because we don't
# want to inherit the C implementations.
class BlockingIOError(IOError):
"""Exception raised when I/O would block on a non-blocking I/O stream."""
def __init__(self, errno, strerror, characters_written=0):
super(IOError, self).__init__(errno, strerror)
if not isinstance(characters_written, (int, long)):
raise TypeError("characters_written must be a integer")
self.characters_written = characters_written
def open(file, mode="r", buffering=-1,
encoding=None, errors=None,
newline=None, closefd=True):
r"""Open file and return a stream. Raise IOError upon failure.
file is either a text or byte string giving the name (and the path
if the file isn't in the current working directory) of the file to
be opened or an integer file descriptor of the file to be
wrapped. (If a file descriptor is given, it is closed when the
returned I/O object is closed, unless closefd is set to False.)
mode is an optional string that specifies the mode in which the file
is opened. It defaults to 'r' which means open for reading in text
mode. Other common values are 'w' for writing (truncating the file if
it already exists), and 'a' for appending (which on some Unix systems,
means that all writes append to the end of the file regardless of the
current seek position). In text mode, if encoding is not specified the
encoding used is platform dependent. (For reading and writing raw
bytes use binary mode and leave encoding unspecified.) The available
modes are:
========= ===============================================================
Character Meaning
--------- ---------------------------------------------------------------
'r' open for reading (default)
'w' open for writing, truncating the file first
'a' open for writing, appending to the end of the file if it exists
'b' binary mode
't' text mode (default)
'+' open a disk file for updating (reading and writing)
'U' universal newline mode (for backwards compatibility; unneeded
for new code)
========= ===============================================================
The default mode is 'rt' (open for reading text). For binary random
access, the mode 'w+b' opens and truncates the file to 0 bytes, while
'r+b' opens the file without truncation.
Python distinguishes between files opened in binary and text modes,
even when the underlying operating system doesn't. Files opened in
binary mode (appending 'b' to the mode argument) return contents as
bytes objects without any decoding. In text mode (the default, or when
't' is appended to the mode argument), the contents of the file are
returned as strings, the bytes having been first decoded using a
platform-dependent encoding or using the specified encoding if given.
buffering is an optional integer used to set the buffering policy.
Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
line buffering (only usable in text mode), and an integer > 1 to indicate
the size of a fixed-size chunk buffer. When no buffering argument is
given, the default buffering policy works as follows:
* Binary files are buffered in fixed-size chunks; the size of the buffer
is chosen using a heuristic trying to determine the underlying device's
"block size" and falling back on `io.DEFAULT_BUFFER_SIZE`.
On many systems, the buffer will typically be 4096 or 8192 bytes long.
* "Interactive" text files (files for which isatty() returns True)
use line buffering. Other text files use the policy described above
for binary files.
encoding is the name of the encoding used to decode or encode the
file. This should only be used in text mode. The default encoding is
platform dependent, but any encoding supported by Python can be
passed. See the codecs module for the list of supported encodings.
errors is an optional string that specifies how encoding errors are to
be handled---this argument should not be used in binary mode. Pass
'strict' to raise a ValueError exception if there is an encoding error
(the default of None has the same effect), or pass 'ignore' to ignore
errors. (Note that ignoring encoding errors can lead to data loss.)
See the documentation for codecs.register for a list of the permitted
encoding error strings.
newline controls how universal newlines works (it only applies to text
mode). It can be None, '', '\n', '\r', and '\r\n'. It works as
follows:
* On input, if newline is None, universal newlines mode is
enabled. Lines in the input can end in '\n', '\r', or '\r\n', and
these are translated into '\n' before being returned to the
caller. If it is '', universal newline mode is enabled, but line
endings are returned to the caller untranslated. If it has any of
the other legal values, input lines are only terminated by the given
string, and the line ending is returned to the caller untranslated.
* On output, if newline is None, any '\n' characters written are
translated to the system default line separator, os.linesep. If
newline is '', no translation takes place. If newline is any of the
other legal values, any '\n' characters written are translated to
the given string.
If closefd is False, the underlying file descriptor will be kept open
when the file is closed. This does not work when a file name is given
and must be True in that case.
open() returns a file object whose type depends on the mode, and
through which the standard file operations such as reading and writing
are performed. When open() is used to open a file in a text mode ('w',
'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
a file in a binary mode, the returned class varies: in read binary
mode, it returns a BufferedReader; in write binary and append binary
modes, it returns a BufferedWriter, and in read/write mode, it returns
a BufferedRandom.
It is also possible to use a string or bytearray as a file for both
reading and writing. For strings StringIO can be used like a file
opened in a text mode, and for bytes a BytesIO can be used like a file
opened in a binary mode.
"""
if not isinstance(file, (basestring, int, long)):
raise TypeError("invalid file: %r" % file)
if not isinstance(mode, basestring):
raise TypeError("invalid mode: %r" % mode)
if not isinstance(buffering, (int, long)):
raise TypeError("invalid buffering: %r" % buffering)
if encoding is not None and not isinstance(encoding, basestring):
raise TypeError("invalid encoding: %r" % encoding)
if errors is not None and not isinstance(errors, basestring):
raise TypeError("invalid errors: %r" % errors)
modes = set(mode)
if modes - set("arwb+tU") or len(mode) > len(modes):
raise ValueError("invalid mode: %r" % mode)
reading = "r" in modes
writing = "w" in modes
appending = "a" in modes
updating = "+" in modes
text = "t" in modes
binary = "b" in modes
if "U" in modes:
if writing or appending:
raise ValueError("can't use U and writing mode at once")
reading = True
if text and binary:
raise ValueError("can't have text and binary mode at once")
if reading + writing + appending > 1:
raise ValueError("can't have read/write/append mode at once")
if not (reading or writing or appending):
raise ValueError("must have exactly one of read/write/append mode")
if binary and encoding is not None:
raise ValueError("binary mode doesn't take an encoding argument")
if binary and errors is not None:
raise ValueError("binary mode doesn't take an errors argument")
if binary and newline is not None:
raise ValueError("binary mode doesn't take a newline argument")
raw = FileIO(file,
(reading and "r" or "") +
(writing and "w" or "") +
(appending and "a" or "") +
(updating and "+" or ""),
closefd)
result = raw
try:
line_buffering = False
if buffering == 1 or buffering < 0 and raw.isatty():
buffering = -1
line_buffering = True
if buffering < 0:
buffering = DEFAULT_BUFFER_SIZE
try:
bs = os.fstat(raw.fileno()).st_blksize
except (os.error, AttributeError):
pass
else:
if bs > 1:
buffering = bs
if buffering < 0:
raise ValueError("invalid buffering size")
if buffering == 0:
if binary:
return result
raise ValueError("can't have unbuffered text I/O")
if updating:
buffer = BufferedRandom(raw, buffering)
elif writing or appending:
buffer = BufferedWriter(raw, buffering)
elif reading:
buffer = BufferedReader(raw, buffering)
else:
raise ValueError("unknown mode: %r" % mode)
result = buffer
if binary:
return result
text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
result = text
text.mode = mode
return result
except:
result.close()
raise
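# Illustrative mode -> object mappings implemented above (mirroring the
# docstring; not executed here):
#
#   open('spam.txt')           # -> TextIOWrapper wrapping a BufferedReader
#   open('spam.bin', 'rb')     # -> BufferedReader
#   open('spam.bin', 'wb')     # -> BufferedWriter
#   open('spam.bin', 'r+b')    # -> BufferedRandom
#   open('spam.bin', 'wb', 0)  # -> FileIO (raw, unbuffered)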
class DocDescriptor:
"""Helper for builtins.open.__doc__
"""
def __get__(self, obj, typ):
return (
"open(file, mode='r', buffering=-1, encoding=None, "
"errors=None, newline=None, closefd=True)\n\n" +
open.__doc__)
class OpenWrapper:
"""Wrapper for builtins.open
Trick so that open won't become a bound method when stored
as a class variable (as dbm.dumb does).
See initstdio() in Python/pythonrun.c.
"""
__doc__ = DocDescriptor()
def __new__(cls, *args, **kwargs):
return open(*args, **kwargs)
class UnsupportedOperation(ValueError, IOError):
pass
class IOBase:
__metaclass__ = abc.ABCMeta
"""The abstract base class for all I/O classes, acting on streams of
bytes. There is no public constructor.
This class provides dummy implementations for many methods that
derived classes can override selectively; the default implementations
represent a file that cannot be read, written or seeked.
Even though IOBase does not declare read, readinto, or write because
their signatures will vary, implementations and clients should
consider those methods part of the interface. Also, implementations
may raise a IOError when operations they do not support are called.
The basic type used for binary data read from or written to a file is
bytes. bytearrays are accepted too, and in some cases (such as
readinto) needed. Text I/O classes work with str data.
Note that calling any method (even inquiries) on a closed stream is
undefined. Implementations may raise IOError in this case.
IOBase (and its subclasses) support the iterator protocol, meaning
that an IOBase object can be iterated over yielding the lines in a
stream.
IOBase also supports the :keyword:`with` statement. In this example,
fp is closed after the suite of the with statement is complete:
with open('spam.txt', 'r') as fp:
fp.write('Spam and eggs!')
"""
### Internal ###
def _unsupported(self, name):
"""Internal: raise an exception for unsupported operations."""
raise UnsupportedOperation("%s.%s() not supported" %
(self.__class__.__name__, name))
### Positioning ###
def seek(self, pos, whence=0):
"""Change stream position.
Change the stream position to byte offset pos. Argument pos is
interpreted relative to the position indicated by whence. Values
for whence are:
* 0 -- start of stream (the default); offset should be zero or positive
* 1 -- current stream position; offset may be negative
* 2 -- end of stream; offset is usually negative
Return the new absolute position.
"""
self._unsupported("seek")
def tell(self):
"""Return current stream position."""
return self.seek(0, 1)
def truncate(self, pos=None):
"""Truncate file to size bytes.
Size defaults to the current IO position as reported by tell(). Return
the new size.
"""
self._unsupported("truncate")
### Flush and close ###
def flush(self):
"""Flush write buffers, if applicable.
This is not implemented for read-only and non-blocking streams.
"""
self._checkClosed()
# XXX Should this return the number of bytes written???
__closed = False
def close(self):
"""Flush and close the IO object.
This method has no effect if the file is already closed.
"""
if not self.__closed:
try:
self.flush()
finally:
self.__closed = True
def __del__(self):
"""Destructor. Calls close()."""
# The try/except block is in case this is called at program
# exit time, when it's possible that globals have already been
# deleted, and then the close() call might fail. Since
# there's nothing we can do about such failures and they annoy
# the end users, we suppress the traceback.
try:
self.close()
except:
pass
### Inquiries ###
def seekable(self):
"""Return whether object supports random access.
If False, seek(), tell() and truncate() will raise IOError.
This method may need to do a test seek().
"""
return False
def _checkSeekable(self, msg=None):
"""Internal: raise an IOError if file is not seekable
"""
if not self.seekable():
raise IOError("File or stream is not seekable."
if msg is None else msg)
def readable(self):
"""Return whether object was opened for reading.
If False, read() will raise IOError.
"""
return False
def _checkReadable(self, msg=None):
"""Internal: raise an IOError if file is not readable
"""
if not self.readable():
raise IOError("File or stream is not readable."
if msg is None else msg)
def writable(self):
"""Return whether object was opened for writing.
If False, write() and truncate() will raise IOError.
"""
return False
def _checkWritable(self, msg=None):
"""Internal: raise an IOError if file is not writable
"""
if not self.writable():
raise IOError("File or stream is not writable."
if msg is None else msg)
@property
def closed(self):
"""closed: bool. True iff the file has been closed.
For backwards compatibility, this is a property, not a predicate.
"""
return self.__closed
def _checkClosed(self, msg=None):
"""Internal: raise an ValueError if file is closed
"""
if self.closed:
raise ValueError("I/O operation on closed file."
if msg is None else msg)
### Context manager ###
def __enter__(self):
"""Context management protocol. Returns self."""
self._checkClosed()
return self
def __exit__(self, *args):
"""Context management protocol. Calls close()"""
self.close()
### Lower-level APIs ###
# XXX Should these be present even if unimplemented?
def fileno(self):
"""Returns underlying file descriptor if one exists.
An IOError is raised if the IO object does not use a file descriptor.
"""
self._unsupported("fileno")
def isatty(self):
"""Return whether this is an 'interactive' stream.
Return False if it can't be determined.
"""
self._checkClosed()
return False
### Readline[s] and writelines ###
def readline(self, limit=-1):
r"""Read and return a line from the stream.
If limit is specified, at most limit bytes will be read.
The line terminator is always b'\n' for binary files; for text
files, the newlines argument to open can be used to select the line
terminator(s) recognized.
"""
# For backwards compatibility, a (slowish) readline().
if hasattr(self, "peek"):
def nreadahead():
readahead = self.peek(1)
if not readahead:
return 1
n = (readahead.find(b"\n") + 1) or len(readahead)
if limit >= 0:
n = min(n, limit)
return n
else:
def nreadahead():
return 1
if limit is None:
limit = -1
elif not isinstance(limit, (int, long)):
raise TypeError("limit must be an integer")
res = bytearray()
while limit < 0 or len(res) < limit:
b = self.read(nreadahead())
if not b:
break
res += b
if res.endswith(b"\n"):
break
return bytes(res)
def __iter__(self):
self._checkClosed()
return self
def next(self):
line = self.readline()
if not line:
raise StopIteration
return line
def readlines(self, hint=None):
"""Return a list of lines from the stream.
hint can be specified to control the number of lines read: no more
lines will be read if the total size (in bytes/characters) of all
lines so far exceeds hint.
"""
if hint is not None and not isinstance(hint, (int, long)):
raise TypeError("integer or None expected")
if hint is None or hint <= 0:
return list(self)
n = 0
lines = []
for line in self:
lines.append(line)
n += len(line)
if n >= hint:
break
return lines
def writelines(self, lines):
self._checkClosed()
for line in lines:
self.write(line)
io.IOBase.register(IOBase)
class RawIOBase(IOBase):
"""Base class for raw binary I/O."""
# The read() method is implemented by calling readinto(); derived
# classes that want to support read() only need to implement
# readinto() as a primitive operation. In general, readinto() can be
# more efficient than read().
# (It would be tempting to also provide an implementation of
# readinto() in terms of read(), in case the latter is a more suitable
# primitive operation, but that would lead to nasty recursion in case
# a subclass doesn't implement either.)
def read(self, n=-1):
"""Read and return up to n bytes.
Returns an empty bytes object on EOF, or None if the object is
set not to block and has no data to read.
"""
if n is None:
n = -1
if n < 0:
return self.readall()
b = bytearray(n.__index__())
n = self.readinto(b)
if n is None:
return None
del b[n:]
return bytes(b)
def readall(self):
"""Read until EOF, using multiple read() call."""
res = bytearray()
while True:
data = self.read(DEFAULT_BUFFER_SIZE)
if not data:
break
res += data
if res:
return bytes(res)
else:
# b'' or None
return data
def readinto(self, b):
"""Read up to len(b) bytes into b.
Returns number of bytes read (0 for EOF), or None if the object
is set not to block and has no data to read.
"""
self._unsupported("readinto")
def write(self, b):
"""Write the given buffer to the IO stream.
Returns the number of bytes written, which may be less than len(b).
"""
self._unsupported("write")
io.RawIOBase.register(RawIOBase)
from _io import FileIO
RawIOBase.register(FileIO)
class BufferedIOBase(IOBase):
"""Base class for buffered IO objects.
The main difference with RawIOBase is that the read() method
supports omitting the size argument, and does not have a default
implementation that defers to readinto().
In addition, read(), readinto() and write() may raise
BlockingIOError if the underlying raw stream is in non-blocking
mode and not ready; unlike their raw counterparts, they will never
return None.
A typical implementation should not inherit from a RawIOBase
implementation, but wrap one.
"""
def read(self, n=None):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, reads and
returns all data until EOF.
If the argument is positive, and the underlying raw stream is
not 'interactive', multiple raw reads may be issued to satisfy
the byte count (unless EOF is reached first). But for
interactive raw streams (XXX and for pipes?), at most one raw
read will be issued, and a short result does not imply that
EOF is imminent.
Returns an empty bytes array on EOF.
Raises BlockingIOError if the underlying raw stream has no
data at the moment.
"""
self._unsupported("read")
def read1(self, n=None):
"""Read up to n bytes with at most one read() system call."""
self._unsupported("read1")
def readinto(self, b):
"""Read up to len(b) bytes into b.
Like read(), this may issue multiple reads to the underlying raw
stream, unless the latter is 'interactive'.
Returns the number of bytes read (0 for EOF).
Raises BlockingIOError if the underlying raw stream has no
data at the moment.
"""
# XXX This ought to work with anything that supports the buffer API
data = self.read(len(b))
n = len(data)
try:
b[:n] = data
except TypeError as err:
import array
if not isinstance(b, array.array):
raise err
b[:n] = array.array(b'b', data)
return n
def write(self, b):
"""Write the given buffer to the IO stream.
Return the number of bytes written, which is never less than
len(b).
Raises BlockingIOError if the buffer is full and the
underlying raw stream cannot accept more data at the moment.
"""
self._unsupported("write")
def detach(self):
"""
Separate the underlying raw stream from the buffer and return it.
After the raw stream has been detached, the buffer is in an unusable
state.
"""
self._unsupported("detach")
io.BufferedIOBase.register(BufferedIOBase)
class _BufferedIOMixin(BufferedIOBase):
"""A mixin implementation of BufferedIOBase with an underlying raw stream.
This passes most requests on to the underlying raw stream. It
does *not* provide implementations of read(), readinto() or
write().
"""
def __init__(self, raw):
self._raw = raw
### Positioning ###
def seek(self, pos, whence=0):
new_position = self.raw.seek(pos, whence)
if new_position < 0:
raise IOError("seek() returned an invalid position")
return new_position
def tell(self):
pos = self.raw.tell()
if pos < 0:
raise IOError("tell() returned an invalid position")
return pos
def truncate(self, pos=None):
# Flush the stream. We're mixing buffered I/O with lower-level I/O,
# and a flush may be necessary to synch both views of the current
# file state.
self.flush()
if pos is None:
pos = self.tell()
# XXX: Should seek() be used, instead of passing the position
# XXX directly to truncate?
return self.raw.truncate(pos)
### Flush and close ###
def flush(self):
if self.closed:
raise ValueError("flush of closed file")
self.raw.flush()
def close(self):
if self.raw is not None and not self.closed:
try:
# may raise BlockingIOError or BrokenPipeError etc
self.flush()
finally:
self.raw.close()
def detach(self):
if self.raw is None:
raise ValueError("raw stream already detached")
self.flush()
raw = self._raw
self._raw = None
return raw
### Inquiries ###
def seekable(self):
return self.raw.seekable()
def readable(self):
return self.raw.readable()
def writable(self):
return self.raw.writable()
@property
def raw(self):
return self._raw
@property
def closed(self):
return self.raw.closed
@property
def name(self):
return self.raw.name
@property
def mode(self):
return self.raw.mode
def __repr__(self):
clsname = self.__class__.__name__
try:
name = self.name
except AttributeError:
return "<_pyio.{0}>".format(clsname)
else:
return "<_pyio.{0} name={1!r}>".format(clsname, name)
### Lower-level APIs ###
def fileno(self):
return self.raw.fileno()
def isatty(self):
return self.raw.isatty()
class BytesIO(BufferedIOBase):
"""Buffered I/O implementation using an in-memory bytes buffer."""
def __init__(self, initial_bytes=None):
buf = bytearray()
if initial_bytes is not None:
buf.extend(initial_bytes)
self._buffer = buf
self._pos = 0
def __getstate__(self):
if self.closed:
raise ValueError("__getstate__ on closed file")
return self.__dict__.copy()
def getvalue(self):
"""Return the bytes value (contents) of the buffer
"""
if self.closed:
raise ValueError("getvalue on closed file")
return bytes(self._buffer)
def read(self, n=None):
if self.closed:
raise ValueError("read from closed file")
if n is None:
n = -1
if not isinstance(n, (int, long)):
raise TypeError("integer argument expected, got {0!r}".format(
type(n)))
if n < 0:
n = len(self._buffer)
if len(self._buffer) <= self._pos:
return b""
newpos = min(len(self._buffer), self._pos + n)
b = self._buffer[self._pos : newpos]
self._pos = newpos
return bytes(b)
def read1(self, n):
"""This is the same as read.
"""
return self.read(n)
def write(self, b):
if self.closed:
raise ValueError("write to closed file")
if isinstance(b, unicode):
raise TypeError("can't write unicode to binary stream")
n = len(b)
if n == 0:
return 0
pos = self._pos
if pos > len(self._buffer):
# Inserts null bytes between the current end of the file
# and the new write position.
padding = b'\x00' * (pos - len(self._buffer))
self._buffer += padding
self._buffer[pos:pos + n] = b
self._pos += n
return n
def seek(self, pos, whence=0):
if self.closed:
raise ValueError("seek on closed file")
try:
pos.__index__
except AttributeError:
raise TypeError("an integer is required")
if whence == 0:
if pos < 0:
raise ValueError("negative seek position %r" % (pos,))
self._pos = pos
elif whence == 1:
self._pos = max(0, self._pos + pos)
elif whence == 2:
self._pos = max(0, len(self._buffer) + pos)
else:
raise ValueError("invalid whence value")
return self._pos
def tell(self):
if self.closed:
raise ValueError("tell on closed file")
return self._pos
def truncate(self, pos=None):
if self.closed:
raise ValueError("truncate on closed file")
if pos is None:
pos = self._pos
else:
try:
pos.__index__
except AttributeError:
raise TypeError("an integer is required")
if pos < 0:
raise ValueError("negative truncate position %r" % (pos,))
del self._buffer[pos:]
return pos
def readable(self):
if self.closed:
raise ValueError("I/O operation on closed file.")
return True
def writable(self):
if self.closed:
raise ValueError("I/O operation on closed file.")
return True
def seekable(self):
if self.closed:
raise ValueError("I/O operation on closed file.")
return True
class BufferedReader(_BufferedIOMixin):
"""BufferedReader(raw[, buffer_size])
A buffer for a readable, sequential BaseRawIO object.
The constructor creates a BufferedReader for the given readable raw
stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
is used.
"""
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
"""Create a new buffered reader using the given readable raw IO object.
"""
if not raw.readable():
raise IOError('"raw" argument must be readable.')
_BufferedIOMixin.__init__(self, raw)
if buffer_size <= 0:
raise ValueError("invalid buffer size")
self.buffer_size = buffer_size
self._reset_read_buf()
self._read_lock = Lock()
def _reset_read_buf(self):
self._read_buf = b""
self._read_pos = 0
def read(self, n=None):
"""Read n bytes.
Returns exactly n bytes of data unless the underlying raw IO
stream reaches EOF or if the call would block in non-blocking
mode. If n is negative, read until EOF or until read() would
block.
"""
if n is not None and n < -1:
raise ValueError("invalid number of bytes to read")
with self._read_lock:
return self._read_unlocked(n)
def _read_unlocked(self, n=None):
nodata_val = b""
empty_values = (b"", None)
buf = self._read_buf
pos = self._read_pos
# Special case for when the number of bytes to read is unspecified.
if n is None or n == -1:
self._reset_read_buf()
chunks = [buf[pos:]] # Strip the consumed bytes.
current_size = 0
while True:
# Read until EOF or until read() would block.
try:
chunk = self.raw.read()
except IOError as e:
if e.errno != EINTR:
raise
continue
if chunk in empty_values:
nodata_val = chunk
break
current_size += len(chunk)
chunks.append(chunk)
return b"".join(chunks) or nodata_val
# The number of bytes to read is specified, return at most n bytes.
avail = len(buf) - pos # Length of the available buffered data.
if n <= avail:
# Fast path: the data to read is fully buffered.
self._read_pos += n
return buf[pos:pos+n]
# Slow path: read from the stream until enough bytes are read,
# or until an EOF occurs or until read() would block.
chunks = [buf[pos:]]
wanted = max(self.buffer_size, n)
while avail < n:
try:
chunk = self.raw.read(wanted)
except IOError as e:
if e.errno != EINTR:
raise
continue
if chunk in empty_values:
nodata_val = chunk
break
avail += len(chunk)
chunks.append(chunk)
        # n is more than avail only when an EOF occurred or when
# read() would have blocked.
n = min(n, avail)
out = b"".join(chunks)
self._read_buf = out[n:] # Save the extra data in the buffer.
self._read_pos = 0
return out[:n] if out else nodata_val
def peek(self, n=0):
"""Returns buffered bytes without advancing the position.
The argument indicates a desired minimal number of bytes; we
do at most one raw read to satisfy it. We never return more
than self.buffer_size.
"""
with self._read_lock:
return self._peek_unlocked(n)
def _peek_unlocked(self, n=0):
want = min(n, self.buffer_size)
have = len(self._read_buf) - self._read_pos
if have < want or have <= 0:
to_read = self.buffer_size - have
while True:
try:
current = self.raw.read(to_read)
except IOError as e:
if e.errno != EINTR:
raise
continue
break
if current:
self._read_buf = self._read_buf[self._read_pos:] + current
self._read_pos = 0
return self._read_buf[self._read_pos:]
def read1(self, n):
"""Reads up to n bytes, with at most one read() system call."""
# Returns up to n bytes. If at least one byte is buffered, we
# only return buffered bytes. Otherwise, we do one raw read.
if n < 0:
raise ValueError("number of bytes to read must be positive")
if n == 0:
return b""
with self._read_lock:
self._peek_unlocked(1)
return self._read_unlocked(
min(n, len(self._read_buf) - self._read_pos))
def tell(self):
return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos
def seek(self, pos, whence=0):
if not (0 <= whence <= 2):
raise ValueError("invalid whence value")
with self._read_lock:
if whence == 1:
pos -= len(self._read_buf) - self._read_pos
pos = _BufferedIOMixin.seek(self, pos, whence)
self._reset_read_buf()
return pos
class BufferedWriter(_BufferedIOMixin):
"""A buffer for a writeable sequential RawIO object.
The constructor creates a BufferedWriter for the given writeable raw
stream. If the buffer_size is not given, it defaults to
DEFAULT_BUFFER_SIZE.
"""
_warning_stack_offset = 2
def __init__(self, raw,
buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
if not raw.writable():
raise IOError('"raw" argument must be writable.')
_BufferedIOMixin.__init__(self, raw)
if buffer_size <= 0:
raise ValueError("invalid buffer size")
if max_buffer_size is not None:
warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
self._warning_stack_offset)
self.buffer_size = buffer_size
self._write_buf = bytearray()
self._write_lock = Lock()
def write(self, b):
if self.closed:
raise ValueError("write to closed file")
if isinstance(b, unicode):
raise TypeError("can't write unicode to binary stream")
with self._write_lock:
# XXX we can implement some more tricks to try and avoid
# partial writes
if len(self._write_buf) > self.buffer_size:
# We're full, so let's pre-flush the buffer. (This may
# raise BlockingIOError with characters_written == 0.)
self._flush_unlocked()
before = len(self._write_buf)
self._write_buf.extend(b)
written = len(self._write_buf) - before
if len(self._write_buf) > self.buffer_size:
try:
self._flush_unlocked()
except BlockingIOError as e:
if len(self._write_buf) > self.buffer_size:
# We've hit the buffer_size. We have to accept a partial
# write and cut back our buffer.
overage = len(self._write_buf) - self.buffer_size
written -= overage
self._write_buf = self._write_buf[:self.buffer_size]
raise BlockingIOError(e.errno, e.strerror, written)
return written
def truncate(self, pos=None):
with self._write_lock:
self._flush_unlocked()
if pos is None:
pos = self.raw.tell()
return self.raw.truncate(pos)
def flush(self):
with self._write_lock:
self._flush_unlocked()
def _flush_unlocked(self):
if self.closed:
raise ValueError("flush of closed file")
while self._write_buf:
try:
n = self.raw.write(self._write_buf)
except BlockingIOError:
raise RuntimeError("self.raw should implement RawIOBase: it "
"should not raise BlockingIOError")
except IOError as e:
if e.errno != EINTR:
raise
continue
if n is None:
raise BlockingIOError(
errno.EAGAIN,
"write could not complete without blocking", 0)
if n > len(self._write_buf) or n < 0:
raise IOError("write() returned incorrect number of bytes")
del self._write_buf[:n]
def tell(self):
return _BufferedIOMixin.tell(self) + len(self._write_buf)
def seek(self, pos, whence=0):
if not (0 <= whence <= 2):
raise ValueError("invalid whence")
with self._write_lock:
self._flush_unlocked()
return _BufferedIOMixin.seek(self, pos, whence)
class BufferedRWPair(BufferedIOBase):
"""A buffered reader and writer object together.
A buffered reader object and buffered writer object put together to
form a sequential IO object that can read and write. This is typically
used with a socket or two-way pipe.
reader and writer are RawIOBase objects that are readable and
writeable respectively. If the buffer_size is omitted it defaults to
DEFAULT_BUFFER_SIZE.
"""
# XXX The usefulness of this (compared to having two separate IO
# objects) is questionable.
def __init__(self, reader, writer,
buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
"""Constructor.
The arguments are two RawIO instances.
"""
if max_buffer_size is not None:
warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
if not reader.readable():
raise IOError('"reader" argument must be readable.')
if not writer.writable():
raise IOError('"writer" argument must be writable.')
self.reader = BufferedReader(reader, buffer_size)
self.writer = BufferedWriter(writer, buffer_size)
def read(self, n=None):
if n is None:
n = -1
return self.reader.read(n)
def readinto(self, b):
return self.reader.readinto(b)
def write(self, b):
return self.writer.write(b)
def peek(self, n=0):
return self.reader.peek(n)
def read1(self, n):
return self.reader.read1(n)
def readable(self):
return self.reader.readable()
def writable(self):
return self.writer.writable()
def flush(self):
return self.writer.flush()
def close(self):
self.writer.close()
self.reader.close()
def isatty(self):
return self.reader.isatty() or self.writer.isatty()
@property
def closed(self):
return self.writer.closed
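# Sketch of the socket/pipe pairing mentioned in the class docstring
# (illustrative; `sock_r_raw` and `sock_w_raw` are hypothetical readable
# and writable RawIOBase endpoints):
#
#   rw = BufferedRWPair(sock_r_raw, sock_w_raw)
#   rw.write(b"PING\r\n")
#   rw.flush()
#   reply = rw.readline()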
class BufferedRandom(BufferedWriter, BufferedReader):
"""A buffered interface to random access streams.
The constructor creates a reader and writer for a seekable stream,
raw, given in the first argument. If the buffer_size is omitted it
defaults to DEFAULT_BUFFER_SIZE.
"""
_warning_stack_offset = 3
def __init__(self, raw,
buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
raw._checkSeekable()
BufferedReader.__init__(self, raw, buffer_size)
BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)
def seek(self, pos, whence=0):
if not (0 <= whence <= 2):
raise ValueError("invalid whence")
self.flush()
if self._read_buf:
# Undo read ahead.
with self._read_lock:
self.raw.seek(self._read_pos - len(self._read_buf), 1)
# First do the raw seek, then empty the read buffer, so that
# if the raw seek fails, we don't lose buffered data forever.
pos = self.raw.seek(pos, whence)
with self._read_lock:
self._reset_read_buf()
if pos < 0:
raise IOError("seek() returned invalid position")
return pos
def tell(self):
if self._write_buf:
return BufferedWriter.tell(self)
else:
return BufferedReader.tell(self)
def truncate(self, pos=None):
if pos is None:
pos = self.tell()
# Use seek to flush the read buffer.
return BufferedWriter.truncate(self, pos)
def read(self, n=None):
if n is None:
n = -1
self.flush()
return BufferedReader.read(self, n)
def readinto(self, b):
self.flush()
return BufferedReader.readinto(self, b)
def peek(self, n=0):
self.flush()
return BufferedReader.peek(self, n)
def read1(self, n):
self.flush()
return BufferedReader.read1(self, n)
def write(self, b):
if self._read_buf:
# Undo readahead
with self._read_lock:
self.raw.seek(self._read_pos - len(self._read_buf), 1)
self._reset_read_buf()
return BufferedWriter.write(self, b)
class TextIOBase(IOBase):
"""Base class for text I/O.
This class provides a character and line based interface to stream
I/O. There is no readinto method because Python's character strings
are immutable. There is no public constructor.
"""
def read(self, n=-1):
"""Read at most n characters from stream.
Read from underlying buffer until we have n characters or we hit EOF.
If n is negative or omitted, read until EOF.
"""
self._unsupported("read")
def write(self, s):
"""Write string s to stream."""
self._unsupported("write")
def truncate(self, pos=None):
"""Truncate size to pos."""
self._unsupported("truncate")
def readline(self):
"""Read until newline or EOF.
Returns an empty string if EOF is hit immediately.
"""
self._unsupported("readline")
def detach(self):
"""
Separate the underlying buffer from the TextIOBase and return it.
After the underlying buffer has been detached, the TextIO is in an
unusable state.
"""
self._unsupported("detach")
@property
def encoding(self):
"""Subclasses should override."""
return None
@property
def newlines(self):
"""Line endings translated so far.
Only line endings translated during reading are considered.
Subclasses should override.
"""
return None
@property
def errors(self):
"""Error setting of the decoder or encoder.
Subclasses should override."""
return None
io.TextIOBase.register(TextIOBase)
class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
r"""Codec used when reading a file in universal newlines mode. It wraps
another incremental decoder, translating \r\n and \r into \n. It also
records the types of newlines encountered. When used with
translate=False, it ensures that the newline sequence is returned in
one piece.
"""
def __init__(self, decoder, translate, errors='strict'):
codecs.IncrementalDecoder.__init__(self, errors=errors)
self.translate = translate
self.decoder = decoder
self.seennl = 0
self.pendingcr = False
def decode(self, input, final=False):
# decode input (with the eventual \r from a previous pass)
if self.decoder is None:
output = input
else:
output = self.decoder.decode(input, final=final)
if self.pendingcr and (output or final):
output = "\r" + output
self.pendingcr = False
# retain last \r even when not translating data:
# then readline() is sure to get \r\n in one pass
if output.endswith("\r") and not final:
output = output[:-1]
self.pendingcr = True
# Record which newlines are read
crlf = output.count('\r\n')
cr = output.count('\r') - crlf
lf = output.count('\n') - crlf
self.seennl |= (lf and self._LF) | (cr and self._CR) \
| (crlf and self._CRLF)
if self.translate:
if crlf:
output = output.replace("\r\n", "\n")
if cr:
output = output.replace("\r", "\n")
return output
def getstate(self):
if self.decoder is None:
buf = b""
flag = 0
else:
buf, flag = self.decoder.getstate()
flag <<= 1
if self.pendingcr:
flag |= 1
return buf, flag
def setstate(self, state):
buf, flag = state
self.pendingcr = bool(flag & 1)
if self.decoder is not None:
self.decoder.setstate((buf, flag >> 1))
def reset(self):
self.seennl = 0
self.pendingcr = False
if self.decoder is not None:
self.decoder.reset()
_LF = 1
_CR = 2
_CRLF = 4
@property
def newlines(self):
return (None,
"\n",
"\r",
("\r", "\n"),
"\r\n",
("\n", "\r\n"),
("\r", "\r\n"),
("\r", "\n", "\r\n")
)[self.seennl]
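    # A small worked example (illustrative only): wrapping a UTF-8
    # incremental decoder with translate=True turns "\r\n" into "\n" and
    # records which ending was seen:
    #
    #   dec = IncrementalNewlineDecoder(
    #       codecs.getincrementaldecoder("utf-8")(), translate=True)
    #   dec.decode(b"a\r")                 # -> "a" (trailing "\r" held back)
    #   dec.decode(b"\nb", final=True)     # -> "\nb"
    #   dec.newlines                       # -> "\r\n"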
class TextIOWrapper(TextIOBase):
r"""Character and line based layer over a BufferedIOBase object, buffer.
encoding gives the name of the encoding that the stream will be
decoded or encoded with. It defaults to locale.getpreferredencoding.
errors determines the strictness of encoding and decoding (see the
codecs.register) and defaults to "strict".
newline can be None, '', '\n', '\r', or '\r\n'. It controls the
handling of line endings. If it is None, universal newlines is
enabled. With this enabled, on input, the lines endings '\n', '\r',
or '\r\n' are translated to '\n' before being returned to the
caller. Conversely, on output, '\n' is translated to the system
default line separator, os.linesep. If newline is any other of its
legal values, that newline becomes the newline when the file is read
and it is returned untranslated. On output, '\n' is converted to the
newline.
If line_buffering is True, a call to flush is implied when a call to
write contains a newline character.
"""
_CHUNK_SIZE = 2048
def __init__(self, buffer, encoding=None, errors=None, newline=None,
line_buffering=False):
if newline is not None and not isinstance(newline, basestring):
raise TypeError("illegal newline type: %r" % (type(newline),))
if newline not in (None, "", "\n", "\r", "\r\n"):
raise ValueError("illegal newline value: %r" % (newline,))
if encoding is None:
try:
import locale
except ImportError:
# Importing locale may fail if Python is being built
encoding = "ascii"
else:
encoding = locale.getpreferredencoding()
if not isinstance(encoding, basestring):
raise ValueError("invalid encoding: %r" % encoding)
if errors is None:
errors = "strict"
else:
if not isinstance(errors, basestring):
raise ValueError("invalid errors: %r" % errors)
self._buffer = buffer
self._line_buffering = line_buffering
self._encoding = encoding
self._errors = errors
self._readuniversal = not newline
self._readtranslate = newline is None
self._readnl = newline
self._writetranslate = newline != ''
self._writenl = newline or os.linesep
self._encoder = None
self._decoder = None
self._decoded_chars = '' # buffer for text returned from decoder
self._decoded_chars_used = 0 # offset into _decoded_chars for read()
self._snapshot = None # info for reconstructing decoder state
self._seekable = self._telling = self.buffer.seekable()
if self._seekable and self.writable():
position = self.buffer.tell()
if position != 0:
try:
self._get_encoder().setstate(0)
except LookupError:
# Sometimes the encoder doesn't exist
pass
# self._snapshot is either None, or a tuple (dec_flags, next_input)
# where dec_flags is the second (integer) item of the decoder state
# and next_input is the chunk of input bytes that comes next after the
# snapshot point. We use this to reconstruct decoder states in tell().
# Naming convention:
# - "bytes_..." for integer variables that count input bytes
# - "chars_..." for integer variables that count decoded characters
def __repr__(self):
try:
name = self.name
except AttributeError:
return "<_pyio.TextIOWrapper encoding='{0}'>".format(self.encoding)
else:
return "<_pyio.TextIOWrapper name={0!r} encoding='{1}'>".format(
name, self.encoding)
@property
def encoding(self):
return self._encoding
@property
def errors(self):
return self._errors
@property
def line_buffering(self):
return self._line_buffering
@property
def buffer(self):
return self._buffer
def seekable(self):
if self.closed:
raise ValueError("I/O operation on closed file.")
return self._seekable
def readable(self):
return self.buffer.readable()
def writable(self):
return self.buffer.writable()
def flush(self):
self.buffer.flush()
self._telling = self._seekable
def close(self):
if self.buffer is not None and not self.closed:
try:
self.flush()
finally:
self.buffer.close()
@property
def closed(self):
return self.buffer.closed
@property
def name(self):
return self.buffer.name
def fileno(self):
return self.buffer.fileno()
def isatty(self):
return self.buffer.isatty()
def write(self, s):
if self.closed:
raise ValueError("write to closed file")
if not isinstance(s, unicode):
raise TypeError("can't write %s to text stream" %
s.__class__.__name__)
length = len(s)
haslf = (self._writetranslate or self._line_buffering) and "\n" in s
if haslf and self._writetranslate and self._writenl != "\n":
s = s.replace("\n", self._writenl)
encoder = self._encoder or self._get_encoder()
# XXX What if we were just reading?
b = encoder.encode(s)
self.buffer.write(b)
if self._line_buffering and (haslf or "\r" in s):
self.flush()
self._snapshot = None
if self._decoder:
self._decoder.reset()
return length
def _get_encoder(self):
make_encoder = codecs.getincrementalencoder(self._encoding)
self._encoder = make_encoder(self._errors)
return self._encoder
def _get_decoder(self):
make_decoder = codecs.getincrementaldecoder(self._encoding)
decoder = make_decoder(self._errors)
if self._readuniversal:
decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
self._decoder = decoder
return decoder
# The following three methods implement an ADT for _decoded_chars.
# Text returned from the decoder is buffered here until the client
# requests it by calling our read() or readline() method.
def _set_decoded_chars(self, chars):
"""Set the _decoded_chars buffer."""
self._decoded_chars = chars
self._decoded_chars_used = 0
def _get_decoded_chars(self, n=None):
"""Advance into the _decoded_chars buffer."""
offset = self._decoded_chars_used
if n is None:
chars = self._decoded_chars[offset:]
else:
chars = self._decoded_chars[offset:offset + n]
self._decoded_chars_used += len(chars)
return chars
def _rewind_decoded_chars(self, n):
"""Rewind the _decoded_chars buffer."""
if self._decoded_chars_used < n:
raise AssertionError("rewind decoded_chars out of bounds")
self._decoded_chars_used -= n
def _read_chunk(self):
"""
Read and decode the next chunk of data from the BufferedReader.
"""
# The return value is True unless EOF was reached. The decoded
# string is placed in self._decoded_chars (replacing its previous
# value). The entire input chunk is sent to the decoder, though
# some of it may remain buffered in the decoder, yet to be
# converted.
if self._decoder is None:
raise ValueError("no decoder")
if self._telling:
# To prepare for tell(), we need to snapshot a point in the
# file where the decoder's input buffer is empty.
dec_buffer, dec_flags = self._decoder.getstate()
# Given this, we know there was a valid snapshot point
# len(dec_buffer) bytes ago with decoder state (b'', dec_flags).
# Read a chunk, decode it, and put the result in self._decoded_chars.
input_chunk = self.buffer.read1(self._CHUNK_SIZE)
eof = not input_chunk
self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
if self._telling:
# At the snapshot point, len(dec_buffer) bytes before the read,
# the next input to be decoded is dec_buffer + input_chunk.
self._snapshot = (dec_flags, dec_buffer + input_chunk)
return not eof
def _pack_cookie(self, position, dec_flags=0,
bytes_to_feed=0, need_eof=0, chars_to_skip=0):
# The meaning of a tell() cookie is: seek to position, set the
# decoder flags to dec_flags, read bytes_to_feed bytes, feed them
# into the decoder with need_eof as the EOF flag, then skip
# chars_to_skip characters of the decoded result. For most simple
# decoders, tell() will often just give a byte offset in the file.
return (position | (dec_flags<<64) | (bytes_to_feed<<128) |
(chars_to_skip<<192) | bool(need_eof)<<256)
def _unpack_cookie(self, bigint):
rest, position = divmod(bigint, 1<<64)
rest, dec_flags = divmod(rest, 1<<64)
rest, bytes_to_feed = divmod(rest, 1<<64)
need_eof, chars_to_skip = divmod(rest, 1<<64)
return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
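    # Round-trip sketch (illustrative): packing position=10 with dec_flags=1
    # and unpacking recovers the same components:
    #
    #   cookie = self._pack_cookie(10, dec_flags=1)
    #   self._unpack_cookie(cookie)    # -> (10, 1, 0, 0, 0)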
def tell(self):
if not self._seekable:
raise IOError("underlying stream is not seekable")
if not self._telling:
raise IOError("telling position disabled by next() call")
self.flush()
position = self.buffer.tell()
decoder = self._decoder
if decoder is None or self._snapshot is None:
if self._decoded_chars:
# This should never happen.
raise AssertionError("pending decoded text")
return position
# Skip backward to the snapshot point (see _read_chunk).
dec_flags, next_input = self._snapshot
position -= len(next_input)
# How many decoded characters have been used up since the snapshot?
chars_to_skip = self._decoded_chars_used
if chars_to_skip == 0:
# We haven't moved from the snapshot point.
return self._pack_cookie(position, dec_flags)
# Starting from the snapshot position, we will walk the decoder
# forward until it gives us enough decoded characters.
saved_state = decoder.getstate()
try:
# Note our initial start point.
decoder.setstate((b'', dec_flags))
start_pos = position
start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
need_eof = 0
# Feed the decoder one byte at a time. As we go, note the
# nearest "safe start point" before the current location
# (a point where the decoder has nothing buffered, so seek()
# can safely start from there and advance to this location).
for next_byte in next_input:
bytes_fed += 1
chars_decoded += len(decoder.decode(next_byte))
dec_buffer, dec_flags = decoder.getstate()
if not dec_buffer and chars_decoded <= chars_to_skip:
# Decoder buffer is empty, so this is a safe start point.
start_pos += bytes_fed
chars_to_skip -= chars_decoded
start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
if chars_decoded >= chars_to_skip:
break
else:
# We didn't get enough decoded data; signal EOF to get more.
chars_decoded += len(decoder.decode(b'', final=True))
need_eof = 1
if chars_decoded < chars_to_skip:
raise IOError("can't reconstruct logical file position")
# The returned cookie corresponds to the last safe start point.
return self._pack_cookie(
start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
finally:
decoder.setstate(saved_state)
def truncate(self, pos=None):
self.flush()
if pos is None:
pos = self.tell()
return self.buffer.truncate(pos)
def detach(self):
if self.buffer is None:
raise ValueError("buffer is already detached")
self.flush()
buffer = self._buffer
self._buffer = None
return buffer
def seek(self, cookie, whence=0):
if self.closed:
raise ValueError("tell on closed file")
if not self._seekable:
raise IOError("underlying stream is not seekable")
if whence == 1: # seek relative to current position
if cookie != 0:
raise IOError("can't do nonzero cur-relative seeks")
# Seeking to the current position should attempt to
# sync the underlying buffer with the current position.
whence = 0
cookie = self.tell()
if whence == 2: # seek relative to end of file
if cookie != 0:
raise IOError("can't do nonzero end-relative seeks")
self.flush()
position = self.buffer.seek(0, 2)
self._set_decoded_chars('')
self._snapshot = None
if self._decoder:
self._decoder.reset()
return position
if whence != 0:
raise ValueError("invalid whence (%r, should be 0, 1 or 2)" %
(whence,))
if cookie < 0:
raise ValueError("negative seek position %r" % (cookie,))
self.flush()
# The strategy of seek() is to go back to the safe start point
# and replay the effect of read(chars_to_skip) from there.
start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip = \
self._unpack_cookie(cookie)
# Seek back to the safe start point.
self.buffer.seek(start_pos)
self._set_decoded_chars('')
self._snapshot = None
# Restore the decoder to its state from the safe start point.
if cookie == 0 and self._decoder:
self._decoder.reset()
elif self._decoder or dec_flags or chars_to_skip:
self._decoder = self._decoder or self._get_decoder()
self._decoder.setstate((b'', dec_flags))
self._snapshot = (dec_flags, b'')
if chars_to_skip:
# Just like _read_chunk, feed the decoder and save a snapshot.
input_chunk = self.buffer.read(bytes_to_feed)
self._set_decoded_chars(
self._decoder.decode(input_chunk, need_eof))
self._snapshot = (dec_flags, input_chunk)
# Skip chars_to_skip of the decoded characters.
if len(self._decoded_chars) < chars_to_skip:
raise IOError("can't restore logical file position")
self._decoded_chars_used = chars_to_skip
# Finally, reset the encoder (merely useful for proper BOM handling)
try:
encoder = self._encoder or self._get_encoder()
except LookupError:
# Sometimes the encoder doesn't exist
pass
else:
if cookie != 0:
encoder.setstate(0)
else:
encoder.reset()
return cookie
def read(self, n=None):
self._checkReadable()
if n is None:
n = -1
decoder = self._decoder or self._get_decoder()
try:
n.__index__
except AttributeError:
raise TypeError("an integer is required")
if n < 0:
# Read everything.
result = (self._get_decoded_chars() +
decoder.decode(self.buffer.read(), final=True))
self._set_decoded_chars('')
self._snapshot = None
return result
else:
# Keep reading chunks until we have n characters to return.
eof = False
result = self._get_decoded_chars(n)
while len(result) < n and not eof:
eof = not self._read_chunk()
result += self._get_decoded_chars(n - len(result))
return result
def next(self):
self._telling = False
line = self.readline()
if not line:
self._snapshot = None
self._telling = self._seekable
raise StopIteration
return line
def readline(self, limit=None):
if self.closed:
raise ValueError("read from closed file")
if limit is None:
limit = -1
elif not isinstance(limit, (int, long)):
raise TypeError("limit must be an integer")
# Grab all the decoded text (we will rewind any extra bits later).
line = self._get_decoded_chars()
start = 0
# Make the decoder if it doesn't already exist.
if not self._decoder:
self._get_decoder()
pos = endpos = None
while True:
if self._readtranslate:
# Newlines are already translated, only search for \n
pos = line.find('\n', start)
if pos >= 0:
endpos = pos + 1
break
else:
start = len(line)
elif self._readuniversal:
# Universal newline search. Find any of \r, \r\n, \n
# The decoder ensures that \r\n are not split in two pieces
# In C we'd look for these in parallel of course.
nlpos = line.find("\n", start)
crpos = line.find("\r", start)
if crpos == -1:
if nlpos == -1:
# Nothing found
start = len(line)
else:
# Found \n
endpos = nlpos + 1
break
elif nlpos == -1:
# Found lone \r
endpos = crpos + 1
break
elif nlpos < crpos:
# Found \n
endpos = nlpos + 1
break
elif nlpos == crpos + 1:
# Found \r\n
endpos = crpos + 2
break
else:
# Found \r
endpos = crpos + 1
break
else:
# non-universal
pos = line.find(self._readnl)
if pos >= 0:
endpos = pos + len(self._readnl)
break
if limit >= 0 and len(line) >= limit:
endpos = limit # reached length limit
break
            # No line ending seen yet - get more data
while self._read_chunk():
if self._decoded_chars:
break
if self._decoded_chars:
line += self._get_decoded_chars()
else:
# end of file
self._set_decoded_chars('')
self._snapshot = None
return line
if limit >= 0 and endpos > limit:
endpos = limit # don't exceed limit
# Rewind _decoded_chars to just after the line ending we found.
self._rewind_decoded_chars(len(line) - endpos)
return line[:endpos]
@property
def newlines(self):
return self._decoder.newlines if self._decoder else None
class StringIO(TextIOWrapper):
"""Text I/O implementation using an in-memory buffer.
The initial_value argument sets the value of object. The newline
argument is like the one of TextIOWrapper's constructor.
"""
def __init__(self, initial_value="", newline="\n"):
super(StringIO, self).__init__(BytesIO(),
encoding="utf-8",
errors="strict",
newline=newline)
# Issue #5645: make universal newlines semantics the same as in the
# C version, even under Windows.
if newline is None:
self._writetranslate = False
if initial_value:
if not isinstance(initial_value, unicode):
initial_value = unicode(initial_value)
self.write(initial_value)
self.seek(0)
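    # Quick illustration (not in the original source):
    #
    #   s = StringIO(u"hello\nworld\n")
    #   s.readline()    # -> u"hello\n"
    #   s.getvalue()    # -> u"hello\nworld\n", independent of read position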
def getvalue(self):
self.flush()
decoder = self._decoder or self._get_decoder()
old_state = decoder.getstate()
decoder.reset()
try:
return decoder.decode(self.buffer.getvalue(), final=True)
finally:
decoder.setstate(old_state)
def __repr__(self):
        # TextIOWrapper shows the encoding in its repr. In StringIO,
        # that's an implementation detail.
return object.__repr__(self)
@property
def errors(self):
return None
@property
def encoding(self):
return None
def detach(self):
# This doesn't make sense on StringIO.
self._unsupported("detach")
| lgpl-2.1 | -8,064,648,548,969,156,000 | 33.134975 | 81 | 0.574465 | false |
google/mediapipe | mediapipe/python/packet_creator.py | 1 | 11356 | # Copyright 2020 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The public facing packet creator APIs."""
from typing import List, Union
import warnings
import numpy as np
from google.protobuf import message
from mediapipe.python._framework_bindings import _packet_creator
from mediapipe.python._framework_bindings import image
from mediapipe.python._framework_bindings import image_frame
from mediapipe.python._framework_bindings import packet
create_string = _packet_creator.create_string
create_bool = _packet_creator.create_bool
create_int = _packet_creator.create_int
create_int8 = _packet_creator.create_int8
create_int16 = _packet_creator.create_int16
create_int32 = _packet_creator.create_int32
create_int64 = _packet_creator.create_int64
create_uint8 = _packet_creator.create_uint8
create_uint16 = _packet_creator.create_uint16
create_uint32 = _packet_creator.create_uint32
create_uint64 = _packet_creator.create_uint64
create_float = _packet_creator.create_float
create_double = _packet_creator.create_double
create_int_array = _packet_creator.create_int_array
create_float_array = _packet_creator.create_float_array
create_int_vector = _packet_creator.create_int_vector
create_bool_vector = _packet_creator.create_bool_vector
create_float_vector = _packet_creator.create_float_vector
create_string_vector = _packet_creator.create_string_vector
create_packet_vector = _packet_creator.create_packet_vector
create_string_to_packet_map = _packet_creator.create_string_to_packet_map
create_matrix = _packet_creator.create_matrix
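# Usage sketch for the aliases above (values are illustrative):
#
#   p = create_int(42)
#   q = create_float_vector([0.1, 0.2, 0.3])
#   m = create_string_to_packet_map({'count': p, 'weights': q})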
def create_image_frame(data: Union[image_frame.ImageFrame, np.ndarray],
*,
image_format: image_frame.ImageFormat = None,
copy: bool = None) -> packet.Packet:
"""Create a MediaPipe ImageFrame packet.
A MediaPipe ImageFrame packet can be created from an existing MediaPipe
ImageFrame object and the data will be realigned and copied into a new
ImageFrame object inside of the packet.
A MediaPipe ImageFrame packet can also be created from the raw pixel data
represented as a numpy array with one of the uint8, uint16, and float data
types. There are three data ownership modes depending on how the 'copy' arg
is set.
i) Default mode
  If copy is not set, mutable data is always copied while the immutable data
  is shared by reference.
ii) Copy mode (safe)
  If copy is set to True, the data will be realigned and copied into an
  ImageFrame object inside of the packet regardless of the immutability of
  the original data.
iii) Reference mode (dangerous)
If copy is set to False, the data will be forced to be shared. If the data is
mutable (data.flags.writeable is True), a warning will be raised.
Args:
data: A MediaPipe ImageFrame object or the raw pixel data that is
      represented as a numpy ndarray.
image_format: One of the image_frame.ImageFormat enum types.
    copy: Indicate if the packet should copy the data from the numpy ndarray.
Returns:
A MediaPipe ImageFrame Packet.
Raises:
ValueError:
i) When "data" is a numpy ndarray, "image_format" is not provided or
the "data" array is not c_contiguous in the reference mode.
ii) When "data" is an ImageFrame object, the "image_format" arg doesn't
match the image format of the "data" ImageFrame object or "copy" is
explicitly set to False.
TypeError: If "image format" doesn't match "data" array's data type.
Examples:
np_array = np.random.randint(255, size=(321, 123, 3), dtype=np.uint8)
# Copy mode by default if the data array is writable.
image_frame_packet = mp.packet_creator.create_image_frame(
image_format=mp.ImageFormat.SRGB, data=np_array)
# Make the array unwriteable to trigger the reference mode.
np_array.flags.writeable = False
image_frame_packet = mp.packet_creator.create_image_frame(
image_format=mp.ImageFormat.SRGB, data=np_array)
image_frame = mp.ImageFrame(image_format=mp.ImageFormat.SRGB, data=np_array)
image_frame_packet = mp.packet_creator.create_image_frame(image_frame)
"""
if isinstance(data, image_frame.ImageFrame):
if image_format is not None and data.image_format != image_format:
raise ValueError(
'The provided image_format doesn\'t match the one from the data arg.')
if copy is not None and not copy:
# Taking a reference will make the created packet be mutable since the
# ImageFrame object can still be manipulated in Python, which voids packet
# immutability.
raise ValueError(
'Creating ImageFrame packet by taking a reference of another ImageFrame object is not supported yet.'
)
# pylint:disable=protected-access
return _packet_creator._create_image_frame_from_image_frame(data)
# pylint:enable=protected-access
else:
if image_format is None:
raise ValueError('Please provide \'image_format\' with \'data\'.')
# If copy arg is not set, copying the data if it's immutable. Otherwise,
# take a reference of the immutable data to avoid data copy.
if copy is None:
      copy = bool(data.flags.writeable)
if not copy:
      # TODO: Investigate why the first 2 bytes of the data have data
# corruption when "data" is not c_contiguous.
if not data.flags.c_contiguous:
raise ValueError(
'Reference mode is unavailable if \'data\' is not c_contiguous.')
if data.flags.writeable:
warnings.warn(
'\'data\' is still writeable. Taking a reference of the data to create ImageFrame packet is dangerous.',
RuntimeWarning, 2)
# pylint:disable=protected-access
return _packet_creator._create_image_frame_from_pixel_data(
image_format, data, copy)
# pylint:enable=protected-access
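# A short sketch of the three ownership modes described above (the array
# contents are assumptions for illustration):
#
#   arr = np.zeros((4, 4, 3), dtype=np.uint8)
#   create_image_frame(arr, image_format=image_frame.ImageFormat.SRGB)  # copy
#   arr.flags.writeable = False
#   create_image_frame(arr, image_format=image_frame.ImageFormat.SRGB)  # ref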
def create_image(data: Union[image.Image, np.ndarray],
*,
image_format: image_frame.ImageFormat = None,
copy: bool = None) -> packet.Packet:
"""Create a MediaPipe Image packet.
A MediaPipe Image packet can be created from an existing MediaPipe
Image object and the data will be realigned and copied into a new
Image object inside of the packet.
A MediaPipe Image packet can also be created from the raw pixel data
represented as a numpy array with one of the uint8, uint16, and float data
types. There are three data ownership modes depending on how the 'copy' arg
is set.
i) Default mode
  If copy is not set, mutable data is always copied while the immutable data
  is shared by reference.
ii) Copy mode (safe)
If copy is set to True, the data will be realigned and copied into an
  Image object inside of the packet regardless of the immutability of the
  original data.
iii) Reference mode (dangerous)
If copy is set to False, the data will be forced to be shared. If the data is
mutable (data.flags.writeable is True), a warning will be raised.
Args:
    data: A MediaPipe Image object or the raw pixel data that is represented
      as a numpy ndarray.
image_format: One of the mp.ImageFormat enum types.
    copy: Indicate if the packet should copy the data from the numpy ndarray.
Returns:
A MediaPipe Image Packet.
Raises:
ValueError:
i) When "data" is a numpy ndarray, "image_format" is not provided or
the "data" array is not c_contiguous in the reference mode.
ii) When "data" is an Image object, the "image_format" arg doesn't
match the image format of the "data" Image object or "copy" is
explicitly set to False.
TypeError: If "image format" doesn't match "data" array's data type.
Examples:
np_array = np.random.randint(255, size=(321, 123, 3), dtype=np.uint8)
# Copy mode by default if the data array is writable.
image_packet = mp.packet_creator.create_image(
image_format=mp.ImageFormat.SRGB, data=np_array)
# Make the array unwriteable to trigger the reference mode.
np_array.flags.writeable = False
image_packet = mp.packet_creator.create_image(
image_format=mp.ImageFormat.SRGB, data=np_array)
image = mp.Image(image_format=mp.ImageFormat.SRGB, data=np_array)
image_packet = mp.packet_creator.create_image(image)
"""
if isinstance(data, image.Image):
if image_format is not None and data.image_format != image_format:
raise ValueError(
'The provided image_format doesn\'t match the one from the data arg.')
if copy is not None and not copy:
# Taking a reference will make the created packet be mutable since the
# Image object can still be manipulated in Python, which voids packet
# immutability.
raise ValueError(
'Creating Image packet by taking a reference of another Image object is not supported yet.'
)
# pylint:disable=protected-access
return _packet_creator._create_image_from_image(data)
# pylint:enable=protected-access
else:
if image_format is None:
raise ValueError('Please provide \'image_format\' with \'data\'.')
# If copy arg is not set, copying the data if it's immutable. Otherwise,
# take a reference of the immutable data to avoid data copy.
if copy is None:
      copy = bool(data.flags.writeable)
if not copy:
      # TODO: Investigate why the first 2 bytes of the data have data
# corruption when "data" is not c_contiguous.
if not data.flags.c_contiguous:
raise ValueError(
'Reference mode is unavailable if \'data\' is not c_contiguous.')
if data.flags.writeable:
warnings.warn(
'\'data\' is still writeable. Taking a reference of the data to create Image packet is dangerous.',
RuntimeWarning, 2)
# pylint:disable=protected-access
return _packet_creator._create_image_from_pixel_data(
image_format, data, copy)
# pylint:enable=protected-access
def create_proto(proto_message: message.Message) -> packet.Packet:
"""Create a MediaPipe protobuf message packet.
Args:
proto_message: A Python protobuf message.
Returns:
A MediaPipe protobuf message Packet.
Raises:
RuntimeError: If the protobuf message type is not registered in MediaPipe.
Examples:
detection = detection_pb2.Detection()
text_format.Parse('score: 0.5', detection)
packet = mp.packet_creator.create_proto(detection)
output_detection = mp.packet_getter.get_proto(packet)
"""
# pylint:disable=protected-access
return _packet_creator._create_proto(proto_message.DESCRIPTOR.full_name,
proto_message.SerializeToString())
# pylint:enable=protected-access
def create_proto_vector(message_list: List[message.Message]) -> packet.Packet:
raise NotImplementedError('create_proto_vector is not implemented.')
| apache-2.0 | -5,221,229,872,574,769,000 | 40.445255 | 116 | 0.712575 | false |
yongtang/tensorflow | tensorflow/python/ops/tensor_array_ops.py | 6 | 54164 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorArray: a dynamically sized array of Tensors."""
# Mixture of pep8 and non-pep8 names, so disable pylint bad-name
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import traceback
import weakref
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import type_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import gen_control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_should_use
from tensorflow.python.util.tf_export import tf_export
# _GraphTensorArray accesses many of the hidden generated ops, but is in
# fact built to wrap these methods.
# pylint: disable=protected-access
class _GraphTensorArray(object):
"""Graph-mode implementation of TensorArray.
"""
def __init__(self,
dtype,
size=None,
dynamic_size=None,
clear_after_read=None,
tensor_array_name=None,
handle=None,
flow=None,
infer_shape=True,
element_shape=None,
colocate_with_first_write_call=True,
name=None):
"""Constructs a graph mode TensorArray.
Args:
dtype: (required) data type of the TensorArray.
size: (optional) int32 scalar `Tensor`: the size of the TensorArray.
Required if handle is not provided.
dynamic_size: (optional) Python bool: If true, writes to the TensorArray
can grow the TensorArray past its initial size. Default: False.
clear_after_read: Boolean (optional, default: True). If True, clear
TensorArray values after reading them. This disables read-many
semantics, but allows early release of memory.
tensor_array_name: (optional) Python string: the name of the TensorArray.
This is used when creating the TensorArray handle. If this value is
set, handle should be None.
handle: (optional) A `Tensor` handle to an existing TensorArray. If this
is set, tensor_array_name should be None. Only supported in graph mode.
flow: (optional) A float `Tensor` scalar coming from an existing
`TensorArray.flow`. Only supported in graph mode.
infer_shape: (optional, default: True) If True, shape inference
is enabled. In this case, all elements must have the same shape.
element_shape: (optional, default: None) A `TensorShape` object specifying
the shape constraints of each of the elements of the TensorArray.
Need not be fully defined.
colocate_with_first_write_call: If `True`, the TensorArray will be
colocated on the same device as the Tensor used on its first write
(write operations include `write`, `unstack`, and `split`). If `False`,
the TensorArray will be placed on the device determined by the
device context available during its initialization.
name: A name for the operation (optional).
Raises:
ValueError: if both handle and tensor_array_name are provided.
TypeError: if handle is provided but is not a Tensor.
"""
if handle is not None and tensor_array_name:
raise ValueError(
"Cannot construct with both handle and tensor_array_name")
if handle is not None and not isinstance(handle, ops.Tensor):
raise TypeError("Handle must be a Tensor")
if handle is None and size is None:
raise ValueError("Size must be provided if handle is not provided")
if handle is not None and size is not None:
raise ValueError("Cannot provide both a handle and size "
"at the same time")
if handle is not None and element_shape is not None:
raise ValueError("Cannot provide both a handle and element_shape "
"at the same time")
if handle is not None and dynamic_size is not None:
raise ValueError("Cannot provide both a handle and dynamic_size "
"at the same time")
if handle is not None and clear_after_read is not None:
raise ValueError("Cannot provide both a handle and clear_after_read "
"at the same time")
if clear_after_read is None:
clear_after_read = True
self._dynamic_size = dynamic_size or False
self._dtype = dtypes.as_dtype(dtype).base_dtype
# Used to keep track of what tensors the TensorArray should be
# colocated with. We choose to colocate the TensorArray with the
# first tensor written to it.
self._colocate_with_first_write_call = colocate_with_first_write_call
if colocate_with_first_write_call:
self._colocate_with = []
else:
self._colocate_with = None
# Record the current static shape for the array elements. The element
# shape is defined either by `element_shape` or the shape of the tensor
    # of the first write. If `infer_shape` is true, all writes check for
# shape equality.
self._element_shape = [tensor_shape.as_shape(element_shape)]
self._infer_shape = infer_shape
self._size = size
with ops.name_scope(name, "TensorArray", [handle, size, flow]) as scope:
if handle is not None:
self._handle = handle
if flow is None:
raise ValueError("flow must not be None if handle is not None.")
self._flow = flow
else:
# Construct the TensorArray with an empty device. The first
# write into the TensorArray from a Tensor with a set device
# will retroactively set the device value of this op.
def create():
"""Create the TensorArray op."""
return gen_data_flow_ops.tensor_array_v3(
dtype=dtype,
size=size,
element_shape=element_shape,
identical_element_shapes=infer_shape,
dynamic_size=self._dynamic_size,
clear_after_read=clear_after_read,
tensor_array_name=tensor_array_name,
name=scope)
if colocate_with_first_write_call:
with ops.device(None), ops.colocate_with(None, ignore_existing=True):
self._handle, self._flow = create()
else:
self._handle, self._flow = create()
@property
def flow(self):
return self._flow
@property
def dtype(self):
return self._dtype
@property
def handle(self):
return self._handle
@property
def element_shape(self):
return self._element_shape[0]
def _check_element_shape(self, shape):
"""Changes the element shape of the array given a shape to merge with.
Args:
shape: A `TensorShape` object to merge with.
Raises:
ValueError: if the provided shape is incompatible with the current
element shape of the `TensorArray`.
"""
if not shape.is_compatible_with(self.element_shape):
raise ValueError("Inconsistent shapes: saw %s but expected %s " %
(shape, self.element_shape))
if self._infer_shape:
self._element_shape[0] = self.element_shape.merge_with(shape)
@contextlib.contextmanager
def _maybe_colocate_with(self, value):
"""Colocate operations with an internal colocation group or `value`.
Args:
value: `Tensor`, the tensor to try to colocate with.
Yields:
Does not yield anything, but the new context is a colocation context.
If no internal colocation group is set, colocate with `value` and set
the internal colocation group to be value.
"""
if not self._colocate_with_first_write_call:
yield
else:
if not self._colocate_with:
self._colocate_with.append(value)
with ops.colocate_with(self._colocate_with[0]):
yield
def identity(self):
"""See TensorArray."""
flow = array_ops.identity(self._flow)
return build_ta_with_new_flow(self, flow)
def grad(self, source, flow=None, name=None):
"""See TensorArray."""
# tensor_array_grad requires a flow input when forward
# TensorArrays are dynamically sized. This forces the creation
# of the grad TensorArray only once the final forward array's size
# is fixed.
if flow is None:
flow = self.flow
with ops.name_scope(name, "TensorArrayGrad", [self._handle]):
with ops.colocate_with(self._handle):
g_handle, unused_flow = gen_data_flow_ops.tensor_array_grad_v3(
handle=self._handle, source=source, flow_in=flow, name=name)
with ops.control_dependencies([g_handle]):
flow = array_ops.identity(flow, name="gradient_flow")
g = TensorArray(
dtype=self._dtype,
handle=g_handle,
flow=flow,
infer_shape=self._infer_shape,
colocate_with_first_write_call=False)
# pylint: disable=protected-access
g._implementation._element_shape = self._element_shape
# pylint: enable=protected-access
return g
def read(self, index, name=None):
"""See TensorArray."""
value = gen_data_flow_ops.tensor_array_read_v3(
handle=self._handle,
index=index,
flow_in=self._flow,
dtype=self._dtype,
name=name)
if self._element_shape:
value.set_shape(self._element_shape[0].dims)
return value
def write(self, index, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayWrite", [self._handle, index, value]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape)
with self._maybe_colocate_with(value):
flow_out = gen_data_flow_ops.tensor_array_write_v3(
handle=self._handle,
index=index,
value=value,
flow_in=self._flow,
name=name)
return build_ta_with_new_flow(self, flow_out)
def stack(self, name=None):
"""See TensorArray."""
with ops.colocate_with(self._handle):
with ops.name_scope(name, "TensorArrayStack", [self._handle]):
value = self.gather(math_ops.range(0, self.size()), name=name)
if (self.element_shape and not self._dynamic_size and
self._size is not None):
value.set_shape([tensor_util.constant_value(self._size)] +
self.element_shape.dims)
return value
def gather(self, indices, name=None):
"""See TensorArray."""
if self._element_shape:
element_shape = self._element_shape[0]
else:
element_shape = tensor_shape.unknown_shape(None)
value = gen_data_flow_ops.tensor_array_gather_v3(
handle=self._handle,
indices=indices,
flow_in=self._flow,
dtype=self._dtype,
name=name,
element_shape=element_shape)
if self.element_shape:
value.set_shape([None] + self.element_shape.dims)
return value
def concat(self, name=None):
"""See TensorArray."""
value, _ = gen_data_flow_ops.tensor_array_concat_v3(
handle=self._handle,
flow_in=self._flow,
dtype=self._dtype,
name=name,
element_shape_except0=self.element_shape[1:])
if self.element_shape:
value.set_shape([None] + self.element_shape.dims[1:])
return value
@tf_should_use.should_use_result
def unstack(self, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayUnstack", [self._handle, value]):
num_elements = array_ops.shape(value)[0]
return self.scatter(
indices=math_ops.range(0, num_elements), value=value, name=name)
@tf_should_use.should_use_result
def scatter(self, indices, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayScatter",
[self._handle, value, indices]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
if not context.executing_eagerly():
self._check_element_shape(value.shape[1:])
with self._maybe_colocate_with(value):
flow_out = gen_data_flow_ops.tensor_array_scatter_v3(
handle=self._handle,
indices=indices,
value=value,
flow_in=self._flow,
name=name)
return build_ta_with_new_flow(self, flow_out)
@tf_should_use.should_use_result
def split(self, value, lengths, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArraySplit",
[self._handle, value, lengths]):
value = ops.convert_to_tensor(value, dtype=self._dtype, name="value")
with self._maybe_colocate_with(value):
lengths_64 = math_ops.cast(lengths, dtypes.int64)
if not context.executing_eagerly():
clengths = tensor_util.constant_value(lengths_64)
if value.shape.dims is not None and clengths is not None:
if clengths.shape and clengths.max() == clengths.min():
self._check_element_shape(
tensor_shape.TensorShape([clengths[0]]).concatenate(
value.shape[1:]))
flow_out = gen_data_flow_ops.tensor_array_split_v3(
handle=self._handle,
value=value,
lengths=lengths_64,
flow_in=self._flow,
name=name)
return build_ta_with_new_flow(self, flow_out)
def size(self, name=None):
"""See TensorArray."""
if not self._dynamic_size and self._size is not None:
return ops.convert_to_tensor(self._size, dtype=dtypes.int32)
else:
return gen_data_flow_ops.tensor_array_size_v3(
handle=self._handle, flow_in=self.flow, name=name)
@tf_should_use.should_use_result
def close(self, name=None):
"""See TensorArray."""
return gen_data_flow_ops.tensor_array_close_v3(
handle=self._handle, name=name)
class _GraphTensorArrayV2(object):
"""Graph-mode implementation of TensorArray backed by TensorLists.
The backing tensor of this TensorArray is a TensorList variant tensor which is
stored in the `flow`. The `handle` is always none here. The reason we use the
`flow` field and not the `handle` field is to ensure backwards compatibility
with legacy control flow.
"""
def __init__(self,
dtype,
size=None,
dynamic_size=None,
clear_after_read=None,
tensor_array_name=None,
handle=None,
flow=None,
infer_shape=True,
element_shape=None,
colocate_with_first_write_call=True,
name=None):
"""Constructs a graph mode TensorArray.
Args:
dtype: (required) data type of the TensorArray.
size: (optional) int32 scalar `Tensor`: the size of the TensorArray.
Required if flow is not provided.
dynamic_size: (optional) Python bool: If true, writes to the TensorArray
can grow the TensorArray past its initial size. Default: False.
clear_after_read: (optional) unused. Not supported in TensorLists.
tensor_array_name: (optional) unused.
handle: (optional) Must always be None.
flow: (optional) A variant `Tensor` scalar for a TensorList.
infer_shape: (optional, default: True) If True, shape inference is
enabled. In this case, all elements must have the same shape.
element_shape: (optional, default: None) A `TensorShape` object specifying
the shape constraints of each of the elements of the TensorArray. Need
not be fully defined.
      colocate_with_first_write_call: (optional) unused.
name: (optional) A name for the operation.
Raises:
ValueError: if both handle and tensor_array_name are provided.
TypeError: if handle is provided but is not a Tensor.
"""
assert handle is None
del handle
del clear_after_read
del tensor_array_name
del colocate_with_first_write_call
self._dynamic_size = dynamic_size
self._size = size
if (flow is not None and
(not isinstance(flow, ops.Tensor) or flow.dtype != dtypes.variant)):
raise TypeError("flow must be a variant tensor")
if flow is None and size is None:
raise ValueError("Size must be provided if flow is not provided")
if flow is not None and size is not None:
raise ValueError("Cannot provide both a flow and size "
"at the same time")
if flow is not None and element_shape is not None:
raise ValueError("Cannot provide both a flow and element_shape "
"at the same time")
self._dtype = dtypes.as_dtype(dtype).base_dtype
# Record the current static shape for the array elements. The element
# shape is defined either by `element_shape` or the shape of the tensor
    # of the first write. If `infer_shape` is true, all writes check for
# shape equality.
self._element_shape = [tensor_shape.as_shape(element_shape)]
self._infer_shape = infer_shape
with ops.name_scope(name, "TensorArrayV2", [size, flow]) as scope:
if flow is None:
self._flow = list_ops.tensor_list_reserve(
element_shape=element_shape,
num_elements=size,
element_dtype=dtype,
name=scope)
else:
self._flow = flow
# For backwards compatibility.
self._colocate_with_first_write_call = None
self._colocate_with = None
@property
def flow(self):
return self._flow
@property
def dtype(self):
return self._dtype
@property
def element_shape(self):
return self._element_shape[0]
@property
def handle(self):
# We intentionally do not raise an error so that legacy while_loop does not
# complain.
return None
def _check_element_shape(self, shape):
"""Changes the element shape of the array given a shape to merge with.
Args:
shape: A `TensorShape` object to merge with.
Raises:
ValueError: if the provided shape is incompatible with the current
element shape of the `TensorArray`.
"""
if not shape.is_compatible_with(self.element_shape):
raise ValueError("Inconsistent shapes: saw %s but expected %s " %
(shape, self.element_shape))
if self._infer_shape:
self._element_shape[0] = self.element_shape.merge_with(shape)
def identity(self):
"""See TensorArray."""
flow = array_ops.identity(self._flow)
return build_ta_with_new_flow(self, flow)
def grad(self, source, flow=None, name=None):
"""Not supported."""
raise NotImplementedError()
def read(self, index, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayV2Read", [self._flow, index]):
value = list_ops.tensor_list_get_item(
input_handle=self._flow,
index=index,
element_dtype=self._dtype,
element_shape=self.element_shape,
name=name)
return value
def write(self, index, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayV2Write", [self._flow, index, value]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape)
flow_out = list_ops.tensor_list_set_item(
input_handle=self._flow,
index=index,
item=value,
resize_if_index_out_of_bounds=self._dynamic_size,
name=name)
return build_ta_with_new_flow(self, flow_out)
def stack(self, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayV2Stack", [self._flow]):
# TODO(b/139941163): remove constant_value after changing num_elements to regular input
if not self._dynamic_size and self._size is not None:
ta_size = tensor_util.constant_value(self._size)
else:
ta_size = -1
value = list_ops.tensor_list_stack(
input_handle=self._flow,
element_dtype=self._dtype,
num_elements=ta_size,
element_shape=self.element_shape)
return value
def gather(self, indices, name=None):
"""See TensorArray."""
value = list_ops.tensor_list_gather(
input_handle=self._flow,
indices=indices,
element_dtype=self._dtype,
element_shape=self.element_shape,
name=name)
return value
def concat(self, name=None):
"""See TensorArray."""
if self.element_shape:
element_shape = [None] + self.element_shape.dims[1:]
else:
element_shape = None
value = list_ops.tensor_list_concat(
input_handle=self._flow,
element_dtype=self._dtype,
element_shape=element_shape,
name=name)
return value
@tf_should_use.should_use_result
def unstack(self, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayUnstack", [self._flow, value]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape[1:])
flow_out = list_ops.tensor_list_from_tensor(
tensor=value, element_shape=value.shape[1:])
return build_ta_with_new_flow(self, flow_out)
@tf_should_use.should_use_result
def scatter(self, indices, value, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArrayScatter",
[self._flow, value, indices]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
self._check_element_shape(value.shape[1:])
flow_out = list_ops.tensor_list_scatter(
tensor=value, indices=indices, element_shape=self.element_shape,
input_handle=self._flow)
return build_ta_with_new_flow(self, flow_out)
@tf_should_use.should_use_result
def split(self, value, lengths, name=None):
"""See TensorArray."""
with ops.name_scope(name, "TensorArraySplit", [self._flow, value, lengths]):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
lengths_64 = math_ops.cast(lengths, dtypes.int64)
if not context.executing_eagerly():
clengths = tensor_util.constant_value(lengths_64)
if value.shape.dims is not None and clengths is not None:
if clengths.shape and clengths.max() == clengths.min():
self._check_element_shape(
tensor_shape.TensorShape([clengths[0]]).concatenate(
value.shape[1:]))
flow_out = list_ops.tensor_list_split(
tensor=value,
lengths=lengths_64,
element_shape=self.element_shape,
name=name)
return build_ta_with_new_flow(self, flow_out)
def size(self, name=None):
"""See TensorArray."""
if not self._dynamic_size and self._size is not None:
return ops.convert_to_tensor(self._size, dtype=dtypes.int32)
else:
return list_ops.tensor_list_length(input_handle=self._flow, name=name)
def close(self, name=None):
"""See TensorArray."""
return gen_control_flow_ops.no_op(name=name)
# pylint: enable=protected-access
class _EagerTensorArray(object):
"""Eager-compatible implementation of TensorArray.
"""
def __init__(self,
dtype,
size=None,
dynamic_size=None,
clear_after_read=None,
tensor_array_name=None,
handle=None,
flow=None,
infer_shape=True,
element_shape=None,
colocate_with_first_write_call=True,
name=None):
"""Constructs a TensorArray compatible with eager execution.
Args:
dtype: (required) data type of the TensorArray.
size: (optional) int32 scalar `Tensor`: the size of the TensorArray.
Required if handle is not provided.
dynamic_size: (optional) Python bool: If true, writes to the TensorArray
can grow the TensorArray past its initial size. Default: False.
clear_after_read: Boolean (optional, default: True). If True, clear
TensorArray values after reading them. This disables read-many
semantics, but allows early release of memory.
tensor_array_name: unused.
handle: unsupported.
flow: unsupported.
infer_shape: used for error checking, same semantics as TensorArray.
element_shape: used for error checking, same semantics as TensorArray.
colocate_with_first_write_call: unsupported.
name: unsupported.
Raises:
      ValueError: if handle or flow is supplied, or if size is not supplied.
"""
del (flow, tensor_array_name, name) # Unused.
if handle is not None:
raise ValueError("TensorArray handles are not supported when eager "
"execution is enabled.")
if size is None:
raise ValueError("Size must be declared for TensorArrays when eager "
"execution is enabled.")
# These attributes are not meaningful when eager is enabled, but some
# library functions (e.g., those in control_flow_ops.py) access them to
# create new tensor arrays; as such, we define them for the sake of
# compatibility.
self._handle = None
# we assign a dummy value to _flow in case other code assumes it to be
# a Tensor
self._flow = constant_op.constant(0, dtype=dtypes.int32)
self._infer_shape = infer_shape
self._element_shape = tensor_shape.as_shape(element_shape)
self._colocate_with_first_write_call = colocate_with_first_write_call
self._dtype = dtypes.as_dtype(dtype).base_dtype
self._dynamic_size = dynamic_size or False
self._clear_after_read = (
True if clear_after_read is None else clear_after_read)
self._previously_read_indices = []
if isinstance(size, ops.EagerTensor):
size = size.numpy()
self._tensor_array = [None for _ in range(size)]
@property
def flow(self):
"""For compatibility; flows are not meaningful when eager is enabled."""
return self._flow
@property
def dtype(self):
return self._dtype
@property
def handle(self):
"""For compatibility; handles are not meaningful when eager is enabled."""
return self._handle
@property
def element_shape(self):
return self._element_shape
def identity(self):
"""See TensorArray."""
return self.parent()
def grad(self, source, flow=None, name=None):
raise NotImplementedError(
"TensorArray.grad is not supported when executing eagerly; eager's "
"gradient implementation does not use/need this function to compute "
"gradients of operations that use TensorArrays.")
def read(self, index, name=None):
"""See TensorArray."""
del name # not meaningful when executing eagerly.
if isinstance(index, ops.EagerTensor):
index = index.numpy()
if index < 0:
raise errors_impl.OutOfRangeError(
None, None,
"Reading from negative indices (index %d) is not allowed." % index)
if index >= len(self._tensor_array):
raise errors_impl.OutOfRangeError(
None, None, "Tried to read from index %d but array size is: %d" %
(index, len(self._tensor_array)))
tensor = self._tensor_array[index]
if tensor is None:
if index in self._previously_read_indices:
raise errors_impl.InvalidArgumentError(
None, None,
"Could not read index %d twice because it was cleared after "
"a previous read (perhaps try setting clear_after_read = false?)" %
index)
else:
tensor = self._maybe_zero(index)
if self._clear_after_read:
self._tensor_array[index] = None
self._previously_read_indices.append(index)
return tensor
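  # e.g. (illustrative), with the default clear_after_read=True:
  #
  #   ta = TensorArray(dtypes.int32, size=1)   # eager mode
  #   ta = ta.write(0, 7)
  #   ta.read(0)    # -> 7
  #   ta.read(0)    # raises InvalidArgumentError: the index was cleared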
def _write(self, index, value):
"""Writes `value` into index named by `index`.
Args:
index: 0-D. int32 scalar with the index to write to.
value: N-D. Tensor of type `dtype`. The `Tensor` to write to `index`.
Raises:
errors_impl.InvalidArgumentError: `value` dtype does not match dtype.
errors_impl.OutOfRangeError: `index` is out of bounds.
ValueError: shape of `value` is not consistent with inferred shape.
"""
if isinstance(index, ops.EagerTensor):
index = index.numpy()
if index < 0:
raise errors_impl.OutOfRangeError(
None, None,
"Writing to negative indices (index %d) is not allowed." % index)
size = len(self._tensor_array)
if index >= size:
if not self._dynamic_size:
raise errors_impl.OutOfRangeError(
None, None,
"Tried to write to index %d but array is not resizeable and size "
"is: %d" % (index, size))
self._tensor_array.extend(None for _ in range(index - size + 1))
if not isinstance(value, ops.EagerTensor):
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
if self._dtype != value.dtype:
raise errors_impl.InvalidArgumentError(
None, None,
"TensorArray dtype is %s but Op is trying to write dtype %s" %
(self._dtype.name, value.dtype.name))
if not self._element_shape.is_compatible_with(value.shape):
raise ValueError("Incompatible shape for value (%s), expected (%s)" %
(value.shape, self._element_shape))
if self._infer_shape:
self._element_shape = self._element_shape.merge_with(value.shape)
self._tensor_array[index] = value
def write(self, index, value, name=None):
"""See TensorArray."""
del name # not meaningful when executing eagerly.
self._write(index, value)
return self.parent()
def _maybe_zero(self, ix):
val = self._tensor_array[ix]
if val is None:
val = self._tensor_array[ix] = array_ops.zeros(
shape=self._element_shape, dtype=self._dtype)
return val
def stack(self, name=None):
"""See TensorArray."""
if self._tensor_array:
for ix in range(len(self._tensor_array)):
self._maybe_zero(ix)
if not self._tensor_array and self._element_shape.is_fully_defined():
return ops.convert_to_tensor(
np.ndarray([0] + self._element_shape), name=name, dtype=self._dtype)
else:
return ops.convert_to_tensor(
self._tensor_array, name=name, dtype=self._dtype)
def gather(self, indices, name=None):
"""See TensorArray."""
del name # not meaningful when executing eagerly.
if isinstance(indices, ops.EagerTensor):
indices = indices.numpy()
return array_ops.stack([self._maybe_zero(i) for i in indices])
def concat(self, name=None):
"""See TensorArray."""
try:
return array_ops.concat(
[self._maybe_zero(ix) for ix in range(len(self._tensor_array))],
0, name=name)
except errors_impl.OpError:
# Reproduce a subset of the error-handling for graph-mode TensorArrays.
shapes = [t.shape for t in self._tensor_array]
ndims = [s.ndims for s in shapes]
if 0 in ndims:
idx = ndims.index(0)
raise errors_impl.InvalidArgumentError(
None, None, "Concat saw a scalar shape at index %d but requires "
"at least vectors." % idx)
else:
raise
def unstack(self, value, name=None):
"""See TensorArray."""
tensors = array_ops.unstack(value, name=name)
if len(tensors) > len(self._tensor_array) and not self._dynamic_size:
raise ValueError(
"Cannot unstack %d tensors into a TensorArray of static size %d" %
(len(tensors), len(self._tensor_array)))
self._tensor_array = tensors
return self.parent()
def scatter(self, indices, value, name=None):
"""See TensorArray."""
del name # not meaningful when executing eagerly.
if isinstance(indices, ops.EagerTensor):
indices = indices.numpy()
for index, val in zip(indices, array_ops.unstack(value)):
self._write(index, val) # pylint: disable=protected-access
return self.parent()
def split(self, value, lengths, name=None):
"""See TensorArray."""
# TODO(b/129870929): Fix after all callers provide proper init dtype.
value = ops.convert_to_tensor(
value, preferred_dtype=self._dtype, name="value")
_check_dtypes(value, self._dtype)
lengths = ops.convert_to_tensor(lengths)
sum_lengths = math_ops.reduce_sum(lengths)
if lengths.shape.ndims != 1:
raise errors_impl.InvalidArgumentError(
None, None, "Expected lengths to be a vector, received shape: %s" %
lengths.shape.as_list())
elif value.shape.ndims == 0:
raise errors_impl.InvalidArgumentError(
None, None, "Expected value to be at least a vector, "
"but received shape: %s" % value.shape.as_list())
elif sum_lengths.numpy() != value.shape.as_list()[0]:
raise errors_impl.InvalidArgumentError(
None, None, "Expected sum of lengths to be equal to "
"values.shape[0], but sum of lengths is %d and "
"value's shape is: %s " % (sum_lengths.numpy(),
value.shape.as_list()))
elif not self._dynamic_size and lengths.shape[0] != len(self._tensor_array):
raise errors_impl.InvalidArgumentError(
None, None, "TensorArray's size is not equal to the size of "
"lengths (%d vs. %d), and the TensorArray is not marked as "
"dynamically resizeable" % (len(self._tensor_array),
lengths.shape[0]))
else:
self._tensor_array = array_ops.split(value, lengths, name=name)
return self.parent()
def size(self, name=None):
"""See TensorArray."""
del name # not meaningful when executing eagerly.
return constant_op.constant(len(self._tensor_array))
def close(self, name=None):
del name # not meaningful when executing eagerly.
del self._tensor_array[:]
# TensorArray is designed to hide an underlying implementation object
# and as such accesses many of that object's hidden fields.
# pylint: disable=protected-access
# pylint:disable=line-too-long
@tf_export("TensorArray")
class TensorArray(object):
"""Class wrapping dynamic-sized, per-time-step, write-once Tensor arrays.
This class is meant to be used with dynamic iteration primitives such as
`while_loop` and `map_fn`. It supports gradient back-propagation via special
"flow" control flow dependencies.
Example 1: Plain reading and writing.
>>> ta = tf.TensorArray(tf.float32, size=0, dynamic_size=True, clear_after_read=False)
>>> ta = ta.write(0, 10)
>>> ta = ta.write(1, 20)
>>> ta = ta.write(2, 30)
>>>
>>> ta.read(0)
<tf.Tensor: shape=(), dtype=float32, numpy=10.0>
>>> ta.read(1)
<tf.Tensor: shape=(), dtype=float32, numpy=20.0>
>>> ta.read(2)
<tf.Tensor: shape=(), dtype=float32, numpy=30.0>
>>> ta.stack()
<tf.Tensor: shape=(3,), dtype=float32, numpy=array([10., 20., 30.],
dtype=float32)>
Example 2: Fibonacci sequence algorithm that writes in a loop then returns.
>>> @tf.function
... def fibonacci(n):
... ta = tf.TensorArray(tf.float32, size=0, dynamic_size=True)
... ta = ta.unstack([0., 1.])
...
... for i in range(2, n):
... ta = ta.write(i, ta.read(i - 1) + ta.read(i - 2))
...
... return ta.stack()
>>>
>>> fibonacci(7)
<tf.Tensor: shape=(7,), dtype=float32,
numpy=array([0., 1., 1., 2., 3., 5., 8.], dtype=float32)>
Example 3: A simple loop interacting with a `tf.Variable`.
>>> v = tf.Variable(1)
>>> @tf.function
... def f(x):
... ta = tf.TensorArray(tf.int32, size=0, dynamic_size=True)
... for i in tf.range(x):
... v.assign_add(i)
... ta = ta.write(i, v)
... return ta.stack()
>>> f(5)
<tf.Tensor: shape=(5,), dtype=int32, numpy=array([ 1, 2, 4, 7, 11],
dtype=int32)>
"""
def __init__(self,
dtype,
size=None,
dynamic_size=None,
clear_after_read=None,
tensor_array_name=None,
handle=None,
flow=None,
infer_shape=True,
element_shape=None,
colocate_with_first_write_call=True,
name=None):
"""Construct a new TensorArray or wrap an existing TensorArray handle.
A note about the parameter `name`:
The name of the `TensorArray` (even if passed in) is uniquified: each time
a new `TensorArray` is created at runtime it is assigned its own name for
the duration of the run. This avoids name collisions if a `TensorArray`
is created within a `while_loop`.
Args:
dtype: (required) data type of the TensorArray.
size: (optional) int32 scalar `Tensor`: the size of the TensorArray.
Required if handle is not provided.
dynamic_size: (optional) Python bool: If true, writes to the TensorArray
can grow the TensorArray past its initial size. Default: False.
clear_after_read: Boolean (optional, default: True). If True, clear
TensorArray values after reading them. This disables read-many
semantics, but allows early release of memory.
tensor_array_name: (optional) Python string: the name of the TensorArray.
This is used when creating the TensorArray handle. If this value is
set, handle should be None.
handle: (optional) A `Tensor` handle to an existing TensorArray. If this
is set, tensor_array_name should be None. Only supported in graph mode.
flow: (optional) A float `Tensor` scalar coming from an existing
`TensorArray.flow`. Only supported in graph mode.
infer_shape: (optional, default: True) If True, shape inference
is enabled. In this case, all elements must have the same shape.
element_shape: (optional, default: None) A `TensorShape` object specifying
the shape constraints of each of the elements of the TensorArray.
Need not be fully defined.
colocate_with_first_write_call: If `True`, the TensorArray will be
colocated on the same device as the Tensor used on its first write
(write operations include `write`, `unstack`, and `split`). If `False`,
the TensorArray will be placed on the device determined by the
device context available during its initialization.
name: A name for the operation (optional).
Raises:
ValueError: if both handle and tensor_array_name are provided.
TypeError: if handle is provided but is not a Tensor.
"""
if (context.executing_eagerly() and
(flow is None or flow.dtype != dtypes.variant)):
# It is possible to create a Variant-style TensorArray even in eager mode,
# and this is fine but can have performance implications in eager.
# An example of when this happens is if a tf.function returns a
# TensorArray in its output; its flow variant object is returned to Eager.
# This can be wrapped back up in a Variant-style TensorArray.
implementation = _EagerTensorArray
elif (flow is not None and flow.dtype == dtypes.variant or
control_flow_util.EnableControlFlowV2(ops.get_default_graph())):
implementation = _GraphTensorArrayV2
else:
implementation = _GraphTensorArray
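    # Summary of the selection above: eager execution uses _EagerTensorArray,
    # variant flows and control-flow-v2 graphs use _GraphTensorArrayV2, and
    # legacy graphs fall back to _GraphTensorArray.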
self._implementation = implementation(
dtype,
size=size,
dynamic_size=dynamic_size,
clear_after_read=clear_after_read,
tensor_array_name=tensor_array_name,
handle=handle,
flow=flow,
infer_shape=infer_shape,
element_shape=element_shape,
colocate_with_first_write_call=colocate_with_first_write_call,
name=name)
self._implementation.parent = weakref.ref(self)
@property
def flow(self):
"""The flow `Tensor` forcing ops leading to this TensorArray state."""
return self._implementation._flow
@property
def dtype(self):
"""The data type of this TensorArray."""
return self._implementation._dtype
@property
def handle(self):
"""The reference to the TensorArray."""
return self._implementation.handle
@property
def element_shape(self):
"""The `tf.TensorShape` of elements in this TensorArray."""
return self._implementation.element_shape
@property
def dynamic_size(self):
"""Python bool; if `True` the TensorArray can grow dynamically."""
return self._implementation._dynamic_size
@property
def _infer_shape(self):
# TODO(slebedev): consider making public or changing TensorArrayStructure
# to access _implementation directly. Note that dynamic_size is also
# only used by TensorArrayStructure.
return self._implementation._infer_shape
def identity(self):
"""Returns a TensorArray with the same content and properties.
Returns:
A new TensorArray object with flow that ensures the control dependencies
from the contexts will become control dependencies for writes, reads, etc.
Use this object for all subsequent operations.
"""
return self._implementation.identity()
def grad(self, source, flow=None, name=None):
return self._implementation.grad(source, flow=flow, name=name)
def read(self, index, name=None):
"""Read the value at location `index` in the TensorArray.
Args:
index: 0-D. int32 tensor with the index to read from.
name: A name for the operation (optional).
Returns:
The tensor at index `index`.
"""
return self._implementation.read(index, name=name)
@tf_should_use.should_use_result(warn_in_eager=True)
def write(self, index, value, name=None):
"""Write `value` into index `index` of the TensorArray.
Args:
index: 0-D. int32 scalar with the index to write to.
value: N-D. Tensor of type `dtype`. The Tensor to write to this index.
name: A name for the operation (optional).
Returns:
A new TensorArray object with flow that ensures the write occurs.
Use this object for all subsequent operations.
Raises:
ValueError: if there are more writers than specified.
"""
return self._implementation.write(index, value, name=name)
def stack(self, name=None):
"""Return the values in the TensorArray as a stacked `Tensor`.
All of the values must have been written and their shapes must all match.
If input shapes have rank-`R`, then output shape will have rank-`(R+1)`.
Args:
name: A name for the operation (optional).
Returns:
All the tensors in the TensorArray stacked into one tensor.
"""
return self._implementation.stack(name=name)
def gather(self, indices, name=None):
"""Return selected values in the TensorArray as a packed `Tensor`.
All of selected values must have been written and their shapes
must all match.
Args:
indices: A `1-D` `Tensor` taking values in `[0, max_value)`. If
the `TensorArray` is not dynamic, `max_value=size()`.
name: A name for the operation (optional).
Returns:
The tensors in the `TensorArray` selected by `indices`, packed into one
tensor.
"""
return self._implementation.gather(indices, name=name)
def concat(self, name=None):
"""Return the values in the TensorArray as a concatenated `Tensor`.
    All of the values must have been written, their ranks must match, and
    their shapes must all match for all dimensions except the first.
Args:
name: A name for the operation (optional).
Returns:
All the tensors in the TensorArray concatenated into one tensor.
"""
return self._implementation.concat(name=name)
@tf_should_use.should_use_result
def unstack(self, value, name=None):
"""Unstack the values of a `Tensor` in the TensorArray.
If input value shapes have rank-`R`, then the output TensorArray will
contain elements whose shapes are rank-`(R-1)`.
Args:
value: (N+1)-D. Tensor of type `dtype`. The Tensor to unstack.
name: A name for the operation (optional).
Returns:
A new TensorArray object with flow that ensures the unstack occurs.
Use this object for all subsequent operations.
Raises:
ValueError: if the shape inference fails.
"""
return self._implementation.unstack(value, name=name)
@tf_should_use.should_use_result
def scatter(self, indices, value, name=None):
"""Scatter the values of a `Tensor` in specific indices of a `TensorArray`.
Args:
indices: A `1-D` `Tensor` taking values in `[0, max_value)`. If
the `TensorArray` is not dynamic, `max_value=size()`.
value: (N+1)-D. Tensor of type `dtype`. The Tensor to unpack.
name: A name for the operation (optional).
Returns:
A new TensorArray object with flow that ensures the scatter occurs.
Use this object for all subsequent operations.
Raises:
ValueError: if the shape inference fails.
"""
return self._implementation.scatter(indices, value, name=name)
@tf_should_use.should_use_result
def split(self, value, lengths, name=None):
"""Split the values of a `Tensor` into the TensorArray.
Args:
value: (N+1)-D. Tensor of type `dtype`. The Tensor to split.
lengths: 1-D. int32 vector with the lengths to use when splitting
`value` along its first dimension.
name: A name for the operation (optional).
Returns:
A new TensorArray object with flow that ensures the split occurs.
Use this object for all subsequent operations.
Raises:
ValueError: if the shape inference fails.
"""
return self._implementation.split(value, lengths, name=name)
def size(self, name=None):
"""Return the size of the TensorArray."""
return self._implementation.size(name=name)
@tf_should_use.should_use_result
def close(self, name=None):
"""Close the current TensorArray."""
return self._implementation.close(name=name)
def build_ta_with_new_flow(old_ta, flow):
"""Builds a TensorArray with a new `flow` tensor."""
# Sometimes we get old_ta as the implementation, sometimes it's the
# TensorArray wrapper object.
impl = (old_ta._implementation if isinstance(old_ta, TensorArray)
else old_ta)
if not context.executing_eagerly():
if (not isinstance(impl, _GraphTensorArrayV2) and
control_flow_util.EnableControlFlowV2(ops.get_default_graph())):
raise NotImplementedError("Attempting to build a graph-mode TF2-style "
"TensorArray from either an eager-mode "
"TensorArray or a TF1-style TensorArray. "
"This is not currently supported. You may be "
"attempting to capture a TensorArray "
"inside a tf.function or tf.data map function. "
"Instead, construct a new TensorArray inside "
"the function.")
new_ta = TensorArray(
dtype=impl.dtype,
handle=impl.handle,
flow=flow,
infer_shape=impl._infer_shape,
colocate_with_first_write_call=impl._colocate_with_first_write_call)
new_impl = new_ta._implementation
new_impl._dynamic_size = impl._dynamic_size
new_impl._size = impl._size
new_impl._colocate_with = impl._colocate_with
new_impl._element_shape = impl._element_shape # Share _element_shape.
return new_ta
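# A minimal sketch of the intended call pattern (hypothetical names): loop
# constructs that thread a TensorArray through iterations rewrap the flow
# tensor produced by the loop body, e.g.
#   ta_out = build_ta_with_new_flow(ta_in, flow_out)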
# pylint: enable=protected-access
def _check_dtypes(value, dtype):
if value.dtype != dtype:
logging.error(
"Error: Input value {} has dtype {}, but expected dtype {}. "
"This leads to undefined behavior and will be an error "
"in future versions of TensorFlow. Traceback:\n{}".format(
value, str(value.dtype), str(dtype),
"".join(traceback.format_stack())))
@tf_export("TensorArraySpec")
@type_spec.register("tf.TensorArraySpec")
class TensorArraySpec(type_spec.TypeSpec):
"""Type specification for a `tf.TensorArray`."""
__slots__ = ["_element_shape", "_dtype", "_dynamic_size", "_infer_shape"]
value_type = property(lambda self: TensorArray)
def __init__(self, element_shape=None, dtype=dtypes.float32,
dynamic_size=False, infer_shape=True):
"""Constructs a type specification for a `tf.TensorArray`.
Args:
element_shape: The shape of each element in the `TensorArray`.
dtype: Data type of the `TensorArray`.
dynamic_size: Whether the `TensorArray` can grow past its initial size.
infer_shape: Whether shape inference is enabled.
"""
self._element_shape = tensor_shape.as_shape(element_shape)
self._dtype = dtypes.as_dtype(dtype)
self._dynamic_size = dynamic_size
self._infer_shape = infer_shape
def is_compatible_with(self, other):
# pylint: disable=protected-access
if not isinstance(other, type_spec.TypeSpec):
other = type_spec.type_spec_from_value(other)
# Note: we intentionally exclude infer_shape in this check.
return (isinstance(other, TensorArraySpec) and
self._dtype.is_compatible_with(other._dtype) and
self._element_shape.is_compatible_with(other._element_shape) and
self._dynamic_size == other._dynamic_size)
def most_specific_compatible_type(self, other):
# pylint: disable=protected-access
if not self.is_compatible_with(other):
raise ValueError("Types are not compatible")
infer_shape = self._infer_shape and other._infer_shape
return TensorArraySpec(
self._element_shape.most_specific_compatible_shape(
other._element_shape),
self._dtype, self._dynamic_size, infer_shape)
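  # A minimal merge sketch (hypothetical shapes): combining specs with
  # element shapes [None, 3] and [5, 3] yields element shape [None, 3], the
  # most specific shape compatible with both inputs.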
def _serialize(self):
return (self._element_shape, self._dtype, self._dynamic_size,
self._infer_shape)
@property
def _component_specs(self):
return [tensor_spec.TensorSpec([], dtypes.variant)]
def _to_components(self, value):
if not isinstance(value, TensorArray):
raise TypeError("value must be a TensorArray, but saw: {}"
.format(type(value)))
if value.flow is not None and value.flow.dtype == dtypes.variant:
return [value.flow]
else:
# Convert to a TF2-style TensorArray.
# TODO(ebrevdo): Add an "_as_variant" method to TensorArray class, or
# "implementation / as_variant" arg to TensorArray constructor.
with ops.name_scope("convert_tensor_array"):
flow = list_ops.tensor_list_from_tensor(
tensor=value.stack(), element_shape=value.element_shape)
return [flow]
def _from_components(self, tensor_list):
# This will return a TF2 Graph-style TensorArray because tensor_list[0] is
# a variant object. size == -1 implies unknown size.
ret = TensorArray(
dtype=self._dtype,
flow=tensor_list[0],
dynamic_size=self._dynamic_size,
infer_shape=self._infer_shape)
ret._implementation._element_shape = [self._element_shape] # pylint: disable=protected-access
return ret
@staticmethod
def from_value(value):
if not isinstance(value, TensorArray):
raise TypeError("Expected value to be a TensorArray, but saw: {}".
format(type(value)))
return TensorArraySpec(
dtype=value.dtype,
element_shape=value.element_shape,
dynamic_size=value.dynamic_size,
infer_shape=value._infer_shape) # pylint: disable=protected-access
def _to_legacy_output_types(self):
return self._dtype
def _to_legacy_output_shapes(self):
# Sneak the dynamic_size and infer_shape values into the legacy shape.
return (tensor_shape.TensorShape([self._dynamic_size, self._infer_shape
]).concatenate(self._element_shape))
def _to_legacy_output_classes(self):
return TensorArray
# Register the TypeSpec for TensorArray. If TensorArray is updated to be a
# CompositeTensor, then this registration can be deleted.
type_spec.register_type_spec_from_value_converter(
TensorArray, TensorArraySpec.from_value, allow_subclass=True)
| apache-2.0 | 6,391,439,884,115,813,000 | 37.116819 | 98 | 0.64853 | false |
rockychen-dpaw/oim-cms | registers/migrations/0002_auto_20160919_1303.py | 1 | 4044 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-19 05:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('organisation', '0001_initial'),
('tracking', '0001_initial'),
('registers', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='hardware',
name='computer',
field=models.OneToOneField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='tracking.Computer'),
),
migrations.AddField(
model_name='hardware',
name='cost_centre',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='organisation.CostCentre'),
),
migrations.AddField(
model_name='hardware',
name='location',
field=models.ForeignKey(blank=True, help_text='Physical location', null=True, on_delete=django.db.models.deletion.PROTECT, to='organisation.Location'),
),
migrations.AddField(
model_name='hardware',
name='mobile',
field=models.OneToOneField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='tracking.Mobile'),
),
migrations.AddField(
model_name='hardware',
name='org_unit',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='organisation.OrgUnit'),
),
migrations.AddField(
model_name='hardware',
name='os',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='registers.Software', verbose_name='operating system'),
),
migrations.AddField(
model_name='documentapproval',
name='department_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='organisation.DepartmentUser'),
),
migrations.AddField(
model_name='businessprocess',
name='functions',
field=models.ManyToManyField(to='registers.BusinessFunction'),
),
migrations.AddField(
model_name='businessfunction',
name='services',
field=models.ManyToManyField(to='registers.BusinessService'),
),
migrations.AddField(
model_name='backup',
name='cost_centre',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='organisation.CostCentre'),
),
migrations.AddField(
model_name='backup',
name='org_unit',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='organisation.OrgUnit'),
),
migrations.AddField(
model_name='backup',
name='parent_host',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='host', to='registers.Hardware'),
),
migrations.AddField(
model_name='backup',
name='system',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='registers.Hardware'),
),
migrations.AlterUniqueTogether(
name='processitsystemrelationship',
unique_together=set([('process', 'itsystem')]),
),
migrations.AlterUniqueTogether(
name='itsystemhardware',
unique_together=set([('host', 'role')]),
),
migrations.AlterUniqueTogether(
name='itsystemdependency',
unique_together=set([('itsystem', 'dependency')]),
),
migrations.AlterUniqueTogether(
name='hardware',
unique_together=set([('computer', 'mobile')]),
),
]
| apache-2.0 | -8,746,682,839,214,624,000 | 39.039604 | 163 | 0.599654 | false |
danakj/chromium | tools/android/loading/tracing.py | 7 | 19070 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Monitor tracing events on chrome via chrome remote debugging."""
import itertools
import logging
import operator
import clovis_constants
import devtools_monitor
class TracingTrack(devtools_monitor.Track):
"""Grabs and processes trace event messages.
See https://goo.gl/Qabkqk for details on the protocol.
"""
def __init__(self, connection, categories, fetch_stream=False):
"""Initialize this TracingTrack.
Args:
connection: a DevToolsConnection.
categories: ([str] or None) If set, a list of categories to enable or
disable in Chrome tracing. Categories prefixed with '-' are
disabled.
      fetch_stream: if true, use a websocket stream to fetch tracing data rather
        than dataCollected events. Based on very limited testing, a stream
        appears to be slower than the default dataCollected reporting.
"""
super(TracingTrack, self).__init__(connection)
if connection:
connection.RegisterListener('Tracing.dataCollected', self)
self._categories = set(categories)
params = {}
params['categories'] = ','.join(self._categories)
if fetch_stream:
params['transferMode'] = 'ReturnAsStream'
if connection:
connection.SyncRequestNoResponse('Tracing.start', params)
self._events = []
self._base_msec = None
self._interval_tree = None
self._main_frame_id = None
def Handle(self, method, event):
for e in event['params']['value']:
event = Event(e)
self._events.append(event)
if self._base_msec is None or event.start_msec < self._base_msec:
self._base_msec = event.start_msec
# Invalidate our index rather than trying to be fancy and incrementally
# update.
self._interval_tree = None
def Categories(self):
"""Returns the set of categories in this trace."""
return self._categories
def GetFirstEventMillis(self):
"""Find the canonical start time for this track.
Returns:
The millisecond timestamp of the first request.
"""
return self._base_msec
def GetEvents(self):
"""Returns a list of tracing.Event. Not sorted."""
return self._events
def GetMatchingEvents(self, category, name):
"""Gets events matching |category| and |name|."""
return [e for e in self.GetEvents() if e.Matches(category, name)]
def GetMatchingMainFrameEvents(self, category, name):
"""Gets events matching |category| and |name| that occur in the main frame.
Events without a 'frame' key in their |args| are discarded.
"""
matching_events = self.GetMatchingEvents(category, name)
return [e for e in matching_events
if 'frame' in e.args and e.args['frame'] == self.GetMainFrameID()]
def GetMainFrameRoutingID(self):
"""Returns the main frame routing ID."""
for event in self.GetMatchingEvents(
'navigation', 'RenderFrameImpl::OnNavigate'):
return event.args['id']
assert False
def GetMainFrameID(self):
"""Returns the main frame ID."""
if not self._main_frame_id:
navigation_start_events = self.GetMatchingEvents(
'blink.user_timing', 'navigationStart')
first_event = min(navigation_start_events, key=lambda e: e.start_msec)
self._main_frame_id = first_event.args['frame']
return self._main_frame_id
def SetMainFrameID(self, frame_id):
"""Set the main frame ID. Normally this is used only for testing."""
self._main_frame_id = frame_id
def EventsAt(self, msec):
"""Gets events active at a timestamp.
Args:
msec: tracing milliseconds to query. Tracing milliseconds appears to be
since chrome startup (ie, arbitrary epoch).
Returns:
List of events active at that timestamp. Instantaneous (ie, instant,
sample and counter) events are never included. Event end times are
exclusive, so that an event ending at the usec parameter will not be
returned.
"""
self._IndexEvents()
return self._interval_tree.EventsAt(msec)
def Filter(self, pid=None, tid=None, categories=None):
"""Returns a new TracingTrack with a subset of the events.
Args:
pid: (int or None) Selects events from this PID.
tid: (int or None) Selects events from this TID.
categories: (set([str]) or None) Selects events belonging to one of the
categories.
"""
events = self._events
if pid is not None:
events = filter(lambda e : e.tracing_event['pid'] == pid, events)
if tid is not None:
events = filter(lambda e : e.tracing_event['tid'] == tid, events)
if categories is not None:
events = filter(
lambda e : set(e.category.split(',')).intersection(categories),
events)
tracing_track = TracingTrack(None, clovis_constants.DEFAULT_CATEGORIES)
tracing_track._events = events
tracing_track._categories = self._categories
if categories is not None:
tracing_track._categories = self._categories.intersection(categories)
return tracing_track
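  # A minimal filtering sketch (hypothetical PID and categories):
  #   renderer_track = track.Filter(pid=1234, categories={'blink', 'v8'})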
def ToJsonDict(self):
return {'categories': list(self._categories),
'events': [e.ToJsonDict() for e in self._events]}
@classmethod
def FromJsonDict(cls, json_dict):
if not json_dict:
return None
assert 'events' in json_dict
events = [Event(e) for e in json_dict['events']]
tracing_track = TracingTrack(None, clovis_constants.DEFAULT_CATEGORIES)
tracing_track._categories = set(json_dict.get('categories', []))
tracing_track._events = events
tracing_track._base_msec = events[0].start_msec if events else 0
for e in events[1:]:
if e.type == 'M':
continue # No timestamp for metadata events.
assert e.start_msec > 0
if e.start_msec < tracing_track._base_msec:
tracing_track._base_msec = e.start_msec
return tracing_track
def OverlappingEvents(self, start_msec, end_msec):
self._IndexEvents()
return self._interval_tree.OverlappingEvents(start_msec, end_msec)
def EventsEndingBetween(self, start_msec, end_msec):
"""Gets the list of events ending within an interval.
Args:
start_msec: the start of the range to query, in milliseconds, inclusive.
end_msec: the end of the range to query, in milliseconds, inclusive.
Returns:
See OverlappingEvents() above.
"""
overlapping_events = self.OverlappingEvents(start_msec, end_msec)
return [e for e in overlapping_events
if start_msec <= e.end_msec <= end_msec]
def EventFromStep(self, step_event):
"""Returns the Event associated with a step event, or None.
Args:
step_event: (Event) Step event.
Returns:
an Event that matches the step event, or None.
"""
self._IndexEvents()
assert 'step' in step_event.args and step_event.tracing_event['ph'] == 'T'
candidates = self._interval_tree.EventsAt(step_event.start_msec)
for event in candidates:
# IDs are only unique within a process (often they are pointers).
if (event.pid == step_event.pid and event.tracing_event['ph'] != 'T'
and event.name == step_event.name and event.id == step_event.id):
return event
return None
def _IndexEvents(self, strict=False):
if self._interval_tree:
return
complete_events = []
spanning_events = self._SpanningEvents()
for event in self._events:
if not event.IsIndexable():
continue
if event.IsComplete():
complete_events.append(event)
continue
matched_event = spanning_events.Match(event, strict)
if matched_event is not None:
complete_events.append(matched_event)
self._interval_tree = _IntervalTree.FromEvents(complete_events)
if strict and spanning_events.HasPending():
raise devtools_monitor.DevToolsConnectionException(
'Pending spanning events: %s' %
'\n'.join([str(e) for e in spanning_events.PendingEvents()]))
def _GetEvents(self):
self._IndexEvents()
return self._interval_tree.GetEvents()
def HasLoadingSucceeded(self):
"""Returns whether the loading has succeed at recording time."""
main_frame_id = self.GetMainFrameRoutingID()
for event in self.GetMatchingEvents(
'navigation', 'RenderFrameImpl::didFailProvisionalLoad'):
if event.args['id'] == main_frame_id:
return False
for event in self.GetMatchingEvents(
'navigation', 'RenderFrameImpl::didFailLoad'):
if event.args['id'] == main_frame_id:
return False
return True
class _SpanningEvents(object):
def __init__(self):
self._duration_stack = []
self._async_stacks = {}
self._objects = {}
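      # The phase letters below follow the Chrome Trace Event Format: 'B'/'E'
      # are synchronous duration begin/end, 'b'/'e' and 'S'/'F' are nestable
      # and legacy async begin/end, 'N'/'D' are object creation/destruction,
      # 'X' is a complete event and 'M' is metadata.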
self._MATCH_HANDLER = {
'B': self._DurationBegin,
'E': self._DurationEnd,
'b': self._AsyncStart,
'e': self._AsyncEnd,
'S': self._AsyncStart,
'F': self._AsyncEnd,
'N': self._ObjectCreated,
'D': self._ObjectDestroyed,
'M': self._Ignore,
'X': self._Ignore,
'R': self._Ignore,
'p': self._Ignore,
'(': self._Ignore, # Context events.
')': self._Ignore, # Ditto.
None: self._Ignore,
}
def Match(self, event, strict=False):
return self._MATCH_HANDLER.get(
event.type, self._Unsupported)(event, strict)
def HasPending(self):
return (self._duration_stack or
self._async_stacks or
self._objects)
    def PendingEvents(self):
      return itertools.chain(
          (e for e in self._duration_stack),
          (o for o in self._objects.itervalues()),
          itertools.chain.from_iterable((
              (e for e in s) for s in self._async_stacks.itervalues())))
def _AsyncKey(self, event, _):
return (event.tracing_event['cat'], event.id)
def _Ignore(self, _event, _):
return None
def _Unsupported(self, event, _):
raise devtools_monitor.DevToolsConnectionException(
'Unsupported spanning event type: %s' % event)
def _DurationBegin(self, event, _):
self._duration_stack.append(event)
return None
def _DurationEnd(self, event, _):
if not self._duration_stack:
raise devtools_monitor.DevToolsConnectionException(
'Unmatched duration end: %s' % event)
start = self._duration_stack.pop()
start.SetClose(event)
return start
def _AsyncStart(self, event, strict):
key = self._AsyncKey(event, strict)
self._async_stacks.setdefault(key, []).append(event)
return None
def _AsyncEnd(self, event, strict):
key = self._AsyncKey(event, strict)
if key not in self._async_stacks:
message = 'Unmatched async end %s: %s' % (key, event)
if strict:
raise devtools_monitor.DevToolsConnectionException(message)
else:
logging.warning(message)
return None
stack = self._async_stacks[key]
start = stack.pop()
if not stack:
del self._async_stacks[key]
start.SetClose(event)
return start
def _ObjectCreated(self, event, _):
      # The tracing event format treats object deletion timestamps as
      # exclusive: the timestamp of a deletion may equal that of the next
      # create at the same address. This asserts that this does not happen in
      # practice, as it would be inconvenient to handle correctly here.
if event.id in self._objects:
raise devtools_monitor.DevToolsConnectionException(
'Multiple objects at same address: %s, %s' %
(event, self._objects[event.id]))
self._objects[event.id] = event
return None
def _ObjectDestroyed(self, event, _):
if event.id not in self._objects:
raise devtools_monitor.DevToolsConnectionException(
'Missing object creation for %s' % event)
start = self._objects[event.id]
del self._objects[event.id]
start.SetClose(event)
return start
class Event(object):
"""Wraps a tracing event."""
CLOSING_EVENTS = {'E': 'B',
'e': 'b',
'F': 'S',
'D': 'N'}
__slots__ = ('_tracing_event', 'start_msec', 'end_msec', '_synthetic')
def __init__(self, tracing_event, synthetic=False):
"""Creates Event.
Intended to be created only by TracingTrack.
Args:
tracing_event: JSON tracing event, as defined in https://goo.gl/Qabkqk.
synthetic: True if the event is synthetic. This is only used for indexing
internal to TracingTrack.
"""
if not synthetic and tracing_event['ph'] in ['s', 't', 'f']:
raise devtools_monitor.DevToolsConnectionException(
'Unsupported event: %s' % tracing_event)
self._tracing_event = tracing_event
# Note tracing event times are in microseconds.
self.start_msec = tracing_event['ts'] / 1000.0
self.end_msec = None
self._synthetic = synthetic
if self.type == 'X':
# Some events don't have a duration.
duration = (tracing_event['dur']
if 'dur' in tracing_event else tracing_event['tdur'])
self.end_msec = self.start_msec + duration / 1000.0
@property
def type(self):
if self._synthetic:
return None
return self._tracing_event['ph']
@property
def category(self):
return self._tracing_event['cat']
@property
def pid(self):
return self._tracing_event['pid']
@property
def args(self):
return self._tracing_event.get('args', {})
@property
def id(self):
return self._tracing_event.get('id')
@property
def name(self):
return self._tracing_event['name']
@property
def tracing_event(self):
return self._tracing_event
@property
def synthetic(self):
return self._synthetic
def __str__(self):
return ''.join([str(self._tracing_event),
'[%s,%s]' % (self.start_msec, self.end_msec)])
def Matches(self, category, name):
"""Match tracing events.
Args:
category: a tracing category (event['cat']).
name: the tracing event name (event['name']).
Returns:
True if the event matches and False otherwise.
"""
if name != self.name:
return False
categories = self.category.split(',')
return category in categories
def IsIndexable(self):
"""True iff the event can be indexed by time."""
return self._synthetic or self.type not in [
'I', 'P', 'c', 'C',
'n', 'T', 'p', # TODO(mattcary): ?? instant types of async events.
'O', # TODO(mattcary): ?? object snapshot
'M' # Metadata
]
def IsComplete(self):
return self.type == 'X'
def Synthesize(self):
"""Expand into synthetic events.
Returns:
A list of events, possibly some synthetic, whose start times are all
interesting for purposes of indexing. If the event is not indexable the
set may be empty.
"""
if not self.IsIndexable():
return []
if self.IsComplete():
# Tracing event timestamps are microseconds!
return [self, Event({'ts': self.end_msec * 1000}, synthetic=True)]
return [self]
def SetClose(self, closing):
"""Close a spanning event.
Args:
closing: The closing event.
Raises:
devtools_monitor.DevToolsConnectionException if closing can't property
close this event.
"""
if self.type != self.CLOSING_EVENTS.get(closing.type):
raise devtools_monitor.DevToolsConnectionException(
'Bad closing: %s --> %s' % (self, closing))
if self.type in ['b', 'S'] and (
self.tracing_event['cat'] != closing.tracing_event['cat'] or
self.id != closing.id):
raise devtools_monitor.DevToolsConnectionException(
'Bad async closing: %s --> %s' % (self, closing))
self.end_msec = closing.start_msec
if 'args' in closing.tracing_event:
self.tracing_event.setdefault(
'args', {}).update(closing.tracing_event['args'])
def ToJsonDict(self):
return self._tracing_event
@classmethod
def FromJsonDict(cls, json_dict):
return Event(json_dict)
class _IntervalTree(object):
  """Simple interval tree. The implementation is not optimal: each node is
  split by placing an equal number of events, ordered by start time, on each
  side.
  """
_TRESHOLD = 100
def __init__(self, start, end, events):
"""Builds an interval tree.
Args:
start: start timestamp of this node, in ms.
end: end timestamp covered by this node, in ms.
events: Iterable of objects having start_msec and end_msec fields. Has to
be sorted by start_msec.
"""
self.start = start
self.end = end
self._events = events
self._left = self._right = None
if len(self._events) > self._TRESHOLD:
self._Divide()
@classmethod
def FromEvents(cls, events):
"""Returns an IntervalTree instance from a list of events."""
filtered_events = [e for e in events
if e.start_msec is not None and e.end_msec is not None]
filtered_events.sort(key=operator.attrgetter('start_msec'))
start = min(event.start_msec for event in filtered_events)
end = max(event.end_msec for event in filtered_events)
return _IntervalTree(start, end, filtered_events)
def OverlappingEvents(self, start, end):
"""Returns a set of events overlapping with [start, end)."""
if min(end, self.end) - max(start, self.start) <= 0:
return set()
elif self._IsLeaf():
result = set()
for event in self._events:
if self._Overlaps(event, start, end):
result.add(event)
return result
else:
return (self._left.OverlappingEvents(start, end)
| self._right.OverlappingEvents(start, end))
def EventsAt(self, timestamp):
result = set()
if self._IsLeaf():
for event in self._events:
if event.start_msec <= timestamp < event.end_msec:
result.add(event)
else:
if self._left.start <= timestamp < self._left.end:
result |= self._left.EventsAt(timestamp)
if self._right.start <= timestamp < self._right.end:
result |= self._right.EventsAt(timestamp)
return result
def GetEvents(self):
return self._events
def _Divide(self):
middle = len(self._events) / 2
left_events = self._events[:middle]
right_events = self._events[middle:]
left_end = max(e.end_msec for e in left_events)
right_start = min(e.start_msec for e in right_events)
self._left = _IntervalTree(self.start, left_end, left_events)
self._right = _IntervalTree(right_start, self.end, right_events)
def _IsLeaf(self):
return self._left is None
@classmethod
def _Overlaps(cls, event, start, end):
return (min(end, event.end_msec) - max(start, event.start_msec) > 0
or start <= event.start_msec < end) # For instant events.
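# A minimal usage sketch (hypothetical events exposing start_msec/end_msec):
#   tree = _IntervalTree.FromEvents(events)
#   overlapping = tree.OverlappingEvents(10.0, 20.0)  # events in [10, 20)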
| bsd-3-clause | 8,472,489,861,337,986,000 | 32.397548 | 80 | 0.637703 | false |
repology/repology | repology/parsers/parsers/crux.py | 1 | 2026 | # Copyright (C) 2017-2019 Dmitry Marakasov <[email protected]>
#
# This file is part of repology
#
# repology is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# repology is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with repology. If not, see <http://www.gnu.org/licenses/>.
from typing import Iterable
from repology.logger import Logger
from repology.packagemaker import NameType, PackageFactory, PackageMaker
from repology.parsers import Parser
from repology.parsers.json import iter_json_list
from repology.parsers.maintainers import extract_maintainers
from repology.transformer import PackageTransformer
class CRUXPortsJsonParser(Parser):
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
for port in iter_json_list(path, ('ports', None)):
with factory.begin() as pkg:
pkg.add_name(port['name'], NameType.CRUX_NAME)
pkg.set_summary(port['description'])
pkg.set_version(port['version'])
if port['maintainer'] == '':
pkg.log('Missing maintainer for port "{}"'.format(port['name']), severity=Logger.ERROR)
else:
pkg.add_maintainers(extract_maintainers(port['maintainer']))
pkg.add_homepages(port['url'])
pkg.set_subrepo(port['repository'])
pkg.add_downloads(port['sources'])
if '${' in port['name']:
raise RuntimeError(f'bad port name {port["name"]}')
yield pkg
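# A sketch of the input shape this parser expects (keys inferred from the
# fields accessed above; the real feed may carry additional fields):
#   {"ports": [{"name": "...", "description": "...", "version": "...",
#               "maintainer": "...", "url": "...", "repository": "...",
#               "sources": ["..."]}]}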
| gpl-3.0 | -8,551,845,393,287,674,000 | 43.043478 | 120 | 0.674729 | false |
partofthething/home-assistant | homeassistant/components/demo/weather.py | 9 | 5451 | """Demo platform that offers fake meteorological data."""
from datetime import timedelta
from homeassistant.components.weather import (
ATTR_CONDITION_CLOUDY,
ATTR_CONDITION_EXCEPTIONAL,
ATTR_CONDITION_FOG,
ATTR_CONDITION_HAIL,
ATTR_CONDITION_LIGHTNING,
ATTR_CONDITION_LIGHTNING_RAINY,
ATTR_CONDITION_PARTLYCLOUDY,
ATTR_CONDITION_POURING,
ATTR_CONDITION_RAINY,
ATTR_CONDITION_SNOWY,
ATTR_CONDITION_SNOWY_RAINY,
ATTR_CONDITION_SUNNY,
ATTR_CONDITION_WINDY,
ATTR_CONDITION_WINDY_VARIANT,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
WeatherEntity,
)
from homeassistant.const import TEMP_CELSIUS, TEMP_FAHRENHEIT
import homeassistant.util.dt as dt_util
CONDITION_CLASSES = {
ATTR_CONDITION_CLOUDY: [],
ATTR_CONDITION_FOG: [],
ATTR_CONDITION_HAIL: [],
ATTR_CONDITION_LIGHTNING: [],
ATTR_CONDITION_LIGHTNING_RAINY: [],
ATTR_CONDITION_PARTLYCLOUDY: [],
ATTR_CONDITION_POURING: [],
ATTR_CONDITION_RAINY: ["shower rain"],
ATTR_CONDITION_SNOWY: [],
ATTR_CONDITION_SNOWY_RAINY: [],
ATTR_CONDITION_SUNNY: ["sunshine"],
ATTR_CONDITION_WINDY: [],
ATTR_CONDITION_WINDY_VARIANT: [],
ATTR_CONDITION_EXCEPTIONAL: [],
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
setup_platform(hass, {}, async_add_entities)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Demo weather."""
add_entities(
[
DemoWeather(
"South",
"Sunshine",
21.6414,
92,
1099,
0.5,
TEMP_CELSIUS,
[
[ATTR_CONDITION_RAINY, 1, 22, 15, 60],
[ATTR_CONDITION_RAINY, 5, 19, 8, 30],
[ATTR_CONDITION_CLOUDY, 0, 15, 9, 10],
[ATTR_CONDITION_SUNNY, 0, 12, 6, 0],
[ATTR_CONDITION_PARTLYCLOUDY, 2, 14, 7, 20],
[ATTR_CONDITION_RAINY, 15, 18, 7, 0],
[ATTR_CONDITION_FOG, 0.2, 21, 12, 100],
],
),
DemoWeather(
"North",
"Shower rain",
-12,
54,
987,
4.8,
TEMP_FAHRENHEIT,
[
[ATTR_CONDITION_SNOWY, 2, -10, -15, 60],
[ATTR_CONDITION_PARTLYCLOUDY, 1, -13, -14, 25],
[ATTR_CONDITION_SUNNY, 0, -18, -22, 70],
[ATTR_CONDITION_SUNNY, 0.1, -23, -23, 90],
[ATTR_CONDITION_SNOWY, 4, -19, -20, 40],
[ATTR_CONDITION_SUNNY, 0.3, -14, -19, 0],
[ATTR_CONDITION_SUNNY, 0, -9, -12, 0],
],
),
]
)
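# Each forecast row above is [condition, precipitation, temp, templow,
# precipitation_probability]; DemoWeather.forecast below maps these positions
# onto the corresponding ATTR_FORECAST_* keys.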
class DemoWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(
self,
name,
condition,
temperature,
humidity,
pressure,
wind_speed,
temperature_unit,
forecast,
):
"""Initialize the Demo weather."""
self._name = name
self._condition = condition
self._temperature = temperature
self._temperature_unit = temperature_unit
self._humidity = humidity
self._pressure = pressure
self._wind_speed = wind_speed
self._forecast = forecast
@property
def name(self):
"""Return the name of the sensor."""
return f"Demo Weather {self._name}"
@property
def should_poll(self):
"""No polling needed for a demo weather condition."""
return False
@property
def temperature(self):
"""Return the temperature."""
return self._temperature
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._temperature_unit
@property
def humidity(self):
"""Return the humidity."""
return self._humidity
@property
def wind_speed(self):
"""Return the wind speed."""
return self._wind_speed
@property
def pressure(self):
"""Return the pressure."""
return self._pressure
@property
def condition(self):
"""Return the weather condition."""
return [
k for k, v in CONDITION_CLASSES.items() if self._condition.lower() in v
][0]
@property
def attribution(self):
"""Return the attribution."""
return "Powered by Home Assistant"
@property
def forecast(self):
"""Return the forecast."""
reftime = dt_util.now().replace(hour=16, minute=00)
forecast_data = []
for entry in self._forecast:
data_dict = {
ATTR_FORECAST_TIME: reftime.isoformat(),
ATTR_FORECAST_CONDITION: entry[0],
ATTR_FORECAST_PRECIPITATION: entry[1],
ATTR_FORECAST_TEMP: entry[2],
ATTR_FORECAST_TEMP_LOW: entry[3],
ATTR_FORECAST_PRECIPITATION_PROBABILITY: entry[4],
}
reftime = reftime + timedelta(hours=4)
forecast_data.append(data_dict)
return forecast_data
| mit | 4,714,112,650,781,788,000 | 28.306452 | 83 | 0.545588 | false |
whbruce/upm | examples/python/adxrs610.py | 7 | 2285 | #!/usr/bin/python
# Author: Jon Trulson <[email protected]>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_adxrs610 as sensorObj
def main():
    # Instantiate an ADXRS610 sensor on analog pin A0 (data out), and
    # analog A1 (temp out) with an analog reference voltage of
    # 5.0
sensor = sensorObj.ADXRS610(0, 1, 5.0)
## Exit handlers ##
# This function stops python from printing a stacktrace when you hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
# set a deadband region around the zero point to report 0.0 (optional)
sensor.setDeadband(0.015);
    # Every tenth of a second, sample the ADXRS610 and output its
    # corresponding temperature and angular velocity
while (1):
print("Vel (deg/s):", sensor.getAngularVelocity())
print("Temp (C):", sensor.getTemperature())
time.sleep(.1)
if __name__ == '__main__':
main()
| mit | 8,625,103,420,399,247,000 | 37.083333 | 82 | 0.72035 | false |
uhef/fs-uae-gles | launcher/fs_uae_launcher/ui/settings/SettingsDialog.py | 2 | 2128 | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import fs_uae_launcher.fsui as fsui
from ...I18N import _, ngettext
from ...Settings import Settings
from ...Signal import Signal
from ..PagedDialog import PagedDialog
from .AudioSettingsPage import AudioSettingsPage
#from .CustomSettingsPage import CustomSettingsPage
from .ExperimentalFeaturesPage import ExperimentalFeaturesPage
from .FilterSettingsPage import FilterSettingsPage
from .GameDatabaseSettingsPage import GameDatabaseSettingsPage
from .InputSettingsPage import InputSettingsPage
from .JoystickSettingsPage import JoystickSettingsPage
from .NetplaySettingsPage import NetplaySettingsPage
from .ScanSettingsPage import ScanSettingsPage
from .VideoSettingsPage import VideoSettingsPage
class SettingsDialog(PagedDialog):
def __init__(self, parent, index=0):
PagedDialog.__init__(self, parent, _("FS-UAE Launcher Settings"))
self.add_page(_("Joystick Settings"), JoystickSettingsPage)
self.add_page(_("Scan & Indexing"), ScanSettingsPage)
self.add_page(_("Input Settings"), InputSettingsPage)
self.add_page(_("Audio Settings"), AudioSettingsPage)
self.add_page(_("Video Settings"), VideoSettingsPage)
self.add_page(_("Filters & Scaling"), FilterSettingsPage)
#self.add_page(_("OpenGL Settings"), OpenGLSettingsPage)
self.add_page(_("Experimental Features"), ExperimentalFeaturesPage)
if Settings.get("netplay_feature") == "1":
self.add_page(_("Net Play Settings"), NetplaySettingsPage)
if Settings.get("database_feature") == "1":
self.add_page(_("Game Database"), GameDatabaseSettingsPage)
#self.add_page(_("Custom Settings"), CustomSettingsPage)
self.list_view.set_index(index)
self.set_size((900, 540))
self.center_on_parent()
@classmethod
def run(cls, parent, index=0):
dialog = cls(parent, index)
dialog.show_modal()
dialog.close()
Signal.broadcast("settings_updated")
| gpl-2.0 | -1,679,251,159,061,336,000 | 41.56 | 75 | 0.720865 | false |
dlazz/ansible | lib/ansible/modules/cloud/cloudscale/cloudscale_server.py | 7 | 13316 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2017, Gaudenz Steinlin <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudscale_server
short_description: Manages servers on the cloudscale.ch IaaS service
description:
- Create, start, stop and delete servers on the cloudscale.ch IaaS service.
- All operations are performed using the cloudscale.ch public API v1.
- "For details consult the full API documentation: U(https://www.cloudscale.ch/en/api/v1)."
- A valid API token is required for all operations. You can create as many tokens as you like using the cloudscale.ch control panel at
U(https://control.cloudscale.ch).
notes:
- Instead of the api_token parameter the CLOUDSCALE_API_TOKEN environment variable can be used.
- To create a new server at least the C(name), C(ssh_key), C(image) and C(flavor) options are required.
- If more than one server with the name given by the C(name) option exists, execution is aborted.
- Once a server is created all parameters except C(state) are read-only. You can't change the name, flavor or any other property. This is a limitation
of the cloudscale.ch API. The module will silently ignore differences between the configured parameters and the running server if a server with the
correct name or UUID exists. Only state changes will be applied.
version_added: 2.3
author: "Gaudenz Steinlin (@gaudenz) <[email protected]>"
options:
state:
description:
- State of the server
default: running
choices: [ running, stopped, absent ]
name:
description:
- Name of the Server.
- Either C(name) or C(uuid) are required. These options are mutually exclusive.
uuid:
description:
- UUID of the server.
- Either C(name) or C(uuid) are required. These options are mutually exclusive.
flavor:
description:
- Flavor of the server.
image:
description:
- Image used to create the server.
volume_size_gb:
description:
- Size of the root volume in GB.
default: 10
bulk_volume_size_gb:
description:
- Size of the bulk storage volume in GB.
- No bulk storage volume if not set.
ssh_keys:
description:
- List of SSH public keys.
- Use the full content of your .pub file here.
use_public_network:
description:
- Attach a public network interface to the server.
default: True
type: bool
use_private_network:
description:
- Attach a private network interface to the server.
default: False
type: bool
use_ipv6:
description:
- Enable IPv6 on the public network interface.
default: True
type: bool
anti_affinity_with:
description:
- UUID of another server to create an anti-affinity group with.
user_data:
description:
- Cloud-init configuration (cloud-config) data to use for the server.
api_token:
description:
- cloudscale.ch API token.
- This can also be passed in the CLOUDSCALE_API_TOKEN environment variable.
api_timeout:
description:
- Timeout in seconds for calls to the cloudscale.ch API.
default: 30
version_added: "2.5"
'''
EXAMPLES = '''
# Start a server (if it does not exist) and register the server details
- name: Start cloudscale.ch server
cloudscale_server:
name: my-shiny-cloudscale-server
image: debian-8
flavor: flex-4
ssh_keys: ssh-rsa XXXXXXXXXX...XXXX ansible@cloudscale
use_private_network: True
bulk_volume_size_gb: 100
api_token: xxxxxx
register: server1
# Start another server in anti-affinity to the first one
- name: Start second cloudscale.ch server
cloudscale_server:
name: my-other-shiny-server
image: ubuntu-16.04
flavor: flex-8
ssh_keys: ssh-rsa XXXXXXXXXXX ansible@cloudscale
anti_affinity_with: '{{ server1.uuid }}'
api_token: xxxxxx
# Stop the first server
- name: Stop my first server
cloudscale_server:
uuid: '{{ server1.uuid }}'
state: stopped
api_token: xxxxxx
# Delete my second server
- name: Delete my second server
cloudscale_server:
name: my-other-shiny-server
state: absent
api_token: xxxxxx
# Start a server and wait for the SSH host keys to be generated
- name: Start server and wait for SSH host keys
cloudscale_server:
name: my-cloudscale-server-with-ssh-key
image: debian-8
flavor: flex-4
ssh_keys: ssh-rsa XXXXXXXXXXX ansible@cloudscale
api_token: xxxxxx
register: server
until: server.ssh_fingerprints
retries: 60
delay: 2
'''
RETURN = '''
href:
description: API URL to get details about this server
returned: success when not state == absent
type: str
sample: https://api.cloudscale.ch/v1/servers/cfde831a-4e87-4a75-960f-89b0148aa2cc
uuid:
description: The unique identifier for this server
returned: success
type: str
sample: cfde831a-4e87-4a75-960f-89b0148aa2cc
name:
description: The display name of the server
returned: success
type: str
sample: its-a-me-mario.cloudscale.ch
state:
description: The current status of the server
returned: success
type: str
sample: running
flavor:
description: The flavor that has been used for this server
returned: success when not state == absent
type: str
sample: flex-8
image:
description: The image used for booting this server
returned: success when not state == absent
type: str
sample: debian-8
volumes:
description: List of volumes attached to the server
returned: success when not state == absent
type: list
sample: [ {"type": "ssd", "device": "/dev/vda", "size_gb": "50"} ]
interfaces:
description: List of network ports attached to the server
returned: success when not state == absent
type: list
sample: [ { "type": "public", "addresses": [ ... ] } ]
ssh_fingerprints:
  description: A list of SSH host key fingerprints. Will be null until the host keys can be retrieved from the server.
returned: success when not state == absent
type: list
sample: ["ecdsa-sha2-nistp256 SHA256:XXXX", ... ]
ssh_host_keys:
  description: A list of SSH host keys. Will be null until the host keys can be retrieved from the server.
returned: success when not state == absent
type: list
sample: ["ecdsa-sha2-nistp256 XXXXX", ... ]
anti_affinity_with:
description: List of servers in the same anti-affinity group
returned: success when not state == absent
type: str
sample: []
'''
import os
from datetime import datetime, timedelta
from time import sleep
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudscale import AnsibleCloudscaleBase, cloudscale_argument_spec
ALLOWED_STATES = ('running',
'stopped',
'absent',
)
class AnsibleCloudscaleServer(AnsibleCloudscaleBase):
def __init__(self, module):
super(AnsibleCloudscaleServer, self).__init__(module)
# Check if server already exists and load properties
uuid = self._module.params['uuid']
name = self._module.params['name']
# Initialize server dictionary
self.info = {'uuid': uuid, 'name': name, 'state': 'absent'}
servers = self.list_servers()
matching_server = []
for s in servers:
if uuid:
# Look for server by UUID if given
if s['uuid'] == uuid:
self.info = self._transform_state(s)
break
else:
# Look for server by name
if s['name'] == name:
matching_server.append(s)
else:
if len(matching_server) == 1:
self.info = self._transform_state(matching_server[0])
elif len(matching_server) > 1:
self._module.fail_json(msg="More than one server with name '%s' exists. "
"Use the 'uuid' parameter to identify the server." % name)
@staticmethod
def _transform_state(server):
if 'status' in server:
server['state'] = server['status']
del server['status']
else:
server['state'] = 'absent'
return server
def update_info(self):
# If we don't have a UUID (yet) there is nothing to update
if 'uuid' not in self.info:
return
url_path = 'servers/' + self.info['uuid']
resp = self._get(url_path)
if resp:
self.info = self._transform_state(resp)
else:
self.info = {'uuid': self.info['uuid'],
'name': self.info.get('name', None),
'state': 'absent'}
def wait_for_state(self, states):
start = datetime.now()
timeout = self._module.params['api_timeout'] * 2
while datetime.now() - start < timedelta(seconds=timeout):
self.update_info()
if self.info['state'] in states:
return True
sleep(1)
self._module.fail_json(msg='Timeout while waiting for a state change on server %s to states %s. Current state is %s.'
% (self.info['name'], states, self.info['state']))
def create_server(self):
data = self._module.params.copy()
# check for required parameters to create a server
missing_parameters = []
for p in ('name', 'ssh_keys', 'image', 'flavor'):
if p not in data or not data[p]:
missing_parameters.append(p)
if len(missing_parameters) > 0:
self._module.fail_json(msg='Missing required parameter(s) to create a new server: %s.' %
' '.join(missing_parameters))
        # Iterate over a deep copy of the data dict, because mutating
        # a dict while iterating over it is unsafe
# Sanitize data dictionary
for k, v in deepcopy(data).items():
# Remove items not relevant to the create server call
if k in ('api_token', 'api_timeout', 'uuid', 'state'):
del data[k]
continue
# Remove None values, these don't get correctly translated by urlencode
if v is None:
del data[k]
continue
self.info = self._transform_state(self._post('servers', data))
self.wait_for_state(('running', ))
def delete_server(self):
self._delete('servers/%s' % self.info['uuid'])
self.wait_for_state(('absent', ))
def start_server(self):
self._post('servers/%s/start' % self.info['uuid'])
self.wait_for_state(('running', ))
def stop_server(self):
self._post('servers/%s/stop' % self.info['uuid'])
self.wait_for_state(('stopped', ))
def list_servers(self):
return self._get('servers') or []
def main():
argument_spec = cloudscale_argument_spec()
argument_spec.update(dict(
state=dict(default='running', choices=ALLOWED_STATES),
name=dict(),
uuid=dict(),
flavor=dict(),
image=dict(),
volume_size_gb=dict(type='int', default=10),
bulk_volume_size_gb=dict(type='int'),
ssh_keys=dict(type='list'),
use_public_network=dict(type='bool', default=True),
use_private_network=dict(type='bool', default=False),
use_ipv6=dict(type='bool', default=True),
anti_affinity_with=dict(),
user_data=dict(),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=(('name', 'uuid'),),
mutually_exclusive=(('name', 'uuid'),),
supports_check_mode=True,
)
target_state = module.params['state']
server = AnsibleCloudscaleServer(module)
# The server could be in a changing or error state.
# Wait for one of the allowed states before doing anything.
# If an allowed state can't be reached, this module fails.
if server.info['state'] not in ALLOWED_STATES:
server.wait_for_state(ALLOWED_STATES)
current_state = server.info['state']
if module.check_mode:
module.exit_json(changed=not target_state == current_state,
**server.info)
changed = False
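    # Reconcile the current state with the requested target state; each
    # transition below maps to the API calls needed to get there.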
if current_state == 'absent' and target_state == 'running':
server.create_server()
changed = True
elif current_state == 'absent' and target_state == 'stopped':
server.create_server()
server.stop_server()
changed = True
elif current_state == 'stopped' and target_state == 'running':
server.start_server()
changed = True
elif current_state in ('running', 'stopped') and target_state == 'absent':
server.delete_server()
changed = True
elif current_state == 'running' and target_state == 'stopped':
server.stop_server()
changed = True
module.exit_json(changed=changed, **server.info)
if __name__ == '__main__':
main()
| gpl-3.0 | -942,680,401,781,059,200 | 32.373434 | 152 | 0.633674 | false |
tpaszkowski/quantum | quantum/plugins/cisco/extensions/credential.py | 4 | 6038 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ying Liu, Cisco Systems, Inc.
#
from webob import exc
from quantum.api import api_common as common
from quantum.api import extensions
from quantum.manager import QuantumManager
from quantum.plugins.cisco.common import cisco_exceptions as exception
from quantum.plugins.cisco.common import cisco_faults as faults
from quantum.plugins.cisco.extensions import (_credential_view as
credential_view)
from quantum import wsgi
class Credential(extensions.ExtensionDescriptor):
"""extension class Credential"""
@classmethod
def get_name(cls):
""" Returns Ext Resource Name """
return "Cisco Credential"
@classmethod
def get_alias(cls):
""" Returns Ext Resource Alias """
return "Cisco Credential"
@classmethod
def get_description(cls):
""" Returns Ext Resource Description """
return "Credential include username and password"
@classmethod
def get_namespace(cls):
""" Returns Ext Resource Namespace """
return "http://docs.ciscocloud.com/api/ext/credential/v1.0"
@classmethod
def get_updated(cls):
""" Returns Ext Resource Update Time """
return "2011-07-25T13:25:27-06:00"
@classmethod
def get_resources(cls):
""" Returns Ext Resources """
parent_resource = dict(member_name="tenant",
collection_name="extensions/csco/tenants")
controller = CredentialController(QuantumManager.get_plugin())
return [extensions.ResourceExtension('credentials', controller,
parent=parent_resource)]
class CredentialController(common.QuantumController, wsgi.Controller):
""" credential API controller
based on QuantumController """
_credential_ops_param_list = [
{'param-name': 'credential_name', 'required': True},
{'param-name': 'user_name', 'required': True},
{'param-name': 'password', 'required': True},
]
_serialization_metadata = {
"application/xml": {
"attributes": {
"credential": ["id", "name"],
},
},
}
def __init__(self, plugin):
self._resource_name = 'credential'
self._plugin = plugin
def index(self, request, tenant_id):
""" Returns a list of credential ids """
return self._items(request, tenant_id, is_detail=False)
def _items(self, request, tenant_id, is_detail):
""" Returns a list of credentials. """
credentials = self._plugin.get_all_credentials(tenant_id)
builder = credential_view.get_view_builder(request)
result = [builder.build(credential, is_detail)['credential']
for credential in credentials]
return dict(credentials=result)
# pylint: disable-msg=E1101,W0613
def show(self, request, tenant_id, id):
""" Returns credential details for the given credential id """
try:
credential = self._plugin.get_credential_details(tenant_id, id)
builder = credential_view.get_view_builder(request)
            # build response with details
result = builder.build(credential, True)
return dict(credentials=result)
except exception.CredentialNotFound as exp:
return faults.Fault(faults.CredentialNotFound(exp))
def create(self, request, tenant_id):
""" Creates a new credential for a given tenant """
try:
body = self._deserialize(request.body, request.get_content_type())
req_body = self._prepare_request_body(
body, self._credential_ops_param_list)
req_params = req_body[self._resource_name]
except exc.HTTPError as exp:
return faults.Fault(exp)
credential = self._plugin.create_credential(
tenant_id,
req_params['credential_name'],
req_params['user_name'],
req_params['password'])
builder = credential_view.get_view_builder(request)
result = builder.build(credential)
return dict(credentials=result)
def update(self, request, tenant_id, id):
""" Updates the name for the credential with the given id """
try:
body = self._deserialize(request.body, request.get_content_type())
req_body = self._prepare_request_body(
body, self._credential_ops_param_list)
req_params = req_body[self._resource_name]
except exc.HTTPError as exp:
return faults.Fault(exp)
try:
credential = self._plugin.rename_credential(
tenant_id, id, req_params['credential_name'])
builder = credential_view.get_view_builder(request)
result = builder.build(credential, True)
return dict(credentials=result)
except exception.CredentialNotFound as exp:
return faults.Fault(faults.CredentialNotFound(exp))
def delete(self, request, tenant_id, id):
""" Destroys the credential with the given id """
try:
self._plugin.delete_credential(tenant_id, id)
return exc.HTTPOk()
except exception.CredentialNotFound as exp:
return faults.Fault(faults.CredentialNotFound(exp))
| apache-2.0 | 8,391,760,036,689,786,000 | 36.503106 | 78 | 0.629347 | false |
OCA/l10n-brazil | l10n_br_currency_rate_update/models/res_currency_rate_provider_bcb.py | 1 | 3106 | # Copyright 2019 Akretion - Renato Lima <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
import requests
from odoo import _, api, fields, models
from odoo.exceptions import UserError
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT
class ResCurrencyRateProviderBCB(models.Model):
_inherit = "res.currency.rate.provider"
service = fields.Selection(selection_add=[("BCB", "Brazilian Central Bank")])
@api.model
def _get_supported_currencies(self):
self.ensure_one()
if self.service == "BCB":
            # List of currencies obtained from:
# https://olinda.bcb.gov.br/olinda/servico/PTAX/versao
# /v1/odata/Moedas?$top=100&$format=json&$select=simbolo
return [
"AUD",
"CAD",
"CHF",
"DKK",
"EUR",
"GBP",
"JPY",
"NOK",
"SEK",
"USD",
]
return super()._get_supported_currencies()
@api.model
def _obtain_rates(self, base_currency, currencies, date_from, date_to):
self.ensure_one()
if self.service == "BCB":
if base_currency != "BRL":
raise UserError(
_(
"Brazilian Central Bank is suitable only for companies"
" with BRL as base currency!"
)
)
url = (
"https://olinda.bcb.gov.br/olinda/servico/PTAX/versao/"
"v1/odata/CotacaoMoedaPeriodo(moeda=@moeda,dataInicial"
"=@dataInicial,dataFinalCotacao=@dataFinalCotacao)?"
"format=json&skip=0&top=10000&$filter=tipoBoletim%20eq"
"%20%27Fechamento%27%20or%20tipoBoletim%20eq%20%27Abert"
"ura%27&select=paridadeCompra%2CparidadeVen"
"da%2CcotacaoCompra%2CcotacaoVenda%2CdataHoraCotacao%2"
"CtipoBoletim"
)
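            # The PTAX OData endpoint above is filtered server-side to the
            # opening ("Abertura") and closing ("Fechamento") bulletins only.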
params = dict()
params["@dataInicial"] = date_from.strftime("'%m-%d-%Y'")
params["@dataFinalCotacao"] = date_to.strftime("'%m-%d-%Y'")
data = {}
for cur in currencies:
params["@moeda"] = "'" + cur + "'"
response = requests.get(url, params=params)
if response.ok:
content = response.json()
for rate in content.get("value"):
rate_date = fields.Date.from_string(
rate.get("dataHoraCotacao")
).strftime(DEFAULT_SERVER_DATE_FORMAT)
if data.get(rate_date):
data[rate_date][cur] = rate.get("cotacaoVenda")
else:
rate_dict = {cur: rate.get("cotacaoVenda")}
data[rate_date] = rate_dict
return data
return super()._obtain_rates(base_currency, currencies, date_from, date_to)
| agpl-3.0 | 2,351,460,399,164,173,300 | 35.541176 | 83 | 0.507083 | false |
SUSE/azure-sdk-for-python | azure-mgmt-resource/azure/mgmt/resource/resources/v2016_02_01/models/__init__.py | 9 | 3914 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .deployment_extended_filter import DeploymentExtendedFilter
from .generic_resource_filter import GenericResourceFilter
from .resource_group_filter import ResourceGroupFilter
from .template_link import TemplateLink
from .parameters_link import ParametersLink
from .debug_setting import DebugSetting
from .deployment_properties import DeploymentProperties
from .deployment import Deployment
from .deployment_export_result import DeploymentExportResult
from .resource_management_error_with_details import ResourceManagementErrorWithDetails
from .alias_path_type import AliasPathType
from .alias_type import AliasType
from .provider_resource_type import ProviderResourceType
from .provider import Provider
from .basic_dependency import BasicDependency
from .dependency import Dependency
from .deployment_properties_extended import DeploymentPropertiesExtended
from .deployment_validate_result import DeploymentValidateResult
from .deployment_extended import DeploymentExtended
from .plan import Plan
from .sku import Sku
from .identity import Identity
from .generic_resource import GenericResource
from .resource_group_properties import ResourceGroupProperties
from .resource_group import ResourceGroup
from .resources_move_info import ResourcesMoveInfo
from .export_template_request import ExportTemplateRequest
from .tag_count import TagCount
from .tag_value import TagValue
from .tag_details import TagDetails
from .target_resource import TargetResource
from .http_message import HttpMessage
from .deployment_operation_properties import DeploymentOperationProperties
from .deployment_operation import DeploymentOperation
from .resource_provider_operation_display_properties import ResourceProviderOperationDisplayProperties
from .resource import Resource
from .sub_resource import SubResource
from .resource_group_export_result import ResourceGroupExportResult
from .deployment_extended_paged import DeploymentExtendedPaged
from .provider_paged import ProviderPaged
from .generic_resource_paged import GenericResourcePaged
from .resource_group_paged import ResourceGroupPaged
from .tag_details_paged import TagDetailsPaged
from .deployment_operation_paged import DeploymentOperationPaged
from .resource_management_client_enums import (
DeploymentMode,
ResourceIdentityType,
)
__all__ = [
'DeploymentExtendedFilter',
'GenericResourceFilter',
'ResourceGroupFilter',
'TemplateLink',
'ParametersLink',
'DebugSetting',
'DeploymentProperties',
'Deployment',
'DeploymentExportResult',
'ResourceManagementErrorWithDetails',
'AliasPathType',
'AliasType',
'ProviderResourceType',
'Provider',
'BasicDependency',
'Dependency',
'DeploymentPropertiesExtended',
'DeploymentValidateResult',
'DeploymentExtended',
'Plan',
'Sku',
'Identity',
'GenericResource',
'ResourceGroupProperties',
'ResourceGroup',
'ResourcesMoveInfo',
'ExportTemplateRequest',
'TagCount',
'TagValue',
'TagDetails',
'TargetResource',
'HttpMessage',
'DeploymentOperationProperties',
'DeploymentOperation',
'ResourceProviderOperationDisplayProperties',
'Resource',
'SubResource',
'ResourceGroupExportResult',
'DeploymentExtendedPaged',
'ProviderPaged',
'GenericResourcePaged',
'ResourceGroupPaged',
'TagDetailsPaged',
'DeploymentOperationPaged',
'DeploymentMode',
'ResourceIdentityType',
]
| mit | -6,897,030,004,351,346,000 | 35.240741 | 102 | 0.766735 | false |
shootstar/novatest | nova/openstack/common/rpc/service.py | 3 | 2738 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
from nova.openstack.common.rpc import dispatcher as rpc_dispatcher
from nova.openstack.common import service
LOG = logging.getLogger(__name__)
class Service(service.Service):
"""Service object for binaries running on hosts.
A service enables rpc by listening to queues based on topic and host.
"""
def __init__(self, host, topic, manager=None):
super(Service, self).__init__()
self.host = host
self.topic = topic
if manager is None:
self.manager = self
else:
self.manager = manager
def start(self):
super(Service, self).start()
self.conn = rpc.create_connection(new=True)
LOG.debug(_("Creating Consumer connection for Service %s") %
self.topic)
dispatcher = rpc_dispatcher.RpcDispatcher([self.manager])
# Share this same connection for these Consumers
self.conn.create_consumer(self.topic, dispatcher, fanout=False)
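        # Also listen on a host-scoped topic so callers can address this
        # specific service instance directly.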
node_topic = '%s.%s' % (self.topic, self.host)
self.conn.create_consumer(node_topic, dispatcher, fanout=False)
self.conn.create_consumer(self.topic, dispatcher, fanout=True)
# Hook to allow the manager to do other initializations after
# the rpc connection is created.
if callable(getattr(self.manager, 'initialize_service_hook', None)):
self.manager.initialize_service_hook(self)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
def stop(self):
# Try to shut the connection down, but if we get any sort of
# errors, go ahead and ignore them.. as we're shutting down anyway
try:
self.conn.close()
except Exception:
pass
super(Service, self).stop()
| apache-2.0 | -1,106,720,765,065,482,100 | 35.026316 | 78 | 0.676041 | false |
mjudsp/Tsallis | sklearn/ensemble/tests/test_gradient_boosting_loss_functions.py | 65 | 5529 | """
Testing for the gradient boosting loss functions and initial estimators.
"""
import numpy as np
from numpy.testing import assert_array_equal
from numpy.testing import assert_almost_equal
from numpy.testing import assert_equal
from nose.tools import assert_raises
from sklearn.utils import check_random_state
from sklearn.ensemble.gradient_boosting import BinomialDeviance
from sklearn.ensemble.gradient_boosting import LogOddsEstimator
from sklearn.ensemble.gradient_boosting import LeastSquaresError
from sklearn.ensemble.gradient_boosting import RegressionLossFunction
from sklearn.ensemble.gradient_boosting import LOSS_FUNCTIONS
from sklearn.ensemble.gradient_boosting import _weighted_percentile
def test_binomial_deviance():
# Check binomial deviance loss.
# Check against alternative definitions in ESLII.
bd = BinomialDeviance(2)
# pred has the same BD for y in {0, 1}
assert_equal(bd(np.array([0.0]), np.array([0.0])),
bd(np.array([1.0]), np.array([0.0])))
assert_almost_equal(bd(np.array([1.0, 1.0, 1.0]),
np.array([100.0, 100.0, 100.0])),
0.0)
assert_almost_equal(bd(np.array([1.0, 0.0, 0.0]),
np.array([100.0, -100.0, -100.0])), 0)
# check if same results as alternative definition of deviance (from ESLII)
alt_dev = lambda y, pred: np.mean(np.logaddexp(0.0, -2.0 *
(2.0 * y - 1) * pred))
test_data = [(np.array([1.0, 1.0, 1.0]), np.array([100.0, 100.0, 100.0])),
(np.array([0.0, 0.0, 0.0]), np.array([100.0, 100.0, 100.0])),
(np.array([0.0, 0.0, 0.0]),
np.array([-100.0, -100.0, -100.0])),
(np.array([1.0, 1.0, 1.0]),
np.array([-100.0, -100.0, -100.0]))]
for datum in test_data:
assert_almost_equal(bd(*datum), alt_dev(*datum))
    # check the negative gradient against the alternative definition below
alt_ng = lambda y, pred: (2 * y - 1) / (1 + np.exp(2 * (2 * y - 1) * pred))
for datum in test_data:
assert_almost_equal(bd.negative_gradient(*datum), alt_ng(*datum))
def test_log_odds_estimator():
# Check log odds estimator.
est = LogOddsEstimator()
assert_raises(ValueError, est.fit, None, np.array([1]))
est.fit(None, np.array([1.0, 0.0]))
assert_equal(est.prior, 0.0)
assert_array_equal(est.predict(np.array([[1.0], [1.0]])),
np.array([[0.0], [0.0]]))
def test_sample_weight_smoke():
rng = check_random_state(13)
y = rng.rand(100)
pred = rng.rand(100)
# least squares
loss = LeastSquaresError(1)
loss_wo_sw = loss(y, pred)
loss_w_sw = loss(y, pred, np.ones(pred.shape[0], dtype=np.float32))
assert_almost_equal(loss_wo_sw, loss_w_sw)
def test_sample_weight_init_estimators():
# Smoke test for init estimators with sample weights.
rng = check_random_state(13)
X = rng.rand(100, 2)
sample_weight = np.ones(100)
reg_y = rng.rand(100)
clf_y = rng.randint(0, 2, size=100)
for Loss in LOSS_FUNCTIONS.values():
if Loss is None:
continue
if issubclass(Loss, RegressionLossFunction):
k = 1
y = reg_y
else:
k = 2
y = clf_y
if Loss.is_multi_class:
# skip multiclass
continue
loss = Loss(k)
init_est = loss.init_estimator()
init_est.fit(X, y)
out = init_est.predict(X)
assert_equal(out.shape, (y.shape[0], 1))
sw_init_est = loss.init_estimator()
sw_init_est.fit(X, y, sample_weight=sample_weight)
sw_out = init_est.predict(X)
assert_equal(sw_out.shape, (y.shape[0], 1))
# check if predictions match
assert_array_equal(out, sw_out)
def test_weighted_percentile():
y = np.empty(102, dtype=np.float64)
y[:50] = 0
y[-51:] = 2
y[-1] = 100000
y[50] = 1
sw = np.ones(102, dtype=np.float64)
sw[-1] = 0.0
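    # The huge outlier gets zero weight, so the weighted median is 1.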
score = _weighted_percentile(y, sw, 50)
assert score == 1
def test_weighted_percentile_equal():
y = np.empty(102, dtype=np.float64)
y.fill(0.0)
sw = np.ones(102, dtype=np.float64)
sw[-1] = 0.0
score = _weighted_percentile(y, sw, 50)
assert score == 0
def test_weighted_percentile_zero_weight():
y = np.empty(102, dtype=np.float64)
y.fill(1.0)
sw = np.ones(102, dtype=np.float64)
sw.fill(0.0)
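    # Degenerate case: all weights are zero; since every value in y is 1.0,
    # the helper must still return 1.0.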
score = _weighted_percentile(y, sw, 50)
assert score == 1.0
def test_sample_weight_deviance():
# Test if deviance supports sample weights.
rng = check_random_state(13)
X = rng.rand(100, 2)
sample_weight = np.ones(100)
reg_y = rng.rand(100)
clf_y = rng.randint(0, 2, size=100)
mclf_y = rng.randint(0, 3, size=100)
for Loss in LOSS_FUNCTIONS.values():
if Loss is None:
continue
if issubclass(Loss, RegressionLossFunction):
k = 1
y = reg_y
p = reg_y
else:
k = 2
y = clf_y
p = clf_y
if Loss.is_multi_class:
k = 3
y = mclf_y
# one-hot encoding
p = np.zeros((y.shape[0], k), dtype=np.float64)
for i in range(k):
p[:, i] = y == i
loss = Loss(k)
deviance_w_w = loss(y, p, sample_weight)
deviance_wo_w = loss(y, p)
assert deviance_wo_w == deviance_w_w
| bsd-3-clause | 4,057,296,839,598,219,000 | 30.414773 | 79 | 0.567553 | false |
simonwydooghe/ansible | test/units/utils/test_encrypt.py | 31 | 7816 | # (c) 2018, Matthias Fuchs <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
import pytest
from ansible.errors import AnsibleError, AnsibleFilterError
from ansible.plugins.filter.core import get_encrypted_password
from ansible.utils import encrypt
class passlib_off(object):
def __init__(self):
self.orig = encrypt.PASSLIB_AVAILABLE
def __enter__(self):
encrypt.PASSLIB_AVAILABLE = False
return self
def __exit__(self, exception_type, exception_value, traceback):
encrypt.PASSLIB_AVAILABLE = self.orig
def assert_hash(expected, secret, algorithm, **settings):
if encrypt.PASSLIB_AVAILABLE:
assert encrypt.passlib_or_crypt(secret, algorithm, **settings) == expected
assert encrypt.PasslibHash(algorithm).hash(secret, **settings) == expected
else:
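        # Without passlib, passlib_or_crypt() falls back to crypt, while
        # PasslibHash must raise a clear "passlib must be installed" error.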
assert encrypt.passlib_or_crypt(secret, algorithm, **settings) == expected
with pytest.raises(AnsibleError) as excinfo:
encrypt.PasslibHash(algorithm).hash(secret, **settings)
assert excinfo.value.args[0] == "passlib must be installed to hash with '%s'" % algorithm
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_encrypt_with_rounds_no_passlib():
with passlib_off():
assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000)
assert_hash("$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=10000)
assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
secret="123", algorithm="sha512_crypt", salt="12345678", rounds=5000)
# If passlib is not installed, this is identical to the test_encrypt_with_rounds_no_passlib() test.
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_encrypt_with_rounds():
assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000)
assert_hash("$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=10000)
assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
secret="123", algorithm="sha512_crypt", salt="12345678", rounds=5000)
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_encrypt_default_rounds_no_passlib():
with passlib_off():
assert_hash("$1$12345678$tRy4cXc3kmcfRZVj4iFXr/",
secret="123", algorithm="md5_crypt", salt="12345678")
assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678")
assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
secret="123", algorithm="sha512_crypt", salt="12345678")
assert encrypt.CryptHash("md5_crypt").hash("123")
# If passlib is not installed, this is identical to the test_encrypt_default_rounds_no_passlib() test.
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_encrypt_default_rounds():
assert_hash("$1$12345678$tRy4cXc3kmcfRZVj4iFXr/",
secret="123", algorithm="md5_crypt", salt="12345678")
assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678")
assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
secret="123", algorithm="sha512_crypt", salt="12345678")
assert encrypt.PasslibHash("md5_crypt").hash("123")
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_password_hash_filter_no_passlib():
with passlib_off():
assert not encrypt.PASSLIB_AVAILABLE
assert get_encrypted_password("123", "md5", salt="12345678") == "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/"
with pytest.raises(AnsibleFilterError):
get_encrypted_password("123", "crypt16", salt="12")
def test_password_hash_filter_passlib():
if not encrypt.PASSLIB_AVAILABLE:
pytest.skip("passlib not available")
with pytest.raises(AnsibleFilterError):
get_encrypted_password("123", "sha257", salt="12345678")
# Uses 5000 rounds by default for sha256 matching crypt behaviour
assert get_encrypted_password("123", "sha256", salt="12345678") == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
assert get_encrypted_password("123", "sha256", salt="12345678", rounds=5000) == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
assert (get_encrypted_password("123", "sha256", salt="12345678", rounds=10000) ==
"$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/")
assert (get_encrypted_password("123", "sha512", salt="12345678", rounds=6000) ==
"$6$rounds=6000$12345678$l/fC67BdJwZrJ7qneKGP1b6PcatfBr0dI7W6JLBrsv8P1wnv/0pu4WJsWq5p6WiXgZ2gt9Aoir3MeORJxg4.Z/")
assert (get_encrypted_password("123", "sha512", salt="12345678", rounds=5000) ==
"$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.")
assert get_encrypted_password("123", "crypt16", salt="12") == "12pELHK2ME3McUFlHxel6uMM"
# Try algorithm that uses a raw salt
assert get_encrypted_password("123", "pbkdf2_sha256")
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_do_encrypt_no_passlib():
with passlib_off():
assert not encrypt.PASSLIB_AVAILABLE
assert encrypt.do_encrypt("123", "md5_crypt", salt="12345678") == "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/"
with pytest.raises(AnsibleError):
encrypt.do_encrypt("123", "crypt16", salt="12")
def test_do_encrypt_passlib():
if not encrypt.PASSLIB_AVAILABLE:
pytest.skip("passlib not available")
with pytest.raises(AnsibleError):
encrypt.do_encrypt("123", "sha257_crypt", salt="12345678")
# Uses 5000 rounds by default for sha256 matching crypt behaviour.
assert encrypt.do_encrypt("123", "sha256_crypt", salt="12345678") == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
assert encrypt.do_encrypt("123", "md5_crypt", salt="12345678") == "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/"
assert encrypt.do_encrypt("123", "crypt16", salt="12") == "12pELHK2ME3McUFlHxel6uMM"
def test_random_salt():
res = encrypt.random_salt()
expected_salt_candidate_chars = u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./'
assert len(res) == 8
for res_char in res:
assert res_char in expected_salt_candidate_chars
| gpl-3.0 | -6,380,241,083,227,367,000 | 46.369697 | 141 | 0.712769 | false |
ajnirp/servo | tests/wpt/web-platform-tests/tools/manifest/tests/test_manifest.py | 26 | 2371 | from .. import manifest, item as manifestitem, sourcefile
def test_local_reftest_add():
m = manifest.Manifest()
s = sourcefile.SourceFile("/", "test", "/")
test = manifestitem.RefTest(s, "/test", [("/ref", "==")])
m.local_changes.add(test)
m.update_reftests()
assert list(m) == [(test.path, {test})]
def test_local_reftest_delete_path():
m = manifest.Manifest()
s = sourcefile.SourceFile("/", "test", "/")
test = manifestitem.RefTest(s, "/test", [("/ref", "==")])
m.add(test)
m.local_changes.add_deleted(test.path)
m.update_reftests()
assert list(m) == []
def test_local_reftest_adjusted():
m = manifest.Manifest()
s = sourcefile.SourceFile("/", "test", "/")
test = manifestitem.RefTest(s, "/test", [("/ref", "==")])
m.add(test)
m.update_reftests()
assert m.compute_reftests({test.path: {test}}) == {test}
assert list(m) == [(test.path, {test})]
s_1 = sourcefile.SourceFile("/", "test-1", "/")
test_1 = manifestitem.RefTest(s_1, "/test-1", [("/test", "==")])
m.local_changes.add(test_1)
m.update_reftests()
assert m.compute_reftests({test.path: {test}, test_1.path: {test_1}}) == {test_1}
assert list(m) == [(test_1.path, {test_1})]
def test_manifest_to_json():
m = manifest.Manifest()
s = sourcefile.SourceFile("/", "test", "/")
test = manifestitem.RefTest(s, "/test", [("/ref", "==")])
m.add(test)
s_1 = sourcefile.SourceFile("/", "test-1", "/")
test_1 = manifestitem.RefTest(s_1, "/test-1", [("/test", "==")])
m.local_changes.add(test_1)
m.local_changes.add_deleted(test.path)
m.update_reftests()
json_str = m.to_json()
loaded = manifest.Manifest.from_json("/", json_str)
assert list(loaded) == list(m)
assert loaded.to_json() == json_str
def test_reftest_computation_chain():
m = manifest.Manifest()
s1 = sourcefile.SourceFile("/", "test1", "/")
s2 = sourcefile.SourceFile("/", "test2", "/")
test1 = manifestitem.RefTest(s1, "/test1", [("/test3", "==")])
test2 = manifestitem.RefTest(s2, "/test2", [("/test1", "==")])
m.add(test1)
m.add(test2)
m.update_reftests()
assert m.reftest_nodes == {'test1': {test1},
'test2': {test2}}
assert list(m) == [("test2", {test2})]
assert list(m.local_changes.itertypes()) == []
| mpl-2.0 | -4,457,116,378,822,364,700 | 28.6375 | 85 | 0.570224 | false |
zzxuanyuan/root-compressor-dummy | interpreter/llvm/src/tools/clang/tools/clang-format/clang-format.py | 33 | 3927 | # This file is a minimal clang-format vim-integration. To install:
# - Change 'binary' if clang-format is not on the path (see below).
# - Add to your .vimrc:
#
# map <C-I> :pyf <path-to-this-file>/clang-format.py<cr>
# imap <C-I> <c-o>:pyf <path-to-this-file>/clang-format.py<cr>
#
# The first line enables clang-format for NORMAL and VISUAL mode, the second
# line adds support for INSERT mode. Change "C-I" to another binding if you
# need clang-format on a different key (C-I stands for Ctrl+i).
#
# With this integration you can press the bound key and clang-format will
# format the current line in NORMAL and INSERT mode or the selected region in
# VISUAL mode. The line or region is extended to the next bigger syntactic
# entity.
#
# You can also pass in the variable "l:lines" to choose the range for
# formatting. This variable can either contain "<start line>:<end line>" or
# "all" to format the full file. So, to format the full file, write a function
# like:
# :function FormatFile()
# : let l:lines="all"
# : pyf <path-to-this-file>/clang-format.py
# :endfunction
#
# It operates on the current, potentially unsaved buffer and does not create
# or save any files. To revert a formatting, just undo.
import difflib
import json
import subprocess
import sys
import vim
# set g:clang_format_path to the path to clang-format if it is not on the path
# Change this to the full path if clang-format is not on the path.
binary = 'clang-format'
if vim.eval('exists("g:clang_format_path")') == "1":
binary = vim.eval('g:clang_format_path')
# Change this to format according to other formatting styles. See the output of
# 'clang-format --help' for a list of supported styles. The default looks for
# a '.clang-format' or '_clang-format' file to indicate the style that should be
# used.
style = 'file'
fallback_style = None
if vim.eval('exists("g:clang_format_fallback_style")') == "1":
fallback_style = vim.eval('g:clang_format_fallback_style')
def main():
# Get the current text.
buf = vim.current.buffer
text = '\n'.join(buf)
# Determine range to format.
if vim.eval('exists("l:lines")') == '1':
lines = vim.eval('l:lines')
else:
lines = '%s:%s' % (vim.current.range.start + 1, vim.current.range.end + 1)
# Determine the cursor position.
cursor = int(vim.eval('line2byte(line("."))+col(".")')) - 2
if cursor < 0:
print 'Couldn\'t determine cursor position. Is your file empty?'
return
# Avoid flashing an ugly, ugly cmd prompt on Windows when invoking clang-format.
startupinfo = None
if sys.platform.startswith('win32'):
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE
# Call formatter.
command = [binary, '-style', style, '-cursor', str(cursor)]
if lines != 'all':
command.extend(['-lines', lines])
if fallback_style:
command.extend(['-fallback-style', fallback_style])
if vim.current.buffer.name:
command.extend(['-assume-filename', vim.current.buffer.name])
p = subprocess.Popen(command,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=subprocess.PIPE, startupinfo=startupinfo)
stdout, stderr = p.communicate(input=text)
# If successful, replace buffer contents.
if stderr:
print stderr
if not stdout:
print ('No output from clang-format (crashed?).\n' +
'Please report to bugs.llvm.org.')
else:
lines = stdout.split('\n')
output = json.loads(lines[0])
lines = lines[1:]
sequence = difflib.SequenceMatcher(None, vim.current.buffer, lines)
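    # Apply the diff hunks in reverse so that earlier buffer offsets stay
    # valid while lines are being replaced.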
for op in reversed(sequence.get_opcodes()):
      if op[0] != 'equal':
vim.current.buffer[op[1]:op[2]] = lines[op[3]:op[4]]
if output.get('IncompleteFormat'):
print 'clang-format: incomplete (syntax errors)'
vim.command('goto %d' % (output['Cursor'] + 1))
main()
| lgpl-2.1 | -2,842,902,930,187,002,400 | 36.04717 | 82 | 0.68602 | false |
hrayr-artunyan/shuup | shuup_tests/functional/test_order_edit_with_coupons.py | 2 | 6112 | # This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import decimal
import json
import pytest
from django.core import serializers
from shuup.admin.modules.orders.views.edit import OrderEditView
from shuup.campaigns.models import BasketCampaign, Coupon
from shuup.campaigns.models.basket_conditions import \
BasketTotalProductAmountCondition
from shuup.campaigns.models.basket_effects import BasketDiscountAmount
from shuup.core.models import Order, OrderLineType, Tax, TaxClass
from shuup.core.order_creator import OrderCreator
from shuup.default_tax.models import TaxRule
from shuup.front.basket import get_basket
from shuup.testing.factories import (
create_product, create_random_person, get_default_supplier,
get_initial_order_status, get_payment_method, get_shipping_method,
UserFactory
)
from shuup_tests.admin.test_order_creator import \
get_frontend_request_for_command
from shuup_tests.campaigns import initialize_test
from shuup_tests.utils import assert_contains, printable_gibberish
@pytest.mark.django_db
def test_order_edit_with_coupon(rf):
initial_status = get_initial_order_status()
request, shop, group = initialize_test(rf, include_tax=False)
order = _get_order_with_coupon(request, initial_status)
modifier = UserFactory()
contact = create_random_person(locale="en_US", minimum_name_comp_len=5)
assert order.customer != contact
state = _get_frontend_order_state(shop, contact)
assert order.shop.id == state["shop"]["selected"]["id"]
request = get_frontend_request_for_command(state, "finalize", modifier)
response = OrderEditView.as_view()(request, pk=order.pk)
assert_contains(response, "orderIdentifier")
data = json.loads(response.content.decode("utf8"))
edited_order = Order.objects.get(pk=order.pk)
assert edited_order.identifier == data["orderIdentifier"] == order.identifier
assert edited_order.pk == order.pk
assert edited_order.lines.count() == 4
assert OrderLineType.DISCOUNT in [l.type for l in edited_order.lines.all()]
assert edited_order.coupon_usages.count() == 1
@pytest.mark.django_db
def test_campaign_with_non_active_coupon(rf):
initial_status = get_initial_order_status()
request, shop, group = initialize_test(rf, include_tax=False)
order = _get_order_with_coupon(request, initial_status)
coupon = order.coupon_usages.first().coupon
coupon.active = False
coupon.save()
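    # With the coupon deactivated, re-editing the order should drop the
    # discount line and the coupon usage.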
modifier = UserFactory()
contact = create_random_person(locale="en_US", minimum_name_comp_len=5)
assert order.customer != contact
state = _get_frontend_order_state(shop, contact)
assert order.shop.id == state["shop"]["selected"]["id"]
request = get_frontend_request_for_command(state, "finalize", modifier)
response = OrderEditView.as_view()(request, pk=order.pk)
assert_contains(response, "orderIdentifier")
data = json.loads(response.content.decode("utf8"))
edited_order = Order.objects.get(pk=order.pk)
assert edited_order.identifier == data["orderIdentifier"] == order.identifier
assert edited_order.pk == order.pk
assert edited_order.lines.count() == 3
assert OrderLineType.DISCOUNT not in [l.type for l in edited_order.lines.all()]
assert edited_order.coupon_usages.count() == 0
def _get_order_with_coupon(request, initial_status, condition_product_count=1):
shop = request.shop
basket = get_basket(request)
supplier = get_default_supplier()
product = create_product(printable_gibberish(), shop=shop, supplier=supplier, default_price="50")
basket.add_product(supplier=supplier, shop=shop, product=product, quantity=1)
dc = Coupon.objects.create(code="TEST", active=True)
campaign = BasketCampaign.objects.create(
shop=shop,
name="test",
public_name="test",
coupon=dc,
active=True
)
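    # Flat 20-unit discount, granted as soon as the basket holds at least
    # one product and the coupon code is applied.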
BasketDiscountAmount.objects.create(discount_amount=shop.create_price("20"), campaign=campaign)
rule = BasketTotalProductAmountCondition.objects.create(value=1)
campaign.conditions.add(rule)
campaign.save()
basket.add_code(dc.code)
basket.save()
basket.status = initial_status
creator = OrderCreator(request)
order = creator.create_order(basket)
assert order.lines.count() == 2
assert OrderLineType.DISCOUNT in [l.type for l in order.lines.all()]
return order
def _encode_address(address):
return json.loads(serializers.serialize("json", [address]))[0].get("fields")
def _get_frontend_order_state(shop, contact):
tax = Tax.objects.create(code="test_code", rate=decimal.Decimal("0.20"), name="Default")
tax_class = TaxClass.objects.create(identifier="test_tax_class", name="Default")
rule = TaxRule.objects.create(tax=tax)
rule.tax_classes.add(tax_class)
rule.save()
product = create_product(
sku=printable_gibberish(),
supplier=get_default_supplier(),
shop=shop
)
product.tax_class = tax_class
product.save()
lines = [
{"id": "x", "type": "product", "product": {"id": product.id}, "quantity": "32", "baseUnitPrice": 50}
]
state = {
"customer": {
"id": contact.id if contact else None,
"billingAddress": _encode_address(contact.default_billing_address) if contact else {},
"shippingAddress": _encode_address(contact.default_shipping_address) if contact else {},
},
"lines": lines,
"methods": {
"shippingMethod": {"id": get_shipping_method(shop=shop).id},
"paymentMethod": {"id": get_payment_method(shop=shop).id},
},
"shop": {
"selected": {
"id": shop.id,
"name": shop.safe_translation_getter("name"),
"currency": shop.currency,
"priceIncludeTaxes": shop.prices_include_tax
}
}
}
return state
| agpl-3.0 | 8,872,476,895,792,468,000 | 37.2 | 108 | 0.6893 | false |
yan12125/youtube-dl | youtube_dl/extractor/savefrom.py | 20 | 1081 | # coding: utf-8
from __future__ import unicode_literals
import os.path
import re
from .common import InfoExtractor
class SaveFromIE(InfoExtractor):
IE_NAME = 'savefrom.net'
_VALID_URL = r'https?://[^.]+\.savefrom\.net/\#url=(?P<url>.*)$'
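    # The real video URL is carried in the fragment after "#url=".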
_TEST = {
'url': 'http://en.savefrom.net/#url=http://youtube.com/watch?v=UlVRAPW2WJY&utm_source=youtube.com&utm_medium=short_domains&utm_campaign=ssyoutube.com',
'info_dict': {
'id': 'UlVRAPW2WJY',
'ext': 'mp4',
'title': 'About Team Radical MMA | MMA Fighting',
'upload_date': '20120816',
'uploader': 'Howcast',
'uploader_id': 'Howcast',
'description': r're:(?s).* Hi, my name is Rene Dreifuss\. And I\'m here to show you some MMA.*',
},
'params': {
'skip_download': True
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = os.path.splitext(url.split('/')[-1])[0]
return self.url_result(mobj.group('url'), video_id=video_id)
| unlicense | -5,230,064,784,322,526,000 | 30.794118 | 159 | 0.561517 | false |
candrews/portage | pym/portage/debug.py | 9 | 3316 | # Copyright 1999-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import os
import sys
try:
import threading
except ImportError:
import dummy_threading as threading
import portage.const
from portage.util import writemsg
def set_trace(on=True):
if on:
t = trace_handler()
threading.settrace(t.event_handler)
sys.settrace(t.event_handler)
else:
sys.settrace(None)
threading.settrace(None)
class trace_handler(object):
def __init__(self):
python_system_paths = []
for x in sys.path:
if os.path.basename(x) == "python%s.%s" % sys.version_info[:2]:
python_system_paths.append(x)
self.ignore_prefixes = []
for x in python_system_paths:
self.ignore_prefixes.append(x + os.sep)
self.trim_filename = prefix_trimmer(os.path.join(portage.const.PORTAGE_BASE_PATH, "pym") + os.sep).trim
self.show_local_lines = False
self.max_repr_length = 200
def event_handler(self, *args):
frame, event, _arg = args
if "line" == event:
if self.show_local_lines:
self.trace_line(*args)
else:
if not self.ignore_filename(frame.f_code.co_filename):
self.trace_event(*args)
return self.event_handler
def trace_event(self, frame, event, arg):
writemsg("%s line=%d name=%s event=%s %slocals=%s\n" % \
(self.trim_filename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_code.co_name,
event,
self.arg_repr(frame, event, arg),
self.locals_repr(frame, event, arg)))
def arg_repr(self, _frame, event, arg):
my_repr = None
if "return" == event:
my_repr = repr(arg)
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
return "value=%s " % my_repr
elif "exception" == event:
my_repr = repr(arg[1])
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
return "type=%s value=%s " % (arg[0], my_repr)
return ""
def trace_line(self, frame, _event, _arg):
writemsg("%s line=%d\n" % (self.trim_filename(frame.f_code.co_filename), frame.f_lineno))
def ignore_filename(self, filename):
if filename:
for x in self.ignore_prefixes:
if filename.startswith(x):
return True
return False
def locals_repr(self, frame, _event, _arg):
"""Create a representation of the locals dict that is suitable for
tracing output."""
my_locals = frame.f_locals.copy()
# prevent unsafe __repr__ call on self when __init__ is called
# (method calls aren't safe until after __init__ has completed).
if frame.f_code.co_name == "__init__" and "self" in my_locals:
my_locals["self"] = "omitted"
# We omit items that will lead to unreasonable bloat of the trace
# output (and resulting log file).
for k, v in my_locals.items():
my_repr = repr(v)
if len(my_repr) > self.max_repr_length:
my_locals[k] = "omitted"
return my_locals
class prefix_trimmer(object):
def __init__(self, prefix):
self.prefix = prefix
self.cut_index = len(prefix)
self.previous = None
self.previous_trimmed = None
def trim(self, s):
"""Remove a prefix from the string and return the result.
The previous result is automatically cached."""
if s == self.previous:
return self.previous_trimmed
else:
if s.startswith(self.prefix):
self.previous_trimmed = s[self.cut_index:]
else:
self.previous_trimmed = s
return self.previous_trimmed
| gpl-2.0 | 6,545,502,493,311,585,000 | 26.633333 | 105 | 0.675814 | false |
1flow/1flow | oneflow/core/migrations/0051_auto.py | 2 | 43273 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding M2M table for field users on 'Author'
m2m_table_name = db.shorten_name(u'core_author_users')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('author', models.ForeignKey(orm['core.author'], null=False)),
('user', models.ForeignKey(orm[u'base.user'], null=False))
))
db.create_unique(m2m_table_name, ['author_id', 'user_id'])
def backwards(self, orm):
# Removing M2M table for field users on 'Author'
db.delete_table(db.shorten_name(u'core_author_users'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.user': {
'Meta': {'object_name': 'User'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'email_announcements': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
'hash_codes': ('jsonfield.fields.JSONField', [], {'default': "{'unsubscribe': '1d75c8513e2e45f2b4c6893e0a7766d1'}", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'register_data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'sent_emails': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.article': {
'Meta': {'object_name': 'Article', '_ormbases': ['core.BaseItem']},
u'baseitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseItem']", 'unique': 'True', 'primary_key': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'date_published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'is_orphaned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'publications'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'url_absolute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'word_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'core.author': {
'Meta': {'unique_together': "(('name', 'website'),)", 'object_name': 'Author'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Author']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_unsure': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'origin_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'authors'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'})
},
'core.basefeed': {
'Meta': {'object_name': 'BaseFeed'},
'closed_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_fetch': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'errors': ('json_field.fields.JSONField', [], {'default': '[]', 'blank': 'True'}),
'fetch_interval': ('django.db.models.fields.IntegerField', [], {'default': '43200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.BaseItem']"}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Language']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'options': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.basefeed_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.baseitem': {
'Meta': {'object_name': 'BaseItem'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'authored_items'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Author']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'default_rating': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseItem']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'origin': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.baseitem_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'sources_rel_+'", 'null': 'True', 'to': "orm['core.BaseItem']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'items'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'text_direction': ('django.db.models.fields.CharField', [], {'default': "u'ltr'", 'max_length': '3'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.combinedfeed': {
'Meta': {'object_name': 'CombinedFeed'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.combinedfeedrule': {
'Meta': {'ordering': "('position',)", 'object_name': 'CombinedFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'combinedfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.CombinedFeed']"}),
'feeds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.BaseFeed']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.corepermissions': {
'Meta': {'object_name': 'CorePermissions'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'core.folder': {
'Meta': {'unique_together': "(('name', 'user', 'parent'),)", 'object_name': 'Folder'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'folders'", 'to': u"orm['base.User']"})
},
'core.helpcontent': {
'Meta': {'ordering': "['ordering', 'id']", 'object_name': 'HelpContent'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content_en': ('django.db.models.fields.TextField', [], {}),
'content_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name_nt': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'core.helpwizards': {
'Meta': {'object_name': 'HelpWizards'},
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'wizards'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'show_all': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'welcome_beta_shown': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.historyentry': {
'Meta': {'object_name': 'HistoryEntry'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.historyentry_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.homepreferences': {
'Meta': {'object_name': 'HomePreferences'},
'experimental_features': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'home'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'read_shows': ('django.db.models.fields.IntegerField', [], {'default': '2', 'blank': 'True'}),
'show_advanced_preferences': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'style': ('django.db.models.fields.CharField', [], {'default': "u'RL'", 'max_length': '2', 'blank': 'True'})
},
'core.language': {
'Meta': {'object_name': 'Language'},
'dj_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Language']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.mailaccount': {
'Meta': {'unique_together': "(('user', 'hostname', 'username'),)", 'object_name': 'MailAccount'},
'conn_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_conn': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2007, 1, 1, 0, 0)'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_usable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'use_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'core.mailfeed': {
'Meta': {'object_name': 'MailFeed', '_ormbases': ['core.BaseFeed']},
'account': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'mail_feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.MailAccount']"}),
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'finish_action': ('django.db.models.fields.CharField', [], {'default': "u'markread'", 'max_length': '10'}),
'match_action': ('django.db.models.fields.CharField', [], {'default': "u'scrape'", 'max_length': '10'}),
'rules_operation': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10'}),
'scrape_blacklist': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'scrape_whitelist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
'core.mailfeedrule': {
'Meta': {'ordering': "('group', 'position')", 'object_name': 'MailFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_operation': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'header_field': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mailfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeed']"}),
'match_case': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'match_type': ('django.db.models.fields.CharField', [], {'default': "u'contains'", 'max_length': '10'}),
'match_value': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '1024'}),
'other_header': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.originaldata': {
'Meta': {'object_name': 'OriginalData'},
'feedparser': ('django.db.models.fields.TextField', [], {}),
'feedparser_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'google_reader': ('django.db.models.fields.TextField', [], {}),
'google_reader_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'original_data'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.BaseItem']"}),
'raw_email': ('django.db.models.fields.TextField', [], {}),
'raw_email_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.preferences': {
'Meta': {'object_name': 'Preferences'},
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['base.User']", 'unique': 'True', 'primary_key': 'True'})
},
'core.read': {
'Meta': {'unique_together': "(('user', 'item'),)", 'object_name': 'Read'},
'bookmark_type': ('django.db.models.fields.CharField', [], {'default': "u'U'", 'max_length': '2'}),
'check_set_subscriptions_131004_done': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_analysis': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_archived': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_auto_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_bookmarked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_fact': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_fun': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowhow': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowledge': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_number': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_prospective': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_quote': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_rules': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_starred': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_analysis': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_auto_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_bookmarked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fun': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowhow': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_number': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_prospective': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_quote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_rules': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_starred': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reads'", 'to': "orm['core.BaseItem']"}),
'knowledge_type': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'senders': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'reads_sent'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'reads'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_reads'", 'to': u"orm['base.User']"})
},
'core.readpreferences': {
'Meta': {'object_name': 'ReadPreferences'},
'auto_mark_read_delay': ('django.db.models.fields.IntegerField', [], {'default': '4500', 'blank': 'True'}),
'bookmarked_marks_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'bookmarked_marks_unread': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'read'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'read_switches_to_fullscreen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'reading_speed': ('django.db.models.fields.IntegerField', [], {'default': '200', 'blank': 'True'}),
'show_bottom_navbar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'starred_marks_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'starred_marks_read': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'starred_removes_bookmarked': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'watch_attributes_mark_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.rssatomfeed': {
'Meta': {'object_name': 'RssAtomFeed', '_ormbases': ['core.BaseFeed']},
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'last_etag': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'})
},
'core.selectorpreferences': {
'Meta': {'object_name': 'SelectorPreferences'},
'extended_folders_depth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'folders_show_unread_count': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lists_show_unread_count': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'selector'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'show_closed_streams': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subscriptions_in_multiple_folders': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'titles_show_unread_count': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.sharepreferences': {
'Meta': {'object_name': 'SharePreferences'},
'default_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'share'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"})
},
'core.simpletag': {
'Meta': {'unique_together': "(('name', 'language'),)", 'object_name': 'SimpleTag'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'origin_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'origin_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.SimpleTag']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.snappreferences': {
'Meta': {'object_name': 'SnapPreferences'},
'default_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'snap'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'select_paragraph': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.staffpreferences': {
'Meta': {'object_name': 'StaffPreferences'},
'allow_all_articles': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'no_home_redirect': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'preferences': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'staff'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.Preferences']"}),
'reading_lists_show_bad_articles': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'selector_shows_admin_links': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'super_powers_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'core.subscription': {
'Meta': {'unique_together': "(('feed', 'user'),)", 'object_name': 'Subscription'},
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'blank': 'True', 'to': "orm['core.BaseFeed']"}),
'folders': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Folder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'reads': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Read']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_subscriptions'", 'blank': 'True', 'to': u"orm['base.User']"})
},
'core.userfeeds': {
'Meta': {'object_name': 'UserFeeds'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'imported_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'imported_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'received_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'received_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'sent_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'sent_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user_feeds'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'written_items_user_feed'", 'unique': 'True', 'null': 'True', 'to': "orm['core.BaseFeed']"})
},
'core.userimport': {
'Meta': {'object_name': 'UserImport', '_ormbases': ['core.HistoryEntry']},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'historyentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.HistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'results': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'urls': ('django.db.models.fields.TextField', [], {})
},
'core.usersubscriptions': {
'Meta': {'object_name': 'UserSubscriptions'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogs'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Subscription']"}),
'imported_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'imported_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'received_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'received_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'sent_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'sent_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user_subscriptions'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'written_items_user_subscriptions'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Subscription']"})
},
'core.website': {
'Meta': {'object_name': 'WebSite'},
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'}),
'fetch_limit_nr': ('django.db.models.fields.IntegerField', [], {'default': '16', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'mail_warned': ('json_field.fields.JSONField', [], {'default': '[]', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.WebSite']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['core'] | agpl-3.0 | 3,578,384,178,647,015,000 | 94.107692 | 226 | 0.557553 | false |
bloff/ZeroNet | plugins/Zeroname/updater/zeroname_updater.py | 13 | 5379 | import time
import json
import os
import sys
import re
import socket
from bitcoinrpc.authproxy import AuthServiceProxy
def publish():
print "* Signing..."
os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"]))
print "* Publishing..."
os.system("python zeronet.py sitePublish %s" % config["site"])
def processNameOp(domain, value):
if not value.startswith("{"):
return False
try:
data = json.loads(value)
except Exception, err:
print "Json load error: %s" % err
return False
if "zeronet" not in data:
print "No zeronet in ", data.keys()
return False
if not isinstance(data["zeronet"], dict):
print "Not dict: ", data["zeronet"]
return False
if not re.match("^[a-z]([a-z0-9-]{0,62}[a-z0-9])?$", domain):
print "Invalid domain: ", domain
return False
if "slave" in sys.argv:
print "Waiting for master update arrive"
time.sleep(30) # Wait 30 sec to allow master updater
# Note: Requires the file data/names.json to exist and contain "{}" to work
names_raw = open(names_path, "rb").read()
names = json.loads(names_raw)
for subdomain, address in data["zeronet"].items():
subdomain = subdomain.lower()
address = re.sub("[^A-Za-z0-9]", "", address)
print subdomain, domain, "->", address
if subdomain:
names["%s.%s.bit" % (subdomain, domain)] = address
else:
names["%s.bit" % domain] = address
new_names_raw = json.dumps(names, indent=2, sort_keys=True)
if new_names_raw != names_raw:
open(names_path, "wb").write(new_names_raw)
return True
else:
print "names not changed"
return False
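# Illustrative example (addresses are placeholders, not real ones): a Namecoin value of
#   {"zeronet": {"": "1RootAddrPlaceholder", "blog": "1BlogAddrPlaceholder"}}
# registered for the domain "mysite" yields these names.json entries:
#   "mysite.bit"      -> 1RootAddrPlaceholder
#   "blog.mysite.bit" -> 1BlogAddrPlaceholder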
def processBlock(block_id):
print "Processing block #%s..." % block_id
s = time.time()
block_hash = rpc.getblockhash(block_id)
block = rpc.getblock(block_hash)
print "Checking %s tx" % len(block["tx"])
updated = 0
for tx in block["tx"]:
try:
transaction = rpc.getrawtransaction(tx, 1)
for vout in transaction.get("vout", []):
if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
name_op = vout["scriptPubKey"]["nameOp"]
updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
except Exception, err:
print "Error processing tx #%s %s" % (tx, err)
print "Done in %.3fs (updated %s)." % (time.time() - s, updated)
if updated:
publish()
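# For reference (keys as checked above, value shortened): a name-operation vout
# carries vout["scriptPubKey"]["nameOp"] == {"name": "d/mysite", "value": "{\"zeronet\": ...}"},
# and the "d/" namespace prefix is stripped before processNameOp() is called.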
# Loading config...
# Check whether platform is on windows or linux
# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin
if sys.platform == "win32":
namecoin_location = os.getenv('APPDATA') + "/Namecoin/"
else:
namecoin_location = os.path.expanduser("~/.namecoin/")
config_path = namecoin_location + 'zeroname_config.json'
if not os.path.isfile(config_path): # Create sample config
open(config_path, "w").write(
json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': 223911}, indent=2)
)
print "Example config written to %s" % config_path
sys.exit(0)
config = json.load(open(config_path))
names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"])
os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is.
# Getting rpc connect details
namecoin_conf = open(namecoin_location + "namecoin.conf").read()
rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%[email protected]:8336" % (rpc_user, rpc_pass)
# Connecting to RPC
rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
last_block = int(rpc.getinfo()["blocks"])
if not config["lastprocessed"]: # Start processing from last block
config["lastprocessed"] = last_block
# Processing skipped blocks
print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
for block_id in range(config["lastprocessed"], last_block + 1):
processBlock(block_id)
# processBlock(223911) # Testing zeronetwork.bit
# processBlock(227052) # Testing brainwallets.bit
# processBlock(236824) # Utf8 domain name (invalid should skip)
# processBlock(236752) # Uppercase domain (invalid should skip)
# processBlock(236870) # Encoded domain (should pass)
# sys.exit(0)
while 1:
print "Waiting for new block",
sys.stdout.flush()
while 1:
try:
rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
if (int(rpc.getinfo()["blocks"]) > last_block):
break
time.sleep(1)
rpc.waitforblock()
print "Found"
break # Block found
except socket.timeout: # Timeout
print ".",
sys.stdout.flush()
except Exception, err:
print "Exception", err.__class__, err
time.sleep(5)
last_block = int(rpc.getinfo()["blocks"])
for block_id in range(config["lastprocessed"] + 1, last_block + 1):
processBlock(block_id)
config["lastprocessed"] = last_block
open(config_path, "w").write(json.dumps(config, indent=2))
| gpl-2.0 | 6,412,441,550,314,338,000 | 33.480769 | 124 | 0.618702 | false |
nigelsmall/py2neo | setup.py | 1 | 2349 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2016, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
from py2neo import __author__, __email__, __license__, __package__, __version__
packages = find_packages(exclude=("book", "demo", "test", "test_ext", "test_ext.*"))
package_metadata = {
"name": __package__,
"version": __version__,
"description": "Python client library and toolkit for Neo4j",
"long_description": "Py2neo is a client library and comprehensive toolkit for working with "
"Neo4j from within Python applications and from the command line. The "
"core library has no external dependencies and has been carefully "
"designed to be easy and intuitive to use.",
"author": __author__,
"author_email": __email__,
"url": "http://py2neo.org/",
"entry_points": {
"console_scripts": [
"py2neo = py2neo.__init__:main",
"neokit = neokit:main",
"geoff = py2neo.ext.geoff.__main__:main",
],
},
"packages": packages,
"py_modules": ["neokit"],
"license": __license__,
"classifiers": [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Database",
"Topic :: Software Development",
],
"zip_safe": False,
}
setup(**package_metadata)
| apache-2.0 | 7,692,302,430,956,157,000 | 35.703125 | 96 | 0.624521 | false |
apruden/opal | opal-python-client/src/main/python/opal/perm_project.py | 1 | 1309 | """
Apply permissions on a project.
"""
import sys
import pycurl
import opal.core
import opal.perm
PERMISSIONS = {
'administrate': 'PROJECT_ALL'
}
def add_arguments(parser):
"""
Add command specific options
"""
opal.perm.add_permission_arguments(parser, PERMISSIONS.keys())
parser.add_argument('--project', '-pr', required=True, help='Project name')
def do_command(args):
"""
Execute permission command
"""
# Build and send requests
try:
opal.perm.validate_args(args, PERMISSIONS)
request = opal.core.OpalClient.build(opal.core.OpalClient.LoginInfo.parse(args)).new_request()
if args.verbose:
request.verbose()
# send request
if args.delete:
request.delete()
else:
request.post()
try:
response = request.resource(opal.perm.do_ws(args, ['project', args.project, 'permissions', 'project'], PERMISSIONS)).send()
        except Exception, e:
            print e
# format response
if response.code != 200:
print response.content
    except pycurl.error, error:
        # pycurl errors are (errno, message) tuples; handle them before the
        # generic Exception clause, which would otherwise shadow this branch.
        errno, errstr = error
        print >> sys.stderr, 'An error occurred: ', errstr
        sys.exit(2)
    except Exception, e:
        print e
        sys.exit(2)
lagopus/lagopus | test/datastore/long_run/lib/async_datastore_cmd.py | 1 | 3080 | #!/usr/bin/env python
import sys
import socket
import ssl
import os
import select
import json
import logging
import asyncore
import threading
import six
from six.moves import _thread
from six.moves import queue
from contextlib import contextmanager
from const import *
class AsyncDataStoreCmd(asyncore.dispatcher):
def __init__(self, host="127.0.0.1", port=12345, is_tls=False,
certfile=None, keyfile=None, ca_certs=None):
asyncore.dispatcher.__init__(self)
self.is_tls = is_tls
if self.is_tls:
self.certfile = certfile
self.keyfile = keyfile
self.ca_certs = ca_certs
self.host = host
self.port = port
self.wbuf = b""
self.queue = queue.Queue()
self.th = None
def create_sock(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.is_tls:
ssl_sock = ssl.wrap_socket(
sock,
certfile=self.certfile,
keyfile=self.keyfile,
ca_certs=self.ca_certs,
cert_reqs=ssl.CERT_REQUIRED)
sock = ssl_sock
sock.setblocking(0)
self.set_socket(sock)
def connect(self):
asyncore.dispatcher.connect(self, (self.host, self.port))
proto = "TLS" if self.is_tls else "TCP"
logging.info(
"connected: " + self.host + ":" + str(self.port) + "(" + proto + ")")
def handle_close(self):
self.close()
def writable(self):
return ((len(self.wbuf) > 0) or (not self.queue.empty()))
def handle_write(self):
try:
if len(self.wbuf) == 0:
self.wbuf = self.queue.get_nowait()
if self.wbuf is None:
_thread.exit()
w = self.wbuf
if six.PY3:
w = self.wbuf.encode()
sentlen = self.send(w)
self.wbuf = self.wbuf[sentlen:]
except queue.Empty:
pass
def readable(self):
return True
def handle_read(self):
        # Replies are not consumed by callers; read them to keep the socket
        # drained and log them for debugging.
        data = self.recv(BUFSIZE)
        if not data:
            raise RuntimeError("connection broken!")
        logging.debug("recv: %s" % data)
def send_cmd(self, cmd):
if cmd is not None:
cmd += "\n"
self.queue.put(cmd)
def loop(self):
asyncore.loop(timeout=0.1)
def run(self):
self.th = threading.Thread(target=self.loop)
self.th.start()
def join(self):
self.th.join()
    def is_alive(self):
        return self.th.is_alive()
@contextmanager
def open_async_ds_cmd(**kwds):
try:
adsc = AsyncDataStoreCmd(**kwds)
adsc.create_sock()
adsc.connect()
adsc.run()
yield adsc
finally:
adsc.send_cmd(None)
adsc.join()
adsc.close()
if __name__ == "__main__":
# tests
# precondition: start lagopus.
with open_async_ds_cmd() as adsc:
adsc.send_cmd("channel cahnnel01 create")
adsc.send_cmd("channel")
| apache-2.0 | 3,564,457,098,798,977,000 | 23.444444 | 81 | 0.546429 | false |
AutorestCI/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/replica_health_state_chunk_list.py | 1 | 1121 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ReplicaHealthStateChunkList(Model):
"""The list of replica health state chunks that respect the input filters in
the chunk query. Returned by get cluster health state chunks query.
.
:param items: The list of replica health state chunks that respect the
input filters in the chunk query.
:type items: list of :class:`ReplicaHealthStateChunk
<azure.servicefabric.models.ReplicaHealthStateChunk>`
"""
_attribute_map = {
'items': {'key': 'Items', 'type': '[ReplicaHealthStateChunk]'},
}
def __init__(self, items=None):
self.items = items
| mit | -5,207,206,099,925,429,000 | 35.16129 | 80 | 0.618198 | false |
scsouthw/project-oxford-python | setup.py | 1 | 2297 | from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='projectoxford',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# http://packaging.python.org/en/latest/tutorial.html#version
version='0.2.0',
description='This project extends the Project Oxford API surface to support Python.',
long_description=long_description,
# The project's main homepage.
url='https://github.com/scsouthw/project-oxford-python',
download_url='https://github.com/scsouthw/project-oxford-python',
# Author details
author='Microsoft',
author_email='[email protected]',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
# operating systems
'Operating System :: OS Independent',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='computer vision face detection linguistics language project oxford',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
test_suite='tests.projectoxford_tests'
)
| mit | 850,265,618,725,846,100 | 34.338462 | 89 | 0.673923 | false |
glaubitz/fs-uae-debian | arcade/OpenGL/GL/IBM/rasterpos_clip.py | 9 | 1248 | '''OpenGL extension IBM.rasterpos_clip
This module customises the behaviour of the
OpenGL.raw.GL.IBM.rasterpos_clip to provide a more
Python-friendly API
Overview (from the spec)
IBM_rasterpos_clip extends the semantics of the RasterPos functions. It
provides an enable that allows a raster position that would normally be
clipped to be treated as a valid (albeit out-of-viewport) position.
This extension allows applications to specify geometry-aligned pixel
primitives that may be partially off-screen. These primitives are
tested on a pixel-by-pixel basis without being rejected completely
because of an invalid raster position.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/IBM/rasterpos_clip.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.IBM.rasterpos_clip import *
from OpenGL.raw.GL.IBM.rasterpos_clip import _EXTENSION_NAME
def glInitRasterposClipIBM():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION | gpl-2.0 | -8,012,672,822,462,096,000 | 35.735294 | 73 | 0.795673 | false |
DrEVILish/Plex-Remote-Transcoder | setup.py | 2 | 1237 | from setuptools import setup, find_packages
from codecs import open
from os import path
import re
here = path.abspath(path.dirname(__file__))
def get_version():
return re.search("__version__ = \"([\d\.]+)\"", open("prt.py").read()).groups()[0]
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='prt',
version=get_version(),
description='A remote transcoder for Plex',
long_description=long_description,
url='https://github.com/wnielson/Plex-Remote-Transcoder',
author='Weston Nielson',
author_email='wnielson@github',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='plex media server, distributed plex, load balancing, remote transcoding',
py_modules=["prt"],
entry_points={
'console_scripts': [
'prt=prt:main',
'prt_local=prt:transcode_local',
'prt_remote=prt:transcode_remote'
],
},
)
| mit | -6,703,332,722,801,700,000 | 29.170732 | 87 | 0.611964 | false |
Klearnel-Devs/klearnel-manager | controller/Networker.py | 1 | 3096 | ## @package controller
# Handles communication between Klearnel & Klearnel Manager
#
# @author Antoine Ceyssens <[email protected]> & Derek Van Hove <[email protected]>
import socket, re
from controller.Crypter import Crypter
from model.Exceptions import *
## Class that sends/receives information between Klearnel & Klearnel Manager
class Networker:
s = None
SOCK_ACK = "1"
SOCK_NACK = "2"
SOCK_DENIED = "3"
SOCK_UNK = "4"
SOCK_ABORTED = "8"
SOCK_RETRY = "9"
def __init__(self):
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ## Method that initiates the connection between Klearnel & Klearnel Manager
# @param client The host as an IP address or hostname
# @param port The port on which to connect
# @throws NoConnectivity
def connect_to(self, client, port=42225):
try:
if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", client.ip):
ip_addr = client.ip
else:
ip_addr = socket.gethostbyname(client.name)
self.s.connect((ip_addr, port))
        except Exception:
            raise NoConnectivity("Unable to find " + client.name)
## Method that formats and sends data through the open socket
# @param value The value to send
# @throws ConnectionError Raised if ack not positive
def send_val(self, value):
if type(value) is str:
self.s.send(bytes(value, 'UTF-8'))
elif type(value) is bytes:
self.s.send(value)
else:
self.s.send(bytes(value))
ack = self.s.recv(1).decode('UTF-8')
if ack != self.SOCK_ACK:
raise ConnectionError("The operation couldn't be executed on the device, error: "+ack)
## Returns an ACK decoded in UTF8
def get_ack(self):
return self.s.recv(1).decode('UTF-8')
    ## Receives successive packets until an 'EOF' marker and collects them into a list
# @param buf_size The size of the packets to receive, defaults to 20
# @return Returns the decoded data
def get_multiple_data(self, buf_size=20):
result = []
while True:
new_v = self.s.recv(buf_size).decode('UTF-8')
self.s.send(bytes(self.SOCK_ACK, 'UTF-8'))
if new_v == 'EOF':
break
result.append(new_v)
return result
    ## Retrieves and decodes socket data
# @param buf_size The size of the packets to receive
# @return Returns the decoded data
def get_data(self, buf_size):
        b_result = bytes()
        end = False
        # Replies are fixed-size frames: read exactly buf_size bytes one at a
        # time, keeping everything up to the first NUL/0xFF terminator.
        for i in range(0, buf_size):
            char = self.s.recv(1)
            if not end:
                if char not in [b'\x00', b'\xff']:
                    b_result += char
                else:
                    # Terminator reached: pad to the frame size and keep
                    # draining the remaining frame bytes without storing them.
                    for j in range(i, buf_size):
                        b_result += b'\x00'
                    end = True
        result = b_result.decode('UTF-8')
        # Strip the NUL padding and return only the payload.
        return result.split('\x00')[0]
## Sends an ACK in UTF8
# @param value The value to send
def send_ack(self, value):
self.s.send(bytes(value, 'UTF-8'))
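# Minimal usage sketch (illustrative, not part of the original module; `client`
# stands for any object exposing `ip` and `name` attributes, as connect_to() expects):
#
#   net = Networker()
#   net.connect_to(client)        # defaults to port 42225
#   net.send_val("some command")  # raises ConnectionError on a non-ACK reply
#   reply = net.get_data(256)     # 256 is an assumed frame size for the example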
| gpl-2.0 | 2,496,705,820,686,153,700 | 33.4 | 98 | 0.57655 | false |
evansd/django | django/db/backends/postgresql/features.py | 6 | 1830 | from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import InterfaceError
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
allows_group_by_selected_pks = True
can_return_id_from_insert = True
can_return_ids_from_bulk_insert = True
has_real_datatype = True
has_native_uuid_field = True
has_native_duration_field = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_of = True
has_bulk_insert = True
uses_savepoints = True
can_release_savepoints = True
supports_tablespaces = True
supports_transactions = True
can_introspect_autofield = True
can_introspect_ip_address_field = True
can_introspect_small_integer_field = True
can_distinct_on_fields = True
can_rollback_ddl = True
supports_combined_alters = True
nulls_order_largest = True
closed_cursor_error_class = InterfaceError
has_case_insensitive_like = False
requires_sqlparse_for_splitting = False
greatest_least_ignores_nulls = True
can_clone_databases = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
@cached_property
def has_select_for_update_skip_locked(self):
return self.connection.pg_version >= 90500
@cached_property
def has_brin_index_support(self):
return self.connection.pg_version >= 90500
@cached_property
def has_jsonb_datatype(self):
return self.connection.pg_version >= 90400
@cached_property
def has_jsonb_agg(self):
return self.connection.pg_version >= 90500
@cached_property
def has_gin_pending_list_limit(self):
return self.connection.pg_version >= 90500
| bsd-3-clause | -2,447,477,456,076,253,000 | 32.272727 | 65 | 0.712022 | false |
slohse/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_organization.py | 27 | 2759 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_organization
version_added: "2.3"
author: "Wayne Witzel III (@wwitzel3)"
short_description: create, update, or destroy Ansible Tower organizations
description:
- Create, update, or destroy Ansible Tower organizations. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- Name to use for the organization.
required: True
description:
description:
- The description to use for the organization.
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Create tower organization
tower_organization:
name: "Foo"
description: "Foo bar organization"
state: present
tower_config_file: "~/tower_cli.cfg"
'''
from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def main():
argument_spec = dict(
name=dict(required=True),
description=dict(),
state=dict(choices=['present', 'absent'], default='present'),
)
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
name = module.params.get('name')
description = module.params.get('description')
state = module.params.get('state')
json_output = {'organization': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
organization = tower_cli.get_resource('organization')
try:
if state == 'present':
result = organization.modify(name=name, description=description, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = organization.delete(name=name)
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update the organization: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,042,290,459,153,168,000 | 28.042105 | 105 | 0.647336 | false |
gangadhar-kadam/verve_erp | erpnext/controllers/recurring_document.py | 12 | 6770 | from __future__ import unicode_literals
import frappe
import frappe.utils
import frappe.defaults
from frappe.utils import add_days, cint, cstr, date_diff, flt, getdate, nowdate, \
get_first_day, get_last_day, comma_and
from frappe.model.naming import make_autoname
from frappe import _, msgprint, throw
from erpnext.accounts.party import get_party_account, get_due_date, get_party_details
from frappe.model.mapper import get_mapped_doc
month_map = {'Monthly': 1, 'Quarterly': 3, 'Half-yearly': 6, 'Yearly': 12}
date_field_map = {
"Sales Order": "transaction_date",
"Sales Invoice": "posting_date",
"Purchase Order": "transaction_date",
"Purchase Invoice": "posting_date"
}
def create_recurring_documents():
manage_recurring_documents("Sales Order")
manage_recurring_documents("Sales Invoice")
manage_recurring_documents("Purchase Order")
manage_recurring_documents("Purchase Invoice")
def manage_recurring_documents(doctype, next_date=None, commit=True):
"""
Create recurring documents on specific date by copying the original one
and notify the concerned people
"""
next_date = next_date or nowdate()
date_field = date_field_map[doctype]
recurring_documents = frappe.db.sql("""select name, recurring_id
from `tab{}` where ifnull(is_recurring, 0)=1
and docstatus=1 and next_date='{}'
and next_date <= ifnull(end_date, '2199-12-31')""".format(doctype, next_date))
exception_list = []
for ref_document, recurring_id in recurring_documents:
if not frappe.db.sql("""select name from `tab%s`
where %s=%s and recurring_id=%s and docstatus=1"""
% (doctype, date_field, '%s', '%s'), (next_date, recurring_id)):
try:
ref_wrapper = frappe.get_doc(doctype, ref_document)
if hasattr(ref_wrapper, "before_recurring"):
ref_wrapper.before_recurring()
new_document_wrapper = make_new_document(ref_wrapper, date_field, next_date)
send_notification(new_document_wrapper)
if commit:
frappe.db.commit()
except:
if commit:
frappe.db.rollback()
frappe.db.begin()
frappe.db.sql("update `tab%s` \
set is_recurring = 0 where name = %s" % (doctype, '%s'),
(ref_document))
notify_errors(ref_document, doctype, ref_wrapper.get("customer") or ref_wrapper.get("supplier"),
ref_wrapper.owner)
frappe.db.commit()
exception_list.append(frappe.get_traceback())
finally:
if commit:
frappe.db.begin()
if exception_list:
exception_message = "\n\n".join([cstr(d) for d in exception_list])
frappe.throw(exception_message)
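# Example call (illustrative): manage_recurring_documents("Sales Invoice",
# next_date="2015-01-01", commit=False) processes only recurring Sales Invoices
# whose next_date falls on 2015-01-01, without committing between documents.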
def make_new_document(ref_wrapper, date_field, posting_date):
from erpnext.accounts.utils import get_fiscal_year
new_document = frappe.copy_doc(ref_wrapper)
mcount = month_map[ref_wrapper.recurring_type]
from_date = get_next_date(ref_wrapper.from_date, mcount)
# get last day of the month to maintain period if the from date is first day of its own month
# and to date is the last day of its own month
if (cstr(get_first_day(ref_wrapper.from_date)) == \
cstr(ref_wrapper.from_date)) and \
(cstr(get_last_day(ref_wrapper.to_date)) == \
cstr(ref_wrapper.to_date)):
to_date = get_last_day(get_next_date(ref_wrapper.to_date,
mcount))
else:
to_date = get_next_date(ref_wrapper.to_date, mcount)
new_document.update({
date_field: posting_date,
"from_date": from_date,
"to_date": to_date,
"fiscal_year": get_fiscal_year(posting_date)[0],
"owner": ref_wrapper.owner,
})
if ref_wrapper.doctype == "Sales Order":
new_document.update({
"delivery_date": get_next_date(ref_wrapper.delivery_date, mcount,
cint(ref_wrapper.repeat_on_day_of_month))
})
new_document.submit()
return new_document
def get_next_date(dt, mcount, day=None):
dt = getdate(dt)
from dateutil.relativedelta import relativedelta
dt += relativedelta(months=mcount, day=day)
return dt
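# Example (illustrative): get_next_date("2015-01-31", 1) -> datetime.date(2015, 2, 28),
# since relativedelta clamps to the last valid day of the target month; passing
# day (the "Repeat on Day of Month" value) pins the result to that day instead.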
def send_notification(new_rv):
"""Notify concerned persons about recurring document generation"""
frappe.sendmail(new_rv.notification_email_address,
subject= _("New {0}: #{1}").format(new_rv.doctype, new_rv.name),
message = _("Please find attached {0} #{1}").format(new_rv.doctype, new_rv.name),
attachments = [frappe.attach_print(new_rv.doctype, new_rv.name, file_name=new_rv.name)])
def notify_errors(doc, doctype, party, owner):
from frappe.utils.user import get_system_managers
recipients = get_system_managers(only_name=True)
frappe.sendmail(recipients + [frappe.db.get_value("User", owner, "email")],
subject="[Urgent] Error while creating recurring %s for %s" % (doctype, doc),
message = frappe.get_template("templates/emails/recurring_document_failed.html").render({
"type": doctype,
"name": doc,
"party": party
}))
assign_task_to_owner(doc, doctype, "Recurring Invoice Failed", recipients)
def assign_task_to_owner(doc, doctype, msg, users):
	from frappe.desk.form import assign_to
	for d in users:
args = {
'assign_to' : d,
'doctype' : doctype,
'name' : doc,
'description' : msg,
'priority' : 'High'
}
assign_to.add(args)
def validate_recurring_document(doc):
if doc.is_recurring:
validate_notification_email_id(doc)
if not doc.recurring_type:
msgprint(_("Please select {0}").format(doc.meta.get_label("recurring_type")),
raise_exception=1)
elif not (doc.from_date and doc.to_date):
			throw(_("Period From and Period To dates are mandatory for recurring %s") % doc.doctype)
#
def convert_to_recurring(doc, posting_date):
if doc.is_recurring:
if not doc.recurring_id:
frappe.db.set(doc, "recurring_id", doc.name)
set_next_date(doc, posting_date)
elif doc.recurring_id:
frappe.db.sql("""update `tab%s` set is_recurring = 0
			where recurring_id = %s""" % (doc.doctype, '%s'), (doc.recurring_id,))
#
def validate_notification_email_id(doc):
if doc.notification_email_address:
email_list = filter(None, [cstr(email).strip() for email in
doc.notification_email_address.replace("\n", "").split(",")])
from frappe.utils import validate_email_add
for email in email_list:
if not validate_email_add(email):
throw(_("{0} is an invalid email address in 'Notification \
Email Address'").format(email))
else:
frappe.throw(_("'Notification Email Addresses' not specified for recurring %s") \
% doc.doctype)
def set_next_date(doc, posting_date):
""" Set next date on which recurring document will be created"""
if not doc.repeat_on_day_of_month:
msgprint(_("Please enter 'Repeat on Day of Month' field value"), raise_exception=1)
next_date = get_next_date(posting_date, month_map[doc.recurring_type],
cint(doc.repeat_on_day_of_month))
frappe.db.set(doc, 'next_date', next_date)
msgprint(_("Next Recurring {0} will be created on {1}").format(doc.doctype, next_date))
# license: agpl-3.0
# ==== matthijsvk/multimodalSR :: code/Experiments/neon-master/neon/transforms/cost.py ====
# ----------------------------------------------------------------------------
# Copyright 2014-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from __future__ import division
from builtins import str
from neon import NervanaObject
import numpy as np
from collections import Counter
from neon import logger as neon_logger
class Cost(NervanaObject):
"""
Base class for cost functions that are used during training.
Child classes can either implement the below `__call__` and `bprop` methods, or alternatively
define `self.func` and self.funcgrad`. The latter is typically used for code
compactness when the operations can be fit into a lambda function.
"""
def __call__(self, y, t):
"""
Applies the cost function
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
OpTree: Returns the cost
"""
return self.func(y, t)
def bprop(self, y, t):
"""
Computes the derivative of the cost function
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
OpTree: Returns the derivative of the cost function
"""
return self.funcgrad(y, t)
class CrossEntropyBinary(Cost):
"""
Binary cross-entropy cost.
The binary cross-entropy cost is used when the labels have two classes: 0 and 1.
The cost is computed as :math:`C = \sum -t\log(y)-(1-t)\log(1-y)`, where :math:`t` is
the target label and :math:`y` is the network output.
Note:
The backpropagation assumes that this cost is coupled with an output layer
that uses the Logistic() activation function. This allows for a shortcut in
        the derivative that saves computation.
"""
def __init__(self, scale=1):
"""
Args:
scale (float, optional): Amount by which to scale the backpropagated error (default: 1)
"""
self.scale = scale
def __call__(self, y, t):
"""
Returns the binary cross entropy cost.
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
OpTree: Returns the binary cross entropy cost
"""
assert y.shape == t.shape, "CrossEntropy requires network output shape to match targets"
return self.be.sum(self.be.safelog(1 - y) * (t - 1) - self.be.safelog(y) * t, axis=0)
def bprop(self, y, t):
"""
Returns the derivative of the binary cross entropy cost.
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
            OpTree: Returns the shortcut derivative of the binary cross
                entropy cost, ``scale * (y - t)`` (assumes a Logistic output layer)
"""
return self.scale * (y - t)
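# A minimal numpy sketch (not neon backend code) checking the shortcut noted
# above: for a Logistic output y = 1/(1+exp(-z)), d(BCE)/dz simplifies to
# (y - t), so bprop can skip the activation derivative entirely. Values are
# arbitrary test inputs.
def _check_bce_shortcut():
    import numpy as np
    z = np.array([-2.0, 0.5, 3.0])
    t = np.array([0.0, 1.0, 1.0])
    y = 1.0 / (1.0 + np.exp(-z))
    def bce(zz):
        yy = 1.0 / (1.0 + np.exp(-zz))
        return np.sum(-t * np.log(yy) - (1 - t) * np.log(1 - yy))
    eps = 1e-6
    # central-difference numeric gradient of the loss w.r.t. each z_i
    num = np.array([(bce(z + eps * e) - bce(z - eps * e)) / (2 * eps)
                    for e in np.eye(3)])
    assert np.allclose(num, y - t, atol=1e-4)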
class CrossEntropyMulti(Cost):
"""
Multi-class cross-entropy cost.
The multi-class cross-entropy cost is used when the labels have multiple classes.
The cost is computed as :math:`C = \sum -t*\log(y)`, where :math:`t` is
the target label and :math:`y` is the network output.
The target labels :math:`t` are expected to be in an one-hot encoding. By default,
the natural logarithm is used, but a cost that returns bits instead (e.g. log base 2)
can also be specified with the ``usebits`` argument.
Note:
        The back-propagation assumes that this cost is coupled with an output layer
        that uses the Softmax() activation function. This allows for a shortcut in
        the derivative that saves computation.
"""
def __init__(self, scale=1, usebits=False):
"""
Args:
scale (float, optional): scale factor for the backpropagated error (default: 1)
usebits (boolean, optional): Display costs in bits (default: False)
"""
super(CrossEntropyMulti, self).__init__()
self.usebits = usebits
self.scale = scale
self.logscale = np.float(1. / np.log(2.0) if usebits else 1.)
def __call__(self, y, t):
"""
Returns the multiclass cross entropy cost
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
OpTree: Returns the multiclass cross entropy cost
"""
if y.shape != t.shape:
raise ValueError((
"CrossEntropy requires network output shape to match "
"targets. Network output shape was {} and targets shape "
"was {}"
).format(y.shape, t.shape))
return (self.be.sum(-t * self.logscale * self.be.safelog(y), axis=0))
def bprop(self, y, t):
"""
Returns the derivative of the multiclass cross entropy cost.
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
            OpTree: Returns the shortcut derivative of the multiclass cross
                entropy cost, ``scale * (y - t)`` (assumes a Softmax output layer)
"""
return self.scale * (y - t)
class SumSquared(Cost):
"""
    Total Squared Error cost function. Computes :math:`\\frac{1}{2}\\sum_i (y_i-t_i)^2`.
"""
def __init__(self):
"""
Define the cost function and its gradient as lambda functions.
"""
self.func = lambda y, t: self.be.sum(self.be.square(y - t), axis=0) / 2.
self.funcgrad = lambda y, t: (y - t)
class MeanSquared(Cost):
"""
    Average Squared Error cost function. Computes :math:`\\frac{1}{2N}\\sum_i (y_i-t_i)^2`.
"""
def __init__(self):
"""
Define the cost function and its gradient as lambda functions.
"""
self.func = lambda y, t: self.be.mean(self.be.square(y - t), axis=0) / 2.
self.funcgrad = lambda y, t: (y - t) / y.shape[0]
class SmoothL1Loss(Cost):
"""
Smooth L1 cost function.
The L1 loss is less sensitive to outliers than the L2 loss.
See `Girshick 2015 <http://arxiv.org/pdf/1504.08083v2.pdf>`__. This
cost is used for training object localization models such as Fast-RCNN.
"""
def smoothL1(self, x):
"""
Returns the Smooth-L1 cost
"""
return (0.5 * self.be.square(x) * self._sigma2 * (self.be.absolute(x) < 1/self._sigma2) +
(self.be.absolute(x) - 0.5/self._sigma2) * (self.be.absolute(x) >= 1/self._sigma2))
def smoothL1grad(self, x):
"""
Returns the gradient of the Smooth-L1 cost.
"""
return (x * self._sigma2 * (self.be.absolute(x) < 1/self._sigma2) +
self.be.sgn(x) * (self.be.absolute(x) >= 1/self._sigma2))
def __init__(self, sigma=1.0):
"""
Define the cost function and its gradient as lambda functions.
"""
self.sigma = sigma
self._sigma2 = self.be.square(sigma)
self.func = lambda y, t: self.be.sum(self.smoothL1(y - t), axis=0)
self.funcgrad = lambda y, t: self.smoothL1grad(y - t)
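# A small numpy sketch (assumption: plain arrays instead of neon backend
# tensors) of the piecewise Huber-style loss implemented above: quadratic near
# zero, linear in the tails, so the gradient stays bounded for outliers.
def _smooth_l1_reference(x, sigma=1.0):
    import numpy as np
    s2 = sigma * sigma
    return np.where(np.abs(x) < 1.0 / s2,
                    0.5 * np.square(x) * s2,
                    np.abs(x) - 0.5 / s2)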
class SquareHingeLoss(Cost):
"""
Applies the square hinge loss cost function
"""
def squarehinge(self, y, t):
t = 2 * t - 1
return self.be.mean(self.be.square(self.be.maximum(self.margin - t * y, 0)), axis=0)
def squarehingegrad(self, y, t):
t = 2 * t - 1
return -2 * t * self.be.maximum(self.margin - t * y, 0)/float(y.shape[0])
def __init__(self, margin=1):
"""
Initialize the square hinge loss cost function
"""
self.margin = margin
self.func = lambda y, t: self.squarehinge(y, t)
self.funcgrad = lambda y, t: self.squarehingegrad(y, t)
class Metric(Cost):
"""
Base class for Metrics. Metrics are quantities not used during training
    for back-propagation, but are useful to compute and display to check
on progress.
For example, when training on image classification network,
we may want to use the Cross-entropy cost to train the weights, but display
the misclassification rate metric.
"""
def __call__(self, y, t):
"""
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
float: Returns the metric
"""
raise NotImplementedError()
class LogLoss(Metric):
"""
LogLoss metric.
    Computes :math:`-\\log\\left(\\sum y*t\\right)`.
"""
def __init__(self):
self.correctProbs = self.be.iobuf(1)
self.metric_names = ['LogLoss']
def __call__(self, y, t, calcrange=slice(0, None)):
"""
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
numpy array : Returns the log loss metric in numpy array,
[LogLoss]
"""
self.correctProbs[:] = self.be.sum(y * t, axis=0)
self.correctProbs[:] = -self.be.safelog(self.correctProbs)
return np.array(self.correctProbs.get()[:, calcrange].mean())
class TopKMisclassification(Metric):
"""
Multiple misclassification metrics.
Computes the LogLoss metric, the Top-1 Misclassification rate, and the Top-K
misclassification rate.
"""
def __init__(self, k):
"""
Arguments:
k (integer): Number of guesses to allow.
"""
self.correctProbs = self.be.iobuf(1)
self.top1 = self.be.iobuf(1)
self.topk = self.be.iobuf(1)
self.k = k
self.metric_names = ['LogLoss', 'Top1Misclass', 'Top' + str(k) + 'Misclass']
def __call__(self, y, t, calcrange=slice(0, None)):
"""
Returns a numpy array of metrics for: LogLoss, Top-1, and Top-K.
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
calcrange (slice, optional): Slice of data used for the metric (default: all)
Returns:
numpy array : Returns the metrics in a numpy array:
[LogLoss, Top 1 misclass, Top k misclass]
"""
be = self.be
self.correctProbs[:] = be.sum(y * t, axis=0)
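        # nSlots: guess slots left for the true class after every strictly
        # higher-scoring class takes one of the k guesses (<= 0 means a miss);
        # nEq: number of outputs tied at the true class's score, including
        # itself. If all ties fit in the remaining slots the sample is
        # correct (metric 0); otherwise it is charged 1 - nSlots/nEq,
        # i.e. fractional credit for a random tie-break.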
nSlots = self.k - be.sum((y > self.correctProbs), axis=0)
nEq = be.sum(y == self.correctProbs, axis=0)
self.topk[:] = 1. - (nSlots > 0) * ((nEq <= nSlots) * (1 - nSlots / nEq) + nSlots / nEq)
self.top1[:] = 1. - (be.max(y, axis=0) == self.correctProbs) / nEq
self.correctProbs[:] = -be.safelog(self.correctProbs)
return np.array((self.correctProbs.get()[:, calcrange].mean(),
self.top1.get()[:, calcrange].mean(),
self.topk.get()[:, calcrange].mean()))
class Misclassification(Metric):
"""
Misclassification error metric.
"""
def __init__(self, steps=1):
"""
Initialize the metric.
"""
self.preds = self.be.iobuf((1, steps), persist_values=False)
self.hyps = self.be.iobuf((1, steps), persist_values=False)
self.outputs = self.preds # Contains per record metric
self.metric_names = ['Top1Misclass']
def __call__(self, y, t, calcrange=slice(0, None)):
"""
Returns the misclassification error metric
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
float: Returns the metric
"""
# convert back from onehot and compare
self.preds[:] = self.be.argmax(y, axis=0)
self.hyps[:] = self.be.argmax(t, axis=0)
self.outputs[:] = self.be.not_equal(self.preds, self.hyps)
return self.outputs.get()[:, calcrange].mean()
class Accuracy(Metric):
"""
Accuracy metric (correct rate).
"""
def __init__(self):
self.preds = self.be.iobuf(1)
self.hyps = self.be.iobuf(1)
self.outputs = self.preds # Contains per record metric
self.metric_names = ['Accuracy']
def __call__(self, y, t, calcrange=slice(0, None)):
"""
Returns the accuracy.
Args:
y (Tensor or OpTree): Output of previous layer or model
t (Tensor or OpTree): True targets corresponding to y
Returns:
float: Returns the metric
"""
# convert back from onehot and compare
self.preds[:] = self.be.argmax(y, axis=0)
self.hyps[:] = self.be.argmax(t, axis=0)
self.outputs[:] = self.be.equal(self.preds, self.hyps)
return self.outputs.get()[:, calcrange].mean()
class PrecisionRecall(Metric):
"""
Precision and Recall metrics.
    Typically used in conjunction with a multi-class classification model.
"""
def __init__(self, num_classes, binarize=False, epsilon=1e-6):
"""
Arguments:
num_classes (int): Number of different output classes.
binarize (bool, optional): If True will attempt to convert the model
outputs to a one-hot encoding (in place).
Defaults to False.
epsilon (float, optional): Smoothing to apply to avoid division by zero.
Defaults to 1e-6.
"""
self.outputs = self.be.empty((num_classes, 2))
self.token_stats = self.be.empty((num_classes, 3))
self.metric_names = ['Precision', 'Recall']
if binarize:
self.bin_buf = self.be.iobuf(1, dtype=np.int32)
else:
self.bin_buf = None
self.eps = epsilon
def __call__(self, y, t, calcrange=slice(0, None)):
"""
Returns a numpy array with the precision and recall metrics.
Args:
y (Tensor or OpTree): Output of previous layer or model (we assume
already binarized, or you need to ensure
binarize is True during construction).
t (Tensor or OpTree): True targets corresponding to y (we assume
already binarized)
Returns:
ndarray: The class averaged precision (item 0) and recall (item
1) values. Per-class statistics remain in self.outputs.
"""
if self.bin_buf is not None:
self.be.argmax(y, axis=0, out=self.bin_buf)
y[:] = self.be.onehot(self.bin_buf, axis=0)
# True positives
self.token_stats[:, 0] = self.be.sum(y * t, axis=1)
# Prediction
self.token_stats[:, 1] = self.be.sum(y, axis=1)
# Targets
self.token_stats[:, 2] = self.be.sum(t, axis=1)
# Precision
self.outputs[:, 0] = self.token_stats[:, 0] / (self.token_stats[:, 1] + self.eps)
# Recall
self.outputs[:, 1] = self.token_stats[:, 0] / (self.token_stats[:, 2] + self.eps)
return self.outputs.get().mean(axis=0)
class ObjectDetection(Metric):
"""
The object detection metric includes object label accuracy, and
bounding box regression.
"""
def __init__(self):
self.metric_names = ['Accuracy', 'SmoothL1Loss']
self.label_ind = 0
self.bbox_ind = 1
def smoothL1(self, x):
"""
Returns the Smooth L1 cost.
"""
return (0.5 * self.be.square(x) * (self.be.absolute(x) < 1) +
(self.be.absolute(x) - 0.5) * (self.be.absolute(x) >= 1))
def __call__(self, y, t, calcrange=slice(0, None)):
"""
Returns a numpy array with the accuracy and the Smooth-L1 metrics.
Args:
y (Tensor or OpTree): Output of a model like Fast-RCNN model with 2 elements:
1. class label: (# classes, # batchsize for ROIs)
                2. object bounding box (# classes * 4, # batchsize for ROIs)
t (Tensor or OpTree): True targets corresponding to y, with 2 elements:
1. class labels: (# classes, # batchsize for ROIs)
1.1 class labels
(# classes, # batchsize for ROIs)
1.2 class labels mask
(# classes, # batchsize for ROIs)
2. object bounding box and mask, where mask will indicate the
real object to detect other than the background objects
2.1 object bounding box
                    (# classes * 4, # batchsize for ROIs)
2.2 object bounding box mask
                    (# classes * 4, # batchsize for ROIs)
Returns:
            numpy array : Returns the metrics in a numpy array [Label Accuracy, Bounding Box Smooth-L1]
"""
t_bb = t[self.bbox_ind][0]
t_bb_mask = t[self.bbox_ind][1]
y_bb = y[self.bbox_ind]
self.detectionMetric = self.be.empty((1, t_bb.shape[1]))
self.detectionMetric[:] = self.be.sum(self.smoothL1(y_bb * t_bb_mask - t_bb), axis=0)
if isinstance(t[self.label_ind], tuple):
t_lbl = t[self.label_ind][0] * t[self.label_ind][1]
y_lbl = y[self.label_ind] * t[self.label_ind][1]
else:
t_lbl = t[self.label_ind]
y_lbl = y[self.label_ind]
self.preds = self.be.empty((1, y_lbl.shape[1]))
self.hyps = self.be.empty((1, t_lbl.shape[1]))
self.labelMetric = self.be.empty((1, y_lbl.shape[1]))
self.preds[:] = self.be.argmax(y_lbl, axis=0)
self.hyps[:] = self.be.argmax(t_lbl, axis=0)
self.labelMetric[:] = self.be.equal(self.preds, self.hyps)
return np.array((self.labelMetric.get()[:, calcrange].mean(),
self.detectionMetric.get()[:, calcrange].mean()))
class BLEUScore(Metric):
"""
Compute BLEU score metric
"""
def __init__(self, unk='<unk>'):
self.metric_names = ['BLEU']
self.end_token = '.'
self.unk_symbol = unk
def __call__(self, y, t, N=4, brevity_penalty=False, lower_case=True):
"""
Args:
y (list): list of predicted sentences
t (list): list of reference sentences where each element is a list
of multiple references
N (int, optional): compute all ngram modified precisions up to this N
brevity_penalty (bool, optional): if True, use brevity penalty
lower_case (bool, optional): if True, convert all words to lower case
"""
y_list = list(y)
t_list = list(t)
if lower_case:
for ii, sent in enumerate(y_list):
y_list[ii] = sent.lower()
# convert all sentences to lists of words
for ii, sent in enumerate(y_list):
y_list[ii] = sent.strip(self.end_token).split()
for ii, refs in enumerate(t_list):
tmp = []
for ref in refs:
tmp += [ref.split()]
t_list[ii] = tmp
def ngram_counts(sentence, counts, N):
for n in range(1, N+1):
num = len(sentence) - n + 1 # number of n-grams
for jj in range(num):
ngram = ' '.join(sentence[jj:jj+n])
ngram = repr(n) + ' ' + ngram
counts[ngram] += 1
# compute ngram counts
totals = np.zeros(N) # ngram counts over all candidates
correct = np.zeros(N) # correct ngrams (compared to max over references)
len_translation, len_reference = (0, 0)
for ii, sent in enumerate(y_list):
counts_ref_max = Counter() # maximum ngram count over all references for an example
# count ngrams in candidate sentence
counts_cand = Counter()
ngram_counts(sent, counts_cand, N)
# process reference sentences
closest_diff, closest_len = (float("inf"), float("inf"))
for ref in t_list[ii]:
counts_ref = Counter()
# find closest length of reference sentence for current example
diff = abs(len(sent) - len(ref))
                if diff < closest_diff:
                    closest_diff = diff
                    closest_len = len(ref)
elif diff == closest_diff:
closest_len = min(closest_len, len(ref))
# compute all ngram counts up to specified n=N for this reference
ngram_counts(ref, counts_ref, N)
for ngram, count in counts_ref.items():
if counts_ref_max[ngram] < count:
counts_ref_max[ngram] = count
len_reference += closest_len
len_translation += len(sent)
for ngram, count in counts_cand.items():
n = int(ngram[0])
ind = n - 1
totals[ind] += count
# only match if there are no UNK
if ngram.find(self.unk_symbol) == -1:
r = counts_ref_max[ngram]
c = count if r >= count else r
correct[ind] += c
# calculate bleu scores
precision = correct/totals + 0.0000001
if (brevity_penalty and len_translation < len_reference):
bp = np.exp(1-float(len_reference)/len_translation)
else:
bp = 1.0
logprec = np.log(precision)
self.bleu_n = [100*bp*np.exp(sum(logprec[:nn+1])/(nn+1)) for nn in range(N)]
neon_logger.display("Bleu scores: " + " ".join([str(np.round(f, 2)) for f in self.bleu_n]))
return self.bleu_n[-1]
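# Standalone sketch of the modified n-gram precision computed above, in plain
# Python with no neon backend (names and inputs here are illustrative). The
# clipping of candidate counts against the per-reference maximum is what makes
# the precision "modified".
def _modified_ngram_precision(candidate, references, n):
    from collections import Counter
    def ngrams(words):
        return Counter(tuple(words[i:i + n]) for i in range(len(words) - n + 1))
    cand = ngrams(candidate)
    ref_max = Counter()
    for ref in references:
        for g, c in ngrams(ref).items():
            ref_max[g] = max(ref_max[g], c)
    clipped = sum(min(c, ref_max[g]) for g, c in cand.items())
    total = max(sum(cand.values()), 1)
    return clipped / float(total)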
# license: mit
# ==== voutilad/courtlistener :: cl/cleanup/scripts_archive/correct_citations_with_untitled_disposition_as_casename_182.py ====
import os
import sys
execfile('/etc/courtlistener')
sys.path.append(INSTALL_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.utils.text import slugify
from alert.search.models import Document
from alert.lib.string_utils import trunc
from optparse import OptionParser
def cleaner(simulate=False, verbose=False):
"""Fixes the titles of cases where the name is untitle disposition.
Basically, the algorithm here is to find all cases with the error, then
open each in Firefox one by one. After each case is opened, a prompt will
allow the case name to be typed in, and it will be corrected on the site.
These corrections will go live immediately, but will require a reindex to
be live in the search system.
"""
queryset = Document.search.query('@casename "unpublished disposition"')
docs = queryset.set_options(mode="SPH_MATCH_EXTENDED2").order_by('-date_filed')
if verbose:
print "%s results found." % (docs.count())
# Must slice here, or else only get top 20 results
for doc in docs[0:docs.count()]:
if doc.citation.caseNameFull.lower() == "unpublished disposition":
# Only do each case once, since the index isn't updated until
# later, and I may run this script many times.
print doc.download_url
casename = raw_input("Case name: ")
doc.citation.caseNameFull = casename
doc.citation.caseNameShort = trunc(casename, 100)
doc.citation.slug = trunc(slugify(casename), 50)
doc.precedential_status = "Unpublished"
if not simulate:
doc.citation.save()
doc.save()
print ""
def main():
usage = "usage: %prog [--verbose] [---simulate]"
parser = OptionParser(usage)
parser.add_option('-v', '--verbose', action="store_true", dest='verbose',
default=False, help="Display log during execution")
parser.add_option('-s', '--simulate', action="store_true",
dest='simulate', default=False, help="Simulate the corrections without " + \
"actually making them.")
(options, args) = parser.parse_args()
verbose = options.verbose
simulate = options.simulate
if simulate:
print "*******************************************"
print "* SIMULATE MODE - NO CHANGES WILL BE MADE *"
print "*******************************************"
return cleaner(simulate, verbose)
if __name__ == '__main__':
main()
# license: agpl-3.0
# ==== unicefuganda/rapidsms-bednets :: bednets/spreadsheets_utils.py ====
import xlrd
def is_empty(arg):
if arg is None:
return True
if isinstance(arg, basestring):
arg = arg.strip()
try:
if not len(arg):
return True
except TypeError:
# wasn't a sequence
pass
return False
class XlsParser(object):
def parse(self, xls_contents):
assert xls_contents is not None
workbook = xlrd.open_workbook(file_contents=xls_contents)
worksheet = workbook.sheets()[0]
header_found = False
header = None
parsedData = []
for row_num in range(worksheet.nrows):
row = worksheet.row_values(row_num)
if not header_found:
header, header_found = self._is_header_row(row)
continue
if self._is_empty(row):
continue
row = self._clean(row)
parsedData.append(dict(zip(header, row)))
if not header_found:
raise XlsParsingException()
return parsedData
def _remove_trailing_empty_header_field(self, field_header):
for field in field_header[::-1]:
if is_empty(field):
field_header.pop()
else:
break
def _is_header_row(self, row):
if is_empty(row[0]):
return None, False
self._remove_trailing_empty_header_field(row)
return [unicode(value).strip().lower() for value in row], True
def _clean(self, row):
return [unicode(value).strip() for value in row]
def _is_empty(self, row):
return len([value for value in row if not is_empty(value)]) == 0
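# Illustrative usage (the file name is hypothetical): the parser takes the raw
# bytes of an .xls workbook and returns one dict per data row, keyed by the
# lower-cased header labels from the first non-empty header row.
def _example_parse(path="registrations.xls"):
    with open(path, "rb") as f:
        return XlsParser().parse(f.read())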
class XlsParsingException(Exception):
    pass
# license: bsd-3-clause
# ==== bjanesh/odi-tools :: .bpms/bpm_reg_class.py ====
import numpy as np
from astropy.io import fits
import matplotlib.pyplot as plt
from pyraf import iraf
class reg_to_bpm(object):
def __init__(self, reg_file, img_file):
self.reg_file = reg_file
self.img_file = img_file
self.ota = 'OTA'+self.reg_file.strip('bpm_xy, .reg')+'.SCI'
self.mask_name = 'bpm_ota'+self.ota+'.fits'
try:
hdu_list = fits.open(self.img_file)
hdu_ota = hdu_list[self.ota]
self.empty = np.zeros_like(hdu_ota.data)
hdu_list.close()
except KeyError:
hdu_list = fits.open(self.img_file)
hdu_ota = hdu_list['OTA33.SCI']
self.empty = np.zeros_like(hdu_ota.data)
hdu_list.close()
def parse_box_reg(self):
box_x = []
box_y = []
box_dx = []
box_dy = []
with open(self.reg_file) as reg:
for line in reg:
if line.startswith('box'):
reg_line = line[4:-1]
reg_info = reg_line.strip(')').split(',')
box_x.append(float(reg_info[0]))
box_y.append(float(reg_info[1]))
box_dx.append(float(reg_info[2]))
box_dy.append(float(reg_info[3]))
self.box_x = box_x
self.box_y = box_y
self.box_dx = box_dx
self.box_dy = box_dy
return box_x,box_y,box_dx,box_dy
def mask_reg(self,x,y,dx,dy):
img_array = self.empty
x1,x2 = (x-1) - 0.5*dx, x + 0.5*dx
x1 = x1 - 1
if x1 < 0:
x1 = 0
y1,y2 = (y-1) - 0.5*dy, y + 0.5*dy
if y1 < 0:
y1 = 0
img_array[int(y1):int(y2),int(x1):int(x2)] = 1.0
return img_array
def reg_mask_ota(self):
print self.reg_file
box_x,box_y,box_dx,box_dy = self.parse_box_reg()
for i,reg in enumerate(box_x):
ota_reg_mask = self.mask_reg(self.box_x[i],
self.box_y[i],
self.box_dx[i],
self.box_dy[i])
hdu = fits.PrimaryHDU(ota_reg_mask.astype(float))
mask_name = self.mask_name
hdu.writeto(mask_name,clobber=True)
iraf.unlearn(iraf.imutil.imcopy)
iraf.imutil.imcopy.setParam('input',mask_name)
iraf.imutil.imcopy.setParam('output',mask_name.replace('fits','pl'))
iraf.imutil.imcopy.setParam('verbose','no')
iraf.imutil.imcopy(mode='h')
return ota_reg_mask
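# Illustrative usage (file names are hypothetical): a region file named
# 'bpm_xy33.reg' strips down to '33', so the class targets extension
# 'OTA33.SCI' of the reference image and writes the bad pixel mask to
# FITS/pixel-list files as a side effect.
def _example_build_mask():
    bpm = reg_to_bpm('bpm_xy33.reg', 'stacked_img.fits')
    return bpm.reg_mask_ota()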
# license: bsd-3-clause
# ==== prakhar0402/morph3D :: shape.py ====
# MAD Lab, University at Buffalo
# Copyright (C) 2018 Prakhar Jaiswal <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import numpy as np
from numpy import *
import subprocess
import binvox_rw
import fftw3f
class Shape:
'''
The Shape class include the properties and functions to store 3D raterized
(voxel) model and perform operations on it
'''
def __init__(self):
'''
Initializes some parameters for the voxel model
'''
# The voxel data (to be stored as 3D numpy array of 0's and 1's
self.voxel = array([])
# Fourier transform of the voxel data
self.voxel_ft = array([])
self.visible = True
# the actual resolution of the shape taking scale into consideration
self.size = 64
# resolution input by the user
self.resolution = 64
self.scale = 1
self.filename = ""
def read_voxel(self):
'''
Reads in a triangulated 3D model file (.obj, .stl, etc.), rasterizes
it using 'binvox', and saves the data as 3D numpy array of 0's and 1's
'''
if len(self.filename) != 0:
binvox = self.filename[:self.filename.rfind('.')] + '.binvox'
if not os.path.isfile(binvox):
subprocess.call("./binvox -d "+ str(self.size) +
" " + self.filename, shell = True)
fid = open(binvox, 'r')
model = binvox_rw.read_as_3d_array(fid)
if model.dims[0] != self.size:
os.remove(binvox)
subprocess.call("./binvox -d "+ str(self.size) +
" " + self.filename, shell = True)
fid = open(binvox, 'r')
model = binvox_rw.read_as_3d_array(fid)
self.voxel = 1*model.data
if self.scale != 1:
self.pad_voxel([self.resolution] * 3)
def write_voxel(self, filename):
'''
Write the voxel model data into a .binvox file
'''
if len(filename) != 0 and not self.isempty():
fp = open(filename, 'w')
data = self.voxel > 0
dims = list(self.get_voxel_shape())
translate = [0.0, 0.0, 0.0]
scale = 1.0
axis_order = 'xyz'
model = binvox_rw.Voxels(data, dims, translate, scale, axis_order)
binvox_rw.write(model, fp)
def set_voxel(self, voxel):
'''
Sets the voxel field to voxel and updates the resolution
'''
self.voxel = voxel
self.set_resolution(self.get_voxel_shape()[0])
def set_voxel_ft(self, voxel_ft):
'''
Sets the voxel_ft field to voxel_ft
'''
self.voxel_ft = voxel_ft
def set_size(self):
'''
Computes and sets the size at which the shape has to be rasterized
Takes into consideration scale and resolution defined by the user
'''
self.size = max(1, int(self.scale * self.resolution))
def set_resolution(self, resolution):
'''
Sets the resolution and updates the size field
'''
self.resolution = resolution
self.set_size()
def set_scale(self, scale):
'''
Sets the scale and updates the size field
'''
self.scale = scale
self.set_size()
def set_filename(self, filename):
self.filename = filename
def set_visibility(self, flag = True):
self.visible = flag
def toggle_visibility(self):
self.visible = not self.visible
def get_voxel(self):
return self.voxel
def get_voxel_ft(self):
return self.voxel_ft
def get_size(self):
return self.size
def get_scale(self):
return self.scale
def get_filename(self):
return self.filename
def get_voxel_shape(self):
return array(self.voxel.shape)
def isempty(self):
'''
Returns if the voxel field is empty
'''
return self.voxel.size == 0
def get_sublevel_set(self, level):
'''
Returns the sublevel set of the voxel model at level
        The output is a 3D numpy array with 1's in all cells where the voxel field
        has a value larger than 99.99% of level and 0's elsewhere
99.99% is used to account for any precision error
'''
return 1 * (self.voxel > 0.9999*level)
def get_volume(self):
'''
Returns the volume or the number of high cells in the voxel model
'''
sublevel_set = self.get_sublevel_set(1)
return np.count_nonzero(sublevel_set)
def pad_voxel(self, dims):
'''
Pads the voxel field with zero to enflate the size upto 'dims'
The actual data is centered in the 3D array of size 'dims'
'''
sz = self.get_voxel_shape()
voxel = self.voxel
self.voxel = zeros(dims, dtype = 'f')
sid = (dims - sz)/2
eid = sid + sz
self.voxel[sid[0]:eid[0], sid[1]:eid[1], sid[2]:eid[2]] = voxel
def fourier_transform(self):
'''
Computes the Fast Fourier Transform of the rasterized 3D model
'''
voxel = self.voxel.astype('f')
self.voxel_ft = voxel.astype('F')
trans = fftw3f.Plan(voxel, self.voxel_ft, direction='forward')
trans()
def inverse_fourier_transform(self):
'''
Computes Inverse Fourier Transform to get back the rasterized 3D model
'''
if self.voxel_ft.size == 0:
pass
else:
self.voxel = zeros(self.voxel_ft.shape, dtype = 'f')
trans = fftw3f.Plan(self.voxel_ft, self.voxel, direction='backward')
trans()
def normalize(self):
'''
        Normalizes the Inverse Fourier Transform
'''
self.voxel /= prod(self.voxel.shape)
self.voxel = fft.fftshift(self.voxel)
def display(self, mlab, contours = [1], color = (1, 0, 0), opacity = 0.5):
'''
Displays the voxel model if the visible flag is set to true and the
voxel field is not empty
'''
if self.visible and not self.isempty():
mlab.contour3d(self.voxel, contours = contours,
color = color, opacity = opacity)
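# A minimal numpy sketch (numpy.fft stands in for the fftw3f plans used by the
# class) of what the stored transforms enable: the cross-correlation of two
# shapes' indicator functions, whose value at each offset is the overlap
# volume at that relative translation. The usage below is hypothetical, not
# part of the class API.
def _example_fft_overlap(shape_a, shape_b):
    A = np.fft.fftn(shape_a.get_voxel())
    B = np.fft.fftn(shape_b.get_voxel())
    # conj(A) * B yields cross-correlation; A * B would yield convolution
    overlap = np.fft.fftshift(np.fft.ifftn(np.conj(A) * B).real)
    return overlap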
# license: gpl-3.0
# ==== allanino/nupic :: nupic/engine/__init__.py ====
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import sys
import nupic.bindings.engine_internal as engine
from nupic.support.lockattributes import LockAttributesMixin
import functools
basicTypes = ['Byte', 'Int16', 'UInt16', 'Int32', 'UInt32', 'Int64', 'UInt64', 'Real32', 'Real64', 'Handle']
pyRegions = (("nupic.regions.AnomalyRegion", "AnomalyRegion"),
("nupic.regions.CLAClassifierRegion", "CLAClassifierRegion"),
("nupic.regions.ImageSensor", "ImageSensor"),
("nupic.regions.KNNAnomalyClassifierRegion", "KNNAnomalyClassifierRegion"),
("nupic.regions.KNNClassifierRegion", "KNNClassifierRegion"),
("nupic.regions.PyRegion", "PyRegion"),
("nupic.regions.RecordSensor", "RecordSensor"),
("nupic.regions.SPRegion", "SPRegion"),
("nupic.regions.SVMClassifierNode", "SVMClassifierNode"),
("nupic.regions.TPRegion", "TPRegion"),
("nupic.regions.TestNode", "TestNode"),
("nupic.regions.TestRegion", "TestRegion"),
("nupic.regions.UnimportableNode", "UnimportableNode"),
("nupic.regions.extra.GaborNode2", "GaborNode2"))
registeredRegions = False
def registerBuiltInRegions():
global registeredRegions
# Initialize nupic regions
if not registeredRegions:
for module, className in pyRegions:
engine.Network.registerPyRegion(module, className)
registeredRegions = True
registerBuiltInRegions()
# Import all the array types from engine (there is no HandleArray)
arrayTypes = [t + 'Array' for t in basicTypes[:-1]]
for a in arrayTypes:
exec('from %s import %s as %s' % (engine.__name__, a, a))
# Intercept the default exception handling for the purposes of stripping
# parts of the stack trace that can confuse users. If you want the original
# stack trace define this environment variable
if not 'NTA_STANDARD_PYTHON_UNHANDLED_EXCEPTIONS' in os.environ:
import traceback
import cStringIO
def customExceptionHandler(type, value, tb):
"""Catch unhandled Python exception
    The handler prints the original exception info, including the traceback,
    into a string buffer. It then extracts the original error message (when
    the exception is raised inside a Py node, additional stacktrace info will
    be appended at the end), strips engine frames from the traceback, and
    prints the cleaned-up traceback with the error message to the screen.
"""
# Print the exception info to a string IO buffer for manipulation
buff = cStringIO.StringIO()
traceback.print_exception(type, value, tb, file=buff)
text = buff.getvalue()
    # get the lines, skipping the first one: "Traceback (most recent call last)"
lines = text.split('\n')[1:]
#
# Extract the error message
begin = 0
end = len(lines)
for i, line in enumerate(lines):
if line.startswith('RuntimeError:'):
begin = i
#
# elif line.startswith('Traceback (most recent call last):'):
# end = i
# break
#
message = '\n'.join(lines[begin:end])
    message = message[len('RuntimeError:'):]
#stacktrace = lines[end:]
# Get the stack trace if available (default to empty string)
stacktrace = getattr(value, 'stackTrace', '')
# Remove engine from stack trace
lines = [x for x in lines if 'engine' not in x]
failMessage = 'The program failed with the following error message:'
dashes = '-' * len(failMessage)
print
print dashes
print 'Traceback (most recent call last):'
print '\n'.join(lines[:begin-2])
if stacktrace:
print stacktrace
print dashes
print 'The program failed with the following error message:'
print dashes
print message
print
#sys.excepthook = customExceptionHandler
# Expose the timer class directly
# Do it this way instead of bringing engine.Timer
# into the namespace to avoid engine
# in the class name
class Timer(engine.Timer):
pass
# Expose the os class directly
# The only wrapped method is getProcessMemoryUsage()
class OS(engine.OS):
pass
class Dimensions(engine.Dimensions):
"""Represent the topology of an N-dimensional region
Basically, it is a list of integers such as: [4, 8, 6]
In this example the topology is a 3 dimensional region with
4 x 8 x 6 nodes.
You can initialize it with a list of dimensions or with no arguments
and then append dimensions.
"""
def __init__(self, *args):
"""Construct a Dimensions object
The constructor can be called with no arguments or with a list
of integers
"""
# Init the base class
engine.Dimensions.__init__(self, *args)
def __str__(self):
return self.toString()
def Array(dtype, size=None, ref=False):
"""Factory function that creates typed Array or ArrayRef objects
dtype - the data type of the array (as string).
Supported types are: Byte, Int16, UInt16, Int32, UInt32, Int64, UInt64, Real32, Real64
size - the size of the array. Must be positive integer.
"""
def getArrayType(self):
"""A little function to replace the getType() method of arrays
It returns a string representation of the array element type instead of the
integer value (NTA_BasicType enum) returned by the origianl array
"""
return self._dtype
# ArrayRef can't be allocated
if ref:
assert size is None
  # list.index() raises ValueError rather than returning -1, so check
  # membership explicitly before looking up the index
  if dtype not in basicTypes:
    raise Exception('Invalid data type: ' + dtype)
  index = basicTypes.index(dtype)
if size and size <= 0:
raise Exception('Array size must be positive')
suffix = 'ArrayRef' if ref else 'Array'
arrayFactory = getattr(engine, dtype + suffix)
arrayFactory.getType = getArrayType
if size:
a = arrayFactory(size)
else:
a = arrayFactory()
a._dtype = basicTypes[index]
return a
def ArrayRef(dtype):
return Array(dtype, None, True)
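# Illustrative sketch: Array allocates a typed engine array that behaves like
# a Python sequence, while ArrayRef is an unallocated view that the engine
# fills in later; both report their element type via getType(). 'Real32' is
# just one of the basicTypes listed above.
def _exampleArrays():
  a = Array('Real32', 4)
  for i in range(len(a)):
    a[i] = i * 0.5
  ref = ArrayRef('Real32')  # refs take no size argument
  return a.getType(), ref.getType()  # both return 'Real32'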
class CollectionIterator(object):
def __init__(self, collection):
self.collection = collection
self.index = 0
def next(self):
index = self.index
if index == self.collection.getCount():
raise StopIteration
self.index += 1
return self.collection.getByIndex(index)[0]
class CollectionWrapper(object):
"""Wrap an nupic::Collection with a dict-like interface
The optional valueWrapper is used to wrap values for adaptation purposes.
Maintains the original documentation
collection - the original collection
valueWrapper - an optional callable object used to wrap values.
"""
def IdentityWrapper(o):
return o
def __init__(self, collection, valueWrapper=IdentityWrapper):
self.collection = collection
self.valueWrapper = valueWrapper
    self.__class__.__doc__ = collection.__class__.__doc__
def __iter__(self):
return CollectionIterator(self.collection)
def __str__(self):
return str(self.collection)
def __repr__(self):
return repr(self.collection)
def __len__(self):
return self.collection.getCount()
def __getitem__(self, key):
if not self.collection.contains(key):
raise KeyError('Key ' + key + ' not found')
value = self.collection.getByName(key)
value = self.valueWrapper(key, value)
return value
def get(self, key, default=None):
try:
return self.__getitem__(key)
except KeyError:
return default
def __contains__(self, key):
return self.collection.contains(key)
def keys(self):
keys = set()
for i in range(self.collection.getCount()):
keys.add(self.collection.getByIndex(i)[0])
return keys
def values(self):
values = set()
for i in range(self.collection.getCount()):
p = self.collection.getByIndex(i)
values.add(self.valueWrapper(p[0], p[1]))
return values
def items(self):
items = set()
for i in range(self.collection.getCount()):
p = self.collection.getByIndex(i)
items.add((p[0], self.valueWrapper(p[0], p[1])))
return items
def __cmp__(self, other):
return self.collection == other.collection
def __hash__(self):
return hash(self.collection)
class SpecItem(object):
"""Wrapper that translates the data type and access code to a string
The original values are an enumerated type in C++ that become
just integers in Python. This class wraps the original ParameterSpec
  and translates the integer values to meaningful strings that correspond to the C++ enum labels.
It is used to wrap ParameterSpec, InputSpec and OutputSpec
"""
accessModes = ['Create', 'ReadOnly', 'ReadWrite']
def __init__(self, name, item):
self.name = name
self.item = item
    self.__class__.__doc__ = item.__class__.__doc__
# Translate data type to string representation
self.dataType = basicTypes[item.dataType]
# Translate access mode to string representation
if hasattr(item, 'accessMode'): # ParameterSpec only
self.accessMode = SpecItem.accessModes[item.accessMode]
def __getattr__(self, name):
return getattr(self.item, name)
def __str__(self):
d = dict(name=self.name,
description=self.description,
dataType=self.dataType,
count=self.count)
    if hasattr(self.item, 'accessMode'):  # ParameterSpec only
      d['accessMode'] = self.accessMode
if hasattr(self.item, 'constraints'): # ParameterSpec only
d['constraints'] = self.constraints
if hasattr(self.item, 'defaultValue'): # ParameterSpec only
d['defaultValue'] = self.defaultValue
return str(d)
class Spec(object):
def __init__(self, spec):
self.spec = spec
    self.__class__.__doc__ = spec.__class__.__doc__
self.description = spec.description
self.singleNodeOnly = spec.singleNodeOnly
self.inputs = CollectionWrapper(spec.inputs, SpecItem)
self.outputs = CollectionWrapper(spec.outputs, SpecItem)
self.parameters = CollectionWrapper(spec.parameters, SpecItem)
self.commands = CollectionWrapper(spec.commands)
def __str__(self):
return self.spec.toString()
def __repr__(self):
return self.spec.toString()
class _ArrayParameterHelper:
"""This class is used by Region._getParameterMethods"""
def __init__(self, region, datatype):
self._region = region
self.datatype = basicTypes[datatype]
def getParameterArray(self, paramName):
# return a PyArray instead of a plain array.
# PyArray constructor/class for type X is called XArray()
#factoryName = self.datatype + 'Array'
#if factoryName not in globals():
# import exceptions
# raise exceptions.Exception("Internal error -- did not find %s constructor in engine" % factoryName)
#
#arrayFactory = globals()[factoryName]
#a = arrayFactory();
a = Array(self.datatype)
self._region.getParameterArray(paramName, a)
return a
class Region(LockAttributesMixin):
"""
@doc:place_holder(Region.description)
"""
#Wrapper for a network region
#- Maintains original documentation
#- Implement syntactic sugar properties:
#name = property(getName)
#type = property(getType)
#spec = property(getSpec)
#dimensions = property(getDimensions, setDimensions)
#network = property(getNetwork)
#- Makes sure that returned objects are high-level wrapper objects
#- Forwards everything else to internal region
def __init__(self, region, network):
"""Store the wraped region and hosting network
The network is the high-level Network and not the internal
Network. This is important in case the user requests the network
from the region (never leak a engine object, remember)
"""
self._network = network
self._region = region
    self.__class__.__doc__ = region.__class__.__doc__
# A cache for typed get/setPArameter() calls
self._paramTypeCache = {}
def __getattr__(self, name):
if not '_region' in self.__dict__:
raise AttributeError
return getattr(self._region, name)
def __setattr__(self, name, value):
if name in ('_region', '__class__', '_network'):
self.__dict__[name] = value
elif name == 'dimensions':
self.setDimensions(value)
else:
setattr(self._region, name, value)
@staticmethod
def getSpecFromType(nodeType):
"""
@doc:place_holder(Region.getSpecFromType)
"""
return Spec(engine.Region.getSpecFromType(nodeType))
def compute(self):
"""
@doc:place_holder(Region.compute)
** This line comes from the original docstring (not generated by Documentor)
"""
return self._region.compute()
def getInputData(self, inputName):
"""
@doc:place_holder(Region.getInputData)
"""
return self._region.getInputArray(inputName)
def getOutputData(self, outputName):
"""
@doc:place_holder(Region.getOutputData)
"""
return self._region.getOutputArray(outputName)
def executeCommand(self, args):
"""
@doc:place_holder(Region.executeCommand)
"""
return self._region.executeCommand(args)
def _getSpec(self):
"""Spec of the region"""
return Spec(self._region.getSpec())
def _getDimensions(self):
"""Dimensions of the region"""
return Dimensions(tuple(self._region.getDimensions()))
def _getNetwork(self):
"""Network for the region"""
return self._network
def __hash__(self):
"""Hash a region"""
return self._region.__hash__()
def __cmp__(self, other):
"""Compare regions"""
return self._region == other._region
def _getParameterMethods(self, paramName):
"""Returns functions to set/get the parameter. These are
the strongly typed functions get/setParameterUInt32, etc.
The return value is a pair:
setfunc, getfunc
If the parameter is not available on this region, setfunc/getfunc
are None. """
if paramName in self._paramTypeCache:
return self._paramTypeCache[paramName]
try:
# Catch the error here. We will re-throw in getParameter or
# setParameter with a better error message than we could generate here
paramSpec = self.getSpec().parameters.getByName(paramName)
except:
return (None, None)
dataType = paramSpec.dataType
dataTypeName = basicTypes[dataType]
count = paramSpec.count
if count == 1:
# Dynamically generate the proper typed get/setParameter<dataType>
x = 'etParameter' + dataTypeName
try:
g = getattr(self, 'g' + x) # get the typed getParameter method
s = getattr(self, 's' + x) # get the typed setParameter method
except AttributeError:
raise Exception("Internal error: unknown parameter type %s" % dataTypeName)
info = (s, g)
else:
if dataTypeName == "Byte":
info = (self.setParameterString, self.getParameterString)
else:
helper = _ArrayParameterHelper(self, dataType)
info = (self.setParameterArray, helper.getParameterArray)
self._paramTypeCache[paramName] = info
return info
def getParameter(self, paramName):
"""Get parameter value"""
(setter, getter) = self._getParameterMethods(paramName)
if getter is None:
import exceptions
raise exceptions.Exception("getParameter -- parameter name '%s' does not exist in region %s of type %s" %
(paramName, self.name, self.type))
return getter(paramName)
def setParameter(self, paramName, value):
"""Set parameter value"""
(setter, getter) = self._getParameterMethods(paramName)
if setter is None:
import exceptions
raise exceptions.Exception("setParameter -- parameter name '%s' does not exist in region %s of type %s" %
(paramName, self.name, self.type))
setter(paramName, value)
def _get(self, method):
"""Auto forwarding of properties to get methods of internal region"""
return getattr(self._region, method)()
network = property(_getNetwork,
doc='@property:place_holder(Region.getNetwork)')
name = property(functools.partial(_get, method='getName'),
doc="@property:place_holder(Region.getName)")
type = property(functools.partial(_get, method='getType'),
doc='@property:place_holder(Region.getType)')
spec = property(_getSpec,
doc='@property:place_holder(Region.getSpec)')
dimensions = property(_getDimensions,
engine.Region.setDimensions,
doc='@property:place_holder(Region.getDimensions)')
computeTimer = property(functools.partial(_get, method='getComputeTimer'),
doc='@property:place_holder(Region.getComputeTimer)')
executeTimer = property(functools.partial(_get, method='getExecuteTimer'),
doc='@property:place_holder(Region.getExecuteTimer)')
class Network(engine.Network):
"""
@doc:place_holder(Network.description)
"""
def __init__(self, *args):
"""Constructor
- Initialize the internal engine.Network class generated by Swig
- Attach docstrings to selected methods
"""
# Init engine.Network class
engine.Network.__init__(self, *args)
# Prepare documentation table.
# Each item is pair of method/property, docstring
# The docstring is attached later to the method or property.
# The key for method items is the method object of the engine.Network class.
# The key for properties is the property name
docTable = (
(engine.Network.getRegions, 'Get the collection of regions in a network'),
)
# Attach documentation to methods and properties
for obj, docString in docTable:
if isinstance(obj, str):
prop = getattr(Network, obj)
assert isinstance(prop, property)
setattr(Network, obj, property(prop.fget, prop.fset, prop.fdel, docString))
else:
obj.im_func.__doc__ = docString
def _getRegions(self):
"""Get the collection of regions in a network
    This is a tricky one. The collection of regions returned
    from the internal network is a collection of internal regions.
    The desired collection is a collection of net.Region objects
    that also point to this network (net.network) and not to
    the internal network. To achieve that, a CollectionWrapper
    class is used with a custom makeRegion() function (see below)
as a value wrapper. The CollectionWrapper class wraps each value in the
original collection with the result of the valueWrapper.
"""
def makeRegion(name, r):
"""Wrap a engine region with a nupic.engine.Region
Also passes the containing nupic.engine.Network network in _network. This
function is passed a value wrapper to the CollectionWrapper
"""
r = Region(r, self)
#r._network = self
return r
regions = CollectionWrapper(engine.Network.getRegions(self), makeRegion)
return regions
def addRegion(self, name, nodeType, nodeParams):
"""
@doc:place_holder(Network.addRegion)
"""
engine.Network.addRegion(self, name, nodeType, nodeParams)
return self._getRegions()[name]
def addRegionFromBundle(self, name, nodeType, dimensions, bundlePath, label):
"""
@doc:place_holder(Network.addRegionFromBundle)
"""
engine.Network.addRegionFromBundle(self,
name,
nodeType,
dimensions,
bundlePath,
label)
return self._getRegions()[name]
def setPhases(self, name, phases):
"""
@doc:place_holder(Network.setPhases)
"""
phases = engine.UInt32Set(phases)
engine.Network.setPhases(self, name, phases)
def run(self, n):
"""
@doc:place_holder(Network.run)
"""
#Just forward to the internal network
#This is needed for inspectors to work properly because they wrap some key
#methods such as 'run'.
engine.Network.run(self, n)
def disableProfiling(self, *args, **kwargs):
"""
@doc:place_holder(Network.disableProfiling)
"""
engine.Network.disableProfiling(self, *args, **kwargs)
def enableProfiling(self, *args, **kwargs):
"""
@doc:place_holder(Network.enableProfiling)
"""
engine.Network.enableProfiling(self, *args, **kwargs)
def getCallbacks(self, *args, **kwargs):
"""
@doc:place_holder(Network.getCallbacks)
"""
engine.Network.getCallbacks(self, *args, **kwargs)
def initialize(self, *args, **kwargs):
"""
@doc:place_holder(Network.initialize)
"""
engine.Network.initialize(self, *args, **kwargs)
def link(self, *args, **kwargs):
"""
@doc:place_holder(Network.link)
"""
engine.Network.link(self, *args, **kwargs)
def removeLink(self, *args, **kwargs):
"""
@doc:place_holder(Network.removeLink)
"""
engine.Network.removeLink(self, *args, **kwargs)
def removeRegion(self, *args, **kwargs):
"""
@doc:place_holder(Network.removeRegion)
"""
engine.Network.removeRegion(self, *args, **kwargs)
def resetProfiling(self, *args, **kwargs):
"""
@doc:place_holder(Network.resetProfiling)
"""
engine.Network.resetProfiling(self, *args, **kwargs)
def save(self, *args, **kwargs):
"""
@doc:place_holder(Network.save)
"""
engine.Network.save(self, *args, **kwargs)
def inspect(self):
"""Launch a GUI inpector to inspect the network"""
from nupic.analysis import inspect
inspect(self)
@staticmethod
def registerRegion(regionClass):
"""
Adds the module and class name for the region to the list of classes the network can use
regionClass: a pointer to a subclass of PyRegion
"""
engine.Network.registerPyRegion(regionClass.__module__, regionClass.__name__)
@staticmethod
def unregisterRegion(regionName):
"""
Unregisters a region from the internal list of regions
:param str regionName: The name of the region to unregister
(ex: regionName=regionClass.__name__)
"""
engine.Network.unregisterPyRegion(regionName)
# Syntactic sugar properties
regions = property(_getRegions, doc='@property:place_holder(Network.getRegions)')
minPhase = property(engine.Network.getMinPhase, doc='@property:place_holder(Network.getMinPhase)')
maxPhase = property(engine.Network.getMaxPhase, doc='@property:place_holder(Network.getMaxPhase)')
minEnabledPhase = property(engine.Network.getMinEnabledPhase, engine.Network.setMinEnabledPhase, doc='@property:place_holder(Network.getMinEnabledPhase)')
maxEnabledPhase = property(engine.Network.getMaxEnabledPhase, engine.Network.setMaxEnabledPhase, doc='@property:place_holder(Network.getMaxEnabledPhase)')
if __name__=='__main__':
n = Network()
print n.regions
print len(n.regions)
print Network.regions.__doc__
d = Dimensions([3, 4, 5])
print len(d)
print d
a = Array('Byte', 5)
print len(a)
for i in range(len(a)):
a[i] = ord('A') + i
for i in range(len(a)):
print a[i]
r = n.addRegion('r', 'TestNode', '')
print 'name:', r.name
print 'node type:', r.type
print 'node spec:', r.spec
# license: agpl-3.0
# ==== uphold/bitcoin :: qa/rpc-tests/test_framework/address.py ====
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Encode and decode BASE58, P2PKH and P2SH addresses."""
from .script import hash256, hash160, sha256, CScript, OP_0
from .util import bytes_to_hex_str, hex_str_to_bytes
chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
def byte_to_base58(b, version):
result = ''
str = bytes_to_hex_str(b)
str = bytes_to_hex_str(chr(version).encode('latin-1')) + str
checksum = bytes_to_hex_str(hash256(hex_str_to_bytes(str)))
str += checksum[:8]
value = int('0x'+str,0)
while value > 0:
result = chars[value % 58] + result
value //= 58
while (str[:2] == '00'):
result = chars[0] + result
str = str[2:]
return result
# TODO: def base58_decode
def keyhash_to_p2pkh(hash, main = False):
assert (len(hash) == 20)
version = 0 if main else 111
return byte_to_base58(hash, version)
def scripthash_to_p2sh(hash, main = False):
assert (len(hash) == 20)
version = 5 if main else 196
return byte_to_base58(hash, version)
def key_to_p2pkh(key, main = False):
key = check_key(key)
return keyhash_to_p2pkh(hash160(key), main)
def script_to_p2sh(script, main = False):
script = check_script(script)
return scripthash_to_p2sh(hash160(script), main)
def key_to_p2sh_p2wpkh(key, main = False):
key = check_key(key)
p2shscript = CScript([OP_0, hash160(key)])
return script_to_p2sh(p2shscript, main)
def script_to_p2sh_p2wsh(script, main = False):
script = check_script(script)
p2shscript = CScript([OP_0, sha256(script)])
return script_to_p2sh(p2shscript, main)
def check_key(key):
if (type(key) is str):
key = hex_str_to_bytes(key) # Assuming this is hex string
if (type(key) is bytes and (len(key) == 33 or len(key) == 65)):
return key
assert(False)
def check_script(script):
if (type(script) is str):
script = hex_str_to_bytes(script) # Assuming this is hex string
if (type(script) is bytes or type(script) is CScript):
return script
assert(False)
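# Illustrative sketch (the pubkey below is a made-up 33-byte hex string, not a
# real key): by default the helpers use the testnet version bytes, and
# main=True selects the mainnet prefixes instead.
def _example_addresses():
    pubkey = '02' + '11' * 32  # hypothetical compressed pubkey
    return key_to_p2pkh(pubkey), key_to_p2sh_p2wpkh(pubkey, main=True)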
# license: mit
# ==== conda/kapsel :: conda_kapsel/plugins/provider.py ====
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
"""Types related to project requirement providers."""
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from copy import deepcopy
import os
import shutil
from conda_kapsel.internal import conda_api
from conda_kapsel.internal import logged_subprocess
from conda_kapsel.internal.metaclass import with_metaclass
from conda_kapsel.internal.makedirs import makedirs_ok_if_exists
from conda_kapsel.internal.simple_status import SimpleStatus
def _service_directory(local_state_file, relative_name):
return os.path.join(os.path.dirname(local_state_file.filename), "services", relative_name)
class ProvideContext(object):
"""A context passed to ``Provider.provide()`` representing state that can be modified."""
def __init__(self, environ, local_state_file, default_env_spec_name, status, mode):
"""Create a ProvideContext.
Args:
environ (dict): environment variables to be read and modified
            local_state_file (LocalStateFile): to store any created state
            default_env_spec_name (str): name of the env spec used by default
            status (RequirementStatus): current status
mode (str): one of PROVIDE_MODE_PRODUCTION, PROVIDE_MODE_DEVELOPMENT, PROVIDE_MODE_CHECK
"""
self.environ = environ
self._local_state_file = local_state_file
self._default_env_spec_name = default_env_spec_name
self._status = status
self._mode = mode
def ensure_service_directory(self, relative_name):
"""Create a directory in PROJECT_DIR/services with the given name.
The name should be unique to the ServiceRequirement creating the directory,
so usually the requirement's env var.
Args:
relative_name (str): name to distinguish this dir from other service directories
"""
path = _service_directory(self._local_state_file, relative_name)
makedirs_ok_if_exists(path)
return path
def transform_service_run_state(self, service_name, func):
"""Run a function which takes and potentially modifies the state of a service.
If the function modifies the state it's given, the new state will be saved
and passed in next time.
Args:
service_name (str): the name of the service, should be
specific enough to uniquely identify the provider
func (function): function to run, passing it the current state
Returns:
Whatever ``func`` returns.
"""
old_state = self._local_state_file.get_service_run_state(service_name)
modified = deepcopy(old_state)
result = func(modified)
if modified != old_state:
self._local_state_file.set_service_run_state(service_name, modified)
self._local_state_file.save()
return result
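    # Hedged usage sketch (service name and state key are illustrative):
    #
    #   def bump(state):
    #       state['count'] = state.get('count', 0) + 1
    #       return state['count']
    #
    #   n = context.transform_service_run_state('REDIS_URL', bump)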
@property
def status(self):
"""Get the current ``RequirementStatus``."""
return self._status
@property
def local_state_file(self):
"""Get the LocalStateFile."""
return self._local_state_file
@property
def default_env_spec_name(self):
"""Get the default env spec."""
return self._default_env_spec_name
@property
def mode(self):
"""Get flavor of provide.
Value should be ``PROVIDE_MODE_DEVELOPMENT``, ``PROVIDE_MODE_PRODUCTION``, or ``PROVIDE_MODE_CHECK``.
"""
return self._mode
def shutdown_service_run_state(local_state_file, service_name):
"""Run any shutdown commands from the local state file for the given service.
Also remove the shutdown commands from the file.
Args:
local_state_file (LocalStateFile): local state
service_name (str): the name of the service, usually a
variable name, should be specific enough to uniquely
identify the provider
Returns:
a `Status` instance potentially containing errors
"""
run_states = local_state_file.get_all_service_run_states()
if service_name not in run_states:
return SimpleStatus(success=True, description=("Nothing to do to shut down %s." % service_name))
errors = []
state = run_states[service_name]
if 'shutdown_commands' in state:
commands = state['shutdown_commands']
for command in commands:
code = logged_subprocess.call(command)
if code != 0:
errors.append("Shutting down %s, command %s failed with code %d." % (service_name, repr(command), code))
# clear out the run state once we try to shut it down
local_state_file.set_service_run_state(service_name, dict())
local_state_file.save()
if errors:
return SimpleStatus(success=False,
description=("Shutdown commands failed for %s." % service_name),
errors=errors)
else:
return SimpleStatus(success=True, description=("Successfully shut down %s." % service_name))
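# Sketch of the run-state layout consumed above (the command value is
# hypothetical; the schema is only what this function reads):
#   {'REDIS_URL': {'shutdown_commands': [['redis-cli', 'shutdown']]}}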
def delete_service_directory(local_state_file, relative_name):
"""Delete a directory in PROJECT_DIR/services with the given name.
The name should be unique to the ServiceRequirement creating the directory,
so usually the requirement's env var.
    If this fails, it does so silently (returns no errors).
Args:
relative_name (str): name to distinguish this dir from other service directories
Returns:
None
"""
path = _service_directory(local_state_file, relative_name)
try:
shutil.rmtree(path=path)
except OSError:
pass
# also delete the services directory itself, if it's now empty
try:
# this fails on non-empty dir
os.rmdir(os.path.dirname(path))
except OSError:
pass
class ProviderAnalysis(object):
"""A Provider's preflight check snapshotting the state prior to ``provide()``.
Instances of this class are immutable, and are usually created as part of a
``RequirementStatus``.
"""
def __init__(self, config, missing_env_vars_to_configure, missing_env_vars_to_provide):
"""Create a ProviderAnalysis."""
self._config = deepcopy(config) # defensive copy so we don't modify the original
self._missing_env_vars_to_configure = missing_env_vars_to_configure
self._missing_env_vars_to_provide = missing_env_vars_to_provide
@property
def config(self):
"""Get the configuration dict from the time of analysis."""
return self._config
@property
def missing_env_vars_to_configure(self):
"""Get the env vars we were missing in order to configure, from the time of analysis."""
return self._missing_env_vars_to_configure
@property
def missing_env_vars_to_provide(self):
"""Get the env vars we were missing in order to provide, from the time of analysis."""
return self._missing_env_vars_to_provide
class ProvideResult(object):
"""A Provider's results from the ``provide()`` call.
Instances of this class are immutable, and are returned from ``provide()``.
"""
def __init__(self, errors=None, logs=None):
"""Create a ProvideResult."""
if errors is None:
errors = []
if logs is None:
logs = []
self._errors = errors
self._logs = logs
def copy_with_additions(self, errors=None, logs=None):
"""Copy this result, appending additional errors and logs."""
if errors is None:
errors = []
if logs is None:
logs = []
if len(errors) == 0 and len(logs) == 0:
# we don't have to actually copy since we are immutable
return self
else:
return ProvideResult(errors=(self._errors + errors), logs=(self._logs + logs))
@property
def errors(self):
"""Get any fatal errors that occurred during provide() preventing success."""
return self._errors
@property
def logs(self):
"""Get any debug logs that occurred during provide()."""
return self._logs
@classmethod
def empty(cls):
"""Get an empty ProvideResult (currently a singleton since these are immutable)."""
return _emptyProvideResult
# get this via ProvideResult.empty()
_emptyProvideResult = ProvideResult()
class Provider(with_metaclass(ABCMeta)):
"""A Provider can take some action to meet a Requirement."""
@abstractmethod
def missing_env_vars_to_configure(self, requirement, environ, local_state_file):
"""Get a list of unset environment variable names that must be set before configuring this provider.
Args:
requirement (Requirement): requirement instance we are providing for
environ (dict): current environment variable dict
local_state_file (LocalStateFile): local state file
"""
pass # pragma: no cover
@abstractmethod
def missing_env_vars_to_provide(self, requirement, environ, local_state_file):
"""Get a list of unset environment variable names that must be set before calling provide().
Args:
requirement (Requirement): requirement instance we are providing for
environ (dict): current environment variable dict
local_state_file (LocalStateFile): local state file
"""
pass # pragma: no cover
@abstractmethod
def read_config(self, requirement, environ, local_state_file, default_env_spec_name, overrides):
"""Read a config dict from the local state file for the given requirement.
Args:
requirement (Requirement): the requirement we're providing
environ (dict): current environment variables
local_state_file (LocalStateFile): file to read from
default_env_spec_name (str): the fallback env spec name
overrides (UserConfigOverrides): user-supplied forced config
"""
pass # pragma: no cover
def set_config_values_as_strings(self, requirement, environ, local_state_file, default_env_spec_name, overrides,
values):
"""Set some config values in the state file (should not save the file).
Args:
requirement (Requirement): the requirement we're providing
environ (dict): current environment variables
local_state_file (LocalStateFile): file to save to
default_env_spec_name (str): default env spec name for this prepare
overrides (UserConfigOverrides): if any values in here change, delete the override
values (dict): dict from string to string
"""
pass # silently ignore unknown config values
def config_html(self, requirement, environ, local_state_file, overrides, status):
"""Get an HTML string for configuring the provider.
The HTML string must contain a single <form> tag. Any
<input>, <textarea>, and <select> elements should have
their name attribute set to match the dict keys used in
``read_config()``'s result. The <form> should not have a
submit button, since it will be merged with other
forms. The initial state of all the form fields will be
auto-populated from the values in ``read_config()``. When
the form is submitted, any changes made by the user will
be set back using ``set_config_values_as_strings()``.
This is simple to use, but for now not very flexible; if you need
more flexibility let us know and we can figure out what API
to add in future versions.
Args:
requirement (Requirement): the requirement we're providing
environ (dict): current environment variables
local_state_file (LocalStateFile): file to save to
status (RequirementStatus): last-computed status
Returns:
An HTML string or None if there's nothing to configure.
"""
return None
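    # A minimal form an implementation might return (the "value" field name is
    # illustrative and must match a key produced by read_config()):
    #
    #   <form>
    #     <label>Value: <input type="text" name="value"/></label>
    #   </form>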
def analyze(self, requirement, environ, local_state_file, default_env_spec_name, overrides):
"""Analyze whether and how we'll be able to provide the requirement.
This is used to show the situation in the UI, and also to
consolidate all IO-type work in one place (inside
Requirement.check_status()).
Returns:
A ``ProviderAnalysis`` instance.
"""
config = self.read_config(requirement, environ, local_state_file, default_env_spec_name, overrides)
missing_to_configure = self.missing_env_vars_to_configure(requirement, environ, local_state_file)
missing_to_provide = self.missing_env_vars_to_provide(requirement, environ, local_state_file)
return ProviderAnalysis(config=config,
missing_env_vars_to_configure=missing_to_configure,
missing_env_vars_to_provide=missing_to_provide)
@abstractmethod
def provide(self, requirement, context):
"""Execute the provider, fulfilling the requirement.
The implementation should read and modify the passed-in
``environ`` rather than accessing the OS environment
directly.
Args:
requirement (Requirement): requirement we want to meet
context (ProvideContext): context containing project state
Returns:
a ``ProvideResult`` instance
"""
pass # pragma: no cover
@abstractmethod
def unprovide(self, requirement, environ, local_state_file, overrides, requirement_status=None):
"""Undo the provide, cleaning up any files or processes we created.
The requirement may still be met after this, if our providing wasn't
really needed.
Args:
requirement (Requirement): requirement we want to de-provide
environ (dict): current env vars, often from a previous prepare
local_state_file (LocalStateFile): the local state
overrides (UserConfigOverrides): overrides to state
requirement_status (RequirementStatus or None): requirement status if available
Returns:
a `Status` instance describing the (non)success of the unprovision
"""
pass # pragma: no cover
class EnvVarProvider(Provider):
"""Meets a requirement for an env var by letting people set it manually."""
def _local_state_override(self, requirement, local_state_file):
return local_state_file.get_value(["variables", requirement.env_var], default=None)
def _disabled_local_state_override(self, requirement, local_state_file):
return local_state_file.get_value(["disabled_variables", requirement.env_var], default=None)
def missing_env_vars_to_configure(self, requirement, environ, local_state_file):
"""Override superclass to require env prefix."""
if self._get_env_prefix(environ) is not None:
return ()
else:
return (conda_api.conda_prefix_variable(), )
def missing_env_vars_to_provide(self, requirement, environ, local_state_file):
"""Override superclass to require env prefix."""
return self.missing_env_vars_to_configure(requirement, environ, local_state_file)
def _get_env_prefix(self, environ):
# on unix, ENV_PATH is the prefix and DEFAULT_ENV can be just a name,
# on windows DEFAULT_ENV is always the prefix
return environ.get(conda_api.conda_prefix_variable(), None)
def read_config(self, requirement, environ, local_state_file, default_env_spec_name, overrides):
"""Override superclass to read env var value."""
config = dict()
value = None
if requirement.encrypted:
# import keyring locally because it's an optional dependency
# that prints a warning when it's needed but not found.
import conda_kapsel.internal.keyring as keyring
env_prefix = self._get_env_prefix(environ)
if env_prefix is None:
value = None
else:
value = keyring.get(env_prefix, requirement.env_var)
# note that we will READ an encrypted value from local
# state if someone puts it in there by hand, but we won't
# ever write one there ourselves.
if value is None:
value = self._local_state_override(requirement, local_state_file)
disabled_value = self._disabled_local_state_override(requirement, local_state_file)
was_disabled = value is None and disabled_value is not None
if was_disabled:
value = disabled_value
if value is not None:
config['value'] = value
if value is not None and not was_disabled:
source = 'variables'
elif requirement.env_var in environ:
source = 'environ'
config['value'] = environ[requirement.env_var]
elif 'default' in requirement.options:
source = 'default'
else:
source = 'unset'
config['source'] = source
return config
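    # Example shapes this returns (values illustrative):
    #   {'value': 'redis://localhost:6379', 'source': 'variables'}
    #   {'source': 'unset'}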
def set_config_values_as_strings(self, requirement, environ, local_state_file, default_env_spec_name, overrides,
values):
"""Override superclass to set env var value."""
if requirement.encrypted:
self._set_encrypted_config_values_as_strings(requirement, environ, local_state_file, default_env_spec_name,
overrides, values)
else:
self._set_nonencrypted_config_values_as_strings(requirement, environ, local_state_file,
default_env_spec_name, overrides, values)
def _set_nonencrypted_config_values_as_strings(self, requirement, environ, local_state_file, default_env_spec_name,
overrides, values):
override_path = ["variables", requirement.env_var]
disabled_path = ["disabled_variables", requirement.env_var]
# we set override_path only if the source is variables,
# otherwise the value goes in disabled_variables. If we
# don't have a source that means the only option we
# presented was to set the local override, so default to
# 'variables'
overriding = (values.get('source', 'variables') == 'variables')
# If there's an existing override value and the source is not 'variables',
# we need to be sure to move the existing to disabled_variables.
# Also, we save values['value'] from the web form in local_state_file,
# even if we aren't using it as the source right now.
local_override_value = self._local_state_override(requirement, local_state_file)
if local_override_value is None:
local_override_value = self._disabled_local_state_override(requirement, local_state_file)
value_string = values.get('value', local_override_value)
if value_string is not None:
if value_string == '':
# the reason empty string unsets is that otherwise there's no easy
# way to unset from a web form
local_state_file.unset_value(override_path)
local_state_file.unset_value(disabled_path)
else:
if overriding:
local_state_file.set_value(override_path, value_string)
local_state_file.unset_value(disabled_path)
else:
local_state_file.set_value(disabled_path, value_string)
local_state_file.unset_value(override_path)
def _set_encrypted_config_values_as_strings(self, requirement, environ, local_state_file, default_env_spec_name,
overrides, values):
# import keyring locally because it's an optional dependency
# that prints a warning when it's needed but not found.
import conda_kapsel.internal.keyring as keyring
env_prefix = self._get_env_prefix(environ)
from_keyring = keyring.get(env_prefix, requirement.env_var)
value_string = values.get('value', from_keyring)
if value_string is not None:
if value_string == '':
keyring.unset(env_prefix, requirement.env_var)
else:
keyring.set(env_prefix, requirement.env_var, value_string)
def _extra_source_options_html(self, requirement, environ, local_state_file, status):
"""Override this in a subtype to add choices to the config HTML.
Choices should be radio inputs with name="source"
"""
return ""
def config_html(self, requirement, environ, local_state_file, overrides, status):
"""Override superclass to provide our config html."""
if status.requirement.encrypted:
input_type = 'password'
else:
input_type = 'text'
extra_html = self._extra_source_options_html(requirement, environ, local_state_file, status)
choices_html = extra_html
if requirement.env_var in environ:
choices_html = choices_html + """
<div>
<label><input type="radio" name="source" value="environ"/>Keep value '{from_environ}'</label>
</div>
<div>
<label><input type="radio" name="source" value="variables"/>Use this value instead:
<input type="{input_type}" name="value"/></label>
</div>
""".format(from_environ=environ[requirement.env_var],
input_type=input_type)
else:
if 'default' in requirement.options:
choices_html = choices_html + """
<div>
<label><input type="radio" name="source" value="default"/>Keep default '{from_default}'</label>
</div>
<div>
<label><input type="radio" name="source" value="variables"/>Use this value instead:
<input type="{input_type}" name="value"/></label>
</div>
""".format(input_type=input_type,
from_default=requirement.options['default'])
else:
choices_html = choices_html + """
<div>
<label><input type="radio" name="source" value="variables"/>Use this value:
<input type="{input_type}" name="value"/></label>
</div>
""".format(input_type=input_type)
# print(("%s: choices_html=\n" % self.__class__.__name__) + choices_html)
return """
<form>
%s
</form>
""" % (choices_html)
def provide(self, requirement, context):
"""Override superclass to use configured env var (or already-set env var)."""
errors = []
logs = []
# We prefer the values in this order:
# - value set in project-local state overrides everything
# (otherwise the UI for configuring the value would end
# up ignored)
# - value in the keyring overrides (treated the same as
# kapsel-local.yml, but for encrypted variables)
# - then anything already set in the environment wins, so you
# can override on the command line like `FOO=bar myapp`
# - then the kapsel.yml default value
local_state_override = None
if requirement.encrypted:
# import keyring locally because it's an optional dependency
# that prints a warning when it's needed but not found.
import conda_kapsel.internal.keyring as keyring
env_prefix = self._get_env_prefix(context.environ)
if env_prefix is not None:
local_state_override = keyring.get(env_prefix, requirement.env_var)
# we will read encrypted vars from local state, though we never
# put them in there ourselves
if local_state_override is None:
local_state_override = self._local_state_override(requirement, context.local_state_file)
if local_state_override is not None:
# kapsel-local.yml
#
# variables:
# REDIS_URL: "redis://example.com:1234"
context.environ[requirement.env_var] = local_state_override
elif requirement.env_var in context.environ:
# nothing to do here
pass
elif 'default' in requirement.options:
# kapsel.yml
#
# variables:
# REDIS_URL:
# default: "redis://example.com:1234"
value = requirement.options['default']
if value is not None:
context.environ[requirement.env_var] = value
else:
pass
return ProvideResult.empty().copy_with_additions(errors, logs)
def unprovide(self, requirement, environ, local_state_file, overrides, requirement_status=None):
"""Override superclass to return success always."""
return SimpleStatus(success=True, description=("Nothing to clean up for %s." % requirement.env_var))
| bsd-3-clause | 7,948,325,205,841,965,000 | 40.281099 | 120 | 0.62071 | false |
anirudhSK/chromium | mojo/public/bindings/pylib/parse/mojo_translate.py | 1 | 5181 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Translate parse tree to Mojom IR"""
import os
import sys
class MojomBuilder():
def __init__(self):
self.mojom = {}
def MapKind(self, kind):
map_to_kind = { 'bool': 'b',
'int8': 'i8',
'int16': 'i16',
'int32': 'i32',
'int64': 'i64',
'uint8': 'u8',
'uint16': 'u16',
'uint32': 'u32',
'uint64': 'u64',
'float': 'f',
'double': 'd',
'string': 's',
'handle': 'h',
'handle<data_pipe_consumer>': 'h:d:c',
'handle<data_pipe_producer>': 'h:d:p',
'handle<message_pipe>': 'h:m'}
if kind.endswith('[]'):
return 'a:' + self.MapKind(kind[0:len(kind)-2])
if kind in map_to_kind:
return map_to_kind[kind]
return 'x:' + kind
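  # Illustrative mappings derived from the table above:
  #   'int32'    -> 'i32'
  #   'int32[]'  -> 'a:i32'
  #   'MyStruct' -> 'x:MyStruct'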
  def MapOrdinal(self, ordinal):
    if ordinal is None:
      return None
    return int(ordinal[1:])  # Strip leading '@'
def GetAttribute(self, attributes, name):
out = None
if attributes:
for attribute in attributes:
if attribute[0] == 'ATTRIBUTE' and attribute[1] == name:
out = attribute[2]
return out
def MapFields(self, fields):
out = []
for field in fields:
if field[0] == 'FIELD':
out.append({'name': field[2],
'kind': self.MapKind(field[1]),
'ordinal': self.MapOrdinal(field[3]),
'default': field[4]})
return out
def MapParameters(self, parameters):
out = []
for parameter in parameters:
if parameter[0] == 'PARAM':
out.append({'name': parameter[2],
'kind': self.MapKind(parameter[1]),
'ordinal': self.MapOrdinal(parameter[3])})
return out
def MapMethods(self, methods):
out = []
if methods:
for method in methods:
if method[0] == 'METHOD':
method_dict = {'name': method[1],
'parameters': self.MapParameters(method[2]),
'ordinal': self.MapOrdinal(method[3])}
if method[4] != None:
method_dict['response_parameters'] = self.MapParameters(method[4])
out.append(method_dict)
return out
def MapEnumFields(self, fields):
out = []
for field in fields:
if field[0] == 'ENUM_FIELD':
out.append({'name': field[1],
'value': field[2]})
return out
def MapEnums(self, enums):
out = []
if enums:
for enum in enums:
if enum[0] == 'ENUM':
out.append({'name': enum[1],
'fields': self.MapEnumFields(enum[2])})
return out
def AddStruct(self, name, attributes, body):
struct = {}
struct['name'] = name
# TODO(darin): Add support for |attributes|
#struct['attributes'] = MapAttributes(attributes)
struct['fields'] = self.MapFields(body)
struct['enums'] = self.MapEnums(body)
self.mojom['structs'].append(struct)
def AddInterface(self, name, attributes, body):
interface = {}
interface['name'] = name
interface['peer'] = self.GetAttribute(attributes, 'Peer')
interface['methods'] = self.MapMethods(body)
interface['enums'] = self.MapEnums(body)
self.mojom['interfaces'].append(interface)
def AddEnum(self, name, fields):
# TODO(mpcomplete): add support for specifying enums as types. Right now
# we just use int32.
enum = {}
enum['name'] = name
enum['fields'] = self.MapEnumFields(fields)
self.mojom['enums'].append(enum)
def AddModule(self, name, namespace, contents):
self.mojom['name'] = name
self.mojom['namespace'] = namespace
self.mojom['structs'] = []
self.mojom['interfaces'] = []
self.mojom['enums'] = []
for item in contents:
if item[0] == 'STRUCT':
self.AddStruct(name=item[1], attributes=item[2], body=item[3])
elif item[0] == 'INTERFACE':
self.AddInterface(name=item[1], attributes=item[2], body=item[3])
elif item[0] == 'ENUM':
self.AddEnum(name=item[1], fields=item[2])
def AddImport(self, filename):
import_item = {}
import_item['filename'] = filename
self.mojom['imports'].append(import_item)
def Build(self, tree, name):
self.mojom['imports'] = []
for item in tree:
if item[0] == 'MODULE':
self.AddModule(name=name, namespace=item[1], contents=item[2])
elif item[0] == 'IMPORT':
self.AddImport(filename=item[1])
return self.mojom
def Translate(tree, name):
return MojomBuilder().Build(tree, name)
def Main():
if len(sys.argv) < 2:
print("usage: %s filename" % (sys.argv[0]))
sys.exit(1)
tree = eval(open(sys.argv[1]).read())
name = os.path.splitext(os.path.basename(sys.argv[1]))[0]
result = Translate(tree, name)
print(result)
if __name__ == '__main__':
Main()
| bsd-3-clause | 713,136,918,399,017,500 | 27.944134 | 78 | 0.550666 | false |
childresslab/MicrocavityExp1 | logic/automation.py | 1 | 8918 | # -*- coding: utf-8 -*-
"""
Execution tree for auto measurements
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import os
import pyqtgraph.configfile as configfile
from collections import OrderedDict
from core.module import Connector
from logic.generic_logic import GenericLogic
from qtpy import QtCore
class TreeItem:
""" Item in a TreeModel.
"""
def __init__(self, data, parent=None):
""" Create TreeItem.
@param data object: data stored in TreeItem
@param parent Treeitem: parent of this item
"""
self.parentItem = parent
self.itemData = data
self.childItems = []
def appendChild(self, item):
""" Append child node to tree item.
        @param item TreeItem: child item to append
"""
self.childItems.append(item)
def child(self, row):
""" Get child item for specific index
@param row int: row index for child item
        @return TreeItem: child item in given row
"""
return self.childItems[row]
def childCount(self):
""" Get number of children.
@return int: number of children
"""
return len(self.childItems)
def columnCount(self):
""" Return number of columns.
@return int: number of columns in data
"""
return len(self.itemData)
def data(self, column):
""" Get data from a given column.
        @param column int: column index
@return : data stored in column
"""
try:
return self.itemData[column]
except IndexError:
return None
def parent(self):
""" Get parent item.
@return TreeItem: parent item
"""
return self.parentItem
def row(self):
""" Get our own row index.
@return int: row index in parent item
"""
if self.parentItem:
return self.parentItem.childItems.index(self)
return 0
class TreeModel(QtCore.QAbstractItemModel):
""" A tree model for storing TreeItems in a tree structure.
"""
def __init__(self, parent=None):
""" Create a TreeModel.
@param parent TreeModel: parent model
"""
super(TreeModel, self).__init__(parent)
self.rootItem = TreeItem(("Title", "Summary"))
def columnCount(self, parent):
""" Return number of columns.
        @param parent TreeModel: parent model
"""
if parent.isValid():
return parent.internalPointer().columnCount()
else:
return self.rootItem.columnCount()
def data(self, index, role):
""" Retrieve data from model.
@param index QModelIndex: index of data
@param role QtRole: role for data
"""
if not index.isValid():
return None
if role != QtCore.Qt.DisplayRole:
return None
item = index.internalPointer()
return item.data(index.column())
def flags(self, index):
""" Get flags for item at index.
@param index QModelIndex: index for item
@return flags: Qt model flags
"""
if not index.isValid():
return QtCore.Qt.NoItemFlags
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def headerData(self, section, orientation, role):
""" Header for this model.
@param section QModelIndex: index for header data
@param orientation: header orientation
@param role: Qt role for header
"""
if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
return self.rootItem.data(section)
return None
def index(self, row, column, parent):
""" Make QModelIndex from row and column number.
@param row int: row number
@param column int: column number
@param parent QAbstractModel: model parent
@return QModelIndex: index for item at position
"""
if not self.hasIndex(row, column, parent):
return QtCore.QModelIndex()
if not parent.isValid():
parentItem = self.rootItem
else:
parentItem = parent.internalPointer()
childItem = parentItem.child(row)
if childItem:
return self.createIndex(row, column, childItem)
else:
return QtCore.QModelIndex()
def parent(self, index):
""" Get parent index for item at index.
@param index QModelIndex: index for item
@return QModelIndex: index for parent
"""
if not index.isValid():
return QtCore.QModelIndex()
childItem = index.internalPointer()
parentItem = childItem.parent()
if parentItem == self.rootItem:
return QtCore.QModelIndex()
return self.createIndex(parentItem.row(), 0, parentItem)
def rowCount(self, parent):
""" Return number of rows in model.
        @return int: number of rows
"""
if parent.column() > 0:
return 0
if not parent.isValid():
parentItem = self.rootItem
else:
parentItem = parent.internalPointer()
return parentItem.childCount()
def loadExecTree(self, tree, parent=None):
""" Load a tree from a nested dictionary into the model.
@param tree dict: dictionary tree to be loaded
@param parent TreeItem: root item for loaded tree
"""
if not isinstance(parent, TreeItem):
self.rootItem = TreeItem(("Title", "Summary"))
self.recursiveLoad(tree, self.rootItem)
else:
self.recursiveLoad(tree, parent)
def recursiveLoad(self, tree, parent):
""" Recursively load a tree from a nested dictionary into the model.
@param tree dict: dictionary for (sub)tree to be loaded
@param parent TreeItem: root item for loaded (sub)tree
"""
for key,value in tree.items():
if isinstance(value, OrderedDict):
newchild = TreeItem([key, 'branch'], parent)
parent.appendChild(newchild)
self.recursiveLoad(value, newchild)
else:
newchild = TreeItem([key, 'leaf'], parent)
parent.appendChild(newchild)
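    # Sketch: OrderedDict([('a', OrderedDict([('b', 1)])), ('c', 2)]) loads as
    # branch item 'a' (with leaf child 'b') plus top-level leaf 'c'.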
def recursiveSave(self, parent):
""" Save TreeModel into nested dict.
@param parent TreeItem: parent item
@return dict: dictionary containing tree
"""
if parent.childCount() > 0:
retdict = OrderedDict()
for i in range(parent.childCount()):
key = parent.child(i).itemData[0]
retdict[key] = self.recursiveSave(parent.child(i))
return retdict
else:
return parent.itemData[0]
class AutomationLogic(GenericLogic):
""" Logic module agreggating multiple hardware switches.
"""
_modclass = 'AutomationLogic'
_modtype = 'logic'
taskrunner = Connector(interface='TaskRunner')
sigRepeat = QtCore.Signal()
def on_activate(self):
""" Prepare logic module for work.
"""
self._taskrunner = self.get_connector('taskrunner')
#stuff = "a\txyz\n b\tx\n c\ty\n d\tw\ne\tm\n"
#tr = OrderedDict([
# ('a', OrderedDict([
# ('f', OrderedDict([
# ('g', 5)
# ])),
# ('h', 'letrole'),
# ])),
# ('b', 1),
# ('c', 2),
# ('d', 3),
# ('e', 4)
#])
self.model = TreeModel()
#self.model.loadExecTree(tr)
self.loadAutomation('auto.cfg')
def on_deactivate(self):
""" Deactivate modeule.
"""
print(self.model.recursiveSave(self.model.rootItem))
def loadAutomation(self, path):
""" Load automation config into model.
@param path str: file path
"""
if os.path.isfile(path):
configdict = configfile.readConfigFile(path)
self.model.loadExecTree(configdict)
| gpl-3.0 | 6,549,909,363,573,408,000 | 27.860841 | 82 | 0.580063 | false |
shaded-enmity/cryptopals | set-2/aes_cbc.py | 1 | 1367 | #!/usr/bin/python
import Crypto.Cipher.AES, os, sys, binascii
key, file_ = sys.argv[1:]
def ord_str(string):
return [ord(c) for c in string]
def chars(lst):
return ''.join([chr(c) for c in lst])
cipher = Crypto.Cipher.AES.new(key, mode=Crypto.Cipher.AES.MODE_ECB)
text = binascii.a2b_base64("".join([s.strip() for s in open(file_).readlines()]))
IV = bytes(b'\x00'*16)
def remove_pad(txt, padsize):
return txt[:-ord(txt[-1])]
def encrypt_block(block):
ciphertext = cipher.encrypt(block)
return ciphertext
def get_blocks(stream, blocksize):
for i in xrange(0, len(stream), blocksize):
yield bytes(stream[i:i+blocksize])
def xor_blocks(a, b):
assert len(a) == len(b)
return map(lambda (x, y): ord(x)^ord(y), zip(a, b))
def encrypt(text, iv):
assert len(key) == len(iv)
lastblock, encrypted = iv, []
for block in get_blocks(text, len(key)):
lastblock = encrypt_block(chars(xor_blocks(block, lastblock)))
encrypted.append(lastblock)
    return ''.join(encrypted)  # concatenate the ciphertext blocks
def decrypt(text, iv):
assert len(key) == len(iv)
lastblock, decrypted = iv, []
for block in get_blocks(text, len(key)):
decrypted.append(chars(xor_blocks(cipher.decrypt(block), lastblock)))
lastblock = block
return ''.join(decrypted)
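# CBC round-trip sketch (hypothetical 16-byte key/IV; encrypt() expects
# already-padded input, remove_pad() strips PKCS#7 padding afterwards):
#   ct = encrypt(padded_plaintext, IV)
#   pt = remove_pad(decrypt(ct, IV), 16)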
#encrypt(text, IV)
print remove_pad(decrypt(text, IV), 16)
| mit | 1,247,901,839,241,740,500 | 26.897959 | 81 | 0.648135 | false |
giampaolo/psutil | scripts/internal/print_downloads.py | 2 | 4099 | #!/usr/bin/env python3
# Copyright (c) 2009 Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Print PYPI statistics in MarkDown format.
Useful sites:
* https://pepy.tech/project/psutil
* https://pypistats.org/packages/psutil
* https://hugovk.github.io/top-pypi-packages/
"""
from __future__ import print_function
import json
import os
import subprocess
import sys
import pypinfo # NOQA
from psutil._common import memoize
AUTH_FILE = os.path.expanduser("~/.pypinfo.json")
PKGNAME = 'psutil'
DAYS = 30
LIMIT = 100
GITHUB_SCRIPT_URL = "https://github.com/giampaolo/psutil/blob/master/" \
"scripts/internal/pypistats.py"
LAST_UPDATE = None
bytes_billed = 0
# --- get
@memoize
def sh(cmd):
assert os.path.exists(AUTH_FILE)
env = os.environ.copy()
env['GOOGLE_APPLICATION_CREDENTIALS'] = AUTH_FILE
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
raise RuntimeError(stderr)
assert not stderr, stderr
return stdout.strip()
@memoize
def query(cmd):
global bytes_billed
ret = json.loads(sh(cmd))
bytes_billed += ret['query']['bytes_billed']
return ret
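# Shape of the pypinfo JSON these queries return (only the fields read below;
# values illustrative):
#   {'rows': [{'project': 'psutil', 'download_count': 1234}, ...],
#    'query': {'bytes_billed': 0},
#    'last_update': '2020-01-01 00:00:00'}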
def top_packages():
global LAST_UPDATE
ret = query("pypinfo --all --json --days %s --limit %s '' project" % (
DAYS, LIMIT))
LAST_UPDATE = ret['last_update']
return [(x['project'], x['download_count']) for x in ret['rows']]
def ranking():
data = top_packages()
i = 1
for name, downloads in data:
if name == PKGNAME:
return i
i += 1
raise ValueError("can't find %s" % PKGNAME)
def downloads():
data = top_packages()
for name, downloads in data:
if name == PKGNAME:
return downloads
raise ValueError("can't find %s" % PKGNAME)
def downloads_pyver():
return query("pypinfo --json --days %s %s pyversion" % (DAYS, PKGNAME))
def downloads_by_country():
return query("pypinfo --json --days %s %s country" % (DAYS, PKGNAME))
def downloads_by_system():
return query("pypinfo --json --days %s %s system" % (DAYS, PKGNAME))
def downloads_by_distro():
return query("pypinfo --json --days %s %s distro" % (DAYS, PKGNAME))
# --- print
templ = "| %-30s | %15s |"
def print_row(left, right):
if isinstance(right, int):
right = '{0:,}'.format(right)
print(templ % (left, right))
def print_header(left, right="Downloads"):
print_row(left, right)
s = templ % ("-" * 30, "-" * 15)
print("|:" + s[2:-2] + ":|")
def print_markdown_table(title, left, rows):
pleft = left.replace('_', ' ').capitalize()
print("### " + title)
print()
print_header(pleft)
for row in rows:
lval = row[left]
print_row(lval, row['download_count'])
print()
def main():
downs = downloads()
print("# Download stats")
print("")
s = "psutil download statistics of the last %s days (last update " % DAYS
s += "*%s*).\n" % LAST_UPDATE
s += "Generated via [pypistats.py](%s) script.\n" % GITHUB_SCRIPT_URL
print(s)
data = [
{'what': 'Per month', 'download_count': downs},
{'what': 'Per day', 'download_count': int(downs / 30)},
{'what': 'PYPI ranking', 'download_count': ranking()}
]
print_markdown_table('Overview', 'what', data)
print_markdown_table('Operating systems', 'system_name',
downloads_by_system()['rows'])
print_markdown_table('Distros', 'distro_name',
downloads_by_distro()['rows'])
print_markdown_table('Python versions', 'python_version',
downloads_pyver()['rows'])
print_markdown_table('Countries', 'country',
downloads_by_country()['rows'])
if __name__ == '__main__':
try:
main()
finally:
print("bytes billed: %s" % bytes_billed, file=sys.stderr)
| bsd-3-clause | -827,577,229,933,360,000 | 24.459627 | 77 | 0.601366 | false |
0--key/lib | portfolio/Python/scrapy/hadrianequine/petsensedirectcouk.py | 2 | 3072 | import re
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.url import urljoin_rfc
from product_spiders.items import Product, ProductLoader
class PetSenseDirectSpider(BaseSpider):
name = 'petsensedirect.co.uk'
allowed_domains = ['www.petsensedirect.co.uk']
start_urls = ('http://www.petsensedirect.co.uk/',)
def __init__(self, *args, **kwargs):
super(PetSenseDirectSpider, self).__init__(*args, **kwargs)
self.URLBASE = 'http://www.petsensedirect.co.uk/'
def parse(self, response):
if not isinstance(response, HtmlResponse):
return
# categories
hxs = HtmlXPathSelector(response)
category_urls = hxs.select('//div[@id="top_nav"]//a[not(starts-with(@href,"javascript:"))]/@href').extract()
for url in category_urls:
url = urljoin_rfc(self.URLBASE, url)
yield Request(url)
# subcategories
subcategory_urls = hxs.select('//div[@id="center"]//a[not(starts-with(@href,"javascript:"))]/img/../@href').extract()
for url in subcategory_urls:
url = urljoin_rfc(self.URLBASE + 'acatalog/', url)
yield Request(url)
# products
products_urls = hxs.select('//div[@id="center"]//a[not(starts-with(@href,"javascript:"))]/img/../@href').extract()
for url in products_urls:
url = urljoin_rfc(self.URLBASE + 'acatalog/', url)
yield Request(url, callback=self.parse_product, dont_filter=True)
def parse_product(self, response):
if not isinstance(response, HtmlResponse):
return
# sub products
hxs = HtmlXPathSelector(response)
product_data = hxs.select('//table//b')
try:
name = product_data[0].select('./text()').extract()[0]
except IndexError:
return
single_price = product_data.re('\xa3(.*?)<')
multiple_prices = hxs.select('//select[@class="form_input_general"]/option/text()').extract()
products_data = []
if single_price:
price = single_price[0]
products_data.append((name.strip(), price))
else:
for name_and_price in multiple_prices:
name_and_price = re.search('(.*).*\xa3(.*)', name_and_price)
if name_and_price:
name_and_price = name_and_price.groups()
products_data.append((name.strip() + ' ' + name_and_price[0].strip(), name_and_price[1]))
for item in products_data:
product = Product()
loader = ProductLoader(item=product, response=response)
# try:
loader.add_value('url', response.url)
loader.add_value('name', item[0])
loader.add_value('price', item[1])
loader.add_value('sku', '')
yield loader.load_item()
# except IndexError:
# continue
| apache-2.0 | 8,825,509,672,500,765,000 | 34.732558 | 125 | 0.574544 | false |
taras-sereda/pytorch-seq2seq | scripts/generate_toy_data.py | 1 | 1473 | from __future__ import print_function
import argparse
import os
import shutil
import random
parser = argparse.ArgumentParser()
parser.add_argument('--dir', help="data directory", default="../data")
parser.add_argument('--max-len', help="max sequence length", default=10)
args = parser.parse_args()
def generate_dataset(root, name, size):
path = os.path.join(root, name)
if not os.path.exists(path):
os.mkdir(path)
# generate data file
data_path = os.path.join(path, 'data.txt')
with open(data_path, 'w') as fout:
for _ in range(size):
length = random.randint(1, args.max_len)
seq = []
for _ in range(length):
seq.append(str(random.randint(0, 9)))
fout.write("\t".join([" ".join(seq), " ".join(reversed(seq))]))
fout.write('\n')
# generate vocabulary
src_vocab = os.path.join(path, 'vocab.source')
with open(src_vocab, 'w') as fout:
fout.write("\n".join([str(i) for i in range(10)]))
tgt_vocab = os.path.join(path, 'vocab.target')
shutil.copy(src_vocab, tgt_vocab)
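# Each data.txt line is "<seq>\t<reversed seq>", e.g. (illustrative):
#   "3 1 4\t4 1 3"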
if __name__ == '__main__':
data_dir = args.dir
if not os.path.exists(data_dir):
os.mkdir(data_dir)
toy_dir = os.path.join(data_dir, 'toy_reverse')
if not os.path.exists(toy_dir):
os.mkdir(toy_dir)
generate_dataset(toy_dir, 'train', 10000)
generate_dataset(toy_dir, 'dev', 1000)
generate_dataset(toy_dir, 'test', 1000)
| apache-2.0 | 8,982,934,713,930,129,000 | 31.021739 | 75 | 0.606925 | false |
andrei4ka/fuel-web-redhat | tasklib/tasklib/actions/puppet.py | 4 | 2354 | # Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from tasklib.actions import action
from tasklib import exceptions
from tasklib import utils
log = logging.getLogger(__name__)
class PuppetAction(action.Action):
def run(self):
log.debug('Running puppet task %s with command %s',
self.task.name, self.command)
exit_code, stdout, stderr = utils.execute(self.command)
log.debug(
'Task %s with command %s\n returned code %s\n out %s err%s',
self.task.name, self.command, exit_code, stdout, stderr)
# 0 - no changes
        # 2 - some changes were applied successfully
# 4 - failures during transaction
# 6 - changes and failures
if exit_code not in [0, 2]:
raise exceptions.Failed()
return exit_code
@property
def manifest(self):
return (self.task.metadata.get('puppet_manifest') or
self.config['puppet_manifest'])
@property
def puppet_options(self):
if 'puppet_options' in self.task.metadata:
return self.task.metadata['puppet_options']
return self.config['puppet_options']
@property
def puppet_modules(self):
return (self.task.metadata.get('puppet_modules') or
self.config['puppet_modules'])
@property
def command(self):
cmd = ['puppet', 'apply', '--detailed-exitcodes']
if self.puppet_modules:
cmd.append('--modulepath={0}'.format(self.puppet_modules))
if self.puppet_options:
cmd.append(self.puppet_options)
if self.config['debug']:
cmd.append('--debug --verbose --evaltrace')
cmd.append(os.path.join(self.task.dir, self.manifest))
return ' '.join(cmd)
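    # Illustrative command this builds (paths hypothetical):
    #   puppet apply --detailed-exitcodes --modulepath=/etc/puppet/modules /tasks/deploy/site.pp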
| apache-2.0 | -5,012,215,877,036,417,000 | 33.617647 | 78 | 0.636788 | false |
thejdeep/CoAPthon | coverage_test_proxy.py | 4 | 55609 | from Queue import Queue
import random
import socket
import threading
import unittest
from coapclient import HelperClient
from coapforwardproxy import CoAPForwardProxy
from coapserver import CoAPServer
from coapthon import defines
from coapthon.messages.option import Option
from coapthon.messages.request import Request
from coapthon.messages.response import Response
from coapthon.serializer import Serializer
__author__ = 'Giacomo Tanganelli'
__version__ = "2.0"
class Tests(unittest.TestCase):
def setUp(self):
self.server_address = ("127.0.0.1", 5683)
self.current_mid = random.randint(1, 1000)
self.server_mid = random.randint(1000, 2000)
self.server = CoAPServer("127.0.0.1", 5684)
self.server_thread = threading.Thread(target=self.server.listen, args=(10,))
self.server_thread.start()
self.proxy = CoAPForwardProxy("127.0.0.1", 5683)
self.proxy_thread = threading.Thread(target=self.proxy.listen, args=(10,))
self.proxy_thread.start()
self.queue = Queue()
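        # Topology under test: the client targets the forward proxy on 5683,
        # and each request names the origin server (port 5684) via Proxy-Uri.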
def tearDown(self):
self.server.close()
self.server_thread.join(timeout=25)
self.server = None
self.proxy.close()
self.proxy_thread.join(timeout=25)
self.proxy = None
def _test_with_client(self, message_list): # pragma: no cover
client = HelperClient(self.server_address)
for message, expected in message_list:
if message is not None:
received_message = client.send_request(message)
if expected is not None:
if expected.type is not None:
self.assertEqual(received_message.type, expected.type)
if expected.mid is not None:
self.assertEqual(received_message.mid, expected.mid)
self.assertEqual(received_message.code, expected.code)
if expected.source is not None:
self.assertEqual(received_message.source, self.server_address)
if expected.token is not None:
self.assertEqual(received_message.token, expected.token)
if expected.payload is not None:
self.assertEqual(received_message.payload, expected.payload)
if expected.options:
self.assertEqual(len(received_message.options), len(expected.options))
for o in expected.options:
assert isinstance(o, Option)
option_value = getattr(expected, o.name.lower().replace("-", "_"))
option_value_rec = getattr(received_message, o.name.lower().replace("-", "_"))
self.assertEqual(option_value, option_value_rec)
client.stop()
def _test_with_client_observe(self, message_list): # pragma: no cover
client = HelperClient(self.server_address)
for message, expected in message_list:
if message is not None:
client.send_request(message, self.client_callback)
if expected is not None:
received_message = self.queue.get()
if expected.type is not None:
self.assertEqual(received_message.type, expected.type)
if expected.mid is not None:
self.assertEqual(received_message.mid, expected.mid)
self.assertEqual(received_message.code, expected.code)
if expected.source is not None:
self.assertEqual(received_message.source, self.server_address)
if expected.token is not None:
self.assertEqual(received_message.token, expected.token)
if expected.payload is not None:
self.assertEqual(received_message.payload, expected.payload)
if expected.options:
self.assertEqual(len(received_message.options), len(expected.options))
for o in expected.options:
assert isinstance(o, Option)
option_value = getattr(expected, o.name.lower().replace("-", "_"))
option_value_rec = getattr(received_message, o.name.lower().replace("-", "_"))
self.assertEqual(option_value, option_value_rec)
client.stop()
def client_callback(self, response):
print "Callback"
self.queue.put(response)
def _test_plugtest(self, message_list): # pragma: no cover
serializer = Serializer()
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
for message, expected in message_list:
if message is not None:
datagram = serializer.serialize(message)
sock.sendto(datagram, message.destination)
if expected is not None:
datagram, source = sock.recvfrom(4096)
received_message = serializer.deserialize(datagram, source)
print received_message.pretty_print()
print expected.pretty_print()
if expected.type is not None:
self.assertEqual(received_message.type, expected.type)
if expected.mid is not None:
self.assertEqual(received_message.mid, expected.mid)
self.assertEqual(received_message.code, expected.code)
if expected.source is not None:
self.assertEqual(received_message.source, source)
if expected.token is not None:
self.assertEqual(received_message.token, expected.token)
if expected.payload is not None:
self.assertEqual(received_message.payload, expected.payload)
if expected.options is not None:
self.assertEqual(received_message.options, expected.options)
for o in expected.options:
assert isinstance(o, Option)
option_value = getattr(expected, o.name.lower().replace("-", "_"))
option_value_rec = getattr(received_message, o.name.lower().replace("-", "_"))
self.assertEqual(option_value, option_value_rec)
sock.close()
def _test_datagram(self, message_list): # pragma: no cover
serializer = Serializer()
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
for message, expected in message_list:
if message is not None:
datagram, destination = message
sock.sendto(datagram, destination)
if expected is not None:
datagram, source = sock.recvfrom(4096)
received_message = serializer.deserialize(datagram, source)
if expected.type is not None:
self.assertEqual(received_message.type, expected.type)
if expected.mid is not None:
self.assertEqual(received_message.mid, expected.mid)
self.assertEqual(received_message.code, expected.code)
if expected.source is not None:
self.assertEqual(received_message.source, source)
if expected.token is not None:
self.assertEqual(received_message.token, expected.token)
if expected.payload is not None:
self.assertEqual(received_message.payload, expected.payload)
if expected.options is not None:
self.assertEqual(received_message.options, expected.options)
for o in expected.options:
assert isinstance(o, Option)
option_value = getattr(expected, o.name.lower().replace("-", "_"))
option_value_rec = getattr(received_message, o.name.lower().replace("-", "_"))
self.assertEqual(option_value, option_value_rec)
sock.close()
def test_get_forward(self):
print "TEST_GET_FORWARD"
path = "/basic"
req = Request()
req.code = defines.Codes.GET.number
req.uri_path = path
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.proxy_uri = "coap://127.0.0.1:5684/basic"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Basic Resource"
exchange1 = (req, expected)
self.current_mid += 1
self._test_with_client([exchange1])
# def test_separate(self):
# print "TEST_SEPARATE"
# path = "/separate"
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["CON"]
# expected._mid = None
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.max_age = 60
#
# exchange1 = (req, expected)
#
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "POST"
#
# expected = Response()
# expected.type = defines.Types["CON"]
# expected._mid = None
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.options = None
#
# exchange2 = (req, expected)
#
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.PUT.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "PUT"
#
# expected = Response()
# expected.type = defines.Types["CON"]
# expected._mid = None
# expected.code = defines.Codes.CHANGED.number
# expected.token = None
# expected.options = None
#
# exchange3 = (req, expected)
#
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.DELETE.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["CON"]
# expected._mid = None
# expected.code = defines.Codes.DELETED.number
# expected.token = None
#
# exchange4 = (req, expected)
#
# self.current_mid += 1
# self._test_with_client([exchange1, exchange2, exchange3, exchange4])
#
def test_post(self):
print "TEST_POST"
path = "/storage/new_res?id=1"
req = Request()
req.code = defines.Codes.POST.number
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "test"
req.add_if_none_match()
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res?id=1"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CREATED.number
expected.token = None
expected.payload = None
expected.location_path = "storage/new_res"
expected.location_query = "id=1"
exchange1 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.if_match = ["test", "not"]
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "test"
exchange2 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.PUT.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.if_match = ["not"]
req.payload = "not"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.PRECONDITION_FAILED.number
expected.token = None
exchange3 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.if_match = ["not"]
req.payload = "not"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.PRECONDITION_FAILED.number
expected.token = None
exchange4 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.PUT.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req._mid = self.current_mid
req.destination = self.server_address
req.add_if_none_match()
req.payload = "not"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.PRECONDITION_FAILED.number
expected.token = None
exchange5 = (req, expected)
self.current_mid += 1
self._test_with_client([exchange1, exchange2, exchange3, exchange4, exchange5])
def test_post_block(self):
print "TEST_POST_BLOCK"
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras sollicitudin fermentum ornare. " \
"Cras accumsan tellus quis dui lacinia eleifend. Proin ultrices rutrum orci vitae luctus. " \
"Nullam malesuada pretium elit, at aliquam odio vehicula in. Etiam nec maximus elit. " \
"Etiam at erat ac ex ornare feugiat. Curabitur sed malesuada orci, id aliquet nunc. Phasellus " \
"nec leo luctus, blandit lorem sit amet, interdum metus. Duis efficitur volutpat magna, ac " \
"ultricies nibh aliquet sit amet. Etiam tempor egestas augue in hendrerit. Nunc eget augue " \
"ultricies, dignissim lacus et, vulputate dolor. Nulla eros odio, fringilla vel massa ut, " \
"facilisis cursus quam. Fusce faucibus lobortis congue. Fusce consectetur porta neque, id " \
"sollicitudin velit maximus eu. Sed pharetra leo quam, vel finibus turpis cursus ac. " \
"Aenean ac nisi massa. Cras commodo arcu nec ante tristique ullamcorper. Quisque eu hendrerit" \
" urna. Cras fringilla eros ut nunc maximus, non porta nisl mollis. Aliquam in rutrum massa." \
" Praesent tristique turpis dui, at ultri"
req.block1 = (1, 1, 1024)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.REQUEST_ENTITY_INCOMPLETE.number
expected.token = None
expected.payload = None
exchange1 = (req, expected)
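        # block1 tuples are (block number, more-blocks flag, block size); the
        # transfer above starts at block 1, so the server must answer with
        # 4.08 Request Entity Incomplete.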
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras sollicitudin fermentum ornare. " \
"Cras accumsan tellus quis dui lacinia eleifend. Proin ultrices rutrum orci vitae luctus. " \
"Nullam malesuada pretium elit, at aliquam odio vehicula in. Etiam nec maximus elit. " \
"Etiam at erat ac ex ornare feugiat. Curabitur sed malesuada orci, id aliquet nunc. Phasellus " \
"nec leo luctus, blandit lorem sit amet, interdum metus. Duis efficitur volutpat magna, ac " \
"ultricies nibh aliquet sit amet. Etiam tempor egestas augue in hendrerit. Nunc eget augue " \
"ultricies, dignissim lacus et, vulputate dolor. Nulla eros odio, fringilla vel massa ut, " \
"facilisis cursus quam. Fusce faucibus lobortis congue. Fusce consectetur porta neque, id " \
"sollicitudin velit maximus eu. Sed pharetra leo quam, vel finibus turpis cursus ac. " \
"Aenean ac nisi massa. Cras commodo arcu nec ante tristique ullamcorper. Quisque eu hendrerit" \
" urna. Cras fringilla eros ut nunc maximus, non porta nisl mollis. Aliquam in rutrum massa." \
" Praesent tristique turpis dui, at ultri"
req.block1 = (0, 1, 1024)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (0, 1, 1024)
exchange2 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "a imperdiet nisl. Quisque a iaculis libero, id tempus lacus. Aenean convallis est non justo " \
"consectetur, a hendrerit enim consequat. In accumsan ante a egestas luctus. Etiam quis neque " \
"nec eros vestibulum faucibus. Nunc viverra ipsum lectus, vel scelerisque dui dictum a. Ut orci " \
"enim, ultrices a ultrices nec, pharetra in quam. Donec accumsan sit amet eros eget fermentum."
req.block1 = (1, 1, 64)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (1, 1, 64)
exchange3 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "a imperdiet nisl. Quisque a iaculis libero, id tempus lacus. Aenean convallis est non justo " \
"consectetur, a hendrerit enim consequat. In accumsan ante a egestas luctus. Etiam quis neque " \
"nec eros vestibulum faucibus. Nunc viverra ipsum lectus, vel scelerisque dui dictum a. Ut orci " \
"enim, ultrices a ultrices nec, pharetra in quam. Donec accumsan sit amet eros eget fermentum."
req.block1 = (3, 1, 64)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.REQUEST_ENTITY_INCOMPLETE.number
expected.token = None
expected.payload = None
exchange4 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/storage/new_res"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "a imperdiet nisl. Quisque a iaculis libero, id tempus lacus. Aenean convallis est non justo " \
"consectetur, a hendrerit enim consequat. In accumsan ante a egestas luctus. Etiam quis neque " \
"nec eros vestibulum faucibus. Nunc viverra ipsum lectus, vel scelerisque dui dictum a. Ut orci " \
"enim, ultrices a ultrices nec, pharetra in quam. Donec accumsan sit amet eros eget fermentum."
req.block1 = (2, 0, 64)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CREATED.number
expected.token = None
expected.payload = None
expected.location_path = "storage/new_res"
exchange5 = (req, expected)
self.current_mid += 1
self._test_plugtest([exchange1, exchange2, exchange3, exchange4, exchange5])
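    # Reads /big with block-wise GETs, shrinking the block2 size on each
    # request (512 down to 16, then 1024) to check that the server honours
    # a block size the client re-negotiates mid-transfer.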
def test_get_block(self):
print "TEST_GET_BLOCK"
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (0, 0, 512)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (0, 1, 512)
exchange1 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (1, 0, 256)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (1, 1, 256)
exchange2 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (2, 0, 128)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (2, 1, 128)
exchange3 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (3, 0, 64)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (3, 1, 64)
exchange4 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (4, 0, 32)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (4, 1, 32)
exchange5 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (5, 0, 16)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (5, 1, 16)
exchange6 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (6, 0, 1024)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (6, 1, 1024)
exchange7 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = None
req.block2 = (7, 0, 1024)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
expected.block2 = (7, 0, 1024)
exchange8 = (req, expected)
self.current_mid += 1
self._test_plugtest([exchange1, exchange2, exchange3, exchange4, exchange5, exchange6, exchange7, exchange8])
#self._test_plugtest([exchange1])
def test_post_block_big(self):
print "TEST_POST_BLOCK_BIG"
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "Lorem ipsum dolo"
req.block1 = (0, 1, 16)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (0, 1, 16)
exchange1 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "r sit amet, consectetur adipisci"
req.block1 = (1, 1, 32)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (1, 1, 32)
exchange2 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "ng elit. Sed ut ultrices ligula. Pellentesque purus augue, cursu"
req.block1 = (2, 1, 64)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (2, 1, 64)
exchange3 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "s ultricies est in, vehicula congue metus. Vestibulum vel justo lacinia, porttitor quam vitae, " \
"feugiat sapien. Quisque finibus, "
req.block1 = (3, 1, 128)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (3, 1, 128)
exchange4 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "nisi vitae rhoncus malesuada, augue mauris dapibus tellus, sit amet venenatis libero" \
" libero sed lorem. In pharetra turpis sed eros porta mollis. Quisque dictum dolor nisl," \
" imperdiet tincidunt augue malesuada vitae. Donec non felis urna. Suspendisse at hend"
req.block1 = (4, 1, 256)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (4, 1, 256)
exchange5 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "rerit ex, quis aliquet ante. Vivamus ultrices dolor at elit tincidunt, eget fringilla " \
"ligula vestibulum. In molestie sagittis nibh, ut efficitur tellus faucibus non. Maecenas " \
"posuere elementum faucibus. Morbi nisi diam, molestie non feugiat et, elementum eget magna." \
" Donec vel sem facilisis quam viverra ultrices nec eu lacus. Sed molestie nisi id ultrices " \
"interdum. Curabitur pharetra sed tellus in dignissim. Duis placerat aliquam metus, volutpat " \
"elementum augue aliquam a. Nunc sed dolor at orci maximus portt"
req.block1 = (5, 1, 512)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTINUE.number
expected.token = None
expected.payload = None
expected.block1 = (5, 1, 512)
exchange6 = (req, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "itor ac sit amet eros. Mauris et nisi in tortor pharetra rhoncus sit amet hendrerit metus. " \
"Integer laoreet placerat cursus. Nam a nulla ex. Donec laoreet sagittis libero quis " \
"imperdiet. Vivamus facilisis turpis nec rhoncus venenatis. Duis pulvinar tellus vel quam " \
"maximus imperdiet. Mauris eget nibh orci. Duis ut cursus nibh. Nulla sed commodo elit. " \
"Suspendisse ac eros lacinia, mattis turpis at, porttitor justo. Vivamus molestie " \
"tincidunt libero. Etiam porttitor lacus odio, at lobortis tortor scelerisque nec. " \
"Nullam non ante vel nisi ultrices consectetur. Maecenas massa felis, tempor eget " \
"malesuada eget, pretium eu sapien. Vivamus dapibus ante erat, non faucibus orci sodales " \
"sit amet. Cras magna felis, sodales eget magna sed, eleifend rutrum ligula. Vivamus interdum " \
"enim enim, eu facilisis tortor dignissim quis. Ut metus nulla, mattis non lorem et, " \
"elementum ultrices orci. Quisque eleifend, arcu vitae ullamcorper pulvinar, ipsum ex " \
"sodales arcu, eget consectetur mauris metus ac tortor. Donec id sem felis. Maur"
req.block1 = (6, 0, 1024)
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CHANGED.number
expected.token = None
expected.payload = None
expected.location_path = "big"
exchange7 = (req, expected)
self.current_mid += 1
self._test_plugtest([exchange1, exchange2, exchange3, exchange4, exchange5, exchange6, exchange7])
# def test_options(self):
# print "TEST_OPTIONS"
# path = "/storage/new_res"
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# option = Option()
# option.number = defines.OptionRegistry.ETAG.number
# option.value = "test"
# req.add_option(option)
# req.del_option(option)
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.payload = None
# expected.location_path = "storage/new_res"
#
# exchange1 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# option = Option()
# option.number = defines.OptionRegistry.ETAG.number
# option.value = "test"
# req.add_option(option)
# req.del_option_by_name("ETag")
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.payload = None
# expected.location_path = "storage/new_res"
#
# exchange2 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# option = Option()
# option.number = defines.OptionRegistry.ETAG.number
# option.value = "test"
# req.add_option(option)
# del req.etag
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.payload = None
# expected.location_path = "storage/new_res"
#
# exchange3 = (req, expected)
# self.current_mid += 1
#
# self._test_with_client([exchange1, exchange2, exchange3])
#
# def test_content_type(self):
# print "TEST_CONTENT_TYPE"
# path = "/storage/new_res"
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "<value>test</value>"
# req.content_type = defines.Content_types["application/xml"]
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.payload = None
# expected.location_path = "storage/new_res"
#
# exchange1 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.payload = "Basic Resource"
#
# exchange2 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.PUT.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CHANGED.number
# expected.token = None
# expected.payload = None
#
# exchange3 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.payload = "test"
#
# exchange4 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.accept = defines.Content_types["application/xml"]
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.payload = "<value>test</value>"
#
# exchange5 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.accept = defines.Content_types["application/json"]
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.NOT_ACCEPTABLE.number
# expected.token = None
# expected.payload = None
# expected.content_type = defines.Content_types["application/json"]
#
# exchange6 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = "/xml"
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.payload = "<value>0</value>"
# expected.content_type = defines.Content_types["application/xml"]
#
# print(expected.pretty_print())
#
# exchange7 = (req, expected)
# self.current_mid += 1
#
# self._test_with_client([exchange1, exchange2, exchange3, exchange4, exchange5, exchange6, exchange7])
#
# def test_ETAG(self):
# print "TEST_ETAG"
# path = "/etag"
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.payload = "ETag resource"
# expected.etag = "0"
#
# exchange1 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.payload = None
# expected.location_path = path
# expected.etag = "1"
#
# exchange2 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.etag = "1"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.VALID.number
# expected.token = None
# expected.payload = "test"
# expected.etag = "1"
#
# exchange3 = (req, expected)
# self.current_mid += 1
#
# self._test_with_client([exchange1, exchange2, exchange3])
#
# def test_child(self):
# print "TEST_CHILD"
# path = "/child"
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CREATED.number
# expected.token = None
# expected.payload = None
# expected.location_path = path
#
# exchange1 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CONTENT.number
# expected.token = None
# expected.payload = "test"
#
# exchange2 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.PUT.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "testPUT"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.CHANGED.number
# expected.token = None
# expected.payload = None
#
# exchange3 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.DELETE.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.DELETED.number
# expected.token = None
# expected.payload = None
#
# exchange4 = (req, expected)
# self.current_mid += 1
#
# self._test_with_client([exchange1, exchange2, exchange3, exchange4])
#
# def test_not_found(self):
# print "TEST_not_found"
# path = "/not_found"
#
# req = Request()
# req.code = defines.Codes.GET.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.token = 100
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.NOT_FOUND.number
# expected.token = "100"
# expected.payload = None
#
# exchange1 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.POST.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "test"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.METHOD_NOT_ALLOWED.number
# expected.token = None
#
# exchange2 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.PUT.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
# req.payload = "testPUT"
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.NOT_FOUND.number
# expected.token = None
# expected.payload = None
#
# exchange3 = (req, expected)
# self.current_mid += 1
#
# req = Request()
# req.code = defines.Codes.DELETE.number
# req.uri_path = path
# req.type = defines.Types["CON"]
# req._mid = self.current_mid
# req.destination = self.server_address
#
# expected = Response()
# expected.type = defines.Types["ACK"]
# expected._mid = self.current_mid
# expected.code = defines.Codes.NOT_FOUND.number
# expected.token = None
# expected.payload = None
#
# exchange4 = (req, expected)
# self.current_mid += 1
#
# self._test_with_client([exchange1, exchange2, exchange3, exchange4])
#
def test_invalid(self):
print "TEST_INVALID"
        # invalid protocol version (0 instead of 1)
req = ("\x00\x01\x8c\xda", self.server_address)
expected = Response()
expected.type = defines.Types["RST"]
expected._mid = None
expected.code = defines.Codes.BAD_REQUEST.number
exchange1 = (req, expected)
        # truncated datagram (header shorter than 4 bytes)
req = ("\x40", self.server_address)
expected = Response()
expected.type = defines.Types["RST"]
expected._mid = None
expected.code = defines.Codes.BAD_REQUEST.number
exchange2 = (req, expected)
        # unknown request code (0.05)
req = ("\x40\x05\x8c\xda", self.server_address)
expected = Response()
expected.type = defines.Types["RST"]
expected._mid = None
expected.code = defines.Codes.BAD_REQUEST.number
exchange3 = (req, expected)
        # malformed option (declared length runs past the end of the datagram)
req = ("\x40\x01\x8c\xda\x94", self.server_address)
expected = Response()
expected.type = defines.Types["RST"]
expected._mid = None
expected.code = defines.Codes.BAD_REQUEST.number
exchange4 = (req, expected)
        # payload marker (0xFF) with no payload bytes after it
req = ("\x40\x02\x8c\xda\x75\x62\x61\x73\x69\x63\xff", self.server_address)
expected = Response()
expected.type = defines.Types["RST"]
expected._mid = None
expected.code = defines.Codes.BAD_REQUEST.number
exchange5 = (req, expected)
self._test_datagram([exchange1, exchange2, exchange3, exchange4, exchange5])
def test_post_block_big_client(self):
print "TEST_POST_BLOCK_BIG_CLIENT"
req = Request()
req.code = defines.Codes.POST.number
req.proxy_uri = "coap://127.0.0.1:5684/big"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.payload = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras sollicitudin fermentum ornare. " \
"Cras accumsan tellus quis dui lacinia eleifend. Proin ultrices rutrum orci vitae luctus. " \
"Nullam malesuada pretium elit, at aliquam odio vehicula in. Etiam nec maximus elit. " \
"Etiam at erat ac ex ornare feugiat. Curabitur sed malesuada orci, id aliquet nunc. Phasellus " \
"nec leo luctus, blandit lorem sit amet, interdum metus. Duis efficitur volutpat magna, ac " \
"ultricies nibh aliquet sit amet. Etiam tempor egestas augue in hendrerit. Nunc eget augue " \
"ultricies, dignissim lacus et, vulputate dolor. Nulla eros odio, fringilla vel massa ut, " \
"facilisis cursus quam. Fusce faucibus lobortis congue. Fusce consectetur porta neque, id " \
"sollicitudin velit maximus eu. Sed pharetra leo quam, vel finibus turpis cursus ac. " \
"Aenean ac nisi massa. Cras commodo arcu nec ante tristique ullamcorper. Quisque eu hendrerit" \
" urna. Cras fringilla eros ut nunc maximus, non porta nisl mollis. Aliquam in rutrum massa." \
" Praesent tristique turpis dui, at ultricies lorem fermentum at. Vivamus sit amet ornare neque, " \
"a imperdiet nisl. Quisque a iaculis libero, id tempus lacus. Aenean convallis est non justo " \
"consectetur, a hendrerit enim consequat. In accumsan ante a egestas luctus. Etiam quis neque " \
"nec eros vestibulum faucibus. Nunc viverra ipsum lectus, vel scelerisque dui dictum a. Ut orci " \
"enim, ultrices a ultrices nec, pharetra in quam. Donec accumsan sit amet eros eget fermentum." \
"Vivamus ut odio ac odio malesuada accumsan. Aenean vehicula diam at tempus ornare. Phasellus " \
"dictum mauris a mi consequat, vitae mattis nulla fringilla. Ut laoreet tellus in nisl efficitur," \
" a luctus justo tempus. Fusce finibus libero eget velit finibus iaculis. Morbi rhoncus purus " \
"vel vestibulum ullamcorper. Sed ac metus in urna fermentum feugiat. Nulla nunc diam, sodales " \
"aliquam mi id, varius porta nisl. Praesent vel nibh ac turpis rutrum laoreet at non odio. " \
"Phasellus ut posuere mi. Suspendisse malesuada velit nec mauris convallis porta. Vivamus " \
"sed ultrices sapien, at cras amet."
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CHANGED.number
expected.token = None
expected.payload = None
exchange1 = (req, expected)
self.current_mid += 1
self._test_with_client([exchange1])
def test_observe_client(self):
print "TEST_OBSERVE_CLIENT"
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/basic"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.observe = 0
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = None
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = None
exchange1 = (req, expected)
self.current_mid += 1
self._test_with_client_observe([exchange1])
def test_duplicate(self):
print "TEST_DUPLICATE"
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/basic"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
self.current_mid += 1
self._test_plugtest([(req, expected), (req, expected)])
def test_duplicate_not_completed(self):
print "TEST_DUPLICATE_NOT_COMPLETED"
req = Request()
req.code = defines.Codes.GET.number
req.proxy_uri = "coap://127.0.0.1:5684/long"
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = None
expected.token = None
expected2 = Response()
expected2.type = defines.Types["CON"]
expected2._mid = None
expected2.code = defines.Codes.CONTENT.number
expected2.token = None
self.current_mid += 1
self._test_plugtest([(req, None), (req, expected), (None, expected2)])
if __name__ == '__main__':
unittest.main()
| mit | 1,140,289,532,587,326,200 | 38.133709 | 123 | 0.576184 | false |
INTI-CMNB-FPGA/fpga_lib | scripts/plotter.py | 1 | 1557 | #!/usr/bin/python
#
# Plotter
#
# Authors:
# * Bruno Valinoti
# * Rodrigo A. Melo
#
# Copyright (c) 2018 Authors and INTI
# Distributed under the BSD 3-Clause License
#
import numpy as np
import matplotlib.pyplot as plot
import matplotlib as mpl
import argparse, sys
## Parsing the command line ###################################################
version = 'Plotter v1.0'
parser = argparse.ArgumentParser(
description='A simple data plotter.'
)
parser.add_argument(
'-v', '--version',
action='version',
version=version
)
parser.add_argument(
'filename',
metavar='FILENAME',
nargs='+',
    help='data file(s) to plot; each is read with numpy.loadtxt'
)
parser.add_argument(
'-x','--xlabel',
metavar='XLABEL',
default='Sample'
)
parser.add_argument(
'-y','--ylabel',
metavar='YLABEL',
default='Value'
)
parser.add_argument(
'-p','--points',
action='store_true'
)
parser.add_argument(
'-c','--circles',
action='store_true'
)
parser.add_argument(
'-l','--linewidth',
    metavar='WIDTH',
    default=1,
type=int
)
options = parser.parse_args()
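# Example invocations (file names are hypothetical):
#   python plotter.py -l 2 trace_a.txt trace_b.txt   # two traces, thicker line
#   python plotter.py -p samples.txt                 # point markers, no line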
## Plot the data #############################################################
plot.xlabel(options.xlabel)
plot.ylabel(options.ylabel)
#print(mpl.rcParams.keys())
mpl.rcParams['lines.linewidth'] = options.linewidth
if options.points or options.circles:
    mpl.rcParams['lines.linestyle'] = 'None'  # the string 'None' (not the None object) disables the line
if options.points:
mpl.rcParams['lines.marker'] = '.'
if options.circles:
mpl.rcParams['lines.marker'] = 'o'
for file in options.filename:
data = np.loadtxt(file)
plot.plot(data)
plot.show()
| bsd-3-clause | 1,320,421,314,315,734,000 | 16.693182 | 79 | 0.621066 | false |
gitpan/Games-Solitaire-BlackHole-Solver | contrib/make_pysol_board.py | 2 | 20866 | #!/usr/bin/python
#
# make_pysol_freecell_board.py - Program to generate the boards of
# PySol for input into Freecell Solver.
#
# Usage: make_pysol_freecell_board.py [board number] | fc-solve
#
# Or on non-UNIXes:
#
# python make_pysol_freecell_board.py [board number] | fc-solve
#
# This program is platform independent and will generate the same results
# on all architectures and operating systems.
#
# Based on the code by Markus Franz Xaver Johannes Oberhumer.
# Modified by Shlomi Fish, 2000
#
# Since much of the code here is ripped from the actual PySol code, this
# program is distributed under the GNU General Public License.
#
#
#
## vim:ts=4:et:nowrap
##
##---------------------------------------------------------------------------##
##
## PySol -- a Python Solitaire game
##
## Copyright (C) 2000 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 1999 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 1998 Markus Franz Xaver Johannes Oberhumer
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; see the file COPYING.
## If not, write to the Free Software Foundation, Inc.,
## 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
##
## Markus F.X.J. Oberhumer
## <[email protected]>
## http://wildsau.idv.uni-linz.ac.at/mfx/pysol.html
##
##---------------------------------------------------------------------------##
# imports
import sys, os, re, string, time, types
import random
# PySol imports
# /***********************************************************************
# // Abstract PySol Random number generator.
# //
# // We use a seed of type long in the range [0, MAX_SEED].
# ************************************************************************/
class PysolRandom:
MAX_SEED = 0L
ORIGIN_UNKNOWN = 0
ORIGIN_RANDOM = 1
ORIGIN_PREVIEW = 2 # random from preview
ORIGIN_SELECTED = 3 # manually entered
ORIGIN_NEXT_GAME = 4 # "Next game number"
def __init__(self, seed=None):
if seed is None:
seed = self._getRandomSeed()
self.initial_seed = self.setSeed(seed)
self.origin = self.ORIGIN_UNKNOWN
def __str__(self):
return self.str(self.initial_seed)
def reset(self):
self.seed = self.initial_seed
def getSeed(self):
return self.seed
def setSeed(self, seed):
seed = self._convertSeed(seed)
if type(seed) is not types.LongType:
raise TypeError, "seeds must be longs"
if not (0L <= seed <= self.MAX_SEED):
raise ValueError, "seed out of range"
self.seed = seed
return seed
def copy(self):
random = PysolRandom(0L)
random.__class__ = self.__class__
random.__dict__.update(self.__dict__)
return random
#
# implementation
#
def choice(self, seq):
return seq[int(self.random() * len(seq))]
# Get a random integer in the range [a, b] including both end points.
def randint(self, a, b):
return a + int(self.random() * (b+1-a))
#
# subclass responsibility
#
# Get the next random number in the range [0.0, 1.0).
def random(self):
        raise NotImplementedError  # subclass responsibility
#
# subclass overrideable
#
def _convertSeed(self, seed):
return long(seed)
def increaseSeed(self, seed):
if seed < self.MAX_SEED:
return seed + 1L
return 0L
def _getRandomSeed(self):
t = long(time.time() * 256.0)
t = (t ^ (t >> 24)) % (self.MAX_SEED + 1L)
return t
#
# shuffle
# see: Knuth, Vol. 2, Chapter 3.4.2, Algorithm P
# see: FAQ of sci.crypt: "How do I shuffle cards ?"
#
def shuffle(self, seq):
n = len(seq) - 1
while n > 0:
j = self.randint(0, n)
seq[n], seq[j] = seq[j], seq[n]
n = n - 1
# /***********************************************************************
# // Linear Congruential random generator
# //
# // Knuth, Donald.E., "The Art of Computer Programming,", Vol 2,
# // Seminumerical Algorithms, Third Edition, Addison-Wesley, 1998,
# // p. 106 (line 26) & p. 108
# ************************************************************************/
class LCRandom64(PysolRandom):
MAX_SEED = 0xffffffffffffffffL # 64 bits
def str(self, seed):
s = repr(long(seed))[:-1]
s = "0"*(20-len(s)) + s
return s
def random(self):
self.seed = (self.seed*6364136223846793005L + 1L) & self.MAX_SEED
return ((self.seed >> 21) & 0x7fffffffL) / 2147483648.0
# /***********************************************************************
# // Linear Congruential random generator
# // In PySol this is only used for 0 <= seed <= 32000.
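# // (This is the LCG used by the Microsoft C runtime's rand(), which is
# // why deals 1..32000 reproduce the original Windows Freecell numbering.)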
# ************************************************************************/
class LCRandom31(PysolRandom):
MAX_SEED = 0x7fffffffL # 31 bits
def str(self, seed):
return "%05d" % int(seed)
def random(self):
self.seed = (self.seed*214013L + 2531011L) & self.MAX_SEED
return (self.seed >> 16) / 32768.0
def randint(self, a, b):
self.seed = (self.seed*214013L + 2531011L) & self.MAX_SEED
return a + (int(self.seed >> 16) % (b+1-a))
# ************************************************************************
# * Mersenne Twister random number generator
# * uses standart python module `random'
# ************************************************************************
class BasicRandom:
#MAX_SEED = 0L
#MAX_SEED = 0xffffffffffffffffL # 64 bits
MAX_SEED = 100000000000000000000L # 20 digits
ORIGIN_UNKNOWN = 0
ORIGIN_RANDOM = 1
ORIGIN_PREVIEW = 2 # random from preview
ORIGIN_SELECTED = 3 # manually entered
ORIGIN_NEXT_GAME = 4 # "Next game number"
def __str__(self):
return self.str(self.initial_seed)
def str(self, seed):
return '%020d' % seed
def reset(self):
        raise NotImplementedError  # subclass responsibility
def copy(self):
random = self.__class__(0L)
random.__dict__.update(self.__dict__)
return random
def increaseSeed(self, seed):
if seed < self.MAX_SEED:
return seed + 1L
return 0L
def _getRandomSeed(self):
t = long(time.time() * 256.0)
t = (t ^ (t >> 24)) % (self.MAX_SEED + 1L)
return t
class MTRandom(BasicRandom, random.Random):
def setSeed(self, seed):
random.Random.__init__(self, seed)
self.initial_seed = seed
self.initial_state = self.getstate()
self.origin = self.ORIGIN_UNKNOWN
def reset(self):
self.setstate(self.initial_state)
class Card:
ACE = 1
KING = 13
def __init__(self, id, rank, suit, print_ts):
self.id = id
self.rank = rank
self.suit = suit
self.flipped = False
self.print_ts = print_ts
self.empty = False
def is_king(self):
return self.rank == self.KING
def is_ace(self):
return self.rank == self.ACE
def rank_s(self):
s = "0A23456789TJQK"[self.rank]
if (not self.print_ts) and s == "T":
s = "10"
return s
def suit_s(self):
return "CSHD"[self.suit];
def to_s(self):
if self.empty:
return "-"
ret = ""
ret = ret + self.rank_s()
ret = ret + self.suit_s()
if self.flipped:
ret = "<" + ret + ">"
return ret
def found_s(self):
return self.suit_s() + "-" + self.rank_s()
def flip(self, flipped=True):
new_card = Card(self.id, self.rank, self.suit, self.print_ts)
new_card.flipped = flipped
return new_card
def is_empty(self):
return self.empty
class Columns:
def __init__(self, num):
self.num = num
cols = []
for i in range(num):
cols.append([])
self.cols = cols
def add(self, idx, card):
self.cols[idx].append(card)
def rev(self):
self.cols.reverse()
def output(self):
for column in self.cols:
print column_to_string(column)
class Board:
def __init__(self, num_columns, with_freecells=False,
with_talon=False, with_foundations=False):
self.with_freecells = with_freecells
self.with_talon = with_talon
self.with_foundations = with_foundations
self.columns = Columns(num_columns)
if (self.with_freecells):
self.freecells = []
if (self.with_talon):
self.talon = []
if (self.with_foundations):
            self.foundations = [empty_card() for _ in range(4)]
def reverse_cols(self):
return self.columns.rev()
def add(self, idx, card):
return self.columns.add(idx, card)
def print_freecells(self):
print "FC: " + column_to_string(self.freecells)
def print_talon(self):
print "Talon: " + column_to_string(self.talon)
def print_foundations(self):
cells = []
for f in [2,0,3,1]:
if not self.foundations[f].is_empty():
cells.append(self.foundations[f].found_s())
if len(cells):
print "Foundations:" + ("".join(map(lambda s: " "+s, cells)))
def output(self):
if (self.with_talon):
self.print_talon()
if (self.with_foundations):
self.print_foundations()
if (self.with_freecells):
self.print_freecells()
self.columns.output()
def add_freecell(self, card):
if not self.with_freecells:
raise "Layout does not have freecells!"
self.freecells.append(card)
def add_talon(self, card):
if not self.with_talon:
raise "Layout does not have a talon!"
self.talon.append(card)
def put_into_founds(self, card):
if not self.with_foundations:
raise "Layout does not have foundations!"
if ((self.foundations[card.suit].rank+1) == card.rank):
self.foundations[card.suit] = card
return True
else:
return False
def empty_card():
ret = Card(0,0,0,1)
ret.empty = True
return ret
def createCards(num_decks, print_ts):
cards = []
for deck in range(num_decks):
id = 0
for suit in range(4):
for rank in range(13):
cards.append(Card(id, rank+1, suit, print_ts))
id = id + 1
return cards
def column_to_list_of_strings(col):
return map( lambda c: c.to_s(), col)
def column_to_string(col):
return " ".join(column_to_list_of_strings(col))
def flip_card(card_str, flip):
if flip:
return "<" + card_str + ">"
else:
return card_str
def shuffle(orig_cards, game_num, is_pysol_fc_deals):
if game_num <= 32000:
r = LCRandom31()
r.setSeed(game_num)
fcards = []
if (len(orig_cards) == 52):
for i in range(13):
for j in (0, 39, 26, 13):
fcards.append(orig_cards[i + j])
orig_cards = fcards
r.shuffle(orig_cards)
else:
r = 0
if (is_pysol_fc_deals):
r = MTRandom()
else:
r = LCRandom64()
r.setSeed(game_num)
r.shuffle(orig_cards)
return orig_cards
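# Usage sketch (not part of the program flow): shuffle can be called directly,
# e.g. to deal Microsoft Freecell game #617 with a single 52-card deck:
#   cards = shuffle(createCards(1, 0), 617, 0)
# Game.deal() reverses the returned list before dealing it out.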
class Game:
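    # Maps each canonical dealing routine below to the game-type aliases that
    # share it; None means the key is its own only alias. __init__ inverts
    # this mapping into self.games_map for lookup().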
REVERSE_MAP = \
{
"freecell":
[ "freecell", "forecell", "bakers_game",
"ko_bakers_game", "kings_only_bakers_game", "relaxed_freecell",
"eight_off" ],
"der_katz":
[ "der_katz", "der_katzenschwantz", "die_schlange"],
"seahaven":
[ "seahaven_towers", "seahaven", "relaxed_seahaven", "relaxed_seahaven_towers" ],
"bakers_dozen" : None,
"gypsy" : None,
"klondike" : [ "klondike", "klondike_by_threes", "casino_klondike", "small_harp", "thumb_and_pouch", "vegas_klondike", "whitehead" ],
"simple_simon" : None,
"yukon" : None,
"beleaguered_castle" : [ "beleaguered_castle", "streets_and_alleys", "citadel" ],
"fan" : None,
"black_hole" : None,
}
def __init__(self, game_id, game_num, is_pysol_fc_deals, print_ts):
mymap = {}
for k in self.REVERSE_MAP.keys():
if self.REVERSE_MAP[k] is None:
mymap[k] = k
else:
for alias in self.REVERSE_MAP[k]:
mymap[alias] = k
self.games_map = mymap
self.game_id = game_id
self.game_num = game_num
self.print_ts = print_ts
self.is_pysol_fc_deals = is_pysol_fc_deals
def print_layout(self):
game_class = self.lookup()
if not game_class:
raise "Unknown game type " + self.game_id + "\n"
self.deal()
getattr(self, game_class)()
self.board.output()
def lookup(self):
return self.games_map[self.game_id];
def is_two_decks(self):
return self.game_id in ("der_katz", "der_katzenschwantz", "die_schlange", "gypsy")
def get_num_decks(self):
if self.is_two_decks():
return 2
else:
return 1
def deal(self):
orig_cards = createCards(self.get_num_decks(), self.print_ts)
orig_cards = shuffle(orig_cards, self.game_num, self.is_pysol_fc_deals)
cards = orig_cards
cards.reverse()
self.cards = cards
self.card_idx = 0
return True
def __iter__(self):
return self
def no_more_cards(self):
return self.card_idx >= len(self.cards)
def next(self):
if self.no_more_cards():
raise StopIteration
c = self.cards[self.card_idx]
self.card_idx = self.card_idx + 1
return c
def new_cards(self, cards):
self.cards = cards
self.card_idx = 0
def add(self, idx, card):
return self.board.add(idx, card)
def add_freecell(self, card):
return self.board.add_freecell(card)
def cyclical_deal(game, num_cards, num_cols, flipped=False):
for i in range(num_cards):
game.add(i%num_cols, game.next().flip(flipped=flipped))
return i
def add_all_to_talon(game):
for card in game:
game.board.add_talon(card)
### These are the games variants:
### Each one is a callback.
def der_katz(game):
if (game.game_id == "die_schlange"):
print "Foundations: H-A S-A D-A C-A H-A S-A D-A C-A"
game.board = Board(9)
col_idx = 0
for card in game:
if card.is_king():
col_idx = col_idx + 1
if not ((game.game_id == "die_schlange") and (card.rank == 1)):
game.add(col_idx, card)
def freecell(game):
is_fc = (game.game_id in ('forecell', 'eight_off'))
game.board = Board(8, with_freecells=is_fc)
if is_fc:
game.cyclical_deal(48, 8)
for card in game:
game.add_freecell(card)
if game.game_id == "eight_off":
game.add_freecell(empty_card())
else:
game.cyclical_deal(52, 8)
def seahaven(game):
game.board = Board(10, with_freecells=True)
game.add_freecell(empty_card())
game.cyclical_deal(50, 10)
for card in game:
game.add_freecell(card)
def bakers_dozen(game):
i, n = 0, 13
kings = []
cards = game.cards
cards.reverse()
for c in cards:
if c.is_king():
kings.append(i)
i = i + 1
for i in kings:
j = i % n
while j < i:
if not cards[j].is_king():
cards[i], cards[j] = cards[j], cards[i]
break
j = j + n
game.new_cards(cards)
game.board = Board(13)
game.cyclical_deal(52, 13)
def gypsy(game):
num_cols = 8
game.board = Board(num_cols, with_talon=True)
game.cyclical_deal(num_cols*2, num_cols, flipped=True)
game.cyclical_deal(num_cols, num_cols, flipped=False)
game.add_all_to_talon()
def klondike(game):
num_cols = 7
game.board = Board(num_cols, with_talon=True)
card_num = 0
for r in range(1,num_cols):
for s in range(num_cols-r):
game.add(s, game.next().flip())
game.cyclical_deal(num_cols, num_cols)
game.add_all_to_talon()
if not (game.game_id == "small_harp"):
game.board.reverse_cols()
def simple_simon(game):
game.board = Board(10)
num_cards = 9
while num_cards >= 3:
for s in range(num_cards):
game.add(s, game.next())
num_cards = num_cards - 1
for s in range(10):
game.add(s, game.next())
def fan(game):
game.board = Board(18)
game.cyclical_deal(52-1, 17)
game.add(17, game.next())
def _shuffleHookMoveSorter(self, cards, func, ncards):
# note that we reverse the cards, so that smaller sort_orders
# will be nearer to the top of the Talon
sitems, i = [], len(cards)
for c in cards[:]:
select, sort_order = func(c)
if select:
cards.remove(c)
sitems.append((sort_order, i, c))
if len(sitems) >= ncards:
break
i = i - 1
sitems.sort()
sitems.reverse()
scards = map(lambda item: item[2], sitems)
return cards, scards
def _shuffleHookMoveToBottom(self, cards, func, ncards=999999):
# move cards to bottom of the Talon (i.e. last cards to be dealt)
cards, scards = self._shuffleHookMoveSorter(cards, func, ncards)
ret = scards + cards
return ret
def black_hole(game):
game.board = Board(17)
# move Ace to bottom of the Talon (i.e. last cards to be dealt)
game.cards = game._shuffleHookMoveToBottom(game.cards, lambda c: (c.id == 13, c.suit), 1)
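    # card id 13 is the ace of spades: createCards numbers ids suit-major over
    # "CSHD", so ids 13..25 are the spades and 13 itself is the spade ace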
game.next()
game.cyclical_deal(52-1, 17)
print "Foundations: AS"
def beleaguered_castle(game):
aces_up = game.game_id in ("beleaguered_castle", "citadel")
game.board = Board(8, with_foundations=True)
if aces_up:
new_cards = []
for c in game:
if c.is_ace():
game.board.put_into_founds(c)
else:
new_cards.append(c)
game.new_cards(new_cards)
for i in range(6):
for s in range(8):
c = game.next()
if (game.game_id == "citadel") and game.board.put_into_founds(c):
# Already dealt with this card
True
else:
game.add(s, c)
if game.no_more_cards():
break
if (game.game_id == "streets_and_alleys"):
game.cyclical_deal(4, 4)
def yukon(game):
num_cols = 7
game.board = Board(num_cols)
for i in range(1, num_cols):
for j in range(i, num_cols):
game.add(j, game.next().flip())
for i in range(4):
for j in range(1,num_cols):
game.add(j, game.next())
game.cyclical_deal(num_cols, num_cols)
def shlomif_main(args):
print_ts = 0
pysol_fc_deals = 0
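    # Flag parsing below consumes each flag by popping args[0], shifting the
    # list left so that args[1] is always the next unprocessed argument.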
while args[1][0] == '-':
if (args[1] == "-t"):
print_ts = 1
args.pop(0)
elif ((args[1] == "--pysolfc") or (args[1] == "-F")):
pysol_fc_deals = 1
args.pop(0)
else:
raise "Unknown flag " + args[1] + "!"
game_num = long(args[1])
if (len(args) >= 3):
which_game = args[2]
else:
which_game = "freecell"
game = Game(which_game, game_num, pysol_fc_deals, print_ts)
game.print_layout();
if __name__ == "__main__":
sys.exit(shlomif_main(sys.argv))
| mit | 2,585,555,502,783,666,700 | 26.895722 | 149 | 0.527078 | false |
kbdick/RecycleTracker | recyclecollector/scrap/gdata-2.0.18/tests/gdata_tests/apps/emailsettings/data_test.py | 23 | 7153 | #!/usr/bin/python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Claudio Cherubino <[email protected]>'
import unittest
import gdata.apps.emailsettings.data
import gdata.test_config as conf
class EmailSettingsLabelTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsLabel()
def testName(self):
self.entry.name = 'test label'
self.assertEquals(self.entry.name, 'test label')
class EmailSettingsFilterTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsFilter()
def testFrom(self):
self.entry.from_address = '[email protected]'
self.assertEquals(self.entry.from_address, '[email protected]')
def testTo(self):
self.entry.to_address = '[email protected]'
self.assertEquals(self.entry.to_address, '[email protected]')
def testSubject(self):
self.entry.subject = 'Read me'
self.assertEquals(self.entry.subject, 'Read me')
def testHasTheWord(self):
self.entry.has_the_word = 'important'
self.assertEquals(self.entry.has_the_word, 'important')
def testDoesNotHaveTheWord(self):
self.entry.does_not_have_the_word = 'spam'
self.assertEquals(self.entry.does_not_have_the_word, 'spam')
def testHasAttachments(self):
self.entry.has_attachments = True
self.assertEquals(self.entry.has_attachments, True)
def testLabel(self):
self.entry.label = 'Trip reports'
self.assertEquals(self.entry.label, 'Trip reports')
def testMarkHasRead(self):
self.entry.mark_has_read = True
self.assertEquals(self.entry.mark_has_read, True)
def testArchive(self):
self.entry.archive = True
self.assertEquals(self.entry.archive, True)
class EmailSettingsSendAsAliasTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsSendAsAlias()
def testName(self):
self.entry.name = 'Sales'
self.assertEquals(self.entry.name, 'Sales')
def testAddress(self):
self.entry.address = '[email protected]'
self.assertEquals(self.entry.address, '[email protected]')
def testReplyTo(self):
self.entry.reply_to = '[email protected]'
self.assertEquals(self.entry.reply_to, '[email protected]')
def testMakeDefault(self):
self.entry.make_default = True
self.assertEquals(self.entry.make_default, True)
class EmailSettingsWebClipTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsWebClip()
def testEnable(self):
self.entry.enable = True
self.assertEquals(self.entry.enable, True)
class EmailSettingsForwardingTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsForwarding()
def testEnable(self):
self.entry.enable = True
self.assertEquals(self.entry.enable, True)
def testForwardTo(self):
self.entry.forward_to = '[email protected]'
self.assertEquals(self.entry.forward_to, '[email protected]')
def testAction(self):
self.entry.action = 'KEEP'
self.assertEquals(self.entry.action, 'KEEP')
class EmailSettingsPopTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsPop()
def testEnable(self):
self.entry.enable = True
self.assertEquals(self.entry.enable, True)
def testForwardTo(self):
self.entry.enable_for = 'ALL_MAIL'
self.assertEquals(self.entry.enable_for, 'ALL_MAIL')
def testAction(self):
self.entry.action = 'KEEP'
self.assertEquals(self.entry.action, 'KEEP')
class EmailSettingsImapTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsImap()
def testEnable(self):
self.entry.enable = True
self.assertEquals(self.entry.enable, True)
class EmailSettingsVacationResponderTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsVacationResponder()
def testEnable(self):
self.entry.enable = True
self.assertEquals(self.entry.enable, True)
def testSubject(self):
self.entry.subject = 'On vacation!'
self.assertEquals(self.entry.subject, 'On vacation!')
def testMessage(self):
self.entry.message = 'See you on September 1st'
self.assertEquals(self.entry.message, 'See you on September 1st')
def testStartDate(self):
self.entry.start_date = '2011-12-05'
self.assertEquals(self.entry.start_date, '2011-12-05')
def testEndDate(self):
self.entry.end_date = '2011-12-06'
self.assertEquals(self.entry.end_date, '2011-12-06')
def testContactsOnly(self):
self.entry.contacts_only = True
self.assertEquals(self.entry.contacts_only, True)
def testDomainOnly(self):
self.entry.domain_only = True
self.assertEquals(self.entry.domain_only, True)
class EmailSettingsSignatureTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsSignature()
def testValue(self):
self.entry.signature_value = 'Regards, Joe'
self.assertEquals(self.entry.signature_value, 'Regards, Joe')
class EmailSettingsLanguageTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsLanguage()
def testLanguage(self):
self.entry.language_tag = 'es'
self.assertEquals(self.entry.language_tag, 'es')
class EmailSettingsGeneralTest(unittest.TestCase):
def setUp(self):
self.entry = gdata.apps.emailsettings.data.EmailSettingsGeneral()
def testPageSize(self):
self.entry.page_size = 25
self.assertEquals(self.entry.page_size, 25)
def testShortcuts(self):
self.entry.shortcuts = True
self.assertEquals(self.entry.shortcuts, True)
def testArrows(self):
self.entry.arrows = True
self.assertEquals(self.entry.arrows, True)
def testSnippets(self):
self.entry.snippets = True
self.assertEquals(self.entry.snippets, True)
def testUnicode(self):
self.entry.use_unicode = True
self.assertEquals(self.entry.use_unicode, True)
def suite():
return conf.build_suite([EmailSettingsLabelTest, EmailSettingsFilterTest,
EmailSettingsSendAsAliasTest, EmailSettingsWebClipTest,
EmailSettingsForwardingTest, EmailSettingsPopTest,
EmailSettingsImapTest, EmailSettingsVacationResponderTest,
EmailSettingsSignatureTest, EmailSettingsLanguageTest,
EmailSettingsGeneralTest])
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 9,015,260,266,122,555,000 | 27.612 | 79 | 0.73228 | false |
serzans/wagtail | wagtail/wagtailadmin/views/home.py | 1 | 2524 | from django.shortcuts import render
from django.conf import settings
from django.db.models import F
from wagtail.wagtailcore import hooks
from wagtail.wagtailcore.models import PageRevision, UserPagePermissionsProxy
from wagtail.utils.compat import render_to_string
from wagtail.wagtailadmin.site_summary import SiteSummaryPanel
# Panels for the homepage
class UpgradeNotificationPanel(object):
name = 'upgrade_notification'
order = 100
def __init__(self, request):
self.request = request
def render(self):
if self.request.user.is_superuser and getattr(settings, "WAGTAIL_ENABLE_UPDATE_CHECK", True):
return render_to_string('wagtailadmin/home/upgrade_notification.html', {}, request=self.request)
else:
return ""
class PagesForModerationPanel(object):
name = 'pages_for_moderation'
order = 200
def __init__(self, request):
self.request = request
user_perms = UserPagePermissionsProxy(request.user)
self.page_revisions_for_moderation = (user_perms.revisions_for_moderation()
.select_related('page', 'user').order_by('-created_at'))
def render(self):
return render_to_string('wagtailadmin/home/pages_for_moderation.html', {
'page_revisions_for_moderation': self.page_revisions_for_moderation,
}, request=self.request)
class RecentEditsPanel(object):
name = 'recent_edits'
order = 300
def __init__(self, request):
self.request = request
# Last n edited pages
self.last_edits = PageRevision.objects.filter(
user=self.request.user,
created_at=F('page__latest_revision_created_at')
).order_by('-created_at')[:5]
def render(self):
return render_to_string('wagtailadmin/home/recent_edits.html', {
'last_edits': self.last_edits,
}, request=self.request)
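# Each panel above follows the same informal interface: a `name`, an integer
# `order` used for sorting on the homepage, and a `render()` method returning
# an HTML fragment. Extra panels can be contributed through the
# 'construct_homepage_panels' hook used in the home view below.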
def home(request):
panels = [
SiteSummaryPanel(request),
UpgradeNotificationPanel(request),
PagesForModerationPanel(request),
RecentEditsPanel(request),
]
for fn in hooks.get_hooks('construct_homepage_panels'):
fn(request, panels)
return render(request, "wagtailadmin/home.html", {
'site_name': settings.WAGTAIL_SITE_NAME,
'panels': sorted(panels, key=lambda p: p.order),
'user': request.user
})
def error_test(request):
raise Exception("This is a test of the emergency broadcast system.")
| bsd-3-clause | -7,972,610,040,625,939,000 | 29.780488 | 108 | 0.658479 | false |
PyFilesystem/pyfilesystem | fs/base.py | 1 | 53209 | #!/usr/bin/env python
"""
fs.base
=======
This module defines the most basic filesystem abstraction, the FS class.
Instances of FS represent a filesystem containing files and directories
that can be queried and manipulated. To implement a new kind of filesystem,
start by sublcassing the base FS class.
For more information regarding implementing a working PyFilesystem interface, see :ref:`implementers`.
"""
from __future__ import with_statement
__all__ = ['DummyLock',
'silence_fserrors',
'NullFile',
'synchronize',
'FS',
'flags_to_mode',
'NoDefaultMeta']
import os
import os.path
import shutil
import fnmatch
import datetime
import time
import errno
try:
import threading
except ImportError:
import dummy_threading as threading
from fs.path import *
from fs.errors import *
from fs.local_functools import wraps
import six
from six import b
class DummyLock(object):
"""A dummy lock object that doesn't do anything.
This is used as a placeholder when locking is disabled. We can't
directly use the Lock class from the dummy_threading module, since
it attempts to sanity-check the sequence of acquire/release calls
in a way that breaks when real threading is available.
"""
def acquire(self, blocking=1):
"""Acquiring a DummyLock always succeeds."""
return 1
def release(self):
"""Releasing a DummyLock always succeeds."""
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def silence_fserrors(f, *args, **kwargs):
"""Perform a function call and return ``None`` if an :class:`~fs.errors.FSError` is thrown
:param f: Function to call
:param args: Parameters to f
:param kwargs: Keyword parameters to f
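
    A minimal usage sketch (``my_fs`` stands in for any FS instance)::

        contents = silence_fserrors(my_fs.getcontents, 'may-not-exist.txt')
        if contents is None:
            pass  # the call raised an FSError, which was silenced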
"""
try:
return f(*args, **kwargs)
except FSError:
return None
class NoDefaultMeta(object):
"""A singleton used to signify that there is no default for getmeta"""
pass
class NullFile(object):
"""A NullFile is a file object that has no functionality.
Null files are returned by the :meth:`~fs.base.FS.safeopen` method in FS objects when the
    file doesn't exist. This can simplify code by removing the need to check
    whether a file exists, or to handle exceptions.
"""
def __init__(self):
self.closed = False
def __iter__(self):
return self
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.closed = True
def flush(self):
pass
def next(self):
raise StopIteration
def readline(self, *args, **kwargs):
return b("")
def close(self):
self.closed = True
def read(self, size=None):
return b("")
def seek(self, *args, **kwargs):
pass
def tell(self):
return 0
def truncate(self, *args, **kwargs):
return 0
def write(self, data):
pass
def writelines(self, *args, **kwargs):
pass
def synchronize(func):
"""Decorator to synchronize a method on self._lock."""
@wraps(func)
def acquire_lock(self, *args, **kwargs):
self._lock.acquire()
try:
return func(self, *args, **kwargs)
finally:
self._lock.release()
return acquire_lock
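# A sketch of typical use (any class whose instances define a ``_lock``
# attribute, as FS instances do):
#
#     class Counter(object):
#         def __init__(self):
#             self._lock = threading.RLock()
#             self.value = 0
#
#         @synchronize
#         def increment(self):
#             self.value += 1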
class FS(object):
"""The base class for Filesystem abstraction objects.
An instance of a class derived from FS is an abstraction on some kind of filesystem, such as the OS filesystem or a zip file.
"""
_meta = {}
def __init__(self, thread_synchronize=True):
"""The base class for Filesystem objects.
        :param thread_synchronize: If True, a lock object will be created for the object, otherwise a dummy lock will be used.
:type thread_synchronize: bool
"""
self.closed = False
super(FS, self).__init__()
self.thread_synchronize = thread_synchronize
if thread_synchronize:
self._lock = threading.RLock()
else:
self._lock = DummyLock()
def __del__(self):
if not getattr(self, 'closed', True):
try:
self.close()
except:
pass
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def cachehint(self, enabled):
"""Recommends the use of caching. Implementations are free to use or
ignore this value.
:param enabled: If True the implementation is permitted to aggressively cache directory
structure / file information. Caching such information can speed up many operations,
particularly for network based filesystems. The downside of caching is that
changes made to directories or files outside of this interface may not be picked up immediately.
"""
pass
# Deprecating cache_hint in favour of no underscore version, for consistency
cache_hint = cachehint
def close(self):
"""Close the filesystem. This will perform any shutdown related
operations required. This method will be called automatically when
the filesystem object is garbage collected, but it is good practice
        to call it explicitly so that any attached resources are freed when they
are no longer required.
"""
self.closed = True
def __getstate__(self):
# Locks can't be pickled, so instead we just indicate the
# type of lock that should be there. None == no lock,
# True == a proper lock, False == a dummy lock.
state = self.__dict__.copy()
lock = state.get("_lock", None)
if lock is not None:
if isinstance(lock, threading._RLock):
state["_lock"] = True
else:
state["_lock"] = False
return state
def __setstate__(self, state):
self.__dict__.update(state)
lock = state.get("_lock")
if lock is not None:
if lock:
self._lock = threading.RLock()
else:
self._lock = DummyLock()
def getmeta(self, meta_name, default=NoDefaultMeta):
"""Retrieve a meta value associated with an FS object.
Meta values are a way for an FS implementation to report potentially
useful information associated with the file system.
A meta key is a lower case string with no spaces. Meta keys may also
be grouped in namespaces in a dotted notation, e.g. 'atomic.namespaces'.
FS implementations aren't obliged to return any meta values, but the
following are common:
* *read_only* True if the file system cannot be modified
* *thread_safe* True if the implementation is thread safe
* *network* True if the file system requires network access
* *unicode_paths* True if the file system supports unicode paths
* *case_insensitive_paths* True if the file system ignores the case of paths
* *atomic.makedir* True if making a directory is an atomic operation
        * *atomic.rename* True if rename is an atomic operation (and not implemented as a copy followed by a delete)
* *atomic.setcontents* True if the implementation supports setting the contents of a file as an atomic operation (without opening a file)
* *free_space* The free space (in bytes) available on the file system
* *total_space* The total space (in bytes) available on the file system
* *virtual* True if the filesystem defers to other filesystems
* *invalid_path_chars* A string containing characters that may not be used in paths
FS implementations may expose non-generic meta data through a self-named namespace. e.g. ``"somefs.some_meta"``
Since no meta value is guaranteed to exist, it is advisable to always supply a
default value to ``getmeta``.
:param meta_name: The name of the meta value to retrieve
        :param default: An optional default to return, if the meta value isn't present
:raises `fs.errors.NoMetaError`: If specified meta value is not present, and there is no default
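
        For example, to check writability defensively (a sketch; ``fs`` is any
        FS instance)::

            if fs.getmeta('read_only', default=False):
                raise IOError('filesystem is read-only')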
"""
if meta_name not in self._meta:
if default is not NoDefaultMeta:
return default
raise NoMetaError(meta_name=meta_name)
return self._meta[meta_name]
def hasmeta(self, meta_name):
"""Check that a meta value is supported
:param meta_name: The name of a meta value to check
:rtype: bool
"""
try:
self.getmeta(meta_name)
except NoMetaError:
return False
return True
def validatepath(self, path):
"""Validate an fs path, throws an :class:`~fs.errors.InvalidPathError` exception if validation fails.
        A path is invalid if it fails to map to a path on the underlying filesystem. The default
implementation checks for the presence of any of the characters in the meta value 'invalid_path_chars',
but implementations may have other requirements for paths.
        :param path: an fs path to validate
:raises `fs.errors.InvalidPathError`: if `path` does not map on to a valid path on this filesystem
"""
invalid_chars = self.getmeta('invalid_path_chars', default=None)
if invalid_chars:
re_invalid_chars = getattr(self, '_re_invalid_chars', None)
if re_invalid_chars is None:
self._re_invalid_chars = re_invalid_chars = re.compile('|'.join(re.escape(c) for c in invalid_chars), re.UNICODE)
if re_invalid_chars.search(path):
raise InvalidCharsInPathError(path)
def isvalidpath(self, path):
"""Check if a path is valid on this filesystem
:param path: an fs path
"""
try:
self.validatepath(path)
except InvalidPathError:
return False
else:
return True
def getsyspath(self, path, allow_none=False):
"""Returns the system path (a path recognized by the OS) if one is present.
If the path does not map to a system path (and `allow_none` is False)
then a NoSysPathError exception is thrown. Otherwise, the system
path will be returned as a unicode string.
:param path: a path within the filesystem
:param allow_none: if True, this method will return None when there is no system path,
rather than raising NoSysPathError
:type allow_none: bool
:raises `fs.errors.NoSysPathError`: if the path does not map on to a system path, and allow_none is set to False (default)
:rtype: unicode
"""
if not allow_none:
raise NoSysPathError(path=path)
return None
def hassyspath(self, path):
"""Check if the path maps to a system path (a path recognized by the OS).
:param path: path to check
:returns: True if `path` maps to a system path
:rtype: bool
"""
return self.getsyspath(path, allow_none=True) is not None
def getpathurl(self, path, allow_none=False):
"""Returns a url that corresponds to the given path, if one exists.
If the path does not have an equivalent URL form (and allow_none is False)
then a :class:`~fs.errors.NoPathURLError` exception is thrown. Otherwise the URL will be
        returned as a unicode string.
:param path: a path within the filesystem
:param allow_none: if true, this method can return None if there is no
URL form of the given path
:type allow_none: bool
:raises `fs.errors.NoPathURLError`: If no URL form exists, and allow_none is False (the default)
:rtype: unicode
"""
if not allow_none:
raise NoPathURLError(path=path)
return None
def haspathurl(self, path):
"""Check if the path has an equivalent URL form
:param path: path to check
:returns: True if `path` has a URL form
:rtype: bool
"""
return self.getpathurl(path, allow_none=True) is not None
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Open a the given path as a file-like object.
:param path: a path to file that should be opened
:type path: string
:param mode: mode of file to open, identical to the mode string used
in 'file' and 'open' builtins
:type mode: string
:param kwargs: additional (optional) keyword parameters that may
be required to open the file
:type kwargs: dict
:rtype: a file-like object
:raises `fs.errors.ParentDirectoryMissingError`: if an intermediate directory is missing
        :raises `fs.errors.ResourceInvalidError`: if an intermediate directory is a file
:raises `fs.errors.ResourceNotFoundError`: if the path is not found
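
        Example (a sketch; ``fs`` is any FS subclass instance that implements
        ``open``)::

            f = fs.open('notes.txt', 'wb')
            try:
                f.write(b('hello'))
            finally:
                f.close()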
"""
raise UnsupportedError("open file")
def safeopen(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Like :py:meth:`~fs.base.FS.open`, but returns a
:py:class:`~fs.base.NullFile` if the file could not be opened.
A ``NullFile`` is a dummy file which has all the methods of a file-like object,
but contains no data.
:param path: a path to file that should be opened
:type path: string
:param mode: mode of file to open, identical to the mode string used
in 'file' and 'open' builtins
:type mode: string
:param kwargs: additional (optional) keyword parameters that may
be required to open the file
:type kwargs: dict
:rtype: a file-like object
"""
try:
f = self.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
except ResourceNotFoundError:
return NullFile()
return f
def exists(self, path):
"""Check if a path references a valid resource.
:param path: A path in the filesystem
:type path: string
:rtype: bool
"""
return self.isfile(path) or self.isdir(path)
def isdir(self, path):
"""Check if a path references a directory.
:param path: a path in the filesystem
:type path: string
:rtype: bool
"""
raise UnsupportedError("check for directory")
def isfile(self, path):
"""Check if a path references a file.
:param path: a path in the filesystem
:type path: string
:rtype: bool
"""
raise UnsupportedError("check for file")
def __iter__(self):
""" Iterates over paths returned by :py:meth:`~fs.base.listdir` method with default params. """
for f in self.listdir():
yield f
def listdir(self,
path="./",
wildcard=None,
full=False,
absolute=False,
dirs_only=False,
files_only=False):
"""Lists the the files and directories under a given path.
The directory contents are returned as a list of unicode paths.
:param path: root of the path to list
:type path: string
:param wildcard: Only returns paths that match this wildcard
:type wildcard: string containing a wildcard, or a callable that accepts a path and returns a boolean
:param full: returns full paths (relative to the root)
:type full: bool
:param absolute: returns absolute paths (paths beginning with /)
:type absolute: bool
:param dirs_only: if True, only return directories
:type dirs_only: bool
:param files_only: if True, only return files
:type files_only: bool
:rtype: iterable of paths
:raises `fs.errors.ParentDirectoryMissingError`: if an intermediate directory is missing
:raises `fs.errors.ResourceInvalidError`: if the path exists, but is not a directory
:raises `fs.errors.ResourceNotFoundError`: if the path is not found
"""
raise UnsupportedError("list directory")
def listdirinfo(self,
path="./",
wildcard=None,
full=False,
absolute=False,
dirs_only=False,
files_only=False):
"""Retrieves a list of paths and path info under a given path.
This method behaves like listdir() but instead of just returning
the name of each item in the directory, it returns a tuple of the
name and the info dict as returned by getinfo.
This method may be more efficient than calling
:py:meth:`~fs.base.FS.getinfo` on each individual item returned by :py:meth:`~fs.base.FS.listdir`, particularly
for network based filesystems.
:param path: root of the path to list
:param wildcard: filter paths that match this wildcard
:param dirs_only: only retrieve directories
:type dirs_only: bool
:param files_only: only retrieve files
:type files_only: bool
:raises `fs.errors.ResourceNotFoundError`: If the path is not found
:raises `fs.errors.ResourceInvalidError`: If the path exists, but is not a directory
"""
path = normpath(path)
def getinfo(p):
try:
if full or absolute:
return self.getinfo(p)
else:
return self.getinfo(pathjoin(path, p))
except FSError:
return {}
return [(p, getinfo(p))
for p in self.listdir(path,
wildcard=wildcard,
full=full,
absolute=absolute,
dirs_only=dirs_only,
files_only=files_only)]
def _listdir_helper(self,
path,
entries,
wildcard=None,
full=False,
absolute=False,
dirs_only=False,
files_only=False):
"""A helper method called by listdir method that applies filtering.
Given the path to a directory and a list of the names of entries within
that directory, this method applies the semantics of the listdir()
keyword arguments. An appropriately modified and filtered list of
directory entries is returned.
"""
path = normpath(path)
if dirs_only and files_only:
raise ValueError("dirs_only and files_only can not both be True")
if wildcard is not None:
if not callable(wildcard):
wildcard_re = re.compile(fnmatch.translate(wildcard))
wildcard = lambda fn: bool(wildcard_re.match(fn))
entries = [p for p in entries if wildcard(p)]
if dirs_only:
isdir = self.isdir
entries = [p for p in entries if isdir(pathcombine(path, p))]
elif files_only:
isfile = self.isfile
entries = [p for p in entries if isfile(pathcombine(path, p))]
if full:
entries = [pathcombine(path, p) for p in entries]
elif absolute:
path = abspath(path)
entries = [(pathcombine(path, p)) for p in entries]
return entries
def ilistdir(self,
path="./",
wildcard=None,
full=False,
absolute=False,
dirs_only=False,
files_only=False):
"""Generator yielding the files and directories under a given path.
        This method behaves identically to :py:meth:`~fs.base.FS.listdir` but returns a generator
instead of a list. Depending on the filesystem this may be more
efficient than calling :py:meth:`~fs.base.FS.listdir` and iterating over the resulting list.
"""
return iter(self.listdir(path,
wildcard=wildcard,
full=full,
absolute=absolute,
dirs_only=dirs_only,
files_only=files_only))
def ilistdirinfo(self,
path="./",
wildcard=None,
full=False,
absolute=False,
dirs_only=False,
files_only=False):
"""Generator yielding paths and path info under a given path.
        This method behaves identically to :py:meth:`~fs.base.FS.listdirinfo` but returns a generator
        instead of a list. Depending on the filesystem this may be more
        efficient than calling :py:meth:`~fs.base.FS.listdirinfo` and iterating over the resulting
list.
"""
return iter(self.listdirinfo(path,
wildcard,
full,
absolute,
dirs_only,
files_only))
def makedir(self, path, recursive=False, allow_recreate=False):
"""Make a directory on the filesystem.
:param path: path of directory
:type path: string
:param recursive: if True, any intermediate directories will also be created
:type recursive: bool
        :param allow_recreate: if True, re-creating a directory won't be an error
        :type allow_recreate: bool
:raises `fs.errors.DestinationExistsError`: if the path is already a directory, and allow_recreate is False
:raises `fs.errors.ParentDirectoryMissingError`: if a containing directory is missing and recursive is False
:raises `fs.errors.ResourceInvalidError`: if a path is an existing file
:raises `fs.errors.ResourceNotFoundError`: if the path is not found
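
        For example (a sketch)::

            fs.makedir('foo/bar/baz', recursive=True, allow_recreate=True)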
"""
raise UnsupportedError("make directory")
def remove(self, path):
"""Remove a file from the filesystem.
:param path: Path of the resource to remove
:type path: string
:raises `fs.errors.ParentDirectoryMissingError`: if an intermediate directory is missing
:raises `fs.errors.ResourceInvalidError`: if the path is a directory
:raises `fs.errors.ResourceNotFoundError`: if the path does not exist
"""
raise UnsupportedError("remove resource")
def removedir(self, path, recursive=False, force=False):
"""Remove a directory from the filesystem
:param path: path of the directory to remove
:type path: string
:param recursive: if True, empty parent directories will be removed
:type recursive: bool
:param force: if True, any directory contents will be removed
:type force: bool
:raises `fs.errors.DirectoryNotEmptyError`: if the directory is not empty and force is False
:raises `fs.errors.ParentDirectoryMissingError`: if an intermediate directory is missing
:raises `fs.errors.ResourceInvalidError`: if the path is not a directory
:raises `fs.errors.ResourceNotFoundError`: if the path does not exist
"""
raise UnsupportedError("remove directory")
def rename(self, src, dst):
"""Renames a file or directory
:param src: path to rename
:type src: string
:param dst: new name
:type dst: string
:raises ParentDirectoryMissingError: if a containing directory is missing
:raises ResourceInvalidError: if the path or a parent path is not a
            directory, or src is a parent of dst, or one of src and dst is a
            directory and the other is not
:raises ResourceNotFoundError: if the src path does not exist
"""
raise UnsupportedError("rename resource")
@convert_os_errors
def settimes(self, path, accessed_time=None, modified_time=None):
"""Set the accessed time and modified time of a file
:param path: path to a file
:type path: string
:param accessed_time: the datetime the file was accessed (defaults to current time)
:type accessed_time: datetime
:param modified_time: the datetime the file was modified (defaults to current time)
:type modified_time: datetime
"""
with self._lock:
sys_path = self.getsyspath(path, allow_none=True)
if sys_path is not None:
now = datetime.datetime.now()
if accessed_time is None:
accessed_time = now
if modified_time is None:
modified_time = now
accessed_time = int(time.mktime(accessed_time.timetuple()))
modified_time = int(time.mktime(modified_time.timetuple()))
os.utime(sys_path, (accessed_time, modified_time))
return True
else:
raise UnsupportedError("settimes")
def getinfo(self, path):
"""Returns information for a path as a dictionary. The exact content of
this dictionary will vary depending on the implementation, but will
likely include a few common values. The following values will be found
in info dictionaries for most implementations:
* "size" - Number of bytes used to store the file or directory
* "created_time" - A datetime object containing the time the resource was created
* "accessed_time" - A datetime object containing the time the resource was last accessed
* "modified_time" - A datetime object containing the time the resource was modified
:param path: a path to retrieve information for
:type path: string
:rtype: dict
:raises `fs.errors.ParentDirectoryMissingError`: if an intermediate directory is missing
:raises `fs.errors.ResourceInvalidError`: if the path is not a directory
:raises `fs.errors.ResourceNotFoundError`: if the path does not exist
"""
raise UnsupportedError("get resource info")
def getinfokeys(self, path, *keys):
"""Get specified keys from info dict, as returned from `getinfo`. The returned dictionary may
not contain all the keys that were asked for, if they aren't available.
This method allows a filesystem to potentially provide a faster way of retrieving these info values if you
are only interested in a subset of them.
:param path: a path to retrieve information for
:param keys: the info keys you would like to retrieve
:rtype: dict
"""
info = self.getinfo(path)
return dict((k, info[k]) for k in keys if k in info)
def desc(self, path):
"""Returns short descriptive text regarding a path. Intended mainly as
a debugging aid.
:param path: A path to describe
:rtype: str
"""
#if not self.exists(path):
# return ''
try:
sys_path = self.getsyspath(path)
except NoSysPathError:
return "No description available"
return sys_path
def getcontents(self, path, mode='rb', encoding=None, errors=None, newline=None):
"""Returns the contents of a file as a string.
:param path: A path of file to read
        :param mode: Mode to open file with (should be 'rb' for binary or 'rt' for text)
:param encoding: Encoding to use when reading contents in text mode
        :param errors: Unicode errors parameter if text mode is used
:param newline: Newlines parameter for text mode decoding
:rtype: str
:returns: file contents
"""
if 'r' not in mode:
raise ValueError("mode must contain 'r' to be readable")
f = None
try:
f = self.open(path, mode=mode, encoding=encoding, errors=errors, newline=newline)
contents = f.read()
return contents
finally:
if f is not None:
f.close()
def _setcontents(self,
path,
data,
encoding=None,
errors=None,
chunk_size=1024 * 64,
progress_callback=None,
finished_callback=None):
"""Does the work of setcontents. Factored out, so that `setcontents_async` can use it"""
if progress_callback is None:
progress_callback = lambda bytes_written: None
if finished_callback is None:
finished_callback = lambda: None
if not data:
progress_callback(0)
self.createfile(path, wipe=True)
finished_callback()
return 0
bytes_written = 0
progress_callback(0)
if hasattr(data, 'read'):
read = data.read
chunk = read(chunk_size)
if isinstance(chunk, six.text_type):
f = self.open(path, 'wt', encoding=encoding, errors=errors)
else:
f = self.open(path, 'wb')
write = f.write
try:
while chunk:
write(chunk)
bytes_written += len(chunk)
progress_callback(bytes_written)
chunk = read(chunk_size)
finally:
f.close()
else:
if isinstance(data, six.text_type):
with self.open(path, 'wt', encoding=encoding, errors=errors) as f:
f.write(data)
bytes_written += len(data)
else:
with self.open(path, 'wb') as f:
f.write(data)
bytes_written += len(data)
progress_callback(bytes_written)
finished_callback()
return bytes_written
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024 * 64):
"""A convenience method to create a new file from a string or file-like object
:param path: a path of the file to create
:param data: a string or bytes object containing the contents for the new file
:param encoding: if `data` is a file open in text mode, or a text string, then use this `encoding` to write to the destination file
:param errors: if `data` is a file open in text mode or a text string, then use `errors` when opening the destination file
:param chunk_size: Number of bytes to read in a chunk, if the implementation has to resort to a read / copy loop
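
        A short sketch (``fs`` is any FS instance)::

            fs.setcontents('greeting.txt', b('hello world'))
            fs.setcontents('copy.bin', fs.open('original.bin', 'rb'))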
"""
        return self._setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
def setcontents_async(self,
path,
data,
encoding=None,
errors=None,
chunk_size=1024 * 64,
progress_callback=None,
finished_callback=None,
error_callback=None):
"""Create a new file from a string or file-like object asynchronously
This method returns a ``threading.Event`` object. Call the ``wait`` method on the event object
to block until all data has been written, or simply ignore it.
:param path: a path of the file to create
:param data: a string or a file-like object containing the contents for the new file
:param encoding: if `data` is a file open in text mode, or a text string, then use this `encoding` to write to the destination file
:param errors: if `data` is a file open in text mode or a text string, then use `errors` when opening the destination file
:param chunk_size: Number of bytes to read and write in a chunk
:param progress_callback: A function that is called periodically
with the number of bytes written.
:param finished_callback: A function that is called when all data has been written
:param error_callback: A function that is called with an exception
object if any error occurs during the copy process.
:returns: An event object that is set when the copy is complete, call
the `wait` method of this object to block until the data is written
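
        A usage sketch (``fs`` and ``data_file`` are illustrative names)::

            finished = fs.setcontents_async('big.bin', data_file)
            finished.wait()  # block until the background copy completes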
"""
finished_event = threading.Event()
def do_setcontents():
try:
self._setcontents(path,
data,
encoding=encoding,
errors=errors,
                                  chunk_size=chunk_size,
progress_callback=progress_callback,
finished_callback=finished_callback)
except Exception, e:
if error_callback is not None:
error_callback(e)
finally:
finished_event.set()
threading.Thread(target=do_setcontents).start()
return finished_event
def createfile(self, path, wipe=False):
"""Creates an empty file if it doesn't exist
:param path: path to the file to create
:param wipe: if True, the contents of the file will be erased
"""
with self._lock:
if not wipe and self.isfile(path):
return
f = None
try:
f = self.open(path, 'wb')
finally:
if f is not None:
f.close()
def opendir(self, path):
"""Opens a directory and returns a FS object representing its contents.
:param path: path to directory to open
:type path: string
:return: the opened dir
:rtype: an FS object
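
        Example (a sketch)::

            photos_fs = fs.opendir('photos')
            names = photos_fs.listdir()  # paths are now relative to 'photos'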
"""
from fs.wrapfs.subfs import SubFS
if not self.exists(path):
raise ResourceNotFoundError(path)
if not self.isdir(path):
raise ResourceInvalidError("path should reference a directory")
return SubFS(self, path)
def walk(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False):
"""Walks a directory tree and yields the root path and contents.
        Yields a tuple of the path of each directory and a list of the files
        it contains.
:param path: root path to start walking
:type path: string
:param wildcard: if given, only return files that match this wildcard
:type wildcard: a string containing a wildcard (e.g. `*.txt`) or a callable that takes the file path and returns a boolean
:param dir_wildcard: if given, only walk directories that match the wildcard
:type dir_wildcard: a string containing a wildcard (e.g. `*.txt`) or a callable that takes the directory name and returns a boolean
:param search: a string identifying the method used to walk the directories. There are two such methods:
* ``"breadth"`` yields paths in the top directories first
* ``"depth"`` yields the deepest paths first
:param ignore_errors: ignore any errors reading the directory
:type ignore_errors: bool
:rtype: iterator of (current_path, paths)
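
        Example (a sketch; ``fs`` is any FS instance)::

            for dir_path, file_names in fs.walk('/', wildcard='*.py'):
                for file_name in file_names:
                    print pathcombine(dir_path, file_name)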
"""
path = normpath(path)
if not self.exists(path):
raise ResourceNotFoundError(path)
def listdir(path, *args, **kwargs):
if ignore_errors:
try:
return self.listdir(path, *args, **kwargs)
except:
return []
else:
return self.listdir(path, *args, **kwargs)
if wildcard is None:
wildcard = lambda f: True
elif not callable(wildcard):
wildcard_re = re.compile(fnmatch.translate(wildcard))
wildcard = lambda fn: bool(wildcard_re.match(fn))
if dir_wildcard is None:
dir_wildcard = lambda f: True
elif not callable(dir_wildcard):
dir_wildcard_re = re.compile(fnmatch.translate(dir_wildcard))
dir_wildcard = lambda fn: bool(dir_wildcard_re.match(fn))
if search == "breadth":
dirs = [path]
dirs_append = dirs.append
dirs_pop = dirs.pop
isdir = self.isdir
while dirs:
current_path = dirs_pop()
paths = []
paths_append = paths.append
try:
for filename in listdir(current_path, dirs_only=True):
path = pathcombine(current_path, filename)
if dir_wildcard(path):
dirs_append(path)
for filename in listdir(current_path, files_only=True):
path = pathcombine(current_path, filename)
if wildcard(filename):
paths_append(filename)
except ResourceNotFoundError:
# Could happen if another thread / process deletes something whilst we are walking
pass
yield (current_path, paths)
elif search == "depth":
def recurse(recurse_path):
try:
for path in listdir(recurse_path, wildcard=dir_wildcard, full=True, dirs_only=True):
for p in recurse(path):
yield p
except ResourceNotFoundError:
# Could happen if another thread / process deletes something whilst we are walking
pass
yield (recurse_path, listdir(recurse_path, wildcard=wildcard, files_only=True))
for p in recurse(path):
yield p
else:
raise ValueError("Search should be 'breadth' or 'depth'")
def walkfiles(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False):
"""Like the 'walk' method, but just yields file paths.
:param path: root path to start walking
:type path: string
:param wildcard: if given, only return files that match this wildcard
:type wildcard: A string containing a wildcard (e.g. `*.txt`) or a callable that takes the file path and returns a boolean
:param dir_wildcard: if given, only walk directories that match the wildcard
:type dir_wildcard: A string containing a wildcard (e.g. `*.txt`) or a callable that takes the directory name and returns a boolean
:param search: a string identifying the method used to walk the directories. There are two such methods:
* ``"breadth"`` yields paths in the top directories first
* ``"depth"`` yields the deepest paths first
:param ignore_errors: ignore any errors reading the directory
:type ignore_errors: bool
:rtype: iterator of file paths
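
        Example (a sketch)::

            log_paths = list(fs.walkfiles('/', wildcard='*.log'))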
"""
for path, files in self.walk(normpath(path), wildcard=wildcard, dir_wildcard=dir_wildcard, search=search, ignore_errors=ignore_errors):
for f in files:
yield pathcombine(path, f)
def walkdirs(self,
path="/",
wildcard=None,
search="breadth",
ignore_errors=False):
"""Like the 'walk' method but yields directories.
:param path: root path to start walking
:type path: string
:param wildcard: if given, only return directories that match this wildcard
:type wildcard: A string containing a wildcard (e.g. `*.txt`) or a callable that takes the directory name and returns a boolean
:param search: a string identifying the method used to walk the directories. There are two such methods:
* ``"breadth"`` yields paths in the top directories first
* ``"depth"`` yields the deepest paths first
:param ignore_errors: ignore any errors reading the directory
:type ignore_errors: bool
:rtype: iterator of dir paths
"""
for p, _files in self.walk(path, dir_wildcard=wildcard, search=search, ignore_errors=ignore_errors):
yield p
def getsize(self, path):
"""Returns the size (in bytes) of a resource.
:param path: a path to the resource
:type path: string
:returns: the size of the file
:rtype: integer
"""
info = self.getinfo(path)
size = info.get('size', None)
if size is None:
raise OperationFailedError("get size of resource", path)
return size
def copy(self, src, dst, overwrite=False, chunk_size=1024 * 64):
"""Copies a file from src to dst.
:param src: the source path
:type src: string
:param dst: the destination path
:type dst: string
:param overwrite: if True, then an existing file at the destination may
            be overwritten; if False then DestinationExistsError
will be raised.
:type overwrite: bool
:param chunk_size: size of chunks to use if a simple copy is required
(defaults to 64K).
        :type chunk_size: integer
"""
with self._lock:
if not self.isfile(src):
if self.isdir(src):
raise ResourceInvalidError(src, msg="Source is not a file: %(path)s")
raise ResourceNotFoundError(src)
if not overwrite and self.exists(dst):
raise DestinationExistsError(dst)
src_syspath = self.getsyspath(src, allow_none=True)
dst_syspath = self.getsyspath(dst, allow_none=True)
if src_syspath is not None and dst_syspath is not None:
self._shutil_copyfile(src_syspath, dst_syspath)
else:
src_file = None
try:
src_file = self.open(src, "rb")
self.setcontents(dst, src_file, chunk_size=chunk_size)
except ResourceNotFoundError:
if self.exists(src) and not self.exists(dirname(dst)):
raise ParentDirectoryMissingError(dst)
finally:
if src_file is not None:
src_file.close()
@classmethod
@convert_os_errors
def _shutil_copyfile(cls, src_syspath, dst_syspath):
try:
shutil.copyfile(src_syspath, dst_syspath)
except IOError, e:
# shutil reports ENOENT when a parent directory is missing
if getattr(e, "errno", None) == errno.ENOENT:
if not os.path.exists(dirname(dst_syspath)):
raise ParentDirectoryMissingError(dst_syspath)
raise
@classmethod
@convert_os_errors
def _shutil_movefile(cls, src_syspath, dst_syspath):
shutil.move(src_syspath, dst_syspath)
def move(self, src, dst, overwrite=False, chunk_size=16384):
"""moves a file from one location to another.
:param src: source path
:type src: string
:param dst: destination path
:type dst: string
:param overwrite: When True the destination will be overwritten (if it exists),
otherwise a DestinationExistsError will be thrown
:type overwrite: bool
:param chunk_size: Size of chunks to use when copying, if a simple copy
is required
:type chunk_size: integer
:raise `fs.errors.DestinationExistsError`: if destination exists and `overwrite` is False
"""
with self._lock:
src_syspath = self.getsyspath(src, allow_none=True)
dst_syspath = self.getsyspath(dst, allow_none=True)
# Try to do an os-level rename if possible.
# Otherwise, fall back to copy-and-remove.
if src_syspath is not None and dst_syspath is not None:
if not os.path.isfile(src_syspath):
if os.path.isdir(src_syspath):
raise ResourceInvalidError(src, msg="Source is not a file: %(path)s")
raise ResourceNotFoundError(src)
if not overwrite and os.path.exists(dst_syspath):
raise DestinationExistsError(dst)
try:
os.rename(src_syspath, dst_syspath)
return
except OSError:
pass
self.copy(src, dst, overwrite=overwrite, chunk_size=chunk_size)
self.remove(src)
def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
"""moves a directory from one location to another.
:param src: source directory path
:type src: string
:param dst: destination directory path
:type dst: string
:param overwrite: if True then any existing files in the destination
directory will be overwritten
:type overwrite: bool
:param ignore_errors: if True then this method will ignore FSError
exceptions when moving files
:type ignore_errors: bool
:param chunk_size: size of chunks to use when copying, if a simple copy
is required
:type chunk_size: integer
:raise `fs.errors.DestinationExistsError`: if destination exists and `overwrite` is False
"""
with self._lock:
if not self.isdir(src):
if self.isfile(src):
raise ResourceInvalidError(src, msg="Source is not a directory: %(path)s")
raise ResourceNotFoundError(src)
if not overwrite and self.exists(dst):
raise DestinationExistsError(dst)
src_syspath = self.getsyspath(src, allow_none=True)
dst_syspath = self.getsyspath(dst, allow_none=True)
if src_syspath is not None and dst_syspath is not None:
try:
os.rename(src_syspath, dst_syspath)
return
except OSError:
pass
def movefile_noerrors(src, dst, **kwargs):
try:
return self.move(src, dst, **kwargs)
except FSError:
return
if ignore_errors:
movefile = movefile_noerrors
else:
movefile = self.move
src = abspath(src)
dst = abspath(dst)
if dst:
self.makedir(dst, allow_recreate=overwrite)
for dirname, filenames in self.walk(src, search="depth"):
dst_dirname = relpath(frombase(src, abspath(dirname)))
dst_dirpath = pathjoin(dst, dst_dirname)
self.makedir(dst_dirpath, allow_recreate=True, recursive=True)
for filename in filenames:
src_filename = pathjoin(dirname, filename)
dst_filename = pathjoin(dst_dirpath, filename)
movefile(src_filename, dst_filename, overwrite=overwrite, chunk_size=chunk_size)
self.removedir(dirname)
def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
"""copies a directory from one location to another.
:param src: source directory path
:type src: string
:param dst: destination directory path
:type dst: string
:param overwrite: if True then any existing files in the destination
directory will be overwritten
:type overwrite: bool
:param ignore_errors: if True, exceptions when copying will be ignored
:type ignore_errors: bool
:param chunk_size: size of chunks to use when copying, if a simple copy
is required (defaults to 16K)
"""
with self._lock:
if not self.isdir(src):
raise ResourceInvalidError(src, msg="Source is not a directory: %(path)s")
def copyfile_noerrors(src, dst, **kwargs):
try:
return self.copy(src, dst, **kwargs)
except FSError:
return
if ignore_errors:
copyfile = copyfile_noerrors
else:
copyfile = self.copy
src = abspath(src)
dst = abspath(dst)
if not overwrite and self.exists(dst):
raise DestinationExistsError(dst)
if dst:
self.makedir(dst, allow_recreate=True)
for dirname, filenames in self.walk(src):
dst_dirname = relpath(frombase(src, abspath(dirname)))
dst_dirpath = pathjoin(dst, dst_dirname)
self.makedir(dst_dirpath, allow_recreate=True, recursive=True)
for filename in filenames:
src_filename = pathjoin(dirname, filename)
dst_filename = pathjoin(dst_dirpath, filename)
copyfile(src_filename, dst_filename, overwrite=overwrite, chunk_size=chunk_size)
def isdirempty(self, path):
"""Check if a directory is empty (contains no files or sub-directories)
:param path: a directory path
:rtype: bool
"""
with self._lock:
path = normpath(path)
iter_dir = iter(self.ilistdir(path))
try:
next(iter_dir)
except StopIteration:
return True
return False
def makeopendir(self, path, recursive=False):
"""makes a directory (if it doesn't exist) and returns an FS object for
the newly created directory.
:param path: path to the new directory
:param recursive: if True any intermediate directories will be created
:return: the opened dir
:rtype: an FS object
"""
with self._lock:
self.makedir(path, allow_recreate=True, recursive=recursive)
dir_fs = self.opendir(path)
return dir_fs
def printtree(self, max_levels=5):
"""Prints a tree structure of the FS object to the console
:param max_levels: The maximum sub-directories to display, defaults to
5. Set to None for no limit
"""
from fs.utils import print_fs
print_fs(self, max_levels=max_levels)
tree = printtree
def browse(self, hide_dotfiles=False):
"""Displays the FS tree in a graphical window (requires wxPython)
:param hide_dotfiles: If True, files and folders that begin with a dot will be hidden
"""
from fs.browsewin import browse
browse(self, hide_dotfiles)
def getmmap(self, path, read_only=False, copy=False):
"""Returns a mmap object for this path.
See http://docs.python.org/library/mmap.html for more details on the mmap module.
:param path: A path on this filesystem
:param read_only: If True, the mmap may not be modified
        :param copy: If False then changes won't be written back to the file
:raises `fs.errors.NoMMapError`: Only paths that have a syspath can be opened as a mmap
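
        Illustrative use (a sketch; ``host_fs`` is a hypothetical filesystem
        that exposes real system paths)::

            m = host_fs.getmmap('data.bin', read_only=True)
            header = m[:16]
            m.close()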
"""
syspath = self.getsyspath(path, allow_none=True)
if syspath is None:
raise NoMMapError(path)
try:
import mmap
except ImportError:
raise NoMMapError(msg="mmap not supported")
if read_only:
f = open(syspath, 'rb')
access = mmap.ACCESS_READ
else:
if copy:
f = open(syspath, 'rb')
access = mmap.ACCESS_COPY
else:
f = open(syspath, 'r+b')
access = mmap.ACCESS_WRITE
m = mmap.mmap(f.fileno(), 0, access=access)
return m
def flags_to_mode(flags, binary=True):
"""Convert an os.O_* flag bitmask into an FS mode string."""
if flags & os.O_WRONLY:
if flags & os.O_TRUNC:
mode = "w"
elif flags & os.O_APPEND:
mode = "a"
else:
mode = "r+"
elif flags & os.O_RDWR:
if flags & os.O_TRUNC:
mode = "w+"
elif flags & os.O_APPEND:
mode = "a+"
else:
mode = "r+"
else:
mode = "r"
if flags & os.O_EXCL:
mode += "x"
if binary:
mode += 'b'
else:
mode += 't'
return mode
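# Illustrative expectations for flags_to_mode, shown as comments (a sketch only):
#
#     flags_to_mode(os.O_WRONLY | os.O_TRUNC)              -> 'wb'
#     flags_to_mode(os.O_RDWR | os.O_APPEND, binary=False) -> 'a+t'
#     flags_to_mode(os.O_RDONLY)                           -> 'rb'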
| bsd-3-clause | 7,546,556,319,514,486,000 | 35.925052 | 155 | 0.585991 | false |
c0cky/mediathread | mediathread/projects/migrations/0014_auto_20151104_1513.py | 2 | 1149 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def delete_orphan_collaborations(apps, schema_editor):
Project = apps.get_model('projects', 'Project')
Collaboration = apps.get_model('structuredcollaboration', 'Collaboration')
ContentType = apps.get_model('contenttypes', 'ContentType')
try:
ctype = ContentType.objects.get(model='project', app_label='projects')
to_delete = []
for c in Collaboration.objects.filter(content_type=ctype):
try:
Project.objects.get(id=int(c.object_pk))
except Project.DoesNotExist:
to_delete.append(c.id)
Collaboration.objects.filter(id__in=to_delete).delete()
except ContentType.DoesNotExist:
pass # skip this migration during unit tests
class Migration(migrations.Migration):
dependencies = [
('projects', '0013_auto_20151021_1438'),
('structuredcollaboration', '0004_auto_20151016_1401'),
('contenttypes', '0001_initial'),
]
operations = [
migrations.RunPython(delete_orphan_collaborations)
]
| gpl-2.0 | 8,739,052,059,690,337,000 | 30.916667 | 78 | 0.650131 | false |
xlevus/babel | tests/messages/test_frontend.py | 22 | 35766 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
from datetime import datetime
from distutils.dist import Distribution
from distutils.errors import DistutilsOptionError
from distutils.log import _global_log
import logging
import os
import shutil
import sys
import time
import unittest
from babel import __version__ as VERSION
from babel.dates import format_datetime
from babel.messages import frontend
from babel.util import LOCALTZ
from babel.messages.pofile import read_po
from babel._compat import StringIO
this_dir = os.path.abspath(os.path.dirname(__file__))
class CompileCatalogTestCase(unittest.TestCase):
def setUp(self):
self.olddir = os.getcwd()
self.datadir = os.path.join(this_dir, 'data')
os.chdir(self.datadir)
_global_log.threshold = 5 # shut up distutils logging
self.dist = Distribution(dict(
name='TestProject',
version='0.1',
packages=['project']
))
self.cmd = frontend.compile_catalog(self.dist)
self.cmd.initialize_options()
def tearDown(self):
os.chdir(self.olddir)
def test_no_directory_or_output_file_specified(self):
self.cmd.locale = 'en_US'
self.cmd.input_file = 'dummy'
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
def test_no_directory_or_input_file_specified(self):
self.cmd.locale = 'en_US'
self.cmd.output_file = 'dummy'
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
class ExtractMessagesTestCase(unittest.TestCase):
def setUp(self):
self.olddir = os.getcwd()
self.datadir = os.path.join(this_dir, 'data')
os.chdir(self.datadir)
_global_log.threshold = 5 # shut up distutils logging
self.dist = Distribution(dict(
name='TestProject',
version='0.1',
packages=['project']
))
self.cmd = frontend.extract_messages(self.dist)
self.cmd.initialize_options()
def tearDown(self):
pot_file = self._pot_file()
if os.path.isfile(pot_file):
os.unlink(pot_file)
os.chdir(self.olddir)
def _i18n_dir(self):
return os.path.join(self.datadir, 'project', 'i18n')
def _pot_file(self):
return os.path.join(self._i18n_dir(), 'temp.pot')
def assert_pot_file_exists(self):
assert os.path.isfile(self._pot_file())
def test_neither_default_nor_custom_keywords(self):
self.cmd.output_file = 'dummy'
self.cmd.no_default_keywords = True
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
def test_no_output_file_specified(self):
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
def test_both_sort_output_and_sort_by_file(self):
self.cmd.output_file = 'dummy'
self.cmd.sort_output = True
self.cmd.sort_by_file = True
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
def test_input_dirs_is_treated_as_list(self):
self.cmd.input_dirs = self.datadir
self.cmd.output_file = self._pot_file()
self.cmd.finalize_options()
self.cmd.run()
with open(self._pot_file(), 'U') as f:
catalog = read_po(f)
msg = catalog.get('bar')
self.assertEqual(1, len(msg.locations))
self.assertTrue('file1.py' in msg.locations[0][0])
def test_input_dirs_handle_spaces_after_comma(self):
self.cmd.input_dirs = 'foo, bar'
self.cmd.output_file = self._pot_file()
self.cmd.finalize_options()
self.assertEqual(['foo', 'bar'], self.cmd.input_dirs)
def test_extraction_with_default_mapping(self):
self.cmd.copyright_holder = 'FooBar, Inc.'
self.cmd.msgid_bugs_address = '[email protected]'
self.cmd.output_file = 'project/i18n/temp.pot'
self.cmd.add_comments = 'TRANSLATOR:,TRANSLATORS:'
self.cmd.finalize_options()
self.cmd.run()
self.assert_pot_file_exists()
expected_content = r"""# Translations template for TestProject.
# Copyright (C) %(year)s FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, %(year)s.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: %(date)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. TRANSLATOR: This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
#: project/ignored/this_wont_normally_be_here.py:11
msgid "FooBar"
msgid_plural "FooBars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'year': time.strftime('%Y'),
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(self._pot_file(), 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_extraction_with_mapping_file(self):
self.cmd.copyright_holder = 'FooBar, Inc.'
self.cmd.msgid_bugs_address = '[email protected]'
self.cmd.mapping_file = 'mapping.cfg'
self.cmd.output_file = 'project/i18n/temp.pot'
self.cmd.add_comments = 'TRANSLATOR:,TRANSLATORS:'
self.cmd.finalize_options()
self.cmd.run()
self.assert_pot_file_exists()
expected_content = r"""# Translations template for TestProject.
# Copyright (C) %(year)s FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, %(year)s.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: %(date)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. TRANSLATOR: This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'year': time.strftime('%Y'),
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(self._pot_file(), 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_extraction_with_mapping_dict(self):
self.dist.message_extractors = {
'project': [
('**/ignored/**.*', 'ignore', None),
('**.py', 'python', None),
]
}
self.cmd.copyright_holder = 'FooBar, Inc.'
self.cmd.msgid_bugs_address = '[email protected]'
self.cmd.output_file = 'project/i18n/temp.pot'
self.cmd.add_comments = 'TRANSLATOR:,TRANSLATORS:'
self.cmd.finalize_options()
self.cmd.run()
self.assert_pot_file_exists()
expected_content = r"""# Translations template for TestProject.
# Copyright (C) %(year)s FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, %(year)s.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: %(date)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. TRANSLATOR: This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'year': time.strftime('%Y'),
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(self._pot_file(), 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
class InitCatalogTestCase(unittest.TestCase):
def setUp(self):
self.olddir = os.getcwd()
self.datadir = os.path.join(this_dir, 'data')
os.chdir(self.datadir)
_global_log.threshold = 5 # shut up distutils logging
self.dist = Distribution(dict(
name='TestProject',
version='0.1',
packages=['project']
))
self.cmd = frontend.init_catalog(self.dist)
self.cmd.initialize_options()
def tearDown(self):
for dirname in ['en_US', 'ja_JP', 'lv_LV']:
locale_dir = os.path.join(self._i18n_dir(), dirname)
if os.path.isdir(locale_dir):
shutil.rmtree(locale_dir)
os.chdir(self.olddir)
def _i18n_dir(self):
return os.path.join(self.datadir, 'project', 'i18n')
def _po_file(self, locale):
return os.path.join(self._i18n_dir(), locale, 'LC_MESSAGES',
'messages.po')
def test_no_input_file(self):
self.cmd.locale = 'en_US'
self.cmd.output_file = 'dummy'
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
def test_no_locale(self):
self.cmd.input_file = 'dummy'
self.cmd.output_file = 'dummy'
self.assertRaises(DistutilsOptionError, self.cmd.finalize_options)
def test_with_output_dir(self):
self.cmd.input_file = 'project/i18n/messages.pot'
self.cmd.locale = 'en_US'
self.cmd.output_dir = 'project/i18n'
self.cmd.finalize_options()
self.cmd.run()
po_file = self._po_file('en_US')
assert os.path.isfile(po_file)
expected_content = r"""# English (United States) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: en_US <[email protected]>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_keeps_catalog_non_fuzzy(self):
self.cmd.input_file = 'project/i18n/messages_non_fuzzy.pot'
self.cmd.locale = 'en_US'
self.cmd.output_dir = 'project/i18n'
self.cmd.finalize_options()
self.cmd.run()
po_file = self._po_file('en_US')
assert os.path.isfile(po_file)
expected_content = r"""# English (United States) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: en_US <[email protected]>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_correct_init_more_than_2_plurals(self):
self.cmd.input_file = 'project/i18n/messages.pot'
self.cmd.locale = 'lv_LV'
self.cmd.output_dir = 'project/i18n'
self.cmd.finalize_options()
self.cmd.run()
po_file = self._po_file('lv_LV')
assert os.path.isfile(po_file)
expected_content = r"""# Latvian (Latvia) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: lv_LV <[email protected]>\n"
"Plural-Forms: nplurals=3; plural=(n%%10==1 && n%%100!=11 ? 0 : n != 0 ? 1 :"
" 2)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
msgstr[2] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_correct_init_singular_plural_forms(self):
self.cmd.input_file = 'project/i18n/messages.pot'
self.cmd.locale = 'ja_JP'
self.cmd.output_dir = 'project/i18n'
self.cmd.finalize_options()
self.cmd.run()
po_file = self._po_file('ja_JP')
assert os.path.isfile(po_file)
expected_content = r"""# Japanese (Japan) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: ja_JP <[email protected]>\n"
"Plural-Forms: nplurals=1; plural=0\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='ja_JP')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_supports_no_wrap(self):
self.cmd.input_file = 'project/i18n/long_messages.pot'
self.cmd.locale = 'en_US'
self.cmd.output_dir = 'project/i18n'
        long_message = '"' + 'xxxxx ' * 15 + '"'
with open('project/i18n/messages.pot', 'rb') as f:
pot_contents = f.read().decode('latin-1')
pot_with_very_long_line = pot_contents.replace('"bar"', long_message)
with open(self.cmd.input_file, 'wb') as f:
f.write(pot_with_very_long_line.encode('latin-1'))
self.cmd.no_wrap = True
self.cmd.finalize_options()
self.cmd.run()
po_file = self._po_file('en_US')
assert os.path.isfile(po_file)
expected_content = r"""# English (United States) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: en_US <[email protected]>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid %(long_message)s
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en_US'),
'long_message': long_message}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_supports_width(self):
self.cmd.input_file = 'project/i18n/long_messages.pot'
self.cmd.locale = 'en_US'
self.cmd.output_dir = 'project/i18n'
long_message = '"'+ 'xxxxx '*15 + '"'
with open('project/i18n/messages.pot', 'rb') as f:
pot_contents = f.read().decode('latin-1')
pot_with_very_long_line = pot_contents.replace('"bar"', long_message)
with open(self.cmd.input_file, 'wb') as f:
f.write(pot_with_very_long_line.encode('latin-1'))
self.cmd.width = 120
self.cmd.finalize_options()
self.cmd.run()
po_file = self._po_file('en_US')
assert os.path.isfile(po_file)
expected_content = r"""# English (United States) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: en_US <[email protected]>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid %(long_message)s
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en_US'),
'long_message': long_message}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
class CommandLineInterfaceTestCase(unittest.TestCase):
def setUp(self):
self.datadir = os.path.join(this_dir, 'data')
self.orig_working_dir = os.getcwd()
self.orig_argv = sys.argv
self.orig_stdout = sys.stdout
self.orig_stderr = sys.stderr
sys.argv = ['pybabel']
sys.stdout = StringIO()
sys.stderr = StringIO()
os.chdir(self.datadir)
self._remove_log_handlers()
self.cli = frontend.CommandLineInterface()
def tearDown(self):
os.chdir(self.orig_working_dir)
sys.argv = self.orig_argv
sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr
for dirname in ['lv_LV', 'ja_JP']:
locale_dir = os.path.join(self._i18n_dir(), dirname)
if os.path.isdir(locale_dir):
shutil.rmtree(locale_dir)
self._remove_log_handlers()
def _remove_log_handlers(self):
# Logging handlers will be reused if possible (#227). This breaks the
# implicit assumption that our newly created StringIO for sys.stderr
# contains the console output. Removing the old handler ensures that a
# new handler with our new StringIO instance will be used.
log = logging.getLogger('babel')
for handler in log.handlers:
log.removeHandler(handler)
def test_usage(self):
try:
self.cli.run(sys.argv)
self.fail('Expected SystemExit')
except SystemExit as e:
self.assertEqual(2, e.code)
self.assertEqual("""\
usage: pybabel command [options] [args]
pybabel: error: no valid command or option passed. try the -h/--help option for more information.
""", sys.stderr.getvalue().lower())
def _run_init_catalog(self):
i18n_dir = os.path.join(self.datadir, 'project', 'i18n')
pot_path = os.path.join(self.datadir, 'project', 'i18n', 'messages.pot')
init_argv = sys.argv + ['init', '--locale', 'en_US', '-d', i18n_dir,
'-i', pot_path]
self.cli.run(init_argv)
def test_no_duplicated_output_for_multiple_runs(self):
self._run_init_catalog()
first_output = sys.stderr.getvalue()
self._run_init_catalog()
second_output = sys.stderr.getvalue()[len(first_output):]
        # If the log message is not duplicated, we should get the same
        # output as before.
self.assertEqual(first_output, second_output)
def test_frontend_can_log_to_predefined_handler(self):
custom_stream = StringIO()
log = logging.getLogger('babel')
log.addHandler(logging.StreamHandler(custom_stream))
self._run_init_catalog()
self.assertNotEqual(id(sys.stderr), id(custom_stream))
self.assertEqual('', sys.stderr.getvalue())
assert len(custom_stream.getvalue()) > 0
def test_help(self):
try:
self.cli.run(sys.argv + ['--help'])
self.fail('Expected SystemExit')
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual("""\
usage: pybabel command [options] [args]
options:
--version show program's version number and exit
-h, --help show this help message and exit
--list-locales print all known locales and exit
-v, --verbose print as much as possible
-q, --quiet print as little as possible
commands:
compile compile message catalogs to mo files
extract extract messages from source files and generate a pot file
init create new message catalogs from a pot file
update update existing message catalogs from a pot file
""", sys.stdout.getvalue().lower())
def _pot_file(self):
return os.path.join(self._i18n_dir(), 'temp.pot')
def assert_pot_file_exists(self):
assert os.path.isfile(self._pot_file())
def test_extract_with_default_mapping(self):
pot_file = self._pot_file()
self.cli.run(sys.argv + ['extract',
'--copyright-holder', 'FooBar, Inc.',
'--project', 'TestProject', '--version', '0.1',
'--msgid-bugs-address', '[email protected]',
'-c', 'TRANSLATOR', '-c', 'TRANSLATORS:',
'-o', pot_file, 'project'])
self.assert_pot_file_exists()
expected_content = r"""# Translations template for TestProject.
# Copyright (C) %(year)s FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, %(year)s.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: %(date)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. TRANSLATOR: This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
#: project/ignored/this_wont_normally_be_here.py:11
msgid "FooBar"
msgid_plural "FooBars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'year': time.strftime('%Y'),
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(pot_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_extract_with_mapping_file(self):
pot_file = self._pot_file()
self.cli.run(sys.argv + ['extract',
'--copyright-holder', 'FooBar, Inc.',
'--project', 'TestProject', '--version', '0.1',
'--msgid-bugs-address', '[email protected]',
'--mapping', os.path.join(self.datadir, 'mapping.cfg'),
'-c', 'TRANSLATOR', '-c', 'TRANSLATORS:',
'-o', pot_file, 'project'])
self.assert_pot_file_exists()
expected_content = r"""# Translations template for TestProject.
# Copyright (C) %(year)s FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, %(year)s.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: %(date)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. TRANSLATOR: This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'year': time.strftime('%Y'),
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(pot_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_init_with_output_dir(self):
po_file = self._po_file('en_US')
self.cli.run(sys.argv + ['init',
'--locale', 'en_US',
'-d', os.path.join(self._i18n_dir()),
'-i', os.path.join(self._i18n_dir(), 'messages.pot')])
assert os.path.isfile(po_file)
expected_content = r"""# English (United States) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: en_US <[email protected]>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def _i18n_dir(self):
return os.path.join(self.datadir, 'project', 'i18n')
def test_init_singular_plural_forms(self):
po_file = self._po_file('ja_JP')
self.cli.run(sys.argv + ['init',
'--locale', 'ja_JP',
'-d', os.path.join(self._i18n_dir()),
'-i', os.path.join(self._i18n_dir(), 'messages.pot')])
assert os.path.isfile(po_file)
expected_content = r"""# Japanese (Japan) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: ja_JP <[email protected]>\n"
"Plural-Forms: nplurals=1; plural=0\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_init_more_than_2_plural_forms(self):
po_file = self._po_file('lv_LV')
self.cli.run(sys.argv + ['init',
'--locale', 'lv_LV',
'-d', self._i18n_dir(),
'-i', os.path.join(self._i18n_dir(), 'messages.pot')])
assert os.path.isfile(po_file)
expected_content = r"""# Latvian (Latvia) translations for TestProject.
# Copyright (C) 2007 FooBar, Inc.
# This file is distributed under the same license as the TestProject
# project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: TestProject 0.1\n"
"Report-Msgid-Bugs-To: [email protected]\n"
"POT-Creation-Date: 2007-04-01 15:30+0200\n"
"PO-Revision-Date: %(date)s\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: lv_LV <[email protected]>\n"
"Plural-Forms: nplurals=3; plural=(n%%10==1 && n%%100!=11 ? 0 : n != 0 ? 1 :"
" 2)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel %(version)s\n"
#. This will be a translator coment,
#. that will include several lines
#: project/file1.py:8
msgid "bar"
msgstr ""
#: project/file2.py:9
msgid "foobar"
msgid_plural "foobars"
msgstr[0] ""
msgstr[1] ""
msgstr[2] ""
""" % {'version': VERSION,
'date': format_datetime(datetime.now(LOCALTZ), 'yyyy-MM-dd HH:mmZ',
tzinfo=LOCALTZ, locale='en')}
with open(po_file, 'U') as f:
actual_content = f.read()
self.assertEqual(expected_content, actual_content)
def test_compile_catalog(self):
po_file = self._po_file('de_DE')
mo_file = po_file.replace('.po', '.mo')
self.cli.run(sys.argv + ['compile',
'--locale', 'de_DE',
'-d', self._i18n_dir()])
assert not os.path.isfile(mo_file), 'Expected no file at %r' % mo_file
self.assertEqual("""\
catalog %r is marked as fuzzy, skipping
""" % (po_file), sys.stderr.getvalue())
def test_compile_fuzzy_catalog(self):
po_file = self._po_file('de_DE')
mo_file = po_file.replace('.po', '.mo')
try:
self.cli.run(sys.argv + ['compile',
'--locale', 'de_DE', '--use-fuzzy',
'-d', self._i18n_dir()])
assert os.path.isfile(mo_file)
self.assertEqual("""\
compiling catalog %r to %r
""" % (po_file, mo_file), sys.stderr.getvalue())
finally:
if os.path.isfile(mo_file):
os.unlink(mo_file)
def _po_file(self, locale):
return os.path.join(self._i18n_dir(), locale, 'LC_MESSAGES',
'messages.po')
def test_compile_catalog_with_more_than_2_plural_forms(self):
po_file = self._po_file('ru_RU')
mo_file = po_file.replace('.po', '.mo')
try:
self.cli.run(sys.argv + ['compile',
'--locale', 'ru_RU', '--use-fuzzy',
'-d', self._i18n_dir()])
assert os.path.isfile(mo_file)
self.assertEqual("""\
compiling catalog %r to %r
""" % (po_file, mo_file), sys.stderr.getvalue())
finally:
if os.path.isfile(mo_file):
os.unlink(mo_file)
def test_parse_mapping():
buf = StringIO(
'[extractors]\n'
'custom = mypackage.module:myfunc\n'
'\n'
'# Python source files\n'
'[python: **.py]\n'
'\n'
'# Genshi templates\n'
'[genshi: **/templates/**.html]\n'
'include_attrs =\n'
'[genshi: **/templates/**.txt]\n'
'template_class = genshi.template:TextTemplate\n'
'encoding = latin-1\n'
'\n'
'# Some custom extractor\n'
'[custom: **/custom/*.*]\n')
method_map, options_map = frontend.parse_mapping(buf)
assert len(method_map) == 4
assert method_map[0] == ('**.py', 'python')
assert options_map['**.py'] == {}
assert method_map[1] == ('**/templates/**.html', 'genshi')
assert options_map['**/templates/**.html']['include_attrs'] == ''
assert method_map[2] == ('**/templates/**.txt', 'genshi')
assert (options_map['**/templates/**.txt']['template_class']
== 'genshi.template:TextTemplate')
assert options_map['**/templates/**.txt']['encoding'] == 'latin-1'
assert method_map[3] == ('**/custom/*.*', 'mypackage.module:myfunc')
assert options_map['**/custom/*.*'] == {}
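# Editorial gloss (not part of the original test module): parse_mapping
# returns the (pattern, method) pairs in the order they appear in the file,
# plus a per-pattern options dict built from the key/value lines inside each
# section -- which is exactly what the assertions above exercise.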
def test_parse_keywords():
kw = frontend.parse_keywords(['_', 'dgettext:2',
'dngettext:2,3', 'pgettext:1c,2'])
assert kw == {
'_': None,
'dgettext': (2,),
'dngettext': (2, 3),
'pgettext': ((1, 'c'), 2),
}
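# Editorial gloss (not part of the original test module): in Babel's
# keyword-spec syntax a bare name such as '_' means "take the msgid from the
# first argument"; 'dgettext:2' takes it from argument 2; 'dngettext:2,3'
# marks the singular and plural arguments; and the 'c' suffix in
# 'pgettext:1c,2' flags argument 1 as the message context.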
| bsd-3-clause | 4,559,972,466,620,809,700 | 31.544131 | 97 | 0.623218 | false |
fintech-circle/edx-platform | lms/djangoapps/certificates/queue.py | 1 | 22075 | """Interface for adding certificate generation tasks to the XQueue. """
import json
import random
import logging
import lxml.html
from lxml.etree import XMLSyntaxError, ParserError
from uuid import uuid4
from django.test.client import RequestFactory
from django.conf import settings
from django.core.urlresolvers import reverse
from requests.auth import HTTPBasicAuth
from lms.djangoapps.grades.new.course_grade_factory import CourseGradeFactory
from xmodule.modulestore.django import modulestore
from capa.xqueue_interface import XQueueInterface
from capa.xqueue_interface import make_xheader, make_hashkey
from course_modes.models import CourseMode
from student.models import UserProfile, CourseEnrollment
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification
from certificates.models import (
CertificateStatuses,
GeneratedCertificate,
certificate_status_for_student,
CertificateStatuses as status,
CertificateWhitelist,
ExampleCertificate
)
LOGGER = logging.getLogger(__name__)
class XQueueAddToQueueError(Exception):
"""An error occurred when adding a certificate task to the queue. """
def __init__(self, error_code, error_msg):
self.error_code = error_code
self.error_msg = error_msg
super(XQueueAddToQueueError, self).__init__(unicode(self))
def __unicode__(self):
return (
u"Could not add certificate to the XQueue. "
u"The error code was '{code}' and the message was '{msg}'."
).format(
code=self.error_code,
msg=self.error_msg
)
class XQueueCertInterface(object):
"""
XQueueCertificateInterface provides an
interface to the xqueue server for
managing student certificates.
Instantiating an object will create a new
connection to the queue server.
See models.py for valid state transitions,
summary of methods:
add_cert: Add a new certificate. Puts a single
request on the queue for the student/course.
Once the certificate is generated a post
will be made to the update_certificate
view which will save the certificate
download URL.
regen_cert: Regenerate an existing certificate.
For a user that already has a certificate
this will delete the existing one and
generate a new cert.
    del_cert: Delete an existing certificate.
              For a user that already has a certificate
              this will delete their cert.
"""
def __init__(self, request=None):
# Get basic auth (username/password) for
# xqueue connection if it's in the settings
if settings.XQUEUE_INTERFACE.get('basic_auth') is not None:
requests_auth = HTTPBasicAuth(
*settings.XQUEUE_INTERFACE['basic_auth'])
else:
requests_auth = None
if request is None:
factory = RequestFactory()
self.request = factory.get('/')
else:
self.request = request
self.xqueue_interface = XQueueInterface(
settings.XQUEUE_INTERFACE['url'],
settings.XQUEUE_INTERFACE['django_auth'],
requests_auth,
)
self.whitelist = CertificateWhitelist.objects.all()
self.restricted = UserProfile.objects.filter(allow_certificate=False)
self.use_https = True
def regen_cert(self, student, course_id, course=None, forced_grade=None, template_file=None, generate_pdf=True):
"""(Re-)Make certificate for a particular student in a particular course
Arguments:
student - User.object
course_id - courseenrollment.course_id (string)
WARNING: this command will leave the old certificate, if one exists,
        lying around in AWS taking up space. If this is a problem,
take pains to clean up storage before running this command.
Change the certificate status to unavailable (if it exists) and request
grading. Passing grades will put a certificate request on the queue.
Return the certificate.
"""
        # TODO: when del_cert is implemented and plumbed through the
        # certificates repo as well, do a deletion followed by a creation
        # rather than a simple recreation. XXX: this leaves orphan cert
        # files lying around in AWS. See the note in the docstring too.
try:
certificate = GeneratedCertificate.eligible_certificates.get(user=student, course_id=course_id)
LOGGER.info(
(
u"Found an existing certificate entry for student %s "
u"in course '%s' "
u"with status '%s' while regenerating certificates. "
),
student.id,
unicode(course_id),
certificate.status
)
certificate.status = status.unavailable
certificate.save()
LOGGER.info(
(
u"The certificate status for student %s "
u"in course '%s' has been changed to '%s'."
),
student.id,
unicode(course_id),
certificate.status
)
except GeneratedCertificate.DoesNotExist:
pass
return self.add_cert(
student,
course_id,
course=course,
forced_grade=forced_grade,
template_file=template_file,
generate_pdf=generate_pdf
)
def del_cert(self, student, course_id):
"""
Arguments:
student - User.object
course_id - courseenrollment.course_id (string)
Removes certificate for a student, will change
the certificate status to 'deleting'.
Certificate must be in the 'error' or 'downloadable' state
otherwise it will return the current state
"""
raise NotImplementedError
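    # A sketch of what an implementation might look like, following the
    # docstring above (an assumption -- del_cert was never implemented
    # upstream):
    #
    #     cert = GeneratedCertificate.eligible_certificates.get(
    #         user=student, course_id=course_id)
    #     if cert.status in (status.error, status.downloadable):
    #         cert.status = status.deleting
    #         cert.save()
    #     return cert.status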
# pylint: disable=too-many-statements
def add_cert(self, student, course_id, course=None, forced_grade=None, template_file=None, generate_pdf=True):
"""
Request a new certificate for a student.
Arguments:
student - User.object
course_id - courseenrollment.course_id (CourseKey)
forced_grade - a string indicating a grade parameter to pass with
the certificate request. If this is given, grading
will be skipped.
generate_pdf - Boolean should a message be sent in queue to generate certificate PDF
Will change the certificate status to 'generating' or
`downloadable` in case of web view certificates.
The course must not be a CCX.
Certificate must be in the 'unavailable', 'error',
'deleted' or 'generating' state.
If a student has a passing grade or is in the whitelist
table for the course a request will be made for a new cert.
If a student has allow_certificate set to False in the
userprofile table the status will change to 'restricted'
If a student does not have a passing grade the status
will change to status.notpassing
Returns the newly created certificate instance
"""
if hasattr(course_id, 'ccx'):
LOGGER.warning(
(
u"Cannot create certificate generation task for user %s "
u"in the course '%s'; "
u"certificates are not allowed for CCX courses."
),
student.id,
unicode(course_id)
)
return None
valid_statuses = [
status.generating,
status.unavailable,
status.deleted,
status.error,
status.notpassing,
status.downloadable,
status.auditing,
status.audit_passing,
status.audit_notpassing,
]
cert_status = certificate_status_for_student(student, course_id)['status']
cert = None
if cert_status not in valid_statuses:
LOGGER.warning(
(
u"Cannot create certificate generation task for user %s "
u"in the course '%s'; "
u"the certificate status '%s' is not one of %s."
),
student.id,
unicode(course_id),
cert_status,
unicode(valid_statuses)
)
return None
# The caller can optionally pass a course in to avoid
# re-fetching it from Mongo. If they have not provided one,
# get it from the modulestore.
if course is None:
course = modulestore().get_course(course_id, depth=0)
profile = UserProfile.objects.get(user=student)
profile_name = profile.name
# Needed for access control in grading.
self.request.user = student
self.request.session = {}
is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()
course_grade = CourseGradeFactory().create(student, course)
enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(student, course_id)
mode_is_verified = enrollment_mode in GeneratedCertificate.VERIFIED_CERTS_MODES
user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)
cert_mode = enrollment_mode
is_eligible_for_certificate = is_whitelisted or CourseMode.is_eligible_for_certificate(enrollment_mode)
unverified = False
# For credit mode generate verified certificate
if cert_mode == CourseMode.CREDIT_MODE:
cert_mode = CourseMode.VERIFIED
if template_file is not None:
template_pdf = template_file
elif mode_is_verified and user_is_verified:
template_pdf = "certificate-template-{id.org}-{id.course}-verified.pdf".format(id=course_id)
elif mode_is_verified and not user_is_verified:
template_pdf = "certificate-template-{id.org}-{id.course}.pdf".format(id=course_id)
if CourseMode.mode_for_course(course_id, CourseMode.HONOR):
cert_mode = GeneratedCertificate.MODES.honor
else:
unverified = True
else:
# honor code and audit students
template_pdf = "certificate-template-{id.org}-{id.course}.pdf".format(id=course_id)
LOGGER.info(
(
u"Certificate generated for student %s in the course: %s with template: %s. "
u"given template: %s, "
u"user is verified: %s, "
u"mode is verified: %s"
),
student.username,
unicode(course_id),
template_pdf,
template_file,
user_is_verified,
mode_is_verified
)
cert, created = GeneratedCertificate.objects.get_or_create(user=student, course_id=course_id) # pylint: disable=no-member
cert.mode = cert_mode
cert.user = student
cert.grade = course_grade.percent
cert.course_id = course_id
cert.name = profile_name
cert.download_url = ''
# Strip HTML from grade range label
grade_contents = forced_grade or course_grade.letter_grade
try:
grade_contents = lxml.html.fromstring(grade_contents).text_content()
passing = True
except (TypeError, XMLSyntaxError, ParserError) as exc:
LOGGER.info(
(
u"Could not retrieve grade for student %s "
u"in the course '%s' "
u"because an exception occurred while parsing the "
u"grade contents '%s' as HTML. "
u"The exception was: '%s'"
),
student.id,
unicode(course_id),
grade_contents,
unicode(exc)
)
# Log if the student is whitelisted
if is_whitelisted:
LOGGER.info(
u"Student %s is whitelisted in '%s'",
student.id,
unicode(course_id)
)
passing = True
else:
passing = False
# If this user's enrollment is not eligible to receive a
# certificate, mark it as such for reporting and
# analytics. Only do this if the certificate is new, or
# already marked as ineligible -- we don't want to mark
# existing audit certs as ineligible.
cutoff = settings.AUDIT_CERT_CUTOFF_DATE
if (cutoff and cert.created_date >= cutoff) and not is_eligible_for_certificate:
cert.status = CertificateStatuses.audit_passing if passing else CertificateStatuses.audit_notpassing
cert.save()
LOGGER.info(
u"Student %s with enrollment mode %s is not eligible for a certificate.",
student.id,
enrollment_mode
)
return cert
# If they are not passing, short-circuit and don't generate cert
elif not passing:
cert.status = status.notpassing
cert.save()
LOGGER.info(
(
u"Student %s does not have a grade for '%s', "
u"so their certificate status has been set to '%s'. "
u"No certificate generation task was sent to the XQueue."
),
student.id,
unicode(course_id),
cert.status
)
return cert
        # Check to see whether the student is on the embargoed
# country restricted list. If so, they should not receive a
# certificate -- set their status to restricted and log it.
if self.restricted.filter(user=student).exists():
cert.status = status.restricted
cert.save()
LOGGER.info(
(
u"Student %s is in the embargoed country restricted "
u"list, so their certificate status has been set to '%s' "
u"for the course '%s'. "
u"No certificate generation task was sent to the XQueue."
),
student.id,
cert.status,
unicode(course_id)
)
return cert
if unverified:
cert.status = status.unverified
cert.save()
LOGGER.info(
(
u"User %s has a verified enrollment in course %s "
u"but is missing ID verification. "
u"Certificate status has been set to unverified"
),
student.id,
unicode(course_id),
)
return cert
# Finally, generate the certificate and send it off.
return self._generate_cert(cert, course, student, grade_contents, template_pdf, generate_pdf)
def _generate_cert(self, cert, course, student, grade_contents, template_pdf, generate_pdf):
"""
Generate a certificate for the student. If `generate_pdf` is True,
sends a request to XQueue.
"""
course_id = unicode(course.id)
key = make_hashkey(random.random())
cert.key = key
contents = {
'action': 'create',
'username': student.username,
'course_id': course_id,
'course_name': course.display_name or course_id,
'name': cert.name,
'grade': grade_contents,
'template_pdf': template_pdf,
}
if generate_pdf:
cert.status = status.generating
else:
cert.status = status.downloadable
cert.verify_uuid = uuid4().hex
cert.save()
if generate_pdf:
try:
self._send_to_xqueue(contents, key)
except XQueueAddToQueueError as exc:
cert.status = ExampleCertificate.STATUS_ERROR
cert.error_reason = unicode(exc)
cert.save()
LOGGER.critical(
(
u"Could not add certificate task to XQueue. "
u"The course was '%s' and the student was '%s'."
u"The certificate task status has been marked as 'error' "
u"and can be re-submitted with a management command."
), course_id, student.id
)
else:
LOGGER.info(
(
u"The certificate status has been set to '%s'. "
u"Sent a certificate grading task to the XQueue "
u"with the key '%s'. "
),
cert.status,
key
)
return cert
def add_example_cert(self, example_cert):
"""Add a task to create an example certificate.
Unlike other certificates, an example certificate is
not associated with any particular user and is never
shown to students.
If an error occurs when adding the example certificate
to the queue, the example certificate status
will be set to "error".
Arguments:
example_cert (ExampleCertificate)
"""
contents = {
'action': 'create',
'course_id': unicode(example_cert.course_key),
'name': example_cert.full_name,
'template_pdf': example_cert.template,
# Example certificates are not associated with a particular user.
# However, we still need to find the example certificate when
# we receive a response from the queue. For this reason,
# we use the example certificate's unique identifier as a username.
# Note that the username is *not* displayed on the certificate;
# it is used only to identify the certificate task in the queue.
'username': example_cert.uuid,
# We send this extra parameter to differentiate
# example certificates from other certificates.
# This is not used by the certificates workers or XQueue.
'example_certificate': True,
}
# The callback for example certificates is different than the callback
# for other certificates. Although both tasks use the same queue,
# we can distinguish whether the certificate was an example cert based
# on which end-point XQueue uses once the task completes.
callback_url_path = reverse('certificates.views.update_example_certificate')
try:
self._send_to_xqueue(
contents,
example_cert.access_key,
task_identifier=example_cert.uuid,
callback_url_path=callback_url_path
)
LOGGER.info(u"Started generating example certificates for course '%s'.", example_cert.course_key)
except XQueueAddToQueueError as exc:
example_cert.update_status(
ExampleCertificate.STATUS_ERROR,
error_reason=unicode(exc)
)
LOGGER.critical(
(
u"Could not add example certificate with uuid '%s' to XQueue. "
u"The exception was %s. "
u"The example certificate has been marked with status 'error'."
), example_cert.uuid, unicode(exc)
)
def _send_to_xqueue(self, contents, key, task_identifier=None, callback_url_path='/update_certificate'):
"""Create a new task on the XQueue.
Arguments:
contents (dict): The contents of the XQueue task.
key (str): An access key for the task. This will be sent
to the callback end-point once the task completes,
so that we can validate that the sender is the same
entity that received the task.
Keyword Arguments:
callback_url_path (str): The path of the callback URL.
If not provided, use the default end-point for student-generated
certificates.
"""
callback_url = u'{protocol}://{base_url}{path}'.format(
protocol=("https" if self.use_https else "http"),
base_url=settings.SITE_NAME,
path=callback_url_path
)
# Append the key to the URL
# This is necessary because XQueue assumes that only one
# submission is active for a particular URL.
# If it receives a second submission with the same callback URL,
# it "retires" any other submission with the same URL.
# This was a hack that depended on the URL containing the user ID
# and courseware location; an assumption that does not apply
# to certificate generation.
# XQueue also truncates the callback URL to 128 characters,
# but since our key lengths are shorter than that, this should
# not affect us.
callback_url += "?key={key}".format(
key=(
task_identifier
if task_identifier is not None
else key
)
)
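        # For example (illustrative values only): with SITE_NAME set to
        # 'lms.example.com', HTTPS enabled and key 'abc123', the callback
        # URL sent to XQueue would be:
        #
        #     https://lms.example.com/update_certificate?key=abc123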
xheader = make_xheader(callback_url, key, settings.CERT_QUEUE)
(error, msg) = self.xqueue_interface.send_to_queue(
header=xheader, body=json.dumps(contents))
if error:
exc = XQueueAddToQueueError(error, msg)
LOGGER.critical(unicode(exc))
raise exc
| agpl-3.0 | 1,843,334,779,938,168,800 | 36.670648 | 130 | 0.572956 | false |
vtemian/django-openid-auth | django_openid_auth/tests/test_views.py | 7 | 58521 | # django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2009-2010 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import cgi
import unittest
from urllib import quote_plus
from django.conf import settings
from django.contrib.auth.models import User, Group
from django.http import HttpRequest, HttpResponse
from django.test import TestCase
from openid.consumer.consumer import Consumer, SuccessResponse
from openid.consumer.discover import OpenIDServiceEndpoint
from openid.extensions import ax, sreg, pape
from openid.fetchers import (
HTTPFetcher, HTTPFetchingError, HTTPResponse, setDefaultFetcher)
from openid.oidutil import importElementTree
from openid.server.server import BROWSER_REQUEST_MODES, ENCODE_URL, Server
from openid.store.memstore import MemoryStore
from openid.message import OPENID1_URL_LIMIT, IDENTIFIER_SELECT
from django_openid_auth import teams
from django_openid_auth.models import UserOpenID
from django_openid_auth.views import (
sanitise_redirect_url,
make_consumer,
)
from django_openid_auth.auth import OpenIDBackend
from django_openid_auth.signals import openid_login_complete
from django_openid_auth.store import DjangoOpenIDStore
from django_openid_auth.exceptions import (
MissingUsernameViolation,
DuplicateUsernameViolation,
MissingPhysicalMultiFactor,
RequiredAttributeNotReturned,
)
ET = importElementTree()
class StubOpenIDProvider(HTTPFetcher):
def __init__(self, base_url):
self.store = MemoryStore()
self.identity_url = base_url + 'identity'
self.localid_url = base_url + 'localid'
self.endpoint_url = base_url + 'endpoint'
self.server = Server(self.store, self.endpoint_url)
self.last_request = None
self.type_uris = ['http://specs.openid.net/auth/2.0/signon']
def fetch(self, url, body=None, headers=None):
if url == self.identity_url:
# Serve an XRDS document directly, pointing at our endpoint.
type_uris = ['<Type>%s</Type>' % uri for uri in self.type_uris]
return HTTPResponse(
url, 200, {'content-type': 'application/xrds+xml'}, """\
<?xml version="1.0"?>
<xrds:XRDS
xmlns="xri://$xrd*($v*2.0)"
xmlns:xrds="xri://$xrds">
<XRD>
<Service priority="0">
%s
<URI>%s</URI>
<LocalID>%s</LocalID>
</Service>
</XRD>
</xrds:XRDS>
""" % ('\n'.join(type_uris), self.endpoint_url, self.localid_url))
elif url.startswith(self.endpoint_url):
# Gather query parameters
query = {}
if '?' in url:
query.update(cgi.parse_qsl(url.split('?', 1)[1]))
if body is not None:
query.update(cgi.parse_qsl(body))
self.last_request = self.server.decodeRequest(query)
            # The browser-based requests should not be handled through
            # the fetcher interface.
assert self.last_request.mode not in BROWSER_REQUEST_MODES
response = self.server.handleRequest(self.last_request)
webresponse = self.server.encodeResponse(response)
return HTTPResponse(url, webresponse.code, webresponse.headers,
webresponse.body)
else:
raise HTTPFetchingError('unknown URL %s' % url)
def parseFormPost(self, content):
"""Parse an HTML form post to create an OpenID request."""
        # Hack to make the inline javascript XML compliant: escape the raw
        # '<' so ElementTree can parse the document.
        content = content.replace('i < elements.length',
                                  'i &lt; elements.length')
tree = ET.XML(content)
form = tree.find('.//form')
assert form is not None, 'No form in document'
assert form.get('action') == self.endpoint_url, (
'Form posts to %s instead of %s' % (form.get('action'),
self.endpoint_url))
query = {}
for input in form.findall('input'):
if input.get('type') != 'hidden':
continue
query[input.get('name').encode('UTF-8')] = \
input.get('value').encode('UTF-8')
self.last_request = self.server.decodeRequest(query)
return self.last_request
class DummyDjangoRequest(object):
def __init__(self, request_path):
self.request_path = request_path
self.META = {
'HTTP_HOST': "localhost",
'SCRIPT_NAME': "http://localhost",
'SERVER_PROTOCOL': "http",
}
self.POST = {
'openid_identifier': "http://example.com/identity",
}
self.GET = {}
self.session = {}
def get_full_path(self):
return self.META['SCRIPT_NAME'] + self.request_path
def build_absolute_uri(self):
return self.META['SCRIPT_NAME'] + self.request_path
def _combined_request(self):
request = {}
request.update(self.POST)
request.update(self.GET)
return request
REQUEST = property(_combined_request)
class RelyingPartyTests(TestCase):
urls = 'django_openid_auth.tests.urls'
def setUp(self):
super(RelyingPartyTests, self).setUp()
self.provider = StubOpenIDProvider('http://example.com/')
self.req = DummyDjangoRequest('http://localhost/')
self.endpoint = OpenIDServiceEndpoint()
self.endpoint.claimed_id = 'http://example.com/identity'
self.endpoint.server_url = 'http://example.com/'
self.consumer = make_consumer(self.req)
self.server = Server(DjangoOpenIDStore())
setDefaultFetcher(self.provider, wrap_exceptions=False)
self.old_login_redirect_url = getattr(settings, 'LOGIN_REDIRECT_URL', '/accounts/profile/')
self.old_create_users = getattr(settings, 'OPENID_CREATE_USERS', False)
self.old_strict_usernames = getattr(settings, 'OPENID_STRICT_USERNAMES', False)
self.old_update_details = getattr(settings, 'OPENID_UPDATE_DETAILS_FROM_SREG', False)
self.old_sso_server_url = getattr(settings, 'OPENID_SSO_SERVER_URL', None)
self.old_teams_map = getattr(settings, 'OPENID_LAUNCHPAD_TEAMS_MAPPING', {})
self.old_use_as_admin_login = getattr(settings, 'OPENID_USE_AS_ADMIN_LOGIN', False)
self.old_follow_renames = getattr(settings, 'OPENID_FOLLOW_RENAMES', False)
self.old_physical_multifactor = getattr(settings, 'OPENID_PHYSICAL_MULTIFACTOR_REQUIRED', False)
self.old_login_render_failure = getattr(settings, 'OPENID_RENDER_FAILURE', None)
self.old_consumer_complete = Consumer.complete
self.old_required_fields = getattr(
settings, 'OPENID_SREG_REQUIRED_FIELDS', [])
settings.OPENID_CREATE_USERS = False
settings.OPENID_STRICT_USERNAMES = False
settings.OPENID_UPDATE_DETAILS_FROM_SREG = False
settings.OPENID_SSO_SERVER_URL = None
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING = {}
settings.OPENID_USE_AS_ADMIN_LOGIN = False
settings.OPENID_FOLLOW_RENAMES = False
settings.OPENID_PHYSICAL_MULTIFACTOR_REQUIRED = False
settings.OPENID_SREG_REQUIRED_FIELDS = []
def tearDown(self):
settings.LOGIN_REDIRECT_URL = self.old_login_redirect_url
settings.OPENID_CREATE_USERS = self.old_create_users
settings.OPENID_STRICT_USERNAMES = self.old_strict_usernames
settings.OPENID_UPDATE_DETAILS_FROM_SREG = self.old_update_details
settings.OPENID_SSO_SERVER_URL = self.old_sso_server_url
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING = self.old_teams_map
settings.OPENID_USE_AS_ADMIN_LOGIN = self.old_use_as_admin_login
settings.OPENID_FOLLOW_RENAMES = self.old_follow_renames
settings.OPENID_PHYSICAL_MULTIFACTOR_REQUIRED = self.old_physical_multifactor
settings.OPENID_RENDER_FAILURE = self.old_login_render_failure
Consumer.complete = self.old_consumer_complete
settings.OPENID_SREG_REQUIRED_FIELDS = self.old_required_fields
setDefaultFetcher(None)
super(RelyingPartyTests, self).tearDown()
def complete(self, openid_response):
"""Complete an OpenID authentication request."""
# The server can generate either a redirect or a form post
# here. For simplicity, force generation of a redirect.
openid_response.whichEncoding = lambda: ENCODE_URL
webresponse = self.provider.server.encodeResponse(openid_response)
self.assertEquals(webresponse.code, 302)
redirect_to = webresponse.headers['location']
self.assertTrue(redirect_to.startswith(
'http://testserver/openid/complete/'))
return self.client.get('/openid/complete/',
dict(cgi.parse_qsl(redirect_to.split('?', 1)[1])))
def test_login(self):
user = User.objects.create_user('someuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# The login form is displayed:
response = self.client.get('/openid/login/')
self.assertTemplateUsed(response, 'openid/login.html')
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
openid_request = self.provider.parseFormPost(response.content)
self.assertEquals(openid_request.mode, 'checkid_setup')
self.assertTrue(openid_request.return_to.startswith(
'http://testserver/openid/complete/'))
# Complete the request. The user is redirected to the next URL.
openid_response = openid_request.answer(True)
response = self.complete(openid_response)
self.assertRedirects(response, 'http://testserver/getuser/')
# And they are now logged in:
response = self.client.get('/getuser/')
self.assertEquals(response.content, 'someuser')
def test_login_no_next(self):
"""Logins with no next parameter redirect to LOGIN_REDIRECT_URL."""
user = User.objects.create_user('someuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
settings.LOGIN_REDIRECT_URL = '/getuser/'
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity'})
self.assertContains(response, 'OpenID transaction in progress')
openid_request = self.provider.parseFormPost(response.content)
self.assertEquals(openid_request.mode, 'checkid_setup')
self.assertTrue(openid_request.return_to.startswith(
'http://testserver/openid/complete/'))
# Complete the request. The user is redirected to the next URL.
openid_response = openid_request.answer(True)
response = self.complete(openid_response)
self.assertRedirects(
response, 'http://testserver' + settings.LOGIN_REDIRECT_URL)
def test_login_sso(self):
settings.OPENID_SSO_SERVER_URL = 'http://example.com/identity'
user = User.objects.create_user('someuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Requesting the login form immediately begins an
# authentication request.
response = self.client.get('/openid/login/', {'next': '/getuser/'})
self.assertEquals(response.status_code, 200)
self.assertContains(response, 'OpenID transaction in progress')
openid_request = self.provider.parseFormPost(response.content)
self.assertEquals(openid_request.mode, 'checkid_setup')
self.assertTrue(openid_request.return_to.startswith(
'http://testserver/openid/complete/'))
# Complete the request. The user is redirected to the next URL.
openid_response = openid_request.answer(True)
response = self.complete(openid_response)
self.assertRedirects(response, 'http://testserver/getuser/')
# And they are now logged in:
response = self.client.get('/getuser/')
self.assertEquals(response.content, 'someuser')
def test_login_create_users(self):
settings.OPENID_CREATE_USERS = True
# Create a user with the same name as we'll pass back via sreg.
User.objects.create_user('someuser', '[email protected]')
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
openid_response = openid_request.answer(True)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, {'nickname': 'someuser', 'fullname': 'Some User',
'email': '[email protected]'})
openid_response.addExtension(sreg_response)
response = self.complete(openid_response)
self.assertRedirects(response, 'http://testserver/getuser/')
# And they are now logged in as a new user (they haven't taken
# over the existing "someuser" user).
response = self.client.get('/getuser/')
self.assertEquals(response.content, 'someuser2')
# Check the details of the new user.
user = User.objects.get(username='someuser2')
self.assertEquals(user.first_name, 'Some')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def _do_user_login(self, req_data, resp_data, use_sreg=True, use_pape=None):
openid_request = self._get_login_request(req_data)
openid_response = self._get_login_response(openid_request, resp_data, use_sreg, use_pape)
response = self.complete(openid_response)
self.assertRedirects(response, 'http://testserver/getuser/')
return response
def _get_login_request(self, req_data):
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/', req_data)
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
return openid_request
def _get_login_response(self, openid_request, resp_data, use_sreg, use_pape):
openid_response = openid_request.answer(True)
if use_sreg:
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, resp_data)
openid_response.addExtension(sreg_response)
if use_pape is not None:
policies = [
use_pape
]
pape_response = pape.Response(auth_policies=policies)
openid_response.addExtension(pape_response)
return openid_response
def parse_query_string(self, query_str):
query_items = map(tuple,
[item.split('=') for item in query_str.split('&')])
query = dict(query_items)
return query
def test_login_physical_multifactor_request(self):
settings.OPENID_PHYSICAL_MULTIFACTOR_REQUIRED = True
preferred_auth = pape.AUTH_MULTI_FACTOR_PHYSICAL
self.provider.type_uris.append(pape.ns_uri)
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
response = self.client.post('/openid/login/', openid_req)
openid_request = self.provider.parseFormPost(response.content)
request_auth = openid_request.message.getArg(
'http://specs.openid.net/extensions/pape/1.0',
'preferred_auth_policies',
)
self.assertEqual(request_auth, preferred_auth)
def test_login_physical_multifactor_response(self):
settings.OPENID_PHYSICAL_MULTIFACTOR_REQUIRED = True
preferred_auth = pape.AUTH_MULTI_FACTOR_PHYSICAL
self.provider.type_uris.append(pape.ns_uri)
def mock_complete(this, request_args, return_to):
request = {'openid.mode': 'checkid_setup',
'openid.trust_root': 'http://localhost/',
'openid.return_to': 'http://localhost/',
'openid.identity': IDENTIFIER_SELECT,
                       'openid.ns.pape': pape.ns_uri,
'openid.pape.auth_policies': request_args.get('openid.pape.auth_policies', pape.AUTH_NONE),
}
openid_server = self.provider.server
orequest = openid_server.decodeRequest(request)
response = SuccessResponse(
self.endpoint, orequest.message,
signed_fields=['openid.pape.auth_policies',])
return response
Consumer.complete = mock_complete
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': 'testuser', 'fullname': 'Openid User',
'email': '[email protected]'}
response = self._do_user_login(openid_req, openid_resp, use_pape=pape.AUTH_MULTI_FACTOR_PHYSICAL)
query = self.parse_query_string(response.request['QUERY_STRING'])
self.assertTrue('openid.pape.auth_policies' in query)
self.assertEqual(query['openid.pape.auth_policies'],
quote_plus(preferred_auth))
response = self.client.get('/getuser/')
self.assertEqual(response.content, 'testuser')
def test_login_physical_multifactor_not_provided(self):
settings.OPENID_PHYSICAL_MULTIFACTOR_REQUIRED = True
preferred_auth = pape.AUTH_MULTI_FACTOR_PHYSICAL
self.provider.type_uris.append(pape.ns_uri)
def mock_complete(this, request_args, return_to):
request = {'openid.mode': 'checkid_setup',
'openid.trust_root': 'http://localhost/',
'openid.return_to': 'http://localhost/',
'openid.identity': IDENTIFIER_SELECT,
                       'openid.ns.pape': pape.ns_uri,
'openid.pape.auth_policies': request_args.get('openid.pape.auth_policies', pape.AUTH_NONE),
}
openid_server = self.provider.server
orequest = openid_server.decodeRequest(request)
response = SuccessResponse(
self.endpoint, orequest.message,
signed_fields=['openid.pape.auth_policies',])
return response
Consumer.complete = mock_complete
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': 'testuser', 'fullname': 'Openid User',
'email': '[email protected]'}
openid_request = self._get_login_request(openid_req)
        openid_response = self._get_login_response(openid_request, openid_resp, use_sreg=True, use_pape=pape.AUTH_NONE)
response_auth = openid_request.message.getArg(
'http://specs.openid.net/extensions/pape/1.0',
'auth_policies',
)
self.assertNotEqual(response_auth, preferred_auth)
response = self.complete(openid_response)
self.assertEquals(403, response.status_code)
self.assertContains(response, '<h1>OpenID failed</h1>', status_code=403)
self.assertContains(response, '<p>Login requires physical multi-factor authentication.</p>', status_code=403)
def test_login_physical_multifactor_not_provided_override(self):
settings.OPENID_PHYSICAL_MULTIFACTOR_REQUIRED = True
preferred_auth = pape.AUTH_MULTI_FACTOR_PHYSICAL
self.provider.type_uris.append(pape.ns_uri)
# Override the login_failure handler
def mock_login_failure_handler(request, message, status=403,
template_name=None,
exception=None):
self.assertTrue(isinstance(exception, MissingPhysicalMultiFactor))
return HttpResponse('Test Failure Override', status=200)
settings.OPENID_RENDER_FAILURE = mock_login_failure_handler
def mock_complete(this, request_args, return_to):
request = {'openid.mode': 'checkid_setup',
'openid.trust_root': 'http://localhost/',
'openid.return_to': 'http://localhost/',
'openid.identity': IDENTIFIER_SELECT,
                       'openid.ns.pape': pape.ns_uri,
'openid.pape.auth_policies': request_args.get('openid.pape.auth_policies', pape.AUTH_NONE),
}
openid_server = self.provider.server
orequest = openid_server.decodeRequest(request)
response = SuccessResponse(
self.endpoint, orequest.message,
signed_fields=['openid.pape.auth_policies',])
return response
Consumer.complete = mock_complete
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': 'testuser', 'fullname': 'Openid User',
'email': '[email protected]'}
openid_request = self._get_login_request(openid_req)
        openid_response = self._get_login_response(openid_request, openid_resp, use_sreg=True, use_pape=pape.AUTH_NONE)
response_auth = openid_request.message.getArg(
'http://specs.openid.net/extensions/pape/1.0',
'auth_policies',
)
self.assertNotEqual(response_auth, preferred_auth)
# Status code should be 200, since we over-rode the login_failure handler
response = self.complete(openid_response)
self.assertEquals(200, response.status_code)
self.assertContains(response, 'Test Failure Override')
def test_login_without_nickname(self):
settings.OPENID_CREATE_USERS = True
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': '', 'fullname': 'Openid User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
# username defaults to 'openiduser'
self.assertEquals(response.content, 'openiduser')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Openid')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_follow_rename(self):
settings.OPENID_FOLLOW_RENAMES = True
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': 'someuser', 'fullname': 'Some User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
# If OPENID_FOLLOW_RENAMES, they are logged in as
# someuser (the passed in nickname has changed the username)
self.assertEquals(response.content, 'someuser')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Some')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_follow_rename_without_nickname_change(self):
settings.OPENID_FOLLOW_RENAMES = True
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
settings.OPENID_STRICT_USERNAMES = True
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': 'testuser', 'fullname': 'Some User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
# Username should not have changed
self.assertEquals(response.content, 'testuser')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Some')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_follow_rename_conflict(self):
settings.OPENID_FOLLOW_RENAMES = True
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
        # Setup existing user whose name we're going to switch to
user = User.objects.create_user('testuser', '[email protected]')
UserOpenID.objects.get_or_create(
user=user,
claimed_id='http://example.com/existing_identity',
display_id='http://example.com/existing_identity')
# Setup user who is going to try to change username to 'testuser'
renamed_user = User.objects.create_user('renameuser', '[email protected]')
UserOpenID.objects.get_or_create(
user=renamed_user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
# identity url is for 'renameuser'
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
# but returned username is for 'testuser', which already exists for another identity
openid_resp = {'nickname': 'testuser', 'fullname': 'Rename User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
# If OPENID_FOLLOW_RENAMES, attempt to change username to 'testuser'
# but since that username is already taken by someone else, we go through
# the process of adding +i to it, and get testuser2.
self.assertEquals(response.content, 'testuser2')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Rename')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_follow_rename_false_onlyonce(self):
settings.OPENID_FOLLOW_RENAMES = True
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
        # Set up an existing user whose name we're going to switch to
user = User.objects.create_user('testuser', '[email protected]')
UserOpenID.objects.get_or_create(
user=user,
claimed_id='http://example.com/existing_identity',
display_id='http://example.com/existing_identity')
        # Set up a user who is going to try to change username to 'testuser'
renamed_user = User.objects.create_user('testuser2000eight', '[email protected]')
UserOpenID.objects.get_or_create(
user=renamed_user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
# identity url is for 'testuser2000eight'
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
        # but the returned nickname is 'testuser2', whose base name 'testuser' already exists for another identity
openid_resp = {'nickname': 'testuser2', 'fullname': 'Rename User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
        # If OPENID_FOLLOW_RENAMES, attempt to change the username to
        # 'testuser', but since that username is already taken by someone
        # else, we go through the process of adding +i to it. Even though
        # the current username looks like it follows the nickname+i scheme,
        # it has non-numbers in the suffix, so it is not an auto-generated
        # one. The regular process of renaming to 'testuser' hits the
        # conflict, so we get +2 at the end, i.e. 'testuser2'.
self.assertEquals(response.content, 'testuser2')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Rename')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_follow_rename_conflict_onlyonce(self):
settings.OPENID_FOLLOW_RENAMES = True
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
        # Set up an existing user whose name we're going to switch to
user = User.objects.create_user('testuser', '[email protected]')
UserOpenID.objects.get_or_create(
user=user,
claimed_id='http://example.com/existing_identity',
display_id='http://example.com/existing_identity')
        # Set up a user who is going to try to change username to 'testuser'
renamed_user = User.objects.create_user('testuser2000', '[email protected]')
UserOpenID.objects.get_or_create(
user=renamed_user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
# identity url is for 'testuser2000'
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
        # but the returned nickname is 'testuser', which already exists for another identity
openid_resp = {'nickname': 'testuser', 'fullname': 'Rename User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
        # If OPENID_FOLLOW_RENAMES, attempt to change the username to
        # 'testuser', but since that username is already taken by someone
        # else, we would go through the process of adding +i to it. Since
        # the user for this identity url already has a name matching that
        # pattern ('testuser2000'), that name is kept.
self.assertEquals(response.content, 'testuser2000')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Rename')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_follow_rename_false_conflict(self):
settings.OPENID_FOLLOW_RENAMES = True
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
        # Set up an existing user whose username matches the name+i pattern
user = User.objects.create_user('testuser2', '[email protected]')
UserOpenID.objects.get_or_create(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
# identity url is for 'testuser2'
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
        # but the returned nickname is 'testuser', which looks like we've
        # already done a nickname+i rename for them; however, 'testuser'
        # isn't actually taken
openid_resp = {'nickname': 'testuser', 'fullname': 'Same User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
# If OPENID_FOLLOW_RENAMES, username should be changed to 'testuser'
# because it wasn't currently taken
self.assertEquals(response.content, 'testuser')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Same')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_strict_username_no_nickname(self):
settings.OPENID_CREATE_USERS = True
settings.OPENID_STRICT_USERNAMES = True
settings.OPENID_SREG_REQUIRED_FIELDS = []
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
openid_response = openid_request.answer(True)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, {'nickname': '', # No nickname
'fullname': 'Some User',
'email': '[email protected]'})
openid_response.addExtension(sreg_response)
response = self.complete(openid_response)
# Status code should be 403: Forbidden
self.assertEquals(403, response.status_code)
self.assertContains(response, '<h1>OpenID failed</h1>', status_code=403)
self.assertContains(response, "An attribute required for logging in was not returned "
"(nickname)", status_code=403)
def test_strict_username_no_nickname_override(self):
settings.OPENID_CREATE_USERS = True
settings.OPENID_STRICT_USERNAMES = True
settings.OPENID_SREG_REQUIRED_FIELDS = []
# Override the login_failure handler
def mock_login_failure_handler(request, message, status=403,
template_name=None,
exception=None):
self.assertTrue(isinstance(exception, (RequiredAttributeNotReturned, MissingUsernameViolation)))
return HttpResponse('Test Failure Override', status=200)
settings.OPENID_RENDER_FAILURE = mock_login_failure_handler
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
openid_response = openid_request.answer(True)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, {'nickname': '', # No nickname
'fullname': 'Some User',
'email': '[email protected]'})
openid_response.addExtension(sreg_response)
response = self.complete(openid_response)
# Status code should be 200, since we over-rode the login_failure handler
self.assertEquals(200, response.status_code)
self.assertContains(response, 'Test Failure Override')
def test_strict_username_duplicate_user(self):
settings.OPENID_CREATE_USERS = True
settings.OPENID_STRICT_USERNAMES = True
# Create a user with the same name as we'll pass back via sreg.
user = User.objects.create_user('someuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/different_identity',
display_id='http://example.com/different_identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
openid_response = openid_request.answer(True)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, {'nickname': 'someuser', 'fullname': 'Some User',
'email': '[email protected]'})
openid_response.addExtension(sreg_response)
response = self.complete(openid_response)
# Status code should be 403: Forbidden
self.assertEquals(403, response.status_code)
self.assertContains(response, '<h1>OpenID failed</h1>', status_code=403)
self.assertContains(response,
"The username (someuser) with which you tried to log in is "
"already in use for a different account.",
status_code=403)
def test_strict_username_duplicate_user_override(self):
settings.OPENID_CREATE_USERS = True
settings.OPENID_STRICT_USERNAMES = True
# Override the login_failure handler
def mock_login_failure_handler(request, message, status=403,
template_name=None,
exception=None):
self.assertTrue(isinstance(exception, DuplicateUsernameViolation))
return HttpResponse('Test Failure Override', status=200)
settings.OPENID_RENDER_FAILURE = mock_login_failure_handler
# Create a user with the same name as we'll pass back via sreg.
user = User.objects.create_user('someuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/different_identity',
display_id='http://example.com/different_identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
openid_response = openid_request.answer(True)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, {'nickname': 'someuser', 'fullname': 'Some User',
'email': '[email protected]'})
openid_response.addExtension(sreg_response)
response = self.complete(openid_response)
# Status code should be 200, since we over-rode the login_failure handler
self.assertEquals(200, response.status_code)
self.assertContains(response, 'Test Failure Override')
def test_login_requires_sreg_required_fields(self):
# If any required attributes are not included in the response,
# we fail with a forbidden.
settings.OPENID_CREATE_USERS = True
settings.OPENID_SREG_REQUIRED_FIELDS = ('email', 'language')
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request, passing back some simple registration
# data. The user is redirected to the next URL.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
openid_response = openid_request.answer(True)
sreg_response = sreg.SRegResponse.extractResponse(
sreg_request, {'nickname': 'foo',
'fullname': 'Some User',
'email': '[email protected]'})
openid_response.addExtension(sreg_response)
response = self.complete(openid_response)
# Status code should be 403: Forbidden as we didn't include
# a required field - language.
self.assertContains(response,
"An attribute required for logging in was not returned "
"(language)", status_code=403)
def test_login_update_details(self):
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
openid_req = {'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'}
openid_resp = {'nickname': 'testuser', 'fullname': 'Some User',
'email': '[email protected]'}
self._do_user_login(openid_req, openid_resp)
response = self.client.get('/getuser/')
self.assertEquals(response.content, 'testuser')
# The user's full name and email have been updated.
user = User.objects.get(username=response.content)
self.assertEquals(user.first_name, 'Some')
self.assertEquals(user.last_name, 'User')
self.assertEquals(user.email, '[email protected]')
def test_login_uses_sreg_extra_fields(self):
# The configurable sreg attributes are used in the request.
settings.OPENID_SREG_EXTRA_FIELDS = ('language',)
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
for field in ('email', 'fullname', 'nickname', 'language'):
self.assertTrue(field in sreg_request)
def test_login_uses_sreg_required_fields(self):
# The configurable sreg attributes are used in the request.
settings.OPENID_SREG_REQUIRED_FIELDS = ('email', 'language')
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
self.assertEqual(['email', 'language'], sreg_request.required)
self.assertEqual(['fullname', 'nickname'], sreg_request.optional)
def test_login_attribute_exchange(self):
settings.OPENID_UPDATE_DETAILS_FROM_SREG = True
user = User.objects.create_user('testuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Configure the provider to advertise attribute exchange
# protocol and start the authentication process:
self.provider.type_uris.append('http://openid.net/srv/ax/1.0')
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# The resulting OpenID request uses the Attribute Exchange
# extension rather than the Simple Registration extension.
openid_request = self.provider.parseFormPost(response.content)
sreg_request = sreg.SRegRequest.fromOpenIDRequest(openid_request)
self.assertEqual(sreg_request.required, [])
self.assertEqual(sreg_request.optional, [])
fetch_request = ax.FetchRequest.fromOpenIDRequest(openid_request)
self.assertTrue(fetch_request.has_key(
'http://axschema.org/contact/email'))
self.assertTrue(fetch_request.has_key(
'http://axschema.org/namePerson'))
self.assertTrue(fetch_request.has_key(
'http://axschema.org/namePerson/first'))
self.assertTrue(fetch_request.has_key(
'http://axschema.org/namePerson/last'))
self.assertTrue(fetch_request.has_key(
'http://axschema.org/namePerson/friendly'))
# myOpenID compatibilty attributes:
self.assertTrue(fetch_request.has_key(
'http://schema.openid.net/contact/email'))
self.assertTrue(fetch_request.has_key(
'http://schema.openid.net/namePerson'))
self.assertTrue(fetch_request.has_key(
'http://schema.openid.net/namePerson/friendly'))
# Build up a response including AX data.
openid_response = openid_request.answer(True)
fetch_response = ax.FetchResponse(fetch_request)
fetch_response.addValue(
'http://axschema.org/contact/email', '[email protected]')
fetch_response.addValue(
'http://axschema.org/namePerson/first', 'Firstname')
fetch_response.addValue(
'http://axschema.org/namePerson/last', 'Lastname')
fetch_response.addValue(
'http://axschema.org/namePerson/friendly', 'someuser')
openid_response.addExtension(fetch_response)
response = self.complete(openid_response)
self.assertRedirects(response, 'http://testserver/getuser/')
# And they are now logged in as testuser (the passed in
# nickname has not caused the username to change).
response = self.client.get('/getuser/')
self.assertEquals(response.content, 'testuser')
# The user's full name and email have been updated.
user = User.objects.get(username='testuser')
self.assertEquals(user.first_name, 'Firstname')
self.assertEquals(user.last_name, 'Lastname')
self.assertEquals(user.email, '[email protected]')
def test_login_teams(self):
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING_AUTO = False
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING = {'teamname': 'groupname',
'otherteam': 'othergroup'}
user = User.objects.create_user('testuser', '[email protected]')
group = Group(name='groupname')
group.save()
ogroup = Group(name='othergroup')
ogroup.save()
user.groups.add(ogroup)
user.save()
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request
openid_request = self.provider.parseFormPost(response.content)
openid_response = openid_request.answer(True)
teams_request = teams.TeamsRequest.fromOpenIDRequest(openid_request)
teams_response = teams.TeamsResponse.extractResponse(
teams_request, 'teamname,some-other-team')
openid_response.addExtension(teams_response)
response = self.complete(openid_response)
self.assertRedirects(response, 'http://testserver/getuser/')
# And they are now logged in as testuser
response = self.client.get('/getuser/')
self.assertEquals(response.content, 'testuser')
# The user's groups have been updated.
user = User.objects.get(username='testuser')
self.assertTrue(group in user.groups.all())
self.assertTrue(ogroup not in user.groups.all())
def test_login_teams_automapping(self):
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING = {'teamname': 'groupname',
'otherteam': 'othergroup'}
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING_AUTO = True
settings.OPENID_LAUNCHPAD_TEAMS_MAPPING_AUTO_BLACKLIST = ['django-group1', 'django-group2']
user = User.objects.create_user('testuser', '[email protected]')
group1 = Group(name='django-group1')
group1.save()
group2 = Group(name='django-group2')
group2.save()
group3 = Group(name='django-group3')
group3.save()
user.save()
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity',
'next': '/getuser/'})
self.assertContains(response, 'OpenID transaction in progress')
# Complete the request
openid_request = self.provider.parseFormPost(response.content)
openid_response = openid_request.answer(True)
teams_request = teams.TeamsRequest.fromOpenIDRequest(openid_request)
self.assertEqual(group1 in user.groups.all(), False)
self.assertEqual(group2 in user.groups.all(), False)
self.assertTrue(group3 not in user.groups.all())
def test_login_teams_staff_not_defined(self):
delattr(settings, 'OPENID_LAUNCHPAD_STAFF_TEAMS')
user = User.objects.create_user('testuser', '[email protected]')
user.is_staff = True
user.save()
self.assertTrue(user.is_staff)
user = self.get_openid_authed_user_with_teams(user, 'teamname,some-other-team')
self.assertTrue(user.is_staff)
def test_login_teams_staff_assignment(self):
settings.OPENID_LAUNCHPAD_STAFF_TEAMS = ('teamname',)
user = User.objects.create_user('testuser', '[email protected]')
user.is_staff = False
user.save()
self.assertFalse(user.is_staff)
user = self.get_openid_authed_user_with_teams(user, 'teamname,some-other-team')
self.assertTrue(user.is_staff)
def test_login_teams_staff_unassignment(self):
settings.OPENID_LAUNCHPAD_STAFF_TEAMS = ('different-teamname',)
user = User.objects.create_user('testuser', '[email protected]')
user.is_staff = True
user.save()
self.assertTrue(user.is_staff)
user = self.get_openid_authed_user_with_teams(user, 'teamname,some-other-team')
self.assertFalse(user.is_staff)
def get_openid_authed_user_with_teams(self, user, teams_str):
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
# Posting in an identity URL begins the authentication request:
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity'})
# Complete the request
openid_request = self.provider.parseFormPost(response.content)
openid_response = openid_request.answer(True)
teams_request = teams.TeamsRequest.fromOpenIDRequest(openid_request)
teams_response = teams.TeamsResponse.extractResponse(
teams_request, teams_str)
openid_response.addExtension(teams_response)
response = self.complete(openid_response)
return User.objects.get(username=user.username)
def test_login_complete_signals_login(self):
        # An openid_login_complete signal is emitted, including the
        # request and the openid_response.
user = User.objects.create_user('someuser', '[email protected]')
useropenid = UserOpenID(
user=user,
claimed_id='http://example.com/identity',
display_id='http://example.com/identity')
useropenid.save()
response = self.client.post('/openid/login/',
{'openid_identifier': 'http://example.com/identity'})
openid_request = self.provider.parseFormPost(response.content)
openid_response = openid_request.answer(True)
# Use a closure to test whether the signal handler was called.
self.signal_handler_called = False
def login_callback(sender, **kwargs):
self.assertTrue(isinstance(
kwargs.get('request', None), HttpRequest))
self.assertTrue(isinstance(
kwargs.get('openid_response', None), SuccessResponse))
self.signal_handler_called = True
openid_login_complete.connect(login_callback)
response = self.complete(openid_response)
self.assertTrue(self.signal_handler_called)
openid_login_complete.disconnect(login_callback)
class HelperFunctionsTest(TestCase):
def test_sanitise_redirect_url(self):
settings.ALLOWED_EXTERNAL_OPENID_REDIRECT_DOMAINS = [
"example.com", "example.org"]
# list of URLs and whether they should be passed or not
urls = [
("http://example.com", True),
("http://example.org/", True),
("http://example.org/foo/bar", True),
("http://example.org/foo/bar?baz=quux", True),
("http://example.org:9999/foo/bar?baz=quux", True),
("http://www.example.org/", False),
("http://example.net/foo/bar?baz=quux", False),
("/somewhere/local", True),
("/somewhere/local?url=http://fail.com/bar", True),
# An empty path, as seen when no "next" parameter is passed.
("", False),
("/path with spaces", False),
]
for url, returns_self in urls:
sanitised = sanitise_redirect_url(url)
if returns_self:
self.assertEqual(url, sanitised)
else:
self.assertEqual(settings.LOGIN_REDIRECT_URL, sanitised)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
| bsd-2-clause | -123,652,948,512,215,820 | 45.519078 | 117 | 0.635515 | false |
bsmr-eve/Pyfa | gui/builtinContextMenus/factorReload.py | 1 | 1362 | from gui.contextMenu import ContextMenu
import gui.mainFrame
import gui.globalEvents as GE
# noinspection PyPackageRequirements
import wx
from gui.bitmap_loader import BitmapLoader
from service.fit import Fit
from service.settings import ContextMenuSettings
class FactorReload(ContextMenu):
def __init__(self):
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.settings = ContextMenuSettings.getInstance()
def display(self, srcContext, selection):
if not self.settings.get('factorReload'):
return False
return srcContext == "firepowerViewFull" and self.mainFrame.getActiveFit() is not None
def getText(self, itmContext, selection):
return "Factor in Reload Time"
def activate(self, fullContext, selection, i):
sFit = Fit.getInstance()
sFit.serviceFittingOptions["useGlobalForceReload"] = not sFit.serviceFittingOptions["useGlobalForceReload"]
fitID = self.mainFrame.getActiveFit()
sFit.refreshFit(fitID)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=fitID))
def getBitmap(self, context, selection):
sFit = Fit.getInstance()
if sFit.serviceFittingOptions["useGlobalForceReload"]:
return BitmapLoader.getBitmap("state_active_small", "gui")
else:
return None
FactorReload.register()
| gpl-3.0 | 3,722,753,544,549,574,000 | 33.05 | 115 | 0.712922 | false |
titeuf87/evennia | evennia/scripts/manager.py | 3 | 9652 | """
The custom manager for Scripts.
"""
from django.db.models import Q
from evennia.typeclasses.managers import TypedObjectManager, TypeclassManager
from evennia.typeclasses.managers import returns_typeclass_list
from evennia.utils.utils import make_iter
__all__ = ("ScriptManager",)
_GA = object.__getattribute__
VALIDATE_ITERATION = 0
class ScriptDBManager(TypedObjectManager):
"""
    This script manager implements methods for searching
and manipulating Scripts directly from the database.
Evennia-specific search methods (will return Typeclasses or
lists of Typeclasses, whereas Django-general methods will return
Querysets or database objects).
dbref (converter)
get_id (or dbref_search)
get_dbref_range
object_totals
typeclass_search
get_all_scripts_on_obj
get_all_scripts
delete_script
remove_non_persistent
validate
script_search (equivalent to evennia.search_script)
copy_script
"""
@returns_typeclass_list
def get_all_scripts_on_obj(self, obj, key=None):
"""
Find all Scripts related to a particular object.
Args:
obj (Object): Object whose Scripts we are looking for.
key (str, optional): Script identifier - can be given as a
dbref or name string. If given, only scripts matching the
key on the object will be returned.
Returns:
matches (list): Matching scripts.
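
        Example::

            # Illustrative sketch; assumes `obj` is an existing typeclassed
            # entity with scripts attached (the key below is hypothetical):
            from evennia.scripts.models import ScriptDB
            all_scripts = ScriptDB.objects.get_all_scripts_on_obj(obj)
            timers = ScriptDB.objects.get_all_scripts_on_obj(obj, key="timer_script")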
"""
if not obj:
return []
player = _GA(_GA(obj, "__dbclass__"), "__name__") == "PlayerDB"
if key:
dbref = self.dbref(key)
if dbref or dbref == 0:
if player:
return self.filter(db_player=obj, id=dbref)
else:
return self.filter(db_obj=obj, id=dbref)
elif player:
return self.filter(db_player=obj, db_key=key)
else:
return self.filter(db_obj=obj, db_key=key)
elif player:
return self.filter(db_player=obj)
else:
return self.filter(db_obj=obj)
@returns_typeclass_list
def get_all_scripts(self, key=None):
"""
Get all scripts in the database.
Args:
key (str, optional): Restrict result to only those
with matching key or dbref.
Returns:
scripts (list): All scripts found, or those matching `key`.
"""
if key:
script = []
dbref = self.dbref(key)
if dbref or dbref == 0:
script = [self.dbref_search(dbref)]
if not script:
script = self.filter(db_key=key)
return script
return self.all()
def delete_script(self, dbref):
"""
This stops and deletes a specific script directly from the
script database.
Args:
dbref (int): Database unique id.
Notes:
This might be needed for global scripts not tied to a
specific game object
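
        Example::

            # Illustrative only; stops and deletes the script with the
            # given database id (the id used here is hypothetical):
            ScriptDB.objects.delete_script(5)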
"""
scripts = self.get_id(dbref)
for script in make_iter(scripts):
script.stop()
def remove_non_persistent(self, obj=None):
"""
This cleans up the script database of all non-persistent
scripts. It is called every time the server restarts.
Args:
obj (Object, optional): Only remove non-persistent scripts
assigned to this object.
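
        Example::

            # Illustrative sketch; `obj` is assumed to be an existing
            # typeclassed entity:
            nr_removed = ScriptDB.objects.remove_non_persistent(obj=obj)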
"""
if obj:
to_stop = self.filter(db_obj=obj, db_persistent=False, db_is_active=True)
to_delete = self.filter(db_obj=obj, db_persistent=False, db_is_active=False)
else:
to_stop = self.filter(db_persistent=False, db_is_active=True)
to_delete = self.filter(db_persistent=False, db_is_active=False)
nr_deleted = to_stop.count() + to_delete.count()
for script in to_stop:
script.stop()
for script in to_delete:
script.delete()
return nr_deleted
def validate(self, scripts=None, obj=None, key=None, dbref=None,
init_mode=False):
"""
This will step through the script database and make sure
all objects run scripts that are still valid in the context
they are in. This is called by the game engine at regular
intervals but can also be initiated by player scripts.
        Only one of the arguments is supposed to be supplied
        at a time, since they are mutually exclusive.
Args:
scripts (list, optional): A list of script objects to
validate.
obj (Object, optional): Validate only scripts defined on
this object.
key (str): Validate only scripts with this key.
dbref (int): Validate only the single script with this
particular id.
            init_mode (str, optional): This is used during server
                startup and can have three values:
- `False` (no init mode). Called during run.
- `"reset"` - server reboot. Kill non-persistent scripts
- `"reload"` - server reload. Keep non-persistent scripts.
Returns:
            nr_started, nr_stopped (tuple): Statistics on how many scripts
                were started and stopped.
Notes:
            This method also makes sure to start any scripts it validates;
            this should be harmless, since already-active scripts have
            the property 'is_running' set and will be skipped.
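
        Example::

            # Illustrative sketch; validate all scripts defined on `obj`
            # (assumed to exist) and report the statistics:
            nr_started, nr_stopped = ScriptDB.objects.validate(obj=obj)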
"""
# we store a variable that tracks if we are calling a
# validation from within another validation (avoids
# loops).
global VALIDATE_ITERATION
if VALIDATE_ITERATION > 0:
# we are in a nested validation. Exit.
VALIDATE_ITERATION -= 1
return None, None
VALIDATE_ITERATION += 1
# not in a validation - loop. Validate as normal.
nr_started = 0
nr_stopped = 0
if init_mode:
if init_mode == 'reset':
# special mode when server starts or object logs in.
# This deletes all non-persistent scripts from database
nr_stopped += self.remove_non_persistent(obj=obj)
# turn off the activity flag for all remaining scripts
scripts = self.get_all_scripts()
for script in scripts:
script.is_active = False
elif not scripts:
# normal operation
if dbref and self.dbref(dbref, reqhash=False):
scripts = self.get_id(dbref)
elif obj:
scripts = self.get_all_scripts_on_obj(obj, key=key)
else:
                scripts = self.get_all_scripts(key=key)  # self.model.get_all_cached_instances()
if not scripts:
# no scripts available to validate
VALIDATE_ITERATION -= 1
return None, None
for script in scripts:
if script.is_valid():
nr_started += script.start(force_restart=init_mode)
else:
script.stop()
nr_stopped += 1
VALIDATE_ITERATION -= 1
return nr_started, nr_stopped
@returns_typeclass_list
def script_search(self, ostring, obj=None, only_timed=False):
"""
Search for a particular script.
Args:
            ostring (str): Search criterion - a script dbref or key.
obj (Object, optional): Limit search to scripts defined on
this object
only_timed (bool): Limit search only to scripts that run
on a timer.
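
        Example::

            # Illustrative sketch; "storm_script" is a hypothetical key:
            matches = ScriptDB.objects.script_search("storm_script", only_timed=True)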
"""
ostring = ostring.strip()
dbref = self.dbref(ostring)
if dbref or dbref == 0:
# this is a dbref, try to find the script directly
dbref_match = self.dbref_search(dbref)
if dbref_match and not ((obj and obj != dbref_match.obj)
or (only_timed and dbref_match.interval)):
return [dbref_match]
# not a dbref; normal search
obj_restriction = obj and Q(db_obj=obj) or Q()
timed_restriction = only_timed and Q(interval__gt=0) or Q()
scripts = self.filter(timed_restriction & obj_restriction & Q(db_key__iexact=ostring))
return scripts
def copy_script(self, original_script, new_key=None, new_obj=None, new_locks=None):
"""
Make an identical copy of the original_script.
Args:
original_script (Script): The Script to copy.
new_key (str, optional): Rename the copy.
new_obj (Object, optional): Place copy on different Object.
new_locks (str, optional): Give copy different locks from
the original.
Returns:
script_copy (Script): A new Script instance, copied from
the original.
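
        Example::

            # Illustrative sketch; `script` and `other_obj` are assumed
            # to already exist:
            clone = ScriptDB.objects.copy_script(script, new_key="clone",
                                                 new_obj=other_obj)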
"""
typeclass = original_script.typeclass_path
new_key = new_key if new_key is not None else original_script.key
new_obj = new_obj if new_obj is not None else original_script.obj
new_locks = new_locks if new_locks is not None else original_script.db_lock_storage
from evennia.utils import create
new_script = create.create_script(typeclass, key=new_key, obj=new_obj,
locks=new_locks, autostart=True)
return new_script
class ScriptManager(ScriptDBManager, TypeclassManager):
pass
| bsd-3-clause | 3,465,243,356,789,461,500 | 34.226277 | 94 | 0.579983 | false |
igemsoftware2017/USTC-Software-2017 | biohub/core/tasks/result.py | 1 | 4308 | from operator import or_
from functools import reduce
from biohub.core.tasks.storage import storage
from biohub.core.tasks.status import TaskStatus
from django.utils.functional import cached_property
def get_result_timeout():
from biohub.utils.detect import features
return 1.5 if features.testing else 2 * 60 * 60
class AsyncResultMeta(type):
def __new__(cls, name, bases, attrs):
fields = reduce(
or_,
(
set(getattr(base, 'properties', []))
for base in bases if isinstance(base, AsyncResultMeta)
),
set(attrs.get('properties', ()))
)
new_class = type.__new__(cls, name, bases, attrs)
for field in fields:
cls._make_property(field, new_class)
return new_class
@classmethod
def _make_property(cls, name, new_class):
getter = cls._prepare_descriptor(name, new_class, 'get', lambda self: self._get_field(name))
cls._prepare_descriptor(name, new_class, 'set', lambda self, value: self._set_field(name, value))
cls._prepare_descriptor(name, new_class, 'del', lambda self: self._del_field(name))
setattr(new_class, name, property(getter)) # , setter, deleter)
@classmethod
def _prepare_descriptor(cls, name, new_class, action, default):
attrname = '_{}_{}'.format(action, name)
if hasattr(new_class, attrname):
return getattr(new_class, attrname)
else:
setattr(new_class, attrname, default)
return default
class AsyncResult(object, metaclass=AsyncResultMeta):
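    """
    Redis-hash-backed handle to an asynchronous task's state.

    The metaclass generates ``status``, ``result`` and ``payload``
    properties that read from and write to the per-task storage hash.

    Illustrative lifecycle sketch (the task id is hypothetical and a
    configured storage backend is assumed)::

        result = AsyncResult('task-42')
        result.pend()              # mark as queued
        result.run()               # mark as running
        result.resolve('done!')    # store the result and set SUCCESS
        assert result.status.is_ready
    """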
properties = ['status', 'result', 'payload']
def __init__(self, task_id):
self._task_id = task_id
self._storage = storage
@cached_property
def _storage_key(self):
return self._task_id + '_meta'
def _get_field(self, name):
return self._storage.hget(self._storage_key, name)
def _set_field(self, name, value):
return self._storage.hset(self._storage_key, name, value)
def _del_field(self, name):
return self._storage.hdel(self._storage_key, name)
def _expire(self, timeout):
if timeout is None:
return self._storage.persist(self._storage_key)
else:
timeout = int(timeout * 1000)
return self._storage.pexpire(self._storage_key, timeout)
def exists(self):
return self._storage.exists(self._storage_key)
@property
def task_id(self):
return self._task_id
def _get_status(self):
from biohub.utils.detect import features
if hasattr(self, '_status') and not features.testing:
return self._status
status = self._get_field('status')
if status is None:
return TaskStatus.GONE
else:
status = TaskStatus(status)
if status.is_ready:
self._status = status
return status
def _check_ready(self):
if hasattr(self, '_status'):
raise ValueError('State was ready.')
def _set_status(self, status):
self._check_ready()
status = TaskStatus(status)
if status == TaskStatus.GONE:
self._del_status()
else:
if status.is_ready:
self._status = status
self._expire(get_result_timeout())
self._set_field('status', status)
def _after_ready(self, state, result):
pass
def pend(self):
self._set_status(TaskStatus.PENDING)
def run(self):
self._set_status(TaskStatus.RUNNING)
def resolve(self, result):
self._set_status(TaskStatus.SUCCESS)
self._set_result(result)
self._after_ready(TaskStatus.SUCCESS, result)
def timeout(self):
self._set_status(TaskStatus.TIMEOUT)
self._after_ready(TaskStatus.TIMEOUT, None)
def error(self, exception):
self._set_status(TaskStatus.ERROR)
self._set_result(exception)
self._after_ready(TaskStatus.ERROR, None)
def wait(self, rounds, duration):
import time
while rounds >= 0:
time.sleep(duration)
if self.status.is_ready:
return True
rounds -= 1
return False
| gpl-3.0 | 672,421,886,842,469,400 | 26.43949 | 105 | 0.59169 | false |
Raviyanto/sunflower-fm | application/plugin_base/find_extension.py | 9 | 1253 | import gtk
class FindExtension:
"""Base class for extending find files tool.
Use this class to provide find files tool with additional
options. Objects are created every time tool is created!
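
	Example of a minimal extension (illustrative sketch only)::

		class ExtensionExample(FindExtension):
			def get_title(self):
				return _('Example')

			def is_path_ok(self, path):
				# hypothetical criterion used purely for illustration
				return path.endswith('.txt')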
"""
def __init__(self, parent):
self._parent = parent
# create and configure container
self.vbox = gtk.VBox(False, 5)
self.vbox.set_border_width(7)
self.vbox.set_data('extension', self)
# create activity toggle
self._active = False
self._checkbox_active = gtk.CheckButton(_('Use this extension'))
self._checkbox_active.connect('toggled', self.__toggle_active)
self._checkbox_active.show()
self.vbox.pack_start(self._checkbox_active, False, False, 0)
def __toggle_active(self, widget, data=None):
"""Toggle extension active property"""
self._active = widget.get_active()
def is_active(self):
"""Return boolean representing extension state"""
return self._active
def get_title(self):
"""Return i18n title for extension"""
return None
def get_container(self):
"""Return widget container"""
return self.vbox
def is_path_ok(self, path):
"""Check is specified path fits the cirteria
You can access provider using self._parent._provider object.
Result needs to be boolean type.
"""
return True
| gpl-3.0 | -5,319,260,084,456,056,000 | 23.096154 | 66 | 0.708699 | false |
Endika/edx-platform | cms/djangoapps/contentstore/views/tests/test_certificates.py | 19 | 29500 | #-*- coding: utf-8 -*-
"""
Certificates Tests.
"""
import json
import mock
import ddt
from django.conf import settings
from django.test.utils import override_settings
from opaque_keys.edx.keys import AssetKey
from opaque_keys.edx.locations import AssetLocation
from contentstore.utils import reverse_course_url
from contentstore.views.certificates import CERTIFICATE_SCHEMA_VERSION
from contentstore.tests.utils import CourseTestCase
from xmodule.contentstore.django import contentstore
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import NotFoundError
from student.models import CourseEnrollment
from student.roles import CourseInstructorRole, CourseStaffRole
from student.tests.factories import UserFactory
from course_modes.tests.factories import CourseModeFactory
from contentstore.views.certificates import CertificateManager
from contentstore.utils import get_lms_link_for_certificate_web_view
from util.testing import EventTestMixin
FEATURES_WITH_CERTS_ENABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_ENABLED['CERTIFICATES_HTML_VIEW'] = True
CERTIFICATE_JSON = {
u'name': u'Test certificate',
u'description': u'Test description',
u'is_active': True,
u'version': CERTIFICATE_SCHEMA_VERSION,
}
CERTIFICATE_JSON_WITH_SIGNATORIES = {
u'name': u'Test certificate',
u'description': u'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION,
u'course_title': 'Course Title Override',
u'is_active': True,
u'signatories': [
{
"name": "Bob Smith",
"title": "The DEAN.",
"signature_image_path": "/c4x/test/CSS101/asset/Signature.png"
}
]
}
# pylint: disable=no-member
class HelperMethods(object):
"""
Mixin that provides useful methods for certificate configuration tests.
"""
def _create_fake_images(self, asset_keys):
"""
Creates fake image files for a list of asset_keys.
"""
for asset_key_string in asset_keys:
asset_key = AssetKey.from_string(asset_key_string)
content = StaticContent(
asset_key, "Fake asset", "image/png", "data",
)
contentstore().save(content)
def _add_course_certificates(self, count=1, signatory_count=0, is_active=False):
"""
        Create certificates for the course.
"""
signatories = [
{
'name': 'Name ' + str(i),
'title': 'Title ' + str(i),
'signature_image_path': '/c4x/test/CSS101/asset/Signature{}.png'.format(i),
'id': i
} for i in xrange(0, signatory_count)
]
# create images for signatory signatures except the last signatory
for idx, signatory in enumerate(signatories):
if len(signatories) > 2 and idx == len(signatories) - 1:
continue
else:
self._create_fake_images([signatory['signature_image_path']])
certificates = [
{
'id': i,
'name': 'Name ' + str(i),
'description': 'Description ' + str(i),
'signatories': signatories,
'version': CERTIFICATE_SCHEMA_VERSION,
'is_active': is_active
} for i in xrange(0, count)
]
self.course.certificates = {'certificates': certificates}
self.save_course()
# pylint: disable=no-member
class CertificatesBaseTestCase(object):
"""
Mixin with base test cases for the certificates.
"""
def _remove_ids(self, content):
"""
Remove ids from the response. We cannot predict IDs, because they're
generated randomly.
We use this method to clean up response when creating new certificate.
"""
certificate_id = content.pop("id")
return certificate_id
def test_required_fields_are_absent(self):
"""
Test required fields are absent.
"""
bad_jsons = [
# must have name of the certificate
{
u'description': 'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION
},
# an empty json
{},
]
for bad_json in bad_jsons:
response = self.client.post(
self._url(),
data=json.dumps(bad_json),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 400)
self.assertNotIn("Location", response)
content = json.loads(response.content)
self.assertIn("error", content)
def test_invalid_json(self):
"""
Test invalid json handling.
"""
# Invalid JSON.
invalid_json = "{u'name': 'Test Name', u'description': 'Test description'," \
" u'version': " + str(CERTIFICATE_SCHEMA_VERSION) + ", []}"
response = self.client.post(
self._url(),
data=invalid_json,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 400)
self.assertNotIn("Location", response)
content = json.loads(response.content)
self.assertIn("error", content)
def test_certificate_data_validation(self):
        # Test certificate schema version
json_data_1 = {
u'version': 100,
u'name': u'Test certificate',
u'description': u'Test description'
}
with self.assertRaises(Exception) as context:
CertificateManager.validate(json_data_1)
self.assertTrue("Unsupported certificate schema version: 100. Expected version: 1." in context.exception)
        # Test certificate name is missing
json_data_2 = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'description': u'Test description'
}
with self.assertRaises(Exception) as context:
CertificateManager.validate(json_data_2)
self.assertTrue('must have name of the certificate' in context.exception)
@ddt.ddt
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class CertificatesListHandlerTestCase(EventTestMixin, CourseTestCase, CertificatesBaseTestCase, HelperMethods):
"""
Test cases for certificates_list_handler.
"""
def setUp(self):
"""
Set up CertificatesListHandlerTestCase.
"""
super(CertificatesListHandlerTestCase, self).setUp('contentstore.views.certificates.tracker')
def _url(self):
"""
Return url for the handler.
"""
return reverse_course_url('certificates.certificates_list_handler', self.course.id)
def test_can_create_certificate(self):
"""
Test that you can create a certificate.
"""
expected = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'Test certificate',
u'description': u'Test description',
u'is_active': True,
u'signatories': []
}
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 201)
self.assertIn("Location", response)
content = json.loads(response.content)
certificate_id = self._remove_ids(content)
self.assertEqual(content, expected)
self.assert_event_emitted(
'edx.certificate.configuration.created',
course_id=unicode(self.course.id),
configuration_id=certificate_id,
)
def test_cannot_create_certificate_if_user_has_no_write_permissions(self):
"""
        Tests that a user without write permissions on the course cannot
        create a certificate.
"""
user = UserFactory()
self.client.login(username=user.username, password='test')
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 403)
@override_settings(LMS_BASE=None)
def test_no_lms_base_for_certificate_web_view_link(self):
test_link = get_lms_link_for_certificate_web_view(
user_id=self.user.id,
course_key=self.course.id,
mode='honor'
)
self.assertEquals(test_link, None)
@override_settings(LMS_BASE="lms_base_url")
def test_lms_link_for_certificate_web_view(self):
test_url = "//lms_base_url/certificates/user/" \
+ str(self.user.id) + "/course/" + unicode(self.course.id) + '?preview=honor'
link = get_lms_link_for_certificate_web_view(
user_id=self.user.id,
course_key=self.course.id,
mode='honor'
)
self.assertEquals(link, test_url)
@mock.patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
def test_certificate_info_in_response(self):
"""
        Test that the certificate is created and rendered properly for a non-audit course mode.
"""
CourseModeFactory.create(course_id=self.course.id, mode_slug='verified')
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON_WITH_SIGNATORIES
)
self.assertEqual(response.status_code, 201)
# in html response
result = self.client.get_html(self._url())
self.assertIn('Test certificate', result.content)
self.assertIn('Test description', result.content)
# in JSON response
response = self.client.get_json(self._url())
data = json.loads(response.content)
self.assertEquals(len(data), 1)
self.assertEqual(data[0]['name'], 'Test certificate')
self.assertEqual(data[0]['description'], 'Test description')
self.assertEqual(data[0]['version'], CERTIFICATE_SCHEMA_VERSION)
@mock.patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
def test_certificate_info_not_in_response(self):
"""
        Test that the certificate is not rendered for an audit-only course mode.
"""
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON_WITH_SIGNATORIES
)
self.assertEqual(response.status_code, 201)
# in html response
result = self.client.get_html(self._url())
self.assertNotIn('Test certificate', result.content)
def test_unsupported_http_accept_header(self):
"""
        Test the response when a disallowed Accept header is present in the request.
"""
response = self.client.get(
self._url(),
HTTP_ACCEPT="text/plain",
)
self.assertEqual(response.status_code, 406)
def test_certificate_unsupported_method(self):
"""
Unit Test: test_certificate_unsupported_method
"""
resp = self.client.put(self._url())
self.assertEqual(resp.status_code, 405)
def test_not_permitted(self):
"""
        Test that a permission denied response is returned when the user
        does not have write access to the course.
"""
test_user_client, test_user = self.create_non_staff_authed_user_client()
CourseEnrollment.enroll(test_user, self.course.id)
response = test_user_client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 403)
self.assertIn("error", response.content)
def test_audit_course_mode_is_skipped(self):
"""
        Tests that the audit course mode is skipped when rendering the certificates page.
"""
CourseModeFactory.create(course_id=self.course.id)
CourseModeFactory.create(course_id=self.course.id, mode_slug='verified')
response = self.client.get_html(
self._url(),
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'verified')
self.assertNotContains(response, 'audit')
def test_audit_only_disables_cert(self):
"""
        Tests that a course with only the audit mode disables the certificates UI.
"""
CourseModeFactory.create(course_id=self.course.id, mode_slug='audit')
response = self.client.get_html(
self._url(),
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'This course does not use a mode that offers certificates.')
self.assertNotContains(response, 'This module is not enabled.')
self.assertNotContains(response, 'Loading')
@ddt.data(
['audit', 'verified'],
['verified'],
['audit', 'verified', 'credit'],
['verified', 'credit'],
['professional']
)
def test_non_audit_enables_cert(self, slugs):
"""
        Tests that any non-audit course mode enables the certificates UI.
"""
for slug in slugs:
CourseModeFactory.create(course_id=self.course.id, mode_slug=slug)
response = self.client.get_html(
self._url(),
)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'This course does not use a mode that offers certificates.')
self.assertNotContains(response, 'This module is not enabled.')
self.assertContains(response, 'Loading')
def test_assign_unique_identifier_to_certificates(self):
"""
        Test that certificates have unique ids.
"""
self._add_course_certificates(count=2)
json_data = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'is_active': True,
u'signatories': []
}
response = self.client.post(
self._url(),
data=json.dumps(json_data),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
new_certificate = json.loads(response.content)
for prev_certificate in self.course.certificates['certificates']:
self.assertNotEqual(new_certificate.get('id'), prev_certificate.get('id'))
@ddt.ddt
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class CertificatesDetailHandlerTestCase(EventTestMixin, CourseTestCase, CertificatesBaseTestCase, HelperMethods):
"""
Test cases for CertificatesDetailHandlerTestCase.
"""
_id = 0
def setUp(self): # pylint: disable=arguments-differ
"""
Set up CertificatesDetailHandlerTestCase.
"""
super(CertificatesDetailHandlerTestCase, self).setUp('contentstore.views.certificates.tracker')
def _url(self, cid=-1):
"""
Return url for the handler.
"""
cid = cid if cid > 0 else self._id
return reverse_course_url(
'certificates.certificates_detail_handler',
self.course.id,
kwargs={'certificate_id': cid},
)
def test_can_create_new_certificate_if_it_does_not_exist(self):
"""
PUT/POST new certificate.
"""
expected = {
u'id': 666,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'Test certificate',
u'description': u'Test description',
u'is_active': True,
u'course_title': u'Course Title Override',
u'signatories': []
}
response = self.client.put(
self._url(cid=666),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
content = json.loads(response.content)
self.assertEqual(content, expected)
self.assert_event_emitted(
'edx.certificate.configuration.created',
course_id=unicode(self.course.id),
configuration_id=666,
)
def test_can_edit_certificate(self):
"""
Edit certificate, check its id and modified fields.
"""
self._add_course_certificates(count=2)
expected = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'is_active': True,
u'course_title': u'Course Title Override',
u'signatories': []
}
response = self.client.put(
self._url(cid=1),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
content = json.loads(response.content)
self.assertEqual(content, expected)
self.assert_event_emitted(
'edx.certificate.configuration.modified',
course_id=unicode(self.course.id),
configuration_id=1,
)
self.reload_course()
# Verify that certificate is properly updated in the course.
course_certificates = self.course.certificates['certificates']
self.assertEqual(len(course_certificates), 2)
self.assertEqual(course_certificates[1].get('name'), u'New test certificate')
self.assertEqual(course_certificates[1].get('description'), 'New test description')
def test_can_edit_certificate_without_is_active(self):
"""
        Tests that a user is able to edit a certificate when the is_active
        attribute is not present, since old courses might not have the
        is_active attribute in their certificate data.
"""
certificates = [
{
'id': 1,
'name': 'certificate with is_active',
'description': 'Description ',
'signatories': [],
'version': CERTIFICATE_SCHEMA_VERSION,
}
]
self.course.certificates = {'certificates': certificates}
self.save_course()
expected = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'is_active': True,
u'course_title': u'Course Title Override',
u'signatories': []
}
response = self.client.post(
self._url(cid=1),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 201)
content = json.loads(response.content)
self.assertEqual(content, expected)
def test_can_delete_certificate_with_signatories(self):
"""
Delete certificate
"""
self._add_course_certificates(count=2, signatory_count=1)
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.assert_event_emitted(
'edx.certificate.configuration.deleted',
course_id=unicode(self.course.id),
configuration_id='1',
)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates), 1)
self.assertEqual(certificates[0].get('name'), 'Name 0')
self.assertEqual(certificates[0].get('description'), 'Description 0')
def test_delete_certificate_without_write_permissions(self):
"""
        Tests certificate deletion without write permission on the course.
"""
self._add_course_certificates(count=2, signatory_count=1)
user = UserFactory()
self.client.login(username=user.username, password='test')
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
def test_delete_certificate_without_global_staff_permissions(self):
"""
        Tests deletion of an active certificate without global staff permission on the course.
"""
self._add_course_certificates(count=2, signatory_count=1, is_active=True)
user = UserFactory()
for role in [CourseInstructorRole, CourseStaffRole]:
role(self.course.id).add_users(user)
self.client.login(username=user.username, password='test')
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
def test_update_active_certificate_without_global_staff_permissions(self):
"""
        Tests update of an active certificate without global staff permission on the course.
"""
self._add_course_certificates(count=2, signatory_count=1, is_active=True)
cert_data = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'course_title': u'Course Title Override',
u'org_logo_path': '',
u'is_active': False,
u'signatories': []
}
user = UserFactory()
for role in [CourseInstructorRole, CourseStaffRole]:
role(self.course.id).add_users(user)
self.client.login(username=user.username, password='test')
response = self.client.put(
self._url(cid=1),
data=json.dumps(cert_data),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 403)
def test_delete_non_existing_certificate(self):
"""
        Try to delete a non-existing certificate. It should return status code 404 Not Found.
"""
self._add_course_certificates(count=2)
response = self.client.delete(
self._url(cid=100),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 404)
def test_can_delete_signatory(self):
"""
Delete an existing certificate signatory
"""
self._add_course_certificates(count=2, signatory_count=3)
certificates = self.course.certificates['certificates']
signatory = certificates[1].get("signatories")[1]
image_asset_location = AssetLocation.from_deprecated_string(signatory['signature_image_path'])
content = contentstore().find(image_asset_location)
self.assertIsNotNone(content)
test_url = '{}/signatories/1'.format(self._url(cid=1))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates[1].get("signatories")), 2)
# make sure signatory signature image is deleted too
self.assertRaises(NotFoundError, contentstore().find, image_asset_location)
def test_deleting_signatory_without_signature(self):
"""
        Delete a signatory whose signature image has already been removed or does not exist.
"""
self._add_course_certificates(count=2, signatory_count=4)
test_url = '{}/signatories/3'.format(self._url(cid=1))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
def test_delete_signatory_non_existing_certificate(self):
"""
        Try to delete a signatory of a non-existing certificate. It should return status code 404 Not Found.
"""
self._add_course_certificates(count=2)
test_url = '{}/signatories/1'.format(self._url(cid=100))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 404)
def test_certificate_activation_success(self):
"""
Activate and Deactivate the course certificate
"""
test_url = reverse_course_url('certificates.certificate_activation_handler', self.course.id)
self._add_course_certificates(count=1, signatory_count=2)
is_active = True
for i in range(2):
if i == 1:
is_active = not is_active
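            # The first pass activates the certificate; the second flips
            # is_active back, exercising both activation and deactivation.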
response = self.client.post(
test_url,
data=json.dumps({"is_active": is_active}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEquals(response.status_code, 200)
course = self.store.get_course(self.course.id)
certificates = course.certificates['certificates']
self.assertEqual(certificates[0].get('is_active'), is_active)
cert_event_type = 'activated' if is_active else 'deactivated'
self.assert_event_emitted(
'.'.join(['edx.certificate.configuration', cert_event_type]),
course_id=unicode(self.course.id),
)
@ddt.data(True, False)
def test_certificate_activation_without_write_permissions(self, activate):
"""
        Tests that certificate activation and deactivation are not allowed if
        the user does not have write permissions on the course.
"""
test_url = reverse_course_url('certificates.certificate_activation_handler', self.course.id)
self._add_course_certificates(count=1, signatory_count=2)
user = UserFactory()
self.client.login(username=user.username, password='test')
response = self.client.post(
test_url,
data=json.dumps({"is_active": activate}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEquals(response.status_code, 403)
@ddt.data(True, False)
def test_certificate_activation_without_global_staff_permissions(self, activate):
"""
        Tests that certificate activation and deactivation are not allowed if
        the user does not have global staff permissions on the course.
"""
test_url = reverse_course_url('certificates.certificate_activation_handler', self.course.id)
self._add_course_certificates(count=1, signatory_count=2)
user = UserFactory()
for role in [CourseInstructorRole, CourseStaffRole]:
role(self.course.id).add_users(user)
self.client.login(username=user.username, password='test')
response = self.client.post(
test_url,
data=json.dumps({"is_active": activate}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEquals(response.status_code, 403)
def test_certificate_activation_failure(self):
"""
        Certificate activation should fail with a permission denied (403)
        response when the user does not have write access to the course.
"""
test_url = reverse_course_url('certificates.certificate_activation_handler', self.course.id)
test_user_client, test_user = self.create_non_staff_authed_user_client()
CourseEnrollment.enroll(test_user, self.course.id)
self._add_course_certificates(count=1, signatory_count=2)
response = test_user_client.post(
test_url,
data=json.dumps({"is_active": True}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEquals(response.status_code, 403)
course = self.store.get_course(self.course.id)
certificates = course.certificates['certificates']
self.assertEqual(certificates[0].get('is_active'), False)
| agpl-3.0 | 3,490,135,686,985,350,000 | 36.247475 | 115 | 0.605898 | false |
cowhi/HFO | predator_prey/expLeno.py | 1 | 1502 | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 11 08:49:36 2016
Experiment facilitation
@author: Felipe Leno
"""
import subprocess
from threading import Thread
import math
import argparse
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('-a','--algorithm', default='Dummy')
parser.add_argument('-l','--log_folder',default='./results/')
return parser.parse_args()
def thread_agent(initTrial,endTrial):
arg = get_args()
alg = arg.algorithm
logFolder = arg.log_folder
seed = 0
command = "python experiment.py -l" + logFolder + " -it " + str(initTrial) + " -r " + str(endTrial) + \
" -a1 " + alg + " -a2 "+alg+ " -a3 "+alg + " -s "+str(seed)
print "******Init Thread "+str(initTrial)+"-"+str(endTrial)+" - "+ alg+"**********"
subprocess.call(command, shell=True)
def runMultipleThreads():
numThreads = 8
numTrials = 1000
dividedT = int(math.floor(numTrials / numThreads))
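    # Each thread gets a contiguous block of floor(numTrials / numThreads)
    # trials; any remainder is silently dropped when numTrials is not evenly
    # divisible by numThreads (1000 / 8 = 125, so nothing is lost here).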
agentThreads = []
for i in range(numThreads):
agentThreads.append(Thread(target = thread_agent, args=((i*dividedT)+1,(i+1)*dividedT)))
agentThreads[i].start()
#Waiting for program termination
for i in range(len(agentThreads)):
agentThreads[i].join()
print "End of Executions ***"
if __name__ == '__main__':
runMultipleThreads() | mit | 8,878,990,623,513,268,000 | 24.913793 | 111 | 0.555925 | false |
linuxmidhun/0install | tests/testalias.py | 9 | 3546 | #!/usr/bin/env python
from basetest import BaseTest, StringIO
import sys, tempfile, os
import unittest
sys.path.insert(0, '..')
from zeroinstall import alias
expected_script = """#!/bin/sh
exec 0launch 'http://example.com/foo.xml' "$@"
"""
old_script = """#!/bin/sh
if [ "$*" = "--versions" ]; then
exec 0launch -gd 'http://example.com/foo.xml' "$@"
else
exec 0launch 'http://example.com/foo.xml' "$@"
fi
"""
expected_script_main = """#!/bin/sh
exec 0launch --main 'a'\\'''\\''\\test' 'http://example.com/foo.xml' "$@"
"""
expected_script_command = """#!/bin/sh
exec 0launch --command 'a'\\'''\\''\\test' 'http://example.com/foo.xml' "$@"
"""
old_script_main = """#!/bin/sh
if [ "$*" = "--versions" ]; then
exec 0launch -gd 'http://example.com/foo.xml' "$@"
else
exec 0launch --main 'a'\\'''\\''\\test' 'http://example.com/foo.xml' "$@"
fi
"""
class TestAlias(BaseTest):
def setUp(self):
BaseTest.setUp(self)
def testWrite(self):
buf = StringIO()
alias.write_script(buf, 'http://example.com/foo.xml', None)
self.assertEqual(expected_script, buf.getvalue())
buf = StringIO()
alias.write_script(buf, 'http://example.com/foo.xml', 'a\'\'\\test')
self.assertEqual(expected_script_main, buf.getvalue())
buf = StringIO()
alias.write_script(buf, 'http://example.com/foo.xml', command = 'a\'\'\\test')
self.assertEqual(expected_script_command, buf.getvalue())
def testParse(self):
tmp = tempfile.NamedTemporaryFile(mode = 'wt')
tmp.write(expected_script)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual(None, main)
tmp = tempfile.NamedTemporaryFile(mode = 'wt')
tmp.write(expected_script_main)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual('a\'\'\\test', main)
tmp = tempfile.NamedTemporaryFile(mode = 'wt')
tmp.write(expected_script_command)
tmp.flush()
tmp.seek(0)
info = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', info.uri)
self.assertEqual('a\'\'\\test', info.command)
self.assertEqual(None, info.main)
def testParseOld(self):
with tempfile.NamedTemporaryFile(mode = 'wt') as tmp:
tmp.write(old_script)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual(None, main)
with tempfile.NamedTemporaryFile(mode = 'wt') as tmp:
tmp.write(old_script_main)
tmp.flush()
tmp.seek(0)
uri, main = alias.parse_script(tmp.name)
self.assertEqual('http://example.com/foo.xml', uri)
self.assertEqual('a\'\'\\test', main)
def testParseException(self):
tmp = tempfile.NamedTemporaryFile(mode = 'wb', delete = False)
tmp.write(bytes([240]))
tmp.close()
try:
alias.parse_script(tmp.name)
assert False
except alias.NotAnAliasScript:
pass
os.unlink(tmp.name)
tmp = tempfile.NamedTemporaryFile(mode = 'wt')
tmp.write('hi' + expected_script)
tmp.flush()
tmp.seek(0)
try:
alias.parse_script(tmp.name)
assert False
except alias.NotAnAliasScript:
pass
tmp = tempfile.NamedTemporaryFile(mode = 'wt')
tmp.write(expected_script_command.replace('command', 'bob'))
tmp.flush()
tmp.seek(0)
try:
alias.parse_script(tmp.name)
assert False
except alias.NotAnAliasScript as ex:
assert 'does not look like a script created by 0alias' in str(ex)
pass
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -7,265,845,992,342,441,000 | 26.068702 | 80 | 0.663847 | false |
seoester/Git-Deployment-Handler | gitdh/modules/deployment.py | 1 | 3563 | # -*- coding: utf-8 -*-
from gitdh.modules import Module
from syslog import syslog, LOG_INFO, LOG_WARNING
from gitdh.gitdhutils import filterOnStatusExt, deleteDir, deleteDirContent
import os
from os.path import abspath, join, exists, isdir, isfile
from subprocess import check_call, check_output, CalledProcessError
try:
from subprocess import DEVNULL
except ImportError:
# < Python 3.3 compatibility
from gitdh.gitdhutils import getDevNull
DEVNULL = getDevNull()
class Deployment(Module):
def isEnabled(self, action):
return True
def filter(self, commits):
commits = filterOnStatusExt('queued', commits)
branches = {}
for commit in commits:
if not commit.branch in branches:
branches[commit.branch] = []
branches[commit.branch].append(commit)
for branch in branches.values():
sortedCommits = sorted(branch, key=lambda commit: commit.date)
for commit in sortedCommits[:-1]:
self._removeCommit(commit)
def processRemoved(self, commits):
		if self.dbBe is not None:
for commit in commits:
if commit.removers == [self]:
self.dbBe.setStatusSkipped(commit)
def process(self, commits):
commits = filterOnStatusExt('queued', commits)
for commit in commits:
			if self.dbBe is not None:
self.dbBe.setStatusWorking(commit)
confSection = self.config.branches[commit.branch]
rmIntGitFiles = confSection.getboolean('RmIntGitFiles', True)
recursive = confSection.getboolean('Recursive', True)
if not hasattr(commit, 'deploymentSource'):
commit.deploymentSource = self.config.repoPath
syslog(LOG_INFO, "Deploying commit '%s' from '%s' : '%s' to '%s'" % (commit, commit.repository, commit.branch, confSection['Path']))
self._deleteUpdateRepo(confSection['Path'], commit.deploymentSource, commit.branch, commit, rmIntGitFiles=rmIntGitFiles, recursive=recursive)
			if self.dbBe is not None:
self.dbBe.setStatusFinished(commit)
def _deleteUpdateRepo(self, path, sourceRepository, branch, commit, rmIntGitFiles=True, recursive=True):
path = abspath(path)
try:
if not exists(path):
os.mkdir(path)
else:
if not isdir(path):
syslog(LOG_WARNING, "Not a directory '%s'" % (path,))
return
deleteDirContent(path)
except OSError as e:
syslog(LOG_WARNING, "OSError while clearing '%s': '%s'" % (path, e))
return
if recursive:
args = ('git', 'clone', '-q', '--recursive', '-b', branch, sourceRepository, path)
else:
args = ('git', 'clone', '-q', '-b', branch, sourceRepository, path)
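		# Clone the requested branch quietly, pin the working tree to the exact
		# commit being deployed, then hard-reset before optionally stripping the
		# internal git files below.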
try:
check_call(args, cwd=path, stdout=DEVNULL, stderr=DEVNULL)
			if commit is not None:
args = ('git', 'checkout', commit.hash)
check_call(args, cwd=path, stdout=DEVNULL, stderr=DEVNULL)
args = ('git', 'reset', '--hard', '-q')
check_call(args, cwd=path, stdout=DEVNULL, stderr=DEVNULL)
if rmIntGitFiles:
self._rmIntGitFiles(path)
except CalledProcessError as e:
syslog(LOG_WARNING, "Git Error: '%s'" % (e,))
def _rmIntGitFiles(self, path):
output = check_output(('git', 'submodule', 'status'), cwd=path, stderr=DEVNULL).decode('utf-8')
if len(output) != 0:
for line in output.strip().split('\n'):
try:
words = line.strip().split(' ')
self._rmIntGitFiles(join(path, words[1]))
except IndexError:
pass
if isdir(join(path, '.git')):
deleteDir(join(path, '.git'))
elif isfile(join(path, '.git')):
os.unlink(join(path, '.git'))
if isfile(join(path, '.gitignore')):
os.unlink(join(path, '.gitignore'))
if isfile(join(path, '.gitmodules')):
os.unlink(join(path, '.gitmodules'))
| mit | 1,584,386,687,040,601,300 | 32.933333 | 144 | 0.69071 | false |
deanhiller/databus | webapp/play1.3.x/python/Lib/CGIHTTPServer.py | 4 | 13053 | """CGI-savvy HTTP Server.
This module builds on SimpleHTTPServer by implementing GET and POST
requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
os.popen2() is used as a fallback, with slightly altered semantics; if
that function is not present either (e.g. on Macintosh), only Python
scripts are supported, and they are executed by the current process.
In all cases, the implementation is intentionally naive -- all
requests are executed synchronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
"""
__version__ = "0.4"
__all__ = ["CGIHTTPRequestHandler"]
import os
import sys
import urllib
import BaseHTTPServer
import SimpleHTTPServer
import select
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""Complete HTTP server with GET, HEAD and POST commands.
GET and HEAD also support running CGI scripts.
The POST command is *only* implemented for CGI scripts.
"""
# Determine platform specifics
have_fork = hasattr(os, 'fork')
have_popen2 = hasattr(os, 'popen2')
have_popen3 = hasattr(os, 'popen3')
# Make rfile unbuffered -- we need to read one line and then pass
# the rest to a subprocess, so we can't use buffered input.
rbufsize = 0
def do_POST(self):
"""Serve a POST request.
This is only implemented for CGI scripts.
"""
if self.is_cgi():
self.run_cgi()
else:
self.send_error(501, "Can only POST to CGI scripts")
def send_head(self):
"""Version of send_head that support CGI scripts"""
if self.is_cgi():
return self.run_cgi()
else:
return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
def is_cgi(self):
"""Test whether self.path corresponds to a CGI script,
and return a boolean.
This function sets self.cgi_info to a tuple (dir, rest)
when it returns True, where dir is the directory part before
the CGI script name. Note that rest begins with a
slash if it is not empty.
The default implementation tests whether the path
begins with one of the strings in the list
self.cgi_directories (and the next character is a '/'
or the end of the string).
"""
path = self.path
for x in self.cgi_directories:
i = len(x)
if path[:i] == x and (not path[i:] or path[i] == '/'):
self.cgi_info = path[:i], path[i+1:]
return True
return False
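    # For example, with the default cgi_directories below, a request path of
    # "/cgi-bin/test.py/extra" yields cgi_info == ('/cgi-bin', 'test.py/extra').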
cgi_directories = ['/cgi-bin', '/htbin']
def is_executable(self, path):
"""Test whether argument path is an executable file."""
return executable(path)
def is_python(self, path):
"""Test whether argument path is a Python script."""
head, tail = os.path.splitext(path)
return tail.lower() in (".py", ".pyw")
def run_cgi(self):
"""Execute a CGI script."""
path = self.path
dir, rest = self.cgi_info
i = path.find('/', len(dir) + 1)
while i >= 0:
nextdir = path[:i]
nextrest = path[i+1:]
scriptdir = self.translate_path(nextdir)
if os.path.isdir(scriptdir):
dir, rest = nextdir, nextrest
i = path.find('/', len(dir) + 1)
else:
break
# find an explicit query string, if present.
i = rest.rfind('?')
if i >= 0:
rest, query = rest[:i], rest[i+1:]
else:
query = ''
# dissect the part after the directory name into a script name &
# a possible additional path, to be stored in PATH_INFO.
i = rest.find('/')
if i >= 0:
script, rest = rest[:i], rest[i:]
else:
script, rest = rest, ''
scriptname = dir + '/' + script
scriptfile = self.translate_path(scriptname)
if not os.path.exists(scriptfile):
self.send_error(404, "No such CGI script (%r)" % scriptname)
return
if not os.path.isfile(scriptfile):
self.send_error(403, "CGI script is not a plain file (%r)" %
scriptname)
return
ispy = self.is_python(scriptname)
if not ispy:
if not (self.have_fork or self.have_popen2 or self.have_popen3):
self.send_error(403, "CGI script is not a Python script (%r)" %
scriptname)
return
if not self.is_executable(scriptfile):
self.send_error(403, "CGI script is not executable (%r)" %
scriptname)
return
# Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
# XXX Much of the following could be prepared ahead of time!
env = {}
env['SERVER_SOFTWARE'] = self.version_string()
env['SERVER_NAME'] = self.server.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PROTOCOL'] = self.protocol_version
env['SERVER_PORT'] = str(self.server.server_port)
env['REQUEST_METHOD'] = self.command
uqrest = urllib.unquote(rest)
env['PATH_INFO'] = uqrest
env['PATH_TRANSLATED'] = self.translate_path(uqrest)
env['SCRIPT_NAME'] = scriptname
if query:
env['QUERY_STRING'] = query
host = self.address_string()
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
authorization = self.headers.getheader("authorization")
if authorization:
authorization = authorization.split()
if len(authorization) == 2:
import base64, binascii
env['AUTH_TYPE'] = authorization[0]
if authorization[0].lower() == "basic":
try:
authorization = base64.decodestring(authorization[1])
except binascii.Error:
pass
else:
authorization = authorization.split(':')
if len(authorization) == 2:
env['REMOTE_USER'] = authorization[0]
# XXX REMOTE_IDENT
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
if length:
env['CONTENT_LENGTH'] = length
referer = self.headers.getheader('referer')
if referer:
env['HTTP_REFERER'] = referer
accept = []
for line in self.headers.getallmatchingheaders('accept'):
if line[:1] in "\t\n\r ":
accept.append(line.strip())
else:
accept = accept + line[7:].split(',')
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.getheader('user-agent')
if ua:
env['HTTP_USER_AGENT'] = ua
co = filter(None, self.headers.getheaders('cookie'))
if co:
env['HTTP_COOKIE'] = ', '.join(co)
# XXX Other HTTP_* headers
# Since we're setting the env in the parent, provide empty
# values to override previously set values
for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
env.setdefault(k, "")
os.environ.update(env)
self.send_response(200, "Script output follows")
decoded_query = query.replace('+', ' ')
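        # Three execution strategies follow: fork/exec where os.fork exists
        # (Unix), popen2/popen3 where it does not (Windows), and finally an
        # in-process execfile() fallback for platforms with neither.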
if self.have_fork:
# Unix -- fork as we should
args = [script]
if '=' not in decoded_query:
args.append(decoded_query)
nobody = nobody_uid()
self.wfile.flush() # Always flush before forking
pid = os.fork()
if pid != 0:
# Parent
pid, sts = os.waitpid(pid, 0)
# throw away additional data [see bug #427345]
while select.select([self.rfile], [], [], 0)[0]:
if not self.rfile.read(1):
break
if sts:
self.log_error("CGI script exit status %#x", sts)
return
# Child
try:
try:
os.setuid(nobody)
except os.error:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
os.execve(scriptfile, args, os.environ)
except:
self.server.handle_error(self.request, self.client_address)
os._exit(127)
elif self.have_popen2 or self.have_popen3:
# Windows -- use popen2 or popen3 to create a subprocess
import shutil
if self.have_popen3:
popenx = os.popen3
else:
popenx = os.popen2
cmdline = scriptfile
if self.is_python(scriptfile):
interp = sys.executable
if interp.lower().endswith("w.exe"):
# On Windows, use python.exe, not pythonw.exe
interp = interp[:-5] + interp[-4:]
cmdline = "%s -u %s" % (interp, cmdline)
if '=' not in query and '"' not in query:
cmdline = '%s "%s"' % (cmdline, query)
self.log_message("command: %s", cmdline)
try:
nbytes = int(length)
except (TypeError, ValueError):
nbytes = 0
files = popenx(cmdline, 'b')
fi = files[0]
fo = files[1]
if self.have_popen3:
fe = files[2]
if self.command.lower() == "post" and nbytes > 0:
data = self.rfile.read(nbytes)
fi.write(data)
# throw away additional data [see bug #427345]
while select.select([self.rfile._sock], [], [], 0)[0]:
if not self.rfile._sock.recv(1):
break
fi.close()
shutil.copyfileobj(fo, self.wfile)
if self.have_popen3:
errors = fe.read()
fe.close()
if errors:
self.log_error('%s', errors)
sts = fo.close()
if sts:
self.log_error("CGI script exit status %#x", sts)
else:
self.log_message("CGI script exited OK")
else:
# Other O.S. -- execute script in this process
save_argv = sys.argv
save_stdin = sys.stdin
save_stdout = sys.stdout
save_stderr = sys.stderr
try:
save_cwd = os.getcwd()
try:
sys.argv = [scriptfile]
if '=' not in decoded_query:
sys.argv.append(decoded_query)
sys.stdout = self.wfile
sys.stdin = self.rfile
execfile(scriptfile, {"__name__": "__main__"})
finally:
sys.argv = save_argv
sys.stdin = save_stdin
sys.stdout = save_stdout
sys.stderr = save_stderr
os.chdir(save_cwd)
except SystemExit, sts:
self.log_error("CGI script exit status %s", str(sts))
else:
self.log_message("CGI script exited OK")
nobody = None
def nobody_uid():
"""Internal routine to get nobody's uid"""
global nobody
if nobody:
return nobody
try:
import pwd
except ImportError:
return -1
try:
nobody = pwd.getpwnam('nobody')[2]
except KeyError:
nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
return nobody
def executable(path):
"""Test for executable file."""
try:
st = os.stat(path)
except os.error:
return False
return st.st_mode & 0111 != 0
def test(HandlerClass = CGIHTTPRequestHandler,
ServerClass = BaseHTTPServer.HTTPServer):
SimpleHTTPServer.test(HandlerClass, ServerClass)
if __name__ == '__main__':
test()
| mpl-2.0 | -5,609,523,342,159,247,000 | 33.663934 | 79 | 0.514441 | false |
eharney/cinder | cinder/zonemanager/drivers/cisco/cisco_fc_san_lookup_service.py | 1 | 15058 | # (c) Copyright 2014 Cisco Systems Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import random
from eventlet import greenthread
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
import six
from cinder import exception
from cinder.i18n import _
from cinder import ssh_utils
from cinder import utils
from cinder.zonemanager.drivers.cisco import cisco_fabric_opts as fabric_opts
import cinder.zonemanager.drivers.cisco.fc_zone_constants as zone_constant
from cinder.zonemanager import fc_san_lookup_service as fc_service
from cinder.zonemanager import utils as zm_utils
LOG = logging.getLogger(__name__)
class CiscoFCSanLookupService(fc_service.FCSanLookupService):
"""The SAN lookup service that talks to Cisco switches.
Version History:
1.0.0 - Initial version
"""
VERSION = "1.0.0"
def __init__(self, **kwargs):
"""Initializing the client."""
super(CiscoFCSanLookupService, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.create_configuration()
self.switch_user = ""
self.switch_port = ""
self.switch_pwd = ""
self.switch_ip = ""
self.sshpool = {}
def create_configuration(self):
"""Configuration specific to SAN context values."""
config = self.configuration
fabric_names = [x.strip() for x in config.fc_fabric_names.split(',')]
LOG.debug('Fabric Names: %s', fabric_names)
# There can be more than one SAN in the network and we need to
# get credentials for each for SAN context lookup later.
# Cisco Zonesets require VSANs
if fabric_names:
self.fabric_configs = fabric_opts.load_fabric_configurations(
fabric_names)
def get_device_mapping_from_network(self,
initiator_wwn_list,
target_wwn_list):
"""Provides the initiator/target map for available SAN contexts.
        Looks up the fcns database of each configured FC SAN to find logged-in
        devices and returns a map of initiator and target port WWNs for each
        fabric.
:param initiator_wwn_list: List of initiator port WWN
:param target_wwn_list: List of target port WWN
:returns: List -- device wwn map in following format
.. code-block:: python
{
<Fabric name>: {
'initiator_port_wwn_list':
('200000051e55a100', '200000051e55a121'..)
'target_port_wwn_list':
('100000051e55a100', '100000051e55a121'..)
}
}
:raises Exception: when connection to fabric is failed
"""
device_map = {}
formatted_target_list = []
formatted_initiator_list = []
fabric_map = {}
fabric_names = self.configuration.fc_fabric_names
if not fabric_names:
raise exception.InvalidParameterValue(
err=_("Missing Fibre Channel SAN configuration "
"param - fc_fabric_names"))
fabrics = [x.strip() for x in fabric_names.split(',')]
LOG.debug("FC Fabric List: %s", fabrics)
if fabrics:
for t in target_wwn_list:
formatted_target_list.append(zm_utils.get_formatted_wwn(t))
for i in initiator_wwn_list:
formatted_initiator_list.append(zm_utils.get_formatted_wwn(i))
for fabric_name in fabrics:
self.switch_ip = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_address')
self.switch_user = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_user')
self.switch_pwd = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_password')
self.switch_port = self.fabric_configs[fabric_name].safe_get(
'cisco_fc_fabric_port')
zoning_vsan = self.fabric_configs[fabric_name].safe_get(
'cisco_zoning_vsan')
# Get name server data from fabric and find the targets
# logged in
nsinfo = ''
LOG.debug("show fcns database for vsan %s", zoning_vsan)
nsinfo = self.get_nameserver_info(zoning_vsan)
LOG.debug("Lookup service:fcnsdatabase-%s", nsinfo)
LOG.debug("Lookup service:initiator list from caller-%s",
formatted_initiator_list)
LOG.debug("Lookup service:target list from caller-%s",
formatted_target_list)
visible_targets = [x for x in nsinfo
if x in formatted_target_list]
visible_initiators = [x for x in nsinfo
if x in formatted_initiator_list]
if visible_targets:
LOG.debug("Filtered targets is: %s", visible_targets)
# getting rid of the : before returning
for idx, elem in enumerate(visible_targets):
elem = str(elem).replace(':', '')
visible_targets[idx] = elem
else:
LOG.debug("No targets are in the fcns database"
" for vsan %s", zoning_vsan)
if visible_initiators:
                    # getting rid of the ':' before returning
for idx, elem in enumerate(visible_initiators):
elem = str(elem).replace(':', '')
visible_initiators[idx] = elem
else:
LOG.debug("No initiators are in the fcns database"
" for vsan %s", zoning_vsan)
fabric_map = {'initiator_port_wwn_list': visible_initiators,
'target_port_wwn_list': visible_targets
}
device_map[fabric_name] = fabric_map
LOG.debug("Device map for SAN context: %s", device_map)
return device_map
def get_nameserver_info(self, fabric_vsan):
"""Get fcns database info from fabric.
        This method will return the connected node port WWN list (local
        and remote) for the given switch fabric.
"""
cli_output = None
nsinfo_list = []
try:
cmd = ([zone_constant.FCNS_SHOW, fabric_vsan, ' | no-more'])
cli_output = self._get_switch_info(cmd)
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error("Failed collecting show fcns database for fabric")
if cli_output:
nsinfo_list = self._parse_ns_output(cli_output)
LOG.debug("Connector returning fcns info-%s", nsinfo_list)
return nsinfo_list
def _get_switch_info(self, cmd_list):
stdout, stderr, sw_data = None, None, None
try:
stdout, stderr = self._run_ssh(cmd_list, True, 1)
LOG.debug("CLI output from ssh - output: %s", stdout)
            if stdout:
sw_data = stdout.splitlines()
return sw_data
except processutils.ProcessExecutionError as e:
msg = _("Error while getting data via ssh: (command=%(cmd)s "
"error=%(err)s).") % {'cmd': cmd_list,
'err': six.text_type(e)}
LOG.error(msg)
raise exception.CiscoZoningCliException(reason=msg)
def _parse_ns_output(self, switch_data):
"""Parses name server data.
Parses nameserver raw data and adds the device port wwns to the list
:returns: list of device port wwn from ns info
"""
nsinfo_list = []
for line in switch_data:
            if " N " not in line:
continue
linesplit = line.split()
if len(linesplit) > 2:
node_port_wwn = linesplit[2]
nsinfo_list.append(node_port_wwn)
else:
msg = _("Malformed fcns output string: %s") % line
LOG.error(msg)
raise exception.InvalidParameterValue(err=msg)
return nsinfo_list
def _run_ssh(self, cmd_list, check_exit_code=True, attempts=1):
command = ' '.join(cmd_list)
if self.sshpool.get(self.switch_ip) is None:
self.sshpool[self.switch_ip] = ssh_utils.SSHPool(self.switch_ip,
self.switch_port,
None,
self.switch_user,
self.switch_pwd,
min_size=1,
max_size=5)
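        # One SSHPool is cached per switch IP so repeated lookups reuse an
        # authenticated connection instead of re-establishing it per command.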
last_exception = None
try:
with self.sshpool.get(self.switch_ip).item() as ssh:
while attempts > 0:
attempts -= 1
try:
return processutils.ssh_execute(
ssh,
command,
check_exit_code=check_exit_code)
except Exception as e:
msg = _("Exception: %s") % six.text_type(e)
LOG.error(msg)
last_exception = e
greenthread.sleep(random.randint(20, 500) / 100.0)
try:
raise processutils.ProcessExecutionError(
exit_code=last_exception.exit_code,
stdout=last_exception.stdout,
stderr=last_exception.stderr,
cmd=last_exception.cmd)
except AttributeError:
raise processutils.ProcessExecutionError(
exit_code=-1,
stdout="",
stderr="Error running SSH command",
cmd=command)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error("Error running SSH command: %s", command)
def _ssh_execute(self, cmd_list, check_exit_code=True, attempts=1):
"""Execute cli with status update.
Executes CLI commands where status return is expected.
cmd_list is a list of commands, where each command is itself
a list of parameters. We use utils.check_ssh_injection to check each
command, but then join then with " ; " to form a single command.
"""
# Check that each command is secure
for cmd in cmd_list:
utils.check_ssh_injection(cmd)
# Combine into a single command.
command = ' ; '.join(map(lambda x: ' '.join(x), cmd_list))
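        # e.g. [['show', 'zoneset'], ['show', 'zone']] becomes the single
        # command string "show zoneset ; show zone" (illustrative values).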
if self.sshpool.get(self.switch_ip) is None:
self.sshpool[self.switch_ip] = ssh_utils.SSHPool(self.switch_ip,
self.switch_port,
None,
self.switch_user,
self.switch_pwd,
min_size=1,
max_size=5)
stdin, stdout, stderr = None, None, None
LOG.debug("Executing command via ssh: %s", command)
last_exception = None
try:
with self.sshpool.get(self.switch_ip).item() as ssh:
while attempts > 0:
attempts -= 1
try:
stdin, stdout, stderr = ssh.exec_command(command)
greenthread.sleep(random.randint(20, 500) / 100.0)
channel = stdout.channel
exit_status = channel.recv_exit_status()
LOG.debug("Exit Status from ssh:%s", exit_status)
# exit_status == -1 if no exit code was returned
if exit_status != -1:
LOG.debug('Result was %s', exit_status)
if check_exit_code and exit_status != 0:
raise processutils.ProcessExecutionError(
exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=command)
else:
return True
else:
return True
except Exception as e:
msg = _("Exception: %s") % six.text_type(e)
LOG.error(msg)
last_exception = e
greenthread.sleep(random.randint(20, 500) / 100.0)
LOG.debug("Handling error case after SSH:%s", last_exception)
try:
raise processutils.ProcessExecutionError(
exit_code=last_exception.exit_code,
stdout=last_exception.stdout,
stderr=last_exception.stderr,
cmd=last_exception.cmd)
except AttributeError:
raise processutils.ProcessExecutionError(
exit_code=-1,
stdout="",
stderr="Error running SSH command",
cmd=command)
except Exception as e:
with excutils.save_and_reraise_exception():
msg = (_("Error executing command via ssh: %s") %
six.text_type(e))
LOG.error(msg)
finally:
if stdin:
stdin.flush()
stdin.close()
if stdout:
stdout.close()
if stderr:
stderr.close()
def cleanup(self):
self.sshpool = {}
| apache-2.0 | -1,724,089,720,864,074,500 | 40.827778 | 78 | 0.505379 | false |
arruda/cloudfuzzy | fuzzy_modeling/models/rules.py | 1 | 3338 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from fuzzy_modeling.models.adjectives import AdjectiveModel
from fuzzy_modeling.models.norms import NormModel
from fuzzy_modeling.models.systems import SystemModel
from fuzzy_modeling.models.operators import OperatorModel
from fuzzy.Rule import Rule
from fuzzy_modeling.models.utils import PyFuzzyMixin
class RuleModel(models.Model, PyFuzzyMixin):
"""
A Fuzzy Rule model
"""
class Meta:
app_label = 'fuzzy_modeling'
name = models.CharField(_("Name"), blank=False, null=False, max_length=250)
adjective = models.ForeignKey(AdjectiveModel)
cer = models.ForeignKey(NormModel)
operator = models.ForeignKey(OperatorModel)
certainty = models.DecimalField(
_("Certainty"),
max_digits=10,
decimal_places=2,
default=float("1")
)
    # blank/null are allowed so that the rule can be created in parts
system = models.ForeignKey(SystemModel, blank=True, null=True)
def _get_adj_instance(self, system):
"""
        Return the existing instance of the adjective that is being used in this rule.
        Pyfuzzy needs the instances to be the same, so that when the inference is
        run the results stay consistent.
"""
ovar_model = self.adjective.ovar
ovar = system.variables[ovar_model.name]
adj = ovar.adjectives[self.adjective.name]
return adj
def get_pyfuzzy(self, system=None):
"""
Return the Pyfuzzy class of this model
"""
# try:
adjective = self._get_adj_instance(system)
# except:
# adjective = self.adjective.get_pyfuzzy()
cer = self.cer.get_pyfuzzy()
operator = self.operator.get_pyfuzzy(system=system)
rule = Rule(
adjective=adjective,
operator=operator,
certainty=self.certainty,
CER=cer
)
return rule
@classmethod
def _get_existing_adjective_model(cls, systemModel, adjective):
output_vars_pks = systemModel.outputvariablemodel_set.all().values_list('pk', flat=True)
return AdjectiveModel.objects.get(name=adjective.name, ovar__in=output_vars_pks)
@classmethod
def from_pyfuzzy(cls, pyfuzzy, system=None, systemModel=None):
"""
Return the model representation of an instance of the pyfuzzy attr
"""
rule_model = cls(name=pyfuzzy.name, certainty=pyfuzzy.certainty)
# rule_model.save()
adj_model = None
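        # Prefer an adjective model already attached to one of the system's
        # output variables; fall back to building a fresh one from pyfuzzy.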
try:
adj_model = cls._get_existing_adjective_model(
systemModel=systemModel,
adjective=pyfuzzy.adjective)
except:
adj_model = cls.adjective.field.related.parent_model.from_pyfuzzy(pyfuzzy.adjective)
# adj
rule_model.adjective = adj_model
# cer
cer_model = cls.cer.field.related.parent_model.from_pyfuzzy(pyfuzzy.CER)
rule_model.cer = cer_model
# operator
op_model = cls.operator.field.related.parent_model.from_pyfuzzy(pyfuzzy.operator, system, systemModel)
rule_model.operator = op_model
rule_model.save()
return rule_model
def __unicode__(self):
return self.name
| mit | -7,112,351,851,709,359,000 | 29.072072 | 110 | 0.636309 | false |