# Code-file dataset dump: each entry below is one source file, introduced by a
# header comment giving its repository, path, and license.

# ============================================================================
# Wikidata/StrepHit :: strephit/side_projects/wlm.py  (license: gpl-3.0)
# ============================================================================
# -*- encoding: utf-8 -*-
import click
import csv
import logging
from strephit.commons import wikidata, cache
from collections import defaultdict
logger = logging.getLogger(__name__)
COLUMN_TO_PROPERTY = {
'località': 'P131',
'Prov': 'P131',
'indirizzo': 'P969',
'proprieta': 'P127',
'WLMID': 'P2186',
}
@cache.cached
@wikidata.resolver('P127', 'P131')
def place_resolver(property, value, language, **kwargs):
types = [
3146899, # diocese of the Catholic Church
747074, # comune of Italy
515, # city
15089, # province of Italy
]
value = value.lower()
if 'com.' in value or 'comune' in value:
value = value.replace('com.', '').replace('comune', '').strip()
types = [747074]
elif 'prov.' in value or 'provincia' in value:
value = value.replace('prov.', '').replace('provincia', '').strip()
types = [15089]
results = wikidata.search(value, language, type_=types)
if results:
res = results[0]['id']
logger.debug('resolved "%s" to %s', value, res.encode('utf8'))
return res
else:
logger.debug('could not resolve "%s"', value)
return ''
@wikidata.resolver('P2186')
def wlmid_resolver(property, value, language, **kwargs):
return value
@cache.cached
@wikidata.resolver('P969')
def indirizzo_resolver(property, value, language, **kwargs):
return '%s@"%s"' % (language, value)
def process_row(data):
subject = data['emergenza']
resolved = defaultdict(lambda: [])
for k, v in data.iteritems():
if COLUMN_TO_PROPERTY.get(k):
v = wikidata.resolve(COLUMN_TO_PROPERTY[k], v.decode('utf8'), 'it')
if v:
resolved[COLUMN_TO_PROPERTY[k]].append(v)
info = {k: v for k, v in resolved.iteritems()}
subject = wikidata.resolver_with_hints('ddd', subject, 'it', **info)
if subject:
statements = []
for property, value in resolved.iteritems():
stmt = wikidata.finalize_statement(subject, property, value,
'it', resolve_property=False,
resolve_value=False)
if stmt is not None:
statements.append(stmt)
else:
logger.warn('could not find the wikidata id of "%s"' % data['emergenza'])
statements = None
return statements
@click.command()
@click.argument('input', type=click.File('r'))
@click.argument('output', type=click.File('w'))
@click.option('--skipped', type=click.File('w'), help='save the ids of un-resolved monuments')
def main(input, output, skipped):
rows = count = skipped_count = 0
for row in csv.DictReader(input):
rows += 1
statements = process_row(row)
if statements is None:
skipped_count += 1
if skipped:
skipped.write(row['WLMID'])
skipped.write('\n')
else:
for each in statements:
count += 1
output.write(each.encode('utf8'))
output.write('\n')
logger.info('Processed %d items (skipped %d), produced %d statements',
rows, skipped_count, count)
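
# Usage sketch: the click command above reads a CSV of monuments and writes
# serialized Wikidata statements. File names below are illustrative; a click
# command can also be invoked programmatically with an argument list:
#   main(['monuments.csv', 'statements.out', '--skipped', 'skipped_ids.txt'])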

# ============================================================================
# paristote/zipf :: app/zipf.py  (license: gpl-2.0)
# ============================================================================
import subprocess
import os
import re
import collections
import sys
import shutil
import urllib
import urlparse
from flask import Flask, redirect, render_template, url_for, request
app=Flask(__name__)
class WordCounter(dict):
def __missing__(self, key):
return 0
# Debug mode True or False
DEBUG=(len(sys.argv) >= 2 and "debug"==sys.argv[1])
# Where all analyzed repos are cloned
TEMP_FOLDER="../clones"
# Pattern to identify a valid word: only letters, 2 or more times
VALID_WORD_PATTERN=re.compile('^[a-zA-Z]{2,}$')
# Pattern to separate words in a line: any of : _ = ' " ( ) and whitespace chars
WORD_SEPARATOR=re.compile('[:_=\'"\s\(\)]')
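# e.g. re.split(WORD_SEPARATOR, "x='y'") -> ['x', '', 'y', '']; isValid()
# below then rejects the empty strings and the single-letter fragments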
# Error codes
ERR_EMPTY_ORGA_REPO=10
ERR_ORGA_REPO_DONT_EXIST=11
# Routes
@app.route("/")
def home():
return render_template("index.html")
@app.route("/zipf", methods=['POST'])
def zipf():
# Prepare variables
orga=request.form['orga']
repo=request.form['repo']
logd("GitHub organization: "+orga)
logd("GitHub repository : "+repo)
if len(orga) == 0 or len(repo) == 0 :
return redirect(url_for("error", err=ERR_EMPTY_ORGA_REPO))
gitUrl="https://github.com/{0}/{1}.git".format(orga, repo)
cloneFolder="{0}/{1}/{2}".format(TEMP_FOLDER, orga, repo)
# Clone the repository
gitCloneSuccess=gitExport(gitUrl, cloneFolder)
if not gitCloneSuccess:
return redirect(url_for("error", err=ERR_ORGA_REPO_DONT_EXIST))
# Get the list of all files in the repository
files=walkProject(cloneFolder)
# Count each word occurrences
wordCount=WordCounter()
for f in files:
countWordsInFile(f, wordCount)
# Keep only the top 50 words and order them desc
mostCommon=collections.Counter(wordCount).most_common(50)
# Transform the words dic into key=value url query string
words=urllib.urlencode(mostCommon)
return redirect(url_for("result", orga=orga, repo=repo, words=words))
@app.route("/result/<orga>/<repo>/<words>")
def result(orga, repo, words):
# Transform the query string back into a dictionary of tuples (word, occurrences)
result=urlparse.parse_qsl(words)
return render_template("result.html", orga=orga, repo=repo, result=result)
@app.route("/error/<err>")
def error(err):
return render_template("index.html", error=err)
# ZIPF
# Clone the GitHub repo and delete all .git folders
# Return False if the git command failed
# Return True otherwise, eg if the destination folder already exists
def gitExport(gitUrl, cloneFolder):
if os.path.exists(cloneFolder):
return True
try:
subprocess.check_call(["git", "clone", "-q", "--depth=1", gitUrl, cloneFolder])
except subprocess.CalledProcessError as cpe:
logd("Command failed "+str(cpe.cmd))
return False
try:
subprocess.check_call(["rm", "-rf", cloneFolder+"/.git"])
except subprocess.CalledProcessError as cpe:
logd("Command failed "+str(cpe.cmd))
return True
# True if the given string is a valid word
def isValid(word):
return re.match(VALID_WORD_PATTERN, word)
# Walk through all folders and return a list of file paths
def walkProject(cloneFolder):
res=set()
for root, dirs, files in os.walk(cloneFolder):
for fname in files:
if fname[0] != ".":
res.add(os.path.join(root,fname))
return res
# Open the given file, then parse each line to count each word number of occurrence
# The result is added to the given WordCounter
def countWordsInFile(fileName, wc):
if os.path.exists(fileName):
with open(fileName, 'r') as f:
for line in f:
words=re.split(WORD_SEPARATOR, line)
for word in words:
if isValid(word):
wc[word.lower()]+=1
# Print a debug log message
def logd(message):
if DEBUG:
print "[DEBUG] " + str(message)
# Delete the folder with the given full path
# Unused
def deleteFolder(folder):
if os.path.exists(folder):
shutil.rmtree(folder)
# Server
if DEBUG:
print " * Running in DEBUG mode"
if __name__ == "__main__":
app.run(debug=DEBUG)

# ============================================================================
# pughlab/cbioportal :: core/src/main/scripts/migrate_db.py  (license: agpl-3.0)
# ============================================================================
#!/usr/bin/env python3
import os
import sys
import contextlib
import argparse
from collections import OrderedDict
import MySQLdb
# globals
ERROR_FILE = sys.stderr
OUTPUT_FILE = sys.stdout
DATABASE_HOST = 'db.host'
DATABASE_NAME = 'db.portal_db_name'
DATABASE_USER = 'db.user'
DATABASE_PW = 'db.password'
VERSION_TABLE = 'info'
VERSION_FIELD = 'DB_SCHEMA_VERSION'
REQUIRED_PROPERTIES = [DATABASE_HOST, DATABASE_NAME, DATABASE_USER, DATABASE_PW]
ALLOWABLE_GENOME_REFERENCES = ['37', 'hg19', 'GRCh37', '38', 'hg38', 'GRCh38', 'mm10', 'GRCm38']
DEFAULT_GENOME_REFERENCE = 'hg19'
MULTI_REFERENCE_GENOME_SUPPORT_MIGRATION_STEP = (2,11,0)
class PortalProperties(object):
""" Properties object class, just has fields for db conn """
def __init__(self, database_host, database_name, database_user, database_pw):
# default port:
self.database_port = 3306
# if there is a port added to the host name, split and use this one:
if ':' in database_host:
host_and_port = database_host.split(':')
self.database_host = host_and_port[0]
if self.database_host.strip() == 'localhost':
print(
"Invalid host config '" + database_host + "' in properties file. If you want to specify a port on local host use '127.0.0.1' instead of 'localhost'",
file=ERROR_FILE)
sys.exit(1)
self.database_port = int(host_and_port[1])
else:
self.database_host = database_host
self.database_name = database_name
self.database_user = database_user
self.database_pw = database_pw
def get_db_cursor(portal_properties):
""" Establishes a MySQL connection """
try:
connection = MySQLdb.connect(host=portal_properties.database_host,
port = portal_properties.database_port,
user = portal_properties.database_user,
passwd = portal_properties.database_pw,
db = portal_properties.database_name)
except MySQLdb.Error as exception:
print(exception, file=ERROR_FILE)
port_info = ''
if portal_properties.database_host.strip() != 'localhost':
# only add port info if host is != localhost (since with localhost apparently sockets are used and not the given port) TODO - perhaps this applies for all names vs ips?
port_info = " on port " + str(portal_properties.database_port)
message = (
"--> Error connecting to server "
+ portal_properties.database_host
+ port_info)
print(message, file=ERROR_FILE)
raise ConnectionError(message) from exception
if connection is not None:
return connection, connection.cursor()
def get_portal_properties(properties_filename):
""" Returns a properties object """
properties = {}
with open(properties_filename, 'r') as properties_file:
for line in properties_file:
line = line.strip()
# skip line if its blank or a comment
if len(line) == 0 or line.startswith('#'):
continue
try:
name, value = line.split('=', maxsplit=1)
except ValueError:
print(
'Skipping invalid entry in property file: %s' % (line),
file=ERROR_FILE)
continue
properties[name] = value.strip()
missing_properties = []
for required_property in REQUIRED_PROPERTIES:
if required_property not in properties or len(properties[required_property]) == 0:
missing_properties.append(required_property)
if missing_properties:
print(
'Missing required properties : (%s)' % (', '.join(missing_properties)),
file=ERROR_FILE)
return None
# return an instance of PortalProperties
return PortalProperties(properties[DATABASE_HOST],
properties[DATABASE_NAME],
properties[DATABASE_USER],
properties[DATABASE_PW])
def get_db_version(cursor):
""" gets the version number of the database """
# First, see if the version table exists
version_table_exists = False
try:
cursor.execute('select table_name from information_schema.tables')
for row in cursor.fetchall():
if VERSION_TABLE == row[0].lower().strip():
version_table_exists = True
except MySQLdb.Error as msg:
print(msg, file=ERROR_FILE)
return None
if not version_table_exists:
return (0, 0, 0)
# Now query the table for the version number
try:
cursor.execute('select ' + VERSION_FIELD + ' from ' + VERSION_TABLE)
for row in cursor.fetchall():
version = tuple(map(int, row[0].strip().split('.')))
except MySQLdb.Error as msg:
print(msg, file=ERROR_FILE)
return None
return version
def is_version_larger(version1, version2):
""" Checks if version 1 is larger than version 2 """
if version1[0] > version2[0]:
return True
if version2[0] > version1[0]:
return False
if version1[1] > version2[1]:
return True
if version2[1] > version1[1]:
return False
if version1[2] > version2[2]:
return True
return False
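# For equal-length numeric tuples this agrees with Python's built-in tuple
# ordering, e.g. is_version_larger((2, 11, 0), (2, 10, 5)) and
# (2, 11, 0) > (2, 10, 5) are both True.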
def print_all_check_reference_genome_warnings(warnings, force_migration):
""" Format warnings for output according to mode, and print to ERROR_FILE """
space = ' '
indent = 28 * space
allowable_reference_genome_string = ','.join(ALLOWABLE_GENOME_REFERENCES)
clean_up_string = ' Please clean up the mutation_event table and ensure it only contains references to one of the valid reference genomes (%s).' % (allowable_reference_genome_string)
use_default_string = 'the default reference genome (%s) will be used in place of invalid reference genomes and the first encountered reference genome will be used.' % (DEFAULT_GENOME_REFERENCE)
use_force_string = 'OR use the "--force" option to override this warning, then %s' % (use_default_string)
forcing_string = '--force option in effect : %s' % (use_default_string)
for warning in warnings:
if force_migration:
print('%s%s\n%s%s\n' % (indent, warning, indent, forcing_string), file=ERROR_FILE)
else:
print('%s%s%s\n%s%s\n' % (indent, warning, clean_up_string, indent, use_force_string), file=ERROR_FILE)
def validate_reference_genome_values_for_study(warnings, ncbi_to_count, study):
""" check if there are unrecognized or varied ncbi_build values for the study, add to warnings if problems are found """
if len(ncbi_to_count) == 1:
for retrieved_ncbi_build in ncbi_to_count: # single iteration
if retrieved_ncbi_build.upper() not in [x.upper() for x in ALLOWABLE_GENOME_REFERENCES]:
msg = 'WARNING: Study %s contains mutation_event records with unsupported NCBI_BUILD value %s.'%(study, retrieved_ncbi_build)
warnings.append(msg)
elif len(ncbi_to_count) > 1:
msg = 'WARNING: Study %s contains mutation_event records with %s NCBI_BUILD values {ncbi_build:record_count,...} %s.'%(study, len(ncbi_to_count), ncbi_to_count)
warnings.append(msg)
def check_reference_genome(portal_properties, cursor, force_migration):
""" query database for ncbi_build values, aggregate per study, then validate and report problems """
print('Checking database contents for reference genome information', file=OUTPUT_FILE)
""" Retrieve reference genomes from database """
warnings = []
try:
sql_statement = """
select NCBI_BUILD, count(NCBI_BUILD), CANCER_STUDY_IDENTIFIER
from mutation_event
join mutation on mutation.MUTATION_EVENT_ID = mutation_event.MUTATION_EVENT_ID
join genetic_profile on genetic_profile.GENETIC_PROFILE_ID = mutation.GENETIC_PROFILE_ID
join cancer_study on cancer_study.CANCER_STUDY_ID = genetic_profile.CANCER_STUDY_ID
group by CANCER_STUDY_IDENTIFIER, NCBI_BUILD
"""
cursor.execute(sql_statement)
study_to_ncbi_to_count = {} # {cancer_study_identifier : {ncbi_build : record_count}}
for row in cursor.fetchall():
retrieved_ncbi_build, ref_count, study = row
if study in study_to_ncbi_to_count:
study_to_ncbi_to_count[study][retrieved_ncbi_build] = ref_count
else:
study_to_ncbi_to_count[study] = {retrieved_ncbi_build : ref_count}
for study in study_to_ncbi_to_count:
validate_reference_genome_values_for_study(warnings, study_to_ncbi_to_count[study], study)
except MySQLdb.Error as msg:
print(msg, file=ERROR_FILE)
sys.exit(1)
if warnings:
print_all_check_reference_genome_warnings(warnings, force_migration)
if not force_migration:
sys.exit(1)
def run_migration(db_version, sql_filename, connection, cursor):
"""
Goes through the sql and runs lines based on the version numbers. SQL version should be stated as follows:
##version: 1.0.0
INSERT INTO ...
##version: 1.1.0
CREATE TABLE ...
"""
sql_file = open(sql_filename, 'r')
sql_version = (0, 0, 0)
run_line = False
statements = OrderedDict()
statement = ''
for line in sql_file:
if line.startswith('##'):
sql_version = tuple(map(int, line.split(':')[1].strip().split('.')))
run_line = is_version_larger(sql_version, db_version)
continue
# skip blank lines
if len(line.strip()) < 1:
continue
# skip comments
if line.startswith('#'):
continue
# skip sql comments
if line.startswith('--') and len(line) > 2 and line[2].isspace():
continue
# only execute sql line if the last version seen in the file is greater than the db_version
if run_line:
line = line.strip()
statement = statement + ' ' + line
if line.endswith(';'):
if sql_version not in statements:
statements[sql_version] = [statement]
else:
statements[sql_version].append(statement)
statement = ''
if len(statements) > 0:
run_statements(statements, connection, cursor)
else:
print('Everything up to date, nothing to migrate.', file=OUTPUT_FILE)
def run_statements(statements, connection, cursor):
try:
cursor.execute('SET autocommit=0;')
except MySQLdb.Error as msg:
print(msg, file=ERROR_FILE)
sys.exit(1)
for version, statement_list in statements.items():
print(
'Running statements for version: %s' % ('.'.join(map(str, version))),
file=OUTPUT_FILE)
for statement in statement_list:
print(
'\tExecuting statement: %s' % (statement.strip()),
file=OUTPUT_FILE)
try:
cursor.execute(statement.strip())
except MySQLdb.Error as msg:
print(msg, file=ERROR_FILE)
sys.exit(1)
connection.commit()
def warn_user():
"""Warn the user to back up their database before the script runs."""
response = input(
'WARNING: This script will alter your database! Be sure to back up your data before running.\nContinue running DB migration? (y/n) '
).strip()
while response.lower() != 'y' and response.lower() != 'n':
response = input(
'Did not recognize response.\nContinue running DB migration? (y/n) '
).strip()
if response.lower() == 'n':
sys.exit()
def usage():
print(
'migrate_db.py --properties-file [portal properties file] --sql [sql migration file]',
file=OUTPUT_FILE)
def main():
""" main function to run mysql migration """
parser = argparse.ArgumentParser(description='cBioPortal DB migration script')
parser.add_argument('-y', '--suppress_confirmation', default=False, action='store_true')
parser.add_argument('-p', '--properties-file', type=str, required=True,
help='Path to portal.properties file')
parser.add_argument('-s', '--sql', type=str, required=True,
help='Path to official migration.sql script.')
parser.add_argument('-f', '--force', default=False, action='store_true', help='Force to run database migration')
parser = parser.parse_args()
properties_filename = parser.properties_file
sql_filename = parser.sql
# check existence of properties file and sql file
if not os.path.exists(properties_filename):
print('properties file %s cannot be found' % (properties_filename), file=ERROR_FILE)
usage()
sys.exit(2)
if not os.path.exists(sql_filename):
print('sql file %s cannot be found' % (sql_filename), file=ERROR_FILE)
usage()
sys.exit(2)
# parse properties file
portal_properties = get_portal_properties(properties_filename)
if portal_properties is None:
print('failure reading properties file (%s)' % (properties_filename), file=ERROR_FILE)
sys.exit(1)
# warn user
if not parser.suppress_confirmation:
warn_user()
# set up - get db cursor
connection, cursor = get_db_cursor(portal_properties)
if cursor is None:
print('failure connecting to sql database', file=ERROR_FILE)
sys.exit(1)
# execute - get the database version and run the migration
with contextlib.closing(connection):
db_version = get_db_version(cursor)
if is_version_larger(MULTI_REFERENCE_GENOME_SUPPORT_MIGRATION_STEP, db_version):
#retrieve reference genomes from database
check_reference_genome(portal_properties, cursor, parser.force)
run_migration(db_version, sql_filename, connection, cursor)
print('Finished.', file=OUTPUT_FILE)
# do main
if __name__ == '__main__':
main()

# ============================================================================
# tensorflow/deepmath :: deepmath/deephol/deephol_loop/checkpoint_monitor.py  (license: apache-2.0)
# ============================================================================
"""Monitor the latest model checkpoint and compute embedding stores.
This library is a helper for the loop to monitor checkpoints as they become
available. Once a new checkpoint appears, it is copied over
to a temporary directory, then the embeddings are computed for the theorem
database. Finally, the checkpoint file is updated. Old checkpoints can be
removed in the meantime.
"""
from __future__ import absolute_import
from __future__ import division
# Import Type Annotations
from __future__ import print_function
import os
from tensorflow import gfile
from tensorflow import logging
def get_latest_checkpoint(dirname: str):
"""Get the latest checkpoint in the directory.
Args:
dirname: Name of the directory.
Returns:
Checkpoint prefix string.
"""
chkpt_file = os.path.join(dirname, 'checkpoint')
if not gfile.Exists(chkpt_file):
logging.info('File %s does not exist', chkpt_file)
return None
chkpt_export_folder = os.path.join(dirname, 'export')
if not gfile.Exists(chkpt_export_folder):
logging.info('Eval export folder %s does not exist', chkpt_export_folder)
return None
num_lines = 0
with gfile.Open(chkpt_file) as f:
for l in f:
num_lines += 1
if l.startswith(b'model_checkpoint_path:'):
return os.path.basename(l.strip().split()[1][1:-1])
return None
def set_latest_checkpoint(dirname: str, chkpt: str):
"""Set the latest checkpoint in the checkpoint file.
Args:
dirname: Directory in which the checkpoint is located.
chkpt: Checkpoint prefix.
"""
chkpt_file = os.path.join(dirname, 'checkpoint')
lines = []
if gfile.Exists(chkpt_file):
logging.info('Loading preexisting checkpoint file "%s"', chkpt_file)
with gfile.Open(chkpt_file) as f:
lines = [
l.strip()
for l in f.readlines()
if l.startswith(b'all_model_checkpoint_paths:')
]
else:
logging.info('No preexisting checkpoint file "%s"', chkpt_file)
with gfile.Open(chkpt_file, 'w') as f:
lines = [
'%s\n' % l.strip() for l in ([
'model_checkpoint_path: "%s"' % chkpt,
'all_model_checkpoint_paths: "%s"' % chkpt
] + lines)
]
f.writelines(lines)
def verbose_copy(src, tgt, overwrite=True):
logging.info('Copying "%s" -> "%s"', src, tgt)
gfile.Copy(src, tgt, overwrite=overwrite)
class CheckpointMonitor(object):
"""Class for syncing checkpoints between two directories."""
def __init__(self, model_directory, target_directory, checkpoints_to_keep=2):
self.model_directory = model_directory
self.target_directory = target_directory
self.checkpoints_to_keep = checkpoints_to_keep
def new_checkpoint(self):
logging.info('Looking for checkpoint in "%s"', self.model_directory)
chkpt = get_latest_checkpoint(self.model_directory)
logging.info('Checkpoint: %s', chkpt)
if chkpt != get_latest_checkpoint(self.target_directory):
logging.info('latest checkpoint: %s',
get_latest_checkpoint(self.target_directory))
return chkpt
else:
return None
def copy_latest_checkpoint(self):
"""Copy over the latest checkpoints to the target directory."""
chkpt = get_latest_checkpoint(self.model_directory)
logging.info('Got latest checkpoint: %s', chkpt)
if chkpt is None:
return None
# Check if the evaluation meta graph has been copied.
if self.has_checkpoint() is None:
# Don't copy temp export folders, e.g. 'temp-01234567/saved_model.pb'
export_file = gfile.Glob(
os.path.join(self.model_directory,
'export/best_exporter/[0-9]*/saved_model.pb'))[0]
logging.info('Copying eval export file: %s', ', '.join(export_file))
target_export_dir = os.path.join(
self.target_directory, 'export/best_exporter',
os.path.basename(os.path.dirname(export_file)))
gfile.MakeDirs(target_export_dir)
verbose_copy(
export_file,
os.path.join(target_export_dir, os.path.basename(export_file)))
files = gfile.Glob(os.path.join(self.model_directory, chkpt) + b'.*')
logging.info('Copying files: %s', ', '.join(files))
for fname in files:
verbose_copy(fname,
os.path.join(self.target_directory, os.path.basename(fname)))
return chkpt
def update_latest_checkpoint(self, chkpt):
old_chkpt = get_latest_checkpoint(self.target_directory)
if old_chkpt != chkpt:
set_latest_checkpoint(self.target_directory, chkpt)
def has_checkpoint(self):
return get_latest_checkpoint(self.target_directory)
def get_checkpoint(self):
logging.info('Getting checkpoint for %s', self.target_directory)
chkpt = get_latest_checkpoint(self.target_directory)
if chkpt is None:
return None
else:
return os.path.join(self.target_directory, chkpt)
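
# Minimal usage sketch (directory paths are illustrative):
#   monitor = CheckpointMonitor('/tmp/train_dir', '/tmp/serving_dir')
#   if monitor.new_checkpoint():
#       chkpt = monitor.copy_latest_checkpoint()
#       if chkpt is not None:
#           monitor.update_latest_checkpoint(chkpt)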

# ============================================================================
# brocade-apj/anzsdnhackathon2016 :: www/www/__init__.py  (license: apache-2.0)
# ============================================================================
#! ../env/bin/python
# -*- coding: utf-8 -*-
__author__ = 'Darin Sikanic'
__email__ = '[email protected]'
__version__ = '1.0'
from flask import Flask
from webassets.loaders import PythonLoader as PythonAssetsLoader
from www.controllers.main import main
from www import assets
from www.extensions import (
cache,
assets_env,
mongo
)
from www.controllers.api import api
def create_app(object_name):
"""
    A Flask application factory, as explained here:
    http://flask.pocoo.org/docs/patterns/appfactories/
    Arguments:
        object_name: the python path of the config object,
                     e.g. www.settings.ProdConfig
"""
app = Flask(__name__)
app.config.from_object(object_name)
# initialize the cache
cache.init_app(app)
    # connect to MongoDB
mongo.init_app(app)
# initialise the api
api.init_app(app)
# Import and register the different asset bundles
assets_env.init_app(app)
assets_loader = PythonAssetsLoader(assets)
for name, bundle in assets_loader.load_bundles().items():
assets_env.register(name, bundle)
# register our blueprints
app.register_blueprint(main)
return app
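
# Usage sketch (the config path is illustrative):
#   app = create_app('www.settings.ProdConfig')
#   app.run()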

# ============================================================================
# osamak/wikiproject-med :: accounts/forms.py  (license: agpl-3.0)
# ============================================================================
# -*- coding: utf-8 -*-
from django import forms
from userena.forms import SignupForm
from accounts.models import Profile
class WikithonSignupForm(SignupForm):
name = forms.CharField(label=Profile._meta.get_field('name').verbose_name,
max_length=100)
email = forms.EmailField(label=Profile._meta.get_field('email').verbose_name)
twitter = forms.CharField(label=Profile._meta.get_field('twitter').verbose_name,
max_length=20)
    bio = forms.CharField(label=Profile._meta.get_field('bio').verbose_name, widget=forms.Textarea)
avatar = forms.ImageField(label=Profile._meta.get_field('avatar').verbose_name)
def save(self):
# Save the parent form and get the user
new_user = super(WikithonSignupForm, self).save()
Profile.objects.create(user=new_user,
name=self.cleaned_data['name'],
email=self.cleaned_data['email'],
twitter=self.cleaned_data['twitter'],
bio=self.cleaned_data['bio'],
avatar=self.cleaned_data['avatar'])
return new_user
class EditProfile(forms.ModelForm):
class Meta:
model = Profile
fields = ['name', 'email', 'twitter','bio', 'avatar']
class ResendForm(forms.Form):
email = forms.EmailField()

# ============================================================================
# miketheman/opencomparison :: settings/base.py  (license: mit)
# ============================================================================
# -*- coding: utf-8 -*-
# Django settings
import os.path
from os import environ
from django.template.defaultfilters import slugify
PROJECT_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# serve media through the staticfiles app.
SERVE_MEDIA = DEBUG
INTERNAL_IPS = [
"127.0.0.1",
]
ADMINS = [
("Daniel Greenfeld", "[email protected]"),
]
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = "US/Eastern"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-us"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, "media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = "/media/"
# Absolute path to the directory that holds static files like app media.
# Example: "/home/media/media.lawrence.com/apps/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, "collected_static")
# URL that handles the static files like app media.
# Example: "http://media.lawrence.com"
STATIC_URL = "/static/"
# Additional directories which hold static files
STATICFILES_DIRS = [
os.path.join(PROJECT_ROOT, "static"),
]
# Use the default admin media prefix, which is...
#ADMIN_MEDIA_PREFIX = "/static/admin/"
# List of callables that know how to import templates from various sources.
from memcacheify import memcacheify
CACHES = memcacheify()
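# memcacheify() assembles CACHES from memcached-related environment variables
# (as on Heroku); per its documentation it falls back to a local-memory cache
# when none are set.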
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"reversion.middleware.RevisionMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"pagination.middleware.PaginationMiddleware",
"django_sorting.middleware.SortingMiddleware",
)
TEMPLATE_DIRS = [
os.path.join(PROJECT_ROOT, "templates"),
]
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.static",
"package.context_processors.used_packages_list",
"grid.context_processors.grid_headers",
"core.context_processors.current_path",
"profiles.context_processors.lazy_profile",
"core.context_processors.core_values",
]
PROJECT_APPS = [
"grid",
'core',
"homepage",
"package",
"profiles",
"apiv1",
"feeds",
"searchv2",
"importer",
]
PREREQ_APPS = [
# Django
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"django.contrib.humanize",
"django.contrib.staticfiles",
# external
"uni_form",
"pagination",
"django_extensions",
"south",
"tastypie",
"reversion",
"django_sorting",
#"django_modeler",
# Celery task queue:
#'djcelery',
'social_auth',
'floppyforms',
'rest_framework',
]
INSTALLED_APPS = PREREQ_APPS + PROJECT_APPS
FIXTURE_DIRS = [
os.path.join(PROJECT_ROOT, "fixtures"),
]
MESSAGE_STORAGE = "django.contrib.messages.storage.session.SessionStorage"
ABSOLUTE_URL_OVERRIDES = {
"auth.user": lambda o: "/profiles/profile/%s/" % o.username,
}
AUTH_PROFILE_MODULE = "profiles.Profile"
LOGIN_URL = "/login/github/"
LOGIN_REDIRECT_URLNAME = "home"
EMAIL_CONFIRMATION_DAYS = 2
EMAIL_DEBUG = DEBUG
CACHE_TIMEOUT = 60 * 60
ROOT_URLCONF = "urls"
SECRET_KEY = "CHANGEME"
URCHIN_ID = ""
DEFAULT_FROM_EMAIL = 'Django Packages <[email protected]>'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_SUBJECT_PREFIX = '[Django Packages] '
try:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_PASSWORD = os.environ['SENDGRID_PASSWORD']
EMAIL_HOST_USER = os.environ['SENDGRID_USERNAME']
EMAIL_PORT = 587
SERVER_EMAIL = '[email protected]'
EMAIL_USE_TLS = True
DEBUG = False
except Exception as e:
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_SUBJECT_PREFIX = '[Cartwheel Web]'
DEBUG_TOOLBAR_CONFIG = {
"INTERCEPT_REDIRECTS": False,
}
if DEBUG:
CACHE_BACKEND = 'dummy://'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
PACKAGINATOR_HELP_TEXT = {
"REPO_URL": "Enter your project repo hosting URL here.<br />Example: https://github.com/opencomparison/opencomparison",
"PYPI_URL": "<strong>Leave this blank if this package does not have a PyPI release.</strong><br />What PyPI uses to index your package. <br />Example: django-uni-form",
}
PACKAGINATOR_SEARCH_PREFIX = "django"
# if set to False any auth user can add/modify packages
# only django admins can delete
RESTRICT_PACKAGE_EDITORS = True
# if set to False any auth user can add/modify grids
# only django admins can delete
RESTRICT_GRID_EDITORS = True
# package extenders are dicts that can include:
# form
# model
# grid_items
# package_displays
PACKAGE_EXTENDERS = []
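# A hypothetical extender entry, using the keys listed above (the names are
# illustrative, not part of this project):
# PACKAGE_EXTENDERS = [{
#     'form': MyPackageForm,
#     'model': MyPackageModel,
#     'grid_items': {},
#     'package_displays': {},
# }]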
CELERYD_TASK_TIME_LIMIT = 300
LAUNCHPAD_ACTIVE = False
LOCAL_INSTALLED_APPS = []
SUPPORTED_REPO = []
########################## Site specific stuff
FRAMEWORK_TITLE = "Django"
SITE_TITLE = "Django Packages"
# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
try:
from local_settings import *
except ImportError:
pass
if LOCAL_INSTALLED_APPS:
INSTALLED_APPS.extend(LOCAL_INSTALLED_APPS)
SUPPORTED_REPO.extend(["bitbucket", "github"])
if LAUNCHPAD_ACTIVE:
SUPPORTED_REPO += ["launchpad"]
AUTHENTICATION_BACKENDS = (
'social_auth.backends.contrib.github.GithubBackend',
'django.contrib.auth.backends.ModelBackend',
)
GITHUB_API_SECRET = environ.get('GITHUB_API_SECRET')
GITHUB_APP_ID = environ.get('GITHUB_APP_ID')
GITHUB_USERNAME = environ.get('GITHUB_USERNAME')
GITHUB_PASSWORD = environ.get('GITHUB_PASSWORD')
SOCIAL_AUTH_ENABLED_BACKENDS = ('github',)  # trailing comma: one-element tuple, not a plain string
SOCIAL_AUTH_COMPLETE_URL_NAME = 'socialauth_complete'
SOCIAL_AUTH_ASSOCIATE_URL_NAME = 'associate_complete'
SOCIAL_AUTH_DEFAULT_USERNAME = lambda u: slugify(u)
SOCIAL_AUTH_EXTRA_DATA = False
SOCIAL_AUTH_CHANGE_SIGNAL_ONLY = True
SOCIAL_AUTH_REDIRECT_IS_HTTPS = True
LOGIN_REDIRECT_URL = '/'
# associate user via email
#SOCIAL_AUTH_ASSOCIATE_BY_MAIL = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "oc", # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
},
}
WSGI_APPLICATION = 'wsgi.application'
if DEBUG:
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar',)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
ADMIN_URL_BASE = environ.get('ADMIN_URL_BASE', r"^admin/")
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s",
'datefmt': "%d/%b/%Y %H:%M:%S"
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logutils.colorize.ColorizingStreamHandler',
'formatter': 'standard'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'include_html': True,
},
},
'loggers': {
'django': {
'handlers': ['console', ],
'propagate': True,
'level': 'ERROR',
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'': {
'handlers': ['console', ],
'level': os.environ.get('DEBUG_LEVEL', 'ERROR'),
},
}
}
URL_REGEX_GITHUB = r'(?:http|https|git)://github.com/[^/]*/([^/]*)/{0,1}'
########### redis setup
import redis
from rq import Worker, Queue, Connection
########### end redis setup

# ============================================================================
# lucyparsons/OpenOversight :: OpenOversight/migrations/versions/8ce7926aa132_.py  (license: gpl-3.0)
# ============================================================================
"""empty message
Revision ID: 8ce7926aa132
Revises: cfc5f3fd5efe
Create Date: 2018-06-07 18:53:47.656557
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '8ce7926aa132'
down_revision = 'cfc5f3fd5efe'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(u'notes_officer_id_fkey', 'notes', type_='foreignkey')
op.drop_constraint(u'notes_creator_id_fkey', 'notes', type_='foreignkey')
op.create_foreign_key(None, 'notes', 'officers', ['officer_id'], ['id'], ondelete='CASCADE')
op.create_foreign_key(None, 'notes', 'users', ['creator_id'], ['id'], ondelete='SET NULL')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'notes', type_='foreignkey')
op.drop_constraint(None, 'notes', type_='foreignkey')
op.create_foreign_key(u'notes_creator_id_fkey', 'notes', 'users', ['creator_id'], ['id'])
op.create_foreign_key(u'notes_officer_id_fkey', 'notes', 'officers', ['officer_id'], ['id'])
# ### end Alembic commands ###

# ============================================================================
# pabloli/MinimalModelAlg :: running.py  (license: mit)
# ============================================================================
#%%
import networkx as nx
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import numpy as np
import textwrap
import pydot
import random, string
def drawGraph(data2Draw,current,trues,pos):
if drawActive == False:
return ""
text = str("Now we delete:"+textwrap.fill(str(current), 40)+"\n\n\n\nTrues: "+textwrap.fill(str(trues), 40))
#print (text)
#return
width = 10
height = 15
plt.figure(figsize=(width, height))
dx = nx.DiGraph()
dx = nx.nx.complete_multipartite_graph(1,2,3,4)
dx.add_nodes_from(data2Draw['members'])
dx.add_edges_from(data2Draw['edges'])
if pos == 0:
pos = nx.random_layout(dx,2,4)
nx.draw_networkx_labels(dx, pos, data2Draw['labels'], font_size=16)
nx.draw_networkx_nodes(dx, pos, nodelist=data2Draw['members'], node_color='r',node_size=700)
nx.draw_networkx_nodes(
dx, pos, nodelist=data2Draw['name'], node_color='y', node_shape='s', node_size=1300, linewidths=10)
nx.draw_networkx_edges(
dx, pos, edgelist=data2Draw['edges'], edge_color='b', arrows=True)
red_patch = mpatches.Patch(color='red', label='Members')
yellow_patch = mpatches.Patch(color='y', label='Lamdas')
plt.legend(handles=[red_patch,yellow_patch], loc=1)
plt.annotate(text,xy=(1.05,0.5),xycoords='axes fraction', fontsize=14)
plt.show()
return pos
def mergeDict(dictA, dictB):
for m in dictB.keys():
if m not in dictA.keys():
dictA[m] = dictB[m]
dictA[m] = (dictA[m])
return dictA
def mergeData(data1, data2):
allData = {}
allData['labels'] = mergeDict(data1['labels'], data2['labels'])
allData['edges'] = data1['edges'] | data2['edges']
allData['members'] = data1['members'] | data2['members']
allData['name'] = list(set().union(data1['name'],[data2['name']]))
return allData
def getData(sentence):
leftMembers = sentence['leftMembers']
name = sentence['name']
rightMembers = sentence['rightMembers']
labels = {}
edges = []
for m in leftMembers:
labels[m] = m
edges.append((m, name))
for m in rightMembers:
labels[m] = m
edges.append((name, m))
labels[name] = name
# if len (rightMembers) == 0:
# leftMembers = []
return ({'labels': (labels), 'edges': set(edges),
'members': set(leftMembers) | set(rightMembers), 'name': name})
def drawGraph2(data2Draw):
graph = pydot.Dot(graph_type='graph')
pyNodes = {}
for n in set(data2Draw['members']):
pyNodes[n] = pydot.Node(n, fillcolor='r')
for n in set(data2Draw['names']):
pyNodes[n] = pydot.Node(n, fillcolor="b", shape='box')
for edge in set(data2Draw['edges']):
graph.add_edge(pydot.Edge(edge[0], edge[1], label='a', color='r'))
graph.write_png('ojj.png')
def findIndependentMembers(data, val):
if val == 0:
independentMember = {key: 0 for key in set(data['name'])}
else:
independentMember = {key: 0 for key in set(data['members'])}
for k in data['edges']:
if k[1] in independentMember:
independentMember[k[1]] += 1
return [l for l in independentMember.keys() if independentMember[l]==0]
def updateGraph(dataToProcess, trues, pos):
dataToProcess, lamdasDeleted = removeLambda(dataToProcess, trues, pos)
dataToProcess, membersDeleted = removeMember(dataToProcess, trues, pos)
return dataToProcess, membersDeleted
def removeLambda(dataToProcess, trues, pos):
    toDelete = findIndependentMembers(dataToProcess, 0)
for e in [l for l in dataToProcess['edges'] if l[0] in toDelete]:
dataToProcess['edges'].remove(e)
for e in toDelete:
del dataToProcess['labels'][e]
dataToProcess['name'].remove(e)
drawGraph(dataToProcess,toDelete,trues, pos)
return dataToProcess, toDelete
def removeMember(dataToProcess,trues,pos):
    toDelete = findIndependentMembers(dataToProcess, 1)
for e in [l for l in dataToProcess['edges'] if l[0] in toDelete]:
dataToProcess['edges'].remove(e)
for e in toDelete:
del dataToProcess['labels'][e]
dataToProcess['members'].remove(e)
drawGraph(dataToProcess,toDelete,trues , pos)
return dataToProcess, toDelete
def updateSentencesAndGetTrues(sentences, membersToDelete):
    retValue = []
    for m in membersToDelete:
        remaining = []
        for s in sentences:
            if [m] == s['rightMembers']:
                retValue.append(m)
                s['edges'] = []
                remaining.append(s)
                continue
            if m in s['leftMembers']:
                # a deleted member in the premises invalidates the whole
                # sentence, so drop it from the remaining list
                continue
            if m in s['rightMembers']:
                s['rightMembers'].remove(m)
            remaining.append(s)
        sentences = remaining
    return set(retValue), sentences
def convertJson2Theory(sentences):
[print (l['name']+':'+str(l['leftMembers'])+'->'+str(l['rightMembers'])) for l in sentences]
def generateRandomSentences(num, length, members, size):
    retValue = []
    for i in range(num):
        val = {}
        lengthR = lengthL = 0
        # keep drawing until the sentence has at least one member on either side
        while lengthL == 0 and lengthR == 0:
            lengthR = int(random_num(length))
            lengthL = int(random_num(length))
val = {}
val['name']= 'L' +str(i)
val['leftMembers']=[]
for j in range(lengthL):
val['leftMembers'].append(randomword(members,size))
val['rightMembers']=[]
for j in range(lengthR):
val['rightMembers'].append(randomword(members,size))
retValue.append(val)
return retValue
def random_num(length):
return random.randint(0, length)
def randomword(members,length):
return ''.join(random.choice(string.ascii_lowercase[:members]) for i in range(length))
def readFile(filename):
f = open(filename,'r').read().replace(':[',';').replace(']->[',';').replace(']','').replace("'",'').replace(" ",'')
allS = f.split('\n')
listSentences = []
for i in allS:
ele = {}
t = i.split(';')
ele ['name']=t[0]
if t[1] == '':
ele['leftMembers'] = []
else:
ele['leftMembers'] = t[1].split(',')
if t[2] == '':
ele['rightMembers'] = []
else:
ele['rightMembers'] = t[2].split(',')
listSentences.append(ele)
return listSentences
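# readFile expects one sentence per line in the form (illustrative):
#   L1:['a','b']->['c']
# which the replace() chain above normalizes to "L1;a,b;c" before splitting.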
def insert_newlines(string, every=64):
return '\n'.join(string[i:i+every] for i in range(0, len(string), every))
def main(theory):
convertJson2Theory(theory)
trues = []
deletedMembers = []
data = {'name':[],'edges':set(),'members':set(),'labels':{}}
for oS in theory:
data = mergeData(data, getData(oS))
sentences = theory
pos = ""
pos = drawGraph(data,deletedMembers,trues, 0)
#drawGraph2(data)
lastState = []
curState = [len(data['name']),len(data['edges']),len(data['members'])]
while lastState != curState:
lastState = curState
data,membersDeleted = updateGraph(data,trues, pos)
curT, sentences = updateSentencesAndGetTrues(sentences, membersDeleted)
curState = [len(data['name']),len(data['edges']),len(data['members'])]
if len(curT)>0:
trues+=curT
trues+=data['members']
drawGraph(data,[''],trues,pos)
plt.clf()
print ('Trues:')
print (trues)
print ('--------Finished--------')
#%%
theory = [
{'leftMembers': [], 'name': ('L1'), 'rightMembers': ['a', 'b']},
{'leftMembers': ['b'], 'name': ('L2'), 'rightMembers': ['a']},
{'leftMembers': [], 'name': ('L3'), 'rightMembers': ['a', 'c']},
{'leftMembers': ['a'], 'name': ('L4'), 'rightMembers': ['d', 'e', 'f']},
{'leftMembers': ['e'], 'name': ('L5'), 'rightMembers': ['f']},
{'leftMembers': ['f'], 'name': ('L6'), 'rightMembers': ['e']},
]
drawActive = True
main(theory)
#%%
drawActive = True
theory = readFile('example 1.txt')
main(theory)
#%%
drawActive = False
for i in range(100):
print ('-------------------------')
print ("Running N-", i)
main(generateRandomSentences(10,2,15,1))

# ============================================================================
# moses-rolston/err :: errbot/backends/campfire.py  (license: gpl-3.0)
# ============================================================================
import logging
import sys
try:
import pyfire
except ImportError:
logging.exception("Could not start the campfire backend")
logging.fatal("""
If you intend to use the campfire backend please install pyfire:
pip install pyfire
""")
sys.exit(-1)
from errbot.backends.base import Message, build_message, Identifier
from errbot.errBot import ErrBot
from threading import Condition
class CampfireConnection(pyfire.Campfire):
rooms = {} # keep track of joined room so we can send messages directly to them
def join_room(self, name, msg_callback, error_callback):
room = self.get_room_by_name(name)
room.join()
stream = room.get_stream(error_callback=error_callback)
stream.attach(msg_callback).start()
self.rooms[name] = (room, stream)
ENCODING_INPUT = sys.stdin.encoding
class CampfireBackend(ErrBot):
exit_lock = Condition()
def __init__(self, config):
super(CampfireBackend, self).__init__(config)
identity = config.BOT_IDENTITY
self.conn = None
self.subdomain = identity['subdomain']
self.username = identity['username']
self.password = identity['password']
        if not hasattr(config, 'CHATROOM_PRESENCE') or len(config.CHATROOM_PRESENCE) < 1:
            raise Exception('Your bot needs to join at least one room, please set'
                            ' CHATROOM_PRESENCE with at least a room in your config')
        self.chatroom = config.CHATROOM_PRESENCE[0]
self.ssl = identity['ssl'] if 'ssl' in identity else True
self.jid = None
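        # Expected BOT_IDENTITY shape in the errbot config (values are
        # illustrative):
        #   BOT_IDENTITY = {'subdomain': 'mycompany', 'username': 'bot',
        #                   'password': 'secret', 'ssl': True}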
def send_message(self, mess):
super(CampfireBackend, self).send_message(mess)
# we only reply to rooms in reality in campfire so we need to find one or a default one at least
room_name = mess.to.domain
if not room_name:
room_name = mess.frm.domain
if room_name in self.conn.rooms:
room = self.conn.rooms[room_name][0]
room.speak(mess.body) # Basic text support for the moment
else:
logging.info(
"Attempted to send a message to a not connected room yet Room %s : %s" % (room_name, mess.body))
def serve_forever(self):
self.exit_lock.acquire()
self.connect() # be sure we are "connected" before the first command
        self.connect_callback()  # notify that the connection occurred
try:
logging.info("Campfire connected.")
self.exit_lock.wait()
except KeyboardInterrupt:
pass
finally:
self.exit_lock.release()
self.disconnect_callback()
self.shutdown()
def connect(self):
if not self.conn:
self.conn = CampfireConnection(self.subdomain, self.username, self.password, self.ssl)
self.jid = Identifier(node=self.username,
domain=self.conn.get_room_by_name(self.chatroom).name,
resource=self.username)
# put us by default in the first room
# resource emulates the XMPP behavior in chatrooms
return self.conn
def build_message(self, text):
return Message(text, type_='groupchat') # it is always a groupchat in campfire
def shutdown(self):
super(CampfireBackend, self).shutdown()
def msg_callback(self, message):
logging.debug('Incoming message [%s]' % message)
user = ""
if message.user:
user = message.user.name
if message.is_text():
msg = Message(message.body, type_='groupchat') # it is always a groupchat in campfire
msg.frm = user + '@' + message.room.get_data()['name'] + '/' + user
msg.to = self.jid # assume it is for me
self.callback_message(msg)
def error_callback(self, error, room):
logging.error("Stream STOPPED due to ERROR: %s in room %s" % (error, room))
self.exit_lock.acquire()
self.exit_lock.notify()
self.exit_lock.release()
def join_room(self, room, username=None, password=None):
self.conn.join_room(room, self.msg_callback, self.error_callback)
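    # NOTE: the build_message() below redefines the one declared earlier in
    # this class; being the later definition, it is the one Python keeps.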
def build_message(self, text):
return build_message(text, Message)
def send_simple_reply(self, mess, text, private=False):
"""Total hack to avoid stripping of rooms"""
self.send_message(self.build_reply(mess, text, True))
@property
def mode(self):
return 'campfire'
def groupchat_reply_format(self):
return '@{0} {1}'

# ============================================================================
# datawire/quark :: quarkc/test/ffi/expected/py/signatures/generics/ccc/__init__.py  (license: apache-2.0)
# ============================================================================
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from builtins import str as unicode
from quark_runtime import *
_lazyImport.plug("generics.ccc")
import quark.reflect
class TLSContextInitializer(_QObject):
def _init(self):
pass
def __init__(self): self._init()
def getValue(self):
return _cast(None, lambda: Context)
def _getClass(self):
return u"generics.ccc.TLSContextInitializer"
def _getField(self, name):
return None
def _setField(self, name, value):
pass
TLSContextInitializer.generics_ccc_TLSContextInitializer_ref = None
class Context(_QObject):
def _init(self):
self.parent = None
def __init__(self, parent):
self._init()
@staticmethod
def current():
return _cast(None, lambda: Context)
@staticmethod
def global_():
return _cast(None, lambda: Context)
def _getClass(self):
return u"generics.ccc.Context"
def _getField(self, name):
if ((name) == (u"_global")):
return Context._global
if ((name) == (u"_current")):
return Context._current
if ((name) == (u"parent")):
return (self).parent
return None
def _setField(self, name, value):
if ((name) == (u"_global")):
Context._global = _cast(value, lambda: Context)
if ((name) == (u"_current")):
Context._current = _cast(value, lambda: TLS)
if ((name) == (u"parent")):
(self).parent = _cast(value, lambda: Context)
Context._global = None
Context._current = None
Context.generics_ccc_Context_ref = None
Context.generics_ccc_TLS_generics_ccc_Context__ref = None
class TLSInitializer(object):
def getValue(self):
raise NotImplementedError('`TLSInitializer.getValue` is an abstract method')
TLSInitializer.generics_ccc_TLSInitializer_quark_Object__ref = None
class TLS(_QObject):
def _init(self):
self._value = None
def __init__(self, initializer):
self._init()
def getValue(self):
return _cast(None, lambda: T)
def _getClass(self):
return u"generics.ccc.TLS<quark.Object>"
def _getField(self, name):
if ((name) == (u"_value")):
return (self)._value
return None
def _setField(self, name, value):
if ((name) == (u"_value")):
(self)._value = _cast(value, lambda: T)
def _lazy_import_quark_ffi_signatures_md():
import quark_ffi_signatures_md
globals().update(locals())
_lazyImport("import quark_ffi_signatures_md", _lazy_import_quark_ffi_signatures_md)
_lazyImport.pump("generics.ccc")

# ============================================================================
# rodo/django-perf :: foo/offset/management/commands/keypage_run.py  (license: gpl-3.0)
# ============================================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013,2014 Rodolphe Quiédeville <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import imp
from django.core.management.base import BaseCommand
from optparse import make_option
from django.core.paginator import Paginator
from foo.offset.models import Log
from foo.july.models import BigBook
import logging
from datetime import datetime
class Command(BaseCommand):
help = 'Import datas'
def handle(self, *args, **options):
"""
Use keyset pagination
"""
log = Log.objects.create(name='keypage',
start=datetime.now(),
stop=datetime.now())
nb = 0
keyid = 0
while True:
queryset = BigBook.objects.filter(serie=3)
books = queryset.filter(keyid__gt=keyid).order_by('keyid')[:250]
for book in books:
keyid = book.keyid
# do want you want here
if book.nbpages > 500:
nb = nb + 1
if len(books) < 250:
break
log.stop = datetime.now()
log.save()
print "keypage", log.stop - log.start, nb

# ============================================================================
# paulscottrobson/wallpaper-one :: software/minol - arithmetic/generate.py  (license: mit)
# ============================================================================
#
# Generate math tests
#
import random
random.seed(412)
monitor = [ord(x) for x in open("..\monitor_rom\monitor.bin","rb").read(-1)]
mvars = { "C":10,"D":20,"Z":33 }
def rnd(maxval):
n = maxval+1
term = ""
while n >= maxval:
n = random.randrange(0,255)
term = str(n)
if random.randrange(0,3) == 0:
k = mvars.keys()
term = k[random.randrange(0,len(k))]
n = mvars[term]
if random.randrange(0,5) == 0:
n = random.randrange(32,96)
term = "'"+chr(n)+"'"
if n==34 or n == 0x27 or n == ord("\\"):
n = maxval+1
if random.randrange(0,5) == 0:
h = random.randrange(0,8)
h = [h,str(h)]
if random.randrange(0,8) == 0:
h = rnd(8)
l = rnd(256)
n = monitor[h[0]*256+l[0]]
term = "({0},{1})".format(h[1],l[1])
return [n,term]
ptr = 0x9300
while ptr < 0xFF00:
n1 = rnd(255)
result = n1[0]
expr = n1[1]
for parts in range(0,random.randrange(2,7)):
op = random.randrange(0,4)
if op < 2:
n1 = rnd(255)
result = result + (n1[0] if op == 0 else -n1[0])
result = (result + 256) & 255
expr = expr + ("+" if op == 0 else "-") + str(n1[1])
if op == 2 and result < 50 and result > 0:
n1 = rnd(int(255/result))
result = result * n1[0]
expr = expr + "*" + n1[1]
if op == 3 and result > 10:
n1 = rnd(int(result/2))
if n1[0] > 0:
result = int(result / n1[0])
expr = expr + "/" + n1[1]
print(' db "{0}",0,{1}'.format(expr,result))
ptr = ptr + len(expr)+2
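# Each emitted line is the expression text, a NUL terminator, then the
# expected result byte, e.g. (illustrative):
#   db "12+C",0,22
# Operators are applied strictly left to right while accumulating `result`,
# with + and - wrapped modulo 256.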

# ============================================================================
# gem/sidd :: sidd/ms/node.py  (license: agpl-3.0)
# ============================================================================
# Copyright (c) 2011-2013, ImageCat Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Module class for statistic node handling
"""
from copy import deepcopy
from utils.xml import get_node_attrib
from sidd.constants import logAPICall
from sidd.ms.exceptions import StatisticNodeError
class StatisticModifier(object):
def __init__(self, name='', level=None):
self.name = name
self.level = level
self.values = {}
@property
def is_default(self):
return len(self.values) == 1 and self.values.keys()[0] is None
def iteritems(self):
return self.values.iteritems()
def keys(self):
return self.values.keys()
def value(self, key):
if self.values.has_key(key):
return self.values[key]
else:
return ''
def calculate_weights(self, count):
for k, v in self.values.iteritems():
self.values[k] = v * 100.0 / count
def update(self, key):
if not self.values.has_key(key):
self.values[key]=1
else:
self.values[key]+= 1
def __str__(self):
outstr=[]
outstr.append('(name: "%s" ' % self.name)
for k, v in self.values.iteritems():
outstr.append("%s: %2.1f%% " % (k, v))
outstr.append(')')
# return joined string
return ''.join(outstr)
@logAPICall
def to_xml(self, pretty=False):
""" generate XML representation of current node """
outstr = []
pad = ''
line_break=''
outstr.append('%s <modifier name="%s" level="%s">%s' % (pad, self.name, self.level, line_break))
for k, v in self.values.iteritems():
outstr.append('%s <modifiervalue value="%s" weight="%s" />%s'
% (pad, k, v, line_break))
outstr.append('%s </modifier>%s' % (pad, line_break))
# return joined string
return ''.join(outstr)
@logAPICall
def from_xml(self, xmlnode):
self.name = get_node_attrib(xmlnode, 'name')
self.level = get_node_attrib(xmlnode, 'level')
for mod_node in xmlnode.findall('modifiervalue'):
if mod_node.attrib['value'] == 'None':
val = None
else:
val = get_node_attrib(mod_node, 'value')
self.values[val]=float(get_node_attrib(mod_node, 'weight'))
class StatisticNode (object):
"""
A statistic node forms part of a statistic tree.
Each node stores structural as well as tree related information
structural related information
-value: taxonomy value representing a structural type
-count: count of values included
-weight: count of current node as percentage
of count of parent
    -modifiers: these are less important features on the structural type
this is used mainly to reduce the size of the statistical tree
tree related information
-level: level of node in a statistic tree
-children: collection of child StatisticNode
"""
# static members
###########################
# additional values to be attached to the node
AverageSize, UnitCost = range(2)
# constructor / destructor
###########################
def __init__(self, parent, name='', value='', level=0,
is_default=False, is_skipped=False):
""" constructor """
self.parent=parent
self.name=name
self.value=value
self.additional = {}
self.label_additional = ["avg_size", "unit_cost"]
self.is_skipped=is_skipped
self.is_default=is_default
self.count=0
self.weight=0.0
self.modifiers=[]
self.level=level
self.children=[]
def __del__(self):
""" destructor """
del self.parent
del self.name
del self.value
del self.is_default
del self.count
del self.modifiers
del self.level
for child in self.children:
del child
# property methods
###########################
@property
def is_leaf(self):
""" is leaf if does not have children """
return len(self.children) == 0
@property
def is_valid(self):
return round(sum([c.weight for c in self.children]),0) == 100
@property
def max_level(self):
""" get max level under current node """
level = self.level
for child in self.children:
            if child.max_level > level:
                level = child.max_level
return level
@property
def clone(self):
""" get a cloned copy of the node and all its children """
return deepcopy(self)
@property
def ancestor_names(self):
if self.parent is not None:
names = self.parent.ancestor_names
names.append(self.parent.name)
return names
else:
return []
@property
def descendant_names(self):
if self.is_leaf:
return []
else:
names = {}
for child in self.children:
names[child.name] = 1
for name in child.descendant_names:
names[name] = 1
return names.keys()
# serialize / deserialize
###########################
def __str__(self):
"""
get string representation of node.
it works by recursively retrieving string from its children nodes
"""
# use list to hold strings for each line and concatenate at the end of
# the function to avoid creating throw-away strings objects
outstr = []
# add space to make it pretty
outstr.append(' ' * self.level)
# add current node
outstr.append('%s:[%s=%s (%s, %s, %2.1f%% - %d)]'
% (self.level, self.name, self.value, self.is_default,
self.is_skipped, self.weight, self.count))
# add modifiers for current node
for mod in self.modifiers:
outstr.append(str(mod))
# show additional data in leaf node
if self.is_leaf:
outstr.append(str(self.additional))
outstr.append('\n')
# add children
for child in self.children:
outstr.append(str(child))
# return joined string
return ''.join(outstr)
@logAPICall
def to_xml(self, pretty=False):
""" generate XML representation of current node """
outstr = []
pad = ''
line_break=''
if (pretty):
for i in range(self.level):
pad += ' '
line_break='\n'
outstr.append('%s<node attribute="%s" value="%s" level="%d" is_default="%s" is_skipped="%s" weight="%f">%s'
% (pad, self.name, self.value, self.level, self.is_default,
self.is_skipped, self.weight, line_break))
for key,value in self.additional.iteritems():
            outstr.append('%s  <additional %s="%s" />%s' % (pad, self.label_additional[key], value, line_break))
outstr.append('%s <modifiers>%s' % (pad, line_break))
for mod in self.modifiers:
outstr.append(mod.to_xml(pretty))
outstr.append('%s </modifiers>%s' % (pad, line_break))
if not self.is_leaf:
outstr.append('%s <children>%s' % (pad, line_break))
for child in self.children:
outstr.append(child.to_xml(pretty))
outstr.append('%s </children>%s' % (pad, line_break))
outstr.append('%s </node>%s' % (pad, line_break))
return ''.join(outstr)
@logAPICall
def from_xml(self, xmlnode):
""" construct node and children from XML """
self.name = get_node_attrib(xmlnode, 'attribute')
self.value = get_node_attrib(xmlnode, 'value')
self.level = int(get_node_attrib(xmlnode, 'level'))
self.weight = float(get_node_attrib(xmlnode, 'weight'))
self.count = self.weight
self.is_default = str(get_node_attrib(xmlnode, 'is_default')).upper()=='TRUE'
self.is_skipped = str(get_node_attrib(xmlnode, 'is_skipped')).upper()=='TRUE'
for add_node in xmlnode.findall('additional'):
for idx, label in enumerate(self.label_additional):
add_value = get_node_attrib(add_node, label)
if add_value != '':
self.additional[idx]=add_value
for mod_node in xmlnode.findall('modifiers/modifier'):
mod = StatisticModifier()
mod.from_xml(mod_node)
self.modifiers.append(mod)
for childnode in xmlnode.findall('children/node'):
logAPICall.log('created new child with xmlnode %s' % childnode, logAPICall.DEBUG_L2)
node = StatisticNode(self)
node.from_xml(childnode)
self.children.append(node)
# readonly methods
###########################
@logAPICall
def leaves(self, taxonomy,
with_modifier=True, order_attributes=False,
parent_nodes=None, parent_weight = 1.0):
if parent_nodes is None:
parent_nodes = []
branch_nodes = {"":[]}
branch_weights = {"":1.0}
# generate modifier branch if requested
if with_modifier:
for mod in self.modifiers:
                # each modifier expands the branch set to the cross
                # product (existing branches x modifier values)
cur_branch_nodes = {}
cur_branch_weights = {}
for mod_val, mod_weight in mod.iteritems(): # loop through modifiers.values
mod_weight /= 100.0
for branch, value in branch_nodes.iteritems(): # loop through existing branches
branch_weight = branch_weights[branch]
                    # the cases that can occur are:
# 1. modifier value is not None
# append modifier value and update weight
# 2. modifier value is None
# No new branch is created in this case. the weight of the branch is
# updated with modifier value
if ( mod_val is not None ): # case 1
if branch != "": # case 1.1
branch_key = branch + "|"+ mod_val
cur_branch_nodes[branch_key] = []
cur_branch_nodes[branch_key].append(mod_val)
cur_branch_weights[branch_key] = branch_weight * mod_weight
else: # case 1.2
cur_branch_nodes[mod_val] = []
cur_branch_nodes[mod_val].append(mod_val)
cur_branch_weights[mod_val] = branch_weight * mod_weight
else: # case 2
cur_branch_weights[branch] = branch_weight * mod_weight
branch_nodes = cur_branch_nodes
branch_weights = cur_branch_weights
for branch_key, nodes in branch_nodes.iteritems():
# root node (level=0) does not have taxonomy value attached
# but could still have modifier attached
added = 0
if self.level > 0:
if str(self.value) != "None":
parent_nodes.append(self.value)
added +=1
# modifier values
for node in nodes:
parent_nodes.append(node)
added +=1
weight = branch_weights[branch_key]
if (self.is_leaf):
leaf_value = taxonomy.to_string(parent_nodes, order_attributes)
yield leaf_value, parent_weight * self.weight / 100.0 * weight, self
for child in self.children:
for l in child.leaves(taxonomy, with_modifier, order_attributes,
parent_nodes, parent_weight * self.weight / 100.0 * weight):
yield l
# remove nodes
for i in range(added):
parent_nodes.pop()
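    # Sketch of the modifier convolution above (hypothetical numbers):
    # a node with weight 50% and one modifier {A: 80%, None: 20%} yields
    # two branches, A at 0.5 * 0.8 = 0.4 and the unmodified branch at
    # 0.5 * 0.2 = 0.1 of parent_weight; each leaf string is produced by
    # taxonomy.to_string on the accumulated parent_nodes.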
# weight related methods
###########################
@logAPICall
def set_child_weights(self, weights):
"""
set weights for all children nodes
throws exception
            if weights do not add up to 100
            if the number of children does not equal the number of weights
"""
# assert valid condition
if sum(weights) != 100:
            raise StatisticNodeError('weights must add up to 100')
if len(weights) != len(self.children):
raise StatisticNodeError('number of weights does not equal number of children')
# set weight
for child, w in map(None, self.children, weights):
child.weight = w
@logAPICall
def calculate_weights(self):
"""
convert count into percentage relative to sum of count for all siblings
"""
# calculate weight for children based on count
if self.parent is not None:
if (self.parent.count != 0):
self.weight = self.count * 100.0 / self.parent.count
else:
self.weight = 0
else:
self.weight = 100
# calculate weight for attached modifiers based on count
for mod in self.modifiers:
mod.calculate_weights(self.count)
if self.is_leaf:
# update additional values
total_size = self.count # set to default for unitCost calculation
if self.additional.has_key(self.AverageSize):
total_size = self.additional[self.AverageSize]
self.additional[self.AverageSize] = float(total_size) / self.count
if self.additional.has_key(self.UnitCost):
# total_size defaults to count,
# so should not break even if AverageSize is not set
self.additional[self.UnitCost] /= total_size
        # recursively traverse down to all children
# will be skipped by leaf nodes
for child in self.children:
child.calculate_weights()
@logAPICall
def balance_weights(self):
"""
adjust its weights to make sure it adds up to 100%
"""
sum_weights = sum([child.weight for child in self.children])
total_children = len(self.children)
adj_factor = sum_weights / 100
for child in self.children:
if adj_factor == 0:
child.weight = 100.0 / total_children
else:
child.weight = child.weight / adj_factor
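    # Illustrative numbers: child weights [30, 30, 60] sum to 120, so
    # adj_factor = 1.2 and the result is [25.0, 25.0, 50.0]; if every
    # child weight is 0 the 100% is split evenly instead.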
# tree modifying methods
###########################
@logAPICall
def add(self, attr_vals, parse_order, level, additional_data={}):
"""
recursively update statistic @ node and @ child nodes
using attr_val, defaults, skips at idx
"""
# increment count of current node
self.count+=1
# the ending condition for the recursive call
        # NOTE: is_leaf is not used here, this process should work on an empty tree
if (len(parse_order) <= level):
# leaf node also aggregate additional data
            self.increment_additional(self.AverageSize, additional_data)
            self.increment_additional(self.UnitCost, additional_data)
return
logAPICall.log('processing %d %s' %(level, parse_order[level]), logAPICall.DEBUG)
# get value to add/update children
# NOTE: value for current node is already set by its parent
# all processing/parsing is to work on its children
attr_name = parse_order[level]
value = None
for val in attr_vals:
if val.attribute.name == attr_name:
value = val
break
# handle default cases
is_default = False
if value is None:
is_default = True
elif value is not None and (str(value) == value.attribute.default or str(value) == value.attribute.group.default):
value = None
is_default = True
logAPICall.log('\tnode:%s' %(value), logAPICall.DEBUG_L2)
child_found = False
# find children and add value/modifier
for child in self.children:
if (child.value is None and value is None) or str(child.value) == str(value):
logAPICall.log('found child with %s' % value, logAPICall.DEBUG_L2)
child_found = True
# recursive call to process next level
child.add(attr_vals, parse_order, level+1, additional_data)
return
# if no children found, then add new node for value and add modifier
if not child_found:
logAPICall.log('create new child with %s' % value, logAPICall.DEBUG_L2)
child = StatisticNode(self, attr_name, value, self.level+1, is_default, False)
self.children.append(child)
# recursive call to process next level
child.add(attr_vals, parse_order, level+1, additional_data)
return
@logAPICall
def eliminate_empty(self):
"""
        traverse the current tree, eliminating nodes with value=None and no sibling
"""
for child in self.children:
child.eliminate_empty()
if len(self.children) == 1:
child = self.children[0]
if child.value is None:
# eliminate
self.children = []
for grandchild in child.children:
grandchild.parent = self
self.children.append(grandchild)
grandchild.set_level_recursive(self.level+1)
del child
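    # Sketch: a chain root -> (value=None) -> [X, Y] collapses to
    # root -> [X, Y]; the grandchildren are re-parented and their levels
    # shifted up by set_level_recursive.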
@logAPICall
def get_modifiers(self, max_level):
"""
generator providing access to all modifiers from node and children nodes
up to given max_level
"""
if self.level <= max_level: #and not self.is_leaf:
# own modifiers
for idx, mod in enumerate(self.modifiers):
# generator return
yield self, idx, mod
# traverse through children nodes
for child in self.children:
# recursively return children's modifiers with generator
for node, idx, mod in child.get_modifiers(max_level):
yield node, idx, mod
# else
# reached leaf or modifier from max depth level defined.
# return
@logAPICall
def delete_node(self, child):
"""
delete given node from children list, distribute its weight to
sibling nodes equally
throws exception if node is only child
"""
# assert valid condition
total_children = len(self.children)
if total_children == 1:
raise StatisticNodeError('only child. cannot be deleted')
# delete, wrap in exception to catch miss-matched children
try:
# remove child
weight = child.weight
self.children.remove(child)
total_children -= 1
# evenly distribute deleted weight to sibling
for child in self.children:
child.weight = child.weight + (weight / total_children)
except:
raise StatisticNodeError('unknown error while deleting node')
@logAPICall
def add_modifier(self, val, mod_idx=0):
""" update statistic for specified modifier """
if len(self.modifiers) <= mod_idx:
mod = StatisticModifier(self.name, self.level)
mod.update(val)
self.modifiers.append(mod)
else:
self.modifiers[mod_idx].update(val)
@logAPICall
def set_level_recursive(self, level):
"""
recursively set level of node and all children
        this allows the node to be attached at an arbitrary level in the tree
"""
if level <= 0:
raise StatisticNodeError('resulting level must be > 0')
self.level = level
# adjust children
if not self.is_leaf:
for child in self.children:
child.set_level_recursive(level + 1)
@logAPICall
def matches(self, node):
"""
test to see if node matches self or any descendant
"""
if self == node:
return True
if self.is_leaf:
return False
for child in self.children:
if (child.matches(node)):
return True
return False
@logAPICall
def update_children(self, attribute, values, weights):
"""
simply update children based on given values/weights without checking
for position of values
"""
        if sum(weights) != 100:
            raise StatisticNodeError('weights do not add up to 100')
to_add = len(values) - len(self.children)
if to_add > 0:
# need to add more nodes
for i in range(to_add):
child = StatisticNode(self, attribute, '', self.level+1)
self.children.append(child)
        elif to_add < 0:
            # need to delete the extra nodes beyond the given values
            del self.children[len(values):]
# set value/weights
idx = 0
for val, weight in map(None, values, weights):
child = self.children[idx]
child.value = val
child.weight = weight
idx += 1
@logAPICall
def update_children_complex(self, attribute, values, weights):
"""
update children based on given values/weights
"""
# check to make sure given values/weights can be used to
# update node's children
# check for following conditions
# 1. weights do not add up to 100.
# FAIL, cannot update
# 2. values not changed, only weights were updated
# ALLOW, update children with new weights
# 3. new values are added
# ALLOW, add new value nodes, update all weights
# 4. some values are deleted
# ALLOW, delete child node(s), update all weights
sum_weight = 0 # for case 1
# sum up weights,
# check for added/deleted nodes
added = []
to_delete = []
for v, w in map(None, values, weights):
# check deleted
if w == 0:
to_delete.append(v)
continue
# update sum
sum_weight += w
# check added
child_found = False
for child in self.children:
if child.value == v:
child_found = True
if not child_found:
added.append(v)
        # find existing children whose values were removed from the list
for child in self.children:
try:
values.index(child.value)
except:
if len(added) > 0:
                    # reuse this node for one of the values to be added;
                    # this helps when a value is renamed to another one,
                    # because the children of the node are preserved
child.value = added[0]
added.remove(child.value)
else:
# nothing to add, remove the children
to_delete.append(child.value)
# case 1
        if sum_weight != 100:
            raise StatisticNodeError('weights do not add up to 100')
# case 3, new values added
for v in added:
child = StatisticNode(self, attribute, v, self.level+1)
self.children.append(child)
# case 4, some values are deleted
for v in to_delete:
for child in self.children:
if child.value == v:
self.delete_node(child)
        # after the changes above, only the weight updates remain (case 2)
for child in self.children:
try:
weight = weights[values.index(child.value)]
except:
weight = 0
child.weight = weight
child.count = weight
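    # Worked example for the cases above (hypothetical): children
    # {A: 70, B: 30} updated with values [A, C] and weights [40, 60];
    # B is absent from values, so its node is reused for the new value C
    # (or deleted if nothing is pending), then weights become A=40, C=60.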
def set_modifier(self, modidx, modifier):
"""
set modifier to given modidx if modidx is valid,
otherwise, add modifier as new modifier to node's list
"""
if modidx >= 0 and modidx < len(self.modifiers):
self.modifiers[modidx] = modifier
else:
self.modifiers.append(modifier)
def update_modifier(self, values, weights, modidx=-1):
"""
update node's (modidx) modifier with given values/weights list
raise exception if modidx is not valid index.
if no modidx is given as input, a new modifier is created and attached to
the node
"""
# check to make sure given values/weights can be used to
# update node's modifier
# check for following conditions
# 1. weights do not add up to 100.
# FAIL, cannot update
# 2. modidx exceed max index of node's modifier list
# FAIL, cannot update
# 3. modidx is negative,
# ALLOW, add new modifier with given values/weight
# 4. modidx is correct index for node's modifier list
# ALLOW, update new
# test case 1
        if sum(weights) != 100:
            raise StatisticNodeError('weights do not add up to 100')
# test case 2
if len(self.modifiers) <= modidx:
raise StatisticNodeError('modifier with index %s does not exist' % modidx)
mod = StatisticModifier("User", self.level)
for v, w in map(None, values, weights):
mod.values[v]=w
if modidx < 0:
# case 3
self.modifiers.append(mod)
else:
# case 4
self.modifiers[modidx] = mod
def remove_modifier(self, modidx):
"""
remove node's (modidx) modifier.
raise exception if modidx is not valid index
"""
if modidx < 0 or len(self.modifiers) <= modidx:
raise StatisticNodeError('modifier with index %s does not exist' % modidx)
del self.modifiers[modidx]
    def increment_additional(self, key, values):
if values.has_key(key):
if not self.additional.has_key(key):
self.additional[key]=0
self.additional[key]+= values[key]
def set_additional(self, key, value):
if self.is_leaf:
self.additional[key]=value
else:
for child in self.children:
child.set_additional(key, value)
def get_additional(self, key):
return self.additional[key] if self.additional.has_key(key) else ''
def get_additional_float(self, key):
try:
return float(self.additional[key])
except:
            return 0
# shendo/websnort - tests/test_config.py
# Websnort - Web service for analysing pcap files with snort
# Copyright (C) 2013-2015 Steve Henderson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from websnort.config import Config
def test_config():
conf = Config('websnort.conf.complex')
assert len(conf.modules) == 3
assert conf.modules.get('snort_community')
assert conf.modules.get('snort_vrt')
assert conf.modules.get('suricata_et')
assert conf.modules['snort_community']['name'] == 'snort_community'
assert conf.modules['snort_community']['module'] == 'snort'
assert conf.modules['snort_community']['path'] == 'snort'
assert conf.modules['snort_community']['ruleset'] == 'Community Rules'
assert conf.modules['snort_community']['config'] == '/etc/snort/snort.conf'
assert not conf.modules['snort_community']['extra_args']
| gpl-3.0 | -6,451,567,111,327,162,000 | 41.828571 | 79 | 0.728486 | false |
# tboch/mocpy - docs/conf.py
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import mocpy
# -- Project information -----------------------------------------------------
project = 'mocpy'
copyright = '2018, Thomas Boch, Matthieu Baumann'
author = 'Thomas Boch, Matthieu Baumann'
# The short X.Y version
version = mocpy.__version__
# The full version, including alpha/beta/rc tags
release = mocpy.__version__
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.doctest',
'sphinxcontrib.bibtex',
    # Extension for plotting images in the docs
'matplotlib.sphinxext.plot_directive',
# To support Numpy docstrings, we use this extension:
# see https://numpydoc.readthedocs.io/en/latest/install.html
'numpydoc',
]
default_role = 'py:obj'
numpydoc_class_members_toctree = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'bootstrap-astropy'
html_show_sphinx = False
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'logotext1': 'MOC', # white, semi-bold
'logotext2': 'py', # orange, light
'logotext3': ':docs', # white, light
'astropy_project_menubar': False,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'mocpydoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'mocpy.tex', 'mocpy Documentation',
'Thomas Boch, Matthieu Baumann', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'mocpy', 'mocpy Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'mocpy', 'mocpy Documentation',
author, 'mocpy', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'astropy': ('http://docs.astropy.org/en/latest/', None),
'matplotlib': ('https://matplotlib.org/' ,None),
'networkx': ('https://networkx.github.io/documentation/stable/', None),
}
def setup(app):
pass
#!/usr/bin/env python
# openmotics/gateway-frontend - tools/json-sorter.py
# Copyright (C) 2016 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
if __name__ == '__main__':
import os
import sys
import json
if len(sys.argv) != 2:
print('Usage: ./tools/json-sorter.py <path to json file>')
print('Example: ./tools/json-sorter.py ./src/locale/en/translation.json')
sys.exit(1)
path = sys.argv[1]
if os.path.exists(path):
with open(path, 'r+', encoding='utf8') as json_file:
try:
contents = json.load(json_file)
json_file.seek(0)
contents = json.dumps(contents, indent=4, sort_keys=True, ensure_ascii=False)
json_file.write('{0}\n'.format(contents))
json_file.truncate()
except Exception as ex:
print('Error processing file: {0}'.format(ex))
else:
print('Path "{0}" does not exist'.format(path))
sys.exit(1)
| agpl-3.0 | -7,325,323,666,220,852,000 | 38.275 | 93 | 0.63972 | false |
# mtlchun/edx - common/lib/xmodule/xmodule/modulestore/tests/test_mongo_call_count.py
"""
Tests to verify correct number of MongoDB calls during course import/export and traversal
when using the Split modulestore.
"""
from tempfile import mkdtemp
from shutil import rmtree
from unittest import TestCase, skip
import ddt
from xmodule.modulestore.xml_importer import import_from_xml
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.tests.factories import check_mongo_calls
from xmodule.modulestore.tests.test_cross_modulestore_import_export import (
MixedModulestoreBuilder, VersioningModulestoreBuilder,
MongoModulestoreBuilder, TEST_DATA_DIR
)
MIXED_OLD_MONGO_MODULESTORE_BUILDER = MixedModulestoreBuilder([('draft', MongoModulestoreBuilder())])
MIXED_SPLIT_MODULESTORE_BUILDER = MixedModulestoreBuilder([('split', VersioningModulestoreBuilder())])
@ddt.ddt
@skip("Fix call counts below - sometimes the counts are off by 1.")
class CountMongoCallsXMLRoundtrip(TestCase):
"""
This class exists to test XML import and export to/from Split.
"""
def setUp(self):
super(CountMongoCallsXMLRoundtrip, self).setUp()
self.export_dir = mkdtemp()
self.addCleanup(rmtree, self.export_dir, ignore_errors=True)
@ddt.data(
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 287, 779, 702, 702),
(MIXED_SPLIT_MODULESTORE_BUILDER, 37, 16, 190, 189),
)
@ddt.unpack
def test_import_export(self, store_builder, export_reads, import_reads, first_import_writes, second_import_writes):
with store_builder.build() as (source_content, source_store):
with store_builder.build() as (dest_content, dest_store):
source_course_key = source_store.make_course_key('a', 'course', 'course')
dest_course_key = dest_store.make_course_key('a', 'course', 'course')
# An extra import write occurs in the first Split import due to the mismatch between
# the course id and the wiki_slug in the test XML course. The course must be updated
# with the correct wiki_slug during import.
with check_mongo_calls(import_reads, first_import_writes):
import_from_xml(
source_store,
'test_user',
TEST_DATA_DIR,
course_dirs=['manual-testing-complete'],
static_content_store=source_content,
target_course_id=source_course_key,
create_course_if_not_present=True,
raise_on_failure=True,
)
with check_mongo_calls(export_reads):
export_to_xml(
source_store,
source_content,
source_course_key,
self.export_dir,
'exported_source_course',
)
with check_mongo_calls(import_reads, second_import_writes):
import_from_xml(
dest_store,
'test_user',
self.export_dir,
course_dirs=['exported_source_course'],
static_content_store=dest_content,
target_course_id=dest_course_key,
create_course_if_not_present=True,
raise_on_failure=True,
)
@ddt.ddt
class CountMongoCallsCourseTraversal(TestCase):
"""
Tests the number of Mongo calls made when traversing a course tree from the top course root
to the leaf nodes.
"""
# Suppose you want to traverse a course - maybe accessing the fields of each XBlock in the course,
# maybe not. What parameters should one use for get_course() in order to minimize the number of
# mongo calls? The tests below both ensure that code changes don't increase the number of mongo calls
# during traversal -and- demonstrate how to minimize the number of calls.
@ddt.data(
# These two lines show the way this traversal *should* be done
# (if you'll eventually access all the fields and load all the definitions anyway).
# 'lazy' does not matter in old Mongo.
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, False, True, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, True, True, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, False, True, 387),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, True, True, 387),
# As shown in these two lines: whether or not the XBlock fields are accessed,
# the same number of mongo calls are made in old Mongo for depth=None.
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, False, False, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, True, False, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, False, False, 387),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, True, False, 387),
# The line below shows the way this traversal *should* be done
# (if you'll eventually access all the fields and load all the definitions anyway).
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, True, 4),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, True, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, True, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, True, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, False, 4),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, False, 4),
# TODO: The call count below seems like a bug - should be 4?
# Seems to be related to using self.lazy in CachingDescriptorSystem.get_module_data().
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, False, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, False, 4)
)
@ddt.unpack
def test_number_mongo_calls(self, store, depth, lazy, access_all_block_fields, num_mongo_calls):
with store.build() as (source_content, source_store):
source_course_key = source_store.make_course_key('a', 'course', 'course')
# First, import a course.
import_from_xml(
source_store,
'test_user',
TEST_DATA_DIR,
course_dirs=['manual-testing-complete'],
static_content_store=source_content,
target_course_id=source_course_key,
create_course_if_not_present=True,
raise_on_failure=True,
)
# Course traversal modeled after the traversal done here:
# lms/djangoapps/mobile_api/video_outlines/serializers.py:BlockOutline
            # Starting at the root course block, do a depth-first traversal
            # (stack-based, children pushed in reverse) using get_children()
            # to retrieve each block's children.
with check_mongo_calls(num_mongo_calls):
with source_store.bulk_operations(source_course_key):
start_block = source_store.get_course(source_course_key, depth=depth, lazy=lazy)
all_blocks = []
stack = [start_block]
while stack:
curr_block = stack.pop()
all_blocks.append(curr_block)
if curr_block.has_children:
for block in reversed(curr_block.get_children()):
stack.append(block)
if access_all_block_fields:
# Read the fields on each block in order to ensure each block and its definition is loaded.
for xblock in all_blocks:
for __, field in xblock.fields.iteritems():
if field.is_set_on(xblock):
__ = field.read_from(xblock)
#! /usr/bin/python
# jacob-ogre/excel2text - excel2text.py
# excel2text.py
# A simple program to convert Excel files to text with user-defined delimiters.
#
# Copyright (C) 2013 copyright Jacob Malcom, [email protected]
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os
import sys
import xlrd
def main():
"""
Convert Excel files to text.
USAGE:
excel2text <infile> <delimiter>
ARGS:
infile, an Excel workbook of .xls or .xlsx format
delimiter, one of 'tab', 'comma', or 'space'
RETURNS:
One text file per worksheet in the infile
COMMENTS:
Writes one output file per worksheet (tab) with user-defined field
delimiters, with file base name from the worksheet name. The file suffix
is .csv (delimiter= 'comma'), .tab (= 'tab'), or .txt (= 'space').
"""
delim, suffix = get_suffix_delim()
outbase = infile.split(".")[0] + "_files/"
if not os.path.exists(outbase):
os.mkdir(outbase)
process_file(suffix, delim, outbase)
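# Illustrative invocation (hypothetical file names):
#   python excel2text.py survey.xlsx tab
# would write one tab-delimited file per worksheet, e.g.
# survey_files/Sheet1.tab.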
def process_file(suffix, delim, outbase):
"""Read Excel row-by-row and write each sheet to file."""
fil = xlrd.open_workbook(infile)
for sheet in fil.sheet_names():
cur_sheet = fil.sheet_by_name(sheet)
new_fil = outbase + sheet + suffix
with open(new_fil, 'wb') as out:
for rown in range(cur_sheet.nrows):
to_write = []
for cel in range(len(cur_sheet.row(rown))):
to_write.append(cur_sheet.cell_value(rown,cel))
to_write = [str(x) for x in to_write]
out.write(delim.join(to_write) + "\n")
def get_suffix_delim():
"""Return delimiter and file suffix given argv."""
if delimiter == "tab":
return "\t", ".tab"
elif delimiter == "comma":
return ",", ".csv"
elif delimiter == "space":
return " ", ".txt"
else:
print "Please use 'tab', 'comma' or 'space' as delimiters."
sys.exit(2)
if __name__ == '__main__':
if len(sys.argv) != 3:
print main.__doc__
sys.exit()
infile = sys.argv[1]
delimiter = str(sys.argv[2])
main()
# bzshang/yelp-photo-classification - scripts/get_image_features.py
"""
Extract image features from next to last layer (global_pool)
"""
__author__ = 'bshang'
import numpy as np
import h5py
import logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
import sys
sys.path.append('/home/ubuntu/yelp/mxnet/python')
import mxnet as mx
MODEL = 'inception-v3'
MODEL_PATH = '/data/checkpoint/{0}'.format(MODEL)
LAYER = 'global_pool_output'
NUM_EPOCH = 30
TEST = False
if TEST:
FEATURES_PATH = '/data/test_image_features_{0}_{1}_{2}.h5'.format(MODEL, LAYER, NUM_EPOCH)
REC_FILE = '/data/rec/test_imgs.rec'
LAB_FILE = '/data/rec/test_imgs.lst'
else:
FEATURES_PATH = '/data/train_image_features_{0}_{1}_{2}.h5'.format(MODEL, LAYER, NUM_EPOCH)
REC_FILE = '/data/rec/train_imgs.rec'
LAB_FILE = '/data/rec/train_imgs.lst'
f = h5py.File(FEATURES_PATH, 'w')
filenames = f.create_dataset('pids', (0,), maxshape=(None,))
feature = f.create_dataset('feature', (0, 2048), maxshape=(None, 2048)) # 2048 features in global_pool
f.close()
with open(LAB_FILE, 'r') as f:
pids = [line.split('\t')[-1].split('.')[0] for line in f]
with h5py.File(FEATURES_PATH, 'r+') as f:
f['pids'].resize((len(pids),))
f['pids'][0: len(pids)] = np.array(pids, dtype=np.int64)
model = mx.model.FeedForward.load(MODEL_PATH, NUM_EPOCH, ctx=mx.gpu())
fea_symbol = model.symbol.get_internals()[LAYER]
feature_extractor = mx.model.FeedForward(
ctx=mx.gpu(),
symbol=fea_symbol,
arg_params=model.arg_params,
aux_params=model.aux_params,
allow_extra_params=True)
model_iter = mx.io.ImageRecordIter(
path_imgrec = REC_FILE,
mean_r = 117,
mean_g = 117,
mean_b = 117,
data_shape = (3, 299, 299),
batch_size = 32,
rand_crop = False,
rand_mirror = False,
path_imglist= LAB_FILE,
label_width = 9
)
features = feature_extractor.predict(model_iter)
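# The global_pool layer of inception-v3 is assumed here to produce
# activations shaped (num_images, 2048, 1, 1), matching the 2048-wide
# dataset above, so the trailing singleton spatial axes are dropped below.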
features = features[:, :, 0, 0]
with h5py.File(FEATURES_PATH, 'r+') as f:
f['feature'].resize((features.shape[0], features.shape[1]))
f['feature'][0: features.shape[0], :] = features
# apbard/scipy - scipy/linalg/_cython_wrapper_generators.py
"""
Code generator script to make the Cython BLAS and LAPACK wrappers
from the files "cython_blas_signatures.txt" and
"cython_lapack_signatures.txt" which contain the signatures for
all the BLAS/LAPACK routines that should be included in the wrappers.
"""
from operator import itemgetter
fortran_types = {'int': 'integer',
'c': 'complex',
'd': 'double precision',
's': 'real',
'z': 'complex*16',
'char': 'character',
'bint': 'logical'}
c_types = {'int': 'int',
'c': 'npy_complex64',
'd': 'double',
's': 'float',
'z': 'npy_complex128',
'char': 'char',
'bint': 'int',
'cselect1': '_cselect1',
'cselect2': '_cselect2',
'dselect2': '_dselect2',
'dselect3': '_dselect3',
'sselect2': '_sselect2',
'sselect3': '_sselect3',
'zselect1': '_zselect1',
'zselect2': '_zselect2'}
def arg_names_and_types(args):
return zip(*[arg.split(' *') for arg in args.split(', ')])
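# Sketch of the split above on a signature fragment in the style of the
# signature files: "int *n, d *x" -> types ('int', 'd'), names ('n', 'x').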
pyx_func_template = """
cdef extern from "{header_name}":
void _fortran_{name} "F_FUNC({name}wrp, {upname}WRP)"({ret_type} *out, {fort_args}) nogil
cdef {ret_type} {name}({args}) nogil:
cdef {ret_type} out
_fortran_{name}(&out, {argnames})
return out
"""
npy_types = {'c': 'npy_complex64', 'z': 'npy_complex128',
'cselect1': '_cselect1', 'cselect2': '_cselect2',
'dselect2': '_dselect2', 'dselect3': '_dselect3',
'sselect2': '_sselect2', 'sselect3': '_sselect3',
'zselect1': '_zselect1', 'zselect2': '_zselect2'}
def arg_casts(arg):
if arg in ['npy_complex64', 'npy_complex128', '_cselect1', '_cselect2',
'_dselect2', '_dselect3', '_sselect2', '_sselect3',
'_zselect1', '_zselect2']:
return '<{0}*>'.format(arg)
return ''
def pyx_decl_func(name, ret_type, args, header_name):
argtypes, argnames = arg_names_and_types(args)
# Fix the case where one of the arguments has the same name as the
# abbreviation for the argument type.
    # Otherwise the variable passed as an argument would shadow the
    # earlier typedef and Cython compilation would fail.
if ret_type in argnames:
argnames = [n if n != ret_type else ret_type + '_' for n in argnames]
argnames = [n if n not in ['lambda', 'in'] else n + '_'
for n in argnames]
args = ', '.join([' *'.join([n, t])
for n, t in zip(argtypes, argnames)])
argtypes = [npy_types.get(t, t) for t in argtypes]
fort_args = ', '.join([' *'.join([n, t])
for n, t in zip(argtypes, argnames)])
argnames = [arg_casts(t) + n for n, t in zip(argnames, argtypes)]
argnames = ', '.join(argnames)
c_ret_type = c_types[ret_type]
args = args.replace('lambda', 'lambda_')
return pyx_func_template.format(name=name, upname=name.upper(), args=args,
fort_args=fort_args, ret_type=ret_type,
c_ret_type=c_ret_type, argnames=argnames,
header_name=header_name)
pyx_sub_template = """cdef extern from "{header_name}":
void _fortran_{name} "F_FUNC({name},{upname})"({fort_args}) nogil
cdef void {name}({args}) nogil:
_fortran_{name}({argnames})
"""
def pyx_decl_sub(name, args, header_name):
argtypes, argnames = arg_names_and_types(args)
argtypes = [npy_types.get(t, t) for t in argtypes]
argnames = [n if n not in ['lambda', 'in'] else n + '_' for n in argnames]
fort_args = ', '.join([' *'.join([n, t])
for n, t in zip(argtypes, argnames)])
argnames = [arg_casts(t) + n for n, t in zip(argnames, argtypes)]
argnames = ', '.join(argnames)
args = args.replace('*lambda,', '*lambda_,').replace('*in,', '*in_,')
return pyx_sub_template.format(name=name, upname=name.upper(),
args=args, fort_args=fort_args,
argnames=argnames, header_name=header_name)
blas_pyx_preamble = '''# cython: boundscheck = False
# cython: wraparound = False
# cython: cdivision = True
"""
BLAS Functions for Cython
=========================
Usable from Cython via::
cimport scipy.linalg.cython_blas
These wrappers do not check for alignment of arrays.
Alignment should be checked before these wrappers are used.
Raw function pointers (Fortran-style pointer arguments):
- {}
"""
# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_blas
# from scipy.linalg cimport cython_blas
# cimport scipy.linalg.cython_blas as cython_blas
# cimport ..linalg.cython_blas as cython_blas
# Within scipy, if BLAS functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
from __future__ import absolute_import
cdef extern from "fortran_defs.h":
pass
from numpy cimport npy_complex64, npy_complex128
'''
def make_blas_pyx_preamble(all_sigs):
names = [sig[0] for sig in all_sigs]
return blas_pyx_preamble.format("\n- ".join(names))
lapack_pyx_preamble = '''"""
LAPACK functions for Cython
===========================
Usable from Cython via::
cimport scipy.linalg.cython_lapack
This module provides Cython-level wrappers for all primary routines included
in LAPACK 3.1.0 except for ``zcgesv`` since its interface is not consistent
from LAPACK 3.1.0 to 3.6.0. It also provides some of the
fixed-api auxiliary routines.
These wrappers do not check for alignment of arrays.
Alignment should be checked before these wrappers are used.
Raw function pointers (Fortran-style pointer arguments):
- {}
"""
# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_lapack
# from scipy.linalg cimport cython_lapack
# cimport scipy.linalg.cython_lapack as cython_lapack
# cimport ..linalg.cython_lapack as cython_lapack
# Within scipy, if LAPACK functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
from __future__ import absolute_import
cdef extern from "fortran_defs.h":
pass
from numpy cimport npy_complex64, npy_complex128
cdef extern from "_lapack_subroutines.h":
# Function pointer type declarations for
# gees and gges families of functions.
ctypedef bint _cselect1(npy_complex64*)
ctypedef bint _cselect2(npy_complex64*, npy_complex64*)
ctypedef bint _dselect2(d*, d*)
ctypedef bint _dselect3(d*, d*, d*)
ctypedef bint _sselect2(s*, s*)
ctypedef bint _sselect3(s*, s*, s*)
ctypedef bint _zselect1(npy_complex128*)
ctypedef bint _zselect2(npy_complex128*, npy_complex128*)
'''
def make_lapack_pyx_preamble(all_sigs):
names = [sig[0] for sig in all_sigs]
return lapack_pyx_preamble.format("\n- ".join(names))
blas_py_wrappers = """
# Python-accessible wrappers for testing:
cdef inline bint _is_contiguous(double[:,:] a, int axis) nogil:
return (a.strides[axis] == sizeof(a[0,0]) or a.shape[axis] == 1)
cpdef float complex _test_cdotc(float complex[:] cx, float complex[:] cy) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
int incy = cy.strides[0] // sizeof(cy[0])
return cdotc(&n, &cx[0], &incx, &cy[0], &incy)
cpdef float complex _test_cdotu(float complex[:] cx, float complex[:] cy) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
int incy = cy.strides[0] // sizeof(cy[0])
return cdotu(&n, &cx[0], &incx, &cy[0], &incy)
cpdef double _test_dasum(double[:] dx) nogil:
cdef:
int n = dx.shape[0]
int incx = dx.strides[0] // sizeof(dx[0])
return dasum(&n, &dx[0], &incx)
cpdef double _test_ddot(double[:] dx, double[:] dy) nogil:
cdef:
int n = dx.shape[0]
int incx = dx.strides[0] // sizeof(dx[0])
int incy = dy.strides[0] // sizeof(dy[0])
return ddot(&n, &dx[0], &incx, &dy[0], &incy)
cpdef int _test_dgemm(double alpha, double[:,:] a, double[:,:] b, double beta,
double[:,:] c) nogil except -1:
cdef:
char *transa
char *transb
int m, n, k, lda, ldb, ldc
double *a0=&a[0,0]
double *b0=&b[0,0]
double *c0=&c[0,0]
# In the case that c is C contiguous, swap a and b and
# swap whether or not each of them is transposed.
# This can be done because a.dot(b) = b.T.dot(a.T).T.
if _is_contiguous(c, 1):
if _is_contiguous(a, 1):
transb = 'n'
ldb = (&a[1,0]) - a0 if a.shape[0] > 1 else 1
elif _is_contiguous(a, 0):
transb = 't'
ldb = (&a[0,1]) - a0 if a.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'a' is neither C nor Fortran contiguous.")
if _is_contiguous(b, 1):
transa = 'n'
lda = (&b[1,0]) - b0 if b.shape[0] > 1 else 1
elif _is_contiguous(b, 0):
transa = 't'
lda = (&b[0,1]) - b0 if b.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'b' is neither C nor Fortran contiguous.")
k = b.shape[0]
if k != a.shape[1]:
with gil:
raise ValueError("Shape mismatch in input arrays.")
m = b.shape[1]
n = a.shape[0]
if n != c.shape[0] or m != c.shape[1]:
with gil:
raise ValueError("Output array does not have the correct shape.")
ldc = (&c[1,0]) - c0 if c.shape[0] > 1 else 1
dgemm(transa, transb, &m, &n, &k, &alpha, b0, &lda, a0,
&ldb, &beta, c0, &ldc)
elif _is_contiguous(c, 0):
if _is_contiguous(a, 1):
transa = 't'
lda = (&a[1,0]) - a0 if a.shape[0] > 1 else 1
elif _is_contiguous(a, 0):
transa = 'n'
lda = (&a[0,1]) - a0 if a.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'a' is neither C nor Fortran contiguous.")
if _is_contiguous(b, 1):
transb = 't'
ldb = (&b[1,0]) - b0 if b.shape[0] > 1 else 1
elif _is_contiguous(b, 0):
transb = 'n'
ldb = (&b[0,1]) - b0 if b.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'b' is neither C nor Fortran contiguous.")
m = a.shape[0]
k = a.shape[1]
if k != b.shape[0]:
with gil:
raise ValueError("Shape mismatch in input arrays.")
n = b.shape[1]
if m != c.shape[0] or n != c.shape[1]:
with gil:
raise ValueError("Output array does not have the correct shape.")
ldc = (&c[0,1]) - c0 if c.shape[1] > 1 else 1
dgemm(transa, transb, &m, &n, &k, &alpha, a0, &lda, b0,
&ldb, &beta, c0, &ldc)
else:
with gil:
raise ValueError("Input 'c' is neither C nor Fortran contiguous.")
return 0
cpdef double _test_dnrm2(double[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.strides[0] // sizeof(x[0])
return dnrm2(&n, &x[0], &incx)
cpdef double _test_dzasum(double complex[:] zx) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
return dzasum(&n, &zx[0], &incx)
cpdef double _test_dznrm2(double complex[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.strides[0] // sizeof(x[0])
return dznrm2(&n, &x[0], &incx)
cpdef int _test_icamax(float complex[:] cx) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
return icamax(&n, &cx[0], &incx)
cpdef int _test_idamax(double[:] dx) nogil:
cdef:
int n = dx.shape[0]
int incx = dx.strides[0] // sizeof(dx[0])
return idamax(&n, &dx[0], &incx)
cpdef int _test_isamax(float[:] sx) nogil:
cdef:
int n = sx.shape[0]
int incx = sx.strides[0] // sizeof(sx[0])
return isamax(&n, &sx[0], &incx)
cpdef int _test_izamax(double complex[:] zx) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
return izamax(&n, &zx[0], &incx)
cpdef float _test_sasum(float[:] sx) nogil:
cdef:
int n = sx.shape[0]
int incx = sx.shape[0] // sizeof(sx[0])
return sasum(&n, &sx[0], &incx)
cpdef float _test_scasum(float complex[:] cx) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
return scasum(&n, &cx[0], &incx)
cpdef float _test_scnrm2(float complex[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.strides[0] // sizeof(x[0])
return scnrm2(&n, &x[0], &incx)
cpdef float _test_sdot(float[:] sx, float[:] sy) nogil:
cdef:
int n = sx.shape[0]
int incx = sx.strides[0] // sizeof(sx[0])
int incy = sy.strides[0] // sizeof(sy[0])
return sdot(&n, &sx[0], &incx, &sy[0], &incy)
cpdef float _test_snrm2(float[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.shape[0] // sizeof(x[0])
return snrm2(&n, &x[0], &incx)
cpdef double complex _test_zdotc(double complex[:] zx, double complex[:] zy) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
int incy = zy.strides[0] // sizeof(zy[0])
return zdotc(&n, &zx[0], &incx, &zy[0], &incy)
cpdef double complex _test_zdotu(double complex[:] zx, double complex[:] zy) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
int incy = zy.strides[0] // sizeof(zy[0])
return zdotu(&n, &zx[0], &incx, &zy[0], &incy)
"""
def generate_blas_pyx(func_sigs, sub_sigs, all_sigs, header_name):
funcs = "\n".join(pyx_decl_func(*(s+(header_name,))) for s in func_sigs)
subs = "\n" + "\n".join(pyx_decl_sub(*(s[::2]+(header_name,)))
for s in sub_sigs)
return make_blas_pyx_preamble(all_sigs) + funcs + subs + blas_py_wrappers
lapack_py_wrappers = """
# Python accessible wrappers for testing:
def _test_dlamch(cmach):
# This conversion is necessary to handle Python 3 strings.
cmach_bytes = bytes(cmach)
# Now that it is a bytes representation, a non-temporary variable
# must be passed as a part of the function call.
cdef char* cmach_char = cmach_bytes
return dlamch(cmach_char)
def _test_slamch(cmach):
# This conversion is necessary to handle Python 3 strings.
cmach_bytes = bytes(cmach)
# Now that it is a bytes representation, a non-temporary variable
# must be passed as a part of the function call.
cdef char* cmach_char = cmach_bytes
return slamch(cmach_char)
"""
def generate_lapack_pyx(func_sigs, sub_sigs, all_sigs, header_name):
funcs = "\n".join(pyx_decl_func(*(s+(header_name,))) for s in func_sigs)
subs = "\n" + "\n".join(pyx_decl_sub(*(s[::2]+(header_name,)))
for s in sub_sigs)
preamble = make_lapack_pyx_preamble(all_sigs)
return preamble + funcs + subs + lapack_py_wrappers
pxd_template = """ctypedef {ret_type} {name}_t({args}) nogil
cdef {name}_t *{name}_f
"""
pxd_template = """cdef {ret_type} {name}({args}) nogil
"""
def pxd_decl(name, ret_type, args):
args = args.replace('lambda', 'lambda_').replace('*in,', '*in_,')
return pxd_template.format(name=name, ret_type=ret_type, args=args)
blas_pxd_preamble = """# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_blas
# from scipy.linalg cimport cython_blas
# cimport scipy.linalg.cython_blas as cython_blas
# cimport ..linalg.cython_blas as cython_blas
# Within scipy, if BLAS functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
ctypedef float s
ctypedef double d
ctypedef float complex c
ctypedef double complex z
"""
def generate_blas_pxd(all_sigs):
body = '\n'.join(pxd_decl(*sig) for sig in all_sigs)
return blas_pxd_preamble + body
lapack_pxd_preamble = """# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_lapack
# from scipy.linalg cimport cython_lapack
# cimport scipy.linalg.cython_lapack as cython_lapack
# cimport ..linalg.cython_lapack as cython_lapack
# Within scipy, if LAPACK functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
ctypedef float s
ctypedef double d
ctypedef float complex c
ctypedef double complex z
# Function pointer type declarations for
# gees and gges families of functions.
ctypedef bint cselect1(c*)
ctypedef bint cselect2(c*, c*)
ctypedef bint dselect2(d*, d*)
ctypedef bint dselect3(d*, d*, d*)
ctypedef bint sselect2(s*, s*)
ctypedef bint sselect3(s*, s*, s*)
ctypedef bint zselect1(z*)
ctypedef bint zselect2(z*, z*)
"""
def generate_lapack_pxd(all_sigs):
return lapack_pxd_preamble + '\n'.join(pxd_decl(*sig) for sig in all_sigs)
fortran_template = """ subroutine {name}wrp(ret, {argnames})
external {wrapper}
{ret_type} {wrapper}
{ret_type} ret
{argdecls}
ret = {wrapper}({argnames})
end
"""
dims = {'work': '(*)', 'ab': '(ldab,*)', 'a': '(lda,*)', 'dl': '(*)',
'd': '(*)', 'du': '(*)', 'ap': '(*)', 'e': '(*)', 'lld': '(*)'}
def process_fortran_name(name, funcname):
if 'inc' in name:
return name
xy_exclusions = ['ladiv', 'lapy2', 'lapy3']
if ('x' in name or 'y' in name) and funcname[1:] not in xy_exclusions:
return name + '(n)'
if name in dims:
return name + dims[name]
return name
def fort_subroutine_wrapper(name, ret_type, args):
if name[0] in ['c', 's'] or name in ['zladiv', 'zdotu', 'zdotc']:
wrapper = 'w' + name
else:
wrapper = name
types, names = arg_names_and_types(args)
argnames = ', '.join(names)
names = [process_fortran_name(n, name) for n in names]
argdecls = '\n '.join('{0} {1}'.format(fortran_types[t], n)
for n, t in zip(names, types))
return fortran_template.format(name=name, wrapper=wrapper,
argnames=argnames, argdecls=argdecls,
ret_type=fortran_types[ret_type])
def generate_fortran(func_sigs):
return "\n".join(fort_subroutine_wrapper(*sig) for sig in func_sigs)
def make_c_args(args):
types, names = arg_names_and_types(args)
types = [c_types[arg] for arg in types]
return ', '.join('{0} *{1}'.format(t, n) for t, n in zip(types, names))
c_func_template = "void F_FUNC({name}wrp, {upname}WRP)({return_type} *ret, {args});\n"
def c_func_decl(name, return_type, args):
args = make_c_args(args)
return_type = c_types[return_type]
return c_func_template.format(name=name, upname=name.upper(),
return_type=return_type, args=args)
c_sub_template = "void F_FUNC({name},{upname})({args});\n"
def c_sub_decl(name, return_type, args):
args = make_c_args(args)
return c_sub_template.format(name=name, upname=name.upper(), args=args)
c_preamble = """#ifndef SCIPY_LINALG_{lib}_FORTRAN_WRAPPERS_H
#define SCIPY_LINALG_{lib}_FORTRAN_WRAPPERS_H
#include "fortran_defs.h"
#include "numpy/arrayobject.h"
"""
lapack_decls = """
typedef int (*_cselect1)(npy_complex64*);
typedef int (*_cselect2)(npy_complex64*, npy_complex64*);
typedef int (*_dselect2)(double*, double*);
typedef int (*_dselect3)(double*, double*, double*);
typedef int (*_sselect2)(float*, float*);
typedef int (*_sselect3)(float*, float*, float*);
typedef int (*_zselect1)(npy_complex128*);
typedef int (*_zselect2)(npy_complex128*, npy_complex128*);
"""
cpp_guard = """
#ifdef __cplusplus
extern "C" {
#endif
"""
c_end = """
#ifdef __cplusplus
}
#endif
#endif
"""
def generate_c_header(func_sigs, sub_sigs, all_sigs, lib_name):
funcs = "".join(c_func_decl(*sig) for sig in func_sigs)
subs = "\n" + "".join(c_sub_decl(*sig) for sig in sub_sigs)
if lib_name == 'LAPACK':
preamble = (c_preamble.format(lib=lib_name) + lapack_decls)
else:
preamble = c_preamble.format(lib=lib_name)
return "".join([preamble, cpp_guard, funcs, subs, c_end])
def split_signature(sig):
name_and_type, args = sig[:-1].split('(')
ret_type, name = name_and_type.split(' ')
return name, ret_type, args
def filter_lines(ls):
ls = [l.strip() for l in ls if l != '\n' and l[0] != '#']
func_sigs = [split_signature(l) for l in ls if l.split(' ')[0] != 'void']
sub_sigs = [split_signature(l) for l in ls if l.split(' ')[0] == 'void']
all_sigs = list(sorted(func_sigs + sub_sigs, key=itemgetter(0)))
return func_sigs, sub_sigs, all_sigs
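# Worked example (signature line in the format of the .txt inputs):
#   split_signature("d ddot(int *n, d *dx, int *incx, d *dy, int *incy)")
#   -> ('ddot', 'd', 'int *n, d *dx, int *incx, d *dy, int *incy')
# filter_lines then partitions function vs subroutine ('void') signatures
# and sorts the combined list by routine name.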
def make_all(blas_signature_file="cython_blas_signatures.txt",
lapack_signature_file="cython_lapack_signatures.txt",
blas_name="cython_blas",
lapack_name="cython_lapack",
blas_fortran_name="_blas_subroutine_wrappers.f",
lapack_fortran_name="_lapack_subroutine_wrappers.f",
blas_header_name="_blas_subroutines.h",
lapack_header_name="_lapack_subroutines.h"):
comments = ["This file was generated by _cython_wrapper_generators.py.\n",
"Do not edit this file directly.\n"]
ccomment = ''.join(['// ' + line for line in comments]) + '\n'
pyxcomment = ''.join(['# ' + line for line in comments]) + '\n'
fcomment = ''.join(['c ' + line for line in comments]) + '\n'
with open(blas_signature_file, 'r') as f:
blas_sigs = f.readlines()
blas_sigs = filter_lines(blas_sigs)
blas_pyx = generate_blas_pyx(*(blas_sigs + (blas_header_name,)))
with open(blas_name + '.pyx', 'w') as f:
f.write(pyxcomment)
f.write(blas_pyx)
blas_pxd = generate_blas_pxd(blas_sigs[2])
with open(blas_name + '.pxd', 'w') as f:
f.write(pyxcomment)
f.write(blas_pxd)
blas_fortran = generate_fortran(blas_sigs[0])
with open(blas_fortran_name, 'w') as f:
f.write(fcomment)
f.write(blas_fortran)
blas_c_header = generate_c_header(*(blas_sigs + ('BLAS',)))
with open(blas_header_name, 'w') as f:
f.write(ccomment)
f.write(blas_c_header)
with open(lapack_signature_file, 'r') as f:
lapack_sigs = f.readlines()
lapack_sigs = filter_lines(lapack_sigs)
lapack_pyx = generate_lapack_pyx(*(lapack_sigs + (lapack_header_name,)))
with open(lapack_name + '.pyx', 'w') as f:
f.write(pyxcomment)
f.write(lapack_pyx)
lapack_pxd = generate_lapack_pxd(lapack_sigs[2])
with open(lapack_name + '.pxd', 'w') as f:
f.write(pyxcomment)
f.write(lapack_pxd)
lapack_fortran = generate_fortran(lapack_sigs[0])
with open(lapack_fortran_name, 'w') as f:
f.write(fcomment)
f.write(lapack_fortran)
lapack_c_header = generate_c_header(*(lapack_sigs + ('LAPACK',)))
with open(lapack_header_name, 'w') as f:
f.write(ccomment)
f.write(lapack_c_header)
if __name__ == '__main__':
make_all()
| bsd-3-clause | 7,461,053,143,855,277,000 | 33.070912 | 101 | 0.592023 | false |
LukeCarrier/py3k-pexpect | tools/websync.py | 1 | 1347 | #!/usr/bin/env python
# I use this to keep the sourceforge pages up to date with the
# latest documentation and I like to keep a copy of the distribution
# on the web site so that it will be compatible with
# The Vaults of Parnasus which requires a direct URL link to a
# tar ball distribution. I don't advertise the package this way.
import pexpect, pyed
import getpass
import sys, os
X = getpass.getpass('Password: ')
pp_pattern=["(?i)password:", "(?i)enter passphrase for key '.*?':"]
p = pexpect.spawn ('scp -r doc/. [email protected]:/home/groups/p/pe/pexpect/htdocs/.')
p.logfile_read = sys.stdout
p.expect (pp_pattern)
p.sendline (X)
p.expect (pexpect.EOF)
print(p.before)
p = pexpect.spawn ('scp doc/clean.css doc/email.png [email protected]:/home/groups/p/pe/pexpect/htdocs/clean.css')
p.logfile_read = sys.stdout
p.expect (pp_pattern)
p.sendline (X)
p.expect (pexpect.EOF)
print(p.before)
#p = pexpect.spawn ('ssh [email protected] "cd htdocs;tar zxvf pexpect-doc.tgz"')
#p.logfile_read = sys.stdout
#p.expect ('password:')
#p.sendline (X)
#p.expect (pexpect.EOF)
#print p.before
p = pexpect.spawn ('scp dist/pexpect-*.tar.gz [email protected]:/home/groups/p/pe/pexpect/htdocs/.')
p.logfile_read = sys.stdout
p.expect (pp_pattern)
p.sendline (X)
p.expect (pexpect.EOF)
print(p.before)
| mit | 4,307,543,370,111,144,000 | 30.325581 | 123 | 0.729027 | false |
akrzos/cfme_tests | cfme/tests/cloud/test_providers.py | 1 | 10044 | # -*- coding: utf-8 -*-
# pylint: disable=E1101
# pylint: disable=W0621
import fauxfactory
import uuid
import pytest
import utils.error as error
from cfme import Credential
from cfme.exceptions import FlashMessageException
from cfme.cloud.provider import (discover, EC2Provider, wait_for_a_provider,
Provider, OpenStackProvider, prop_region)
from cfme.web_ui import fill, flash
from utils import testgen, version
from utils.providers import get_credentials_from_config
from utils.update import update
pytest_generate_tests = testgen.generate(testgen.cloud_providers, scope="function")
@pytest.mark.tier(3)
def test_empty_discovery_form_validation():
""" Tests that the flash message is correct when discovery form is empty."""
discover(None, d_type="Amazon")
ident = version.pick({version.LOWEST: 'User ID',
'5.4': 'Username'})
flash.assert_message_match('{} is required'.format(ident))
@pytest.mark.tier(3)
def test_discovery_cancelled_validation():
""" Tests that the flash message is correct when discovery is cancelled."""
discover(None, cancel=True, d_type="Amazon")
msg = version.pick(
{version.LOWEST: 'Amazon Cloud Providers Discovery was cancelled by the user',
'5.5': 'Cloud Providers Discovery was cancelled by the user'})
flash.assert_message_match(msg)
@pytest.mark.tier(3)
def test_add_cancelled_validation(request):
"""Tests that the flash message is correct when add is cancelled."""
prov = EC2Provider()
request.addfinalizer(prov.delete_if_exists)
prov.create(cancel=True)
flash.assert_message_match({
version.LOWEST: 'Add of new Cloud Provider was cancelled by the user',
'5.5': 'Add of Cloud Provider was cancelled by the user'})
@pytest.mark.tier(3)
def test_password_mismatch_validation():
cred = Credential(
principal=fauxfactory.gen_alphanumeric(5),
secret=fauxfactory.gen_alphanumeric(5),
verify_secret=fauxfactory.gen_alphanumeric(7))
discover(cred, d_type="Amazon")
flash.assert_message_match('Password/Verify Password do not match')
@pytest.mark.tier(3)
@pytest.mark.uncollect()
@pytest.mark.usefixtures('has_no_cloud_providers')
def test_providers_discovery_amazon():
amazon_creds = get_credentials_from_config('cloudqe_amazon')
discover(amazon_creds, d_type="Amazon")
flash.assert_message_match('Amazon Cloud Providers: Discovery successfully initiated')
wait_for_a_provider()
@pytest.mark.tier(3)
@pytest.mark.usefixtures('has_no_cloud_providers')
def test_provider_add_with_bad_credentials(provider):
""" Tests provider add with bad credentials
Metadata:
test_flag: crud
"""
provider.credentials['default'] = get_credentials_from_config('bad_credentials')
with error.expected('Login failed due to a bad username or password.'):
provider.create(validate_credentials=True)
@pytest.mark.tier(2)
@pytest.mark.usefixtures('has_no_cloud_providers')
def test_provider_crud(provider):
""" Tests provider add with good credentials
Metadata:
test_flag: crud
"""
provider.create()
provider.validate_stats(ui=True)
old_name = provider.name
with update(provider):
provider.name = str(uuid.uuid4()) # random uuid
with update(provider):
provider.name = old_name # old name
provider.delete(cancel=False)
provider.wait_for_delete()
@pytest.mark.tier(3)
def test_type_required_validation(request, soft_assert):
"""Test to validate type while adding a provider"""
prov = Provider()
request.addfinalizer(prov.delete_if_exists)
if version.current_version() < "5.5":
with error.expected('Type is required'):
prov.create()
else:
pytest.sel.force_navigate("clouds_provider_new")
fill(prov.properties_form.name_text, "foo")
soft_assert("ng-invalid-required" in prov.properties_form.type_select.classes)
soft_assert(not prov.add_provider_button.can_be_clicked)
@pytest.mark.tier(3)
def test_name_required_validation(request):
"""Tests to validate the name while adding a provider"""
prov = EC2Provider(
name=None,
region='us-east-1')
request.addfinalizer(prov.delete_if_exists)
if version.current_version() < "5.5":
with error.expected("Name can't be blank"):
prov.create()
else:
        # It must raise an exception because the page stays on the form
with error.expected(FlashMessageException):
prov.create()
assert prov.properties_form.name_text.angular_help_block == "Required"
@pytest.mark.tier(3)
def test_region_required_validation(request, soft_assert):
"""Tests to validate the region while adding a provider"""
prov = EC2Provider(
name=fauxfactory.gen_alphanumeric(5),
region=None)
request.addfinalizer(prov.delete_if_exists)
if version.current_version() < "5.5":
with error.expected('Region is not included in the list'):
prov.create()
else:
with error.expected(FlashMessageException):
prov.create()
soft_assert(
"ng-invalid-required" in prov.properties_form.region_select.classes)
@pytest.mark.tier(3)
def test_host_name_required_validation(request):
"""Test to validate the hostname while adding a provider"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=None,
ip_address=fauxfactory.gen_ipaddr(prefix=[10]))
request.addfinalizer(prov.delete_if_exists)
if version.current_version() < "5.5":
with error.expected("Host Name can't be blank"):
prov.create()
else:
        # It must raise an exception because the page stays on the form
with error.expected(FlashMessageException):
prov.create()
assert prov.properties_form.hostname_text.angular_help_block == "Required"
@pytest.mark.tier(3)
@pytest.mark.uncollectif(lambda: version.current_version() > '5.4')
def test_ip_address_required_validation(request):
"""Test to validate the ip address while adding a provider"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=fauxfactory.gen_alphanumeric(5),
ip_address=None)
request.addfinalizer(prov.delete_if_exists)
with error.expected("IP Address can't be blank"):
prov.create()
@pytest.mark.tier(3)
def test_api_port_blank_validation(request):
"""Test to validate blank api port while adding a provider"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=fauxfactory.gen_alphanumeric(5),
ip_address=fauxfactory.gen_ipaddr(prefix=[10]),
api_port='')
request.addfinalizer(prov.delete_if_exists)
if version.current_version() < "5.5":
prov.create()
else:
        # It must raise an exception because the page stays on the form
with error.expected(FlashMessageException):
prov.create()
assert prov.properties_form.api_port.angular_help_block == "Required"
@pytest.mark.tier(3)
def test_user_id_max_character_validation():
cred = Credential(principal=fauxfactory.gen_alphanumeric(51))
discover(cred, d_type="Amazon")
@pytest.mark.tier(3)
def test_password_max_character_validation():
password = fauxfactory.gen_alphanumeric(51)
cred = Credential(
principal=fauxfactory.gen_alphanumeric(5),
secret=password,
verify_secret=password)
discover(cred, d_type="Amazon")
@pytest.mark.tier(3)
def test_name_max_character_validation(request):
"""Test to validate max character for name field"""
prov = EC2Provider(
name=fauxfactory.gen_alphanumeric(255),
region='us-east-1')
request.addfinalizer(prov.delete_if_exists)
prov.create()
@pytest.mark.tier(3)
def test_hostname_max_character_validation(request):
"""Test to validate max character for hostname field"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=fauxfactory.gen_alphanumeric(255),
ip_address=fauxfactory.gen_ipaddr(prefix=[10]))
request.addfinalizer(prov.delete_if_exists)
prov.create()
@pytest.mark.tier(3)
def test_ip_max_valid_character_validation(request):
"""Test to validate max character for ip address field with valid ip address"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=fauxfactory.gen_alphanumeric(5),
ip_address=fauxfactory.gen_ipaddr(prefix=[10]))
request.addfinalizer(prov.delete_if_exists)
prov.create()
@pytest.mark.tier(3)
def test_ip_max_invalid_character_validation(request):
"""Test to validate max character for ip address field using random string"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=fauxfactory.gen_alphanumeric(5),
ip_address=fauxfactory.gen_alphanumeric(15))
request.addfinalizer(prov.delete_if_exists)
prov.create()
@pytest.mark.tier(3)
def test_api_port_max_character_validation(request):
"""Test to validate max character for api port field"""
prov = OpenStackProvider(
name=fauxfactory.gen_alphanumeric(5),
hostname=fauxfactory.gen_alphanumeric(5),
ip_address=fauxfactory.gen_ipaddr(prefix=[10]),
api_port=fauxfactory.gen_alphanumeric(15))
request.addfinalizer(prov.delete_if_exists)
prov.create()
@pytest.mark.tier(3)
@pytest.mark.uncollectif(lambda: version.current_version() < "5.5")
@pytest.mark.meta(blockers=[1278036])
def test_openstack_provider_has_api_version():
"""Check whether the Keystone API version field is present for Openstack."""
prov = Provider()
pytest.sel.force_navigate("clouds_provider_new")
fill(prop_region.properties_form, {"type_select": "OpenStack"})
pytest.sel.wait_for_ajax()
assert pytest.sel.is_displayed(
prov.properties_form.api_version), "API version select is not visible"
| gpl-2.0 | 2,334,746,530,047,233,000 | 32.704698 | 90 | 0.692055 | false |
ldamewood/figures | scripts/interpolate.py | 1 | 5456 | import numpy
__all__ = [
"interpolate",
]
class interpolate:
    # Convert two vectors into a normalized coordinate system via GS orthogonalization
@staticmethod
def plane_to_cs(cs):
# Normalize vectors
cs[0] = cs[0]/numpy.linalg.norm(cs[0])
cs[1] = cs[1]/numpy.linalg.norm(cs[1])
# Orthogonalize second vector to first
cs[1] = cs[1] - cs[0] * numpy.dot(cs[0],cs[1])
# Return array with third vector orthogonal to other two
return numpy.vstack([cs,numpy.cross(cs[0],cs[1])])
    # Generate a grid on a 2d plane in 3d space
@staticmethod
def grid_intersection(plane, res, dim):
# Create grid
x, y, z = numpy.mgrid[0:res[0],0:res[1],0.:1.]
# Center grid
x -= numpy.floor(res[0]/2); y -= numpy.floor(res[1]/2)
# Scale grid
x *= 1.*dim[0]/res[0]
y *= 1.*dim[1]/res[1]
# List of points in the grid
element = numpy.array([x.flatten(),y.flatten(),z.flatten()])
# Generate coordinate system
cs = interpolate.plane_to_cs(plane)
# Rotate points to plane cs
return x,y,numpy.dot(element.T,cs)
# Linearlly interpolate 3d periodic data on a plane grid
@staticmethod
def interpolate_plane(datain, cell=[[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]],
plane = [[1.,0.,0.],[0.,0.,1.]], center = [0.5,0.5,0.5],
dim = [1.,1.], res = [100.,100.]):
# Convert to numpy
cell = numpy.array(cell)
center = numpy.array(center)
plane = numpy.array(plane)
datain = numpy.array(datain)
# Define the cell size
boxsize = max(sum(abs(cell)))
# Generate grid in Cartesian coordinates
x,y,elements = interpolate.grid_intersection(plane,res,dim)
# Scale the coordinates to the size of the box
x *= boxsize; y *= boxsize; elements *= boxsize
# Rotate points to primitive cell coordinates
rr = numpy.linalg.solve(cell.T,elements.T)
# Add the center point to all elements
rr += numpy.array([center]).T.repeat(rr.shape[1],1)
# Interpolate the density on the plane
dataout = numpy.reshape(interpolate.pinterpn(datain,rr.T),res)
# Return x,y,z data
return x[:,:,0],y[:,:,0],dataout
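    # Illustrative usage sketch (not part of the original module); the data
    # array, plane and resolution below are made-up values:
    #
    #   density = numpy.random.rand(10, 10, 10)      # periodic 3d data
    #   x, y, z = interpolate.interpolate_plane(
    #       density,
    #       plane=[[1., 0., 0.], [0., 0., 1.]],      # cut along the xz-plane
    #       center=[0.5, 0.5, 0.5],
    #       res=[50, 50])
    #   # x, y and z are 50x50 arrays, e.g. for matplotlib's pcolormesh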
# Interpolate regularly spaced periodic nd data at an arbitrary point.
@staticmethod
def pinterpn(datain, rr):
# Grid dimensions. e.g. [100,100,100]
grid = datain.shape
# Number of grid dimensions. e.g. 3
dim = len(grid)
# Number of points to interpolate. e.g. rr is 100x3
nelem = rr.shape[0]
# Dimension of points should agree with grid dimension.
assert rr.shape[1] == dim
# Force rr to be between 0 and 1
rr = numpy.mod(numpy.mod(rr, 1) + 1, 1)
# allocate space for the results
        data = numpy.zeros((nelem, 1), dtype=datain.dtype)
# dimmatrix is a nelem list of the grid dimentions.
# Ex: [[100,100,100],
# [100,100,100],
# ...
# [100,100,100]]
dimmatrix = numpy.tile(grid, (nelem, 1))
# ir and pr are zero-indexed integers that define two opposite corners of an
# n-dimensional box with density points defined at all of the corners given
# by datain. First, scale rr points to the size of the grid and then round
# to smallest integer to define the "lower corner".
ir = numpy.fix(numpy.dot(rr, numpy.diag(grid)))
# Find the "higher corner" by adding 1 to each lower corner and then wrapping
# the edges back to zero
pr = numpy.mod(ir + 1, dimmatrix)
# Check if any upper corners are on the boundary,
idx = (pr == dimmatrix)
# and wrap the boundary points back to zero.
        pr[idx] -= dimmatrix[idx]
# xr is a vector from the lower corner of a box to the position of
# the interpolation point.
xr = numpy.dot(rr, numpy.diag(grid)) - ir
# Iterator over the 2^d corners of each box
corners = range(2 ** dim)
# Iterator over the dimensions of the space
dims = range(dim)
# Distance to nearest edge in each dimension. Initialize to zero
r = dim * [0]
# Lower and upper corners
ir = numpy.array([ir, pr])
# Distance weight factors on lower and upper corners
xr = numpy.array([1 - xr, xr])
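        # e.g. in 1d the interpolated value is (1 - x)*f[i] + x*f[i + 1];
        # the nested loops below build the same product of corner weights
        # in n dimensions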
# Loop over each position
for j in range(nelem):
# Initialize density value to zero
denval = 0
# Loop over each corner
# Add up the contribution from each corner weighted
# by the distance to that corner
for corner in corners:
x = 1
# loop over each dimension
for dim in dims:
# determine if this corner is low or high,
lohi = corner % 2
# and remove last bit.
                    corner //= 2
# distance weight factor
x *= xr[lohi, j, dim]
# nearest edge
r[dim] = ir[lohi, j, dim]
denval += x * datain[tuple(r)]
data[j] = denval
        return data
| mit | -1,390,636,003,431,998,700 | 37.978571 | 86 | 0.548021 | false |
tobymccann/flask-base | app/tools/views.py | 1 | 27628 | import os
from flask import abort, flash, redirect, render_template, url_for, request, jsonify, make_response, send_file
from flask_login import login_required
from flask_sqlalchemy import SQLAlchemy
import csv
import logging
import requests
import operator
import re
import nltk
from nltk.corpus import stopwords
from collections import Counter
from bs4 import BeautifulSoup
import io
from io import BytesIO
import time
import zipfile
from .. import db, config
from ..models import ConfigTemplate, Project, Result, TemplateValueSet, TemplateVariable  # Result is used by the test() view below
from sqlalchemy.exc import IntegrityError
from .forms import ConfigTemplateForm, EditConfigTemplateValuesForm, TemplateValueSetForm, TemplateVariableForm, TestForm
from ..utils.appliance import get_local_ip_addresses, verify_appliance_status
from ..utils.export import get_appliance_ftp_password
from .. import celery
from . import tools
basedir = os.path.abspath(os.path.dirname(__file__))
logger = logging.getLogger()
@tools.route('/')
def index():
"""Tool dashboard page."""
return render_template('tools/base.html')
@tools.route("/test", methods=["GET", "POST"])
def test():
errors = []
    results = []
form = TestForm()
if form.validate_on_submit():
# get url the user has entered
try:
url = form.url.data
r = requests.get(url)
print(r.text)
        except Exception:
errors.append(
"Unable to get URL. Please make sure it is valid and try again."
)
return render_template("tools/test.html", form=form, errors=errors)
if r:
# text processing
raw = BeautifulSoup(r.text, 'html.parser').get_text()
nltk.data.path.append(basedir + '/nltk_data') # set the path
tokens = nltk.word_tokenize(raw)
text = nltk.Text(tokens)
# remove punctuation, count raw words
nonPunct = re.compile('.*[A-Za-z].*')
raw_words = [w for w in text if nonPunct.match(w)]
raw_word_count = Counter(raw_words)
# stop words
no_stop_words = [w for w in raw_words if w.lower() not in stopwords.words('english')]
no_stop_words_count = Counter(no_stop_words)
# save the results
results = sorted(
no_stop_words_count.items(),
key=operator.itemgetter(1),
reverse=True
)[:10]
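            # e.g. results is now a list like [(u'flask', 12), (u'python', 9), ...]
            # (illustrative words and counts only)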
try:
result = Result(
url=url,
result_all=raw_word_count,
result_no_stop_words=no_stop_words_count
)
db.session.add(result)
db.session.commit()
            except Exception:
                errors.append("Unable to add item to database.")
return render_template("tools/test.html", form=form, errors=errors, results=results)
@tools.route("/projects/<int:project_id>/template/<int:config_template_id>")
def view_config_template(project_id, config_template_id):
"""read-only view of a single Config Template
:param project_id:
:param config_template_id:
:return:
"""
parent_project = Project.query.filter(Project.id == project_id).first_or_404()
return render_template(
"config_template/view_config_template.html",
project=parent_project,
config_template=ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
)
@tools.route("/projects/<int:project_id>/configtemplate/add", methods=["GET", "POST"])
def add_config_template(project_id):
"""add a new Config Template
:param project_id:
:return:
"""
parent_project = Project.query.filter(Project.id == project_id).first_or_404()
form = ConfigTemplateForm(request.form)
if form.validate_on_submit():
try:
config_template = ConfigTemplate(name="", project=parent_project)
config_template.name = form.name.data
config_template.template_content = form.template_content.data
config_template.project = parent_project
db.session.add(config_template)
db.session.commit()
flash("Config template <strong>%s</strong> successful created" % config_template.name, "success")
return redirect(
url_for(
"view_config_template",
project_id=project_id,
config_template_id=config_template.id
)
)
except IntegrityError as ex:
if "UNIQUE constraint failed" in str(ex):
msg = "Config Template name already in use, please use another one"
else:
msg = "Config template was not created (unknown error, see log for details)"
logger.error(msg, exc_info=True)
flash(msg, "error")
db.session.rollback()
except Exception:
msg = "Config template was not created (unknown error, see log for details)"
logger.error(msg, exc_info=True)
flash(msg, "error")
return render_template(
"config_template/add_config_template.html",
project_id=project_id,
project=parent_project,
form=form
)
@tools.route("/projects/<int:project_id>/configtemplate/<int:config_template_id>/edit")
def edit_config_template(project_id, config_template_id):
"""edit a Config Template
:param project_id:
:param config_template_id:
:return:
"""
parent_project = Project.query.filter(Project.id == project_id).first_or_404()
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
form = ConfigTemplateForm(request.form, config_template)
if form.validate_on_submit():
try:
if form.template_content.data != config_template.template_content:
flash("Config Template content changed, all Template Value Sets are deleted.", "warning")
config_template.name = form.name.data
config_template.template_content = form.template_content.data
config_template.project = parent_project
db.session.add(config_template)
db.session.commit()
flash("Config template <strong>%s</strong> successful saved" % config_template.name, "success")
return redirect(
url_for(
"view_config_template",
project_id=project_id,
config_template_id=config_template.id
)
)
except IntegrityError as ex:
if "UNIQUE constraint failed" in str(ex):
msg = "Config Template name already in use, please use another one"
else:
msg = "Config template was not created (unknown error, see log for details)"
logger.error(msg, exc_info=True)
flash(msg, "error")
db.session.rollback()
except Exception:
msg = "Config template was not created (unknown error, see log for details)"
logger.error(msg, exc_info=True)
flash(msg, "error")
return render_template(
"config_template/edit_config_template.html",
project_id=project_id,
config_template=config_template,
project=parent_project,
form=form
)
@tools.route(
"/projects/<int:project_id>/configtemplate/<int:config_template_id>/edit_all",
methods=["GET", "POST"]
)
def edit_all_config_template_values(project_id, config_template_id):
"""edit all Config Template Values based on a CSV textarea
:param project_id:
:param config_template_id:
:return:
"""
Project.query.filter(Project.id == project_id).first_or_404()
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
form = EditConfigTemplateValuesForm(request.form, config_template)
# hostname is defined in every Template Value Set
variable_list = [
"hostname"
]
for var in config_template.variables.all():
# hostname must be located as first entry
if var.var_name != "hostname":
variable_list.append(var.var_name)
if form.validate_on_submit():
# update values from the CSV file
reader = csv.DictReader(io.StringIO(form.csv_content.data), delimiter=";")
csv_lines = form.csv_content.data.splitlines()
counter = 0
for line in reader:
if "hostname" in line.keys():
if line["hostname"] is None:
flash("Invalid Hostname for Template Value Set: '%s'" % csv_lines[counter], "error")
elif line["hostname"] == "":
flash("No Hostname defined for Template Value Set: '{0}'"
.format(form.csv_content.data.splitlines()[counter]), "error")
else:
# try to access an existing TemplateValueSet
tvs = TemplateValueSet.query.filter(
TemplateValueSet.config_template_id == config_template_id,
TemplateValueSet.hostname == line["hostname"]
).first()
if not tvs:
# element not found, create and add a flush message
tvs = TemplateValueSet(hostname=line["hostname"], config_template=config_template)
flash("Create new Template Value Set for hostname <strong>{0}</strong>"
.format(line["hostname"]), "success")
# update variable values
for var in variable_list:
if var in line.keys():
if line[var]:
tvs.update_variable_value(var_name=var, value=line[var])
else:
tvs.update_variable_value(var_name=var, value="")
logger.debug("Cannot find value for variable %s for TVS "
"object %s using CSV line %s" % (var, repr(tvs), line))
else:
# hostname not defined, no creation possible
flash("No hostname in CSV line found: %s" % line, "warning")
counter += 1
return redirect(url_for("view_config_template", project_id=project_id, config_template_id=config_template_id))
else:
form.csv_content.data = ";".join(variable_list)
for tvs in config_template.template_value_sets.all():
values = []
for var in variable_list:
values.append(tvs.get_template_value_by_name_as_string(var))
form.csv_content.data += "\n" + ";".join(values)
return render_template(
"config_template/edit_all_config_template_values.html",
project_id=project_id,
config_template=config_template,
form=form
)
@tools.route("/projects/<int:project_id>/configtemplate/<int:config_template_id>/delete", methods=["GET", "POST"])
def delete_config_template(project_id, config_template_id):
"""delete the Config Template
:param project_id:
:param config_template_id:
:return:
"""
Project.query.filter(Project.id == project_id).first_or_404()
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
if request.method == "POST":
project_id = config_template.project.id
try:
db.session.delete(config_template)
db.session.commit()
except Exception:
msg = "Config Template <strong>{0}</strong> was not deleted (unknown error, see log for details)".format(
config_template.name)
flash(msg, "error")
logger.error(msg, exc_info=True)
db.session.rollback()
flash("Config Template %s successful deleted" % config_template.name, "success")
return redirect(url_for("view_project", project_id=project_id))
return render_template(
"config_template/delete_config_template.html",
project_id=project_id,
config_template=config_template
)
@tools.route("/projects/<int:project_id>/template/<int:config_template_id>/export")
def export_configurations(project_id, config_template_id):
"""
Export the configuration to various locations
:param project_id:
:param config_template_id:
:return:
"""
project = Project.query.filter(Project.id == project_id).first_or_404()
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
return render_template(
"config_template/export_configurations.html",
project_id=project_id,
project=project,
config_template=config_template,
ftp_password=get_appliance_ftp_password(),
ip_addresses=get_local_ip_addresses(),
appliance_status=verify_appliance_status()
)
@tools.route("/projects/template/<int:config_template_id>/valueset/<int:template_value_set_id>/config")
def view_config(config_template_id, template_value_set_id):
"""view the resulting configuration
:param config_template_id:
:param template_value_set_id:
:return:
"""
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
template_value_set = TemplateValueSet.query.filter(TemplateValueSet.id == template_value_set_id).first_or_404()
# generate configuration
config_result = template_value_set.get_configuration_result()
return render_template(
"configuration/view_configuration.html",
config_template=config_template,
template_value_set=template_value_set,
ftp_password=get_appliance_ftp_password(),
ip_addresses=get_local_ip_addresses(),
project=config_template.project,
config_result=config_result
)
@tools.route("/projects/template/<int:config_template_id>/valueset/<int:template_value_set_id>/config_download")
def download_config(config_template_id, template_value_set_id):
"""download the resulting configuration
:param config_template_id:
:param template_value_set_id:
:return:
"""
ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
template_value_set = TemplateValueSet.query.filter(TemplateValueSet.id == template_value_set_id).first_or_404()
# generate configuration
config_result = template_value_set.get_configuration_result()
response = make_response(config_result)
response.headers["Content-Disposition"] = "attachment; filename=%s_config.txt" % template_value_set.hostname
return response
@tools.route("/projects/<int:project_id>/template/<int:config_template_id>/download_configs")
def download_all_config_as_zip(project_id, config_template_id):
"""generate all configuration files and download them as a ZIP archive
:param project_id:
:param config_template_id:
:return:
"""
Project.query.filter(Project.id == project_id).first_or_404()
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
# generate ZIP archive with all configurations
memory_file = BytesIO()
with zipfile.ZipFile(memory_file, 'w') as zf:
for values in config_template.template_value_sets.all():
data = zipfile.ZipInfo(values.hostname + "_config.txt")
data.date_time = time.localtime(time.time())[:6]
data.compress_type = zipfile.ZIP_DEFLATED
zf.writestr(data, values.get_configuration_result())
memory_file.seek(0)
return send_file(memory_file, attachment_filename=config_template.name + "_configs.zip", as_attachment=True)
@tools.route('/shell')
@login_required
def shell():
"""embedded shell in a box view
:return:
"""
return render_template("tools/shell.html")
@tools.route("/how-to-use")
@login_required
def how_to_use():
"""How to use page
:return:
"""
return render_template("tools/how_to_use.html")
@tools.route("/template-syntax")
@login_required
def template_syntax():
"""Templating 101 page
:return:
"""
return render_template("tools/template_syntax.html")
@tools.route("/appliance/service_status")
@login_required
def appliance_status_json():
"""
Appliance Status JSON call
:return:
"""
return jsonify(verify_appliance_status())
@tools.route("/debug/list_ftp_directory")
@login_required
def list_ftp_directory():
"""
debug view to create a tree structure of the FTP directory
:return:
"""
directory_list_html = ""
for root, dirs, files in os.walk(config["FTP_DIRECTORY"]):
directory_list_html += "<p>%s</p>\n<ul>\n" % root[len(config["FTP_DIRECTORY"]):]
for file in files:
directory_list_html += "<li>%s</li>\n" % file
directory_list_html += "</ul>\n"
return "<html><body>%s</body></html>" % directory_list_html
@tools.route("/debug/list_tftp_directory")
@login_required
def list_tftp_directory():
"""
debug view to create a tree structure of the TFTP directory
:return:
"""
directory_list_html = ""
for root, dirs, files in os.walk(config["TFTP_DIRECTORY"]):
directory_list_html += "<p>%s</p>\n<ul>\n" % root[len(config["TFTP_DIRECTORY"]):]
for file in files:
directory_list_html += "<li>%s</li>\n" % file
directory_list_html += "</ul>\n"
return "<html><body>%s</body></html>" % directory_list_html
@tools.route('/task/<task_id>')
def task_status_json(task_id):
"""
JSON API endpoint to view the state of a task
:param task_id:
:return:
"""
task = celery.AsyncResult(task_id)
if task.state == 'PENDING':
response = {
'state': task.state,
'status': 'Pending...'
}
elif task.state != 'FAILURE':
response = {
'state': task.state,
'status': task.info.get('status', '')
}
if 'error' in task.info:
response['error'] = task.info['error']
if 'result' in task.info:
response['result'] = task.info['result']
else:
# something went wrong in the background job
response = {
'state': task.state,
'status': str(task.info), # this is the exception raised
}
# update the response with the result of the task
response["data"] = task.info
return jsonify(response)
@tools.route("/projects/template/<int:config_template_id>/valueset/<int:template_value_set_id>/")
def view_template_value_set(config_template_id, template_value_set_id):
"""view a single Template Value Set
:param config_template_id:
:param template_value_set_id:
:return:
"""
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
return render_template(
"template_value_set/view_template_value_set.html",
config_template=config_template,
project=config_template.project,
template_value_set=TemplateValueSet.query.filter(TemplateValueSet.id == template_value_set_id).first_or_404()
)
@tools.route("/projects/template/<int:config_template_id>/valueset/add", methods=["GET", "POST"])
def add_template_value_set(config_template_id):
"""add a new Template Value Set
:param config_template_id:
:return:
"""
parent_config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
form = TemplateValueSetForm(request.form)
if form.validate_on_submit():
try:
template_value_set = TemplateValueSet(hostname="", config_template=parent_config_template)
template_value_set.hostname = form.hostname.data
template_value_set.config_template = parent_config_template
template_value_set.copy_variables_from_config_template()
db.session.add(template_value_set)
db.session.commit()
flash("Template Value Set successful created", "success")
return redirect(url_for(
"edit_template_value_set",
template_value_set_id=template_value_set.id,
config_template_id=parent_config_template.id
))
except IntegrityError as ex:
if "UNIQUE constraint failed" in str(ex):
msg = "Template Value Set hostname already in use, please use another one"
else:
msg = "Template Value Set was not created (unknown error)"
flash(msg, "error")
logger.error(msg, exc_info=True)
db.session.rollback()
except Exception:
msg = "Template Value Set was not created (unknown error)"
logger.error(msg, exc_info=True)
flash(msg, "error")
db.session.rollback()
return render_template(
"template_value_set/add_template_value_set.html",
config_template=parent_config_template,
project=parent_config_template.project,
form=form
)
@tools.route(
"/projects/template/<int:config_template_id>/valueset/<int:template_value_set_id>/edit",
methods=["GET", "POST"]
)
def edit_template_value_set(config_template_id, template_value_set_id):
"""edit a Template Value Set
:param config_template_id:
:param template_value_set_id:
:return:
"""
parent_config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
template_value_set = TemplateValueSet.query.filter(TemplateValueSet.id == template_value_set_id).first_or_404()
form = TemplateValueSetForm(request.form, template_value_set)
if form.validate_on_submit():
try:
template_value_set.hostname = form.hostname.data
template_value_set.config_template = parent_config_template
template_value_set.copy_variables_from_config_template()
# update variable data
for key in template_value_set.get_template_value_names():
template_value_set.update_variable_value(var_name=key, value=request.form["edit_" + key])
# hostname is always the same as the name of the template value set
template_value_set.update_variable_value(var_name="hostname", value=template_value_set.hostname)
db.session.add(template_value_set)
db.session.commit()
flash("Template Value Set successful saved", "success")
return redirect(url_for(
"view_config_template",
project_id=parent_config_template.project.id,
config_template_id=parent_config_template.id
))
except IntegrityError as ex:
if "UNIQUE constraint failed" in str(ex):
msg = "Template Value Set hostname already in use, please use another one"
else:
msg = "Template Value Set was not created (unknown error)"
flash(msg, "error")
logger.error(msg, exc_info=True)
db.session.rollback()
except Exception:
msg = "Template Value Set was not created (unknown error)"
logger.error(msg, exc_info=True)
flash(msg, "error")
db.session.rollback()
return render_template(
"template_value_set/edit_template_value_set.html",
config_template=parent_config_template,
template_value_set=template_value_set,
project=parent_config_template.project,
form=form
)
@tools.route(
"/projects/template/<int:config_template_id>/valueset/<int:template_value_set_id>/delete",
methods=["GET", "POST"]
)
def delete_template_value_set(config_template_id, template_value_set_id):
"""delete the Config Template
:param config_template_id:
:param template_value_set_id:
:return:
"""
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
template_value_set = TemplateValueSet.query.filter(TemplateValueSet.id == template_value_set_id).first_or_404()
if request.method == "POST":
# drop record and add message
try:
db.session.delete(template_value_set)
db.session.commit()
        except Exception:
            flash("Template Value Set <strong>%s</strong> was not deleted" % template_value_set.hostname, "error")
        flash("Template Value Set <strong>%s</strong> successfully deleted" % template_value_set.hostname, "success")
return redirect(
url_for(
"view_config_template",
project_id=config_template.project.id,
config_template_id=template_value_set.config_template.id
)
)
return render_template(
"template_value_set/delete_template_value_set.html",
template_value_set=template_value_set,
project=config_template.project
)
@tools.route("/projects/template/<int:config_template_id>/variable/<int:template_variable_id>/edit", methods=["GET",
"POST"])
def edit_template_variable(config_template_id, template_variable_id):
"""edit a Template Variable
:param config_template_id:
:param template_variable_id:
:return:
"""
config_template = ConfigTemplate.query.filter(ConfigTemplate.id == config_template_id).first_or_404()
template_variable = TemplateVariable.query.filter(TemplateVariable.id == template_variable_id).first_or_404()
# edit of the hostname is not permitted
if template_variable.var_name == "hostname":
abort(403)
old_var_name = template_variable.var_name
form = TemplateVariableForm(request.form, template_variable)
if form.validate_on_submit():
try:
if old_var_name != form.var_name_slug.data:
config_template.rename_variable(old_var_name, new_name=form.var_name_slug.data)
# update values from form
template_variable.description = form.description.data
template_variable.config_template = config_template
db.session.add(template_variable)
db.session.commit()
flash("Template Variable successful saved", "success")
return redirect(
url_for(
"view_config_template",
project_id=config_template.project.id,
config_template_id=config_template.id
)
)
except IntegrityError as ex:
if "UNIQUE constraint failed" in str(ex):
msg = "Template variable name already in use, please use another one"
else:
msg = "Template variable was not created (unknown error, see log for details)"
logger.error(msg, exc_info=True)
db.session.rollback()
except Exception:
msg = "Template variable was not created (unknown error, see log for details)"
logger.error(msg, exc_info=True)
flash(msg, "error")
return render_template(
"template_variable/edit_template_variable.html",
config_template=config_template,
template_variable=template_variable,
project=config_template.project,
form=form
)
| mit | -7,732,412,634,946,976,000 | 34.973958 | 121 | 0.617924 | false |
davidhuser/dhis2-pocket-knife | tests/test_share.py | 1 | 13155 | from collections import namedtuple
import pytest
from pk.share import (
Permission,
UserGroupAccess,
ShareableObject,
set_delimiter,
UserGroupAccessMerge,
merge,
PUBLIC_ACCESS_INHERITED,
validate_args,
validate_data_access
)
from pk.common.exceptions import PKClientException
class TestPermission(object):
def test_symbolic_notation_unique(self):
options = ('rw', 'r-', '--')
assert len(Permission.symbolic_notation) == len(set(Permission.symbolic_notation))
assert(len(options)**2 == len(Permission.symbolic_notation))
def test_permission_not_equal(self):
assert Permission(metadata='readwrite', data=None) != Permission(metadata=None, data=None)
def test_permission_init(self):
p = Permission(metadata='rw', data='r-')
assert p.metadata == 'rw'
assert p.data == 'r-'
@pytest.mark.parametrize('public_args, metadata, data, symbol', [
([['readwrite', 'readonly']], 'readwrite', 'readonly', 'rwr-----'),
([['readwrite', None]], 'readwrite', None, 'rw------'),
([['readwrite']], 'readwrite', None, 'rw------'),
([['readonly', 'readwrite']], 'readonly', 'readwrite', 'r-rw----'),
])
def test_permission_from_public_args(self, public_args, metadata, data, symbol):
p = Permission.from_public_args(public_args)
assert p.metadata == metadata
assert p.data == data
assert p.to_symbol() == symbol
def test_permission_from_public_args_inherited(self):
"""If not specified, assume it's inherited from server definition"""
assert Permission.from_public_args(None) == PUBLIC_ACCESS_INHERITED
@pytest.mark.parametrize('symbol, metadata, data', [
('rwrw----', 'readwrite', 'readwrite'),
('rwr-----', 'readwrite', 'readonly'),
('rw------', 'readwrite', None),
('r-rw----', 'readonly', 'readwrite'),
('r-r-----', 'readonly', 'readonly'),
('r-------', 'readonly', None),
('--rw----', None, 'readwrite'),
('--r-----', None, 'readonly'),
('--------', None, None)
])
def test_permission_from_symbol(self, symbol, metadata, data):
p = Permission.from_symbol(symbol)
assert p.metadata == metadata
assert p.data == data
@pytest.mark.parametrize('groups, metadata, data', [
(['aFilter', 'readwrite', 'readwrite'], 'readwrite', 'readwrite'),
(['aFilter', 'readwrite', 'readonly'], 'readwrite', 'readonly'),
(['aFilter', 'readwrite'], 'readwrite', None),
(['aFilter', 'readonly', 'readwrite'], 'readonly', 'readwrite'),
(['aFilter', 'readonly', 'readonly'], 'readonly', 'readonly'),
(['aFilter', 'readonly', 'none'], 'readonly', 'none'),
(['aFilter', 'none', 'readwrite'], 'none', 'readwrite'),
(['aFilter', 'none', 'readonly'], 'none', 'readonly'),
(['aFilter', 'none', 'none'], 'none', 'none'),
(['aFilter', 'none'], 'none', None)
])
def test_permission_from_group_args(self, groups, metadata, data):
p = Permission.from_group_args(groups)
assert p.metadata == metadata
assert p.data == data
def test_all_equal(self):
p1 = Permission(metadata='readwrite', data='readonly')
p2 = Permission.from_public_args([['readwrite', 'readonly']])
p3 = Permission.from_symbol('rwr-----')
p4 = Permission.from_group_args(['aFilter', 'readwrite', 'readonly'])
assert p1 == p2
assert p3 == p4
assert p1 == p4
assert p1.to_symbol() == p2.to_symbol()
assert p3.to_symbol() == p4.to_symbol()
assert p1.to_symbol() == p4.to_symbol()
def test_print(self):
assert str(Permission(metadata=None, data=None)) == '[metadata:none]'
assert str(Permission(metadata=None, data='readwrite')) == '[metadata:none] [data:readwrite]'
assert repr(Permission(metadata=None, data='readwrite')) == 'None readwrite'
class TestUserGroupAccess(object):
@pytest.mark.parametrize('access, permission', [
('rw------', Permission(metadata='readwrite', data='none')),
('rw------', Permission(metadata='readwrite', data=None)),
])
def test_from_dict(self, access, permission):
data = {'id': 'abc', 'access': access}
uga = UserGroupAccess.from_dict(data)
assert permission.to_symbol() == access
assert uga.to_json() == {"id": "abc", "access": permission.to_symbol()}
@pytest.mark.parametrize('access', [
'ab-------', 'readwrite', 123, None
])
def test_from_dict_invalid_access(self, access):
data = {'id': 'abc', 'access': access}
uga = UserGroupAccess.from_dict(data)
assert uga.permission == Permission(None, None)
assert uga.to_json() == {"id": "abc", "access": "--------"}
def test_inequality(self):
uga1 = UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data='readwrite'))
uga2 = UserGroupAccess(uid='abc', permission=Permission(metadata='readonly', data='readwrite'))
uga3 = UserGroupAccess(uid='cde', permission=Permission(metadata='readwrite', data='readwrite'))
assert uga1 != uga2
assert uga1 != uga3
class TestShareableObject(object):
def test_equality(self):
s1 = ShareableObject(
'dataElements',
'abc',
'DE01',
'rw------',
usergroup_accesses={
UserGroupAccess(uid='123', permission='rwr-----'),
UserGroupAccess(uid='456', permission='rwr-----'),
}
)
s2 = ShareableObject(
'dataElements',
'abc',
'DE01',
'rw------',
usergroup_accesses={
UserGroupAccess(uid='456', permission='rwr-----'), # switched out
UserGroupAccess(uid='123', permission='rwr-----'),
}
)
assert s1 == s2
s3 = ShareableObject(
'dataElements',
'abc',
'DE01',
'rw------',
usergroup_accesses={
UserGroupAccess(uid='456', permission='rwr-----'),
}
)
assert s1 != s3
def test_identifier(self):
s1 = ShareableObject(
obj_type='dataElements',
uid='abc',
name='DE01',
public_access='rw------'
)
assert s1.identifier() == u"'DE01'"
s2 = ShareableObject(
obj_type='dataElements',
uid='abc',
name=None,
code='CODE3000',
public_access='rw------'
)
assert s2.identifier() == u"'CODE3000'"
s3 = ShareableObject(
obj_type='dataElements',
uid='abc',
name=None,
public_access='rw------'
)
assert s3.identifier() == u""
@pytest.mark.parametrize('version, argument, expected', [
(22, 'name:like:ABC', ('&&', 'AND')),
(22, 'name:like:ABC&&code:eq:XYZ', ('&&', 'AND')),
(25, None, (None, None)),
(29, 'name:like:ABC||name:like:CDE', ('||', 'OR'))
])
def test_set_delimiter(version, argument, expected):
assert set_delimiter(version, argument) == expected
@pytest.mark.parametrize('version, argument', [
(29, 'name:^like:ABC'),
(29, 'name:like:||&&'),
(29, 'name:like:ABC||name:like:CDE&&name:like:XYZ'),
(24, 'name:like:ABC||name:like:XYZ')
])
def test_set_delimiter_raises(version, argument):
with pytest.raises(PKClientException):
set_delimiter(version, argument)
class TestValidateArgs(object):
Arguments = namedtuple('args', 'public_access extend groups')
def test_extend_usergroups_required(self):
api_version = 32
args = self.Arguments(public_access=None, extend=True, groups=None)
with pytest.raises(PKClientException):
validate_args(args, api_version)
def test_extend_usergroups_not_required_with_publicaccess(self):
api_version = 32
args = self.Arguments(public_access='readwrite', extend=True, groups=None)
validate_args(args, api_version)
def test_public_access_required_when_none(self):
api_version = 32
args = self.Arguments(public_access=None, extend=False, groups=None)
with pytest.raises(PKClientException):
validate_args(args, api_version)
def test_public_access_incorrect_count(self):
api_version = 32
args = self.Arguments(public_access=['readwrite', 'none', 'readonly'], extend=False, groups=None)
with pytest.raises(PKClientException):
validate_args(args, api_version)
class TestValidateDataAccess(object):
pass
class TestUserGroupAccessMerge(object):
def test_usergroupaccessmerge_sets(self):
ug1 = UserGroupAccessMerge(uid='abc', permission='rw------')
ug2 = UserGroupAccessMerge(uid='abc', permission='rw------')
ug3 = UserGroupAccessMerge(uid='abc', permission='r------')
ug4 = UserGroupAccessMerge(uid='cde', permission='r------')
ug5 = UserGroupAccessMerge(uid='abc ', permission='rw------')
ug_set = set()
ug_set.add(ug1)
assert ug2 in ug_set
assert ug3 in ug_set
assert ug4 not in ug_set
assert ug5 not in ug_set
def test_inequality(self):
uga1 = UserGroupAccessMerge(uid='abc', permission=Permission(metadata='readwrite', data='readwrite'))
uga2 = UserGroupAccessMerge(uid='cde', permission=Permission(metadata='readwrite', data='readwrite'))
assert uga1 != uga2
class TestMerge(object):
@pytest.mark.parametrize('server_uga, local_uga, expected', [
[ # user group accesses is retained
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
}
],
[ # user group accesses have higher priority when supplied to what is already on the server
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data='readonly'))
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readonly', data=None)),
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readonly', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data='readonly'))
}
],
[ # user group accesses are not overwritten with NONE
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata=None, data=None))
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
}
],
[ # no user groups present on server
{},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
},
{
UserGroupAccess(uid='abc', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
}
],
[ # no user groups present on server nor specified
{},
{},
{}
],
[ # ordering not important
{},
{
UserGroupAccess(uid='abc', permission=Permission(metadata=None, data='readwrite')),
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None))
},
{
UserGroupAccess(uid='def', permission=Permission(metadata='readwrite', data=None)),
UserGroupAccess(uid='abc', permission=Permission(metadata=None, data='readwrite'))
}
]
])
def test_merge(self, server_uga, local_uga, expected):
output = merge(server_uga, local_uga)
assert output == expected
| mit | 9,046,889,693,602,415,000 | 37.691176 | 109 | 0.572862 | false |
sileht/deb-openstack-keystone | keystone/identity/core.py | 1 | 21922 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Main entry point into the Identity service."""
import uuid
import urllib
import urlparse
from keystone import config
from keystone import exception
from keystone import policy
from keystone import token
from keystone.common import manager
from keystone.common import wsgi
CONF = config.CONF
class Manager(manager.Manager):
"""Default pivot point for the Identity backend.
See :mod:`keystone.common.manager.Manager` for more details on how this
dynamically calls the backend.
"""
def __init__(self):
super(Manager, self).__init__(CONF.identity.driver)
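# Illustrative only: attribute access on the Manager proxies through to the
# configured driver, so a caller with a backend implementing Driver can write:
#
#   identity_api = Manager()
#   user_ref = identity_api.get_user(context, user_id)  # forwarded to the driver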
class Driver(object):
"""Interface description for an Identity driver."""
def authenticate(self, user_id=None, tenant_id=None, password=None):
"""Authenticate a given user, tenant and password.
Returns: (user, tenant, metadata).
"""
raise exception.NotImplemented()
def get_tenant(self, tenant_id):
"""Get a tenant by id.
Returns: tenant_ref or None.
"""
raise exception.NotImplemented()
def get_tenant_by_name(self, tenant_name):
"""Get a tenant by name.
Returns: tenant_ref or None.
"""
raise exception.NotImplemented()
def get_user(self, user_id):
"""Get a user by id.
Returns: user_ref or None.
"""
raise exception.NotImplemented()
def get_user_by_name(self, user_name):
"""Get a user by name.
Returns: user_ref or None.
"""
raise exception.NotImplemented()
def get_role(self, role_id):
"""Get a role by id.
Returns: role_ref or None.
"""
raise exception.NotImplemented()
def list_users(self):
"""List all users in the system.
NOTE(termie): I'd prefer if this listed only the users for a given
tenant.
Returns: a list of user_refs or an empty list.
"""
raise exception.NotImplemented()
def list_roles(self):
"""List all roles in the system.
Returns: a list of role_refs or an empty list.
"""
raise exception.NotImplemented()
# NOTE(termie): seven calls below should probably be exposed by the api
# more clearly when the api redesign happens
def add_user_to_tenant(self, tenant_id, user_id):
raise exception.NotImplemented()
def remove_user_from_tenant(self, tenant_id, user_id):
raise exception.NotImplemented()
def get_all_tenants(self):
raise exception.NotImplemented()
def get_tenants_for_user(self, user_id):
"""Get the tenants associated with a given user.
Returns: a list of tenant ids.
"""
raise exception.NotImplemented()
def get_roles_for_user_and_tenant(self, user_id, tenant_id):
"""Get the roles associated with a user within given tenant.
Returns: a list of role ids.
"""
raise exception.NotImplemented()
def add_role_to_user_and_tenant(self, user_id, tenant_id, role_id):
"""Add a role to a user within given tenant."""
raise exception.NotImplemented()
def remove_role_from_user_and_tenant(self, user_id, tenant_id, role_id):
"""Remove a role from a user within given tenant."""
raise exception.NotImplemented()
# user crud
def create_user(self, user_id, user):
raise exception.NotImplemented()
def update_user(self, user_id, user):
raise exception.NotImplemented()
def delete_user(self, user_id):
raise exception.NotImplemented()
# tenant crud
def create_tenant(self, tenant_id, tenant):
raise exception.NotImplemented()
def update_tenant(self, tenant_id, tenant):
raise exception.NotImplemented()
def delete_tenant(self, tenant_id, tenant):
raise exception.NotImplemented()
# metadata crud
def get_metadata(self, user_id, tenant_id):
raise exception.NotImplemented()
def create_metadata(self, user_id, tenant_id, metadata):
raise exception.NotImplemented()
def update_metadata(self, user_id, tenant_id, metadata):
raise exception.NotImplemented()
def delete_metadata(self, user_id, tenant_id, metadata):
raise exception.NotImplemented()
# role crud
def create_role(self, role_id, role):
raise exception.NotImplemented()
def update_role(self, role_id, role):
raise exception.NotImplemented()
def delete_role(self, role_id):
raise exception.NotImplemented()
class PublicRouter(wsgi.ComposableRouter):
def add_routes(self, mapper):
tenant_controller = TenantController()
mapper.connect('/tenants',
controller=tenant_controller,
action='get_tenants_for_token',
conditions=dict(methods=['GET']))
class AdminRouter(wsgi.ComposableRouter):
def add_routes(self, mapper):
# Tenant Operations
tenant_controller = TenantController()
mapper.connect('/tenants',
controller=tenant_controller,
action='get_all_tenants',
conditions=dict(method=['GET']))
mapper.connect('/tenants/{tenant_id}',
controller=tenant_controller,
action='get_tenant',
conditions=dict(method=['GET']))
# User Operations
user_controller = UserController()
mapper.connect('/users/{user_id}',
controller=user_controller,
action='get_user',
conditions=dict(method=['GET']))
# Role Operations
roles_controller = RoleController()
mapper.connect('/tenants/{tenant_id}/users/{user_id}/roles',
controller=roles_controller,
action='get_user_roles',
conditions=dict(method=['GET']))
mapper.connect('/users/{user_id}/roles',
controller=user_controller,
action='get_user_roles',
conditions=dict(method=['GET']))
class TenantController(wsgi.Application):
def __init__(self):
self.identity_api = Manager()
self.policy_api = policy.Manager()
self.token_api = token.Manager()
super(TenantController, self).__init__()
def get_all_tenants(self, context, **kw):
"""Gets a list of all tenants for an admin user."""
self.assert_admin(context)
tenant_refs = self.identity_api.get_tenants(context)
params = {
'limit': context['query_string'].get('limit'),
'marker': context['query_string'].get('marker'),
}
return self._format_tenant_list(tenant_refs, **params)
def get_tenants_for_token(self, context, **kw):
"""Get valid tenants for token based on token used to authenticate.
Pulls the token from the context, validates it and gets the valid
tenants for the user in the token.
Doesn't care about token scopedness.
"""
try:
token_ref = self.token_api.get_token(context=context,
token_id=context['token_id'])
except exception.NotFound:
raise exception.Unauthorized()
user_ref = token_ref['user']
tenant_ids = self.identity_api.get_tenants_for_user(
context, user_ref['id'])
tenant_refs = []
for tenant_id in tenant_ids:
tenant_refs.append(self.identity_api.get_tenant(
context=context,
tenant_id=tenant_id))
params = {
'limit': context['query_string'].get('limit'),
'marker': context['query_string'].get('marker'),
}
return self._format_tenant_list(tenant_refs, **params)
def get_tenant(self, context, tenant_id):
# TODO(termie): this stuff should probably be moved to middleware
self.assert_admin(context)
tenant = self.identity_api.get_tenant(context, tenant_id)
if tenant is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
return {'tenant': tenant}
# CRUD Extension
def create_tenant(self, context, tenant):
tenant_ref = self._normalize_dict(tenant)
self.assert_admin(context)
        tenant_id = tenant_ref.get('id') or uuid.uuid4().hex
tenant_ref['id'] = tenant_id
tenant = self.identity_api.create_tenant(
context, tenant_id, tenant_ref)
return {'tenant': tenant}
def update_tenant(self, context, tenant_id, tenant):
self.assert_admin(context)
if self.identity_api.get_tenant(context, tenant_id) is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
tenant_ref = self.identity_api.update_tenant(
context, tenant_id, tenant)
return {'tenant': tenant_ref}
def delete_tenant(self, context, tenant_id, **kw):
self.assert_admin(context)
if self.identity_api.get_tenant(context, tenant_id) is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
self.identity_api.delete_tenant(context, tenant_id)
def get_tenant_users(self, context, tenant_id, **kw):
self.assert_admin(context)
if self.identity_api.get_tenant(context, tenant_id) is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
user_refs = self.identity_api.get_tenant_users(context, tenant_id)
return {'users': user_refs}
def _format_tenant_list(self, tenant_refs, **kwargs):
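        """Paginate: start after the tenant whose id matches `marker`
        (if given) and return at most `limit` entries."""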
marker = kwargs.get('marker')
page_idx = 0
if marker is not None:
for (marker_idx, tenant) in enumerate(tenant_refs):
if tenant['id'] == marker:
# we start pagination after the marker
page_idx = marker_idx + 1
break
else:
msg = 'Marker could not be found'
raise exception.ValidationError(message=msg)
limit = kwargs.get('limit')
if limit is not None:
try:
limit = int(limit)
if limit < 0:
raise AssertionError()
except (ValueError, AssertionError):
msg = 'Invalid limit value'
raise exception.ValidationError(message=msg)
        tenant_refs = tenant_refs[page_idx:]
        if limit is not None:
            # limit counts entries from the marker, not an absolute end index
            tenant_refs = tenant_refs[:limit]
for x in tenant_refs:
if 'enabled' not in x:
x['enabled'] = True
o = {'tenants': tenant_refs,
'tenants_links': []}
return o
class UserController(wsgi.Application):
def __init__(self):
self.identity_api = Manager()
self.policy_api = policy.Manager()
self.token_api = token.Manager()
super(UserController, self).__init__()
def get_user(self, context, user_id):
self.assert_admin(context)
user_ref = self.identity_api.get_user(context, user_id)
if not user_ref:
raise exception.UserNotFound(user_id=user_id)
return {'user': user_ref}
def get_users(self, context):
# NOTE(termie): i can't imagine that this really wants all the data
# about every single user in the system...
self.assert_admin(context)
user_refs = self.identity_api.list_users(context)
return {'users': user_refs}
# CRUD extension
def create_user(self, context, user):
user = self._normalize_dict(user)
self.assert_admin(context)
tenant_id = user.get('tenantId', None)
if (tenant_id is not None
and self.identity_api.get_tenant(context, tenant_id) is None):
raise exception.TenantNotFound(tenant_id=tenant_id)
user_id = uuid.uuid4().hex
user_ref = user.copy()
user_ref['id'] = user_id
new_user_ref = self.identity_api.create_user(
context, user_id, user_ref)
if tenant_id:
self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
return {'user': new_user_ref}
def update_user(self, context, user_id, user):
# NOTE(termie): this is really more of a patch than a put
self.assert_admin(context)
if self.identity_api.get_user(context, user_id) is None:
raise exception.UserNotFound(user_id=user_id)
user_ref = self.identity_api.update_user(context, user_id, user)
return {'user': user_ref}
def delete_user(self, context, user_id):
self.assert_admin(context)
if self.identity_api.get_user(context, user_id) is None:
raise exception.UserNotFound(user_id=user_id)
self.identity_api.delete_user(context, user_id)
def set_user_enabled(self, context, user_id, user):
return self.update_user(context, user_id, user)
def set_user_password(self, context, user_id, user):
return self.update_user(context, user_id, user)
def update_user_tenant(self, context, user_id, user):
"""Update the default tenant."""
# ensure that we're a member of that tenant
tenant_id = user.get('tenantId')
self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
return self.update_user(context, user_id, user)
class RoleController(wsgi.Application):
def __init__(self):
self.identity_api = Manager()
self.token_api = token.Manager()
self.policy_api = policy.Manager()
super(RoleController, self).__init__()
# COMPAT(essex-3)
def get_user_roles(self, context, user_id, tenant_id=None):
"""Get the roles for a user and tenant pair.
Since we're trying to ignore the idea of user-only roles we're
not implementing them in hopes that the idea will die off.
"""
if tenant_id is None:
raise exception.NotImplemented(message='User roles not supported: '
'tenant ID required')
user = self.identity_api.get_user(context, user_id)
if user is None:
raise exception.UserNotFound(user_id=user_id)
tenant = self.identity_api.get_tenant(context, tenant_id)
if tenant is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
roles = self.identity_api.get_roles_for_user_and_tenant(
context, user_id, tenant_id)
return {'roles': [self.identity_api.get_role(context, x)
for x in roles]}
# CRUD extension
def get_role(self, context, role_id):
self.assert_admin(context)
role_ref = self.identity_api.get_role(context, role_id)
if not role_ref:
raise exception.RoleNotFound(role_id=role_id)
return {'role': role_ref}
def create_role(self, context, role):
role = self._normalize_dict(role)
self.assert_admin(context)
role_id = uuid.uuid4().hex
role['id'] = role_id
role_ref = self.identity_api.create_role(context, role_id, role)
return {'role': role_ref}
def delete_role(self, context, role_id):
self.assert_admin(context)
self.get_role(context, role_id)
self.identity_api.delete_role(context, role_id)
def get_roles(self, context):
self.assert_admin(context)
roles = self.identity_api.list_roles(context)
# TODO(termie): probably inefficient at some point
return {'roles': roles}
def add_role_to_user(self, context, user_id, role_id, tenant_id=None):
"""Add a role to a user and tenant pair.
Since we're trying to ignore the idea of user-only roles we're
not implementing them in hopes that the idea will die off.
"""
self.assert_admin(context)
if tenant_id is None:
raise exception.NotImplemented(message='User roles not supported: '
'tenant_id required')
if self.identity_api.get_user(context, user_id) is None:
raise exception.UserNotFound(user_id=user_id)
if self.identity_api.get_tenant(context, tenant_id) is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
if self.identity_api.get_role(context, role_id) is None:
raise exception.RoleNotFound(role_id=role_id)
# This still has the weird legacy semantics that adding a role to
# a user also adds them to a tenant
self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
self.identity_api.add_role_to_user_and_tenant(
context, user_id, tenant_id, role_id)
role_ref = self.identity_api.get_role(context, role_id)
return {'role': role_ref}
def remove_role_from_user(self, context, user_id, role_id, tenant_id=None):
"""Remove a role from a user and tenant pair.
Since we're trying to ignore the idea of user-only roles we're
not implementing them in hopes that the idea will die off.
"""
self.assert_admin(context)
if tenant_id is None:
raise exception.NotImplemented(message='User roles not supported: '
'tenant_id required')
if self.identity_api.get_user(context, user_id) is None:
raise exception.UserNotFound(user_id=user_id)
if self.identity_api.get_tenant(context, tenant_id) is None:
raise exception.TenantNotFound(tenant_id=tenant_id)
if self.identity_api.get_role(context, role_id) is None:
raise exception.RoleNotFound(role_id=role_id)
# This still has the weird legacy semantics that adding a role to
# a user also adds them to a tenant, so we must follow up on that
self.identity_api.remove_role_from_user_and_tenant(
context, user_id, tenant_id, role_id)
roles = self.identity_api.get_roles_for_user_and_tenant(
context, user_id, tenant_id)
if not roles:
self.identity_api.remove_user_from_tenant(
context, tenant_id, user_id)
return
# COMPAT(diablo): CRUD extension
def get_role_refs(self, context, user_id):
"""Ultimate hack to get around having to make role_refs first-class.
This will basically iterate over the various roles the user has in
all tenants the user is a member of and create fake role_refs where
the id encodes the user-tenant-role information so we can look
up the appropriate data when we need to delete them.
"""
self.assert_admin(context)
user_ref = self.identity_api.get_user(context, user_id)
tenant_ids = self.identity_api.get_tenants_for_user(context, user_id)
o = []
for tenant_id in tenant_ids:
role_ids = self.identity_api.get_roles_for_user_and_tenant(
context, user_id, tenant_id)
for role_id in role_ids:
ref = {'roleId': role_id,
'tenantId': tenant_id,
'userId': user_id}
ref['id'] = urllib.urlencode(ref)
o.append(ref)
return {'roles': o}
# COMPAT(diablo): CRUD extension
def create_role_ref(self, context, user_id, role):
"""This is actually used for adding a user to a tenant.
In the legacy data model adding a user to a tenant required setting
a role.
"""
self.assert_admin(context)
# TODO(termie): for now we're ignoring the actual role
tenant_id = role.get('tenantId')
role_id = role.get('roleId')
self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
self.identity_api.add_role_to_user_and_tenant(
context, user_id, tenant_id, role_id)
role_ref = self.identity_api.get_role(context, role_id)
return {'role': role_ref}
# COMPAT(diablo): CRUD extension
def delete_role_ref(self, context, user_id, role_ref_id):
"""This is actually used for deleting a user from a tenant.
In the legacy data model removing a user from a tenant required
deleting a role.
To emulate this, we encode the tenant and role in the role_ref_id,
and if this happens to be the last role for the user-tenant pair,
we remove the user from the tenant.
"""
self.assert_admin(context)
# TODO(termie): for now we're ignoring the actual role
role_ref_ref = urlparse.parse_qs(role_ref_id)
tenant_id = role_ref_ref.get('tenantId')[0]
role_id = role_ref_ref.get('roleId')[0]
self.identity_api.remove_role_from_user_and_tenant(
context, user_id, tenant_id, role_id)
roles = self.identity_api.get_roles_for_user_and_tenant(
context, user_id, tenant_id)
if not roles:
self.identity_api.remove_user_from_tenant(
context, tenant_id, user_id)
| apache-2.0 | 3,092,222,762,643,436,000 | 34.937705 | 79 | 0.603686 | false |
truedays/sandbox | python/auto.py | 1 | 3096 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# CTA API CLI YAH
import sys
import requests
#from bs4 import BeautifulSoup
from xmltodict import parse
import datetime
from time import gmtime, strftime
# enable debugging
import cgitb
cgitb.enable()
# get API key from file
f = open('./.cta-api.key', 'r')
APIKEY = "?key=" + f.read(25)
f.close()
URL="http://www.ctabustracker.com/bustime/api/v1/"
apicmd = "getpredictions"
showResponse = ["stpnm","stpid","vid","rt","rtdir","prdtm"]
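# CTA BusTime prediction fields: stop name, stop id, vehicle id, route,
# route direction, and predicted arrival time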
def getPred(localurl):
r = requests.get(localurl)
out = parse(r.text)
if "error" in out['bustime-response']:
#print "+" + str(out) + "+"
#print "localurl: " + str(localurl)
print "xXxXxXx\nERROR: " + out['bustime-response']['error']['msg']
print "xXxXxXx\n"
#sys.exit(1)
return
print "___"
# Lame safety check:
if "prdtm" in out['bustime-response']['prd']:
#print "tmpstmp: " + out['bustime-response']['prd']['tmstmp']
for x in showResponse:
print x + ": " + out['bustime-response']['prd'][x]
#out['bustime-response']['prd']:
#print key
#print x
# true == multiple predictions returned
if isinstance(out['bustime-response']['prd'], list):
for x in range(0,len(out['bustime-response']['prd'])):
if out['bustime-response']['prd'][x]:
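                # tmstmp/prdtm look like 'YYYYMMDD HH:MM'; chars 9:11 and
                # 12:14 are the hour and minute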
hourNow=int(out['bustime-response']['prd'][x]['tmstmp'][9:11])
minNow=int(out['bustime-response']['prd'][x]['tmstmp'][12:14])
hourPred=int(out['bustime-response']['prd'][x]['prdtm'][9:11])
minPred=int(out['bustime-response']['prd'][x]['prdtm'][12:14])
timeRemain = ((hourPred*60)+minPred) - ((hourNow*60)+minNow)
for response in showResponse:
print response + "[" + str(x) + "]" + ": " + out['bustime-response']['prd'][x][response]
print "Minutes remaining: " + str(timeRemain)
print "___"
else:
if "tmstmp" in out['bustime-response']['prd']:
# print out['bustime-response']['prd']['tmstmp'][9:11]
# print out['bustime-response']['prd']['tmstmp'][12:14]
# print out['bustime-response']['prd']['prdtm'][9:11]
# print out['bustime-response']['prd']['prdtm'][12:14]
hourNow=int(out['bustime-response']['prd']['tmstmp'][9:11])
minNow=int(out['bustime-response']['prd']['tmstmp'][12:14])
hourPred=int(out['bustime-response']['prd']['prdtm'][9:11])
minPred=int(out['bustime-response']['prd']['prdtm'][12:14])
timeRemain = ((hourPred*60)+minPred) - ((hourNow*60)+minNow)
print "Minutes remaining: " + str(timeRemain)
print "___"
if int(strftime("%H", gmtime())) > 18 or int(strftime("%H", gmtime())) < 6:
# Heading home
heading = "home"
stops = ["&rt=78&stpid=11401", "&rt=56&stpid=14101"]
else:
# heading to work
heading = "work"
stops = ["&rt=78&stpid=11321", "&rt=56&stpid=5586"]
#print "Content-Type: text/plain;charset=utf-8"
print
print "<html><title>Bus times - Heading to " + heading + "</title><body>"
print "<pre>"
print "hour: " + str(int(strftime("%H", gmtime())))
for stop in stops:
fullurl = URL + apicmd + APIKEY + stop
getPred(fullurl)
print "</pre>"
print """
<FORM>
<INPUT TYPE="button" onClick="history.go(0)" VALUE="Refresh">
</FORM>
"""
| gpl-3.0 | -1,918,603,070,295,321,000 | 31.25 | 93 | 0.630814 | false |
bslatkin/8-bits | appengine-ndb/ndb/blobstore_test.py | 1 | 10271 | """Tests for blobstore.py."""
import cgi
import cStringIO
import datetime
import pickle
import unittest
from .google_imports import namespace_manager
from .google_imports import datastore_types
from . import blobstore
from . import model
from . import tasklets
from . import test_utils
class BlobstoreTests(test_utils.NDBTest):
def setUp(self):
super(BlobstoreTests, self).setUp()
self.testbed.init_blobstore_stub()
the_module = blobstore
def testConstants(self):
# This intentionally hardcodes the values. I'd like to know when
# they change.
self.assertEqual(blobstore.BLOB_INFO_KIND, '__BlobInfo__')
self.assertEqual(blobstore.BLOB_MIGRATION_KIND, '__BlobMigration__')
self.assertEqual(blobstore.BLOB_KEY_HEADER, 'X-AppEngine-BlobKey')
self.assertEqual(blobstore.BLOB_RANGE_HEADER, 'X-AppEngine-BlobRange')
self.assertEqual(blobstore.UPLOAD_INFO_CREATION_HEADER,
'X-AppEngine-Upload-Creation')
self.assertEqual(blobstore.MAX_BLOB_FETCH_SIZE, 1015808)
def testExceptions(self):
self.assertTrue(issubclass(blobstore.Error, Exception))
self.assertTrue(issubclass(blobstore.InternalError, blobstore.Error))
self.assertTrue(issubclass(blobstore.BlobFetchSizeTooLargeError,
blobstore.Error))
self.assertTrue(issubclass(blobstore.BlobNotFoundError, blobstore.Error))
self.assertTrue(issubclass(blobstore.DataIndexOutOfRangeError,
blobstore.Error))
self.assertTrue(issubclass(blobstore.PermissionDeniedError,
blobstore.Error))
self.assertTrue(issubclass(blobstore.BlobInfoParseError, blobstore.Error))
def create_blobinfo(self, blobkey):
"""Handcraft a dummy BlobInfo."""
b = blobstore.BlobInfo(key=model.Key(blobstore.BLOB_INFO_KIND, blobkey),
content_type='text/plain',
creation=datetime.datetime(2012, 1, 24, 8, 15, 0),
filename='hello.txt',
size=42,
md5_hash='xxx')
model.Model._put_async(b).check_success()
return b
def testBlobInfo(self):
b = self.create_blobinfo('dummy')
self.assertEqual(b._get_kind(), blobstore.BLOB_INFO_KIND)
self.assertEqual(b.key(), blobstore.BlobKey('dummy'))
self.assertEqual(b.content_type, 'text/plain')
self.assertEqual(b.creation, datetime.datetime(2012, 1, 24, 8, 15, 0))
self.assertEqual(b.filename, 'hello.txt')
self.assertEqual(b.md5_hash, 'xxx')
def testBlobInfo_PutErrors(self):
b = self.create_blobinfo('dummy')
self.assertRaises(Exception, b.put)
self.assertRaises(Exception, b.put_async)
self.assertRaises(Exception, model.put_multi, [b])
self.assertRaises(Exception, model.put_multi_async, [b])
def testBlobInfo_Get(self):
b = self.create_blobinfo('dummy')
c = blobstore.BlobInfo.get(b.key())
self.assertEqual(c, b)
self.assertTrue(c is not b)
c = blobstore.BlobInfo.get('dummy')
self.assertEqual(c, b)
self.assertTrue(c is not b)
def testBlobInfo_GetAsync(self):
b = self.create_blobinfo('dummy')
cf = blobstore.BlobInfo.get_async(b.key())
self.assertTrue(isinstance(cf, tasklets.Future))
c = cf.get_result()
self.assertEqual(c, b)
self.assertTrue(c is not b)
df = blobstore.BlobInfo.get_async(str(b.key()))
self.assertTrue(isinstance(df, tasklets.Future))
d = df.get_result()
self.assertEqual(d, b)
self.assertTrue(d is not b)
def testBlobInfo_GetMulti(self):
b = self.create_blobinfo('b')
c = self.create_blobinfo('c')
d, e = blobstore.BlobInfo.get_multi([b.key(), str(c.key())])
self.assertEqual(d, b)
self.assertEqual(e, c)
def testBlobInfo_GetMultiAsync(self):
b = self.create_blobinfo('b')
c = self.create_blobinfo('c')
df, ef = blobstore.BlobInfo.get_multi_async([str(b.key()), c.key()])
self.assertTrue(isinstance(df, tasklets.Future))
self.assertTrue(isinstance(ef, tasklets.Future))
d, e = df.get_result(), ef.get_result()
self.assertEqual(d, b)
self.assertEqual(e, c)
def testBlobInfo_Delete(self):
b = self.create_blobinfo('dummy')
c = blobstore.get(b._key.id())
self.assertEqual(c, b)
b.delete()
d = blobstore.get(b.key())
self.assertEqual(d, None)
def testBlobInfo_DeleteAsync(self):
b = self.create_blobinfo('dummy')
df = b.delete_async()
self.assertTrue(isinstance(df, tasklets.Future), df)
df.get_result()
d = blobstore.get(b.key())
self.assertEqual(d, None)
def testBlobstore_Get(self):
b = self.create_blobinfo('dummy')
c = blobstore.get(b.key())
self.assertEqual(c, b)
self.assertTrue(c is not b)
c = blobstore.get('dummy')
self.assertEqual(c, b)
self.assertTrue(c is not b)
def testBlobstore_GetAsync(self):
b = self.create_blobinfo('dummy')
cf = blobstore.get_async(b.key())
self.assertTrue(isinstance(cf, tasklets.Future))
c = cf.get_result()
self.assertEqual(c, b)
self.assertTrue(c is not b)
cf = blobstore.get_async('dummy')
c = cf.get_result()
self.assertEqual(c, b)
self.assertTrue(c is not b)
def testBlobstore_Delete(self):
b = self.create_blobinfo('dummy')
blobstore.delete(b.key())
d = blobstore.get(b.key())
self.assertEqual(d, None)
def testBlobstore_DeleteAsync(self):
b = self.create_blobinfo('dummy')
df = blobstore.delete_async(b.key())
self.assertTrue(isinstance(df, tasklets.Future), df)
df.get_result()
d = blobstore.get(b.key())
self.assertEqual(d, None)
def testBlobstore_DeleteMulti(self):
b = self.create_blobinfo('b')
c = self.create_blobinfo('c')
blobstore.delete_multi([b.key(), str(c.key())])
d, e = blobstore.get_multi([b.key(), str(c.key())])
self.assertEqual(d, None)
self.assertEqual(e, None)
def testBlobstore_DeleteMultiAsync(self):
b = self.create_blobinfo('b')
c = self.create_blobinfo('c')
f = blobstore.delete_multi_async([b.key(), str(c.key())])
self.assertTrue(isinstance(f, tasklets.Future), f)
f.get_result()
d, e = blobstore.get_multi([b.key(), str(c.key())])
self.assertEqual(d, None)
self.assertEqual(e, None)
def testBlobstore_CreateUploadUrl(self):
url = blobstore.create_upload_url('/foo')
self.assertTrue('/_ah/upload/' in url, url)
def testBlobstore_CreateUploadUrlAsync(self):
urlf = blobstore.create_upload_url_async('/foo')
self.assertTrue(isinstance(urlf, tasklets.Future), urlf)
url = urlf.get_result()
self.assertTrue('/_ah/upload/' in url, url)
def testBlobstore_ParseBlobInfo_Errors(self):
nope = blobstore.parse_blob_info(None)
self.assertEqual(nope, None)
env = {'REQUEST_METHOD': 'POST'}
hdrs = {'content-disposition': 'blah; filename=hello.txt; name=hello',
'content-type': 'text/plain; blob-key=xxx'}
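    # Three malformed bodies follow: missing Content-type, a non-numeric
    # Content-length, and an unparseable upload-creation timestamp.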
fd = cStringIO.StringIO(
'Content-length: 42\n'
'X-AppEngine-Upload-Creation: 2012-01-24 17:35:00.000000\n'
'Content-MD5: eHh4\n'
'\n'
)
fs = cgi.FieldStorage(fd, headers=hdrs, environ=env)
self.assertRaises(blobstore.BlobInfoParseError,
blobstore.parse_blob_info, fs)
fd = cStringIO.StringIO(
'Content-type: image/jpeg\n'
'Content-length: hello\n'
'X-AppEngine-Upload-Creation: 2012-01-24 17:35:00.000000\n'
'Content-MD5: eHh4\n'
'\n'
)
fs = cgi.FieldStorage(fd, headers=hdrs, environ=env)
self.assertRaises(blobstore.BlobInfoParseError,
blobstore.parse_blob_info, fs)
fd = cStringIO.StringIO(
'Content-type: image/jpeg\n'
'Content-length: 42\n'
'X-AppEngine-Upload-Creation: BLAH-01-24 17:35:00.000000\n'
'Content-MD5: eHh4\n'
'\n'
)
fs = cgi.FieldStorage(fd, headers=hdrs, environ=env)
self.assertRaises(blobstore.BlobInfoParseError,
blobstore.parse_blob_info, fs)
def testBlobstore_ParseBlobInfo(self):
env = {'REQUEST_METHOD': 'POST'}
hdrs = {'content-disposition': 'blah; filename=hello.txt; name=hello',
'content-type': 'text/plain; blob-key=xxx'}
fd = cStringIO.StringIO(
'Content-type: image/jpeg\n'
'Content-length: 42\n'
'X-AppEngine-Upload-Creation: 2012-01-24 17:35:00.000000\n'
'Content-MD5: eHh4\n'
'\n'
)
fs = cgi.FieldStorage(fd, headers=hdrs, environ=env)
bi = blobstore.parse_blob_info(fs)
self.assertTrue(isinstance(bi, blobstore.BlobInfo))
self.assertEqual(
bi,
blobstore.BlobInfo(key=model.Key(blobstore.BlobInfo, 'xxx'),
content_type='image/jpeg',
creation=datetime.datetime(2012, 1, 24, 17, 35),
filename='hello.txt',
md5_hash='xxx',
size=42))
def testBlobstore_FetchData(self):
self.create_blobinfo('xxx')
stub = self.testbed.get_stub('blobstore')
storage = stub.storage
storage._blobs['xxx'] = 'abcde'
result = blobstore.fetch_data('xxx', 0, 3) # Range is inclusive!
self.assertEqual(result, 'abcd')
def testBlobstore_FetchDataAsync(self):
b = self.create_blobinfo('xxx')
stub = self.testbed.get_stub('blobstore')
storage = stub.storage
storage._blobs['xxx'] = 'abcde'
fut = blobstore.fetch_data_async(b, 0, 2)
self.assertTrue(isinstance(fut, tasklets.Future), fut)
result = fut.get_result()
self.assertEqual(result, 'abc')
def testBlobInfo_Open(self):
b = self.create_blobinfo('xxx')
stub = self.testbed.get_stub('blobstore')
storage = stub.storage
storage._blobs['xxx'] = 'abcde'
f = b.open()
self.assertEqual(f.read(3), 'abc')
self.assertEqual(f.read(3), 'de')
self.assertEqual(f.blob_info, b)
def testBlobReader(self):
b = self.create_blobinfo('xxx')
stub = self.testbed.get_stub('blobstore')
storage = stub.storage
storage._blobs['xxx'] = 'abcde'
f = blobstore.BlobReader('xxx')
self.assertEqual(f.read(), 'abcde')
self.assertEqual(f.blob_info, b)
def main():
unittest.main()
if __name__ == '__main__':
main()
| apache-2.0 | 4,325,438,816,037,139,000 | 33.236667 | 78 | 0.644825 | false |
Mbrownshoes/ckanext-bcgov | ckanext/bcgov/logic/ofi/__init__.py | 1 | 4636 | # Copyright 2016, Province of British Columbia
# License: https://github.com/bcgov/ckanext-bcgov/blob/master/license
#
# HighwayThree Solutions Inc.
# Author: Jared Smith <jrods@github>
import logging
from functools import wraps
from pprint import pprint, pformat
import requests as reqs
from ckan.common import request
import ckan.plugins.toolkit as toolkit
import ckanext.bcgov.util.helpers as edc_h
log = logging.getLogger(u'ckanext.bcgov.logic.ofi')
def check_access(action):
"""
Decorator for call_action functions to check authorization.
Even if the call_action doesn't need any authorization checks, there should still be
a defined auth check for the call_action.
"""
@wraps(action)
def wrapper(context, data):
toolkit.check_access(action.__name__, context, data)
return action(context, data)
return wrapper
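# Usage sketch (hypothetical action; any call_action can be wrapped):
#
#   @check_access
#   def file_formats(context, data):
#       ...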
def setup_ofi_action(api_url=None):
"""
Decorator for call_action functions.
    This decorator should be used last, immediately before the actual
    call_action function.
    This sets up common params and options for call_action functions.
    The api_url should be used for prerequisite calls only, such as getting
    DWDS file formats or CRS types. It doesn't support OFI POST API calls.
:param api_url: An OFI DWDS API endpoint or NoneType
:returns: call_action function location from logic.ofi.call_action,
              these args are mandatory for call_actions:
def call_action(context, data, ofi_resp)
"""
def action_decorator(action):
@wraps(action)
def wrapper(context, data):
"""
Context and data are args for get_action calls
:returns: call_action function location from logic.ofi.call_action,
            these args are mandatory for call_actions:
def call_action(context, data, ofi_resp)
"""
if u'secure' not in data:
data[u'secure'] = False
# these action calls don't need to be the secure url
if action.__name__ in ['file_formats', 'crs_types']:
data.update(_prepare(False))
else:
data.update(_prepare(toolkit.asbool(data[u'secure'])))
if action.__name__ == 'edit_ofi_resources':
if u'package_id' not in data:
data[u'package_id'] = data.query_params.getone('package_id')
if u'object_name' not in data:
data[u'object_name'] = data.query_params.getone('object_name')
# allows the decorator to be used for just getting query params, cookies, etc.
if api_url is not None:
url = data[u'ofi_url'] + api_url
            # expecting additional pathing if the incoming api endpoint ends with a '/'
if api_url.endswith(u'/'):
if 'object_name' in data:
url += data[u'object_name']
data[u'api_url'] = url
call_type = u'Secure' if data[u'secure'] else u'Public' # call_type is for logging purposes
ofi_resp = _make_api_call(url, call_type=call_type, cookies=data[u'cookies'])
else:
ofi_resp = {}
return action(context, data, ofi_resp)
return wrapper
return action_decorator
def _prepare(secure=False):
ofi_vars = {}
ofi_vars[u'config'] = edc_h.get_ofi_config()
ofi_vars[u'cookies'] = {
u'SMSESSION': request.cookies.get(u'SMSESSION', '')
}
try:
ofi_vars[u'query_params'] = request.params
except ValueError, inst:
log.info('Bad Action API request data: %s', inst)
return {}
ofi_vars[u'secure'] = secure
ofi_vars[u'ofi_url'] = edc_h._build_ofi_url(secure)
return ofi_vars
def _make_api_call(api_url, call_type='Public', cookies=None):
log.info(u'OFI outgoing, call type: %s, api url: %s', call_type, api_url)
resp = reqs.get(api_url, cookies=cookies)
_log_response(resp, call_type)
return resp
def _log_response(resp, call_type):
log.debug(u'OFI response, api response:\n %s', pformat({
u'url': resp.url,
u'status': resp.status_code,
u'reason': resp.reason,
u'headers': resp.headers,
u'cookies': resp.cookies,
u'elapsed': str(resp.elapsed.total_seconds()) + u's'
}))
log.debug(u'OFI api content: %s', pformat(resp.text))
class OFIServiceError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
| agpl-3.0 | -3,168,087,227,463,341,000 | 30.537415 | 108 | 0.608067 | false |
hasgeek/funnel | migrations/versions/c47007758ee6_add_email_address_active_at.py | 1 | 1589 | """Add email_address.active_at.
Revision ID: c47007758ee6
Revises: b7fa6df99855
Create Date: 2020-08-20 21:47:43.356619
"""
from alembic import op
from sqlalchemy import column, table
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c47007758ee6'
down_revision = 'b7fa6df99855'
branch_labels = None
depends_on = None
class DELIVERY_STATE: # noqa: N801
SENT = 1
ACTIVE = 2
email_address = table(
'email_address',
column('id', sa.Integer()),
column('delivery_state', sa.Integer()),
column('delivery_state_at', sa.TIMESTAMP(timezone=True)),
column('active_at', sa.TIMESTAMP(timezone=True)),
)
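# upgrade() folds the transient ACTIVE (2) delivery state into the new
# active_at timestamp column and narrows the allowed delivery_state values.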
def upgrade():
op.add_column(
'email_address',
sa.Column('active_at', sa.TIMESTAMP(timezone=True), nullable=True),
)
op.execute(
email_address.update()
.where(email_address.c.delivery_state == DELIVERY_STATE.ACTIVE)
.values(
{
'active_at': email_address.c.delivery_state_at,
'delivery_state': DELIVERY_STATE.SENT,
}
)
)
op.create_check_constraint(
'email_address_delivery_state_check',
'email_address',
'delivery_state IN (0, 1, 3, 4)',
)
def downgrade():
op.drop_constraint(
'email_address_delivery_state_check', 'email_address', type_='check'
)
op.execute(
email_address.update()
.where(email_address.c.active_at.isnot(None))
.values({'delivery_state': DELIVERY_STATE.ACTIVE})
)
op.drop_column('email_address', 'active_at')
| agpl-3.0 | 8,731,557,630,074,718,000 | 23.446154 | 76 | 0.623033 | false |
adrianomargarin/wttd-eventex | eventex/core/migrations/0007_course.py | 1 | 1116 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-10-09 01:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20171008_2259'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200, verbose_name='Título')),
('start', models.TimeField(blank=True, null=True, verbose_name='Início')),
('description', models.TextField(blank=True, verbose_name='Descrição')),
('slots', models.IntegerField()),
('speakers', models.ManyToManyField(blank=True, to='core.Speaker', verbose_name='Palestrantes')),
],
options={
'verbose_name': 'Palestra',
'abstract': False,
'verbose_name_plural': 'Palestras',
},
),
]
| gpl-3.0 | 6,264,919,628,078,854,000 | 34.870968 | 114 | 0.556655 | false |
aswarren/GOGranny | tests/SteinerTreeTest.py | 1 | 2069 | from GOGranny import *
from networkx import *
import unittest
class TestSteinerTree(unittest.TestCase):
def stree(self, edges, voi):
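        # voi = "vertices of interest": the terminal nodes the Steiner tree must span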
g = Graph()
for edge in edges:
g.add_edge(edge[0],edge[1],weight=edge[2])
return make_steiner_tree(g, voi)
def testSteinerTrees(self):
edges = [("a", "b", 1), ("a", "c", 5), ("a", "e", 2), ("a", "d", 2), ("b", "c", 2), ("c", "d", 3), ("e", "d", 6)]
st = self.stree(edges, ['c', 'e', 'a'])
self.failUnless(st.edges(data=True) == [('a', 'b',{'weight':1}), ('a', 'e', {'weight':2}), ('c', 'b', {'weight':2})])
edges = [('a', 'b', 3), ('b', 'c', 4), ('c', 'd', 5), ('a', 'e', 1), ('e', 'd', 1)]
st = self.stree(edges, ['b', 'd'])
self.failUnless(st.edges(data=True) == [('a', 'b', {'weight':3}), ('a', 'e',{'weight':1}), ('e', 'd', {'weight':1})])
edges = [('a', 'b', 4), ('a', 'c', 4), ('b', 'c', 4)]
st = self.stree(edges, ['a', 'b', 'c'])
self.failUnless(st.edges(data=True) == [('a', 'c', {'weight':4}), ('a', 'b', {'weight':4})])
# from the markowsky paper
edges = [('v1', 'v9', 1), ('v1', 'v2', 10), ('v8', 'v9', .5), ('v9', 'v5', 1), ('v8', 'v7', .5), ('v7', 'v6', 1), ('v6', 'v5', 1), ('v2', 'v6', 1),
('v2', 'v3', 8), ('v3', 'v5', 2), ('v5', 'v4', 2), ('v3', 'v4', 9)]
st = self.stree(edges, ['v1', 'v2', 'v3', 'v4'])
self.failUnless(st.edges(data=True) == [('v1', 'v9', {'weight':1}), ('v2', 'v6', {'weight':1}), ('v3', 'v5', {'weight':2}), ('v4', 'v5', {'weight':2}), ('v5', 'v9', {'weight':1}), ('v5', 'v6', {'weight':1})])
edges = [('a', 'b', 0), ('b', 'c', 0), ('a', 'd', 3), ('b', 'd', 3), ('c', 'd', 3)]
st = self.stree(edges, ['a', 'b', 'c', 'd'])
self.failUnless(st.edges(data=True) == [('a', 'b', {'weight':0}), ('a', 'd', {'weight':3}), ('c', 'b', {'weight':0})])
edges = [('a', 'b', 0), ('b', 'c', 0), ('a', 'd', 3), ('b', 'd', 3), ('c', 'd', 3), ('d', 'e', 1)]
st = self.stree(edges, ['a', 'b', 'c', 'e'])
self.failUnless(st.edges(data=True) == [('a', 'b', {'weight':0}), ('a', 'd', {'weight':3}), ('c', 'b', {'weight':0}), ('e', 'd', {'weight':1})])
| gpl-2.0 | -5,228,691,003,820,098,000 | 52.051282 | 209 | 0.429193 | false |
padmashrimallapur/report | report.py | 1 | 3153 | #!/usr/local/bin/python
"""
Creates a report of the top 50 domains over the last 30 days.
It is assumed that the mailing table is repopulated every day.
The report displays the top 50 domains sorted by count, with their growth percentage.
"""
import MySQLdb
import datetime
mysqlconfig = {'host': 'localhost',
'username': 'root',
'password': 'mysqlroot',
'dbName': 'indexdb'}
def __test_insert():
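    """Seed the mailing table with a known address distribution (test only)."""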
conn = connection()
cur = conn.cursor()
cur.execute("TRUNCATE mailing")
conn.commit()
for i in range(0, 10):
insert = "INSERT INTO mailing (addr) VALUES ('id%[email protected]')" % i
cur.execute(insert)
for i in range(0, 5):
insert = "INSERT INTO mailing (addr) VALUES ('id%[email protected]')" % i
cur.execute(insert)
for i in range(0, 25):
insert = "INSERT INTO mailing (addr) VALUES ('id%[email protected]')" % i
cur.execute(insert)
conn.commit()
def connection():
conn = MySQLdb.connect(mysqlconfig['host'],
mysqlconfig['username'],
mysqlconfig['password'],
mysqlconfig['dbName'])
return conn
def daterange():
previous_month_day = datetime.datetime.now() - datetime.timedelta(days=30)
earlier = previous_month_day.strftime("%Y-%m-%d")
current_day = datetime.datetime.now().strftime("%Y-%m-%d")
return earlier, current_day
def updateDomains(today):
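    """Tally today's per-domain address counts from mailing into domain_count."""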
conn = connection()
cursor = conn.cursor()
cursor.execute("SELECT addr FROM mailing")
for email in cursor.fetchall():
domain = email[0].split("@")[1]
cursor.execute("SELECT count FROM domain_count WHERE domain = %s AND date = %s", (domain, today))
count = cursor.fetchone()
if count is not None:
count = count[0] + 1
cursor.execute("UPDATE domain_count SET count= %s WHERE domain = %s AND date = %s", (count, domain, today))
else:
count = 1
cursor.execute("INSERT INTO domain_count (domain, count, date)VALUES (%s, %s, %s)", (domain, count, today))
conn.commit()
def printReport(fromdate, todate):
conn = connection()
cursor = conn.cursor()
cursor.execute("SELECT SUM(COUNT) FROM domain_count")
total = cursor.fetchone()[0]
cursor.execute("SELECT count,domain FROM `domain_count` WHERE date> %s or date< %s ORDER BY count DESC LIMIT 50",
(fromdate, todate))
total_domains = cursor.fetchall()
if len(total_domains) > 0:
print("Top 50 domains of last 30 days")
fmt = "{0}{1}{0}{2}{0}".format("+", "-"*14, "-"*13)
print fmt
print "{0} {1:9} {0:>4} {2:>4} {0}".format("|", "Domain", "Growth in %")
print fmt
for row in total_domains:
growth = (row[0] * 100) / total
print "{0} {1:9} {0:>4} {2:>4} {0:>7}".format("|", row[1], round(growth, 2))
else:
print "No data available"
if __name__ == '__main__':
#This is only for test insert...
__test_insert()
earlier_str, today = daterange()
updateDomains(today)
printReport(earlier_str, today)
| gpl-2.0 | -6,065,980,197,356,007,000 | 29.028571 | 119 | 0.579448 | false |
rjdp/cement | cement/utils/misc.py | 1 | 6108 | """Misc utilities."""
import os
import sys
import logging
import hashlib
from textwrap import TextWrapper
from random import random
def rando(salt=None):
"""
Generate a random MD5 hash for whatever purpose. Useful for testing
or any other time that something random is required.
:param salt: Optional 'salt', if None then random() is used.
:returns: Random MD5 hash (str).
"""
if salt is None:
salt = random()
return hashlib.md5(str(salt).encode()).hexdigest()
# class NullLogger(object):
# def __init__(self, namespace, debug, *args, **kw):
# pass
# def info(self, *args, **kw):
# pass
# def warn(self, *args, **kw):
# pass
# def error(self, *args, **kw):
# pass
# def fatal(self, *args, **kw):
# pass
# def debug(self, *args, **kw):
# pass
class MinimalLogger(object):
def __init__(self, namespace, debug, *args, **kw):
self.namespace = namespace
self.backend = logging.getLogger(namespace)
formatter = logging.Formatter(
"%(asctime)s (%(levelname)s) %(namespace)s : %(message)s"
)
console = logging.StreamHandler()
console.setFormatter(formatter)
console.setLevel(logging.INFO)
self.backend.setLevel(logging.INFO)
# FIX ME: really don't want to hard check sys.argv like this but
        # can't figure out any better way to get logging started (only for debug)
# before the app logging is setup.
if '--debug' in sys.argv or debug:
console.setLevel(logging.DEBUG)
self.backend.setLevel(logging.DEBUG)
self.backend.addHandler(console)
def _get_logging_kwargs(self, namespace, **kw):
if not namespace:
namespace = self.namespace
if 'extra' in kw.keys() and 'namespace' in kw['extra'].keys():
pass
elif 'extra' in kw.keys() and 'namespace' not in kw['extra'].keys():
kw['extra']['namespace'] = namespace
else:
kw['extra'] = dict(namespace=namespace)
return kw
@property
def logging_is_enabled(self):
if 'CEMENT_FRAMEWORK_LOGGING' in os.environ.keys():
if is_true(os.environ['CEMENT_FRAMEWORK_LOGGING']):
res = True
else:
res = False
else:
res = True
return res
def info(self, msg, namespace=None, **kw):
if self.logging_is_enabled:
kwargs = self._get_logging_kwargs(namespace, **kw)
self.backend.info(msg, **kwargs)
def warn(self, msg, namespace=None, **kw):
if self.logging_is_enabled:
kwargs = self._get_logging_kwargs(namespace, **kw)
self.backend.warn(msg, **kwargs)
def error(self, msg, namespace=None, **kw):
if self.logging_is_enabled:
kwargs = self._get_logging_kwargs(namespace, **kw)
self.backend.error(msg, **kwargs)
def fatal(self, msg, namespace=None, **kw):
if self.logging_is_enabled:
kwargs = self._get_logging_kwargs(namespace, **kw)
self.backend.fatal(msg, **kwargs)
def debug(self, msg, namespace=None, **kw):
if self.logging_is_enabled:
kwargs = self._get_logging_kwargs(namespace, **kw)
self.backend.debug(msg, **kwargs)
def init_defaults(*sections):
"""
Returns a standard dictionary object to use for application defaults.
If sections are given, it will create a nested dict for each section name.
:arg sections: Section keys to create nested dictionaries for.
:returns: Dictionary of nested dictionaries (sections)
:rtype: dict
.. code-block:: python
from cement.core import foundation
from cement.utils.misc import init_defaults
defaults = init_defaults('myapp', 'section2', 'section3')
defaults['myapp']['debug'] = False
    defaults['section2']['foo'] = 'bar'
defaults['section3']['foo2'] = 'bar2'
app = foundation.CementApp('myapp', config_defaults=defaults)
"""
defaults = dict()
for section in sections:
defaults[section] = dict()
return defaults
def minimal_logger(namespace, debug=False):
"""
Setup just enough for cement to be able to do debug logging. This is the
    logger used by the Cement framework, which is set up and accessed before
    the application is functional (and more importantly before the
    application's log handler is usable).
:param namespace: The logging namespace. This is generally '__name__' or
anything you want.
:param debug: Toggle debug output. Default: False
:type debug: boolean
:returns: Logger object
.. code-block:: python
from cement.utils.misc import minimal_logger
LOG = minimal_logger('cement')
LOG.debug('This is a debug message')
"""
return MinimalLogger(namespace, debug)
def is_true(item):
"""
Given a value, determine if it is one of [True, 'True', 'true', 1, '1'].
:param item: The item to convert to a boolean.
:returns: True if `item` is in ``[True, 'True', 'true', 1, '1']``, False
otherwise.
:rtype: boolean
"""
if item in [True, 'True', 'true', 1, '1']:
return True
else:
return False
def wrap(text, width=77, indent='', long_words=False, hyphens=False):
"""
Wrap text for cleaner output (this is a simple wrapper around
`textwrap.TextWrapper` in the standard library).
:param text: The text to wrap
:param width: The max width of a line before breaking
:param indent: String to prefix subsequent lines after breaking
:param long_words: Break on long words
:param hyphens: Break on hyphens
:returns: str(text)
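
    Usage:

    .. code-block:: python

        from cement.utils.misc import wrap

        text = wrap('a rather long line of text', width=40, indent='> ')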
"""
if type(text) != str:
raise TypeError("`text` must be a string.")
wrapper = TextWrapper(subsequent_indent=indent, width=width,
break_long_words=long_words,
break_on_hyphens=hyphens)
return wrapper.fill(text)
| bsd-3-clause | -3,778,632,260,896,210,000 | 28.941176 | 78 | 0.609037 | false |
AndriyHavirko/google-python-exercises | basic/list1.py | 1 | 3101 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic list exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in list2.py.
# A. match_ends
# Given a list of strings, return the count of the number of
# strings where the string length is 2 or more and the first
# and last chars of the string are the same.
# Note: python does not have a ++ operator, but += works.
def match_ends(words):
count = 0
for w in words:
if len(w) > 1 and w[0] == w[-1]:
count += 1
return count
# B. front_x
# Given a list of strings, return a list with the strings
# in sorted order, except group all the strings that begin with 'x' first.
# e.g. ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields
# ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
# Hint: this can be done by making 2 lists and sorting each of them
# before combining them.
def front_x(words):
x_arr = []
non_x_arr = []
for w in words:
if w[0] == 'x': x_arr.append(w)
else: non_x_arr.append(w)
return sorted(x_arr) + sorted(non_x_arr)
# C. sort_last
# Given a list of non-empty tuples, return a list sorted in increasing
# order by the last element in each tuple.
# e.g. [(1, 7), (1, 3), (3, 4, 5), (2, 2)] yields
# [(2, 2), (1, 3), (3, 4, 5), (1, 7)]
# Hint: use a custom key= function to extract the last element form each tuple.
def sort_last(tuples):
return sorted(tuples, key=lambda e: e[-1])
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Calls the above functions with interesting inputs.
def main():
print 'match_ends'
test(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print
print 'front_x'
test(front_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']),
['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test(front_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']),
['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test(front_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']),
['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
print
print 'sort_last'
test(sort_last([(1, 3), (3, 2), (2, 1)]),
[(2, 1), (3, 2), (1, 3)])
test(sort_last([(2, 3), (1, 2), (3, 1)]),
[(3, 1), (1, 2), (2, 3)])
test(sort_last([(1, 7), (1, 3), (3, 4, 5), (2, 2)]),
[(2, 2), (1, 3), (3, 4, 5), (1, 7)])
if __name__ == '__main__':
main()
| apache-2.0 | -5,766,878,473,112,313,000 | 31.642105 | 79 | 0.603999 | false |
bjuvensjo/scripts | vang/git/patchish_refs.py | 1 | 3165 | #!/usr/bin/env python3
from argparse import ArgumentParser
from pprint import pprint
from re import match
from vang.pio.shell import run_command
from vang.pio.synchronize_dirs import synchronize_dirs
def apply_patch(patch_repo, apply_repo, ref):
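    """Check out `ref` in patch_repo, sync its tree into apply_repo, then
    commit (only if something changed) and tag apply_repo with the same ref."""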
print('Apply patch', ref, patch_repo, apply_repo, ref)
rc, output = run_command(f'git checkout {ref}', True, patch_repo)
print(output)
synchronize_dirs(patch_repo, apply_repo)
rc, output = run_command('git status', True, apply_repo)
print(output)
if 'nothing to commit' in output:
rc, output = run_command(f'git tag -a {ref} -m {ref}', True, apply_repo)
print(output)
else:
for cmd in [
'git add --all',
f'git commit -m {ref}',
f'git tag -a {ref} -m {ref}',
]:
rc, output = run_command(cmd, True, apply_repo)
print(output)
return ref
def get_refs(repo, ref_pattern):
return [
ref for ref in run_command('git tag', True, repo)[1].split('\n')
if match(r'{}'.format(ref_pattern), ref)
]
def get_unpatched_refs(patchs_refs, applied_refs):
return [p for p in patchs_refs if p not in applied_refs]
def is_valid(patchs_refs, applied_refs):
return all([p == a for p, a in zip(patchs_refs, applied_refs)
]) and not len(applied_refs) > len(patchs_refs)
def main(patch_repo, ref_pattern, apply_repo):
patchs_refs = get_refs(patch_repo, ref_pattern)
applied_refs = get_refs(apply_repo, ref_pattern)
if is_valid(patchs_refs, applied_refs):
unpatched_refs = get_unpatched_refs(patchs_refs, applied_refs)
applied_patches = []
for ref in unpatched_refs:
apply_patch(patch_repo, apply_repo, ref)
applied_patches.append(ref)
return applied_patches
else:
raise ValueError('Tags are not valid.')
def parse_args(args):
    parser = ArgumentParser(description='Create patches of refs and apply, ' +
                            'commit and tag them in another repo.')
parser.add_argument('ref_pattern', help='A ref pattern.')
parser.add_argument('apply_repo', help='The repo to apply patches to.')
parser.add_argument(
'-p',
'--patch_repo',
help='The repo to patch from.',
default='.',
)
parser.add_argument(
'-o',
'--output',
help='A directory to put patches in.',
default='./patch',
)
return parser.parse_args(args)
# rm -rf sign/patch; rm -rf lf-sign; md lf-sign; cd lf-sign; git init
# main(
# '/Users/ei4577/slask/slask/PCS1806/sign',
# 'release.*',
# '/Users/ei4577/slask/slask/PCS1806/lf-sign',
# )
# rm -rf lf-process.mortgage; md lf-process.mortgage; cd lf-process.mortgage; git init
the_applied_patches = main(
'/Users/ei4577/slask/slask/PCS1806/process.mortgage',
'release.*',
'/Users/ei4577/slask/slask/PCS1806/lf-process.mortgage',
)
pprint(the_applied_patches)
# if __name__ == '__main__': # pragma: no cover
# args = parse_args(argv[1:])
# main(args.patch_repo, args.ref_pattern, args.output, args.apply_repo)
| apache-2.0 | 6,775,928,350,807,584,000 | 30.336634 | 86 | 0.617378 | false |
amirgeva/coide | system.py | 1 | 2487 | import utils
import re
from multiprocessing import Process, Queue
import callbacks
def libraryDirs():
out,err=utils.call('.','ld','--verbose')
return re.findall('SEARCH_DIR\("=([^"]+)"\);',out)
def listAllPackages():
res=set()
try:
all,err=utils.call('.','pkg-config','--list-all')
lines=all.splitlines()
for line in lines:
name=(line.split(' '))[0]
res.add(name)
except OSError:
pass
return sorted(list(res))
def symbolScan(q,ws):
import symbolscanner
q.put(symbolscanner.getLibrarySymbols(ws))
noMP=False
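# noMP: set True to run the symbol scan synchronously instead of in a
# multiprocessing worker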
scanq=Queue()
workspacePath=''
scannerProcess=None
scanStarted=False
libSyms=None
wsSyms=None
wsLibs=None
def isScannerDone():
if scanq:
return not scanq.empty()
return True
def disableSymbolScan():
global libSyms
global wsSyms
global wsLibs
global scanq
libSyms={}
wsSyms={}
wsLibs={}
scanq=None
def startSymbolScan(ws):
utils.timestamp('start scan process')
if not noMP:
global scannerProcess
global scanStarted
global workspacePath
if scanq and not scanStarted:
scanStarted=True
workspacePath=ws
scannerProcess=Process(target=symbolScan,args=(scanq,workspacePath))
scannerProcess.start()
else:
global libSyms
global wsSyms
global wsLibs
import symbolscanner
(libSyms,wsSyms,wsLibs)=symbolscanner.getLibrarySymbols(workspacePath)
def getLibrarySymbols():
global libSyms
global wsSyms
global wsLibs
global scannerProcess
global scanq
if not libSyms:
if not scanq:
libSyms={}
wsSyms={}
wsLibs={}
else:
utils.timestamp('Getting scan results from queue')
(libSyms,wsSyms,wsLibs)=scanq.get()
utils.timestamp('Done queue get')
if scannerProcess:
utils.timestamp('Joining scan process')
scannerProcess.join()
utils.timestamp('Done join')
scannerProcess=None
if scanq:
scanq.close()
scanq=None
import symbolscanner
symbolscanner.setInitialResults(workspacePath,libSyms,wsSyms,wsLibs)
return libSyms
def getWorkspaceSymbols():
getLibrarySymbols()
return wsSyms
callbacks.closeCallbacks.append(getLibrarySymbols)
if __name__=='__main__':
getLibrarySymbols()
| gpl-2.0 | 7,818,529,345,900,477,000 | 22.471698 | 80 | 0.622437 | false |
Workday/OpenFrame | tools/telemetry/telemetry/internal/platform/profiler/android_profiling_helper_unittest.py | 1 | 7180 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pickle
import re
import shutil
import tempfile
import time
import unittest
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.platform.profiler import android_profiling_helper
from telemetry.testing import simple_mock
from telemetry.testing import tab_test_case
def _GetLibrariesMappedIntoProcesses(device, pids):
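  """Returns the set of .so paths mapped into the given pids, per /proc/<pid>/maps."""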
libs = set()
for pid in pids:
maps_file = '/proc/%d/maps' % pid
maps = device.ReadFile(maps_file, as_root=True).splitlines()
for map_line in maps:
lib = re.match(r'.*\s(/.*[.]so)$', map_line)
if lib:
libs.add(lib.group(1))
return libs
class TestFileMetadataMatches(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.filename_a = os.path.join(self.tempdir, 'filea')
self.filename_b = os.path.join(self.tempdir, 'fileb')
with open(self.filename_a, 'w') as f:
f.write('testing')
def tearDown(self):
shutil.rmtree(self.tempdir)
def testDoesntMatchNonExistant(self):
self.assertFalse(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
def testDoesntMatchJustExistence(self):
with open(self.filename_b, 'w') as f:
f.write('blah')
self.assertFalse(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
def testDoesntMatchCopy(self):
# This test can run so fast that the file system doesn't have enough
# accuracy to differentiate between the copy and initial file times.
# Hence we need to guarantee a delay here.
time.sleep(3)
shutil.copy(self.filename_a, self.filename_b)
self.assertFalse(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
def testMatchesAfterCopy2(self):
shutil.copy2(self.filename_a, self.filename_b)
self.assertTrue(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
def testDoesntMatchAfterCopy2ThenModify(self):
shutil.copy2(self.filename_a, self.filename_b)
filea = open(self.filename_a, 'w')
filea.write('moar testing!')
filea.close()
self.assertFalse(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
def testDoesntMatchAfterCopy2ThenModifyStats(self):
shutil.copy2(self.filename_a, self.filename_b)
os.utime(self.filename_a, (20, 20))
self.assertFalse(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
def testMatchesAfterCopyStatWithDifferentContent(self):
fileb = open(self.filename_b, 'w')
fileb.write('blahing')
fileb.close()
shutil.copystat(self.filename_a, self.filename_b)
self.assertTrue(
android_profiling_helper._FileMetadataMatches(
self.filename_a, self.filename_b))
class TestAndroidProfilingHelper(unittest.TestCase):
def testGetRequiredLibrariesForPerfProfile(self):
perf_output = os.path.join(
util.GetUnittestDataDir(), 'sample_perf_report_output.txt')
with open(perf_output) as f:
perf_output = f.read()
mock_popen = simple_mock.MockObject()
mock_popen.ExpectCall('communicate').WillReturn([None, perf_output])
mock_subprocess = simple_mock.MockObject()
mock_subprocess.ExpectCall(
'Popen').WithArgs(simple_mock.DONT_CARE).WillReturn(mock_popen)
mock_subprocess.SetAttribute('PIPE', simple_mock.MockObject())
real_subprocess = android_profiling_helper.subprocess
android_profiling_helper.subprocess = mock_subprocess
try:
libs = android_profiling_helper.GetRequiredLibrariesForPerfProfile('foo')
self.assertEqual(libs, set([
'/data/app-lib/com.google.android.apps.chrome-2/libchrome.2016.0.so',
'/system/lib/libart.so',
'/system/lib/libc.so',
'/system/lib/libm.so']))
finally:
android_profiling_helper.subprocess = real_subprocess
@decorators.Enabled('android')
def testGetRequiredLibrariesForVTuneProfile(self):
vtune_db_output = os.path.join(
util.GetUnittestDataDir(), 'sample_vtune_db_output')
with open(vtune_db_output, 'rb') as f:
vtune_db_output = pickle.load(f)
mock_cursor = simple_mock.MockObject()
mock_cursor.ExpectCall(
'execute').WithArgs(simple_mock.DONT_CARE).WillReturn(vtune_db_output)
mock_conn = simple_mock.MockObject()
mock_conn.ExpectCall('cursor').WillReturn(mock_cursor)
mock_conn.ExpectCall('close')
mock_sqlite3 = simple_mock.MockObject()
mock_sqlite3.ExpectCall(
'connect').WithArgs(simple_mock.DONT_CARE).WillReturn(mock_conn)
real_sqlite3 = android_profiling_helper.sqlite3
android_profiling_helper.sqlite3 = mock_sqlite3
try:
libs = android_profiling_helper.GetRequiredLibrariesForVTuneProfile('foo')
self.assertEqual(libs, set([
'/data/app-lib/com.google.android.apps.chrome-1/libchrome.2019.0.so',
'/system/lib/libdvm.so',
'/system/lib/libc.so',
'/system/lib/libm.so']))
finally:
android_profiling_helper.sqlite3 = real_sqlite3
class TestAndroidProfilingHelperTabTestCase(tab_test_case.TabTestCase):
def setUp(self):
super(TestAndroidProfilingHelperTabTestCase, self).setUp()
# pylint: disable=protected-access
browser_backend = self._browser._browser_backend
self._device = browser_backend.device()
@decorators.Enabled('android')
def testCreateSymFs(self):
# pylint: disable=protected-access
browser_pid = self._browser._browser_backend.pid
pids = ([browser_pid] +
self._browser._platform_backend.GetChildPids(browser_pid))
libs = _GetLibrariesMappedIntoProcesses(self._device, pids)
assert libs
symfs_dir = tempfile.mkdtemp()
try:
kallsyms = android_profiling_helper.CreateSymFs(self._device, symfs_dir,
libs)
# Check that we have kernel symbols.
assert os.path.exists(kallsyms)
is_unstripped = re.compile(r'^/data/app(-lib)?/.*\.so$')
has_unstripped = False
# Check that all requested libraries are present.
for lib in libs:
has_unstripped = has_unstripped or is_unstripped.match(lib)
assert os.path.exists(os.path.join(symfs_dir, lib[1:])), \
'%s not found in symfs' % lib
# Make sure we found at least one unstripped library.
assert has_unstripped
finally:
shutil.rmtree(symfs_dir)
# Test fails: crbug.com/437081
# @decorators.Enabled('android')
@decorators.Disabled('all')
def testGetToolchainBinaryPath(self):
with tempfile.NamedTemporaryFile() as libc:
self._device.PullFile('/system/lib/libc.so', libc.name)
path = android_profiling_helper.GetToolchainBinaryPath(libc.name,
'objdump')
assert path and os.path.exists(path)
| bsd-3-clause | 6,226,470,640,319,619,000 | 33.190476 | 80 | 0.683426 | false |
samuelchen/P2Python | test/RegisterTest.py | 1 | 1693 | # -*- coding: utf-8 -*-
'''
Created on 2013-11-19
@author: samuelchen
'''
import sys, os
sys.path.insert(0, '%s/../' % os.getcwd())
import unittest
from conn_mgr import ConnectionManager
import time
flag = False
class RegisterTest(unittest.TestCase):
def setUp(self):
ip ='127.0.0.1'
self.svr1 = ConnectionManager(peer_port = 22222, data_port = 22223)
self.svr1.addPeer(ip, 33333)
self.svr1.start()
#self.svr1.peerServer._heartbeat_loop = False # disable heart-beat check if required.
self.svr2 = ConnectionManager(peer_port = 33333, data_port = 33334)
self.svr2.addPeer(ip, 22222)
self.svr2.start()
def tearDown(self):
self.svr1.stop()
self.svr2.stop()
while self.svr1.isAlive() or self.svr2.isAlive():
time.sleep(0.5)
print 'svr1: %s, svr2: %s' % (self.svr1.isAlive(), self.svr2.isAlive())
def testRegister(self):
global flag
flag = False
def on_reg(**kwargs):
global flag
if kwargs['port'] == 22222:
print '-' * 60
print 'I received a register info from %(ip)s:%(port)d >> %(data)s' % kwargs
print '-' * 60
flag = True
return flag
self.svr2.callbacks['register'] = on_reg
self.svr1.sendRegister(loop=True)
self.svr2.sendRegister(loop=True)
time.sleep(3)
print 'testRegister done', flag
assert(flag)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testRegister']
unittest.main()
| gpl-2.0 | 2,264,397,013,491,486,700 | 23.651515 | 94 | 0.53987 | false |
jaeilepp/mne-python | mne/tests/test_dipole.py | 1 | 15664 | import os
import os.path as op
import sys
import warnings
import numpy as np
from nose.tools import assert_true, assert_equal, assert_raises
from numpy.testing import assert_allclose
from mne import (read_dipole, read_forward_solution,
convert_forward_solution, read_evokeds, read_cov,
SourceEstimate, write_evokeds, fit_dipole,
transform_surface_to, make_sphere_model, pick_types,
pick_info, EvokedArray, read_source_spaces, make_ad_hoc_cov,
make_forward_solution, Dipole, DipoleFixed, Epochs,
make_fixed_length_events)
from mne.dipole import get_phantom_dipoles
from mne.simulation import simulate_evoked
from mne.datasets import testing
from mne.utils import (run_tests_if_main, _TempDir, slow_test, requires_mne,
run_subprocess)
from mne.proj import make_eeg_average_ref_proj
from mne.io import read_raw_fif, read_raw_ctf
from mne.surface import _compute_nearest
from mne.bem import _bem_find_surface, read_bem_solution
from mne.transforms import apply_trans, _get_trans
warnings.simplefilter('always')
data_path = testing.data_path(download=False)
fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fname_dip = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_set1.dip')
fname_evo = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-cov.fif')
fname_bem = op.join(data_path, 'subjects', 'sample', 'bem',
'sample-1280-1280-1280-bem-sol.fif')
fname_src = op.join(data_path, 'subjects', 'sample', 'bem',
'sample-oct-2-src.fif')
fname_trans = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-trans.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
fname_xfit_dip = op.join(data_path, 'dip', 'fixed_auto.fif')
fname_xfit_dip_txt = op.join(data_path, 'dip', 'fixed_auto.dip')
fname_xfit_seq_txt = op.join(data_path, 'dip', 'sequential.dip')
fname_ctf = op.join(data_path, 'CTF', 'testdata_ctf_short.ds')
subjects_dir = op.join(data_path, 'subjects')
def _compare_dipoles(orig, new):
"""Compare dipole results for equivalence."""
assert_allclose(orig.times, new.times, atol=1e-3, err_msg='times')
assert_allclose(orig.pos, new.pos, err_msg='pos')
assert_allclose(orig.amplitude, new.amplitude, err_msg='amplitude')
assert_allclose(orig.gof, new.gof, err_msg='gof')
assert_allclose(orig.ori, new.ori, rtol=1e-4, atol=1e-4, err_msg='ori')
assert_equal(orig.name, new.name)
def _check_dipole(dip, n_dipoles):
"""Check dipole sizes."""
assert_equal(len(dip), n_dipoles)
assert_equal(dip.pos.shape, (n_dipoles, 3))
assert_equal(dip.ori.shape, (n_dipoles, 3))
assert_equal(dip.gof.shape, (n_dipoles,))
assert_equal(dip.amplitude.shape, (n_dipoles,))
@testing.requires_testing_data
def test_io_dipoles():
"""Test IO for .dip files."""
tempdir = _TempDir()
dipole = read_dipole(fname_dip)
print(dipole) # test repr
out_fname = op.join(tempdir, 'temp.dip')
dipole.save(out_fname)
dipole_new = read_dipole(out_fname)
_compare_dipoles(dipole, dipole_new)
@testing.requires_testing_data
def test_dipole_fitting_ctf():
"""Test dipole fitting with CTF data."""
raw_ctf = read_raw_ctf(fname_ctf).set_eeg_reference(projection=True)
events = make_fixed_length_events(raw_ctf, 1)
evoked = Epochs(raw_ctf, events, 1, 0, 0, baseline=None).average()
cov = make_ad_hoc_cov(evoked.info)
sphere = make_sphere_model((0., 0., 0.))
# XXX Eventually we should do some better checks about accuracy, but
# for now our CTF phantom fitting tutorials will have to do
# (otherwise we need to add that to the testing dataset, which is
# a bit too big)
fit_dipole(evoked, cov, sphere)
@slow_test
@testing.requires_testing_data
@requires_mne
def test_dipole_fitting():
"""Test dipole fitting."""
amp = 10e-9
tempdir = _TempDir()
rng = np.random.RandomState(0)
fname_dtemp = op.join(tempdir, 'test.dip')
fname_sim = op.join(tempdir, 'test-ave.fif')
fwd = convert_forward_solution(read_forward_solution(fname_fwd),
surf_ori=False, force_fixed=True)
evoked = read_evokeds(fname_evo)[0]
cov = read_cov(fname_cov)
n_per_hemi = 5
vertices = [np.sort(rng.permutation(s['vertno'])[:n_per_hemi])
for s in fwd['src']]
nv = sum(len(v) for v in vertices)
stc = SourceEstimate(amp * np.eye(nv), vertices, 0, 0.001)
evoked = simulate_evoked(fwd, stc, evoked.info, cov, nave=evoked.nave,
random_state=rng)
# For speed, let's use a subset of channels (strange but works)
picks = np.sort(np.concatenate([
pick_types(evoked.info, meg=True, eeg=False)[::2],
pick_types(evoked.info, meg=False, eeg=True)[::2]]))
evoked.pick_channels([evoked.ch_names[p] for p in picks])
evoked.add_proj(make_eeg_average_ref_proj(evoked.info))
write_evokeds(fname_sim, evoked)
# Run MNE-C version
run_subprocess([
'mne_dipole_fit', '--meas', fname_sim, '--meg', '--eeg',
'--noise', fname_cov, '--dip', fname_dtemp,
'--mri', fname_fwd, '--reg', '0', '--tmin', '0',
])
dip_c = read_dipole(fname_dtemp)
# Run mne-python version
sphere = make_sphere_model(head_radius=0.1)
dip, residuals = fit_dipole(evoked, fname_cov, sphere, fname_fwd)
# Sanity check: do our residuals have less power than orig data?
data_rms = np.sqrt(np.sum(evoked.data ** 2, axis=0))
resi_rms = np.sqrt(np.sum(residuals ** 2, axis=0))
factor = 1.
    # XXX weird, inexplicable difference for the 3.5 build; we'll assume it is
    # due to an Anaconda bug for now...
if os.getenv('TRAVIS', 'false') == 'true' and \
sys.version[:3] in ('3.5', '2.7'):
factor = 0.8
assert_true((data_rms > factor * resi_rms).all(),
msg='%s (factor: %s)' % ((data_rms / resi_rms).min(), factor))
# Compare to original points
transform_surface_to(fwd['src'][0], 'head', fwd['mri_head_t'])
transform_surface_to(fwd['src'][1], 'head', fwd['mri_head_t'])
assert_equal(fwd['src'][0]['coord_frame'], 5)
src_rr = np.concatenate([s['rr'][v] for s, v in zip(fwd['src'], vertices)],
axis=0)
src_nn = np.concatenate([s['nn'][v] for s, v in zip(fwd['src'], vertices)],
axis=0)
# MNE-C skips the last "time" point :(
out = dip.crop(dip_c.times[0], dip_c.times[-1])
assert_true(dip is out)
src_rr, src_nn = src_rr[:-1], src_nn[:-1]
# check that we did at least as well
corrs, dists, gc_dists, amp_errs, gofs = [], [], [], [], []
for d in (dip_c, dip):
new = d.pos
diffs = new - src_rr
corrs += [np.corrcoef(src_rr.ravel(), new.ravel())[0, 1]]
dists += [np.sqrt(np.mean(np.sum(diffs * diffs, axis=1)))]
gc_dists += [180 / np.pi * np.mean(np.arccos(np.sum(src_nn * d.ori,
axis=1)))]
amp_errs += [np.sqrt(np.mean((amp - d.amplitude) ** 2))]
gofs += [np.mean(d.gof)]
assert_true(dists[0] >= dists[1] * factor, 'dists: %s' % dists)
assert_true(corrs[0] <= corrs[1] / factor, 'corrs: %s' % corrs)
assert_true(gc_dists[0] >= gc_dists[1] * factor,
'gc-dists (ori): %s' % gc_dists)
assert_true(amp_errs[0] >= amp_errs[1] * factor,
'amplitude errors: %s' % amp_errs)
assert_true(gofs[0] <= gofs[1] / factor, 'gof: %s' % gofs)
@testing.requires_testing_data
def test_dipole_fitting_fixed():
"""Test dipole fitting with a fixed position."""
tpeak = 0.073
sphere = make_sphere_model(head_radius=0.1)
evoked = read_evokeds(fname_evo, baseline=(None, 0))[0]
evoked.pick_types(meg=True)
t_idx = np.argmin(np.abs(tpeak - evoked.times))
evoked_crop = evoked.copy().crop(tpeak, tpeak)
assert_equal(len(evoked_crop.times), 1)
cov = read_cov(fname_cov)
dip_seq, resid = fit_dipole(evoked_crop, cov, sphere)
assert_true(isinstance(dip_seq, Dipole))
assert_equal(len(dip_seq.times), 1)
pos, ori, gof = dip_seq.pos[0], dip_seq.ori[0], dip_seq.gof[0]
amp = dip_seq.amplitude[0]
# Fix position, allow orientation to change
dip_free, resid_free = fit_dipole(evoked, cov, sphere, pos=pos)
assert_true(isinstance(dip_free, Dipole))
assert_allclose(dip_free.times, evoked.times)
assert_allclose(np.tile(pos[np.newaxis], (len(evoked.times), 1)),
dip_free.pos)
assert_allclose(ori, dip_free.ori[t_idx]) # should find same ori
    assert_true(np.dot(dip_free.ori, ori).mean() < 0.9)  # but not all the same
assert_allclose(gof, dip_free.gof[t_idx]) # ... same gof
assert_allclose(amp, dip_free.amplitude[t_idx]) # and same amp
assert_allclose(resid, resid_free[:, [t_idx]])
# Fix position and orientation
dip_fixed, resid_fixed = fit_dipole(evoked, cov, sphere, pos=pos, ori=ori)
assert_true(isinstance(dip_fixed, DipoleFixed))
assert_allclose(dip_fixed.times, evoked.times)
assert_allclose(dip_fixed.info['chs'][0]['loc'][:3], pos)
assert_allclose(dip_fixed.info['chs'][0]['loc'][3:6], ori)
assert_allclose(dip_fixed.data[1, t_idx], gof)
assert_allclose(resid, resid_fixed[:, [t_idx]])
_check_roundtrip_fixed(dip_fixed)
# Degenerate conditions
evoked_nan = evoked.copy().crop(0, 0)
evoked_nan.data[0, 0] = None
assert_raises(ValueError, fit_dipole, evoked_nan, cov, sphere)
assert_raises(ValueError, fit_dipole, evoked, cov, sphere, ori=[1, 0, 0])
assert_raises(ValueError, fit_dipole, evoked, cov, sphere, pos=[0, 0, 0],
ori=[2, 0, 0])
assert_raises(ValueError, fit_dipole, evoked, cov, sphere, pos=[0.1, 0, 0])
@testing.requires_testing_data
def test_len_index_dipoles():
"""Test len and indexing of Dipole objects."""
dipole = read_dipole(fname_dip)
d0 = dipole[0]
d1 = dipole[:1]
_check_dipole(d0, 1)
_check_dipole(d1, 1)
_compare_dipoles(d0, d1)
mask = dipole.gof > 15
idx = np.where(mask)[0]
d_mask = dipole[mask]
_check_dipole(d_mask, 4)
_compare_dipoles(d_mask, dipole[idx])
@testing.requires_testing_data
def test_min_distance_fit_dipole():
"""Test dipole min_dist to inner_skull."""
subject = 'sample'
raw = read_raw_fif(fname_raw, preload=True)
# select eeg data
picks = pick_types(raw.info, meg=False, eeg=True, exclude='bads')
info = pick_info(raw.info, picks)
# Let's use cov = Identity
cov = read_cov(fname_cov)
cov['data'] = np.eye(cov['data'].shape[0])
    # Simulated scalp map
simulated_scalp_map = np.zeros(picks.shape[0])
simulated_scalp_map[27:34] = 1
simulated_scalp_map = simulated_scalp_map[:, None]
evoked = EvokedArray(simulated_scalp_map, info, tmin=0)
min_dist = 5. # distance in mm
bem = read_bem_solution(fname_bem)
dip, residual = fit_dipole(evoked, cov, bem, fname_trans,
min_dist=min_dist)
dist = _compute_depth(dip, fname_bem, fname_trans, subject, subjects_dir)
# Constraints are not exact, so bump the minimum slightly
assert_true(min_dist - 0.1 < (dist[0] * 1000.) < (min_dist + 1.))
assert_raises(ValueError, fit_dipole, evoked, cov, fname_bem, fname_trans,
-1.)
def _compute_depth(dip, fname_bem, fname_trans, subject, subjects_dir):
"""Compute dipole depth."""
trans = _get_trans(fname_trans)[0]
bem = read_bem_solution(fname_bem)
surf = _bem_find_surface(bem, 'inner_skull')
points = surf['rr']
points = apply_trans(trans['trans'], points)
depth = _compute_nearest(points, dip.pos, return_dists=True)[1][0]
return np.ravel(depth)
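# Illustrative use of the helper above (a sketch; assumes `dip` is a Dipole
# fitted as in test_min_distance_fit_dipole):
#     depths = _compute_depth(dip, fname_bem, fname_trans, 'sample',
#                             subjects_dir)
#     depth_mm = depths[0] * 1000.  # distance to the inner skull, in mm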
@testing.requires_testing_data
def test_accuracy():
"""Test dipole fitting to sub-mm accuracy."""
evoked = read_evokeds(fname_evo)[0].crop(0., 0.,)
evoked.pick_types(meg=True, eeg=False)
evoked.pick_channels([c for c in evoked.ch_names[::4]])
for rad, perc_90 in zip((0.09, None), (0.002, 0.004)):
bem = make_sphere_model('auto', rad, evoked.info,
relative_radii=(0.999, 0.998, 0.997, 0.995))
src = read_source_spaces(fname_src)
fwd = make_forward_solution(evoked.info, None, src, bem)
fwd = convert_forward_solution(fwd, force_fixed=True)
vertices = [src[0]['vertno'], src[1]['vertno']]
n_vertices = sum(len(v) for v in vertices)
amp = 10e-9
data = np.eye(n_vertices + 1)[:n_vertices]
data[-1, -1] = 1.
data *= amp
stc = SourceEstimate(data, vertices, 0., 1e-3, 'sample')
sim = simulate_evoked(fwd, stc, evoked.info, cov=None, nave=np.inf)
cov = make_ad_hoc_cov(evoked.info)
dip = fit_dipole(sim, cov, bem, min_dist=0.001)[0]
ds = []
for vi in range(n_vertices):
if vi < len(vertices[0]):
hi = 0
vertno = vi
else:
hi = 1
vertno = vi - len(vertices[0])
vertno = src[hi]['vertno'][vertno]
rr = src[hi]['rr'][vertno]
d = np.sqrt(np.sum((rr - dip.pos[vi]) ** 2))
ds.append(d)
# make sure that our median is sub-mm and the large majority are very
# close (we expect some to be off by a bit e.g. because they are
# radial)
assert_true((np.percentile(ds, [50, 90]) < [0.0005, perc_90]).all())
@testing.requires_testing_data
def test_dipole_fixed():
"""Test reading a fixed-position dipole (from Xfit)."""
dip = read_dipole(fname_xfit_dip)
    # print the representation of the DipoleFixed object
print(dip)
_check_roundtrip_fixed(dip)
with warnings.catch_warnings(record=True) as w: # unused fields
dip_txt = read_dipole(fname_xfit_dip_txt)
assert_true(any('extra fields' in str(ww.message) for ww in w))
assert_allclose(dip.info['chs'][0]['loc'][:3], dip_txt.pos[0])
assert_allclose(dip_txt.amplitude[0], 12.1e-9)
with warnings.catch_warnings(record=True): # unused fields
dip_txt_seq = read_dipole(fname_xfit_seq_txt)
assert_allclose(dip_txt_seq.gof, [27.3, 46.4, 43.7, 41., 37.3, 32.5])
def _check_roundtrip_fixed(dip):
"""Helper to test roundtrip IO for fixed dipoles."""
tempdir = _TempDir()
dip.save(op.join(tempdir, 'test-dip.fif.gz'))
dip_read = read_dipole(op.join(tempdir, 'test-dip.fif.gz'))
assert_allclose(dip_read.data, dip_read.data)
assert_allclose(dip_read.times, dip.times)
assert_equal(dip_read.info['xplotter_layout'], dip.info['xplotter_layout'])
assert_equal(dip_read.ch_names, dip.ch_names)
for ch_1, ch_2 in zip(dip_read.info['chs'], dip.info['chs']):
assert_equal(ch_1['ch_name'], ch_2['ch_name'])
for key in ('loc', 'kind', 'unit_mul', 'range', 'coord_frame', 'unit',
'cal', 'coil_type', 'scanno', 'logno'):
assert_allclose(ch_1[key], ch_2[key], err_msg=key)
def test_get_phantom_dipoles():
"""Test getting phantom dipole locations."""
assert_raises(ValueError, get_phantom_dipoles, 0)
assert_raises(ValueError, get_phantom_dipoles, 'foo')
for kind in ('vectorview', 'otaniemi'):
pos, ori = get_phantom_dipoles(kind)
assert_equal(pos.shape, (32, 3))
assert_equal(ori.shape, (32, 3))
run_tests_if_main(False)
| bsd-3-clause | 2,016,151,737,440,554,000 | 40.112861 | 79 | 0.617658 | false |
DerPferd/little-python | tests/parser/test_statement_control.py | 1 | 2074 | from littlepython.parser import Parser
from littlepython.tokenizer import Tokenizer
from tests import _if, ctrl, blk, _is, v, c, asg
def test_if():
ast = ctrl([_if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))]))], blk())
parser = Parser(Tokenizer("if a is 8 { b = 3 }"))
assert parser.statement() == ast
def test_if_with_new_lines():
ast = ctrl([_if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))]))], blk())
parser = Parser(Tokenizer("if a is 8 {\nb = 3\n}"))
assert parser.statement() == ast
def test_empty_if():
ast = ctrl([_if(_is(v("a"), c(8)), blk())], blk())
parser = Parser(Tokenizer("if a is 8 { }"))
assert parser.statement() == ast
def test_empty_if_with_new_line():
ast = ctrl([_if(_is(v("a"), c(8)), blk())], blk())
parser = Parser(Tokenizer("if a is 8 { \n }"))
assert parser.statement() == ast
def test_if_else():
ast = ctrl([_if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))]))], blk([asg(v("b"), c(2))]))
parser = Parser(Tokenizer("if a is 8 { b = 3 } else { b = 2 }"))
assert parser.statement() == ast
def test_if_elif():
ast = ctrl([_if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))])),
_if(_is(v("a"), c(4)), blk([asg(v("b"), c(2))]))], blk())
parser = Parser(Tokenizer("if a is 8 { b = 3 } elif a is 4 { b = 2 }"))
assert parser.statement() == ast
def test_if_elif_else():
ast = ctrl([_if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))])),
_if(_is(v("a"), c(4)), blk([asg(v("b"), c(2))]))], blk([asg(v("b"), c(2))]))
parser = Parser(Tokenizer("if a is 8 { b = 3 } elif a is 4 { b = 2 } else { b = 2 }"))
assert parser.statement() == ast
def test_if_elif_elif_else():
ast = ctrl([_if(_is(v("a"), c(8)), blk([asg(v("b"), c(3))])),
_if(_is(v("a"), c(4)), blk([asg(v("b"), c(2))])),
_if(_is(v("a"), c(7)), blk([asg(v("b"), c(6))]))], blk([asg(v("b"), c(2))]))
parser = Parser(Tokenizer("if a is 8 { b = 3 } elif a is 4 { b = 2 } elif a is 7 { b = 6 } else { b = 2 }"))
assert parser.statement() == ast
| mit | 6,994,067,430,912,506,000 | 36.709091 | 112 | 0.492768 | false |
hack4impact/legal-checkup | config.py | 1 | 3636 | import os
import sys
from raygun4py.middleware import flask as flask_raygun
PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 3:
import urllib.parse
else:
import urlparse
basedir = os.path.abspath(os.path.dirname(__file__))
if os.path.exists('config.env'):
    print('Importing environment from config.env file')
for line in open('config.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
class Config:
APP_NAME = 'Legal-Checkup'
if os.environ.get('SECRET_KEY'):
SECRET_KEY = os.environ.get('SECRET_KEY')
else:
SECRET_KEY = 'SECRET_KEY_ENV_VAR_NOT_SET'
print('SECRET KEY ENV VAR NOT SET! SHOULD NOT SEE IN PRODUCTION')
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
MAIL_SERVER = 'smtp.sendgrid.net'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD') or 'password'
ADMIN_EMAIL = os.environ.get(
'ADMIN_EMAIL') or '[email protected]'
EMAIL_SUBJECT_PREFIX = '[{}]'.format(APP_NAME)
EMAIL_SENDER = '{app_name} Admin <{email}>'.format(
app_name=APP_NAME, email=MAIL_USERNAME)
REDIS_URL = os.getenv('REDISTOGO_URL') or 'http://localhost:6379'
RAYGUN_APIKEY = os.environ.get('RAYGUN_APIKEY')
# Parse the REDIS_URL to set RQ config variables
if PYTHON_VERSION == 3:
urllib.parse.uses_netloc.append('redis')
url = urllib.parse.urlparse(REDIS_URL)
else:
urlparse.uses_netloc.append('redis')
url = urlparse.urlparse(REDIS_URL)
RQ_DEFAULT_HOST = url.hostname
RQ_DEFAULT_PORT = url.port
RQ_DEFAULT_PASSWORD = url.password
RQ_DEFAULT_DB = 0
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
ASSETS_DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
print('THIS APP IS IN DEBUG MODE. YOU SHOULD NOT SEE THIS IN PRODUCTION.')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
SSL_DISABLE = (os.environ.get('SSL_DISABLE') or 'True') == 'True'
@classmethod
def init_app(cls, app):
Config.init_app(app)
assert os.environ.get('SECRET_KEY'), 'SECRET_KEY IS NOT SET!'
flask_raygun.Provider(app, app.config['RAYGUN_APIKEY']).attach()
class HerokuConfig(ProductionConfig):
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# Handle proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
class UnixConfig(ProductionConfig):
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# Log to syslog
import logging
from logging.handlers import SysLogHandler
syslog_handler = SysLogHandler()
syslog_handler.setLevel(logging.WARNING)
app.logger.addHandler(syslog_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig,
'heroku': HerokuConfig,
'unix': UnixConfig
}
| mit | -5,213,742,616,836,574,000 | 28.560976 | 78 | 0.65099 | false |
openstack/python-openstackclient | openstackclient/tests/functional/network/v2/test_network_service_provider.py | 1 | 1655 | # Copyright (c) 2016, Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from openstackclient.tests.functional.network.v2 import common
class TestNetworkServiceProvider(common.NetworkTests):
"""Functional tests for network service provider"""
def setUp(self):
super(TestNetworkServiceProvider, self).setUp()
# Nothing in this class works with Nova Network
if not self.haz_network:
self.skipTest("No Network service present")
# NOTE(slaweq):
# that tests should works only when "standard" Neutron L3 agent is
# used, as e.g. OVN L3 plugin don't supports that.
l3_agent_list = json.loads(self.openstack(
'network agent list -f json --agent-type l3 -c ID'
))
if not l3_agent_list:
self.skipTest("No Neutron L3 Agents present")
def test_network_service_provider_list(self):
cmd_output = json.loads(self.openstack(
'network service provider list -f json'))
self.assertIn('L3_ROUTER_NAT', [x['Service Type'] for x in cmd_output])
| apache-2.0 | 2,719,658,020,632,557,600 | 39.365854 | 79 | 0.680967 | false |
jendap/tensorflow | tensorflow/python/data/kernel_tests/test_base.py | 1 | 9413 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test utilities for tf.data functionality."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.python import tf2
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class DatasetTestBase(test.TestCase):
"""Base class for dataset tests."""
@classmethod
def setUpClass(cls):
if tf2.enabled():
dataset_ops.Dataset = dataset_ops.DatasetV2
else:
dataset_ops.Dataset = dataset_ops.DatasetV1
def assertSparseValuesEqual(self, a, b):
"""Asserts that two SparseTensors/SparseTensorValues are equal."""
self.assertAllEqual(a.indices, b.indices)
self.assertAllEqual(a.values, b.values)
self.assertAllEqual(a.dense_shape, b.dense_shape)
def getNext(self, dataset, requires_initialization=False):
"""Returns a callable that returns the next element of the dataset.
Example use:
```python
# In both graph and eager modes
dataset = ...
get_next = self.getNext(dataset)
result = self.evaluate(get_next())
```
Args:
dataset: A dataset whose elements will be returned.
requires_initialization: Indicates that when the test is executed in graph
mode, it should use an initializable iterator to iterate through the
dataset (e.g. when it contains stateful nodes). Defaults to False.
Returns:
A callable that returns the next element of `dataset`.
"""
if context.executing_eagerly():
iterator = iter(dataset)
return iterator._next_internal # pylint: disable=protected-access
else:
if requires_initialization:
iterator = dataset_ops.make_initializable_iterator(dataset)
self.evaluate(iterator.initializer)
else:
iterator = dataset_ops.make_one_shot_iterator(dataset)
get_next = iterator.get_next()
return lambda: get_next
def _compareOutputToExpected(self, result_values, expected_values,
assert_items_equal):
if assert_items_equal:
# TODO(shivaniagrawal): add support for nested elements containing sparse
# tensors when needed.
self.assertItemsEqual(result_values, expected_values)
return
for i in range(len(result_values)):
nest.assert_same_structure(result_values[i], expected_values[i])
for result_value, expected_value in zip(
nest.flatten(result_values[i]), nest.flatten(expected_values[i])):
if sparse_tensor.is_sparse(result_value):
self.assertSparseValuesEqual(result_value, expected_value)
else:
self.assertAllEqual(result_value, expected_value)
def assertDatasetProduces(self,
dataset,
expected_output=None,
expected_shapes=None,
expected_error=None,
requires_initialization=False,
num_test_iterations=1,
assert_items_equal=False):
"""Asserts that a dataset produces the expected output / error.
Args:
dataset: A dataset to check for the expected output / error.
expected_output: A list of elements that the dataset is expected to
produce.
expected_shapes: A list of TensorShapes which is expected to match
output_shapes of dataset.
expected_error: A tuple `(type, predicate)` identifying the expected error
`dataset` should raise. The `type` should match the expected exception
type, while `predicate` should either be 1) a unary function that inputs
the raised exception and returns a boolean indicator of success or 2) a
regular expression that is expected to match the error message
partially.
requires_initialization: Indicates that when the test is executed in graph
mode, it should use an initializable iterator to iterate through the
dataset (e.g. when it contains stateful nodes). Defaults to False.
      num_test_iterations: Number of times `dataset` will be iterated. Defaults
        to 1.
assert_items_equal: Tests expected_output has (only) the same elements
regardless of order.
"""
self.assertTrue(
expected_error is not None or expected_output is not None,
"Exactly one of expected_output or expected error should be provided.")
if expected_error:
self.assertTrue(
expected_output is None,
"Exactly one of expected_output or expected error should be provided."
)
with self.assertRaisesWithPredicateMatch(expected_error[0],
expected_error[1]):
get_next = self.getNext(
dataset, requires_initialization=requires_initialization)
self.evaluate(get_next())
return
if expected_shapes:
self.assertEqual(expected_shapes, dataset.output_shapes)
self.assertGreater(num_test_iterations, 0)
for _ in range(num_test_iterations):
get_next = self.getNext(
dataset, requires_initialization=requires_initialization)
result = []
for _ in range(len(expected_output)):
result.append(self.evaluate(get_next()))
self._compareOutputToExpected(result, expected_output, assert_items_equal)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
def assertDatasetsEqual(self, dataset1, dataset2):
"""Checks that datasets are equal. Supports both graph and eager mode."""
self.assertEqual(dataset1.output_types, dataset2.output_types)
self.assertEqual(dataset1.output_classes, dataset2.output_classes)
flattened_types = nest.flatten(dataset1.output_types)
next1 = self.getNext(dataset1)
next2 = self.getNext(dataset2)
while True:
try:
op1 = self.evaluate(next1())
except errors.OutOfRangeError:
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(next2())
break
op2 = self.evaluate(next2())
op1 = nest.flatten(op1)
op2 = nest.flatten(op2)
assert len(op1) == len(op2)
for i in range(len(op1)):
if sparse_tensor.is_sparse(op1[i]):
self.assertSparseValuesEqual(op1[i], op2[i])
elif flattened_types[i] == dtypes.string:
self.assertAllEqual(op1[i], op2[i])
else:
self.assertAllClose(op1[i], op2[i])
def assertDatasetsRaiseSameError(self,
dataset1,
dataset2,
exception_class,
replacements=None):
"""Checks that datasets raise the same error on the first get_next call."""
if replacements is None:
replacements = []
next1 = self.getNext(dataset1)
next2 = self.getNext(dataset2)
try:
self.evaluate(next1())
raise ValueError(
"Expected dataset to raise an error of type %s, but it did not." %
repr(exception_class))
except exception_class as e:
expected_message = e.message
for old, new, count in replacements:
expected_message = expected_message.replace(old, new, count)
# Check that the first segment of the error messages are the same.
with self.assertRaisesRegexp(exception_class,
re.escape(expected_message)):
self.evaluate(next2())
def structuredDataset(self, structure, shape=None, dtype=dtypes.int64):
"""Returns a singleton dataset with the given structure."""
if shape is None:
shape = []
if structure is None:
return dataset_ops.Dataset.from_tensors(
array_ops.zeros(shape, dtype=dtype))
else:
return dataset_ops.Dataset.zip(
tuple([
self.structuredDataset(substructure, shape, dtype)
for substructure in structure
]))
def structuredElement(self, structure, shape=None, dtype=dtypes.int64):
"""Returns an element with the given structure."""
if shape is None:
shape = []
if structure is None:
return array_ops.zeros(shape, dtype=dtype)
else:
return tuple([
self.structuredElement(substructure, shape, dtype)
for substructure in structure
])
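  # Illustrative (assumed) values produced by the helpers above:
  #     self.structuredElement(None)             -> tf.int64 scalar zero
  #     self.structuredElement((None, (None,)))  -> (0, (0,)) nested tuple
  #     self.structuredDataset(None)             -> one-element Dataset of 0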
| apache-2.0 | 8,667,708,725,299,768,000 | 39.748918 | 80 | 0.655901 | false |
mikeboers/aque | aque/commands/submit.py | 1 | 2219 | """aque submit - Schedule a shell command.
Schedules to given command to run on the queue. The environment will have an
extra $AQUE_TID variable containing the ID of the running task.
E.g.:
$ aque submit --shell 'echo $AQUE_TID says: "$@"' one two three
"""
import argparse
import os
import sys
from aque.commands.main import main, command, argument
from aque import utils
@command(
argument('--cwd', help='where to run the task (default: current directory)'),
#argument('--stdin', help='path to read stdin from; "-" means this stdin (which is fully read before the task is submitted)'),
#argument('--stdout', help='path to write stdout to'),
#argument('--stderr', help='path to write stderr to'),
argument('-n', '--name', help='the task\'s name (for `aque status`)'),
argument('-p', '--priority', type=int, help='higher ones go first'),
argument('-c', '--cpus', type=int, help='how many CPUs to use per task'),
argument('--host', help='the host(s) to run on'),
argument('--platform', help='the platform to run on'),
argument('-s', '--shell', action='store_true', help='''the first argument is
executed as a shell script, with the rest provided to it as arguments'''),
argument('-w', '--watch', action='store_true', help='watch the stdout/stderr of the task as it executes'),
argument('command', nargs=argparse.REMAINDER, metavar='COMMAND', help='the command to run'),
help='schedule a shell command',
description=__doc__,
aliases=['s', 'sub'],
)
def submit(args):
cmd = list(args.command)
if args.shell:
cmd.insert(0, os.environ.get('SHELL', '/bin/bash'))
cmd.insert(1, '-c')
cmd.insert(3, 'aque-submit')
options = {'environ': os.environ}
for k in ('cpus', 'cwd', 'host', 'platform', 'priority'):
v = getattr(args, k, None)
if v is not None:
options[k] = getattr(args, k)
options.setdefault('io_paths', utils.paths_from_args(cmd))
name = args.name or ' '.join(cmd)
future = args.queue.submit_ex(pattern='shell', args=cmd, name=name, **options)
if args.watch:
return main(['output', '--watch', str(future.id)])
print future.id
| bsd-3-clause | -4,217,119,700,614,682,000 | 32.621212 | 130 | 0.632267 | false |
iofun/colony | colony/system/kmeans.py | 1 | 19928 | # -*- coding: utf-8 -*-
'''
Colony k-means clustering system logic.
'''
# This file is part of colony.
__author__ = 'Team Machine'
__doc__ = '''
# k-means algorithm
k-means clustering is a method of vector quantization,
popular for cluster analysis in data mining.
k-means clustering aims to partition n observations
into k clusters in which each observation belongs to
the cluster with the nearest mean, serving as a prototype
of the cluster.
This results in a partitioning of the data space into Voronoi cells.
'''
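# Illustrative sketch of the scikit-learn call this module wraps
# (assumed toy data; not executed on import):
#     from sklearn.cluster import KMeans
#     import numpy as np
#     X = np.random.rand(100, 4)          # hypothetical feature matrix
#     km = KMeans(n_clusters=3, init='k-means++').fit(X)
#     km.labels_                          # cluster index per observation
#     km.cluster_centers_                 # one centroid per cluster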
# data wrangler
from sklearn.feature_extraction import DictVectorizer
# dimensionality reduction
from sklearn.decomposition import TruncatedSVD
# text data wrangling stuff
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
# scikit-learn parallel tools
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import Normalizer
# scikit-learn k-means clusters
from sklearn.cluster import KMeans, MiniBatchKMeans
# scikit-learn metrics
from sklearn import metrics
# seconds since the epoch
from time import time
import numpy as np
# standard python tools
import arrow
import motor
import msgpack
import uuid
# this colony code base!
from colony.messages import kmeans
from colony.messages import Feature, Forecast, Metric
from colony.messages import Resource, ClusterResource
from colony.tools import clean_structure
from colony.tools import clean_result
# tornado old gen
from tornado import gen
class Prediction(object):
'''
KMeans cluster prediction
'''
@gen.engine
def new_prediction(self, dataset, callback):
'''
# new prediction dataset
'''
try:
message = Forecast(dataset)
message.validate()
except Exception, e:
callback(None, e)
return
        prediction = clean_structure(message)
        message = yield gen.Task(self.db.predictions.insert, prediction)
        message, error = message.args
        if error:
            callback(None, error)
            return
        #message = {
        #    'uuid': prediction.get('uuid'),
        #    'forecast': np.asarray(prediction.get('doing!'))
        #}
        callback(prediction.get('uuid'), None)
@gen.engine
def get_prediction(self, model_uuid, prediction_uuid, callback):
        '''
        Get a specific k-means prediction.
        '''
        model_type = 'k-means'
try:
message = yield motor.Op(
self.db.predictions.find_one, {
'uuid':prediction_uuid,
'model_type': model_type
},{'_id':0}
)
if message:
message = kmeans.Cluster(message)
message.validate()
except Exception, e:
callback(None, e)
return
callback(message, None)
@gen.engine
def get_predictions(self, model_uuid, page_num, callback):
'''
        Get k-means predictions.
'''
model_type = 'k-means'
page_num = int(page_num)
page_size = self.settings['page_size']
result = []
query = self.db.predictions.find({'active':True},{'_id':0})
query = query.sort([
('uuid', -1)
]).skip(page_num * page_size).limit(page_size)
try:
for record in (yield motor.Op(query.to_list)):
result.append(kmeans.Cluster(record))
struct = {'result': result}
message = kmeans.ClusterResult(struct)
message.validate()
except Exception, e:
callback(None, e)
return
message = clean_result(message)
callback(message, None)
class Trainer(object):
'''
Cluster trainer
'''
@gen.engine
def get_feature(self, feature_uuid, callback):
'''
'''
pass
@gen.engine
def gen_features(self, feature_uuid, callback):
'''
'''
pass
@gen.engine
def new_feature_set(self, dataset, callback):
'''
# new feature dataset
'''
try:
features = Feature(dataset)
features.validate()
except Exception, e:
callback(None, e)
return
features = clean_structure(features)
message = yield gen.Task(self.db.features.insert, features)
message, error = message.args
if error:
callback(None, error)
return
message = {
'uuid': features.get('uuid'),
'features': np.asarray(features.get('dimensions'))
}
callback(message, None)
@gen.engine
def train_new_cluster(self, features, centroids, callback):
'''
Do the actual clustering.
'''
number_seeds = (centroids if centroids else self.settings['number_seeds'])
minibatch = self.settings['minibatch']
max_iter = self.settings['max_iter']
number_init = self.settings['number_init']
max_no_improvement = self.settings['max_no_improvement']
batch_size = self.settings['batch_size']
verbose = self.settings['verbose']
try:
if minibatch:
km = MiniBatchKMeans(
n_clusters=number_seeds,
init='k-means++',
max_iter=max_iter,
max_no_improvement=max_no_improvement,
n_init=number_init,
batch_size=batch_size,
verbose=verbose
)
else:
km = KMeans(
n_clusters=number_seeds,
init='k-means++',
max_iter=max_iter,
n_init=number_init,
verbose=verbose
)
start = time()
km.fit(features)
cluster_time = time() - start
message = {
'cluster':km,
'cluster_time': cluster_time
}
except Exception, e:
callback(None, e)
return
callback(message, None)
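    # Illustrative (assumed) call pattern from a Tornado coroutine, following
    # the gen.Task convention used throughout this module:
    #     message = yield gen.Task(trainer.train_new_cluster, features, 8)
    #     result, error = message.args
    #     km = result['cluster']          # fitted (MiniBatch)KMeans instance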
class Wrangler(object):
'''
Data Wrangler
'''
@gen.engine
def dict_feature_extraction(self, dataset, callback):
'''
# dictionary feature extraction.
This transformer turns lists of mappings (dict-like objects)
of feature names to feature values into Numpy arrays or
scipy.sparse matrices for use with scikit-learn estimators.
'''
use_sparse = self.settings['use_sparse']
start = time()
if use_sparse:
print("Extracting features from the training dataset using sparse vectorizer")
vectorizer = DictVectorizer(sparce=use_sparse)
else:
print("Extracting features from the training dataset")
vectorizer = DictVectorizer(sparse=False)
features = vectorizer.fit_transform(dataset)
print("done in %fs" % (time() - start))
print("n_samples: %d, n_features: %d" % features.shape)
return features
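    # Illustrative DictVectorizer behaviour for the method above
    # (assumed sample data):
    #     measurements = [{'city': 'Dubai', 'temp': 33.},
    #                     {'city': 'London', 'temp': 12.}]
    #     DictVectorizer(sparse=False).fit_transform(measurements)
    #     # -> array([[ 1.,  0., 33.],
    #     #           [ 0.,  1., 12.]])  # one-hot city columns + raw temp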
@gen.engine
def text_feature_extraction(self, callback):
'''
# text feature extraction.
'''
use_hashing = self.settings['use_hashing']
use_idf = self.settings['use_idf']
n_features = self.settings['n_features']
        print("Extracting features from the training "
              "dataset using a sparse vectorizer")
        # we aren't using it, why?
start = time()
if use_hashing:
if use_idf:
# Perform an IDF normalization
# on the output of HashingVectorizer
hasher = HashingVectorizer(
n_features=n_features,
stop_words='english',
non_negative=True,
norm=None,
binary=False
)
vectorizer = Pipeline((
('hasher', hasher),
('tf_idf', TfidfTransformer())
))
else:
vectorizer = HashingVectorizer(
n_features=n_features,
stop_words='english',
non_negative=False,
norm='l2',
binary=False
)
else:
vectorizer = TfidfVectorizer(
max_df=0.5,
max_features=n_features,
stop_words='english',
use_idf=use_idf
)
return vectorizer
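    # Illustrative (assumed) use of the returned vectorizer:
    #     docs = ['first document', 'second document and more']
    #     X = vectorizer.fit_transform(docs)  # sparse [n_docs, n_features]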
@gen.engine
def dimensionality_reduction(self, feature, callback):
'''
Performing dimensionality reduction using LSA
'''
n_components = self.settings['n_components']
if n_components:
print("Performing dimensionality reduction using LSA")
start = time()
# Vectorizer results are normalized,
# which makes KMeans behave as spherical k-means for better results.
lsa = TruncatedSVD(n_components)
feature = lsa.fit_transform(feature)
# Since LSA/SVD results are not normalized,
# we have to redo the normalization.
feature = Normalizer(copy=False).fit_transform(feature)
print("done in %fs" % (time() - start))
return feature
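    # Equivalent pipeline form of the LSA step above (a sketch):
    #     from sklearn.pipeline import make_pipeline
    #     lsa = make_pipeline(TruncatedSVD(n_components),
    #                         Normalizer(copy=False))
    #     feature = lsa.fit_transform(feature)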
class Cluster(object):
'''
k-means cluster logic
'''
    @property
    def n_clusters(self):
        '''
        The number of clusters to form as well
        as the number of centroids to generate.
        '''
        # NOTE: returning self.n_clusters here would recurse forever; these
        # properties assume underscore-prefixed backing attributes.
        return self._n_clusters
    @property
    def max_iter(self):
        '''
        Maximum number of iterations
        of the k-means algorithm for a single run.
        '''
        return self._max_iter
    @property
    def n_init(self):
        '''
        Number of times the k-means algorithm will be run
        with different centroid seeds. The final results
        will be the best output of n_init consecutive runs
        in terms of inertia.
        '''
        return self._n_init
    @property
    def init(self):
        '''
        Method for initialization, defaults to
        'k-means++', 'random' or an ndarray.
        '''
        return self._init
    @property
    def precompute_distances(self):
        '''
        Precompute distances (faster but takes more memory).
        '''
        return self._precompute_distances
    @property
    def tol(self):
        '''
        Relative tolerance w.r.t. inertia to declare convergence
        float, optional default: 1e-4
        '''
        return self._tol
    @property
    def n_jobs(self):
        '''
        The number of jobs to use for the computation.
        This works by breaking down the pairwise matrix
        into n_jobs even slices and computing them in parallel.
        '''
        return self._n_jobs
    @property
    def random_state(self):
        '''
        The generator used to initialize the centers.
        If an integer is given, it fixes the seed.
        Defaults to the global numpy random number generator.
        '''
        return self._random_state
@gen.engine
def new_model(self, struct, callback):
'''
Create a new cluster model.
'''
try:
cluster = kmeans.Cluster(struct)
cluster.validate()
except Exception, e:
callback(None, e)
return
cluster = clean_structure(cluster)
message = yield gen.Task(self.db.models.insert, cluster)
message, error = message.args
if error:
callback(None, error)
return
# push message to the right channel
callback(cluster.get('uuid'), None)
@gen.engine
def delete_model(self, model_uuid, callback):
'''
Delete k-means model
'''
try:
result = yield motor.Op( self.db.models.remove,
{'uuid':model_uuid} )
except Exception, e:
callback(None, e)
return
callback(result, None)
@gen.engine
def replace_model(self, struct, model_uuid, callback):
'''
Replace k-means model
'''
try:
cluster = kmeans.Cluster(struct)
cluster.validate()
except Exception, e:
callback(None, e)
return
cluster = clean_structure(cluster)
message = yield gen.Task( self.db.models.update,
{'uuid': model_uuid},
cluster )
message, error = message.args
# missing crash_and_die
if error:
callback(None, error)
return
if not message.get('updatedExisting'):
error = {'uuid': model_uuid, 'replaced': False}
callback(None, error)
return
# push message to the right channel
callback(model_uuid, None)
@gen.engine
def get_model(self, model_type, model_uuid, callback):
'''
Get specific k-means model.
'''
if not model_type:
model_type = 'k-means'
try:
message = yield motor.Op(
self.db.models.find_one, {
'uuid':model_uuid,
'model_type': model_type
},{'_id':0}
)
if message:
message = kmeans.Cluster(message)
message.validate()
except Exception, e:
callback(None, e)
return
callback(message, None)
@gen.engine
def get_models(self, model_type, page_num, callback):
'''
Get k-means models.
'''
model_type = 'k-means'
page_num = int(page_num)
page_size = self.settings['page_size']
result = []
query = self.db.models.find({'active':True},{'_id':0})
query = query.sort([
('uuid', -1)
]).skip(page_num * page_size).limit(page_size)
try:
for record in (yield motor.Op(query.to_list)):
result.append(kmeans.Cluster(record))
struct = {'result': result}
message = kmeans.ClusterResult(struct)
message.validate()
except Exception, e:
callback(None, e)
return
message = clean_result(message)
callback(message, None)
@gen.engine
def new_resource(self, struct, callback):
'''
Create a new cluster resource
'''
try:
message = ClusterResource(struct)
message.validate()
message = message.to_primitive()
except Exception, e:
callback(None, e)
return
resource = ''.join(('resources.', message['resource']))
try:
message = yield motor.Op(
self.db.models.update,
{'uuid': message['model_uuid']},
{
'$addToSet': {
''.join((resource, '.contains')): message['uuid']
},
'$inc': {
'resources.total': 1,
''.join((resource, '.total')): 1
}
}
)
except Exception, e:
callback(None, e)
return
callback(message, None)
@gen.engine
def check_exist(self, model_uuid, callback):
'''
Check if cluster exist
'''
try:
exist = yield motor.Op(self.db.models.find_one,
{'uuid': model_uuid},
{'uuid':1, '_id':0})
exist = (True if exist else False)
except Exception, e:
            callback(None, e)
            return
        callback(exist, None)
@gen.engine
def check_type(self, model_uuid, model_type, callback):
'''
Check cluster type
'''
try:
check_type = yield motor.Op(self.db.models.find_one,
{'uuid': model_uuid,
'model_type': model_type},
{'model_type':1,'_id':0})
check_type = (True if check_type else False)
except Exception, e:
            callback(None, e)
            return
        callback(check_type, None)
@gen.engine
def get_centroids(self, model_uuid, callback):
'''
Get cluster centroid seeds
'''
try:
centroids = yield motor.Op(self.db.models.find_one,
{'uuid': model_uuid},
{'centroids':1,'_id':0})
centroids = (centroids if centroids else False)
except Exception, e:
            callback(None, e)
            return
        callback(centroids, None)
@gen.engine
def new_kmeans_unit(self, cluster_labels, cluster_unique_labels, cluster_centers, callback):
'''
New kmeans cluster
The labels over the training data can be found in the labels attributes.
'''
try:
struct = {
'labels': cluster_labels,
'unique_labels': cluster_unique_labels,
'centers': cluster_centers
}
unit = Unit(struct)
unit.validate()
except Exception, e:
callback(None, e)
return
unit = clean_structure(unit)
        message = yield gen.Task(self.db.units.insert, unit)
message, error = message.args
if error:
callback(None, error)
return
callback({'uuid': unit.get('uuid')}, None)
@gen.engine
def new_metrics(self, feature_uuid, feature_data, feature_labels, cluster_labels, callback):
'''
Create new metrics
'''
try:
message = {
'homogeneity': metrics.homogeneity_score(feature_labels, cluster_labels),
'completeness': metrics.completeness_score(feature_labels, cluster_labels),
'v_measure': metrics.v_measure_score(feature_labels, cluster_labels),
'adjusted_rand': metrics.adjusted_rand_score(feature_labels, cluster_labels),
                'silhouette': metrics.silhouette_score(feature_data, cluster_labels, sample_size=1000),
'feature_uuid': feature_uuid,
}
metric = Metric(message)
metric.validate()
except Exception, e:
callback(None, e)
return
metric = clean_structure(metric)
message = yield gen.Task(self.db.metrics.insert, metric)
message, error = message.args
if error:
callback(None, error)
return
callback({'uuid': metric.get('uuid')}, None)
@gen.engine
def set_cluster_time(self, model_uuid, cluster_time, callback):
'''
Set cluster time
'''
try:
message = yield motor.Op(self.db.models.update,
{'uuid': model_uuid},
{'$set': {'cluster_time': cluster_time}})
except Exception, e:
callback(None, e)
return
callback(message, None) | agpl-3.0 | -1,745,465,895,318,321,200 | 25.607477 | 103 | 0.518115 | false |
HaebinShin/tensorflow | tensorflow/contrib/learn/python/learn/utils/export.py | 1 | 7614 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Export utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.session_bundle import exporter
from tensorflow.contrib.session_bundle import gc
from tensorflow.python.client import session as tf_session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saver as tf_saver
def _get_first_op_from_collection(collection_name):
"""Get first element from the collection."""
elements = ops.get_collection(collection_name)
if elements is not None:
if elements:
return elements[0]
return None
def _get_saver():
"""Lazy init and return saver."""
saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
if saver is not None:
if saver:
saver = saver[0]
else:
saver = None
if saver is None and variables.all_variables():
saver = tf_saver.Saver()
ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
return saver
def _export_graph(graph, saver, checkpoint_path, export_dir,
default_graph_signature, named_graph_signatures,
exports_to_keep):
"""Exports graph via session_bundle, by creating a Session."""
with graph.as_default():
with tf_session.Session('') as session:
variables.initialize_local_variables()
data_flow_ops.initialize_all_tables()
saver.restore(session, checkpoint_path)
export = exporter.Exporter(saver)
export.init(init_op=control_flow_ops.group(
variables.initialize_local_variables(),
data_flow_ops.initialize_all_tables()),
default_graph_signature=default_graph_signature,
named_graph_signatures=named_graph_signatures)
export.export(export_dir, contrib_variables.get_global_step(), session,
exports_to_keep=exports_to_keep)
def generic_signature_fn(examples, unused_features, predictions):
"""Creates generic signature from given examples and predictions.
This is needed for backward compatibility with default behaviour of
export_estimator.
Args:
examples: `Tensor`.
unused_features: `dict` of `Tensor`s.
predictions: `dict` of `Tensor`s.
Returns:
Tuple of default signature and named signature.
"""
tensors = {'inputs': examples}
if not isinstance(predictions, dict):
predictions = {'outputs': predictions}
tensors.update(predictions)
default_signature = exporter.generic_signature(tensors)
return default_signature, {}
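# Illustrative (assumed) tensor map behind the generic signature above:
#     tensors = {'inputs': examples, 'outputs': predictions}  # bare Tensor case
#     exporter.generic_signature(tensors)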
def logistic_regression_signature_fn(examples, unused_features, predictions):
"""Creates regression signature from given examples and predictions.
Args:
examples: `Tensor`.
unused_features: `dict` of `Tensor`s.
predictions: `dict` of `Tensor`s.
Returns:
Tuple of default classification signature and named signature.
"""
# predictions has shape [batch_size, 2] where first column is P(Y=0|x)
# while second column is P(Y=1|x). We are only interested in the second
# column for inference.
assert predictions.get_shape()[1] == 2
positive_predictions = predictions[:, 1]
signatures = {}
signatures['regression'] = exporter.regression_signature(examples,
positive_predictions)
return signatures['regression'], signatures
def classification_signature_fn(examples, unused_features, predictions):
"""Creates classification signature from given examples and predictions.
Args:
examples: `Tensor`.
unused_features: `dict` of `Tensor`s.
predictions: `dict` of `Tensor`s.
Returns:
Tuple of default classification signature and named signature.
"""
signatures = {}
signatures['classification'] = exporter.classification_signature(
examples, classes_tensor=predictions)
return signatures['classification'], signatures
# pylint: disable=protected-access
def _default_input_fn(estimator, examples):
"""Creates default input parsing using Estimator's feature signatures."""
return estimator._get_feature_ops_from_example(examples)
def export_estimator(estimator,
export_dir,
signature_fn=None,
input_fn=_default_input_fn,
default_batch_size=1,
exports_to_keep=None):
"""Exports inference graph into given dir.
Args:
estimator: Estimator to export
export_dir: A string containing a directory to write the exported graph
and checkpoints.
    signature_fn: Function that, given `Tensor` of `Example` strings,
      `dict` of `Tensor`s for features and `dict` of `Tensor`s for predictions,
      returns default and named exporting signatures.
    input_fn: Function that, given `Tensor` of `Example` strings, parses it
      into features that are then passed to the model.
default_batch_size: Default batch size of the `Example` placeholder.
exports_to_keep: Number of exports to keep.
"""
checkpoint_path = tf_saver.latest_checkpoint(estimator._model_dir)
with ops.Graph().as_default() as g:
contrib_variables.create_global_step(g)
examples = array_ops.placeholder(dtype=dtypes.string,
shape=[default_batch_size],
name='input_example_tensor')
features = input_fn(estimator, examples)
predictions = estimator._get_predict_ops(features)
if signature_fn:
default_signature, named_graph_signatures = signature_fn(examples,
features,
predictions)
else:
logging.warn(
'Change warning: `signature_fn` will be required after 2016-08-01.\n'
'Using generic signatures for now. To maintain this behavior, '
'pass:\n'
' signature_fn=export.generic_signature_fn\n'
'Also consider passing a regression or classification signature; see '
'cl/126430915 for an example.')
default_signature, named_graph_signatures = generic_signature_fn(
examples, features, predictions)
if exports_to_keep is not None:
exports_to_keep = gc.largest_export_versions(exports_to_keep)
_export_graph(g, _get_saver(), checkpoint_path, export_dir,
default_graph_signature=default_signature,
named_graph_signatures=named_graph_signatures,
exports_to_keep=exports_to_keep)
# pylint: enable=protected-access
| apache-2.0 | -3,426,686,088,125,877,000 | 38.046154 | 82 | 0.678356 | false |
iLampard/alphaware | alphaware/tests/utils/test_pandas_utils.py | 1 | 6717 | # -*- coding: utf-8 -*-
from unittest import TestCase
from parameterized import parameterized
import pandas as pd
import numpy as np
from numpy.testing.utils import assert_array_equal
from pandas import (MultiIndex,
Index)
from pandas.util.testing import assert_frame_equal, assert_series_equal
from alphaware.enums import OutputDataFormat, FreqType
from alphaware.const import INDEX_FACTOR
from alphaware.utils import (convert_df_format,
top,
group_by_freq,
fwd_return,
weighted_rank)
from datetime import datetime as dt
class TestPandasUtils(TestCase):
@parameterized.expand([(pd.DataFrame({'001': [1, 2, 3], '002': [2, 3, 4]}, index=['2014', '2015', '2016']),
OutputDataFormat.MULTI_INDEX_DF,
'test_factor',
INDEX_FACTOR,
pd.DataFrame(index=MultiIndex(levels=[['2014', '2015', '2016'], ['001', '002']],
labels=[[0, 0, 1, 1, 2, 2], [0, 1, 0, 1, 0, 1]],
names=['trade_date', 'ticker']),
data=[1, 2, 2, 3, 3, 4],
columns=['test_factor']))])
def test_convert_df_format_1(self, data, target_format, col_name, multi_index, expected):
calculated = convert_df_format(data, target_format, col_name, multi_index)
assert_frame_equal(calculated, expected)
@parameterized.expand(
[(pd.DataFrame(
index=MultiIndex.from_product([['2014', '2015', '2016'], ['001', '002']], names=['trade_date', 'ticker']),
data=[1, 2, 3, 4, 5, 6],
columns=['factor']),
OutputDataFormat.PITVOT_TABLE_DF,
'factor',
INDEX_FACTOR,
pd.DataFrame({'001': [1, 3, 5], '002': [2, 4, 6]},
index=Index(['2014', '2015', '2016'], name='trade_date')))])
def test_convert_df_format_2(self, data, target_format, col_name, multi_index, expected):
calculated = convert_df_format(data, target_format, col_name, multi_index)
assert_frame_equal(calculated, expected)
@parameterized.expand(
[(pd.DataFrame(data=[[1, 23, 4, 5], [4, 5, 7, 8], [10, 5, 11, 8], [34, 65, 27, 78]],
columns=['A', 'B', 'C', 'D']),
2,
['A'],
pd.DataFrame(data=[[34, 65, 27, 78], [10, 5, 11, 8]], index=[3, 2], columns=['A', 'B', 'C', 'D'])
)])
def test_top_1(self, data, n, column, expected):
calculated = top(data, column=column, n=n)
assert_frame_equal(calculated, expected)
@parameterized.expand(
[(pd.Series(data=[35, 12, 45, 79, 123, 74, 35]),
3,
pd.Series(data=[123, 79, 74], index=[4, 3, 5])
)])
def test_top_2(self, data, n, expected):
calculated = top(data, n=n)
assert_series_equal(calculated, expected)
@parameterized.expand(
[(pd.DataFrame(data=[1, 2, 3, 4, 5, 6, 7, 9, 0, 12],
index=[dt(2017, 7, 1), dt(2017, 6, 1), dt(2017, 7, 2), dt(2017, 6, 1), dt(2017, 3, 1),
dt(2017, 3, 1), dt(2017, 1, 1), dt(2017, 2, 1), dt(2017, 1, 1), dt(2017, 2, 1)]),
dt(2017, 7, 31),
FreqType.EOM,
pd.DataFrame(data=[1, 3], index=[dt(2017, 7, 1), dt(2017, 7, 2)])
),
(pd.Series(data=[1, 2, 3, 4, 5, 6, 7, 9, 0, 12],
index=[dt(2016, 7, 1), dt(2016, 6, 1), dt(2017, 7, 2), dt(2017, 7, 1), dt(2017, 3, 1),
dt(2017, 3, 1), dt(2017, 1, 1), dt(2017, 2, 1), dt(2017, 1, 1), dt(2017, 2, 1)]),
dt(2016, 12, 31),
FreqType.EOY,
pd.DataFrame(data=[2, 1], index=[dt(2016, 6, 1), dt(2016, 7, 1)])
),
(pd.Series(data=[1, 2, 3, 4, 5, 6, 7, 9, 0, 12],
index=[dt(2016, 7, 1), dt(2016, 7, 1), dt(2017, 7, 2), dt(2017, 7, 1), dt(2017, 3, 1),
dt(2017, 3, 1), dt(2017, 1, 1), dt(2017, 2, 1), dt(2017, 1, 1), dt(2017, 2, 1)]),
(2016, 7, 1),
FreqType.EOD,
pd.DataFrame(data=[1, 2], index=[dt(2016, 7, 1), dt(2016, 7, 1)])
)
])
def test_group_by_freq(self, data, group, freq, expected):
calculated = group_by_freq(data, freq=freq).get_group(group)
assert_frame_equal(calculated, expected)
@parameterized.expand([(pd.Series(data=[1, 2, 3, 4],
index=pd.MultiIndex.from_product([[dt(2014, 1, 30), dt(2014, 2, 28)], ['a', 'b']],
names=['trade_date', 'ticker'])),
1,
pd.DataFrame(data=[3, 4],
index=pd.MultiIndex.from_product([[dt(2014, 1, 30)], ['a', 'b']],
names=['trade_date', 'ticker']),
columns=['fwd_return'])
),
(pd.DataFrame(data=[1, 2, 3, 4, 5, 6],
index=pd.MultiIndex.from_product(
[[dt(2014, 1, 30), dt(2014, 2, 28), dt(2014, 3, 30)], ['a', 'b']],
names=['trade_date', 'ticker'])),
2,
pd.DataFrame(data=[5, 6],
index=pd.MultiIndex.from_product([[dt(2014, 1, 30)], ['a', 'b']],
names=['trade_date', 'ticker']),
columns=['fwd_return'])
)
])
def test_fwd_return(self, data, period, expected):
calculated = fwd_return(data, period=period)
assert_frame_equal(calculated, expected)
@parameterized.expand(
[(pd.DataFrame({'a': [1, 2, 3], 'b': [2, 4, 6]}), [1, 1], None, True, pd.DataFrame([0.0, 1.0, 2.0])),
(pd.DataFrame({'a': [1, 2, 3], 'b': [2, 4, 6]}), [1, 0], [0.6, 0.4], False, np.array([0.8, 1.0, 1.2]))])
def test_weighted_rank(self, data, order, weight, out_df, expected):
calculated = weighted_rank(data, order, weight, out_df)
if isinstance(expected, pd.DataFrame):
assert_frame_equal(calculated, expected)
else:
assert_array_equal(calculated, expected)
| apache-2.0 | -7,518,428,269,691,289,000 | 51.476563 | 120 | 0.450648 | false |
testmana2/test | Preferences/ConfigurationPages/EditorAutocompletionPage.py | 1 | 2368 | # -*- coding: utf-8 -*-
# Copyright (c) 2006 - 2015 Detlev Offenbach <[email protected]>
#
"""
Module implementing the Editor Autocompletion configuration page.
"""
from __future__ import unicode_literals
from .ConfigurationPageBase import ConfigurationPageBase
from .Ui_EditorAutocompletionPage import Ui_EditorAutocompletionPage
import Preferences
class EditorAutocompletionPage(ConfigurationPageBase,
Ui_EditorAutocompletionPage):
"""
Class implementing the Editor Autocompletion configuration page.
"""
def __init__(self):
"""
Constructor
"""
super(EditorAutocompletionPage, self).__init__()
self.setupUi(self)
self.setObjectName("EditorAutocompletionPage")
# set initial values
self.acEnabledCheckBox.setChecked(
Preferences.getEditor("AutoCompletionEnabled"))
self.acCaseSensitivityCheckBox.setChecked(
Preferences.getEditor("AutoCompletionCaseSensitivity"))
self.acReplaceWordCheckBox.setChecked(
Preferences.getEditor("AutoCompletionReplaceWord"))
self.acThresholdSlider.setValue(
Preferences.getEditor("AutoCompletionThreshold"))
self.acScintillaCheckBox.setChecked(
Preferences.getEditor("AutoCompletionScintillaOnFail"))
def save(self):
"""
Public slot to save the Editor Autocompletion configuration.
"""
Preferences.setEditor(
"AutoCompletionEnabled",
self.acEnabledCheckBox.isChecked())
Preferences.setEditor(
"AutoCompletionCaseSensitivity",
self.acCaseSensitivityCheckBox.isChecked())
Preferences.setEditor(
"AutoCompletionReplaceWord",
self.acReplaceWordCheckBox.isChecked())
Preferences.setEditor(
"AutoCompletionThreshold",
self.acThresholdSlider.value())
Preferences.setEditor(
"AutoCompletionScintillaOnFail",
self.acScintillaCheckBox.isChecked())
def create(dlg):
"""
Module function to create the configuration page.
@param dlg reference to the configuration dialog
@return reference to the instantiated page (ConfigurationPageBase)
"""
page = EditorAutocompletionPage()
return page
| gpl-3.0 | -6,298,200,585,957,023,000 | 31.888889 | 71 | 0.668074 | false |
juanmixp/Pandas | 10_min_tutorial/10_min_pandas.py | 1 | 1338 | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 19 10:57:30 2016
@author: jmaunon
"""
# 10 minutes to pandas
#==============================================================================
#%% Libraries
#==============================================================================
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#==============================================================================
#%% Object creation
#==============================================================================
## Creating series passing a list of values
s = pd.Series([1, 3, 5, np.nan, 6, 8])
## Creating a DataFrame by passing a numpy.array with a datetime index and labeled columns
dates = pd.date_range("20130101", periods = 6)
df = pd.DataFrame(np.random.randn(6,4), index = dates, columns = list("ABCD"))
## Creating a Dataframe by passing a dict of objects
data = {
"A" : 1,
"B" : pd.Timestamp("20130102"),
"C" : pd.Series(1, index = list(range(4)), dtype = "float32"),
"D" : np.array([3]*4, dtype="int32"),
"E" : pd.Categorical(["test", "train", "test", "train"]),
"F" : "foo"
}
df2 = pd.DataFrame(data)
## Notice the types of the variables
df2.dtypes
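#==============================================================================
#%% Viewing data
#==============================================================================
## Short added illustration in the spirit of the same tutorial; all of these
## are standard pandas calls on the frames created above.
df.head() # first five rows
df.tail(3) # last three rows
df.index # the DatetimeIndex built with date_range above
df.describe() # quick summary statistics
df.sort_values(by = "B") # sort rows by column B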
#============================================================================= | gpl-3.0 | -5,046,675,955,946,083,000 | 28.454545 | 90 | 0.428999 | false |
citrix-openstack-build/ironic | ironic/drivers/modules/fake.py | 1 | 2494 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fake driver interfaces used in testing.
"""
from ironic.common import exception
from ironic.common import states
from ironic.drivers import base
class FakePower(base.PowerInterface):
"""Example implementation of a simple power interface."""
def validate(self, node):
return True
def get_power_state(self, task, node):
return states.NOSTATE
def set_power_state(self, task, node, power_state):
pass
def reboot(self, task, node):
pass
class FakeDeploy(base.DeployInterface):
"""Example imlementation of a deploy interface that uses a
separate power interface.
"""
def validate(self, node):
return True
def deploy(self, task, node):
pass
def tear_down(self, task, node):
pass
class FakeVendor(base.VendorInterface):
"""Example implementation of a vendor passthru interface."""
def validate(self, node, **kwargs):
method = kwargs.get('method')
if not method:
raise exception.InvalidParameterValue(_(
"Invalid vendor passthru, no 'method' specified."))
if method == 'foo':
bar = kwargs.get('bar')
if not bar:
raise exception.InvalidParameterValue(_(
"Parameter not passed to Ironic."))
else:
raise exception.InvalidParameterValue(_(
"Unsupported method (%s) passed through to vendor extension.")
% method)
return True
def _foo(self, task, node, bar):
return True if bar == 'baz' else False
def vendor_passthru(self, task, node, **kwargs):
method = kwargs.get('method')
if method == 'foo':
bar = kwargs.get('bar')
return self._foo(task, node, bar)
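# Hedged illustration of the vendor passthru contract above; 'task' and
# 'node' are placeholders normally supplied by the framework, not built here:
#
#   vendor = FakeVendor()
#   vendor.validate(node, method='foo', bar='baz') # validation passes
#   vendor.vendor_passthru(task, node, method='foo', bar='baz') # -> True
#   vendor.vendor_passthru(task, node, method='foo', bar='qux') # -> False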
| apache-2.0 | -7,277,306,020,164,892,000 | 28 | 78 | 0.639134 | false |
Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2018_05_01/aio/_configuration.py | 1 | 3218 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ResourceManagementClientConfiguration(Configuration):
"""Configuration for ResourceManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(ResourceManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2018-05-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
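# Hedged usage sketch (not part of the generated client): any
# AsyncTokenCredential works here, e.g. DefaultAzureCredential from the
# separate azure-identity package:
#
#   from azure.identity.aio import DefaultAzureCredential
#   config = ResourceManagementClientConfiguration(
#       credential=DefaultAzureCredential(),
#       subscription_id="00000000-0000-0000-0000-000000000000", # placeholder
#   )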
| mit | -4,927,705,798,240,208,000 | 47.029851 | 134 | 0.682101 | false |
nanobox-io/nanobox-adapter-libcloud | nanobox_libcloud/controllers/meta.py | 1 | 3030 | from flask import render_template, request
from nanobox_libcloud import app
from nanobox_libcloud.adapters import get_adapter
from nanobox_libcloud.adapters.base import AdapterBase
from nanobox_libcloud.utils import output
# Overview and usage endpoints, to explain how this meta-adapter works
@app.route('/', methods=['GET'])
def overview():
"""Provides an overview of the libcloud meta-adapter, and how to use it, in the most general sense."""
adapters = sorted(AdapterBase.registry.keys())
return render_template("overview.html", adapters=adapters)
@app.route('/docs', methods=['GET'])
def docs():
"""Loads Swagger UI with all the supported adapters' OpenAPI Spec Files pre-loaded into the Topbar for exploration."""
adapters = sorted(AdapterBase.registry.keys())
return render_template("docs.html", adapters=adapters)
@app.route('/<adapter_id>', methods=['GET'])
def usage(adapter_id):
"""Provides usage info for a certain adapter, and how to use it, in a more specific sense."""
adapter = get_adapter(adapter_id)
if not adapter:
return output.failure("That adapter doesn't (yet) exist. Please check the adapter name and try again.", 501)
return render_template("usage.html", adapter=adapter)
@app.route('/<adapter_id>/docs', methods=['GET'])
def adapter_docs(adapter_id):
"""Loads Swagger UI with a certain adapter's OpenAPI Spec File pre-loaded."""
return render_template("docs.html", adapters=[adapter_id])
# Actual metadata endpoints for the Nanobox Provider Adapter API
@app.route('/<adapter_id>/meta', methods=['GET'])
def meta(adapter_id):
"""Provides the metadata for a certain adapter."""
adapter = get_adapter(adapter_id)
if not adapter:
return output.failure("That adapter doesn't (yet) exist. Please check the adapter name and try again.", 501)
return output.success(adapter.do_meta())
@app.route('/<adapter_id>/catalog', methods=['GET'])
def catalog(adapter_id):
"""Provides the catalog data for a certain adapter."""
adapter = get_adapter(adapter_id)
if not adapter:
return output.failure("That adapter doesn't (yet) exist. Please check the adapter name and try again.", 501)
result = adapter.do_catalog(request.headers)
if not isinstance(result, list):
return output.failure('%d: %s' % (result.code, result.message) if hasattr(result, 'code') and hasattr(result, 'message') else repr(result), 500)
return output.success(result)
@app.route('/<adapter_id>/verify', methods=['POST'])
def verify(adapter_id):
"""Verifies user credentials for a certain adapter."""
adapter = get_adapter(adapter_id)
if not adapter:
return output.failure("That adapter doesn't (yet) exist. Please check the adapter name and try again.", 501)
result = adapter.do_verify(request.headers)
if result is not True:
return output.failure("Credential verification failed. Please check your credentials and try again. (Error %s)" % (result), 401)
return ""
| mit | -6,108,083,656,066,812,000 | 36.407407 | 152 | 0.705941 | false |
dpausp/pyrailway | pyrailway/operation.py | 1 | 2267 | class Operation:
    """Runs its stations in order, routing between success and failure tracks."""
def __init__(self, *stations):
self.stations = stations
def __call__(self, params=None, **dependencies):
options = dict(params=(params or {}), **dependencies)
success = True
for station in self.stations:
if (success and station.runs_on_success) or (not success and station.runs_on_failure):
success = station(options, dependencies)
                if success is FailFast:
return Result(False, options)
return Result(success, options)
class Result:
    """Outcome of an Operation run: a success flag plus the accumulated options."""
def __init__(self, success, result_data):
self.result_data = result_data
self.success = success
@property
def failure(self):
return not self.success
def __getitem__(self, key):
return self.result_data[key]
def __contains__(self, key):
return key in self.result_data
def get(self, key):
return self.result_data.get(key)
class FailFast:
    """Sentinel returned by a fail-fast step to abort the run with a failed Result."""
class Activity:
    """Base station wrapping a callable; subclasses choose the track it runs on."""
runs_on_success = False
runs_on_failure = False
def __init__(self, func, name=None):
self.func = func
self.name = name
def callfunc(self, options, dependencies):
params = options["params"]
return self.func(options=options, params=params, **dependencies)
def __call__(self, options, dependencies):
self.callfunc(options, dependencies)
return True
def __repr__(self):
return "{} {}".format(self.__class__.__name__, self.name or self.func.__name__)
class step(Activity):
    """Success-track station; a falsy result moves the pipeline to the failure track."""
    runs_on_success = True
def __init__(self, func, name=None, fail_fast=False):
super().__init__(func, name)
self.fail_fast = fail_fast
def __call__(self, options, dependencies):
res = self.callfunc(options, dependencies)
success = bool(res)
if not success and self.fail_fast:
return FailFast
return success
class failure(Activity):
    """Failure-track station (e.g. logging or compensation); the run stays failed."""
    runs_on_failure = True
def __call__(self, options, dependencies):
self.callfunc(options, dependencies)
return False
class success(Activity):
    """Success-track station whose return value is ignored; the run stays successful."""
    runs_on_success = True
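if __name__ == "__main__":
    # Minimal self-contained demo; the station functions below are
    # illustrative, not part of the library.
    def validate(options, params, **dependencies):
        # A falsy return switches the pipeline onto the failure track.
        return bool(params.get("name"))
    def persist(options, params, **dependencies):
        options["saved"] = True
        return options # truthy, so the run stays on the success track
    create_user = Operation(step(validate, fail_fast=True), step(persist))
    ok = create_user(params={"name": "Ada"})
    assert ok.success and ok["saved"]
    failed = create_user(params={})
    assert failed.failure # the fail-fast step aborted before persist ran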
| mit | -8,869,891,410,881,496,000 | 24.188889 | 99 | 0.574327 | false |
cbitstech/Purple-Robot-Django | management/commands/extractors/builtin_rawlocationprobe.py | 1 | 3339 | # pylint: disable=line-too-long
import datetime
import psycopg2
import pytz
CREATE_PROBE_TABLE_SQL = 'CREATE TABLE builtin_rawlocationprobe(id SERIAL PRIMARY KEY, user_id TEXT, guid TEXT, timestamp BIGINT, utc_logged TIMESTAMP, latitude DOUBLE PRECISION, longitude DOUBLE PRECISION, altitude DOUBLE PRECISION, accuracy DOUBLE PRECISION, provider TEXT, network_available BOOLEAN, gps_available BOOLEAN);'
CREATE_PROBE_USER_ID_INDEX = 'CREATE INDEX ON builtin_rawlocationprobe(user_id);'
CREATE_PROBE_GUID_INDEX = 'CREATE INDEX ON builtin_rawlocationprobe(guid);'
CREATE_PROBE_UTC_LOGGED_INDEX = 'CREATE INDEX ON builtin_rawlocationprobe(utc_logged);'
def exists(connection_str, user_id, reading):
conn = psycopg2.connect(connection_str)
if probe_table_exists(conn) is False:
conn.close()
return False
cursor = conn.cursor()
cursor.execute('SELECT id FROM builtin_rawlocationprobe WHERE (user_id = %s AND guid = %s);', (user_id, reading['GUID']))
row_exists = (cursor.rowcount > 0)
cursor.close()
conn.close()
return row_exists
def probe_table_exists(conn):
cursor = conn.cursor()
cursor.execute('SELECT table_name FROM information_schema.tables WHERE (table_schema = \'public\' AND table_name = \'builtin_rawlocationprobe\')')
table_exists = (cursor.rowcount > 0)
cursor.close()
return table_exists
def insert(connection_str, user_id, reading, check_exists=True):
conn = psycopg2.connect(connection_str)
cursor = conn.cursor()
if check_exists and probe_table_exists(conn) is False:
cursor.execute(CREATE_PROBE_TABLE_SQL)
cursor.execute(CREATE_PROBE_USER_ID_INDEX)
cursor.execute(CREATE_PROBE_GUID_INDEX)
cursor.execute(CREATE_PROBE_UTC_LOGGED_INDEX)
conn.commit()
reading_cmd = 'INSERT INTO builtin_rawlocationprobe(user_id, ' + \
'guid, ' + \
'timestamp, ' + \
'utc_logged, ' + \
'latitude, ' + \
'longitude, ' + \
'altitude, ' + \
'accuracy, ' + \
'provider, ' + \
'network_available, ' + \
'gps_available) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id;'
values = [user_id, reading['GUID'], reading['TIMESTAMP'], datetime.datetime.fromtimestamp(reading['TIMESTAMP'], tz=pytz.utc), reading['LATITUDE'], reading['LONGITUDE']]
if 'ALTITUDE' in reading:
values.append(reading['ALTITUDE'])
else:
values.append(None)
values.append(reading['ACCURACY'])
values.append(reading['PROVIDER'])
if 'NETWORK_AVAILABLE' in reading:
values.append(reading['NETWORK_AVAILABLE'])
else:
values.append(None)
if 'GPS_AVAILABLE' in reading:
values.append(reading['GPS_AVAILABLE'])
else:
values.append(None)
cursor.execute(reading_cmd, values)
conn.commit()
cursor.close()
conn.close()
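# Hedged usage sketch; the DSN and reading values below are invented, and the
# reading dict shows only the keys this extractor actually requires:
#
#   reading = {'GUID': 'abc-123', 'TIMESTAMP': 1400000000,
#              'LATITUDE': 41.9, 'LONGITUDE': -87.6,
#              'ACCURACY': 10.0, 'PROVIDER': 'gps'}
#   if not exists('dbname=purple user=purple', 'user-1', reading):
#       insert('dbname=purple user=purple', 'user-1', reading)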
| gpl-3.0 | -5,321,546,172,391,040,000 | 35.293478 | 327 | 0.575621 | false |
FMCorz/SublimeMoodle | new_file.py | 1 | 2299 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Moodle bundle for Sublime Text
Copyright (c) 2013 Frédéric Massart - FMCorz.net
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
http://github.com/FMCorz/SublimeMoodle
"""
import sublime
import sublime_plugin
import os
import datetime
class MoodleNewFileCommand(sublime_plugin.WindowCommand):
def run(self):
view = self.window.new_file()
syntax = os.path.join(sublime.packages_path(), 'PHP/PHP.tmLanguage')
view.set_syntax_file(syntax)
view.run_command('insert_snippet', {
"contents": self.snippet.replace('{YEAR}', str(datetime.date.today().year))
})
    def description(self):
        return """
        Opens a new file pre-filled with the standard Moodle PHP boilerplate
        (GPL header and file docblock).
        """.strip()
snippet = """<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* ${1:File}.
*
* @package ${2:core}
* @copyright {YEAR} ${TM_FULLNAME}
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
${3:defined('MOODLE_INTERNAL') || die();}
$0"""
| gpl-3.0 | -2,795,095,273,131,239,400 | 30.902778 | 87 | 0.703526 | false |
teamtaverna/core | app/api/tests/utils.py | 1 | 1160 | from base64 import b64encode
from django.contrib.auth.models import User
admin_test_credentials = ('admin1', '[email protected]', 'qwerty123',)
normal_user_credentials = ('user1', '[email protected]', 'qwerty123',)
endpoint = '/api'
def obtain_api_key(client):
credentials = '{}:{}'.format(
admin_test_credentials[0],
admin_test_credentials[2]
)
b64_encoded_credentials = b64encode(credentials.encode('utf-8'))
return client.post(
'/api/api_key',
**{'HTTP_AUTHORIZATION': 'Basic %s' % b64_encoded_credentials.decode('utf-8')}
).json()['api_key']
def create_admin_account():
return User.objects.create_superuser(*admin_test_credentials)
def create_normal_user_acount():
return User.objects.create_user(*normal_user_credentials)
def make_request(client, query, method='GET'):
header = {
'HTTP_X_TAVERNATOKEN': obtain_api_key(client)
}
if method == 'GET':
return client.get(endpoint, data={'query': query}, **header).json()
if method == 'POST':
return client.post(endpoint, data={'query': query}, **header).json()
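# Hedged usage sketch inside a Django test case; the GraphQL query string is
# illustrative, not a field of the real schema:
#
#   class ApiTest(TestCase):
#       def test_query(self):
#           create_admin_account()
#           response = make_request(self.client, '{ timetables { id } }')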
| mit | -72,517,511,289,812,450 | 28 | 90 | 0.631034 | false |
rgayon/plaso | tests/serializer/json_serializer.py | 1 | 17546 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the serializer object implementation using JSON."""
from __future__ import unicode_literals
import collections
import json
import time
import unittest
import uuid
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import fake_path_spec
from dfvfs.path import factory as path_spec_factory
import plaso
from plaso.containers import event_sources
from plaso.containers import events
from plaso.containers import reports
from plaso.containers import sessions
from plaso.containers import tasks
from plaso.serializer import json_serializer
from tests import test_lib as shared_test_lib
class JSONSerializerTestCase(shared_test_lib.BaseTestCase):
"""Tests for a JSON serializer object."""
def _TestReadSerialized(self, serializer_object, json_dict):
"""Tests the ReadSerialized function.
Args:
serializer_object (JSONSerializer): the JSON serializer object.
json_dict (dict[str, object]): one or more JSON serialized values
Returns:
object: unserialized object.
"""
# We use json.dumps to make sure the dict does not serialize into
# an invalid JSON string such as one that contains Python string prefixes
# like b'' or u''.
json_string = json.dumps(json_dict)
unserialized_object = serializer_object.ReadSerialized(json_string)
self.assertIsNotNone(unserialized_object)
return unserialized_object
def _TestWriteSerialized(
self, serializer_object, unserialized_object, expected_json_dict):
"""Tests the WriteSerialized function.
Args:
serializer_object (JSONSerializer): the JSON serializer object.
unserialized_object (object): the unserialized object.
expected_json_dict (dict[str, object]): one or more expected JSON
serialized values.
Returns:
str: serialized JSON string.
"""
json_string = serializer_object.WriteSerialized(unserialized_object)
# We use json.loads here to compare dicts since we cannot pre-determine
# the actual order of values in the JSON string.
json_dict = json.loads(json_string)
self.assertEqual(
sorted(json_dict.items()), sorted(expected_json_dict.items()))
return json_string
class JSONAttributeContainerSerializerTest(JSONSerializerTestCase):
"""Tests for the JSON attribute container serializer object."""
def testReadAndWriteSerializedAnalysisReport(self):
"""Test ReadSerialized and WriteSerialized of AnalysisReport."""
expected_report_dict = {
'dude': [
['Google Keep - notes and lists',
'hmjkmjkepdijhoojdojkdfohbdgmmhki']
],
'frank': [
['YouTube', 'blpcfgokakmgnkcojhhkbfbldkacnbeo'],
['Google Play Music', 'icppfcnhkcmnfdhfhphakoifcfokfdhg']
]
}
expected_report_text = (
' == USER: dude ==\n'
' Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]\n'
'\n'
' == USER: frank ==\n'
' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]\n'
' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]\n'
'\n')
expected_analysis_report = reports.AnalysisReport(
plugin_name='chrome_extension_test', text=expected_report_text)
expected_analysis_report.report_dict = expected_report_dict
expected_analysis_report.time_compiled = 1431978243000000
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_analysis_report))
self.assertIsNotNone(json_string)
analysis_report = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(analysis_report)
self.assertIsInstance(analysis_report, reports.AnalysisReport)
# TODO: preserve the tuples in the report dict.
# TODO: add report_array tests.
expected_analysis_report_dict = {
'plugin_name': 'chrome_extension_test',
'report_dict': expected_report_dict,
'text': expected_report_text,
'time_compiled': 1431978243000000}
analysis_report_dict = analysis_report.CopyToDict()
self.assertEqual(
sorted(analysis_report_dict.items()),
sorted(expected_analysis_report_dict.items()))
# TODO: add ExtractionWarning tests.
def testReadAndWriteSerializedEventData(self):
"""Test ReadSerialized and WriteSerialized of EventData."""
expected_event_data = events.EventData()
expected_event_data.data_type = 'test:event2'
expected_event_data.parser = 'test_parser'
expected_event_data.empty_string = ''
expected_event_data.zero_integer = 0
expected_event_data.integer = 34
expected_event_data.float = -122.082203542683
expected_event_data.string = 'Normal string'
expected_event_data.unicode_string = 'And I am a unicorn.'
expected_event_data.my_list = ['asf', 4234, 2, 54, 'asf']
expected_event_data.a_tuple = ('some item', [234, 52, 15])
expected_event_data.null_value = None
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_event_data))
self.assertIsNotNone(json_string)
event_data = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(event_data)
self.assertIsInstance(event_data, events.EventData)
expected_event_data_dict = {
'a_tuple': ('some item', [234, 52, 15]),
'data_type': 'test:event2',
'empty_string': '',
'integer': 34,
'float': -122.082203542683,
'my_list': ['asf', 4234, 2, 54, 'asf'],
'parser': 'test_parser',
'string': 'Normal string',
'unicode_string': 'And I am a unicorn.',
'zero_integer': 0}
event_data_dict = event_data.CopyToDict()
self.assertEqual(event_data_dict, expected_event_data_dict)
def testReadAndWriteSerializedEventDataStream(self):
"""Test ReadSerialized and WriteSerialized of EventDataStream."""
test_file = self._GetTestFilePath(['ímynd.dd'])
volume_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file)
path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
parent=volume_path_spec)
expected_event_data_stream = events.EventDataStream()
expected_event_data_stream.md5_hash = 'e3df0d2abd2c27fbdadfb41a47442520'
expected_event_data_stream.path_spec = path_spec
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_event_data_stream))
self.assertIsNotNone(json_string)
event_data_stream = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(event_data_stream)
self.assertIsInstance(event_data_stream, events.EventDataStream)
expected_event_data_stream_dict = {
'md5_hash': 'e3df0d2abd2c27fbdadfb41a47442520',
'path_spec': path_spec.comparable}
event_data_stream_dict = event_data_stream.CopyToDict()
path_spec = event_data_stream_dict.get('path_spec', None)
if path_spec:
event_data_stream_dict['path_spec'] = path_spec.comparable
self.assertEqual(event_data_stream_dict, expected_event_data_stream_dict)
def testReadAndWriteSerializedEventObject(self):
"""Test ReadSerialized and WriteSerialized of EventObject."""
expected_event = events.EventObject()
expected_event.parser = 'test_parser'
expected_event.timestamp = 1234124
expected_event.timestamp_desc = 'Written'
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_event))
self.assertIsNotNone(json_string)
event = json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string)
self.assertIsNotNone(event)
self.assertIsInstance(event, events.EventObject)
expected_event_dict = {
'parser': 'test_parser',
'timestamp': 1234124,
'timestamp_desc': 'Written'}
event_dict = event.CopyToDict()
self.assertEqual(event_dict, expected_event_dict)
def testReadAndWriteSerializedEventSource(self):
"""Test ReadSerialized and WriteSerialized of EventSource."""
test_path_spec = fake_path_spec.FakePathSpec(location='/opt/plaso.txt')
expected_event_source = event_sources.EventSource(path_spec=test_path_spec)
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_event_source))
self.assertIsNotNone(json_string)
event_source = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(event_source)
self.assertIsInstance(event_source, event_sources.EventSource)
expected_event_source_dict = {
'path_spec': test_path_spec.comparable,
}
event_source_dict = event_source.CopyToDict()
path_spec = event_source_dict.get('path_spec', None)
if path_spec:
event_source_dict['path_spec'] = path_spec.comparable
self.assertEqual(
sorted(event_source_dict.items()),
sorted(expected_event_source_dict.items()))
def testReadAndWriteSerializedEventTag(self):
"""Test ReadSerialized and WriteSerialized of EventTag."""
expected_event_tag = events.EventTag()
expected_event_tag.AddLabels(['Malware', 'Common'])
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_event_tag))
self.assertIsNotNone(json_string)
event_tag = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(event_tag)
self.assertIsInstance(event_tag, events.EventTag)
expected_event_tag_dict = {
'labels': ['Malware', 'Common'],
}
event_tag_dict = event_tag.CopyToDict()
self.assertEqual(
sorted(event_tag_dict.items()),
sorted(expected_event_tag_dict.items()))
def testReadAndWriteSerializedSession(self):
"""Test ReadSerialized and WriteSerialized of Session."""
parsers_counter = collections.Counter()
parsers_counter['filestat'] = 3
parsers_counter['total'] = 3
expected_session = sessions.Session()
expected_session.product_name = 'plaso'
expected_session.product_version = plaso.__version__
expected_session.parsers_counter = parsers_counter
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_session))
self.assertIsNotNone(json_string)
session = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(session)
self.assertIsInstance(session, sessions.Session)
expected_session_dict = {
'aborted': False,
'analysis_reports_counter': session.analysis_reports_counter,
'debug_mode': False,
'event_labels_counter': session.event_labels_counter,
'identifier': session.identifier,
'parsers_counter': parsers_counter,
'preferred_encoding': 'utf-8',
'preferred_time_zone': 'UTC',
'product_name': 'plaso',
'product_version': plaso.__version__,
'start_time': session.start_time
}
session_dict = session.CopyToDict()
self.assertEqual(
sorted(session_dict.items()), sorted(expected_session_dict.items()))
def testReadAndWriteSerializedSessionCompletion(self):
"""Test ReadSerialized and WriteSerialized of SessionCompletion."""
timestamp = int(time.time() * 1000000)
session_identifier = '{0:s}'.format(uuid.uuid4().hex)
parsers_counter = collections.Counter()
parsers_counter['filestat'] = 3
parsers_counter['total'] = 3
expected_session_completion = sessions.SessionCompletion(
identifier=session_identifier)
expected_session_completion.timestamp = timestamp
expected_session_completion.parsers_counter = parsers_counter
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_session_completion))
self.assertIsNotNone(json_string)
session_completion = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(session_completion)
self.assertIsInstance(session_completion, sessions.SessionCompletion)
expected_session_completion_dict = {
'aborted': False,
'identifier': session_identifier,
'parsers_counter': parsers_counter,
'timestamp': timestamp
}
session_completion_dict = session_completion.CopyToDict()
self.assertEqual(
sorted(session_completion_dict.items()),
sorted(expected_session_completion_dict.items()))
def testReadAndWriteSerializedSessionStart(self):
"""Test ReadSerialized and WriteSerialized of SessionStart."""
timestamp = int(time.time() * 1000000)
session_identifier = '{0:s}'.format(uuid.uuid4().hex)
expected_session_start = sessions.SessionStart(
identifier=session_identifier)
expected_session_start.timestamp = timestamp
expected_session_start.product_name = 'plaso'
expected_session_start.product_version = plaso.__version__
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_session_start))
self.assertIsNotNone(json_string)
session_start = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(session_start)
self.assertIsInstance(session_start, sessions.SessionStart)
expected_session_start_dict = {
'identifier': session_identifier,
'product_name': 'plaso',
'product_version': plaso.__version__,
'timestamp': timestamp
}
session_start_dict = session_start.CopyToDict()
self.assertEqual(
sorted(session_start_dict.items()),
sorted(expected_session_start_dict.items()))
def testReadAndWriteSerializedTask(self):
"""Test ReadSerialized and WriteSerialized of Task."""
session_identifier = '{0:s}'.format(uuid.uuid4().hex)
expected_task = tasks.Task(session_identifier=session_identifier)
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_task))
self.assertIsNotNone(json_string)
task = json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string)
self.assertIsNotNone(task)
self.assertIsInstance(task, tasks.Task)
expected_task_dict = {
'aborted': False,
'has_retry': False,
'identifier': task.identifier,
'session_identifier': session_identifier,
'start_time': task.start_time
}
task_dict = task.CopyToDict()
self.assertEqual(
sorted(task_dict.items()), sorted(expected_task_dict.items()))
def testReadAndWriteSerializedTaskCompletion(self):
"""Test ReadSerialized and WriteSerialized of TaskCompletion."""
timestamp = int(time.time() * 1000000)
session_identifier = '{0:s}'.format(uuid.uuid4().hex)
task_identifier = '{0:s}'.format(uuid.uuid4().hex)
expected_task_completion = tasks.TaskCompletion(
identifier=task_identifier, session_identifier=session_identifier)
expected_task_completion.timestamp = timestamp
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_task_completion))
self.assertIsNotNone(json_string)
task_completion = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(task_completion)
self.assertIsInstance(task_completion, tasks.TaskCompletion)
expected_task_completion_dict = {
'aborted': False,
'identifier': task_identifier,
'session_identifier': session_identifier,
'timestamp': timestamp
}
task_completion_dict = task_completion.CopyToDict()
self.assertEqual(
sorted(task_completion_dict.items()),
sorted(expected_task_completion_dict.items()))
def testReadAndWriteSerializedTaskStart(self):
"""Test ReadSerialized and WriteSerialized of TaskStart."""
timestamp = int(time.time() * 1000000)
session_identifier = '{0:s}'.format(uuid.uuid4().hex)
task_identifier = '{0:s}'.format(uuid.uuid4().hex)
expected_task_start = tasks.TaskStart(
identifier=task_identifier, session_identifier=session_identifier)
expected_task_start.timestamp = timestamp
json_string = (
json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
expected_task_start))
self.assertIsNotNone(json_string)
task_start = (
json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
json_string))
self.assertIsNotNone(task_start)
self.assertIsInstance(task_start, tasks.TaskStart)
expected_task_start_dict = {
'identifier': task_identifier,
'session_identifier': session_identifier,
'timestamp': timestamp
}
task_start_dict = task_start.CopyToDict()
self.assertEqual(
sorted(task_start_dict.items()),
sorted(expected_task_start_dict.items()))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 2,450,257,676,703,365,000 | 32.611111 | 79 | 0.691935 | false |
canihavesomecoffee/sample-platform | decorators.py | 1 | 4344 | """Define decorators for use across the app."""
from datetime import date
from functools import wraps
from typing import Any, Callable, Dict, List, Optional, Union
from flask import g, render_template, request
from database import EnumSymbol
from mod_auth.models import User
def get_menu_entries(user: Optional[User], title: str, icon: str, access: Optional[List] = None, route: str = '',
all_entries: Optional[List[Dict[str, Union[str, List[EnumSymbol]]]]] = None) -> Dict[Any, Any]:
"""
Parse a given set of entries and checks which ones the user can access.
:param access: Grant access to these roles. Empty means public access.
:type access: list[str]
:param user: The user object.
:type user: mod_auth.models.User
:param title: The title of the root menu entry.
:type title: str
:param icon: The icon of the root menu entry.
:type icon: str
:param route: The route of the root menu entry.
:type route: str
:param all_entries: The sub entries for this menu entry.
:type all_entries: list[dict]
:return: A dict consisting of the menu entry.
:rtype: dict
"""
if all_entries is None:
all_entries = []
if access is None:
access = []
result: Dict[Any, Any] = {
'title': title,
'icon': icon
}
allowed_entries = []
passed = False
if user is not None:
if len(route) > 0:
result['route'] = route
passed = len(access) == 0 or user.role in access
else:
for entry in all_entries:
# TODO: make this recursive if necessary
if len(entry['access']) == 0 or user.role in entry['access']:
allowed_entries.append(entry)
if len(allowed_entries) > 0:
result['entries'] = allowed_entries
passed = True
elif len(access) == 0:
if len(route) > 0:
result['route'] = route
passed = True
else:
for entry in all_entries:
# TODO: make this recursive if necessary
if len(entry['access']) == 0:
allowed_entries.append(entry)
if len(allowed_entries) > 0:
result['entries'] = allowed_entries
passed = True
return result if passed else {}
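# Hedged example of assembling one menu entry; the title, icon, role and
# route values below are illustrative, not ones defined by this app:
#
#   entry = get_menu_entries(
#       user, 'Tests', 'flask-icon', access=['admin'], route='test.index')
#   # -> {} if the user's role is not in 'access', else a populated dict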
def template_renderer(template: Optional[str] = None, status: int = 200) -> Callable:
"""
Decorate to render a template.
:param template: The template if it's not equal to the name of the endpoint.
:type template: str
:param status: The return code
:type status: int
"""
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
template_name = template
if template_name is None:
template_name = request.endpoint.replace('.', '/') + '.html'
ctx = f(*args, **kwargs)
if ctx is None:
ctx = {}
elif not isinstance(ctx, dict):
return ctx
# Add default values
ctx['applicationName'] = 'CCExtractor CI platform'
ctx['applicationVersion'] = getattr(g, 'version', 'Unknown')
ctx['currentYear'] = date.today().strftime('%Y')
try:
from build_commit import build_commit
except ImportError:
build_commit = 'Unknown'
ctx['build_commit'] = build_commit
user = getattr(g, 'user', None)
ctx['user'] = user
# Create menu entries
menu_entries = getattr(g, 'menu_entries', {})
ctx['menu'] = [
menu_entries.get('home', {}),
menu_entries.get('samples', {}),
menu_entries.get('upload', {}),
menu_entries.get('custom', {}),
menu_entries.get('tests', {}),
menu_entries.get('regression', {}),
menu_entries.get('config', {}),
menu_entries.get('account', {}),
menu_entries.get('auth', {})
]
ctx['active_route'] = request.endpoint
# Render template & return
return render_template(template_name, **ctx), status
return decorated_function
return decorator
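# Hedged sketch of a decorated Flask view; the '/about' route is invented.
# With endpoint 'about', the decorator renders 'about.html' and merges the
# returned dict into the template context:
#
#   @app.route('/about')
#   @template_renderer()
#   def about():
#       return {'page_title': 'About'}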
| isc | 5,094,530,706,758,486,000 | 34.317073 | 116 | 0.548803 | false |
hgiemza/DIRAC | WorkloadManagementSystem/Agent/StalledJobAgent.py | 2 | 20827 | ########################################################################
# File : StalledJobAgent.py
########################################################################
""" The StalledJobAgent hunts for stalled jobs in the Job database. Jobs in "running"
state not receiving a heart beat signal for more than stalledTime
seconds will be assigned the "Stalled" state.
"""
__RCSID__ = "$Id$"
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities.Time import fromString, toEpoch, dateTime, second
from DIRAC import S_OK, S_ERROR, gConfig
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.AccountingSystem.Client.Types.Job import Job
from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd
from DIRAC.ConfigurationSystem.Client.Helpers import cfgPath
from DIRAC.ConfigurationSystem.Client.PathFinder import getSystemInstance
from DIRAC.WorkloadManagementSystem.Client.WMSClient import WMSClient
import types
class StalledJobAgent( AgentModule ):
  """ Hunts for stalled jobs in the Job database: Running jobs that stop
      sending heartbeat signals are marked Stalled, and jobs Stalled for
      too long are failed and accounted. As an agent it provides the
      standard methods:
      - initialize() for initial settings
      - beginExecution()
      - execute() - the main method called in the agent cycle
      - endExecution()
      - finalize() - the graceful exit of the method, this one is usually used
                 for the agent restart
  """
jobDB = None
logDB = None
matchedTime = 7200
rescheduledTime = 600
completedTime = 86400
#############################################################################
def initialize( self ):
"""Sets default parameters
"""
self.jobDB = JobDB()
self.logDB = JobLoggingDB()
self.am_setOption( 'PollingTime', 60 * 60 )
self.stalledJobsTolerantSites = self.am_getOption( 'StalledJobsTolerantSites', [] )
if not self.am_getOption( 'Enable', True ):
self.log.info( 'Stalled Job Agent running in disabled mode' )
return S_OK()
#############################################################################
def execute( self ):
""" The main agent execution method
"""
self.log.verbose( 'Waking up Stalled Job Agent' )
wms_instance = getSystemInstance( 'WorkloadManagement' )
if not wms_instance:
return S_ERROR( 'Can not get the WorkloadManagement system instance' )
wrapperSection = cfgPath( 'Systems', 'WorkloadManagement', wms_instance, 'JobWrapper' )
stalledTime = self.am_getOption( 'StalledTimeHours', 2 )
failedTime = self.am_getOption( 'FailedTimeHours', 6 )
self.stalledJobsToleranceTime = self.am_getOption( 'StalledJobsToleranceTime', 0 )
self.matchedTime = self.am_getOption( 'MatchedTime', self.matchedTime )
self.rescheduledTime = self.am_getOption( 'RescheduledTime', self.rescheduledTime )
self.completedTime = self.am_getOption( 'CompletedTime', self.completedTime )
self.log.verbose( 'StalledTime = %s cycles' % ( stalledTime ) )
self.log.verbose( 'FailedTime = %s cycles' % ( failedTime ) )
watchdogCycle = gConfig.getValue( cfgPath( wrapperSection , 'CheckingTime' ), 30 * 60 )
watchdogCycle = max( watchdogCycle, gConfig.getValue( cfgPath( wrapperSection , 'MinCheckingTime' ), 20 * 60 ) )
# Add half cycle to avoid race conditions
stalledTime = watchdogCycle * ( stalledTime + 0.5 )
failedTime = watchdogCycle * ( failedTime + 0.5 )
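    # Worked example of the two lines above: with the default CheckingTime of
    # 30*60 s and StalledTimeHours = 2, stalledTime = 1800 * 2.5 = 4500 s,
    # i.e. a job silent for 75 minutes becomes a candidate for Stalled.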
result = self.__markStalledJobs( stalledTime )
if not result['OK']:
self.log.error( 'Failed to detect stalled jobs', result['Message'] )
# Note, jobs will be revived automatically during the heartbeat signal phase and
# subsequent status changes will result in jobs not being selected by the
# stalled job agent.
result = self.__failStalledJobs( failedTime )
if not result['OK']:
self.log.error( 'Failed to process stalled jobs', result['Message'] )
result = self.__failCompletedJobs()
if not result['OK']:
self.log.error( 'Failed to process completed jobs', result['Message'] )
result = self.__kickStuckJobs()
if not result['OK']:
self.log.error( 'Failed to kick stuck jobs', result['Message'] )
return S_OK( 'Stalled Job Agent cycle complete' )
#############################################################################
def __markStalledJobs( self, stalledTime ):
""" Identifies stalled jobs running without update longer than stalledTime.
"""
stalledCounter = 0
runningCounter = 0
result = self.jobDB.selectJobs( {'Status':'Running'} )
if not result['OK']:
return result
if not result['Value']:
return S_OK()
jobs = result['Value']
self.log.info( '%s Running jobs will be checked for being stalled' % ( len( jobs ) ) )
jobs.sort()
# jobs = jobs[:10] #for debugging
for job in jobs:
site = self.jobDB.getJobAttribute( job, 'site' )['Value']
if site in self.stalledJobsTolerantSites:
result = self.__getStalledJob( job, stalledTime + self.stalledJobsToleranceTime )
else:
result = self.__getStalledJob( job, stalledTime )
if result['OK']:
self.log.verbose( 'Updating status to Stalled for job %s' % ( job ) )
self.__updateJobStatus( job, 'Stalled' )
stalledCounter += 1
else:
self.log.verbose( result['Message'] )
runningCounter += 1
self.log.info( 'Total jobs: %s, Stalled job count: %s, Running job count: %s' %
( len( jobs ), stalledCounter, runningCounter ) )
return S_OK()
#############################################################################
def __failStalledJobs( self, failedTime ):
""" Changes the Stalled status to Failed for jobs long in the Stalled status
"""
result = self.jobDB.selectJobs( {'Status':'Stalled'} )
if not result['OK']:
return result
jobs = result['Value']
failedCounter = 0
minorStalledStatuses = ( "Job stalled: pilot not running", 'Stalling for more than %d sec' % failedTime )
if jobs:
self.log.info( '%s Stalled jobs will be checked for failure' % ( len( jobs ) ) )
for job in jobs:
setFailed = False
# Check if the job pilot is lost
result = self.__getJobPilotStatus( job )
if not result['OK']:
self.log.error( 'Failed to get pilot status', result['Message'] )
continue
pilotStatus = result['Value']
if pilotStatus != "Running":
setFailed = minorStalledStatuses[0]
else:
result = self.__getLatestUpdateTime( job )
if not result['OK']:
self.log.error( 'Failed to get job update time', result['Message'] )
continue
elapsedTime = toEpoch() - result['Value']
if elapsedTime > failedTime:
setFailed = minorStalledStatuses[1]
# Set the jobs Failed, send them a kill signal in case they are not really dead and send accounting info
if setFailed:
# Send a kill signal to the job such that it cannot continue running
WMSClient().killJob( job )
self.__updateJobStatus( job, 'Failed', setFailed )
failedCounter += 1
result = self.__sendAccounting( job )
if not result['OK']:
self.log.error( 'Failed to send accounting', result['Message'] )
recoverCounter = 0
for minor in minorStalledStatuses:
result = self.jobDB.selectJobs( {'Status':'Failed', 'MinorStatus': minor, 'AccountedFlag': 'False' } )
if not result['OK']:
return result
if result['Value']:
jobs = result['Value']
self.log.info( '%s Stalled jobs will be Accounted' % ( len( jobs ) ) )
for job in jobs:
result = self.__sendAccounting( job )
if not result['OK']:
self.log.error( 'Failed to send accounting', result['Message'] )
continue
recoverCounter += 1
if not result['OK']:
break
if failedCounter:
self.log.info( '%d jobs set to Failed' % failedCounter )
if recoverCounter:
self.log.info( '%d jobs properly Accounted' % recoverCounter )
return S_OK( failedCounter )
#############################################################################
def __getJobPilotStatus( self, jobID ):
""" Get the job pilot status
"""
result = self.jobDB.getJobParameter( jobID, 'Pilot_Reference' )
if not result['OK']:
return result
if not result['Value']:
# There is no pilot reference, hence its status is unknown
return S_OK( 'NoPilot' )
pilotReference = result['Value']
wmsAdminClient = RPCClient( 'WorkloadManagement/WMSAdministrator' )
result = wmsAdminClient.getPilotInfo( pilotReference )
if not result['OK']:
if "No pilots found" in result['Message']:
self.log.warn( result['Message'] )
return S_OK( 'NoPilot' )
self.log.error( 'Failed to get pilot information',
'for job %d: ' % jobID + result['Message'] )
return S_ERROR( 'Failed to get the pilot status' )
pilotStatus = result['Value'][pilotReference]['Status']
return S_OK( pilotStatus )
#############################################################################
def __getStalledJob( self, job, stalledTime ):
""" Compares the most recent of LastUpdateTime and HeartBeatTime against
the stalledTime limit.
"""
result = self.__getLatestUpdateTime( job )
if not result['OK']:
return result
currentTime = toEpoch()
lastUpdate = result['Value']
elapsedTime = currentTime - lastUpdate
self.log.verbose( '(CurrentTime-LastUpdate) = %s secs' % ( elapsedTime ) )
if elapsedTime > stalledTime:
self.log.info( 'Job %s is identified as stalled with last update > %s secs ago' % ( job, elapsedTime ) )
return S_OK( 'Stalled' )
return S_ERROR( 'Job %s is running and will be ignored' % job )
#############################################################################
def __getLatestUpdateTime( self, job ):
""" Returns the most recent of HeartBeatTime and LastUpdateTime
"""
result = self.jobDB.getJobAttributes( job, ['HeartBeatTime', 'LastUpdateTime'] )
if not result['OK']:
self.log.error( 'Failed to get job attributes', result['Message'] )
if not result['OK'] or not result['Value']:
self.log.error( 'Could not get attributes for job', '%s' % job )
return S_ERROR( 'Could not get attributes for job' )
self.log.verbose( result )
latestUpdate = 0
if not result['Value']['HeartBeatTime'] or result['Value']['HeartBeatTime'] == 'None':
self.log.verbose( 'HeartBeatTime is null for job %s' % job )
else:
latestUpdate = toEpoch( fromString( result['Value']['HeartBeatTime'] ) )
if not result['Value']['LastUpdateTime'] or result['Value']['LastUpdateTime'] == 'None':
self.log.verbose( 'LastUpdateTime is null for job %s' % job )
else:
lastUpdate = toEpoch( fromString( result['Value']['LastUpdateTime'] ) )
if latestUpdate < lastUpdate:
latestUpdate = lastUpdate
if not latestUpdate:
return S_ERROR( 'LastUpdate and HeartBeat times are null for job %s' % job )
else:
self.log.verbose( 'Latest update time from epoch for job %s is %s' % ( job, latestUpdate ) )
return S_OK( latestUpdate )
#############################################################################
def __updateJobStatus( self, job, status, minorstatus = None ):
""" This method updates the job status in the JobDB, this should only be
used to fail jobs due to the optimizer chain.
"""
self.log.verbose( "self.jobDB.setJobAttribute(%s,'Status','%s',update=True)" % ( job, status ) )
if self.am_getOption( 'Enable', True ):
result = self.jobDB.setJobAttribute( job, 'Status', status, update = True )
else:
result = S_OK( 'DisabledMode' )
if result['OK']:
if minorstatus:
self.log.verbose( "self.jobDB.setJobAttribute(%s,'MinorStatus','%s',update=True)" % ( job, minorstatus ) )
result = self.jobDB.setJobAttribute( job, 'MinorStatus', minorstatus, update = True )
if not minorstatus: # Retain last minor status for stalled jobs
result = self.jobDB.getJobAttributes( job, ['MinorStatus'] )
if result['OK']:
minorstatus = result['Value']['MinorStatus']
logStatus = status
result = self.logDB.addLoggingRecord( job, status = logStatus, minor = minorstatus, source = 'StalledJobAgent' )
if not result['OK']:
self.log.warn( result )
return result
def __getProcessingType( self, jobID ):
""" Get the Processing Type from the JDL, until it is promoted to a real Attribute
"""
processingType = 'unknown'
result = self.jobDB.getJobJDL( jobID, original = True )
if not result['OK']:
return processingType
classAdJob = ClassAd( result['Value'] )
if classAdJob.lookupAttribute( 'ProcessingType' ):
processingType = classAdJob.getAttributeString( 'ProcessingType' )
return processingType
#############################################################################
def __sendAccounting( self, jobID ):
""" Send WMS accounting data for the given job
"""
try:
accountingReport = Job()
endTime = 'Unknown'
lastHeartBeatTime = 'Unknown'
result = self.jobDB.getJobAttributes( jobID )
if not result['OK']:
return result
jobDict = result['Value']
startTime, endTime = self.__checkLoggingInfo( jobID, jobDict )
lastCPUTime, lastWallTime, lastHeartBeatTime = self.__checkHeartBeat( jobID, jobDict )
lastHeartBeatTime = fromString( lastHeartBeatTime )
if lastHeartBeatTime is not None and lastHeartBeatTime > endTime:
endTime = lastHeartBeatTime
cpuNormalization = self.jobDB.getJobParameter( jobID, 'CPUNormalizationFactor' )
if not cpuNormalization['OK'] or not cpuNormalization['Value']:
cpuNormalization = 0.0
else:
cpuNormalization = float( cpuNormalization['Value'] )
except Exception:
self.log.exception( "Exception in __sendAccounting for job %s: endTime=%s, lastHBTime %s" % ( str( jobID ), str( endTime ), str( lastHeartBeatTime ) ), '' , False )
return S_ERROR( "Exception" )
processingType = self.__getProcessingType( jobID )
accountingReport.setStartTime( startTime )
accountingReport.setEndTime( endTime )
# execTime = toEpoch( endTime ) - toEpoch( startTime )
# Fill the accounting data
acData = { 'Site' : jobDict['Site'],
'User' : jobDict['Owner'],
'UserGroup' : jobDict['OwnerGroup'],
'JobGroup' : jobDict['JobGroup'],
'JobType' : jobDict['JobType'],
'JobClass' : jobDict['JobSplitType'],
'ProcessingType' : processingType,
'FinalMajorStatus' : 'Failed',
'FinalMinorStatus' : 'Stalled',
'CPUTime' : lastCPUTime,
'NormCPUTime' : lastCPUTime * cpuNormalization,
'ExecTime' : lastWallTime,
'InputDataSize' : 0.0,
'OutputDataSize' : 0.0,
'InputDataFiles' : 0,
'OutputDataFiles' : 0,
'DiskSpace' : 0.0,
'InputSandBoxSize' : 0.0,
'OutputSandBoxSize' : 0.0,
'ProcessedEvents' : 0
}
# For accidentally stopped jobs ExecTime can be not set
if not acData['ExecTime']:
acData['ExecTime'] = acData['CPUTime']
elif acData['ExecTime'] < acData['CPUTime']:
acData['ExecTime'] = acData['CPUTime']
self.log.verbose( 'Accounting Report is:' )
self.log.verbose( acData )
accountingReport.setValuesFromDict( acData )
result = accountingReport.commit()
if result['OK']:
self.jobDB.setJobAttribute( jobID, 'AccountedFlag', 'True' )
else:
self.log.error( 'Failed to send accounting report', 'Job: %d, Error: %s' % ( int( jobID ), result['Message'] ) )
return result
def __checkHeartBeat( self, jobID, jobDict ):
""" Get info from HeartBeat
"""
result = self.jobDB.getHeartBeatData( jobID )
lastCPUTime = 0
lastWallTime = 0
lastHeartBeatTime = jobDict['StartExecTime']
if lastHeartBeatTime == "None":
lastHeartBeatTime = 0
if result['OK']:
for name, value, heartBeatTime in result['Value']:
if 'CPUConsumed' == name:
try:
value = int( float( value ) )
if value > lastCPUTime:
lastCPUTime = value
except ValueError:
pass
if 'WallClockTime' == name:
try:
value = int( float( value ) )
if value > lastWallTime:
lastWallTime = value
except ValueError:
pass
if heartBeatTime > lastHeartBeatTime:
lastHeartBeatTime = heartBeatTime
return lastCPUTime, lastWallTime, lastHeartBeatTime
def __checkLoggingInfo( self, jobID, jobDict ):
""" Get info from JobLogging
"""
logList = []
result = self.logDB.getJobLoggingInfo( jobID )
if result['OK']:
logList = result['Value']
startTime = jobDict['StartExecTime']
if not startTime or startTime == 'None':
# status, minor, app, stime, source
for items in logList:
if items[0] == 'Running':
startTime = items[3]
break
if not startTime or startTime == 'None':
startTime = jobDict['SubmissionTime']
    if type( startTime ) in types.StringTypes:
      startTimeString = startTime
      startTime = fromString( startTime )
      if startTime is None:
        self.log.error( 'Wrong timestamp in DB', startTimeString )
        startTime = dateTime()
endTime = dateTime()
# status, minor, app, stime, source
for items in logList:
if items[0] == 'Stalled':
endTime = fromString( items[3] )
        if endTime is None:
self.log.error( 'Wrong timestamp in DB', items[3] )
endTime = dateTime()
return startTime, endTime
def __kickStuckJobs( self ):
""" Reschedule jobs stuck in initialization status Rescheduled, Matched
"""
message = ''
checkTime = str( dateTime() - self.matchedTime * second )
result = self.jobDB.selectJobs( {'Status':'Matched'}, older = checkTime )
if not result['OK']:
self.log.error( 'Failed to select jobs', result['Message'] )
return result
jobIDs = result['Value']
if jobIDs:
self.log.info( 'Rescheduling %d jobs stuck in Matched status' % len( jobIDs ) )
result = self.jobDB.rescheduleJobs( jobIDs )
if 'FailedJobs' in result:
message = 'Failed to reschedule %d jobs stuck in Matched status' % len( result['FailedJobs'] )
checkTime = str( dateTime() - self.rescheduledTime * second )
result = self.jobDB.selectJobs( {'Status':'Rescheduled'}, older = checkTime )
if not result['OK']:
self.log.error( 'Failed to select jobs', result['Message'] )
return result
jobIDs = result['Value']
if jobIDs:
self.log.info( 'Rescheduling %d jobs stuck in Rescheduled status' % len( jobIDs ) )
result = self.jobDB.rescheduleJobs( jobIDs )
if 'FailedJobs' in result:
if message:
message += '\n'
message += 'Failed to reschedule %d jobs stuck in Rescheduled status' % len( result['FailedJobs'] )
if message:
return S_ERROR( message )
else:
return S_OK()
def __failCompletedJobs( self ):
""" Failed Jobs stuck in Completed Status for a long time.
They are due to pilots being killed during the
finalization of the job execution.
"""
# Get old Completed Jobs
checkTime = str( dateTime() - self.completedTime * second )
result = self.jobDB.selectJobs( {'Status':'Completed'}, older = checkTime )
if not result['OK']:
self.log.error( 'Failed to select jobs', result['Message'] )
return result
jobIDs = result['Value']
if not jobIDs:
return S_OK()
# Remove those with Minor Status "Pending Requests"
for jobID in jobIDs:
result = self.jobDB.getJobAttributes( jobID, ['Status', 'MinorStatus'] )
if not result['OK']:
self.log.error( 'Failed to get job attributes', result['Message'] )
continue
if result['Value']['Status'] != "Completed":
continue
if result['Value']['MinorStatus'] == "Pending Requests":
continue
result = self.__updateJobStatus( jobID, 'Failed',
"Job died during finalization" )
result = self.__sendAccounting( jobID )
if not result['OK']:
self.log.error( 'Failed to send accounting', result['Message'] )
continue
return S_OK()
# EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
| gpl-3.0 | -8,255,214,876,684,673,000 | 37.426199 | 170 | 0.612954 | false |
roryscarson/NOVUS | novus_pkg/F_cashFlow_B_pnl.py | 1 | 22178 | #!python
# -*- encoding: utf-8 -*-
# F_cashFlow_B_pnl.py
# Greg Wilson, 2012
# [email protected]
# This software is part of the Public Domain.
# This file is part of the NOVUS Entrepreneurship Training Program.
# NOVUS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# NOVUS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with NOVUS. If not, see <http://www.gnu.org/licenses/>.
'''
E_game_pnl.py +
+ X_listbook.py +
+ E_finance_pnl.py +
+ F_incomeStmt_B_pnl.py
+ F_balanceSheet_B_pnl.py
+ F_cashFlow_B_pnl.py
This module contains the Cash Flow "B" Panel class code for the Novus
Business and IT education program. The "B" panel show the
pro forma results for this year, and, after the submission of decisions,
the actual results for the round.
'''
import wx
import wx.lib.scrolledpanel as scrolled
import X_styles, X_miscPnls
import Q_data
from Q_language import GetPhrase
class CashFlow_B_Pnl(scrolled.ScrolledPanel):
'''This class holds the CashFlow panel for the Novus Business and IT
education program.'''
def __init__(self, parent, *args, **kwargs):
scrolled.ScrolledPanel.__init__(self, parent, *args, **kwargs)
# Styles ----------------------------------------------------------
self.styles = X_styles.NovusStyle(self)
self.SetBackgroundColour(wx.WHITE)
# Data ------------------------------------------------------------
self.data = Q_data.Data(None)
# Labels
#------------------------------------------------------------------
lang = self.data.GetData1()[1][1]
self.cashFlowAndRatio_lbl = GetPhrase('cashFlow_lbl', lang) + ' / ' + GetPhrase('financialRatios_lbl', lang)
self.forecast_lbl = GetPhrase('forecast_lbl', lang)
self.actual_lbl = GetPhrase('actual_lbl', lang)
# Cash Flow Labels ------------------------------------------------
self.cashFlowStmt_lbl = GetPhrase('cashFlow_lbl', lang)
self.opActivities_lbl = GetPhrase('opActivities_lbl', lang)
self.netIncome_lbl = GetPhrase('netIncome_lbl', lang)
self.depreciation_lbl = GetPhrase('depreciation_lbl', lang)
self.chgInAR_lbl = GetPhrase('chgInAR_lbl', lang)
self.chgInInv_lbl = GetPhrase('chgInInv_lbl', lang)
self.chgInAP_lbl = GetPhrase('chgInAP_lbl', lang)
self.NetCashFlowOps_lbl = GetPhrase('NetCashFlowOps_lbl', lang)
self.investingActivities_lbl = GetPhrase('investingActivities_lbl', lang)
self.capExp_lbl = GetPhrase('capExp_lbl', lang)
self.addGFA_lbl = GetPhrase('addGFA_lbl', lang)
self.addTrucks_lbl = GetPhrase('addTrucks_lbl', lang)
self.addMach_lbl = GetPhrase('addMach_lbl', lang)
self.netCashFlowInv_lbl = GetPhrase('netCashFlowInv_lbl', lang)
self.FinancingActivities_lbl = GetPhrase('FinancingActivities_lbl', lang)
self.chgShortTermFin_lbl = GetPhrase('chgShortTermFin_lbl', lang)
self.chgShortTermLoC_lbl = GetPhrase('chgShortTermLoC_lbl', lang)
self.incLTD_lbl = GetPhrase('incLTD_lbl', lang)
self.decLTD_lbl = GetPhrase('decLTD_lbl', lang)
self.proceedsFromStock_lbl = GetPhrase('proceedsFromStock_lbl', lang)
self.cashDivPaid_lbl = GetPhrase('cashDivPaid_lbl', lang)
self.netCashFlowFin_lbl = GetPhrase('netCashFlowFin_lbl', lang)
self.netCashFlowAll_lbl = GetPhrase('netCashFlowAll_lbl', lang)
self.begCashBal_lbl = GetPhrase('begCashBal_lbl', lang)
self.endCashBal_lbl = GetPhrase('endCashBal_lbl', lang)
# Ratio Labels ----------------------------------------------------
self.financialRatios_lbl = GetPhrase('financialRatios_lbl', lang)
self.y2yGrowth_lbl = GetPhrase('y2yGrowth_lbl', lang)
self.grossMargin_lbl = GetPhrase('grossMargin_lbl', lang)
self.SGAofSales_lbl = GetPhrase('SGAofSales_lbl', lang)
self.EBITDAOpMarg_lbl = GetPhrase('EBITDAOpMarg_lbl', lang)
self.EBITOpMarg_lbl = GetPhrase('EBITOpMarg_lbl', lang)
self.taxRate_lbl = GetPhrase('taxRate_lbl', lang)
self.netProfMarg_lbl = GetPhrase('netProfMarg_lbl', lang)
self.currentRatio_lbl = GetPhrase('currentRatio_lbl', lang)
self.quickRatio_lbl = GetPhrase('quickRatio_lbl', lang)
self.cashRatio_lbl = GetPhrase('cashRatio_lbl', lang)
self.daysInvOut_lbl = GetPhrase('daysInvOut_lbl', lang)
self.daysSalesOut_lbl = GetPhrase('daysSalesOut_lbl', lang)
self.daysPayablesOut_lbl = GetPhrase('daysPayablesOut_lbl', lang)
self.ccc_lbl = GetPhrase('ccc_lbl', lang)
self.roa_lbl = GetPhrase('RoA_lbl', lang)
self.roe_lbl = GetPhrase('RoE_lbl', lang)
self.roi_lbl = GetPhrase('roi_lbl', lang)
self.estIR_lbl = GetPhrase('estIR_lbl', lang)
self.debtEquity_lbl = GetPhrase('debtEquity_lbl', lang)
self.ebitdaToIntExp_lbl = GetPhrase('timesInt_lbl', lang)
# Cash Flow List Objects
#------------------------------------------------------------------
self.opActivities_list = [self.opActivities_lbl, '', '', '', '', '', '']
self.netIncome_list = [' '+self.netIncome_lbl, '-', '-']
self.depreciation_list = [' ( + )'+self.depreciation_lbl, '-', '-']
self.chgInAR_list = [' (+/-)'+self.chgInAR_lbl, '-', '-']
self.chgInInv_list = [' (+/-)'+self.chgInInv_lbl, '-', '-']
self.chgInAP_list = [' (+/-)'+self.chgInAP_lbl, '-', '-']
self.NetCashFlowOps_list = [self.NetCashFlowOps_lbl, '-', '-']
self.investingActivities_list = [self.investingActivities_lbl, '', '', '', '', '', '']
self.addGFA_list = [' '+self.capExp_lbl+' - '+self.addGFA_lbl, '-', '-']
self.addTrucks_list = [' '+self.capExp_lbl+' - '+self.addTrucks_lbl, '-', '-']
self.addMach_list = [' '+self.capExp_lbl+' - '+self.addMach_lbl, '-', '-']
self.netCashFlowInv_list = [self.netCashFlowInv_lbl, '-', '-']
self.FinancingActivities_list = [self.FinancingActivities_lbl, '', '', '', '', '', '']
self.chgShortTermFin_list = [' '+self.chgShortTermFin_lbl, '-', '-']
self.chgShortTermLoC_list = [' '+self.chgShortTermLoC_lbl, '-', '-']
self.incLTD_list = [' '+self.incLTD_lbl, '-', '-']
self.decLTD_list = [' '+self.decLTD_lbl, '-', '-']
self.proceedsFromStock_list = [' '+self.proceedsFromStock_lbl, '-', '-']
self.cashDivPaid_list = [' '+self.cashDivPaid_lbl, '-', '-']
self.netCashFlowFin_list = [self.netCashFlowFin_lbl, '-', '-']
self.netCashFlowAll_list = [self.netCashFlowAll_lbl, '-', '-']
self.begCashBal_list = [self.begCashBal_lbl, '-', '-']
self.endCashBal_list = [self.endCashBal_lbl, '-', '-']
self.cf_fields = [self.opActivities_list, self.netIncome_list, self.depreciation_list,
self.chgInAR_list, self.chgInInv_list, self.chgInAP_list,
self.NetCashFlowOps_list, self.investingActivities_list,
self.addGFA_list, self.addTrucks_list, self.addMach_list,
self.netCashFlowInv_list, self.FinancingActivities_list,
self.chgShortTermFin_list, self.chgShortTermLoC_list,
self.incLTD_list, self.decLTD_list, self.proceedsFromStock_list,
self.cashDivPaid_list, self.netCashFlowFin_list, self.netCashFlowAll_list,
self.begCashBal_list, self.endCashBal_list]
# Financial Ratio List Objects
#------------------------------------------------------------------
self.y2yGrowth_list = [self.y2yGrowth_lbl, '-', '-']
self.grossMargin_list = [self.grossMargin_lbl, '-', '-']
self.SGAofSales_list = [self.SGAofSales_lbl, '-', '-']
self.EBITDAOpMarg_list = [self.EBITDAOpMarg_lbl, '-', '-']
self.EBITOpMarg_list = [self.EBITOpMarg_lbl, '-', '-']
self.taxRate_list = [self.taxRate_lbl, '-', '-']
self.netProfMarg_list = [self.netProfMarg_lbl, '-', '-']
self.currentRatio_list = [self.currentRatio_lbl, '-', '-']
self.quickRatio_list = [self.quickRatio_lbl, '-', '-']
self.cashRatio_list = [self.cashRatio_lbl, '-', '-']
self.daysInvOut_list = [self.daysInvOut_lbl, '-', '-']
self.daysSalesOut_list = [self.daysSalesOut_lbl, '-', '-']
self.daysPayablesOut_list = [self.daysPayablesOut_lbl, '-', '-']
self.ccc_list = [self.ccc_lbl, '-', '-']
self.roa_list = [self.roa_lbl, '-', '-']
self.roe_list = [self.roe_lbl, '-', '-']
self.roi_list = [self.roi_lbl, '-', '-']
self.estIR_list = [self.estIR_lbl, '-', '-']
self.debtEquity_list = [self.debtEquity_lbl, '-', '-']
self.ebitdaToIntExp_list = [self.ebitdaToIntExp_lbl, '-', '-']
self.fr_fields = [self.y2yGrowth_list, self.grossMargin_list, self.SGAofSales_list,
self.EBITDAOpMarg_list, self.EBITOpMarg_list, self.taxRate_list,
self.netProfMarg_list, self.currentRatio_list, self.quickRatio_list,
self.cashRatio_list,
self.daysInvOut_list, self.daysSalesOut_list, self.daysPayablesOut_list,
self.ccc_list, self.roa_list, self.roe_list,
self.roi_list, self.estIR_list, self.debtEquity_list,
self.ebitdaToIntExp_list]
# Formatting ------------------------------------------------------
self.bold_list = [self.opActivities_list, self.NetCashFlowOps_list,
self.investingActivities_list, self.netCashFlowInv_list,
self.FinancingActivities_list, self.netCashFlowFin_list,
self.netCashFlowAll_list, self.begCashBal_list,
self.endCashBal_list]
self.italic_list = []
# Sizer
#------------------------------------------------------------------
sizer = wx.BoxSizer(wx.VERTICAL)
# Title -----------------------------------------------------------
self.cashFlowAndRatio_st = wx.StaticText(self, -1, self.cashFlowAndRatio_lbl)
self.cashFlowAndRatio_st.SetFont(self.styles.h1_font)
sizer.Add(self.cashFlowAndRatio_st, 0, wx.ALIGN_CENTER|wx.TOP|wx.BOTTOM, 5)
# Cash Flow Panels ------------------------------------------------
self.opActivities_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.netIncome_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.depreciation_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.chgInAR_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.chgInInv_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.chgInAP_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.NetCashFlowOps_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.investingActivities_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.addGFA_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.addTrucks_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.addMach_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.netCashFlowInv_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.FinancingActivities_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.chgShortTermFin_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.chgShortTermLoC_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.incLTD_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.decLTD_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.proceedsFromStock_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.cashDivPaid_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.netCashFlowFin_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.netCashFlowAll_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.begCashBal_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.endCashBal_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.cf_pnls = [self.opActivities_pnl, self.netIncome_pnl, self.depreciation_pnl,
self.chgInAR_pnl, self.chgInInv_pnl, self.chgInAP_pnl,
self.NetCashFlowOps_pnl, self.investingActivities_pnl,
self.addGFA_pnl, self.addTrucks_pnl, self.addMach_pnl,
self.netCashFlowInv_pnl , self.FinancingActivities_pnl,
self.chgShortTermFin_pnl, self.chgShortTermLoC_pnl,
self.incLTD_pnl, self.decLTD_pnl, self.proceedsFromStock_pnl,
self.cashDivPaid_pnl, self.netCashFlowFin_pnl , self.netCashFlowAll_pnl,
self.begCashBal_pnl, self.endCashBal_pnl]
# Financial Ratio Panels ------------------------------------------
self.y2yGrowth_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.grossMargin_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.SGAofSales_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.EBITDAOpMarg_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.EBITOpMarg_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.taxRate_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.netProfMarg_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.currentRatio_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.quickRatio_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.cashRatio_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.daysInvOut_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.daysSalesOut_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.daysPayablesOut_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.ccc_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.roa_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.roe_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.roi_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.estIR_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.debtEquity_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.ebitdaToIntExp_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.fr_pnls = [self.y2yGrowth_pnl , self.grossMargin_pnl , self.SGAofSales_pnl ,
self.EBITDAOpMarg_pnl , self.EBITOpMarg_pnl , self.taxRate_pnl ,
self.netProfMarg_pnl , self.currentRatio_pnl , self.quickRatio_pnl ,
self.cashRatio_pnl,
self.daysInvOut_pnl , self.daysSalesOut_pnl , self.daysPayablesOut_pnl ,
self.ccc_pnl , self.roa_pnl , self.roe_pnl ,
self.roi_pnl , self.estIR_pnl , self.debtEquity_pnl,
self.ebitdaToIntExp_pnl]
# Add Cash Flow Panels to Sizer -----------------------------------
self.cashFlowStmt_list = [self.cashFlowStmt_lbl, self.forecast_lbl, self.actual_lbl]
self.cashFlowStmt_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.cashFlowStmt_pnl.Init(self.cashFlowStmt_list)
sizer.Add(self.cashFlowStmt_pnl, 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
sizer.Add(wx.StaticLine(self, -1), 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
lineCount = 0
addSL_list = (6, 11, 18, 22) # Indicates where to insert a static line
for pnl, fld in zip(self.cf_pnls, self.cf_fields):
bold, italic = False, False
if fld in self.bold_list:
bold = True
if fld in self.italic_list:
italic = True
pnl.Init(fld, bold, italic)
sizer.Add(pnl, 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
if lineCount in addSL_list:
sizer.Add(wx.StaticLine(self, -1), 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
if lineCount % 2 == 0:
pnl.SetBackgroundColour(self.styles.lightGrey)
if lineCount in (20, ):
sizer.Add((-1, 10))
lineCount += 1
# Add Financial Ratios --------------------------------------------
sizer.Add((-1, 20))
self.financialRatios_list = [self.financialRatios_lbl, self.forecast_lbl, self.actual_lbl]
self.financialRatios_pnl = X_miscPnls.Report2_Row_Pnl(self, -1)
self.financialRatios_pnl.Init(self.financialRatios_list)
sizer.Add(self.financialRatios_pnl, 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
sizer.Add(wx.StaticLine(self, -1), 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
lineCount = 0
for pnl, fld in zip(self.fr_pnls, self.fr_fields):
bold, italic = False, False
if fld in self.bold_list:
bold = True
if fld in self.italic_list:
italic = True
pnl.Init(fld, bold, italic)
sizer.Add(pnl, 0, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 5)
if lineCount % 2 == 0:
pnl.SetBackgroundColour(self.styles.lightGrey)
lineCount += 1
self.SetSizer(sizer)
self.SetupScrolling()
#----------------------------------------------------------------------
def UpdateCF(self, cfList, isPF=True):
'''Adds values to the Cash Flow statement.'''
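        # Expected cfList layout, inferred from the unpacking below:
        #   cfList[0] = (netIncome, depreciation, chgAR, chgInv, chgAP)
        #   cfList[1] = (gfa, trucks, machinery) capital expenditures
        #   cfList[2] = (chgSTB, chgLoC, incLTD, decLTD, stockProceeds, divPaid)
        #   cfList[3] = beginning cash balance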
if isPF:
insCol = 1
else:
insCol = 2
# Net Cash Flows from Operations Activities
netIncome, depr, chRec, chInv, chPay = cfList[0]
self.netIncome_pnl.AddVal(netIncome, insCol)
self.depreciation_pnl.AddVal(depr, insCol)
self.chgInAR_pnl.AddVal(chRec, insCol)
self.chgInInv_pnl.AddVal(chInv, insCol)
self.chgInAP_pnl.AddVal(chPay, insCol)
netOps = netIncome + depr + chRec + chInv + chPay
self.NetCashFlowOps_pnl.AddVal(netOps, insCol)
# Net Cash Flows from Investing Activities
eq, tr, ma = cfList[1]
self.addGFA_pnl.AddVal(eq, insCol)
self.addTrucks_pnl.AddVal(tr, insCol)
self.addMach_pnl.AddVal(ma, insCol)
netInv = eq + tr + ma
self.netCashFlowInv_pnl.AddVal(netInv, insCol)
# Net Cash Flows from Financing Activities
chSTB, chLoC, incLTD, decLTD, incEq, divPaid = cfList[2]
self.chgShortTermFin_pnl.AddVal(chSTB, insCol)
self.chgShortTermLoC_pnl.AddVal(chLoC, insCol)
self.incLTD_pnl.AddVal(incLTD, insCol)
self.decLTD_pnl.AddVal(decLTD, insCol)
self.proceedsFromStock_pnl.AddVal(incEq, insCol)
self.cashDivPaid_pnl.AddVal(divPaid, insCol)
netFin = chSTB + chLoC + incLTD + decLTD + incEq + divPaid
self.netCashFlowFin_pnl.AddVal(netFin, insCol)
netAll = netOps + netInv + netFin
self.netCashFlowAll_pnl.AddVal(netAll, insCol)
# Beginning and ending cash balance
begCB = cfList[3]
endCB = begCB + netAll
self.begCashBal_pnl.AddVal(begCB, insCol)
self.endCashBal_pnl.AddVal(endCB, insCol)
#----------------------------------------------------------------------
def UpdateFR(self, frList, isPF=True):
'''Adds the financial ratios to the cash flow / ratio panel.'''
if isPF:
insCol = 1
else:
insCol = 2
self.y2yGrowth_pnl.AddVal(frList[0], insCol, isCur=False, isPerc=True)
self.grossMargin_pnl.AddVal(frList[1], insCol, isCur=False, isPerc=True)
self.SGAofSales_pnl.AddVal(frList[2], insCol, isCur=False, isPerc=True)
self.EBITDAOpMarg_pnl.AddVal(frList[3], insCol, isCur=False, isPerc=True)
self.EBITOpMarg_pnl.AddVal(frList[4], insCol, isCur=False, isPerc=True)
self.taxRate_pnl.AddVal(frList[5], insCol, isCur=False, isPerc=True)
self.netProfMarg_pnl.AddVal(frList[6], insCol, isCur=False, isPerc=True)
self.currentRatio_pnl.AddFloat(frList[7], insCol)
self.quickRatio_pnl.AddFloat(frList[8], insCol)
self.cashRatio_pnl.AddFloat(frList[9], insCol)
self.daysInvOut_pnl.AddFloat(frList[10], insCol)
self.daysSalesOut_pnl.AddFloat(frList[11], insCol)
self.daysPayablesOut_pnl.AddFloat(frList[12], insCol)
self.ccc_pnl.AddFloat(frList[13], insCol)
self.roa_pnl.AddFloat(frList[14], insCol)
self.roe_pnl.AddFloat(frList[15], insCol)
self.roi_pnl.AddFloat(frList[16], insCol)
self.estIR_pnl.AddVal(frList[17], insCol, isCur=False, isPerc=True)
self.debtEquity_pnl.AddFloat(frList[18], insCol)
self.ebitdaToIntExp_pnl.AddFloat(frList[19], insCol)
self.Scroll(0, 0)
#----------------------------------------------------------------------
def ExportCF(self):
'''Exports the statement of cash flows'''
cf = []
for p in [self.cashFlowStmt_pnl]+self.cf_pnls:
cf.append(p.ExportRow())
return cf
#----------------------------------------------------------------------
def ExportFR(self):
'''Exports the financial ratios.'''
fr = []
for p in [self.financialRatios_pnl]+self.fr_pnls:
fr.append(p.ExportRow())
return fr | gpl-3.0 | 8,315,200,174,922,877,000 | 53.762963 | 116 | 0.571287 | false |
82Flex/DCRM | fluent_comments/templatetags/fluent_comments_tags.py | 1 | 5568 | import django
from django.conf import settings
from django.template import Library, Node
from django.template.loader import get_template
from fluent_comments.utils import get_comment_template_name, get_comment_context_data
from tag_parser import parse_token_kwargs
from tag_parser.basetags import BaseInclusionNode
from fluent_comments import appsettings
from fluent_comments.models import get_comments_for_model
from fluent_comments.moderation import comments_are_open, comments_are_moderated
try:
from django.template import context_processors # Django 1.10+
except ImportError:
from django.core import context_processors
register = Library()
class AjaxCommentTags(BaseInclusionNode):
"""
Custom inclusion node with some special parsing features.
Using the ``@register.inclusion_tag`` is not sufficient,
because some keywords require custom parsing.
"""
template_name = "fluent_comments/templatetags/ajax_comment_tags.html"
min_args = 1
max_args = 1
@classmethod
def parse(cls, parser, token):
"""
Custom parsing for the ``{% ajax_comment_tags for ... %}`` tag.
"""
# Process the template line.
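        # e.g. {% ajax_comment_tags for article %} -- the literal "for" keyword
        # is dropped below and "article" (an illustrative variable name) is
        # compiled as a filter expression.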
tag_name, args, kwargs = parse_token_kwargs(
parser, token,
allowed_kwargs=cls.allowed_kwargs,
compile_args=False, # Only overrule here, keep at render() phase.
compile_kwargs=cls.compile_kwargs
)
# remove "for" keyword, so all other args can be resolved in render().
if args[0] == 'for':
args.pop(0)
# And apply the compilation afterwards
for i in range(len(args)):
args[i] = parser.compile_filter(args[i])
cls.validate_args(tag_name, *args, **kwargs)
return cls(tag_name, *args, **kwargs)
def get_context_data(self, parent_context, *tag_args, **tag_kwargs):
"""
The main logic for the inclusion node, analogous to ``@register.inclusion_node``.
"""
target_object = tag_args[0] # moved one spot due to .pop(0)
new_context = {
'STATIC_URL': parent_context.get('STATIC_URL', None),
'USE_THREADEDCOMMENTS': appsettings.USE_THREADEDCOMMENTS,
'target_object': target_object,
}
# Be configuration independent:
if new_context['STATIC_URL'] is None:
try:
request = parent_context['request']
except KeyError:
new_context.update({'STATIC_URL': settings.STATIC_URL})
else:
new_context.update(context_processors.static(request))
return new_context
@register.tag
def ajax_comment_tags(parser, token):
"""
Display the required ``<div>`` elements to let the Ajax comment functionality work with your form.
"""
return AjaxCommentTags.parse(parser, token)
register.filter('comments_are_open', comments_are_open)
register.filter('comments_are_moderated', comments_are_moderated)
@register.filter
def comments_count(content_object):
"""
Return the number of comments posted at a target object.
You can use this instead of the ``{% get_comment_count for [object] as [varname] %}`` tag.
"""
return get_comments_for_model(content_object).count()
class FluentCommentsList(Node):
def render(self, context):
# Include proper template, avoid parsing it twice by operating like @register.inclusion_tag()
if not getattr(self, 'nodelist', None):
if appsettings.USE_THREADEDCOMMENTS:
template = get_template("fluent_comments/templatetags/threaded_list.html")
else:
template = get_template("fluent_comments/templatetags/flat_list.html")
self.nodelist = template
# NOTE NOTE NOTE
# HACK: Determine the parent object based on the comment list queryset.
# the {% render_comment_list for article %} tag does not pass the object in a general form to the template.
# Not assuming that 'object.pk' holds the correct value.
#
# This obviously doesn't work when the list is empty.
# To address that, the client-side code also fixes that, by looking for the object ID in the nearby form.
target_object_id = context.get('target_object_id', None)
if not target_object_id:
comment_list = context['comment_list']
if isinstance(comment_list, list) and comment_list:
target_object_id = comment_list[0].object_pk
# Render the node
context['USE_THREADEDCOMMENTS'] = appsettings.USE_THREADEDCOMMENTS
context['target_object_id'] = target_object_id
if django.VERSION >= (1, 8):
context = context.flatten()
return self.nodelist.render(context)
@register.tag
def fluent_comments_list(parser, token):
"""
A tag to select the proper template for the current comments app.
"""
return FluentCommentsList()
class RenderCommentNode(BaseInclusionNode):
min_args = 1
max_args = 1
def get_template_name(self, *tag_args, **tag_kwargs):
return get_comment_template_name(comment=tag_args[0])
def get_context_data(self, parent_context, *tag_args, **tag_kwargs):
return get_comment_context_data(comment=tag_args[0])
@register.tag
def render_comment(parser, token):
"""
Render a single comment.
This tag does not exist in the standard django_comments,
because it only renders a complete list.
"""
return RenderCommentNode.parse(parser, token)
| agpl-3.0 | 6,444,168,918,875,907,000 | 34.018868 | 115 | 0.655352 | false |
sysbot/CouchPotatoServer | couchpotato/core/plugins/scanner/main.py | 1 | 33209 | from couchpotato import get_session
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import toUnicode, simplifyString, ss, sp
from couchpotato.core.helpers.variable import getExt, getImdb, tryInt, \
splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import File, Media
from enzyme.exceptions import NoParserError, ParseError
from guessit import guess_movie_info
from subliminal.videos import Video
import enzyme
import os
import re
import threading
import time
import traceback
from six.moves import filter, map, zip
log = CPLog(__name__)
class Scanner(Plugin):
ignored_in_path = [os.path.sep + 'extracted' + os.path.sep, 'extracting', '_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_',
'_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo',
'thumbs.db', 'ehthumbs.db', 'desktop.ini'] #unpacking, smb-crap, hidden files
ignore_names = ['extract', 'extracting', 'extracted', 'movie', 'movies', 'film', 'films', 'download', 'downloads', 'video_ts', 'audio_ts', 'bdmv', 'certificate']
extensions = {
'movie': ['mkv', 'wmv', 'avi', 'mpg', 'mpeg', 'mp4', 'm2ts', 'iso', 'img', 'mdf', 'ts', 'm4v'],
'movie_extra': ['mds'],
'dvd': ['vts_*', 'vob'],
'nfo': ['nfo', 'txt', 'tag'],
'subtitle': ['sub', 'srt', 'ssa', 'ass'],
'subtitle_extra': ['idx'],
'trailer': ['mov', 'mp4', 'flv']
}
file_types = {
'subtitle': ('subtitle', 'subtitle'),
'subtitle_extra': ('subtitle', 'subtitle_extra'),
'trailer': ('video', 'trailer'),
'nfo': ('nfo', 'nfo'),
'movie': ('video', 'movie'),
'movie_extra': ('movie', 'movie_extra'),
'backdrop': ('image', 'backdrop'),
'poster': ('image', 'poster'),
'thumbnail': ('image', 'thumbnail'),
'leftover': ('leftover', 'leftover'),
}
file_sizes = { # in MB
'movie': {'min': 300},
'trailer': {'min': 2, 'max': 250},
'backdrop': {'min': 0, 'max': 5},
}
codecs = {
'audio': ['dts', 'ac3', 'ac3d', 'mp3'],
'video': ['x264', 'h264', 'divx', 'xvid']
}
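    # Numeric audio format tags (as commonly stored in AVI/WAV headers) mapped
    # to codec names, e.g. 0x2000 is the tag conventionally used for AC-3.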
audio_codec_map = {
0x2000: 'ac3',
0x2001: 'dts',
0x0055: 'mp3',
0x0050: 'mp2',
0x0001: 'pcm',
0x003: 'pcm',
0x77a1: 'tta1',
0x5756: 'wav',
0x6750: 'vorbis',
0xF1AC: 'flac',
0x00ff: 'aac',
}
source_media = {
'bluray': ['bluray', 'blu-ray', 'brrip', 'br-rip'],
'hddvd': ['hddvd', 'hd-dvd'],
'dvd': ['dvd'],
'hdtv': ['hdtv']
}
clean = '[ _\,\.\(\)\[\]\-]?(extended.cut|directors.cut|french|swedisch|danish|dutch|swesub|spanish|german|ac3|dts|custom|dc|divx|divx5|dsr|dsrip|dutch|dvd|dvdr|dvdrip|dvdscr|dvdscreener|screener|dvdivx|cam|fragment|fs|hdtv|hdrip' \
'|hdtvrip|internal|limited|multisubs|ntsc|ogg|ogm|pal|pdtv|proper|repack|rerip|retail|r3|r5|bd5|se|svcd|swedish|german|read.nfo|nfofix|unrated|ws|telesync|ts|telecine|tc|brrip|bdrip|video_ts|audio_ts|480p|480i|576p|576i|720p|720i|1080p|1080i|hrhd|hrhdtv|hddvd|bluray|x264|h264|xvid|xvidvd|xxx|www.www|cd[1-9]|\[.*\])([ _\,\.\(\)\[\]\-]|$)'
multipart_regex = [
'[ _\.-]+cd[ _\.-]*([0-9a-d]+)', #*cd1
'[ _\.-]+dvd[ _\.-]*([0-9a-d]+)', #*dvd1
'[ _\.-]+part[ _\.-]*([0-9a-d]+)', #*part1
'[ _\.-]+dis[ck][ _\.-]*([0-9a-d]+)', #*disk1
'cd[ _\.-]*([0-9a-d]+)$', #cd1.ext
'dvd[ _\.-]*([0-9a-d]+)$', #dvd1.ext
'part[ _\.-]*([0-9a-d]+)$', #part1.mkv
'dis[ck][ _\.-]*([0-9a-d]+)$', #disk1.mkv
'()[ _\.-]+([0-9]*[abcd]+)(\.....?)$',
'([a-z])([0-9]+)(\.....?)$',
'()([ab])(\.....?)$' #*a.mkv
]
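    # e.g. "movie.part2.mkv": removeMultipart() gives "movie.mkv" and
    # getPartNumber() gives "2"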
cp_imdb = '(.cp.(?P<id>tt[0-9{7}]+).)'
def __init__(self):
addEvent('scanner.create_file_identifier', self.createStringIdentifier)
addEvent('scanner.remove_cptag', self.removeCPTag)
addEvent('scanner.scan', self.scan)
addEvent('scanner.name_year', self.getReleaseNameYear)
addEvent('scanner.partnumber', self.getPartNumber)
def scan(self, folder = None, files = None, release_download = None, simple = False, newer_than = 0, return_ignored = True, on_found = None):
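        # Walk the folder (or use the given file list), group files per movie
        # identifier, and return a {identifier: group} dict of processed movies.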
folder = sp(folder)
if not folder or not os.path.isdir(folder):
            log.error('Folder doesn\'t exist: %s', folder)
return {}
# Get movie "master" files
movie_files = {}
leftovers = []
# Scan all files of the folder if no files are set
if not files:
check_file_date = True
try:
files = []
for root, dirs, walk_files in os.walk(folder):
files.extend([sp(os.path.join(root, filename)) for filename in walk_files])
# Break if CP wants to shut down
if self.shuttingDown():
break
except:
log.error('Failed getting files from %s: %s', (folder, traceback.format_exc()))
log.debug('Found %s files to scan and group in %s', (len(files), folder))
else:
check_file_date = False
files = [sp(x) for x in files]
for file_path in files:
if not os.path.exists(file_path):
continue
# Remove ignored files
if self.isSampleFile(file_path):
leftovers.append(file_path)
continue
elif not self.keepFile(file_path):
continue
is_dvd_file = self.isDVDFile(file_path)
if self.filesizeBetween(file_path, self.file_sizes['movie']) or is_dvd_file: # Minimal 300MB files or is DVD file
# Normal identifier
identifier = self.createStringIdentifier(file_path, folder, exclude_filename = is_dvd_file)
identifiers = [identifier]
# Identifier with quality
quality = fireEvent('quality.guess', [file_path], single = True) if not is_dvd_file else {'identifier':'dvdr'}
if quality:
identifier_with_quality = '%s %s' % (identifier, quality.get('identifier', ''))
identifiers = [identifier_with_quality, identifier]
if not movie_files.get(identifier):
movie_files[identifier] = {
'unsorted_files': [],
'identifiers': identifiers,
'is_dvd': is_dvd_file,
}
movie_files[identifier]['unsorted_files'].append(file_path)
else:
leftovers.append(file_path)
# Break if CP wants to shut down
if self.shuttingDown():
break
# Cleanup
del files
# Sort reverse, this prevents "Iron man 2" from getting grouped with "Iron man" as the "Iron Man 2"
# files will be grouped first.
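        # (e.g. an "iron man 2 cd1" file is claimed by the longer "iron man 2"
        # identifier before the shorter "iron man" group can absorb it)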
leftovers = set(sorted(leftovers, reverse = True))
# Group files minus extension
ignored_identifiers = []
for identifier, group in movie_files.items():
if identifier not in group['identifiers'] and len(identifier) > 0: group['identifiers'].append(identifier)
log.debug('Grouping files: %s', identifier)
has_ignored = 0
for file_path in list(group['unsorted_files']):
ext = getExt(file_path)
wo_ext = file_path[:-(len(ext) + 1)]
found_files = set([i for i in leftovers if wo_ext in i])
group['unsorted_files'].extend(found_files)
leftovers = leftovers - found_files
has_ignored += 1 if ext == 'ignore' else 0
if has_ignored == 0:
for file_path in list(group['unsorted_files']):
ext = getExt(file_path)
has_ignored += 1 if ext == 'ignore' else 0
if has_ignored > 0:
ignored_identifiers.append(identifier)
# Break if CP wants to shut down
if self.shuttingDown():
break
# Create identifiers for all leftover files
path_identifiers = {}
for file_path in leftovers:
identifier = self.createStringIdentifier(file_path, folder)
if not path_identifiers.get(identifier):
path_identifiers[identifier] = []
path_identifiers[identifier].append(file_path)
# Group the files based on the identifier
delete_identifiers = []
for identifier, found_files in path_identifiers.items():
log.debug('Grouping files on identifier: %s', identifier)
group = movie_files.get(identifier)
if group:
group['unsorted_files'].extend(found_files)
delete_identifiers.append(identifier)
# Remove the found files from the leftover stack
leftovers = leftovers - set(found_files)
# Break if CP wants to shut down
if self.shuttingDown():
break
# Cleaning up used
for identifier in delete_identifiers:
if path_identifiers.get(identifier):
del path_identifiers[identifier]
del delete_identifiers
# Group based on folder
delete_identifiers = []
for identifier, found_files in path_identifiers.items():
log.debug('Grouping files on foldername: %s', identifier)
for ff in found_files:
new_identifier = self.createStringIdentifier(os.path.dirname(ff), folder)
group = movie_files.get(new_identifier)
if group:
group['unsorted_files'].extend([ff])
delete_identifiers.append(identifier)
# Remove the found files from the leftover stack
                    leftovers = leftovers - set([ff])
# Break if CP wants to shut down
if self.shuttingDown():
break
# leftovers should be empty
if leftovers:
log.debug('Some files are still left over: %s', leftovers)
# Cleaning up used
for identifier in delete_identifiers:
if path_identifiers.get(identifier):
del path_identifiers[identifier]
del delete_identifiers
# Make sure we remove older / still extracting files
valid_files = {}
        while not self.shuttingDown():
try:
identifier, group = movie_files.popitem()
except:
break
# Check if movie is fresh and maybe still unpacking, ignore files newer than 1 minute
file_too_new = False
for cur_file in group['unsorted_files']:
if not os.path.isfile(cur_file):
file_too_new = time.time()
break
file_time = [os.path.getmtime(cur_file), os.path.getctime(cur_file)]
for t in file_time:
if t > time.time() - 60:
file_too_new = tryInt(time.time() - t)
break
if file_too_new:
break
if check_file_date and file_too_new:
try:
time_string = time.ctime(file_time[0])
except:
try:
time_string = time.ctime(file_time[1])
except:
time_string = 'unknown'
log.info('Files seem to be still unpacking or just unpacked (created on %s), ignoring for now: %s', (time_string, identifier))
# Delete the unsorted list
del group['unsorted_files']
continue
# Only process movies newer than x
if newer_than and newer_than > 0:
has_new_files = False
for cur_file in group['unsorted_files']:
file_time = [os.path.getmtime(cur_file), os.path.getctime(cur_file)]
if file_time[0] > newer_than or file_time[1] > newer_than:
has_new_files = True
break
if not has_new_files:
log.debug('None of the files have changed since %s for %s, skipping.', (time.ctime(newer_than), identifier))
# Delete the unsorted list
del group['unsorted_files']
continue
valid_files[identifier] = group
del movie_files
total_found = len(valid_files)
# Make sure only one movie was found if a download ID is provided
if release_download and total_found == 0:
log.info('Download ID provided (%s), but no groups found! Make sure the download contains valid media files (fully extracted).', release_download.get('imdb_id'))
elif release_download and total_found > 1:
log.info('Download ID provided (%s), but more than one group found (%s). Ignoring Download ID...', (release_download.get('imdb_id'), len(valid_files)))
release_download = None
# Determine file types
db = get_session()
processed_movies = {}
        while not self.shuttingDown():
try:
identifier, group = valid_files.popitem()
except:
break
if return_ignored is False and identifier in ignored_identifiers:
log.debug('Ignore file found, ignoring release: %s', identifier)
continue
# Group extra (and easy) files first
group['files'] = {
'movie_extra': self.getMovieExtras(group['unsorted_files']),
'subtitle': self.getSubtitles(group['unsorted_files']),
'subtitle_extra': self.getSubtitlesExtras(group['unsorted_files']),
'nfo': self.getNfo(group['unsorted_files']),
'trailer': self.getTrailers(group['unsorted_files']),
'leftover': set(group['unsorted_files']),
}
# Media files
if group['is_dvd']:
group['files']['movie'] = self.getDVDFiles(group['unsorted_files'])
else:
group['files']['movie'] = self.getMediaFiles(group['unsorted_files'])
if len(group['files']['movie']) == 0:
log.error('Couldn\'t find any movie files for %s', identifier)
continue
log.debug('Getting metadata for %s', identifier)
group['meta_data'] = self.getMetaData(group, folder = folder, release_download = release_download)
# Subtitle meta
group['subtitle_language'] = self.getSubtitleLanguage(group) if not simple else {}
# Get parent dir from movie files
for movie_file in group['files']['movie']:
group['parentdir'] = os.path.dirname(movie_file)
group['dirname'] = None
folder_names = group['parentdir'].replace(folder, '').split(os.path.sep)
folder_names.reverse()
# Try and get a proper dirname, so no "A", "Movie", "Download" etc
for folder_name in folder_names:
if folder_name.lower() not in self.ignore_names and len(folder_name) > 2:
group['dirname'] = folder_name
break
break
# Leftover "sorted" files
for file_type in group['files']:
                if file_type != 'leftover':
group['files']['leftover'] -= set(group['files'][file_type])
group['files'][file_type] = list(group['files'][file_type])
group['files']['leftover'] = list(group['files']['leftover'])
# Delete the unsorted list
del group['unsorted_files']
# Determine movie
group['library'] = self.determineMovie(group, release_download = release_download)
if not group['library']:
log.error('Unable to determine movie: %s', group['identifiers'])
else:
movie = db.query(Media).filter_by(library_id = group['library']['id']).first()
group['movie_id'] = None if not movie else movie.id
db.expire_all()
processed_movies[identifier] = group
# Notify parent & progress on something found
if on_found:
on_found(group, total_found, total_found - len(processed_movies))
# Wait for all the async events calm down a bit
while threading.activeCount() > 100 and not self.shuttingDown():
log.debug('Too many threads active, waiting a few seconds')
time.sleep(10)
if len(processed_movies) > 0:
log.info('Found %s movies in the folder %s', (len(processed_movies), folder))
else:
log.debug('Found no movies in the folder %s', folder)
return processed_movies
def getMetaData(self, group, folder = '', release_download = None):
data = {}
files = list(group['files']['movie'])
for cur_file in files:
if not self.filesizeBetween(cur_file, self.file_sizes['movie']): continue # Ignore smaller files
meta = self.getMeta(cur_file)
try:
data['video'] = meta.get('video', self.getCodec(cur_file, self.codecs['video']))
data['audio'] = meta.get('audio', self.getCodec(cur_file, self.codecs['audio']))
data['resolution_width'] = meta.get('resolution_width', 720)
data['resolution_height'] = meta.get('resolution_height', 480)
data['audio_channels'] = meta.get('audio_channels', 2.0)
data['aspect'] = round(float(meta.get('resolution_width', 720)) / meta.get('resolution_height', 480), 2)
except:
log.debug('Error parsing metadata: %s %s', (cur_file, traceback.format_exc()))
pass
if data.get('audio'): break
        # Use the quality from the download first; if that fails, guess from the files and finally fall back to dvdr/dvdrip
data['quality'] = None
if release_download and release_download.get('quality'):
data['quality'] = fireEvent('quality.single', release_download.get('quality'), single = True)
if not data['quality']:
data['quality'] = fireEvent('quality.guess', files = files, extra = data, single = True)
if not data['quality']:
data['quality'] = fireEvent('quality.single', 'dvdr' if group['is_dvd'] else 'dvdrip', single = True)
data['quality_type'] = 'HD' if data.get('resolution_width', 0) >= 1280 or data['quality'].get('hd') else 'SD'
filename = re.sub('(.cp\(tt[0-9{7}]+\))', '', files[0])
data['group'] = self.getGroup(filename[len(folder):])
data['source'] = self.getSourceMedia(filename)
return data
def getMeta(self, filename):
try:
p = enzyme.parse(filename)
# Video codec
vc = ('h264' if p.video[0].codec == 'AVC1' else p.video[0].codec).lower()
# Audio codec
ac = p.audio[0].codec
try: ac = self.audio_codec_map.get(p.audio[0].codec)
except: pass
return {
'video': vc,
'audio': ac,
'resolution_width': tryInt(p.video[0].width),
'resolution_height': tryInt(p.video[0].height),
'audio_channels': p.audio[0].channels,
}
except ParseError:
log.debug('Failed to parse meta for %s', filename)
except NoParserError:
log.debug('No parser found for %s', filename)
except:
log.debug('Failed parsing %s', filename)
return {}
def getSubtitleLanguage(self, group):
detected_languages = {}
# Subliminal scanner
paths = None
try:
paths = group['files']['movie']
scan_result = []
for p in paths:
if not group['is_dvd']:
video = Video.from_path(toUnicode(p))
video_result = [(video, video.scan())]
scan_result.extend(video_result)
for video, detected_subtitles in scan_result:
for s in detected_subtitles:
if s.language and s.path not in paths:
detected_languages[s.path] = [s.language]
except:
log.debug('Failed parsing subtitle languages for %s: %s', (paths, traceback.format_exc()))
# IDX
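        # .idx sidecar files list their languages on lines such as "id: en"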
for extra in group['files']['subtitle_extra']:
try:
if os.path.isfile(extra):
output = open(extra, 'r')
txt = output.read()
output.close()
idx_langs = re.findall('\nid: (\w+)', txt)
sub_file = '%s.sub' % os.path.splitext(extra)[0]
if len(idx_langs) > 0 and os.path.isfile(sub_file):
detected_languages[sub_file] = idx_langs
except:
log.error('Failed parsing subtitle idx for %s: %s', (extra, traceback.format_exc()))
return detected_languages
def determineMovie(self, group, release_download = None):
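        # Identification cascade: downloader-supplied id -> cp(tt...) tag in the
        # path -> nfo contents -> imdb id in any filename -> existing DB entry
        # -> name/year search.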
# Get imdb id from downloader
imdb_id = release_download and release_download.get('imdb_id')
if imdb_id:
log.debug('Found movie via imdb id from it\'s download id: %s', release_download.get('imdb_id'))
files = group['files']
# Check for CP(imdb_id) string in the file paths
if not imdb_id:
for cur_file in files['movie']:
imdb_id = self.getCPImdb(cur_file)
if imdb_id:
log.debug('Found movie via CP tag: %s', cur_file)
break
# Check and see if nfo contains the imdb-id
nfo_file = None
if not imdb_id:
try:
for nf in files['nfo']:
imdb_id = getImdb(nf, check_inside = True)
if imdb_id:
log.debug('Found movie via nfo file: %s', nf)
nfo_file = nf
break
except:
pass
# Check and see if filenames contains the imdb-id
if not imdb_id:
try:
for filetype in files:
for filetype_file in files[filetype]:
imdb_id = getImdb(filetype_file)
if imdb_id:
                            log.debug('Found movie via imdb in filename: %s', filetype_file)
break
except:
pass
# Check if path is already in db
if not imdb_id:
db = get_session()
for cf in files['movie']:
f = db.query(File).filter_by(path = toUnicode(cf)).first()
try:
imdb_id = f.library[0].identifier
log.debug('Found movie via database: %s', cf)
cur_file = cf
break
except:
pass
# Search based on identifiers
if not imdb_id:
for identifier in group['identifiers']:
if len(identifier) > 2:
try: filename = list(group['files'].get('movie'))[0]
except: filename = None
name_year = self.getReleaseNameYear(identifier, file_name = filename if not group['is_dvd'] else None)
if name_year.get('name') and name_year.get('year'):
movie = fireEvent('movie.search', q = '%(name)s %(year)s' % name_year, merge = True, limit = 1)
if len(movie) > 0:
imdb_id = movie[0].get('imdb')
                            log.debug('Found movie via search: %s', identifier)
if imdb_id: break
else:
                log.debug('Identifier too short to use for search: %s', identifier)
if imdb_id:
return fireEvent('library.add.movie', attrs = {
'identifier': imdb_id
}, update_after = False, single = True)
log.error('No imdb_id found for %s. Add a NFO file with IMDB id or add the year to the filename.', group['identifiers'])
return {}
def getCPImdb(self, string):
try:
m = re.search(self.cp_imdb, string.lower())
id = m.group('id')
if id: return id
except AttributeError:
pass
return False
def removeCPTag(self, name):
try:
return re.sub(self.cp_imdb, '', name)
except:
pass
return name
def getSamples(self, files):
return set(filter(lambda s: self.isSampleFile(s), files))
def getMediaFiles(self, files):
def test(s):
return self.filesizeBetween(s, self.file_sizes['movie']) and getExt(s.lower()) in self.extensions['movie'] and not self.isSampleFile(s)
return set(filter(test, files))
def getMovieExtras(self, files):
return set(filter(lambda s: getExt(s.lower()) in self.extensions['movie_extra'], files))
def getDVDFiles(self, files):
def test(s):
return self.isDVDFile(s)
return set(filter(test, files))
def getSubtitles(self, files):
return set(filter(lambda s: getExt(s.lower()) in self.extensions['subtitle'], files))
def getSubtitlesExtras(self, files):
return set(filter(lambda s: getExt(s.lower()) in self.extensions['subtitle_extra'], files))
def getNfo(self, files):
return set(filter(lambda s: getExt(s.lower()) in self.extensions['nfo'], files))
def getTrailers(self, files):
def test(s):
return re.search('(^|[\W_])trailer\d*[\W_]', s.lower()) and self.filesizeBetween(s, self.file_sizes['trailer'])
return set(filter(test, files))
def getImages(self, files):
def test(s):
return getExt(s.lower()) in ['jpg', 'jpeg', 'png', 'gif', 'bmp', 'tbn']
files = set(filter(test, files))
images = {
            'backdrop': set(filter(lambda s: re.search('(^|[\W_])(fanart|backdrop)\d*[\W_]', s.lower()) and self.filesizeBetween(s, self.file_sizes['backdrop']), files))
}
# Rest
images['rest'] = files - images['backdrop']
return images
def isDVDFile(self, file_name):
if list(set(file_name.lower().split(os.path.sep)) & set(['video_ts', 'audio_ts'])):
return True
for needle in ['vts_', 'video_ts', 'audio_ts', 'bdmv', 'certificate']:
if needle in file_name.lower():
return True
return False
def keepFile(self, filename):
# ignoredpaths
for i in self.ignored_in_path:
if i in filename.lower():
log.debug('Ignored "%s" contains "%s".', (filename, i))
return False
# All is OK
return True
def isSampleFile(self, filename):
is_sample = re.search('(^|[\W_])sample\d*[\W_]', filename.lower())
if is_sample: log.debug('Is sample file: %s', filename)
return is_sample
def filesizeBetween(self, file, file_size = None):
        if not file_size: file_size = {}
try:
return (file_size.get('min', 0) * 1048576) < os.path.getsize(file) < (file_size.get('max', 100000) * 1048576)
except:
log.error('Couldn\'t get filesize of %s.', file)
return False
def createStringIdentifier(self, file_path, folder = '', exclude_filename = False):
year = self.findYear(file_path)
identifier = file_path.replace(folder, '').lstrip(os.path.sep) # root folder
identifier = os.path.splitext(identifier)[0] # ext
try:
path_split = splitString(identifier, os.path.sep)
identifier = path_split[-2] if len(path_split) > 1 and len(path_split[-2]) > len(path_split[-1]) else path_split[-1] # Only get filename
except: pass
if exclude_filename:
identifier = identifier[:len(identifier) - len(os.path.split(identifier)[-1])]
# multipart
identifier = self.removeMultipart(identifier)
# remove cptag
identifier = self.removeCPTag(identifier)
# groups, release tags, scenename cleaner, regex isn't correct
identifier = re.sub(self.clean, '::', simplifyString(identifier)).strip(':')
# Year
if year and identifier[:4] != year:
split_by = ':::' if ':::' in identifier else year
identifier = '%s %s' % (identifier.split(split_by)[0].strip(), year)
else:
identifier = identifier.split('::')[0]
# Remove duplicates
out = []
for word in identifier.split():
if not word in out:
out.append(word)
identifier = ' '.join(out)
return simplifyString(identifier)
def removeMultipart(self, name):
for regex in self.multipart_regex:
try:
found = re.sub(regex, '', name)
if found != name:
name = found
except:
pass
return name
def getPartNumber(self, name):
for regex in self.multipart_regex:
try:
found = re.search(regex, name)
if found:
return found.group(1)
return 1
except:
pass
return 1
def getCodec(self, filename, codecs):
codecs = map(re.escape, codecs)
try:
codec = re.search('[^A-Z0-9](?P<codec>' + '|'.join(codecs) + ')[^A-Z0-9]', filename, re.I)
return (codec and codec.group('codec')) or ''
except:
return ''
def getGroup(self, file):
try:
match = re.findall('\-([A-Z0-9]+)[\.\/]', file, re.I)
return match[-1] or ''
except:
return ''
def getSourceMedia(self, file):
for media in self.source_media:
for alias in self.source_media[media]:
if alias in file.lower():
return media
return None
def findYear(self, text):
# Search year inside () or [] first
matches = re.findall('(\(|\[)(?P<year>19[0-9]{2}|20[0-9]{2})(\]|\))', text)
if matches:
return matches[-1][1]
# Search normal
matches = re.findall('(?P<year>19[0-9]{2}|20[0-9]{2})', text)
if matches:
return matches[-1]
return ''
def getReleaseNameYear(self, release_name, file_name = None):
release_name = release_name.strip(' .-_')
# Use guessit first
guess = {}
if file_name:
try:
guessit = guess_movie_info(toUnicode(file_name))
if guessit.get('title') and guessit.get('year'):
guess = {
'name': guessit.get('title'),
'year': guessit.get('year'),
}
except:
log.debug('Could not detect via guessit "%s": %s', (file_name, traceback.format_exc()))
# Backup to simple
cleaned = ' '.join(re.split('\W+', simplifyString(release_name)))
cleaned = re.sub(self.clean, ' ', cleaned)
for year_str in [file_name, release_name, cleaned]:
if not year_str: continue
year = self.findYear(year_str)
if year:
break
cp_guess = {}
if year: # Split name on year
try:
movie_name = cleaned.rsplit(year, 1).pop(0).strip()
if movie_name:
cp_guess = {
'name': movie_name,
'year': int(year),
}
except:
pass
        if not cp_guess: # Fall back to the first space-separated token
try:
movie_name = cleaned.split(' ').pop(0).strip()
cp_guess = {
'name': movie_name,
'year': int(year) if movie_name[:4] != year else 0,
}
except:
pass
if cp_guess.get('year') == guess.get('year') and len(cp_guess.get('name', '')) > len(guess.get('name', '')):
return cp_guess
elif guess == {}:
return cp_guess
return guess
| gpl-3.0 | -6,318,653,490,351,516,000 | 35.898889 | 351 | 0.523202 | false |
mppmu/secdec | nodist_examples/hexabox/generate_I73_1.py | 1 | 2168 | #! /usr/bin/env python
from pySecDec.loop_integral import loop_package
import pySecDec as psd
li = psd.loop_integral.LoopIntegralFromPropagators(
loop_momenta = ['k1','k2'],
external_momenta = ['p1','p2','p3','p4','p5'],
#Lorentz_indices = ['mu'],
propagators = ['k1**2','(k1-p1)**2','(k1-p1-p2)**2','(k1-p1-p2-p3)**2','k2**2','(k2-p1-p2-p3-p4)**2','(k1-k2)**2','(k1-k2+p4)**2','(k2-p1)**2','(k2-p1-p2)**2','(k2-p1-p2-p3)**2'],
powerlist = [1,1,1,1,1,0,1,0,0,0,0],
#numerator = 'k1(mu)*k2(mu)',
replacement_rules = [
('p1*p1', 0),
('p2*p2', 0),
('p3*p3', 0),
('p4*p4', 0),
('p1*p2', 'v1/2'),
('p2*p3', 'v2/2'),
('p1*p3', '(v4-v1-v2)/2'),
('p1*p4', '(v2-v5-v4)/2'),
('p2*p4', '(-v2-v3+v5)/2'),
('p3*p4', 'v3/2')
]
)
Mandelstam_symbols = ['v1', 'v2','v3', 'v4', 'v5']
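# v1..v5 are the five invariants of the massless five-point kinematics
# encoded in the replacement_rules above.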
#mass_symbols = []
loop_package(
name = 'I73_1',
loop_integral = li,
real_parameters = Mandelstam_symbols,
# the highest order of the final epsilon expansion --> change this value to whatever you think is appropriate
requested_order = 0,
additional_prefactor = 'exp(2*EulerGamma*eps)',
# the optimization level to use in FORM (can be 0, 1, 2, 3)
form_optimization_level = 2,
# the WorkSpace parameter for FORM
form_work_space = '2G',
# the method to be used for the sector decomposition
# valid values are ``iterative`` or ``geometric`` or ``geometric_ku``
decomposition_method = 'geometric',
# if you choose ``geometric[_ku]`` and 'normaliz' is not in your
# $PATH, you can set the path to the 'normaliz' command-line
# executable here
#normaliz_executable='/path/to/normaliz',
# whether or not to produce code to perform the contour deformation
# contour deformation is not required if we only want to compute euclidean points (all Mandelstam invariants negative)
contour_deformation = False,
# no symmetries --> no need to run the full symmetry finder
#use_Pak = False,
)
# generates 34 sectors, no symmetries
| gpl-3.0 | 7,686,921,309,786,268,000 | 29.111111 | 179 | 0.572417 | false |
boothead/karl | karl/content/views/tests/test_utils.py | 1 | 3676 | # Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: [email protected]
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import unittest
from zope.testing.cleanup import cleanUp
from karl import testing
from repoze.bfg import testing as bfgtesting
class ExtractDescriptionTests(unittest.TestCase):
def _callFUT(self, htmlstring):
from karl.content.views.utils import extract_description
return extract_description(htmlstring)
def test_plain_bytes(self):
summary = self._callFUT("I am text")
self.assertEqual(summary, "I am text")
def test_plain_unicode(self):
summary = self._callFUT(u"I am text")
self.assertEqual(summary, u"I am text")
def test_html_body(self):
summary = self._callFUT("<html><body>I am text</body></html>")
self.assertEqual(summary, "I am text")
def test_html_elements(self):
summary = self._callFUT("<div>I</div> <span>am</span> <b>text</b>")
self.assertEqual(summary, "I am text")
def test_bad_html(self):
summary = self._callFUT("<b>I <i>am</i> <u>broken text")
self.assertEqual(summary, "I am broken text")
def test_newline(self):
summary = self._callFUT("I am \r\n divided text")
self.assertEqual(summary, "I am divided text")
def test_wiki_markup(self):
summary = self._callFUT("I am ((wiki linked)) text")
self.assertEqual(summary, "I am wiki linked text")
def test_limit(self):
summary = self._callFUT("I am quite long text. " * 50)
self.assertEqual(len(summary), 222)
self.assertTrue(summary.endswith('...'))
class Test_get_show_sendalert(unittest.TestCase):
def setUp(self):
cleanUp()
def tearDown(self):
cleanUp()
def _call_fut(self, context, request):
from karl.content.views.utils import get_show_sendalert
return get_show_sendalert(context, request)
def test_not_intranet(self):
context = testing.DummyModel()
self.failUnless(self._call_fut(context, None))
def test_in_intranet(self):
from karl.content.interfaces import IIntranets
from zope.interface import directlyProvides
intranet = testing.DummyModel()
directlyProvides(intranet, IIntranets)
intranet['foo'] = context = testing.DummyModel()
self.failIf(self._call_fut(context, None))
def test_override_adapter(self):
class DummyAdapter(object):
show_sendalert = 'foo'
def __init__(self, context, request):
pass
from zope.interface import Interface
from karl.content.views.interfaces import IShowSendalert
bfgtesting.registerAdapter(
DummyAdapter, (Interface, Interface), IShowSendalert
)
context = testing.DummyModel()
request = bfgtesting.DummyRequest()
self.assertEqual(self._call_fut(context, request), 'foo')
| gpl-2.0 | -6,927,958,212,933,433,000 | 35.39604 | 75 | 0.670022 | false |
pkill-nine/qutebrowser | qutebrowser/config/configdata.py | 1 | 75773 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Configuration data for config.py.
Module attributes:
FIRST_COMMENT: The initial comment header to place in the config.
SECTION_DESC: A dictionary with descriptions for sections.
DATA: A global read-only copy of the default config, an OrderedDict of
sections.
"""
import sys
import re
import collections
from qutebrowser.config import configtypes as typ
from qutebrowser.config import sections as sect
from qutebrowser.config.value import SettingValue
from qutebrowser.utils.qtutils import MAXVALS
from qutebrowser.utils import usertypes, qtutils
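# FIRST_COMMENT below documents the configparser extended-interpolation format.
# An illustrative example of that syntax (the ${startpage} reference mirrors
# the 'default-page' default defined in data() further down):
#   [general]
#   startpage = https://start.duckduckgo.com
#   default-page = ${startpage}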
FIRST_COMMENT = r"""
# vim: ft=dosini
# Configfile for qutebrowser.
#
# This configfile is parsed by python's configparser in extended
# interpolation mode. The format is very INI-like, so there are
# categories like [general] with "key = value"-pairs.
#
# Note that you shouldn't add your own comments, as this file is
# regenerated every time the config is saved.
#
# Interpolation looks like ${value} or ${section:value} and will be
# replaced by the respective value.
#
# Some settings will expand environment variables. Note that, since
# interpolation is run first, you will need to escape the $ char as
# described below.
#
# This is the default config, so if you want to remove anything from
# here (as opposed to change/add), for example a key binding, set it to
# an empty value.
#
# You will need to escape the following values:
# - # at the start of the line (at the first position of the key) (\#)
# - $ in a value ($$)
"""
SECTION_DESC = {
'general': "General/miscellaneous options.",
'ui': "General options related to the user interface.",
'input': "Options related to input modes.",
'network': "Settings related to the network.",
'completion': "Options related to completion and command history.",
'tabs': "Configuration of the tab bar.",
'storage': "Settings related to cache and storage.",
'content': "Loaded plugins/scripts and allowed actions.",
'hints': "Hinting settings.",
    'searchengines': (
        "Definitions of search engines which can be used via the address "
        "bar.\n"
        "The searchengine named `DEFAULT` is used when "
        "`general -> auto-search` is true and something other than a URL was "
        "entered to be opened. Other search engines can be used by prepending "
        "the search engine name to the search term, e.g. "
        "`:open google qutebrowser`. The string `{}` will be replaced by the "
        "search term, use `{{` and `}}` for literal `{`/`}` signs."),
'aliases': (
"Aliases for commands.\n"
"By default, no aliases are defined. Example which adds a new command "
"`:qtb` to open qutebrowsers website:\n\n"
"`qtb = open https://www.qutebrowser.org/`"),
'colors': (
"Colors used in the UI.\n"
"A value can be in one of the following format:\n\n"
" * `#RGB`/`#RRGGBB`/`#RRRGGGBBB`/`#RRRRGGGGBBBB`\n"
" * An SVG color name as specified in http://www.w3.org/TR/SVG/"
"types.html#ColorKeywords[the W3C specification].\n"
" * transparent (no color)\n"
" * `rgb(r, g, b)` / `rgba(r, g, b, a)` (values 0-255 or "
"percentages)\n"
" * `hsv(h, s, v)` / `hsva(h, s, v, a)` (values 0-255, hue 0-359)\n"
" * A gradient as explained in http://doc.qt.io/qt-5/"
"stylesheet-reference.html#list-of-property-types[the Qt "
"documentation] under ``Gradient''.\n\n"
"A *.system value determines the color system to use for color "
"interpolation between similarly-named *.start and *.stop entries, "
"regardless of how they are defined in the options. "
"Valid values are 'rgb', 'hsv', and 'hsl'.\n\n"
"The `hints.*` values are a special case as they're real CSS "
"colors, not Qt-CSS colors. There, for a gradient, you need to use "
"`-webkit-gradient`, see https://www.webkit.org/blog/175/introducing-"
"css-gradients/[the WebKit documentation]."),
'fonts': (
"Fonts used for the UI, with optional style/weight/size.\n\n"
" * Style: `normal`/`italic`/`oblique`\n"
" * Weight: `normal`, `bold`, `100`..`900`\n"
" * Size: _number_ `px`/`pt`"),
}
DEFAULT_FONT_SIZE = '10pt' if sys.platform == 'darwin' else '8pt'
def data(readonly=False):
    """Get the default config data.
    Args:
        readonly: Whether the returned sections should be marked read-only.
    Return:
        A {name: section} OrderedDict.
    """
return collections.OrderedDict([
('general', sect.KeyValue(
('ignore-case',
SettingValue(typ.IgnoreCase(), 'smart'),
"Whether to find text on a page case-insensitively."),
('startpage',
SettingValue(typ.List(typ.String()),
'https://start.duckduckgo.com'),
"The default page(s) to open at the start, separated by commas."),
('yank-ignored-url-parameters',
SettingValue(typ.List(typ.String()),
'ref,utm_source,utm_medium,utm_campaign,utm_term,'
'utm_content'),
"The URL parameters to strip with :yank url, separated by "
"commas."),
('default-open-dispatcher',
SettingValue(typ.String(none_ok=True), ''),
"The default program used to open downloads. Set to an empty "
"string to use the default internal handler.\n\n"
"Any {} in the string will be expanded to the filename, else "
"the filename will be appended."),
('default-page',
SettingValue(typ.FuzzyUrl(), '${startpage}'),
"The page to open if :open -t/-b/-w is used without URL. Use "
"`about:blank` for a blank page."),
        ('auto-search',
         SettingValue(typ.AutoSearch(), 'naive'),
         "Whether to start a search when something other than a URL is "
         "entered."),
('auto-save-config',
SettingValue(typ.Bool(), 'true'),
"Whether to save the config automatically on quit."),
('auto-save-interval',
SettingValue(typ.Int(minval=0, maxval=MAXVALS['int']), '15000'),
"How often (in milliseconds) to auto-save config/cookies/etc."),
('editor',
SettingValue(typ.ShellCommand(placeholder=True), 'gvim -f "{}"'),
"The editor (and arguments) to use for the `open-editor` "
"command.\n\n"
"The arguments get split like in a shell, so you can use `\"` or "
"`'` to quote them.\n"
"`{}` gets replaced by the filename of the file to be edited."),
('editor-encoding',
SettingValue(typ.Encoding(), 'utf-8'),
"Encoding to use for editor."),
('private-browsing',
SettingValue(typ.Bool(), 'false'),
"Open new windows in private browsing mode which does not record "
"visited pages."),
('developer-extras',
SettingValue(typ.Bool(), 'false',
backends=[usertypes.Backend.QtWebKit]),
"Enable extra tools for Web developers.\n\n"
"This needs to be enabled for `:inspector` to work and also adds "
"an _Inspect_ entry to the context menu. For QtWebEngine, see "
"'qutebrowser --help' instead."),
('print-element-backgrounds',
SettingValue(typ.Bool(), 'true',
backends=(
None if qtutils.version_check('5.8', strict=True)
else [usertypes.Backend.QtWebKit])),
"Whether the background color and images are also drawn when the "
"page is printed.\n"
"This setting only works with Qt 5.8 or newer when using the "
"QtWebEngine backend."),
('xss-auditing',
SettingValue(typ.Bool(), 'false'),
"Whether load requests should be monitored for cross-site "
"scripting attempts.\n\n"
"Suspicious scripts will be blocked and reported in the "
"inspector's JavaScript console. Enabling this feature might "
"have an impact on performance."),
('default-encoding',
SettingValue(typ.String(), 'iso-8859-1'),
"Default encoding to use for websites.\n\n"
"The encoding must be a string describing an encoding such as "
"_utf-8_, _iso-8859-1_, etc."),
('new-instance-open-target',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('tab', "Open a new tab in the existing "
"window and activate the window."),
('tab-bg', "Open a new background tab in the "
"existing window and activate the "
"window."),
('tab-silent', "Open a new tab in the existing "
"window without activating "
"the window."),
('tab-bg-silent', "Open a new background tab "
"in the existing window "
"without activating the "
"window."),
('window', "Open in a new window.")
)), 'tab'),
"How to open links in an existing instance if a new one is "
"launched."),
('new-instance-open-target.window',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('first-opened', "Open new tabs in the first (oldest) "
"opened window."),
('last-opened', "Open new tabs in the last (newest) "
"opened window."),
('last-focused', "Open new tabs in the most recently "
"focused window."),
('last-visible', "Open new tabs in the most recently "
"visible window.")
)), 'last-focused'),
"Which window to choose when opening links as new tabs."),
('log-javascript-console',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('none', "Don't log messages."),
('debug', "Log messages with debug level."),
('info', "Log messages with info level.")
)), 'debug'),
"How to log javascript console messages."),
('save-session',
SettingValue(typ.Bool(), 'false'),
"Whether to always save the open pages."),
('session-default-name',
SettingValue(typ.SessionName(none_ok=True), ''),
"The name of the session to save by default, or empty for the "
"last loaded session."),
('url-incdec-segments',
SettingValue(
typ.FlagList(valid_values=typ.ValidValues(
'host', 'path', 'query', 'anchor')),
'path,query'),
"The URL segments where `:navigate increment/decrement` will "
"search for a number."),
readonly=readonly
)),
('ui', sect.KeyValue(
('history-session-interval',
SettingValue(typ.Int(), '30'),
"The maximum time in minutes between two history items for them "
"to be considered being from the same session. Use -1 to "
"disable separation."),
('zoom-levels',
SettingValue(typ.List(typ.Perc(minval=0)),
'25%,33%,50%,67%,75%,90%,100%,110%,125%,150%,175%,'
'200%,250%,300%,400%,500%'),
"The available zoom levels, separated by commas."),
('default-zoom',
SettingValue(typ.Perc(), '100%'),
"The default zoom level."),
('downloads-position',
SettingValue(typ.VerticalPosition(), 'top'),
"Where to show the downloaded files."),
('status-position',
SettingValue(typ.VerticalPosition(), 'bottom'),
"The position of the status bar."),
('message-timeout',
SettingValue(typ.Int(minval=0), '2000'),
"Time (in ms) to show messages in the statusbar for.\n"
"Set to 0 to never clear messages."),
('message-unfocused',
SettingValue(typ.Bool(), 'false'),
"Whether to show messages in unfocused windows."),
('confirm-quit',
SettingValue(typ.ConfirmQuit(), 'never'),
"Whether to confirm quitting the application."),
('zoom-text-only',
SettingValue(typ.Bool(), 'false',
backends=[usertypes.Backend.QtWebKit]),
"Whether the zoom factor on a frame applies only to the text or "
"to all content."),
('frame-flattening',
SettingValue(typ.Bool(), 'false',
backends=[usertypes.Backend.QtWebKit]),
"Whether to expand each subframe to its contents.\n\n"
"This will flatten all the frames to become one scrollable "
"page."),
('user-stylesheet',
SettingValue(typ.File(none_ok=True), ''),
"User stylesheet to use (absolute filename or filename relative "
"to the config directory). Will expand environment variables."),
('hide-scrollbar',
SettingValue(typ.Bool(), 'true'),
"Hide the main scrollbar."),
('smooth-scrolling',
SettingValue(typ.Bool(), 'false'),
"Whether to enable smooth scrolling for web pages. Note smooth "
"scrolling does not work with the :scroll-px command."),
('remove-finished-downloads',
SettingValue(typ.Int(minval=-1), '-1'),
"Number of milliseconds to wait before removing finished "
"downloads. Will not be removed if value is -1."),
('hide-statusbar',
SettingValue(typ.Bool(), 'false'),
"Whether to hide the statusbar unless a message is shown."),
('statusbar-padding',
SettingValue(typ.Padding(), '1,1,0,0'),
"Padding for statusbar (top, bottom, left, right)."),
('window-title-format',
SettingValue(typ.FormatString(fields=['perc', 'perc_raw', 'title',
'title_sep', 'id',
'scroll_pos', 'host',
'backend', 'private']),
'{perc}{title}{title_sep}qutebrowser'),
"The format to use for the window title. The following "
"placeholders are defined:\n\n"
"* `{perc}`: The percentage as a string like `[10%]`.\n"
"* `{perc_raw}`: The raw percentage, e.g. `10`\n"
"* `{title}`: The title of the current web page\n"
"* `{title_sep}`: The string ` - ` if a title is set, empty "
"otherwise.\n"
"* `{id}`: The internal window ID of this window.\n"
"* `{scroll_pos}`: The page scroll position.\n"
"* `{host}`: The host of the current web page.\n"
"* `{backend}`: Either 'webkit' or 'webengine'\n"
"* `{private}` : Indicates when private mode is enabled.\n"),
('modal-js-dialog',
SettingValue(typ.Bool(), 'false'),
"Use standard JavaScript modal dialog for alert() and confirm()"),
('hide-wayland-decoration',
SettingValue(typ.Bool(), 'false'),
"Hide the window decoration when using wayland "
"(requires restart)"),
('keyhint-blacklist',
SettingValue(typ.List(typ.String(), none_ok=True), ''),
"Keychains that shouldn't be shown in the keyhint dialog\n\n"
"Globs are supported, so ';*' will blacklist all keychains"
"starting with ';'. Use '*' to disable keyhints"),
('keyhint-delay',
SettingValue(typ.Int(minval=0), '500'),
"Time from pressing a key to seeing the keyhint dialog (ms)"),
('prompt-radius',
SettingValue(typ.Int(minval=0), '8'),
"The rounding radius for the edges of prompts."),
('prompt-filebrowser',
SettingValue(typ.Bool(), 'true'),
"Show a filebrowser in upload/download prompts."),
readonly=readonly
)),
('network', sect.KeyValue(
('do-not-track',
SettingValue(typ.Bool(), 'true'),
"Value to send in the `DNT` header."),
('accept-language',
SettingValue(typ.String(none_ok=True), 'en-US,en'),
"Value to send in the `accept-language` header."),
('referer-header',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('always', "Always send."),
('never', "Never send; this is not recommended,"
" as some sites may break."),
('same-domain', "Only send for the same domain."
" This will still protect your privacy, but"
" shouldn't break any sites.")
)), 'same-domain', backends=[usertypes.Backend.QtWebKit]),
"Send the Referer header"),
('user-agent',
SettingValue(typ.UserAgent(none_ok=True), ''),
"User agent to send. Empty to send the default."),
('proxy',
SettingValue(typ.Proxy(), 'system'),
"The proxy to use.\n\n"
"In addition to the listed values, you can use a `socks://...` "
"or `http://...` URL."),
('proxy-dns-requests',
SettingValue(typ.Bool(), 'true',
backends=[usertypes.Backend.QtWebKit]),
"Whether to send DNS requests over the configured proxy."),
('ssl-strict',
SettingValue(typ.BoolAsk(), 'ask'),
"Whether to validate SSL handshakes."),
('dns-prefetch',
SettingValue(typ.Bool(), 'true',
backends=[usertypes.Backend.QtWebKit]),
"Whether to try to pre-fetch DNS entries to speed up browsing."),
('custom-headers',
SettingValue(typ.HeaderDict(none_ok=True), ''),
"Set custom headers for qutebrowser HTTP requests."),
('netrc-file',
SettingValue(typ.File(none_ok=True), ''),
"Set location of a netrc-file for HTTP authentication. If empty, "
"~/.netrc is used."),
readonly=readonly
)),
('completion', sect.KeyValue(
('show',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('always', "Whenever a completion is available."),
('auto', "Whenever a completion is requested."),
('never', "Never.")
)), 'always'),
"When to show the autocompletion window."),
('download-path-suggestion',
SettingValue(
typ.String(valid_values=typ.ValidValues(
('path', "Show only the download path."),
('filename', "Show only download filename."),
('both', "Show download path and filename."))),
'path'),
"What to display in the download filename input."),
('timestamp-format',
SettingValue(typ.TimestampTemplate(none_ok=True), '%Y-%m-%d'),
"How to format timestamps (e.g. for history)"),
('height',
SettingValue(typ.PercOrInt(minperc=0, maxperc=100, minint=1),
'50%'),
"The height of the completion, in px or as percentage of the "
"window."),
('cmd-history-max-items',
SettingValue(typ.Int(minval=-1), '100'),
"How many commands to save in the command history.\n\n"
"0: no history / -1: unlimited"),
('web-history-max-items',
SettingValue(typ.Int(minval=-1, maxval=MAXVALS['int64']), '-1'),
"How many URLs to show in the web history.\n\n"
"0: no history / -1: unlimited"),
('quick-complete',
SettingValue(typ.Bool(), 'true'),
"Whether to move on to the next part when there's only one "
"possible completion left."),
('shrink',
SettingValue(typ.Bool(), 'false'),
"Whether to shrink the completion to be smaller than the "
"configured size if there are no scrollbars."),
('scrollbar-width',
SettingValue(typ.Int(minval=0), '12'),
"Width of the scrollbar in the completion window (in px)."),
('scrollbar-padding',
SettingValue(typ.Int(minval=0), '2'),
"Padding of scrollbar handle in completion window (in px)."),
readonly=readonly
)),
('input', sect.KeyValue(
('timeout',
SettingValue(typ.Int(minval=0, maxval=MAXVALS['int']), '500'),
"Timeout (in milliseconds) for ambiguous key bindings.\n\n"
"If the current input forms both a complete match and a partial "
"match, the complete match will be executed after this time."),
('partial-timeout',
SettingValue(typ.Int(minval=0, maxval=MAXVALS['int']), '5000'),
"Timeout (in milliseconds) for partially typed key bindings.\n\n"
"If the current input forms only partial matches, the keystring "
"will be cleared after this time."),
('insert-mode-on-plugins',
SettingValue(typ.Bool(), 'false'),
"Whether to switch to insert mode when clicking flash and other "
"plugins."),
('auto-leave-insert-mode',
SettingValue(typ.Bool(), 'true'),
"Whether to leave insert mode if a non-editable element is "
"clicked."),
('auto-insert-mode',
SettingValue(typ.Bool(), 'false'),
"Whether to automatically enter insert mode if an editable "
"element is focused after page load."),
('forward-unbound-keys',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('all', "Forward all unbound keys."),
('auto', "Forward unbound non-alphanumeric "
"keys."),
('none', "Don't forward any keys.")
)), 'auto'),
"Whether to forward unbound keys to the webview in normal mode."),
('spatial-navigation',
SettingValue(typ.Bool(), 'false'),
"Enables or disables the Spatial Navigation feature.\n\n"
"Spatial navigation consists in the ability to navigate between "
"focusable elements in a Web page, such as hyperlinks and form "
"controls, by using Left, Right, Up and Down arrow keys. For "
"example, if a user presses the Right key, heuristics determine "
"whether there is an element he might be trying to reach towards "
"the right and which element he probably wants."),
('links-included-in-focus-chain',
SettingValue(typ.Bool(), 'true'),
"Whether hyperlinks should be included in the keyboard focus "
"chain."),
('rocker-gestures',
SettingValue(typ.Bool(), 'false'),
"Whether to enable Opera-like mouse rocker gestures. This "
"disables the context menu."),
('mouse-zoom-divider',
SettingValue(typ.Int(minval=0), '512'),
"How much to divide the mouse wheel movements to translate them "
"into zoom increments."),
readonly=readonly
)),
('tabs', sect.KeyValue(
('background-tabs',
SettingValue(typ.Bool(), 'false'),
"Whether to open new tabs (middleclick/ctrl+click) in "
"background."),
('select-on-remove',
SettingValue(typ.SelectOnRemove(), 'next'),
"Which tab to select when the focused tab is removed."),
('new-tab-position',
SettingValue(typ.NewTabPosition(), 'next'),
"How new tabs are positioned."),
('new-tab-position-explicit',
SettingValue(typ.NewTabPosition(), 'last'),
"How new tabs opened explicitly are positioned."),
('last-close',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('ignore', "Don't do anything."),
('blank', "Load a blank page."),
('startpage', "Load the start page."),
('default-page', "Load the default page."),
('close', "Close the window.")
)), 'ignore'),
"Behavior when the last tab is closed."),
('show',
SettingValue(
typ.String(valid_values=typ.ValidValues(
('always', "Always show the tab bar."),
('never', "Always hide the tab bar."),
('multiple', "Hide the tab bar if only one tab "
"is open."),
('switching', "Show the tab bar when switching "
"tabs.")
)), 'always'),
"When to show the tab bar"),
('show-switching-delay',
SettingValue(typ.Int(), '800'),
"Time to show the tab bar before hiding it when tabs->show is "
"set to 'switching'."),
('wrap',
SettingValue(typ.Bool(), 'true'),
"Whether to wrap when changing tabs."),
('movable',
SettingValue(typ.Bool(), 'true'),
"Whether tabs should be movable."),
('close-mouse-button',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('right', "Close tabs on right-click."),
('middle', "Close tabs on middle-click."),
('none', "Don't close tabs using the mouse.")
)), 'middle'),
"On which mouse button to close tabs."),
('position',
SettingValue(typ.Position(), 'top'),
"The position of the tab bar."),
('show-favicons',
SettingValue(typ.Bool(), 'true'),
"Whether to show favicons in the tab bar."),
('favicon-scale',
SettingValue(typ.Float(minval=0.0), '1.0'),
"Scale for favicons in the tab bar. The tab size is unchanged, "
"so big favicons also require extra `tabs->padding`."),
('width',
SettingValue(typ.PercOrInt(minperc=0, maxperc=100, minint=1),
'20%'),
"The width of the tab bar if it's vertical, in px or as "
"percentage of the window."),
('pinned-width',
SettingValue(typ.Int(minval=10),
'43'),
"The width for pinned tabs with a horizontal tabbar, in px."),
('indicator-width',
SettingValue(typ.Int(minval=0), '3'),
"Width of the progress indicator (0 to disable)."),
('tabs-are-windows',
SettingValue(typ.Bool(), 'false'),
"Whether to open windows instead of tabs."),
('title-format',
SettingValue(typ.FormatString(
fields=['perc', 'perc_raw', 'title', 'title_sep', 'index',
'id', 'scroll_pos', 'host', 'private'], none_ok=True),
'{index}: {title}'),
"The format to use for the tab title. The following placeholders "
"are defined:\n\n"
"* `{perc}`: The percentage as a string like `[10%]`.\n"
"* `{perc_raw}`: The raw percentage, e.g. `10`\n"
"* `{title}`: The title of the current web page\n"
"* `{title_sep}`: The string ` - ` if a title is set, empty "
"otherwise.\n"
"* `{index}`: The index of this tab.\n"
"* `{id}`: The internal tab ID of this tab.\n"
"* `{scroll_pos}`: The page scroll position.\n"
"* `{host}`: The host of the current web page.\n"
"* `{backend}`: Either 'webkit' or 'webengine'\n"
"* `{private}` : Indicates when private mode is enabled.\n"),
('title-format-pinned',
SettingValue(typ.FormatString(
fields=['perc', 'perc_raw', 'title', 'title_sep', 'index',
'id', 'scroll_pos', 'host', 'private'], none_ok=True),
'{index}'),
"The format to use for the tab title for pinned tabs. "
"The same placeholders like for title-format are defined."),
('title-alignment',
SettingValue(typ.TextAlignment(), 'left'),
"Alignment of the text inside of tabs"),
('mousewheel-tab-switching',
SettingValue(typ.Bool(), 'true'),
"Switch between tabs using the mouse wheel."),
('padding',
SettingValue(typ.Padding(), '0,0,5,5'),
"Padding for tabs (top, bottom, left, right)."),
('indicator-padding',
SettingValue(typ.Padding(), '2,2,0,4'),
"Padding for indicators (top, bottom, left, right)."),
readonly=readonly
)),
('storage', sect.KeyValue(
('download-directory',
SettingValue(typ.Directory(none_ok=True), ''),
"The directory to save downloads to. An empty value selects a "
"sensible os-specific default. Will expand environment "
"variables."),
('prompt-download-directory',
SettingValue(typ.Bool(), 'true'),
"Whether to prompt the user for the download location.\n"
"If set to false, 'download-directory' will be used."),
('remember-download-directory',
SettingValue(typ.Bool(), 'true'),
"Whether to remember the last used download directory."),
# Defaults from QWebSettings::QWebSettings() in
# qtwebkit/Source/WebKit/qt/Api/qwebsettings.cpp
('maximum-pages-in-cache',
SettingValue(typ.Int(minval=0, maxval=MAXVALS['int']), '0',
backends=[usertypes.Backend.QtWebKit]),
"The maximum number of pages to hold in the global memory page "
"cache.\n\n"
"The Page Cache allows for a nicer user experience when "
"navigating forth or back to pages in the forward/back history, "
"by pausing and resuming up to _n_ pages.\n\n"
"For more information about the feature, please refer to: "
"http://webkit.org/blog/427/webkit-page-cache-i-the-basics/"),
('offline-web-application-cache',
SettingValue(typ.Bool(), 'true',
backends=[usertypes.Backend.QtWebKit]),
"Whether support for the HTML 5 web application cache feature is "
"enabled.\n\n"
"An application cache acts like an HTTP cache in some sense. For "
"documents that use the application cache via JavaScript, the "
"loader engine will first ask the application cache for the "
"contents, before hitting the network.\n\n"
"The feature is described in details at: "
"http://dev.w3.org/html5/spec/Overview.html#appcache"),
('local-storage',
SettingValue(typ.Bool(), 'true'),
"Whether support for HTML 5 local storage and Web SQL is "
"enabled."),
('cache-size',
SettingValue(typ.Int(none_ok=True, minval=0,
maxval=MAXVALS['int64']), ''),
"Size of the HTTP network cache. Empty to use the default "
"value."),
readonly=readonly
)),
('content', sect.KeyValue(
('allow-images',
SettingValue(typ.Bool(), 'true'),
"Whether images are automatically loaded in web pages."),
('allow-javascript',
SettingValue(typ.Bool(), 'true'),
"Enables or disables the running of JavaScript programs."),
('allow-plugins',
SettingValue(typ.Bool(), 'false'),
"Enables or disables plugins in Web pages.\n\n"
'Qt plugins with a mimetype such as "application/x-qt-plugin" '
"are not affected by this setting."),
('webgl',
SettingValue(typ.Bool(), 'true'),
"Enables or disables WebGL."),
('hyperlink-auditing',
SettingValue(typ.Bool(), 'false'),
"Enable or disable hyperlink auditing (<a ping>)."),
('geolocation',
SettingValue(typ.BoolAsk(), 'ask'),
"Allow websites to request geolocations."),
('notifications',
SettingValue(typ.BoolAsk(), 'ask'),
"Allow websites to show notifications."),
('media-capture',
SettingValue(typ.BoolAsk(), 'ask',
backends=[usertypes.Backend.QtWebEngine]),
"Allow websites to record audio/video."),
('javascript-can-open-windows-automatically',
SettingValue(typ.Bool(), 'false'),
"Whether JavaScript programs can open new windows without user "
"interaction."),
('javascript-can-close-windows',
SettingValue(typ.Bool(), 'false',
backends=[usertypes.Backend.QtWebKit]),
"Whether JavaScript programs can close windows."),
('javascript-can-access-clipboard',
SettingValue(typ.Bool(), 'false'),
"Whether JavaScript programs can read or write to the "
"clipboard.\nWith QtWebEngine, writing the clipboard as response "
"to a user interaction is always allowed."),
('ignore-javascript-prompt',
SettingValue(typ.Bool(), 'false'),
"Whether all javascript prompts should be ignored."),
('ignore-javascript-alert',
SettingValue(typ.Bool(), 'false'),
"Whether all javascript alerts should be ignored."),
('local-content-can-access-remote-urls',
SettingValue(typ.Bool(), 'false'),
"Whether locally loaded documents are allowed to access remote "
"urls."),
('local-content-can-access-file-urls',
SettingValue(typ.Bool(), 'true'),
"Whether locally loaded documents are allowed to access other "
"local urls."),
('cookies-accept',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('all', "Accept all cookies."),
('no-3rdparty', "Accept cookies from the same"
" origin only."),
('no-unknown-3rdparty', "Accept cookies from "
"the same origin only, unless a cookie is "
"already set for the domain."),
('never', "Don't accept cookies at all.")
)), 'no-3rdparty', backends=[usertypes.Backend.QtWebKit]),
"Control which cookies to accept."),
('cookies-store',
SettingValue(typ.Bool(), 'true'),
"Whether to store cookies. Note this option needs a restart with "
"QtWebEngine on Qt < 5.9."),
('host-block-lists',
SettingValue(
typ.List(typ.Url(), none_ok=True),
'https://www.malwaredomainlist.com/hostslist/hosts.txt,'
'http://someonewhocares.org/hosts/hosts,'
'http://winhelp2002.mvps.org/hosts.zip,'
'http://malwaredomains.lehigh.edu/files/justdomains.zip,'
'https://pgl.yoyo.org/adservers/serverlist.php?'
'hostformat=hosts&mimetype=plaintext'),
"List of URLs of lists which contain hosts to block.\n\n"
"The file can be in one of the following formats:\n\n"
"- An '/etc/hosts'-like file\n"
"- One host per line\n"
"- A zip-file of any of the above, with either only one file, or "
"a file named 'hosts' (with any extension)."),
('host-blocking-enabled',
SettingValue(typ.Bool(), 'true'),
"Whether host blocking is enabled."),
('host-blocking-whitelist',
SettingValue(typ.List(typ.String(), none_ok=True), 'piwik.org'),
"List of domains that should always be loaded, despite being "
"ad-blocked.\n\n"
"Domains may contain * and ? wildcards and are otherwise "
"required to exactly match the requested domain.\n\n"
"Local domains are always exempt from hostblocking."),
('enable-pdfjs', SettingValue(typ.Bool(), 'false'),
"Enable pdf.js to view PDF files in the browser.\n\n"
"Note that the files can still be downloaded by clicking"
" the download button in the pdf.js viewer."),
readonly=readonly
)),
('hints', sect.KeyValue(
('border',
SettingValue(typ.String(), '1px solid #E3BE23'),
"CSS border value for hints."),
('mode',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('number', "Use numeric hints. (In this mode you can "
"also type letters form the hinted element to filter "
"and reduce the number of elements that are hinted.)"),
('letter', "Use the chars in the hints -> "
"chars setting."),
('word', "Use hints words based on the html "
"elements and the extra words."),
)), 'letter'),
"Mode to use for hints."),
('chars',
SettingValue(typ.UniqueCharString(minlen=2, completions=[
('asdfghjkl', "Home row"),
('aoeuidnths', "Home row (Dvorak)"),
('abcdefghijklmnopqrstuvwxyz', "All letters"),
]), 'asdfghjkl'),
"Chars used for hint strings."),
('min-chars',
SettingValue(typ.Int(minval=1), '1'),
"Minimum number of chars used for hint strings."),
('scatter',
SettingValue(typ.Bool(), 'true'),
"Whether to scatter hint key chains (like Vimium) or not (like "
"dwb). Ignored for number hints."),
('uppercase',
SettingValue(typ.Bool(), 'false'),
"Make chars in hint strings uppercase."),
('dictionary',
SettingValue(typ.File(required=False), '/usr/share/dict/words'),
"The dictionary file to be used by the word hints."),
('auto-follow',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('always', "Auto-follow whenever there is only a single "
"hint on a page."),
('unique-match', "Auto-follow whenever there is a unique "
"non-empty match in either the hint string (word mode) "
"or filter (number mode)."),
('full-match', "Follow the hint when the user typed the "
"whole hint (letter, word or number mode) or the "
"element's text (only in number mode)."),
('never', "The user will always need to press Enter to "
"follow a hint."),
)), 'unique-match'),
"Controls when a hint can be automatically followed without the "
"user pressing Enter."),
('auto-follow-timeout',
SettingValue(typ.Int(), '0'),
"A timeout (in milliseconds) to inhibit normal-mode key bindings "
"after a successful auto-follow."),
('next-regexes',
SettingValue(typ.List(typ.Regex(flags=re.IGNORECASE)),
r'\bnext\b,\bmore\b,\bnewer\b,\b[>→≫]\b,\b(>>|»)\b,'
r'\bcontinue\b'),
"A comma-separated list of regexes to use for 'next' links."),
('prev-regexes',
SettingValue(typ.List(typ.Regex(flags=re.IGNORECASE)),
r'\bprev(ious)?\b,\bback\b,\bolder\b,\b[<←≪]\b,'
r'\b(<<|«)\b'),
"A comma-separated list of regexes to use for 'prev' links."),
('find-implementation',
SettingValue(typ.String(
valid_values=typ.ValidValues(
('javascript', "Better but slower"),
('python', "Slightly worse but faster"),
)), 'python'),
"Which implementation to use to find elements to hint."),
('hide-unmatched-rapid-hints',
SettingValue(typ.Bool(), 'true'),
"Controls hiding unmatched hints in rapid mode."),
readonly=readonly
)),
('searchengines', sect.ValueList(
typ.SearchEngineName(), typ.SearchEngineUrl(),
('DEFAULT', 'https://duckduckgo.com/?q={}'),
readonly=readonly
)),
('aliases', sect.ValueList(
typ.String(forbidden=' '), typ.Command(),
readonly=readonly
)),
('colors', sect.KeyValue(
('completion.fg',
SettingValue(typ.QtColor(), 'white'),
"Text color of the completion widget."),
('completion.bg',
SettingValue(typ.QssColor(), '#333333'),
"Background color of the completion widget."),
('completion.alternate-bg',
SettingValue(typ.QssColor(), '#444444'),
"Alternating background color of the completion widget."),
('completion.category.fg',
SettingValue(typ.QtColor(), 'white'),
"Foreground color of completion widget category headers."),
('completion.category.bg',
SettingValue(typ.QssColor(), 'qlineargradient(x1:0, y1:0, x2:0, '
'y2:1, stop:0 #888888, stop:1 #505050)'),
"Background color of the completion widget category headers."),
('completion.category.border.top',
SettingValue(typ.QssColor(), 'black'),
"Top border color of the completion widget category headers."),
('completion.category.border.bottom',
SettingValue(typ.QssColor(), '${completion.category.border.top}'),
"Bottom border color of the completion widget category headers."),
('completion.item.selected.fg',
SettingValue(typ.QtColor(), 'black'),
"Foreground color of the selected completion item."),
('completion.item.selected.bg',
SettingValue(typ.QssColor(), '#e8c000'),
"Background color of the selected completion item."),
('completion.item.selected.border.top',
SettingValue(typ.QssColor(), '#bbbb00'),
"Top border color of the completion widget category headers."),
('completion.item.selected.border.bottom',
SettingValue(
typ.QssColor(), '${completion.item.selected.border.top}'),
"Bottom border color of the selected completion item."),
('completion.match.fg',
SettingValue(typ.QssColor(), '#ff4444'),
"Foreground color of the matched text in the completion."),
('completion.scrollbar.fg',
SettingValue(typ.QssColor(), '${completion.fg}'),
"Color of the scrollbar handle in completion view."),
('completion.scrollbar.bg',
SettingValue(typ.QssColor(), '${completion.bg}'),
"Color of the scrollbar in completion view"),
('statusbar.fg',
SettingValue(typ.QssColor(), 'white'),
"Foreground color of the statusbar."),
('statusbar.bg',
SettingValue(typ.QssColor(), 'black'),
"Background color of the statusbar."),
('statusbar.fg.private',
SettingValue(typ.QssColor(), '${statusbar.fg}'),
"Foreground color of the statusbar in private browsing mode."),
('statusbar.bg.private',
SettingValue(typ.QssColor(), '#666666'),
"Background color of the statusbar in private browsing mode."),
('statusbar.fg.insert',
SettingValue(typ.QssColor(), '${statusbar.fg}'),
"Foreground color of the statusbar in insert mode."),
('statusbar.bg.insert',
SettingValue(typ.QssColor(), 'darkgreen'),
"Background color of the statusbar in insert mode."),
('statusbar.fg.command',
SettingValue(typ.QssColor(), '${statusbar.fg}'),
"Foreground color of the statusbar in command mode."),
('statusbar.bg.command',
SettingValue(typ.QssColor(), '${statusbar.bg}'),
"Background color of the statusbar in command mode."),
('statusbar.fg.command.private',
SettingValue(typ.QssColor(), '${statusbar.fg.private}'),
"Foreground color of the statusbar in private browsing + command "
"mode."),
('statusbar.bg.command.private',
SettingValue(typ.QssColor(), '${statusbar.bg.private}'),
"Background color of the statusbar in private browsing + command "
"mode."),
('statusbar.fg.caret',
SettingValue(typ.QssColor(), '${statusbar.fg}'),
"Foreground color of the statusbar in caret mode."),
('statusbar.bg.caret',
SettingValue(typ.QssColor(), 'purple'),
"Background color of the statusbar in caret mode."),
('statusbar.fg.caret-selection',
SettingValue(typ.QssColor(), '${statusbar.fg}'),
"Foreground color of the statusbar in caret mode with a "
"selection"),
('statusbar.bg.caret-selection',
SettingValue(typ.QssColor(), '#a12dff'),
"Background color of the statusbar in caret mode with a "
"selection"),
('statusbar.progress.bg',
SettingValue(typ.QssColor(), 'white'),
"Background color of the progress bar."),
('statusbar.url.fg',
SettingValue(typ.QssColor(), '${statusbar.fg}'),
"Default foreground color of the URL in the statusbar."),
('statusbar.url.fg.success',
SettingValue(typ.QssColor(), 'white'),
"Foreground color of the URL in the statusbar on successful "
"load (http)."),
('statusbar.url.fg.success.https',
SettingValue(typ.QssColor(), 'lime'),
"Foreground color of the URL in the statusbar on successful "
"load (https)."),
('statusbar.url.fg.error',
SettingValue(typ.QssColor(), 'orange'),
"Foreground color of the URL in the statusbar on error."),
('statusbar.url.fg.warn',
SettingValue(typ.QssColor(), 'yellow'),
"Foreground color of the URL in the statusbar when there's a "
"warning."),
('statusbar.url.fg.hover',
SettingValue(typ.QssColor(), 'aqua'),
"Foreground color of the URL in the statusbar for hovered "
"links."),
('tabs.fg.odd',
SettingValue(typ.QtColor(), 'white'),
"Foreground color of unselected odd tabs."),
('tabs.bg.odd',
SettingValue(typ.QtColor(), 'grey'),
"Background color of unselected odd tabs."),
('tabs.fg.even',
SettingValue(typ.QtColor(), 'white'),
"Foreground color of unselected even tabs."),
('tabs.bg.even',
SettingValue(typ.QtColor(), 'darkgrey'),
"Background color of unselected even tabs."),
('tabs.fg.selected.odd',
SettingValue(typ.QtColor(), 'white'),
"Foreground color of selected odd tabs."),
('tabs.bg.selected.odd',
SettingValue(typ.QtColor(), 'black'),
"Background color of selected odd tabs."),
('tabs.fg.selected.even',
SettingValue(typ.QtColor(), '${tabs.fg.selected.odd}'),
"Foreground color of selected even tabs."),
('tabs.bg.selected.even',
SettingValue(typ.QtColor(), '${tabs.bg.selected.odd}'),
"Background color of selected even tabs."),
('tabs.bg.bar',
SettingValue(typ.QtColor(), '#555555'),
"Background color of the tab bar."),
('tabs.indicator.start',
SettingValue(typ.QtColor(), '#0000aa'),
"Color gradient start for the tab indicator."),
('tabs.indicator.stop',
SettingValue(typ.QtColor(), '#00aa00'),
"Color gradient end for the tab indicator."),
('tabs.indicator.error',
SettingValue(typ.QtColor(), '#ff0000'),
"Color for the tab indicator on errors.."),
('tabs.indicator.system',
SettingValue(typ.ColorSystem(), 'rgb'),
"Color gradient interpolation system for the tab indicator."),
('hints.fg',
SettingValue(typ.QssColor(), 'black'),
"Font color for hints."),
('hints.bg',
SettingValue(typ.QssColor(), 'qlineargradient(x1:0, y1:0, x2:0, '
'y2:1, stop:0 rgba(255, 247, 133, 0.8), '
'stop:1 rgba(255, 197, 66, 0.8))'),
"Background color for hints. Note that you can use a `rgba(...)` "
"value for transparency."),
('hints.fg.match',
SettingValue(typ.QssColor(), 'green'),
"Font color for the matched part of hints."),
('downloads.bg.bar',
SettingValue(typ.QssColor(), 'black'),
"Background color for the download bar."),
('downloads.fg.start',
SettingValue(typ.QtColor(), 'white'),
"Color gradient start for download text."),
('downloads.bg.start',
SettingValue(typ.QtColor(), '#0000aa'),
"Color gradient start for download backgrounds."),
('downloads.fg.stop',
SettingValue(typ.QtColor(), '${downloads.fg.start}'),
"Color gradient end for download text."),
('downloads.bg.stop',
SettingValue(typ.QtColor(), '#00aa00'),
"Color gradient stop for download backgrounds."),
('downloads.fg.system',
SettingValue(typ.ColorSystem(), 'rgb'),
"Color gradient interpolation system for download text."),
('downloads.bg.system',
SettingValue(typ.ColorSystem(), 'rgb'),
"Color gradient interpolation system for download backgrounds."),
('downloads.fg.error',
SettingValue(typ.QtColor(), 'white'),
"Foreground color for downloads with errors."),
('downloads.bg.error',
SettingValue(typ.QtColor(), 'red'),
"Background color for downloads with errors."),
('webpage.bg',
SettingValue(typ.QtColor(none_ok=True), 'white'),
"Background color for webpages if unset (or empty to use the "
"theme's color)"),
('keyhint.fg',
SettingValue(typ.QssColor(), '#FFFFFF'),
"Text color for the keyhint widget."),
('keyhint.fg.suffix',
SettingValue(typ.CssColor(), '#FFFF00'),
"Highlight color for keys to complete the current keychain"),
('keyhint.bg',
SettingValue(typ.QssColor(), 'rgba(0, 0, 0, 80%)'),
"Background color of the keyhint widget."),
('messages.fg.error',
SettingValue(typ.QssColor(), 'white'),
"Foreground color of an error message."),
('messages.bg.error',
SettingValue(typ.QssColor(), 'red'),
"Background color of an error message."),
('messages.border.error',
SettingValue(typ.QssColor(), '#bb0000'),
"Border color of an error message."),
('messages.fg.warning',
SettingValue(typ.QssColor(), 'white'),
"Foreground color a warning message."),
('messages.bg.warning',
SettingValue(typ.QssColor(), 'darkorange'),
"Background color of a warning message."),
('messages.border.warning',
SettingValue(typ.QssColor(), '#d47300'),
"Border color of an error message."),
('messages.fg.info',
SettingValue(typ.QssColor(), 'white'),
"Foreground color an info message."),
('messages.bg.info',
SettingValue(typ.QssColor(), 'black'),
"Background color of an info message."),
('messages.border.info',
SettingValue(typ.QssColor(), '#333333'),
"Border color of an info message."),
('prompts.fg',
SettingValue(typ.QssColor(), 'white'),
"Foreground color for prompts."),
('prompts.bg',
SettingValue(typ.QssColor(), 'darkblue'),
"Background color for prompts."),
('prompts.selected.bg',
SettingValue(typ.QssColor(), '#308cc6'),
"Background color for the selected item in filename prompts."),
readonly=readonly
)),
('fonts', sect.KeyValue(
('_monospace',
SettingValue(typ.Font(), 'xos4 Terminus, Terminus, Monospace, '
'"DejaVu Sans Mono", Monaco, '
'"Bitstream Vera Sans Mono", "Andale Mono", '
'"Courier New", Courier, "Liberation Mono", '
'monospace, Fixed, Consolas, Terminal'),
"Default monospace fonts."),
('completion',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used in the completion widget."),
('completion.category',
SettingValue(typ.Font(), 'bold ${completion}'),
"Font used in the completion categories."),
('tabbar',
SettingValue(typ.QtFont(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used in the tab bar."),
('statusbar',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used in the statusbar."),
('downloads',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used for the downloadbar."),
('hints',
SettingValue(typ.Font(), 'bold 13px ${_monospace}'),
"Font used for the hints."),
('debug-console',
SettingValue(typ.QtFont(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used for the debugging console."),
('web-family-standard',
SettingValue(typ.FontFamily(none_ok=True), ''),
"Font family for standard fonts."),
('web-family-fixed',
SettingValue(typ.FontFamily(none_ok=True), ''),
"Font family for fixed fonts."),
('web-family-serif',
SettingValue(typ.FontFamily(none_ok=True), ''),
"Font family for serif fonts."),
('web-family-sans-serif',
SettingValue(typ.FontFamily(none_ok=True), ''),
"Font family for sans-serif fonts."),
('web-family-cursive',
SettingValue(typ.FontFamily(none_ok=True), ''),
"Font family for cursive fonts."),
('web-family-fantasy',
SettingValue(typ.FontFamily(none_ok=True), ''),
"Font family for fantasy fonts."),
# Defaults for web-size-* from WebEngineSettings::initDefaults in
# qtwebengine/src/core/web_engine_settings.cpp and
# QWebSettings::QWebSettings() in
# qtwebkit/Source/WebKit/qt/Api/qwebsettings.cpp
('web-size-minimum',
SettingValue(typ.Int(minval=0, maxval=MAXVALS['int']), '0'),
"The hard minimum font size."),
# This is 0 as default on QtWebKit, and 6 on QtWebEngine - so let's
# just go for 6 here.
('web-size-minimum-logical',
SettingValue(typ.Int(minval=0, maxval=MAXVALS['int']), '6'),
"The minimum logical font size that is applied when zooming "
"out."),
('web-size-default',
SettingValue(typ.Int(minval=1, maxval=MAXVALS['int']), '16'),
"The default font size for regular text."),
('web-size-default-fixed',
SettingValue(typ.Int(minval=1, maxval=MAXVALS['int']), '13'),
"The default font size for fixed-pitch text."),
('keyhint',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used in the keyhint widget."),
('messages.error',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used for error messages."),
('messages.warning',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used for warning messages."),
('messages.info',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' ${_monospace}'),
"Font used for info messages."),
('prompts',
SettingValue(typ.Font(), DEFAULT_FONT_SIZE + ' sans-serif'),
"Font used for prompts."),
readonly=readonly
)),
])
DATA = data(readonly=True)
KEY_FIRST_COMMENT = """
# vim: ft=conf
#
# In this config file, qutebrowser's key bindings are configured.
# The format looks like this:
#
# [keymode]
#
# command
# keychain
# keychain2
# ...
#
# All blank lines and lines starting with '#' are ignored.
# Inline-comments are not permitted.
#
# keymode is a comma separated list of modes in which the key binding should be
# active. If keymode starts with !, the key binding is active in all modes
# except the listed modes.
#
# For special keys (can't be part of a keychain), enclose them in `<`...`>`.
# For modifiers, you can use either `-` or `+` as delimiters, and these names:
#
# * Control: `Control`, `Ctrl`
# * Meta: `Meta`, `Windows`, `Mod4`
# * Alt: `Alt`, `Mod1`
# * Shift: `Shift`
#
# For simple keys (no `<>`-signs), a capital letter means the key is pressed
# with Shift. For special keys (with `<>`-signs), you need to explicitly add
# `Shift-` to match a key pressed with shift.
#
# Note that default keybindings are always bound, and need to be explicitly
# unbound if you wish to remove them:
#
# <unbound>
# keychain
# keychain2
# ...
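#
# Example (a hypothetical user binding, not one of the defaults):
#
# [normal]
#
# open -t https://www.qutebrowser.org/
#     <Ctrl-Shift-O>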
"""
KEY_SECTION_DESC = {
'all': "Keybindings active in all modes.",
'normal': "Keybindings for normal mode.",
'insert': (
"Keybindings for insert mode.\n"
"Since normal keypresses are passed through, only special keys are "
"supported in this mode.\n"
"Useful hidden commands to map in this section:\n\n"
" * `open-editor`: Open a texteditor with the focused field.\n"
" * `paste-primary`: Paste primary selection at cursor position."),
'hint': (
"Keybindings for hint mode.\n"
"Since normal keypresses are passed through, only special keys are "
"supported in this mode.\n"
"Useful hidden commands to map in this section:\n\n"
" * `follow-hint`: Follow the currently selected hint."),
'passthrough': (
"Keybindings for passthrough mode.\n"
"Since normal keypresses are passed through, only special keys are "
"supported in this mode."),
'command': (
"Keybindings for command mode.\n"
"Since normal keypresses are passed through, only special keys are "
"supported in this mode.\n"
"Useful hidden commands to map in this section:\n\n"
" * `command-history-prev`: Switch to previous command in history.\n"
" * `command-history-next`: Switch to next command in history.\n"
" * `completion-item-focus`: Select another item in completion.\n"
" * `command-accept`: Execute the command currently in the "
"commandline."),
'prompt': (
"Keybindings for prompts in the status line.\n"
"You can bind normal keys in this mode, but they will be only active "
"when a yes/no-prompt is asked. For other prompt modes, you can only "
"bind special keys.\n"
"Useful hidden commands to map in this section:\n\n"
" * `prompt-accept`: Confirm the entered value.\n"
" * `prompt-accept yes`: Answer yes to a yes/no question.\n"
" * `prompt-accept no`: Answer no to a yes/no question."),
'caret': (
""),
}
# Keys which are similar to Return and should be bound by default where Return
# is bound.
RETURN_KEYS = ['<Return>', '<Ctrl-M>', '<Ctrl-J>', '<Shift-Return>', '<Enter>',
'<Shift-Enter>']
KEY_DATA = collections.OrderedDict([
('!normal', collections.OrderedDict([
('leave-mode', ['<Escape>', '<Ctrl-[>']),
])),
('normal', collections.OrderedDict([
('clear-keychain ;; search ;; fullscreen --leave',
['<Escape>', '<Ctrl-[>']),
('set-cmd-text -s :open', ['o']),
('set-cmd-text :open {url:pretty}', ['go']),
('set-cmd-text -s :open -t', ['O']),
('set-cmd-text :open -t -i {url:pretty}', ['gO']),
('set-cmd-text -s :open -b', ['xo']),
('set-cmd-text :open -b -i {url:pretty}', ['xO']),
('set-cmd-text -s :open -w', ['wo']),
('set-cmd-text :open -w {url:pretty}', ['wO']),
('set-cmd-text /', ['/']),
('set-cmd-text ?', ['?']),
('set-cmd-text :', [':']),
('open -t', ['ga', '<Ctrl-T>']),
('open -w', ['<Ctrl-N>']),
('tab-close', ['d', '<Ctrl-W>']),
('tab-close -o', ['D']),
('tab-only', ['co']),
('tab-focus', ['T']),
('tab-move', ['gm']),
('tab-move -', ['gl']),
('tab-move +', ['gr']),
('tab-next', ['J', '<Ctrl-PgDown>']),
('tab-prev', ['K', '<Ctrl-PgUp>']),
('tab-clone', ['gC']),
('reload', ['r', '<F5>']),
('reload -f', ['R', '<Ctrl-F5>']),
('back', ['H', '<back>']),
('back -t', ['th']),
('back -w', ['wh']),
('forward', ['L', '<forward>']),
('forward -t', ['tl']),
('forward -w', ['wl']),
('fullscreen', ['<F11>']),
('hint', ['f']),
('hint all tab', ['F']),
('hint all window', ['wf']),
('hint all tab-bg', [';b']),
('hint all tab-fg', [';f']),
('hint all hover', [';h']),
('hint images', [';i']),
('hint images tab', [';I']),
('hint links fill :open {hint-url}', [';o']),
('hint links fill :open -t -i {hint-url}', [';O']),
('hint links yank', [';y']),
('hint links yank-primary', [';Y']),
('hint --rapid links tab-bg', [';r']),
('hint --rapid links window', [';R']),
('hint links download', [';d']),
('hint inputs', [';t']),
('scroll left', ['h']),
('scroll down', ['j']),
('scroll up', ['k']),
('scroll right', ['l']),
('undo', ['u', '<Ctrl-Shift-T>']),
('scroll-perc 0', ['gg']),
('scroll-perc', ['G']),
('search-next', ['n']),
('search-prev', ['N']),
('enter-mode insert', ['i']),
('enter-mode caret', ['v']),
('enter-mode set_mark', ['`']),
('enter-mode jump_mark', ["'"]),
('yank', ['yy']),
('yank -s', ['yY']),
('yank title', ['yt']),
('yank title -s', ['yT']),
('yank domain', ['yd']),
('yank domain -s', ['yD']),
('yank pretty-url', ['yp']),
('yank pretty-url -s', ['yP']),
('open -- {clipboard}', ['pp']),
('open -- {primary}', ['pP']),
('open -t -- {clipboard}', ['Pp']),
('open -t -- {primary}', ['PP']),
('open -w -- {clipboard}', ['wp']),
('open -w -- {primary}', ['wP']),
('quickmark-save', ['m']),
('set-cmd-text -s :quickmark-load', ['b']),
('set-cmd-text -s :quickmark-load -t', ['B']),
('set-cmd-text -s :quickmark-load -w', ['wb']),
('bookmark-add', ['M']),
('set-cmd-text -s :bookmark-load', ['gb']),
('set-cmd-text -s :bookmark-load -t', ['gB']),
('set-cmd-text -s :bookmark-load -w', ['wB']),
('save', ['sf']),
('set-cmd-text -s :set', ['ss']),
('set-cmd-text -s :set -t', ['sl']),
('set-cmd-text -s :bind', ['sk']),
('zoom-out', ['-']),
('zoom-in', ['+']),
('zoom', ['=']),
('navigate prev', ['[[']),
('navigate next', [']]']),
('navigate prev -t', ['{{']),
('navigate next -t', ['}}']),
('navigate up', ['gu']),
('navigate up -t', ['gU']),
('navigate increment', ['<Ctrl-A>']),
('navigate decrement', ['<Ctrl-X>']),
('inspector', ['wi']),
('download', ['gd']),
('download-cancel', ['ad']),
('download-clear', ['cd']),
('view-source', ['gf']),
('set-cmd-text -s :buffer', ['gt']),
('tab-focus last', ['<Ctrl-Tab>', '<Ctrl-6>', '<Ctrl-^>']),
('enter-mode passthrough', ['<Ctrl-V>']),
('quit', ['<Ctrl-Q>', 'ZQ']),
('wq', ['ZZ']),
('scroll-page 0 1', ['<Ctrl-F>']),
('scroll-page 0 -1', ['<Ctrl-B>']),
('scroll-page 0 0.5', ['<Ctrl-D>']),
('scroll-page 0 -0.5', ['<Ctrl-U>']),
('tab-focus 1', ['<Alt-1>', 'g0', 'g^']),
('tab-focus 2', ['<Alt-2>']),
('tab-focus 3', ['<Alt-3>']),
('tab-focus 4', ['<Alt-4>']),
('tab-focus 5', ['<Alt-5>']),
('tab-focus 6', ['<Alt-6>']),
('tab-focus 7', ['<Alt-7>']),
('tab-focus 8', ['<Alt-8>']),
('tab-focus -1', ['<Alt-9>', 'g$']),
('home', ['<Ctrl-h>']),
('stop', ['<Ctrl-s>']),
('print', ['<Ctrl-Alt-p>']),
('open qute://settings', ['Ss']),
('follow-selected', RETURN_KEYS),
('follow-selected -t', ['<Ctrl-Return>', '<Ctrl-Enter>']),
('repeat-command', ['.']),
('tab-pin', ['<Ctrl-p>']),
('record-macro', ['q']),
('run-macro', ['@']),
])),
('insert', collections.OrderedDict([
('open-editor', ['<Ctrl-E>']),
('insert-text {primary}', ['<Shift-Ins>']),
])),
('hint', collections.OrderedDict([
('follow-hint', RETURN_KEYS),
('hint --rapid links tab-bg', ['<Ctrl-R>']),
('hint links', ['<Ctrl-F>']),
('hint all tab-bg', ['<Ctrl-B>']),
])),
('passthrough', {}),
('command', collections.OrderedDict([
('command-history-prev', ['<Ctrl-P>']),
('command-history-next', ['<Ctrl-N>']),
('completion-item-focus prev', ['<Shift-Tab>', '<Up>']),
('completion-item-focus next', ['<Tab>', '<Down>']),
('completion-item-focus next-category', ['<Ctrl-Tab>']),
('completion-item-focus prev-category', ['<Ctrl-Shift-Tab>']),
('completion-item-del', ['<Ctrl-D>']),
('command-accept', RETURN_KEYS),
])),
('prompt', collections.OrderedDict([
('prompt-accept', RETURN_KEYS),
('prompt-accept yes', ['y']),
('prompt-accept no', ['n']),
('prompt-open-download', ['<Ctrl-X>']),
('prompt-item-focus prev', ['<Shift-Tab>', '<Up>']),
('prompt-item-focus next', ['<Tab>', '<Down>']),
])),
('command,prompt', collections.OrderedDict([
('rl-backward-char', ['<Ctrl-B>']),
('rl-forward-char', ['<Ctrl-F>']),
('rl-backward-word', ['<Alt-B>']),
('rl-forward-word', ['<Alt-F>']),
('rl-beginning-of-line', ['<Ctrl-A>']),
('rl-end-of-line', ['<Ctrl-E>']),
('rl-unix-line-discard', ['<Ctrl-U>']),
('rl-kill-line', ['<Ctrl-K>']),
('rl-kill-word', ['<Alt-D>']),
('rl-unix-word-rubout', ['<Ctrl-W>']),
('rl-backward-kill-word', ['<Alt-Backspace>']),
('rl-yank', ['<Ctrl-Y>']),
('rl-delete-char', ['<Ctrl-?>']),
('rl-backward-delete-char', ['<Ctrl-H>']),
])),
('caret', collections.OrderedDict([
('toggle-selection', ['v', '<Space>']),
('drop-selection', ['<Ctrl-Space>']),
('enter-mode normal', ['c']),
('move-to-next-line', ['j']),
('move-to-prev-line', ['k']),
('move-to-next-char', ['l']),
('move-to-prev-char', ['h']),
('move-to-end-of-word', ['e']),
('move-to-next-word', ['w']),
('move-to-prev-word', ['b']),
('move-to-start-of-next-block', [']']),
('move-to-start-of-prev-block', ['[']),
('move-to-end-of-next-block', ['}']),
('move-to-end-of-prev-block', ['{']),
('move-to-start-of-line', ['0']),
('move-to-end-of-line', ['$']),
('move-to-start-of-document', ['gg']),
('move-to-end-of-document', ['G']),
('yank selection -s', ['Y']),
('yank selection', ['y'] + RETURN_KEYS),
('scroll left', ['H']),
('scroll down', ['J']),
('scroll up', ['K']),
('scroll right', ['L']),
])),
])
# A list of (regex, replacement) tuples of changed key commands.
CHANGED_KEY_COMMANDS = [
(re.compile(r'^open -([twb]) about:blank$'), r'open -\1'),
(re.compile(r'^download-page$'), r'download'),
(re.compile(r'^cancel-download$'), r'download-cancel'),
(re.compile(r"""^search (''|"")$"""),
r'clear-keychain ;; search ;; fullscreen --leave'),
(re.compile(r'^search$'),
r'clear-keychain ;; search ;; fullscreen --leave'),
(re.compile(r'^clear-keychain ;; search$'),
r'clear-keychain ;; search ;; fullscreen --leave'),
(re.compile(r"""^set-cmd-text ['"](.*) ['"]$"""), r'set-cmd-text -s \1'),
(re.compile(r"""^set-cmd-text ['"](.*)['"]$"""), r'set-cmd-text \1'),
(re.compile(r"^hint links rapid$"), r'hint --rapid links tab-bg'),
(re.compile(r"^hint links rapid-win$"), r'hint --rapid links window'),
(re.compile(r'^scroll -50 0$'), r'scroll left'),
(re.compile(r'^scroll 0 50$'), r'scroll down'),
(re.compile(r'^scroll 0 -50$'), r'scroll up'),
(re.compile(r'^scroll 50 0$'), r'scroll right'),
(re.compile(r'^scroll ([-\d]+ [-\d]+)$'), r'scroll-px \1'),
(re.compile(r'^search *;; *clear-keychain$'),
r'clear-keychain ;; search ;; fullscreen --leave'),
(re.compile(r'^clear-keychain *;; *leave-mode$'), r'leave-mode'),
(re.compile(r'^download-remove --all$'), r'download-clear'),
(re.compile(r'^hint links fill "([^"]*)"$'), r'hint links fill \1'),
(re.compile(r'^yank -t(\S+)'), r'yank title -\1'),
(re.compile(r'^yank -t'), r'yank title'),
(re.compile(r'^yank -d(\S+)'), r'yank domain -\1'),
(re.compile(r'^yank -d'), r'yank domain'),
(re.compile(r'^yank -p(\S+)'), r'yank pretty-url -\1'),
(re.compile(r'^yank -p'), r'yank pretty-url'),
(re.compile(r'^yank-selected -p'), r'yank selection -s'),
(re.compile(r'^yank-selected'), r'yank selection'),
(re.compile(r'^paste$'), r'open -- {clipboard}'),
(re.compile(r'^paste -s$'), r'open -- {primary}'),
(re.compile(r'^paste -([twb])$'), r'open -\1 -- {clipboard}'),
(re.compile(r'^paste -([twb])s$'), r'open -\1 -- {primary}'),
(re.compile(r'^paste -s([twb])$'), r'open -\1 -- {primary}'),
(re.compile(r'^completion-item-next'), r'completion-item-focus next'),
(re.compile(r'^completion-item-prev'), r'completion-item-focus prev'),
(re.compile(r'^open {clipboard}$'), r'open -- {clipboard}'),
(re.compile(r'^open -([twb]) {clipboard}$'), r'open -\1 -- {clipboard}'),
(re.compile(r'^open {primary}$'), r'open -- {primary}'),
(re.compile(r'^open -([twb]) {primary}$'), r'open -\1 -- {primary}'),
(re.compile(r'^paste-primary$'), r'insert-text {primary}'),
(re.compile(r'^set-cmd-text -s :search$'), r'set-cmd-text /'),
(re.compile(r'^set-cmd-text -s :search -r$'), r'set-cmd-text ?'),
(re.compile(r'^set-cmd-text -s :$'), r'set-cmd-text :'),
(re.compile(r'^set-cmd-text -s :set keybind$'), r'set-cmd-text -s :bind'),
(re.compile(r'^prompt-yes$'), r'prompt-accept yes'),
(re.compile(r'^prompt-no$'), r'prompt-accept no'),
(re.compile(r'^tab-close -l$'), r'tab-close --prev'),
(re.compile(r'^tab-close --left$'), r'tab-close --prev'),
(re.compile(r'^tab-close -r$'), r'tab-close --next'),
(re.compile(r'^tab-close --right$'), r'tab-close --next'),
(re.compile(r'^tab-only -l$'), r'tab-only --prev'),
(re.compile(r'^tab-only --left$'), r'tab-only --prev'),
(re.compile(r'^tab-only -r$'), r'tab-only --next'),
(re.compile(r'^tab-only --right$'), r'tab-only --next'),
]
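# A minimal sketch (not part of the original module) of how these migration
# rules could be applied to a single old-style key command; the helper name
# `migrate_command` is an assumption for illustration only:
#
#     def migrate_command(command):
#         for pattern, replacement in CHANGED_KEY_COMMANDS:
#             new_command = pattern.sub(replacement, command)
#             if new_command != command:
#                 return new_command
#         return command
#
#     migrate_command('paste -t')  # -> 'open -t -- {clipboard}'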
| gpl-3.0 | -5,174,265,861,828,115,000 | 40.019491 | 79 | 0.522313 | false |
cuauv/software | shm_tools/shmlog/util/logbenchmark.py | 1 | 1703 | #!/usr/bin/env python2
from shm_tools.shmlog.parser import LogParser
import argparse
import sys
from time import time
'''
Utility for benchmarking log file access
Jeff Heidel 2013
'''
GROUP = 0xFFFF
END_STBL = 0xFFFFFFFFFFFFFFFF
ap = argparse.ArgumentParser(description='Benchmark a log file playback.')
ap.add_argument('log file', type=str, help='filename of the log file')
args = vars(ap.parse_args())
filename = args['log file']
# Verify that the log file exists and is readable.
try:
open(filename, 'r').close()
except IOError:
print "Input filename " + filename + " not found."
sys.exit(1)
st = time()
try:
parse = LogParser(filename, parse_file_end=False)
except Exception:
print "There was a problem with the log header."
sys.exit(1)
et = time() - st
print "Log header parse took %.2f ms" % (et * 1e3)
# Starting log benchmark
st = time()
total_time = 0
all_type_times = {}
slices = 0
# Continue parsing slices until the end of the log (or an error) is reached.
while True:
logslice = parse.parse_one_slice(benchmark=True)
if logslice is None:
break
(dt, vc, parse_time, type_times) = logslice
total_time += parse_time
for k in type_times.keys():
if k not in all_type_times:
all_type_times[k] = type_times[k]
else:
all_type_times[k] = all_type_times[k] + type_times[k]
slices += 1
et = time() - st
print "Complete log parse took %.2f sec" % et
print "Parsed %d slices" % slices
print "%15s %11s" % ("Var Type", "Time")
print "-"*28
for k, v in all_type_times.items():
print "%15s: %7.2f sec" % (k.__name__, v)
print "-"*28
print "Log benchmark complete"
| bsd-3-clause | -3,123,619,079,962,118,700 | 21.706667 | 74 | 0.660012 | false |
hayd/leanpub | leanpub/watcher.py | 1 | 1473 | from watchdog.observers import Observer
from watchdog.watchmedo import observe_with
from leanpub.shellcommandtrick import ShellCommandTrick
def pandoc_cmd(book):
"""Create the command to convert the files (listed in `book`)
into a pdf. This is wrapped with echos that the build has started and
is complete."""
with open(book + ".txt") as f:
return ('echo "Starting build of {book}.pdf"'
" && pandoc {files} "
"-f markdown-smart --table-of-contents --top-level-division=chapter -o {book}.pdf"
' && echo " {book}.pdf created."'
).format(book=book,
files=f.read().replace("\n", " "))
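# For illustration, with a Book.txt listing ch1.md and ch2.md,
# pandoc_cmd("Book") produces roughly:
#     echo "Starting build of Book.pdf" && pandoc ch1.md ch2.md -f markdown-smart
#     --table-of-contents --top-level-division=chapter -o Book.pdf
#     && echo " Book.pdf created."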
try:
MAKE_BOOK = pandoc_cmd("Book")
except IOError:
print("Can't find Book.txt in directory.")
exit(1)
try:
MAKE_SAMPLE = pandoc_cmd("Sample")
except IOError:
# Sample.txt is optional.
MAKE_SAMPLE = ""
# TODO watch images
PATTERNS = ["*.markdown", "*.md", "Book.txt", "Sample.txt"]
DIRECTORIES = "."
RECURSIVE = False
TIMEOUT = 1.0
def watch():
"""Watch for changes to the markdown files, and build the book and the
sample pdf upon each change."""
handler = ShellCommandTrick(shell_command=MAKE_BOOK + " && " + MAKE_SAMPLE,
patterns=PATTERNS,
terminate_on_event=True)
observer = Observer(timeout=TIMEOUT)
observe_with(observer, handler, DIRECTORIES, RECURSIVE)
| mit | -9,163,009,473,004,781,000 | 31.021739 | 98 | 0.60964 | false |
bsmithgall/wexplorer | wexplorer/explorer/models.py | 1 | 2184 | # -*- coding: utf-8 -*-
from wexplorer.database import (
Column,
db,
Model
)
from wexplorer.extensions import bcrypt
class FileUploadPassword(Model):
__tablename__ = 'file_upload_password'
password = Column(db.String(128), nullable=False, primary_key=True)
def __init__(self, password):
if password:
self.password = bcrypt.generate_password_hash(password)
else:
raise ValueError('File Upload Password must be supplied')
class LastUpdated(Model):
__tablename__ = 'last_updated'
last_updated = Column(db.DateTime, primary_key=True)
def __init__(self, last_updated):
self.last_updated = last_updated
class Company(Model):
__tablename__ = 'company'
row_id = Column(db.Integer)
company_id = Column(db.String(32), primary_key=True)
company = Column(db.String(255))
bus_type = Column(db.String(255))
company_contacts = db.relationship('CompanyContact', backref='company', lazy='joined')
contracts = db.relationship('Contract', backref='company', lazy='joined')
class CompanyContact(Model):
__tablename__ = 'company_contact'
row_id = Column(db.Integer)
company_contact_id = Column(db.String(32), primary_key=True)
contact_name = Column(db.String(255))
address_1 = Column(db.String(255))
address_2 = Column(db.String(255))
phone_number = Column(db.String(255))
fax_number = Column(db.String(255))
email = Column(db.String(255))
fin = Column(db.String(255))
company_id = Column(db.String(32), db.ForeignKey('company.company_id'))
class Contract(Model):
__tablename__ = 'contract'
row_id = Column(db.Integer)
contract_id = Column(db.String(32), primary_key=True)
description = Column(db.Text)
notes = Column(db.Text)
county = Column(db.String(255))
type_of_contract = Column(db.String(255))
pa = Column(db.String(255))
expiration = Column(db.DateTime)
contract_number = Column(db.String(255))
contract_sub_number = Column(db.Integer)
controller_number = Column(db.Integer)
commcode = Column(db.Integer)
company_id = Column(db.String(32), db.ForeignKey('company.company_id'))
| bsd-3-clause | 3,477,530,918,630,866,400 | 33.125 | 90 | 0.665293 | false |
jolyonb/edx-platform | lms/djangoapps/survey/tests/test_models.py | 1 | 10963 | """
Python tests for the Survey models
"""
from collections import OrderedDict
import ddt
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.test import TestCase
from django.test.client import Client
from survey.exceptions import SurveyFormNameAlreadyExists, SurveyFormNotFound
from survey.models import SurveyAnswer, SurveyForm
@ddt.ddt
class SurveyModelsTests(TestCase):
"""
All tests for the Survey models.py file
"""
def setUp(self):
"""
Set up the test data used in the specific tests
"""
super(SurveyModelsTests, self).setUp()
self.client = Client()
# Create two accounts
self.password = 'abc'
self.student = User.objects.create_user('student', '[email protected]', self.password)
self.student2 = User.objects.create_user('student2', '[email protected]', self.password)
self.test_survey_name = 'TestForm'
self.test_form = '<li><input name="field1" /></li><li><input name="field2" /></li><li><select name="ddl"><option>1</option></select></li>'
self.test_form_update = '<input name="field1" />'
self.course_id = 'foo/bar/baz'
self.student_answers = OrderedDict({
'field1': 'value1',
'field2': 'value2',
})
self.student_answers_update = OrderedDict({
'field1': 'value1-updated',
'field2': 'value2-updated',
})
self.student_answers_update2 = OrderedDict({
'field1': 'value1-updated2',
})
self.student2_answers = OrderedDict({
'field1': 'value3'
})
def _create_test_survey(self):
"""
Helper method to set up test form
"""
return SurveyForm.create(self.test_survey_name, self.test_form)
def test_form_not_found_raise_exception(self):
"""
Asserts that when looking up a form that does not exist
"""
with self.assertRaises(SurveyFormNotFound):
SurveyForm.get(self.test_survey_name)
def test_form_not_found_none(self):
"""
Asserts that when looking up a form that does not exist
"""
self.assertIsNone(SurveyForm.get(self.test_survey_name, throw_if_not_found=False))
def test_create_new_form(self):
"""
Make sure we can create a new form a look it up
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
new_survey = SurveyForm.get(self.test_survey_name)
self.assertIsNotNone(new_survey)
self.assertEqual(new_survey.form, self.test_form)
def test_unicode_rendering(self):
"""
See if the survey form returns the expected unicode string
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
self.assertEquals(unicode(survey), self.test_survey_name)
def test_create_form_with_malformed_html(self):
"""
Make sure that if a SurveyForm is saved with unparseable html
an exception is thrown
"""
with self.assertRaises(ValidationError):
SurveyForm.create('badform', '<input name="oops" /><<<>')
def test_create_form_with_no_fields(self):
"""
Make sure that if a SurveyForm is saved without any named fields
an exception is thrown
"""
with self.assertRaises(ValidationError):
SurveyForm.create('badform', '<p>no input fields here</p>')
with self.assertRaises(ValidationError):
SurveyForm.create('badform', '<input id="input_without_name" />')
def test_create_form_already_exists(self):
"""
Make sure we can't create two surveys of the same name
"""
self._create_test_survey()
with self.assertRaises(SurveyFormNameAlreadyExists):
self._create_test_survey()
def test_create_form_update_existing(self):
"""
Make sure we can update an existing form
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey = SurveyForm.create(self.test_survey_name, self.test_form_update, update_if_exists=True)
self.assertIsNotNone(survey)
survey = SurveyForm.get(self.test_survey_name)
self.assertIsNotNone(survey)
self.assertEquals(survey.form, self.test_form_update)
def test_survey_has_no_answers(self):
"""
Create a new survey and assert that there are no answers to that survey
"""
survey = self._create_test_survey()
self.assertEquals(len(survey.get_answers()), 0)
def test_user_has_no_answers(self):
"""
Create a new survey with no answers in it and check that a user is determined to not have answered it
"""
survey = self._create_test_survey()
self.assertFalse(survey.has_user_answered_survey(self.student))
self.assertEquals(len(survey.get_answers()), 0)
@ddt.data(None, 'foo/bar/baz')
def test_single_user_answers(self, course_id):
"""
Create a new survey and add answers to it
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey.save_user_answers(self.student, self.student_answers, course_id)
self.assertTrue(survey.has_user_answered_survey(self.student))
all_answers = survey.get_answers()
self.assertEquals(len(all_answers.keys()), 1)
self.assertIn(self.student.id, all_answers)
self.assertEquals(all_answers[self.student.id], self.student_answers)
answers = survey.get_answers(self.student)
self.assertEquals(len(answers.keys()), 1)
self.assertIn(self.student.id, answers)
self.assertEquals(all_answers[self.student.id], self.student_answers)
# check that the course_id was set
answer_objs = SurveyAnswer.objects.filter(
user=self.student,
form=survey
)
for answer_obj in answer_objs:
if course_id:
self.assertEquals(unicode(answer_obj.course_key), course_id)
else:
self.assertIsNone(answer_obj.course_key)
def test_multiple_user_answers(self):
"""
Create a new survey and add answers to it
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey.save_user_answers(self.student, self.student_answers, self.course_id)
survey.save_user_answers(self.student2, self.student2_answers, self.course_id)
self.assertTrue(survey.has_user_answered_survey(self.student))
all_answers = survey.get_answers()
self.assertEquals(len(all_answers.keys()), 2)
self.assertIn(self.student.id, all_answers)
self.assertIn(self.student2.id, all_answers)
self.assertEquals(all_answers[self.student.id], self.student_answers)
self.assertEquals(all_answers[self.student2.id], self.student2_answers)
answers = survey.get_answers(self.student)
self.assertEquals(len(answers.keys()), 1)
self.assertIn(self.student.id, answers)
self.assertEquals(answers[self.student.id], self.student_answers)
answers = survey.get_answers(self.student2)
self.assertEquals(len(answers.keys()), 1)
self.assertIn(self.student2.id, answers)
self.assertEquals(answers[self.student2.id], self.student2_answers)
def test_update_answers(self):
"""
Make sure the update case works
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey.save_user_answers(self.student, self.student_answers, self.course_id)
answers = survey.get_answers(self.student)
self.assertEquals(len(answers.keys()), 1)
self.assertIn(self.student.id, answers)
self.assertEquals(answers[self.student.id], self.student_answers)
# update
survey.save_user_answers(self.student, self.student_answers_update, self.course_id)
answers = survey.get_answers(self.student)
self.assertEquals(len(answers.keys()), 1)
self.assertIn(self.student.id, answers)
self.assertEquals(answers[self.student.id], self.student_answers_update)
# update with just a subset of the origin dataset
survey.save_user_answers(self.student, self.student_answers_update2, self.course_id)
answers = survey.get_answers(self.student)
self.assertEquals(len(answers.keys()), 1)
self.assertIn(self.student.id, answers)
self.assertEquals(answers[self.student.id], self.student_answers_update2)
def test_limit_num_users(self):
"""
Verify that the limit_num_users parameter to get_answers()
works as intended
"""
survey = self._create_test_survey()
survey.save_user_answers(self.student, self.student_answers, self.course_id)
survey.save_user_answers(self.student2, self.student2_answers, self.course_id)
# even though we have 2 users submitted answers
# limit the result set to just 1
all_answers = survey.get_answers(limit_num_users=1)
self.assertEquals(len(all_answers.keys()), 1)
def test_get_field_names(self):
"""
Create a new survey and add answers to it
"""
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey.save_user_answers(self.student, self.student_answers, self.course_id)
survey.save_user_answers(self.student2, self.student2_answers, self.course_id)
names = survey.get_field_names()
self.assertEqual(sorted(names), ['ddl', 'field1', 'field2'])
def test_retire_user_successful(self):
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey.save_user_answers(self.student, self.student_answers, self.course_id)
survey.save_user_answers(self.student2, self.student2_answers, self.course_id)
retire_result = SurveyAnswer.retire_user(self.student.id)
self.assertTrue(retire_result)
answers = survey.get_answers(self.student)
blanked_out_student_answser = {key: '' for key in self.student_answers}
self.assertEquals(answers[self.student.id], blanked_out_student_answser)
self.assertEquals(survey.get_answers(self.student2)[self.student2.id], self.student2_answers)
def test_retire_user_not_exist(self):
survey = self._create_test_survey()
self.assertIsNotNone(survey)
survey.save_user_answers(self.student, self.student_answers, self.course_id)
retire_result = SurveyAnswer.retire_user(self.student2.id)
self.assertFalse(retire_result)
answers = survey.get_answers(self.student)
self.assertEquals(answers[self.student.id], self.student_answers)
| agpl-3.0 | 5,778,791,308,947,804,000 | 34.594156 | 146 | 0.641978 | false |
nelhage/taktician | python/scripts/allmoves.py | 1 | 1177 | import tak.ptn
import sqlite3
import os.path
import collections
import traceback
SIZE = 5
GAMES_DIR = os.path.join(os.path.dirname(__file__), "../../games")
DB = sqlite3.connect(os.path.join(GAMES_DIR, "games.db"))
cur = DB.cursor()
cur.execute('select day, id from games where size = ?', (SIZE,))
corpus = collections.Counter()
while True:
row = cur.fetchone()
if not row:
break
day, id = None, None
try:
day, id = row
text = open(os.path.join(GAMES_DIR, day, str(id) + ".ptn")).read()
ptn = tak.ptn.PTN.parse(text)
for m in ptn.moves:
corpus[m] += 1
except Exception as e:
print("{0}/{1}: {2}".format(day, id, e))
traceback.print_exc()
continue
all_moves = set(tak.enumerate_moves(SIZE))
seen_moves = set(corpus.keys())
total = sum(corpus.values())
print("observed {0} unique moves".format(len(corpus)))
print("failed to generate: ", [tak.ptn.format_move(m) for m in seen_moves - all_moves])
print("did not observe: ", [tak.ptn.format_move(m) for m in all_moves - seen_moves])
for k, v in sorted(corpus.items(), key=lambda p:-p[1])[:50]:
print("{0:6.2f}% ({2:6d}) {1}".format(100*v/total, tak.ptn.format_move(k), v))
| mit | -6,197,537,536,854,740,000 | 27.707317 | 87 | 0.643161 | false |
andymckay/zamboni | mkt/feed/tests/test_models.py | 1 | 9612 | # -*- coding: utf-8 -*-
import random
import string
from itertools import cycle
from django.core.exceptions import ValidationError
import mock
from nose.tools import eq_, ok_
import amo.tests
import mkt.feed.constants as feed
from mkt.feed.models import (FeedApp, FeedBrand, FeedCollection, FeedItem,
FeedShelf)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
class FeedTestMixin(object):
fixtures = fixture('webapp_337141')
def feed_app_factory(self, app_id=None, app_type=feed.FEEDAPP_ICON,
**kwargs):
count = FeedApp.objects.count()
return FeedApp.objects.create(
app_id=app_id or Webapp.objects.get(id=337141).id,
slug='feed-app-%s' % count, type=app_type, **kwargs)
def feed_brand_factory(self, app_ids=None, layout=feed.BRAND_GRID,
brand_type='mystery-app', **kwargs):
count = FeedBrand.objects.count()
brand = FeedBrand.objects.create(slug='feed-brand-%s' % count,
type=brand_type, **kwargs)
brand.set_apps(app_ids or [337141])
return brand
def feed_collection_factory(self, app_ids=None, name='test-coll',
coll_type=feed.COLLECTION_LISTING,
grouped=False, **kwargs):
count = FeedCollection.objects.count()
coll = FeedCollection.objects.create(
name=name, slug='feed-coll-%s' % count, type=coll_type, **kwargs)
app_ids = app_ids or [337141]
coll.set_apps(app_ids)
if grouped:
for i, mem in enumerate(coll.feedcollectionmembership_set.all()):
if i == len(app_ids) - 1 and len(app_ids) > 1:
mem.group = 'second-group'
else:
mem.group = 'first-group'
mem.save()
return coll
def feed_shelf_factory(self, app_ids=None, name='test-shelf',
carrier=1, region=1, **kwargs):
count = FeedShelf.objects.count()
shelf = FeedShelf.objects.create(
name=name, slug='feed-shelf-%s' % count, carrier=carrier,
region=region, **kwargs)
shelf.set_apps(app_ids or [337141])
return shelf
def feed_item_factory(self, carrier=1, region=1,
item_type=feed.FEED_TYPE_APP, **kw):
"""Creates a single FeedItem of any feed element type specified."""
feed_item = FeedItem(carrier=carrier, region=region,
item_type=item_type, **kw)
if item_type == feed.FEED_TYPE_APP:
feed_item.app = self.feed_app_factory()
elif item_type == feed.FEED_TYPE_BRAND:
feed_item.brand = self.feed_brand_factory()
elif item_type == feed.FEED_TYPE_COLL:
feed_item.collection = self.feed_collection_factory()
elif item_type == feed.FEED_TYPE_SHELF:
feed_item.shelf = self.feed_shelf_factory()
feed_item.save()
return feed_item
def feed_factory(self, carrier=1, region=1, item_types=None, num_items=None):
"""
Iterates over a list of feed element types and creates `num_items`
FeedItems, cycling over those types. By default, creates one of each
type. Returns a list of FeedItems.
"""
item_types = item_types or [feed.FEED_TYPE_APP, feed.FEED_TYPE_BRAND,
feed.FEED_TYPE_COLL, feed.FEED_TYPE_SHELF]
if not num_items:
num_items = len(item_types)
item_types = cycle(item_types)
feed_items = []
for i in xrange(num_items):
feed_items.append(
self.feed_item_factory(carrier=carrier, region=region,
item_type=item_types.next()))
return feed_items
class FeedAppMixin(object):
fixtures = fixture('webapp_337141')
def setUp(self):
self.feedapp_data = {
'app': 337141,
'background_color': '#B90000',
'type': 'icon',
'description': {
'en-US': u'pan-fried potatoes'
},
'slug': self.random_slug()
}
self.pullquote_data = {
'pullquote_text': {'en-US': u'The bést!'},
'pullquote_rating': 4,
'pullquote_attribution': u'Jamés Bod'
}
self.feedapps = []
super(FeedAppMixin, self).setUp()
def random_slug(self):
return ''.join(random.choice(string.ascii_uppercase + string.digits)
for _ in range(10)).lower()
def create_feedapps(self, n=2, **kwargs):
data = dict(self.feedapp_data)
data.update(kwargs)
if not isinstance(data['app'], Webapp):
data['app'] = Webapp.objects.get(pk=data['app'])
feedapps = []
for idx in xrange(n):
data['slug'] = self.random_slug()
feedapps.append(FeedApp.objects.create(**data))
self.feedapps.extend(feedapps)
return feedapps
class TestFeedApp(FeedAppMixin, amo.tests.TestCase):
def setUp(self):
super(TestFeedApp, self).setUp()
self.feedapp_data.update(**self.pullquote_data)
self.feedapp_data['app'] = (
Webapp.objects.get(pk=self.feedapp_data['app']))
def test_create(self):
feedapp = FeedApp(**self.feedapp_data)
ok_(isinstance(feedapp, FeedApp))
feedapp.clean_fields() # Tests validators on fields.
feedapp.clean() # Test model validation.
feedapp.save() # Tests required fields.
def test_missing_pullquote_rating(self):
del self.feedapp_data['pullquote_rating']
self.test_create()
def test_missing_pullquote_text(self):
del self.feedapp_data['pullquote_text']
with self.assertRaises(ValidationError):
self.test_create()
def test_pullquote_rating_fractional(self):
"""
This passes because PositiveSmallIntegerField will coerce the float
into an int, which effectively returns math.floor(value).
"""
self.feedapp_data['pullquote_rating'] = 4.5
self.test_create()
def test_bad_pullquote_rating_low(self):
self.feedapp_data['pullquote_rating'] = -1
with self.assertRaises(ValidationError):
self.test_create()
def test_bad_pullquote_rating_high(self):
self.feedapp_data['pullquote_rating'] = 6
with self.assertRaises(ValidationError):
self.test_create()
class TestFeedBrand(amo.tests.TestCase):
def setUp(self):
super(TestFeedBrand, self).setUp()
self.apps = [amo.tests.app_factory() for i in xrange(3)]
self.brand = None
self.brand_data = {
'slug': 'potato',
'type': 1,
'layout': 1
}
def test_create(self):
self.brand = FeedBrand.objects.create(**self.brand_data)
ok_(isinstance(self.brand, FeedBrand))
for name, value in self.brand_data.iteritems():
eq_(getattr(self.brand, name), value, name)
def test_add_app(self):
self.test_create()
m = self.brand.add_app(self.apps[0], order=3)
ok_(self.brand.apps(), [self.apps[0]])
eq_(m.order, 3)
eq_(m.app, self.apps[0])
eq_(m.obj, self.brand)
def test_add_app_sort_order_respected(self):
self.test_add_app()
self.brand.add_app(self.apps[1], order=1)
ok_(self.brand.apps(), [self.apps[1], self.apps[0]])
def test_add_app_no_order_passed(self):
self.test_add_app()
m = self.brand.add_app(self.apps[1])
ok_(m.order, 4)
def test_remove_app(self):
self.test_add_app()
ok_(self.apps[0] in self.brand.apps())
removed = self.brand.remove_app(self.apps[0])
ok_(removed)
ok_(self.apps[0] not in self.brand.apps())
def test_remove_app_not_in_brand(self):
self.test_remove_app()
removed = self.brand.remove_app(self.apps[1])
ok_(not removed)
def test_set_apps(self):
self.test_add_app_sort_order_respected()
new_apps = [app.pk for app in self.apps][::-1]
self.brand.set_apps(new_apps)
eq_(new_apps, [app.pk for app in self.brand.apps().no_cache()])
def test_set_apps_nonexistant(self):
self.test_add_app_sort_order_respected()
with self.assertRaises(Webapp.DoesNotExist):
self.brand.set_apps([99999])
class TestESReceivers(FeedTestMixin, amo.tests.TestCase):
@mock.patch('mkt.search.indexers.BaseIndexer.index_ids')
def test_update_search_index(self, update_mock):
feed_items = self.feed_factory()
calls = [update_call[0][0][0] for update_call in
update_mock.call_args_list]
for feed_item in feed_items:
assert feed_item.id in calls
assert getattr(feed_item, feed_item.item_type).id in calls
@mock.patch('mkt.search.indexers.BaseIndexer.unindex')
def test_delete_search_index(self, delete_mock):
for x in xrange(4):
self.feed_item_factory()
count = FeedItem.objects.count()
FeedItem.objects.all().delete()
eq_(delete_mock.call_count, count)
class TestFeedShelf(FeedTestMixin, amo.tests.TestCase):
def test_is_published(self):
shelf = self.feed_shelf_factory()
assert not shelf.is_published
shelf.feeditem_set.create()
assert shelf.is_published
| bsd-3-clause | -394,396,989,793,389,250 | 34.201465 | 81 | 0.583975 | false |
Clinical-Genomics/scout | tests/adapter/mongo/test_user_handling.py | 1 | 4378 | from scout.build.user import build_user
def test_delete_user(adapter):
institutes = ["test-1", "test-2"]
## GIVEN an adapter with two users
for i, ins in enumerate(institutes, 1):
user_info = {
"email": "clark.kent{}@mail.com".format(i),
"id": "clke0" + str(i),
"location": "here",
"name": "Clark Kent",
"institutes": [ins],
}
user_obj = build_user(user_info)
user_obj = adapter.add_user(user_obj)
assert sum(1 for user in adapter.users()) == 2
## WHEN deleting a user
adapter.delete_user(email="[email protected]")
## THEN assert that there is only ine user left
assert sum(1 for user in adapter.users()) == 1
def test_update_user(real_adapter):
adapter = real_adapter
## GIVEN an adapter with a user
user_info = {
"email": "[email protected]",
"location": "here",
"name": "Clark Kent",
"institutes": ["test-1"],
}
user_obj = build_user(user_info)
user_obj = adapter.add_user(user_obj)
assert user_obj["institutes"] == ["test-1"]
## WHEN updating a user
user_info["institutes"].append("test-2")
user_obj = build_user(user_info)
adapter.update_user(user_obj)
## THEN assert that the user is in the database
updated_user = adapter.user_collection.find_one()
assert set(updated_user["institutes"]) == set(["test-1", "test-2"])
def test_insert_user(adapter):
user_info = {
"email": "[email protected]",
"location": "here",
"name": "Clark Kent",
"institutes": ["test-1"],
}
user_obj = build_user(user_info)
## GIVEN a empty adapter
assert adapter.user_collection.find_one() is None
## WHEN inserting a user
user_obj = adapter.add_user(user_obj)
## THEN assert that the user is in the database
assert adapter.user_collection.find_one()
def test_get_users_institute(adapter):
institutes = ["test-1", "test-2"]
## GIVEN an adapter with multiple users
for i, ins in enumerate(institutes, 1):
user_info = {
"email": "clark.kent{}@mail.com".format(i),
"id": "clke0" + str(i),
"location": "here",
"name": "Clark Kent",
"institutes": [ins],
}
user_obj = build_user(user_info)
user_obj = adapter.add_user(user_obj)
## WHEN fetching all users
users = adapter.users(institute=institutes[0])
## THEN assert that both users are fetched
assert sum(1 for user in users) == 1
def test_get_users(adapter):
institutes = ["test-1", "test-2"]
## GIVEN an adapter with multiple users
for i, ins in enumerate(institutes, 1):
user_info = {
"email": "clark.kent{}@mail.com".format(i),
"id": "clke0" + str(i),
"location": "here",
"name": "Clark Kent",
"institutes": [ins],
}
user_obj = build_user(user_info)
user_obj = adapter.add_user(user_obj)
## WHEN fetching all users
users = adapter.users()
## THEN assert that both users are fetched
assert sum(1 for user in users) == len(institutes)
def test_get_user_id(adapter):
user_info = {
"email": "[email protected]",
"id": "clke01",
"location": "here",
"name": "Clark Kent",
"institutes": ["test-1"],
}
user_obj = build_user(user_info)
user_obj = adapter.add_user(user_obj)
## WHEN fetching the user with email
user = adapter.user(user_id="clke01")
## THEN assert that the user is fetched
assert user
def test_get_user_email(adapter):
user_info = {
"email": "[email protected]",
"id": "clke01",
"location": "here",
"name": "Clark Kent",
"institutes": ["test-1"],
}
user_obj = build_user(user_info)
user_obj = adapter.add_user(user_obj)
## WHEN fetching the user with email
user = adapter.user(email="[email protected]")
## THEN assert that the user is fetched
assert user
def test_get_nonexisting_user(adapter):
## GIVEN an empty adapter
assert adapter.user_collection.find_one() is None
## WHEN fetching a non existing user
user_obj = adapter.user(email="[email protected]")
## THEN assert the user is None
assert user_obj is None
| bsd-3-clause | 870,829,471,759,422,700 | 28.986301 | 71 | 0.584513 | false |
diver-in-sky/django-threaded-multihost | threaded_multihost/test_app/settings.py | 1 | 3063 | # Django settings for mytest project.
from os.path import normpath, join, dirname
#ROOT : the django project root
ROOT = lambda *base : normpath(join(dirname(__file__), *base)).replace('\\','/')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'threaded.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '$%b)@1zxin5gh19vsj(@nn=hm-!31ejy4gyc*391@-(odwsf+u'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'threaded_multihost.middleware.ThreadLocalMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
# ROOT('templates')
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'test_app.model_tests',
)
| bsd-3-clause | 6,086,891,033,615,040,000 | 34.206897 | 101 | 0.709435 | false |
raccoongang/edx-platform | lms/djangoapps/shoppingcart/tests/test_views.py | 1 | 102209 | """
Tests for Shopping Cart views
"""
import json
from collections import OrderedDict
from datetime import datetime, timedelta
from decimal import Decimal
from urlparse import urlparse
import ddt
import pytz
from django.conf import settings
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import Group, User
from django.contrib.messages.storage.fallback import FallbackStorage
from django.core import mail
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.http import HttpRequest
from django.test import TestCase
from django.test.utils import override_settings
from freezegun import freeze_time
from mock import Mock, patch
from nose.plugins.attrib import attr
from pytz import UTC
from common.test.utils import XssTestMixin
from course_modes.models import CourseMode
from courseware.tests.factories import InstructorFactory
from edxmako.shortcuts import render_to_response
from openedx.core.djangoapps.embargo.test_utils import restrict_course
from shoppingcart.admin import SoftDeleteCouponAdmin
from shoppingcart.models import (
CertificateItem,
Coupon,
CouponRedemption,
CourseRegCodeItem,
CourseRegistrationCode,
DonationConfiguration,
Order,
PaidCourseRegistration,
RegistrationCodeRedemption
)
from shoppingcart.processors import render_purchase_form_html
from shoppingcart.processors.CyberSource2 import sign
from shoppingcart.tests.payment_fake import PaymentFakeView
from shoppingcart.views import _can_download_report, _get_date_from_str, initialize_report
from student.models import CourseEnrollment
from student.roles import CourseSalesAdminRole
from student.tests.factories import AdminFactory, CourseModeFactory, UserFactory
from util.date_utils import get_default_time_display
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.django import modulestore
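
# Module-level mocks: form_mock and render_mock wrap the real renderers via
# Mock(side_effect=...) so tests can assert on the arguments a view passed
# while still producing the normal rendered output; postpay_mock is a bare
# Mock stand-in.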
def mock_render_purchase_form_html(*args, **kwargs):
return render_purchase_form_html(*args, **kwargs)
form_mock = Mock(side_effect=mock_render_purchase_form_html)
def mock_render_to_response(*args, **kwargs):
return render_to_response(*args, **kwargs)
render_mock = Mock(side_effect=mock_render_to_response)
postpay_mock = Mock()
@attr(shard=3)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': True})
@ddt.ddt
class ShoppingCartViewsTests(SharedModuleStoreTestCase, XssTestMixin):
"""
Test shopping cart view under various states
"""
@classmethod
def setUpClass(cls):
super(ShoppingCartViewsTests, cls).setUpClass()
cls.course = CourseFactory.create(org='MITx', number='999', display_name='Robot Super Course')
cls.course_key = cls.course.id
verified_course = CourseFactory.create(org='org', number='test', display_name='Test Course')
cls.verified_course_key = verified_course.id
xss_course = CourseFactory.create(org='xssorg', number='test', display_name='<script>alert("XSS")</script>')
cls.xss_course_key = xss_course.id
cls.testing_course = CourseFactory.create(org='edX', number='888', display_name='Testing Super Course')
def setUp(self):
super(ShoppingCartViewsTests, self).setUp()
patcher = patch('student.models.tracker')
self.mock_tracker = patcher.start()
self.user = UserFactory.create()
self.user.set_password('password')
self.user.save()
self.instructor = AdminFactory.create()
self.cost = 40
self.coupon_code = 'abcde'
self.reg_code = 'qwerty'
self.percentage_discount = 10
self.course_mode = CourseMode(
course_id=self.course_key,
mode_slug=CourseMode.HONOR,
mode_display_name="honor cert",
min_price=self.cost
)
self.course_mode.save()
# Saving another testing course mode
self.testing_cost = 20
self.testing_course_mode = CourseMode(
course_id=self.testing_course.id,
mode_slug=CourseMode.HONOR,
mode_display_name="testing honor cert",
min_price=self.testing_cost
)
self.testing_course_mode.save()
# And for the XSS course
CourseMode(
course_id=self.xss_course_key,
mode_slug=CourseMode.HONOR,
mode_display_name="honor cert",
min_price=self.cost
).save()
# And the verified course
self.verified_course_mode = CourseMode(
course_id=self.verified_course_key,
mode_slug=CourseMode.HONOR,
mode_display_name="honor cert",
min_price=self.cost
)
self.verified_course_mode.save()
self.cart = Order.get_cart_for_user(self.user)
self.addCleanup(patcher.stop)
self.now = datetime.now(pytz.UTC)
self.yesterday = self.now - timedelta(days=1)
self.tomorrow = self.now + timedelta(days=1)
def get_discount(self, cost):
"""
        This method simply returns the cost after applying the percentage discount.
"""
val = Decimal("{0:.2f}".format(Decimal(self.percentage_discount / 100.00) * cost))
return cost - val
def add_coupon(self, course_key, is_active, code):
"""
        Add a dummy coupon to the database for the given course.
"""
coupon = Coupon(code=code, description='testing code', course_id=course_key,
percentage_discount=self.percentage_discount, created_by=self.user, is_active=is_active)
coupon.save()
def add_reg_code(self, course_key, mode_slug=None, is_valid=True):
"""
        Add a dummy registration code to the database for the given course.
"""
if mode_slug is None:
mode_slug = self.course_mode.mode_slug
course_reg_code = CourseRegistrationCode(
code=self.reg_code, course_id=course_key,
created_by=self.user, mode_slug=mode_slug,
is_valid=is_valid
)
course_reg_code.save()
def _add_course_mode(self, min_price=50, mode_slug='honor', expiration_date=None):
"""
Adds a course mode to the test course.
"""
mode = CourseModeFactory.create()
mode.course_id = self.course.id
mode.min_price = min_price
mode.mode_slug = mode_slug
mode.expiration_date = expiration_date
mode.save()
return mode
def add_course_to_user_cart(self, course_key):
"""
        Log in and add the given course to the user's cart.
"""
self.login_user()
reg_item = PaidCourseRegistration.add_to_order(self.cart, course_key, mode_slug=self.course_mode.mode_slug)
return reg_item
def login_user(self):
self.client.login(username=self.user.username, password="password")
def test_add_course_to_cart_anon(self):
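        # An anonymous (logged-out) user may not add a course to the cart.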
resp = self.client.post(reverse('shoppingcart.views.add_course_to_cart', args=[self.course_key.to_deprecated_string()]))
self.assertEqual(resp.status_code, 403)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_billing_details(self):
billing_url = reverse('billing_details')
self.login_user()
# page not found error because order_type is not business
resp = self.client.get(billing_url)
self.assertEqual(resp.status_code, 404)
        # change the order_type to business
self.cart.order_type = 'business'
self.cart.save()
resp = self.client.get(billing_url)
self.assertEqual(resp.status_code, 200)
((template, context), _) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/billing_details.html')
# check for the default currency in the context
self.assertEqual(context['currency'], 'usd')
self.assertEqual(context['currency_symbol'], '$')
data = {'company_name': 'Test Company', 'company_contact_name': 'JohnDoe',
'company_contact_email': '[email protected]', 'recipient_name': 'Mocker',
'recipient_email': '[email protected]', 'company_address_line_1': 'DC Street # 1',
'company_address_line_2': '',
'company_city': 'DC', 'company_state': 'NY', 'company_zip': '22003', 'company_country': 'US',
'customer_reference_number': 'PO#23'}
resp = self.client.post(billing_url, data)
self.assertEqual(resp.status_code, 200)
@patch('shoppingcart.views.render_to_response', render_mock)
@override_settings(PAID_COURSE_REGISTRATION_CURRENCY=['PKR', 'Rs'])
def test_billing_details_with_override_currency_settings(self):
billing_url = reverse('billing_details')
self.login_user()
        # change the order_type to business
self.cart.order_type = 'business'
self.cart.save()
resp = self.client.get(billing_url)
self.assertEqual(resp.status_code, 200)
((template, context), __) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/billing_details.html')
# check for the override currency settings in the context
self.assertEqual(context['currency'], 'PKR')
self.assertEqual(context['currency_symbol'], 'Rs')
def test_same_coupon_code_applied_on_multiple_items_in_the_cart(self):
"""
        Test that the same coupon code can be applied to multiple
        items in the cart.
"""
self.login_user()
# add first course to user cart
resp = self.client.post(
reverse('shoppingcart.views.add_course_to_cart', args=[self.course_key.to_deprecated_string()])
)
self.assertEqual(resp.status_code, 200)
        # add and apply the coupon code to the course in the cart
self.add_coupon(self.course_key, True, self.coupon_code)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
        # now add the same coupon code to the second course (testing_course)
        self.add_coupon(self.testing_course.id, True, self.coupon_code)
        # now add the second course to the cart; the coupon code should be
        # applied automatically when the second course is added
resp = self.client.post(
reverse('shoppingcart.views.add_course_to_cart', args=[self.testing_course.id.to_deprecated_string()])
)
self.assertEqual(resp.status_code, 200)
        # now check the user cart and verify that the discount has been applied to both courses
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
        # the first course costs $40 and the second $20; after a 10% discount
        # on both courses the total is 36 + 18 = 54
self.assertIn('54.00', resp.content)
def test_add_course_to_cart_already_in_cart(self):
PaidCourseRegistration.add_to_order(self.cart, self.course_key)
self.login_user()
resp = self.client.post(reverse('shoppingcart.views.add_course_to_cart', args=[self.course_key.to_deprecated_string()]))
self.assertEqual(resp.status_code, 400)
self.assertIn('The course {0} is already in your cart.'.format(self.course_key.to_deprecated_string()), resp.content)
def test_course_discount_invalid_coupon(self):
self.add_coupon(self.course_key, True, self.coupon_code)
self.add_course_to_user_cart(self.course_key)
non_existing_code = "non_existing_code"
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': non_existing_code})
self.assertEqual(resp.status_code, 404)
self.assertIn("Discount does not exist against code '{0}'.".format(non_existing_code), resp.content)
def test_valid_qty_greater_then_one_and_purchase_type_should_business(self):
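        # A quantity greater than one marks the order as a bulk 'business' purchase.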
qty = 2
item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': qty})
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['total_cost'], item.unit_cost * qty)
cart = Order.get_cart_for_user(self.user)
self.assertEqual(cart.order_type, 'business')
def test_in_valid_qty_case(self):
# invalid quantity, Quantity must be between 1 and 1000.
qty = 0
item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': qty})
self.assertEqual(resp.status_code, 400)
self.assertIn("Quantity must be between 1 and 1000.", resp.content)
# invalid quantity, Quantity must be an integer.
qty = 'abcde'
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': qty})
self.assertEqual(resp.status_code, 400)
self.assertIn("Quantity must be an integer.", resp.content)
        # invalid quantity: quantity not present in the request
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id})
self.assertEqual(resp.status_code, 400)
self.assertIn("Quantity must be between 1 and 1000.", resp.content)
def test_valid_qty_but_item_not_found(self):
qty = 2
item_id = '-1'
self.login_user()
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item_id, 'qty': qty})
self.assertEqual(resp.status_code, 404)
self.assertEqual('Order item does not exist.', resp.content)
        # now test the case where the item id is missing from the request
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'qty': qty})
self.assertEqual(resp.status_code, 400)
self.assertEqual('Order item not found in request.', resp.content)
def test_purchase_type_should_be_personal_when_qty_is_one(self):
qty = 1
item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': qty})
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['total_cost'], item.unit_cost * 1)
cart = Order.get_cart_for_user(self.user)
self.assertEqual(cart.order_type, 'personal')
def test_purchase_type_on_removing_item_and_cart_has_item_with_qty_one(self):
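        # Removing the multi-quantity item should drop the order type back to 'personal'.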
qty = 5
self.add_course_to_user_cart(self.course_key)
item2 = self.add_course_to_user_cart(self.testing_course.id)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item2.id, 'qty': qty})
self.assertEqual(resp.status_code, 200)
cart = Order.get_cart_for_user(self.user)
cart_items = cart.orderitem_set.all()
test_flag = False
for cartitem in cart_items:
if cartitem.qty == 5:
test_flag = True
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]), {'id': cartitem.id})
self.assertEqual(resp.status_code, 200)
self.assertTrue(test_flag)
cart = Order.get_cart_for_user(self.user)
self.assertEqual(cart.order_type, 'personal')
def test_billing_details_btn_in_cart_when_qty_is_greater_than_one(self):
qty = 5
item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': qty})
self.assertEqual(resp.status_code, 200)
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertIn("Billing Details", resp.content)
def test_purchase_type_should_be_personal_when_remove_all_items_from_cart(self):
item1 = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item1.id, 'qty': 2})
self.assertEqual(resp.status_code, 200)
item2 = self.add_course_to_user_cart(self.testing_course.id)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item2.id, 'qty': 5})
self.assertEqual(resp.status_code, 200)
cart = Order.get_cart_for_user(self.user)
cart_items = cart.orderitem_set.all()
test_flag = False
for cartitem in cart_items:
test_flag = True
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]), {'id': cartitem.id})
self.assertEqual(resp.status_code, 200)
self.assertTrue(test_flag)
cart = Order.get_cart_for_user(self.user)
self.assertEqual(cart.order_type, 'personal')
def test_use_valid_coupon_code_and_qty_is_greater_than_one(self):
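        # With quantity greater than one, the coupon discount should apply to every unit.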
qty = 5
item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': qty})
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
self.assertEqual(data['total_cost'], item.unit_cost * qty)
# use coupon code
self.add_coupon(self.course_key, True, self.coupon_code)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
item = self.cart.orderitem_set.all().select_subclasses()[0]
self.assertEquals(item.unit_cost * qty, 180)
def test_course_discount_invalid_reg_code(self):
self.add_reg_code(self.course_key)
self.add_course_to_user_cart(self.course_key)
non_existing_code = "non_existing_code"
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': non_existing_code})
self.assertEqual(resp.status_code, 404)
self.assertIn("Discount does not exist against code '{0}'.".format(non_existing_code), resp.content)
def test_course_discount_inactive_coupon(self):
self.add_coupon(self.course_key, False, self.coupon_code)
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 404)
self.assertIn("Discount does not exist against code '{0}'.".format(self.coupon_code), resp.content)
def test_course_does_not_exist_in_cart_against_valid_coupon(self):
course_key = self.course_key.to_deprecated_string() + 'testing'
self.add_coupon(course_key, True, self.coupon_code)
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 404)
self.assertIn("Discount does not exist against code '{0}'.".format(self.coupon_code), resp.content)
def test_inactive_registration_code_returns_error(self):
"""
        Test that redeeming an inactive registration code
        returns an error.
"""
course_key = self.course_key.to_deprecated_string()
self.add_reg_code(course_key, is_valid=False)
self.add_course_to_user_cart(self.course_key)
        # now apply the inactive registration code;
        # the view should respond with a 400 error
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 400)
self.assertIn(
"This enrollment code ({enrollment_code}) is no longer valid.".format(
enrollment_code=self.reg_code), resp.content)
def test_course_does_not_exist_in_cart_against_valid_reg_code(self):
course_key = self.course_key.to_deprecated_string() + 'testing'
self.add_reg_code(course_key)
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 404)
self.assertIn("Code '{0}' is not valid for any course in the shopping cart.".format(self.reg_code), resp.content)
def test_cart_item_qty_greater_than_1_against_valid_reg_code(self):
course_key = self.course_key.to_deprecated_string()
self.add_reg_code(course_key)
item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': item.id, 'qty': 4})
self.assertEqual(resp.status_code, 200)
        # now that the cart item quantity is greater than 1, applying the
        # registration code should fail with a 404 error
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 404)
self.assertIn("Cart item quantity should not be greater than 1 when applying activation code", resp.content)
@ddt.data(True, False)
def test_reg_code_uses_associated_mode(self, expired_mode):
"""Tests the use of reg codes on verified courses, expired or active. """
course_key = self.course_key.to_deprecated_string()
expiration_date = self.yesterday if expired_mode else self.tomorrow
self._add_course_mode(mode_slug='verified', expiration_date=expiration_date)
self.add_reg_code(course_key, mode_slug='verified')
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('register_code_redemption', args=[self.reg_code]), HTTP_HOST='localhost')
self.assertEqual(resp.status_code, 200)
self.assertIn(self.course.display_name.encode('utf-8'), resp.content)
@ddt.data(True, False)
def test_reg_code_uses_unknown_mode(self, expired_mode):
"""Tests the use of reg codes on verified courses, expired or active. """
course_key = self.course_key.to_deprecated_string()
expiration_date = self.yesterday if expired_mode else self.tomorrow
self._add_course_mode(mode_slug='verified', expiration_date=expiration_date)
self.add_reg_code(course_key, mode_slug='bananas')
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('register_code_redemption', args=[self.reg_code]), HTTP_HOST='localhost')
self.assertEqual(resp.status_code, 200)
self.assertIn(self.course.display_name.encode('utf-8'), resp.content)
self.assertIn("error processing your redeem code", resp.content)
def test_course_discount_for_valid_active_coupon_code(self):
self.add_coupon(self.course_key, True, self.coupon_code)
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
# unit price should be updated for that course
item = self.cart.orderitem_set.all().select_subclasses()[0]
self.assertEquals(item.unit_cost, self.get_discount(self.cost))
# after getting 10 percent discount
self.assertEqual(self.cart.total_cost, self.get_discount(self.cost))
# now using the same coupon code against the same order.
# Only one coupon redemption should be allowed per order.
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 400)
self.assertIn("Only one coupon redemption is allowed against an order", resp.content)
def test_course_discount_against_two_distinct_coupon_codes(self):
self.add_coupon(self.course_key, True, self.coupon_code)
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
# unit price should be updated for that course
item = self.cart.orderitem_set.all().select_subclasses()[0]
self.assertEquals(item.unit_cost, self.get_discount(self.cost))
# now using another valid active coupon code.
# Only one coupon redemption should be allowed per order.
self.add_coupon(self.course_key, True, 'abxyz')
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': 'abxyz'})
self.assertEqual(resp.status_code, 400)
self.assertIn("Only one coupon redemption is allowed against an order", resp.content)
def test_same_coupons_code_on_multiple_courses(self):
# add two same coupon codes on two different courses
self.add_coupon(self.course_key, True, self.coupon_code)
self.add_coupon(self.testing_course.id, True, self.coupon_code)
self.add_course_to_user_cart(self.course_key)
self.add_course_to_user_cart(self.testing_course.id)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
# unit price should be updated for that course
item = self.cart.orderitem_set.all().select_subclasses()[0]
self.assertEquals(item.unit_cost, self.get_discount(self.cost))
item = self.cart.orderitem_set.all().select_subclasses()[1]
self.assertEquals(item.unit_cost, self.get_discount(self.testing_cost))
def test_soft_delete_coupon(self):
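        # "Soft delete" here means the admin delete actions set is_active to
        # False instead of removing coupon rows from the database.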
self.add_coupon(self.course_key, True, self.coupon_code)
coupon = Coupon(code='TestCode', description='testing', course_id=self.course_key,
percentage_discount=12, created_by=self.user, is_active=True)
coupon.save()
self.assertEquals(coupon.__unicode__(), '[Coupon] code: TestCode course: MITx/999/Robot_Super_Course')
admin = User.objects.create_user('Mark', '[email protected]', 'foo')
admin.is_staff = True
get_coupon = Coupon.objects.get(id=1)
request = HttpRequest()
request.user = admin
request.session = 'session'
messages = FallbackStorage(request)
request._messages = messages # pylint: disable=protected-access
coupon_admin = SoftDeleteCouponAdmin(Coupon, AdminSite())
test_query_set = coupon_admin.queryset(request)
test_actions = coupon_admin.get_actions(request)
self.assertIn('really_delete_selected', test_actions['really_delete_selected'])
self.assertEqual(get_coupon.is_active, True)
coupon_admin.really_delete_selected(request, test_query_set)
for coupon in test_query_set:
self.assertEqual(coupon.is_active, False)
coupon_admin.delete_model(request, get_coupon)
self.assertEqual(get_coupon.is_active, False)
coupon = Coupon(code='TestCode123', description='testing123', course_id=self.course_key,
percentage_discount=22, created_by=self.user, is_active=True)
coupon.save()
test_query_set = coupon_admin.queryset(request)
coupon_admin.really_delete_selected(request, test_query_set)
for coupon in test_query_set:
self.assertEqual(coupon.is_active, False)
def test_course_free_discount_for_valid_active_reg_code(self):
self.add_reg_code(self.course_key)
self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 200)
redeem_url = reverse('register_code_redemption', args=[self.reg_code])
response = self.client.get(redeem_url)
self.assertEquals(response.status_code, 200)
# check button text
self.assertIn('Activate Course Enrollment', response.content)
#now activate the user by enrolling him/her to the course
response = self.client.post(redeem_url)
self.assertEquals(response.status_code, 200)
        # now test the already-used scenario by reusing the same code;
        # the item was removed when the registration code was first redeemed
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 400)
self.assertIn("This enrollment code ({enrollment_code}) is not valid.".format(
enrollment_code=self.reg_code
), resp.content)
def test_upgrade_from_valid_reg_code(self):
"""Use a valid registration code to upgrade from honor to verified mode. """
# Ensure the course has a verified mode
course_key = self.course_key.to_deprecated_string()
self._add_course_mode(mode_slug='verified')
self.add_reg_code(course_key, mode_slug='verified')
# Enroll as honor in the course with the current user.
CourseEnrollment.enroll(self.user, self.course_key, mode=CourseMode.HONOR)
self.login_user()
current_enrollment, __ = CourseEnrollment.enrollment_mode_for_user(self.user, self.course_key)
self.assertEquals('honor', current_enrollment)
redeem_url = reverse('register_code_redemption', args=[self.reg_code])
response = self.client.get(redeem_url)
self.assertEquals(response.status_code, 200)
# check button text
self.assertIn('Activate Course Enrollment', response.content)
        # now activate the user by enrolling them in the course
response = self.client.post(redeem_url)
self.assertEquals(response.status_code, 200)
# Once upgraded, should be "verified"
current_enrollment, __ = CourseEnrollment.enrollment_mode_for_user(self.user, self.course_key)
self.assertEquals('verified', current_enrollment)
@patch('shoppingcart.views.log.debug')
def test_non_existing_coupon_redemption_on_removing_item(self, debug_log):
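        # Removing an item that has no coupon redemption should only log a debug message.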
reg_item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]),
{'id': reg_item.id})
debug_log.assert_called_with(
'Code redemption does not exist for order item id=%s.',
str(reg_item.id)
)
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 0)
@patch('shoppingcart.views.log.info')
def test_existing_coupon_redemption_on_removing_item(self, info_log):
self.add_coupon(self.course_key, True, self.coupon_code)
reg_item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]),
{'id': reg_item.id})
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 0)
info_log.assert_called_with(
'Coupon "%s" redemption entry removed for user "%s" for order item "%s"',
self.coupon_code,
self.user,
str(reg_item.id)
)
@patch('shoppingcart.views.log.info')
def test_reset_redemption_for_coupon(self, info_log):
self.add_coupon(self.course_key, True, self.coupon_code)
reg_item = self.add_course_to_user_cart(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
resp = self.client.post(reverse('shoppingcart.views.reset_code_redemption', args=[]))
self.assertEqual(resp.status_code, 200)
info_log.assert_called_with(
'Coupon redemption entry removed for user %s for order %s',
self.user,
reg_item.id
)
@patch('shoppingcart.views.log.info')
def test_coupon_discount_for_multiple_courses_in_cart(self, info_log):
reg_item = self.add_course_to_user_cart(self.course_key)
self.add_coupon(self.course_key, True, self.coupon_code)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.assertEquals(self.cart.orderitem_set.count(), 2)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
        # unit_cost should be updated only for the course the coupon code is registered against
items = self.cart.orderitem_set.all().select_subclasses()
for item in items:
if item.id == reg_item.id:
self.assertEquals(item.unit_cost, self.get_discount(self.cost))
self.assertEquals(item.list_price, self.cost)
elif item.id == cert_item.id:
self.assertEquals(item.list_price, self.cost)
self.assertEquals(item.unit_cost, self.cost)
        # Deleting the discounted item should also remove the coupon
        # redemption entry associated with that item
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]),
{'id': reg_item.id})
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 1)
info_log.assert_called_with(
'Coupon "%s" redemption entry removed for user "%s" for order item "%s"',
self.coupon_code,
self.user,
str(reg_item.id)
)
@patch('shoppingcart.views.log.info')
def test_delete_certificate_item(self, info_log):
self.add_course_to_user_cart(self.course_key)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.assertEquals(self.cart.orderitem_set.count(), 2)
        # Delete the certificate item; it should be removed from the cart
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]),
{'id': cert_item.id})
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 1)
info_log.assert_called_with("order item %s removed for user %s", str(cert_item.id), self.user)
@patch('shoppingcart.views.log.info')
def test_remove_coupon_redemption_on_clear_cart(self, info_log):
reg_item = self.add_course_to_user_cart(self.course_key)
CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.assertEquals(self.cart.orderitem_set.count(), 2)
self.add_coupon(self.course_key, True, self.coupon_code)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
resp = self.client.post(reverse('shoppingcart.views.clear_cart', args=[]))
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 0)
info_log.assert_called_with(
'Coupon redemption entry removed for user %s for order %s',
self.user,
reg_item.id
)
def test_add_course_to_cart_already_registered(self):
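        # A user already enrolled in the course gets a 400 when re-adding it.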
CourseEnrollment.enroll(self.user, self.course_key)
self.login_user()
resp = self.client.post(reverse('shoppingcart.views.add_course_to_cart', args=[self.course_key.to_deprecated_string()]))
self.assertEqual(resp.status_code, 400)
self.assertIn('You are already registered in course {0}.'.format(self.course_key.to_deprecated_string()), resp.content)
def test_add_nonexistent_course_to_cart(self):
self.login_user()
resp = self.client.post(reverse('shoppingcart.views.add_course_to_cart', args=['non/existent/course']))
self.assertEqual(resp.status_code, 404)
self.assertIn("The course you requested does not exist.", resp.content)
def test_add_course_to_cart_success(self):
self.login_user()
reverse('shoppingcart.views.add_course_to_cart', args=[self.course_key.to_deprecated_string()])
resp = self.client.post(reverse('shoppingcart.views.add_course_to_cart', args=[self.course_key.to_deprecated_string()]))
self.assertEqual(resp.status_code, 200)
self.assertTrue(PaidCourseRegistration.contained_in_order(self.cart, self.course_key))
@patch('shoppingcart.views.render_purchase_form_html', form_mock)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_cart(self):
self.login_user()
reg_item = PaidCourseRegistration.add_to_order(
self.cart,
self.course_key,
mode_slug=self.course_mode.mode_slug
)
cert_item = CertificateItem.add_to_order(
self.cart,
self.verified_course_key,
self.cost,
self.course_mode.mode_slug
)
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
((purchase_form_arg_cart,), _) = form_mock.call_args # pylint: disable=redefined-outer-name
purchase_form_arg_cart_items = purchase_form_arg_cart.orderitem_set.all().select_subclasses()
self.assertIn(reg_item, purchase_form_arg_cart_items)
self.assertIn(cert_item, purchase_form_arg_cart_items)
self.assertEqual(len(purchase_form_arg_cart_items), 2)
((template, context), _) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/shopping_cart.html')
self.assertEqual(len(context['shoppingcart_items']), 2)
self.assertEqual(context['amount'], 80)
self.assertIn("80.00", context['form_html'])
# check for the default currency in the context
self.assertEqual(context['currency'], 'usd')
self.assertEqual(context['currency_symbol'], '$')
@patch('shoppingcart.views.render_purchase_form_html', form_mock)
@patch('shoppingcart.views.render_to_response', render_mock)
@override_settings(PAID_COURSE_REGISTRATION_CURRENCY=['PKR', 'Rs'])
def test_show_cart_with_override_currency_settings(self):
self.login_user()
reg_item = PaidCourseRegistration.add_to_order(self.cart, self.course_key)
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
((purchase_form_arg_cart,), _) = form_mock.call_args # pylint: disable=redefined-outer-name
purchase_form_arg_cart_items = purchase_form_arg_cart.orderitem_set.all().select_subclasses()
self.assertIn(reg_item, purchase_form_arg_cart_items)
((template, context), _) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/shopping_cart.html')
# check for the override currency settings in the context
self.assertEqual(context['currency'], 'PKR')
self.assertEqual(context['currency_symbol'], 'Rs')
def test_clear_cart(self):
self.login_user()
PaidCourseRegistration.add_to_order(self.cart, self.course_key)
CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.assertEquals(self.cart.orderitem_set.count(), 2)
resp = self.client.post(reverse('shoppingcart.views.clear_cart', args=[]))
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 0)
@patch('shoppingcart.views.log.exception')
def test_remove_item(self, exception_log):
self.login_user()
reg_item = PaidCourseRegistration.add_to_order(self.cart, self.course_key)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.assertEquals(self.cart.orderitem_set.count(), 2)
resp = self.client.post(reverse('shoppingcart.views.remove_item', args=[]),
{'id': reg_item.id})
self.assertEqual(resp.status_code, 200)
self.assertEquals(self.cart.orderitem_set.count(), 1)
self.assertNotIn(reg_item, self.cart.orderitem_set.all().select_subclasses())
self.cart.purchase()
resp2 = self.client.post(reverse('shoppingcart.views.remove_item', args=[]),
{'id': cert_item.id})
self.assertEqual(resp2.status_code, 200)
exception_log.assert_called_with(
'Cannot remove cart OrderItem id=%s. DoesNotExist or item is already purchased', str(cert_item.id)
)
resp3 = self.client.post(
reverse('shoppingcart.views.remove_item', args=[]),
{'id': -1}
)
self.assertEqual(resp3.status_code, 200)
exception_log.assert_called_with(
'Cannot remove cart OrderItem id=%s. DoesNotExist or item is already purchased',
'-1'
)
@patch('shoppingcart.views.process_postpay_callback', postpay_mock)
def test_postpay_callback_success(self):
postpay_mock.return_value = {'success': True, 'order': self.cart}
self.login_user()
resp = self.client.post(reverse('shoppingcart.views.postpay_callback', args=[]))
self.assertEqual(resp.status_code, 302)
        self.assertEqual(urlparse(resp['location']).path,
                         reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
@patch('shoppingcart.views.process_postpay_callback', postpay_mock)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_postpay_callback_failure(self):
postpay_mock.return_value = {'success': False, 'order': self.cart, 'error_html': 'ERROR_TEST!!!'}
self.login_user()
resp = self.client.post(reverse('shoppingcart.views.postpay_callback', args=[]))
self.assertEqual(resp.status_code, 200)
self.assertIn('ERROR_TEST!!!', resp.content)
((template, context), _) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/error.html')
self.assertEqual(context['order'], self.cart)
self.assertEqual(context['error_html'], 'ERROR_TEST!!!')
@ddt.data(0, 1)
def test_show_receipt_json(self, num_items):
# Create the correct number of items in the order
for __ in range(num_items):
CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.cart.purchase()
self.login_user()
url = reverse('shoppingcart.views.show_receipt', args=[self.cart.id])
resp = self.client.get(url, HTTP_ACCEPT="application/json")
# Should have gotten a successful response
self.assertEqual(resp.status_code, 200)
# Parse the response as JSON and check the contents
json_resp = json.loads(resp.content)
self.assertEqual(json_resp.get('currency'), self.cart.currency)
self.assertEqual(json_resp.get('purchase_datetime'), get_default_time_display(self.cart.purchase_time))
self.assertEqual(json_resp.get('total_cost'), self.cart.total_cost)
self.assertEqual(json_resp.get('status'), "purchased")
self.assertEqual(json_resp.get('billed_to'), {
'first_name': self.cart.bill_to_first,
'last_name': self.cart.bill_to_last,
'street1': self.cart.bill_to_street1,
'street2': self.cart.bill_to_street2,
'city': self.cart.bill_to_city,
'state': self.cart.bill_to_state,
'postal_code': self.cart.bill_to_postalcode,
'country': self.cart.bill_to_country
})
self.assertEqual(len(json_resp.get('items')), num_items)
for item in json_resp.get('items'):
self.assertEqual(item, {
'unit_cost': 40,
'quantity': 1,
'line_cost': 40,
'line_desc': '{} for course Test Course'.format(self.verified_course_mode.mode_display_name),
'course_key': unicode(self.verified_course_key)
})
def test_show_receipt_xss(self):
CertificateItem.add_to_order(self.cart, self.xss_course_key, self.cost, 'honor')
self.cart.purchase()
self.login_user()
url = reverse('shoppingcart.views.show_receipt', args=[self.cart.id])
resp = self.client.get(url)
self.assert_no_xss(resp, '<script>alert("XSS")</script>')
@patch('shoppingcart.views.render_to_response', render_mock)
def test_reg_code_xss(self):
self.add_reg_code(self.xss_course_key)
        # One course in the user's shopping cart
self.add_course_to_user_cart(self.xss_course_key)
self.assertEquals(self.cart.orderitem_set.count(), 1)
post_response = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(post_response.status_code, 200)
redeem_url = reverse('register_code_redemption', args=[self.reg_code])
redeem_response = self.client.get(redeem_url)
self.assert_no_xss(redeem_response, '<script>alert("XSS")</script>')
def test_show_receipt_json_multiple_items(self):
# Two different item types
PaidCourseRegistration.add_to_order(
self.cart,
self.course_key,
mode_slug=self.course_mode.mode_slug
)
CertificateItem.add_to_order(
self.cart,
self.verified_course_key,
self.cost,
self.verified_course_mode.mode_slug
)
self.cart.purchase()
self.login_user()
url = reverse('shoppingcart.views.show_receipt', args=[self.cart.id])
resp = self.client.get(url, HTTP_ACCEPT="application/json")
# Should have gotten a successful response
self.assertEqual(resp.status_code, 200)
# Parse the response as JSON and check the contents
json_resp = json.loads(resp.content)
self.assertEqual(json_resp.get('total_cost'), self.cart.total_cost)
items = json_resp.get('items')
self.assertEqual(len(items), 2)
self.assertEqual(items[0], {
'unit_cost': 40,
'quantity': 1,
'line_cost': 40,
'line_desc': 'Registration for Course: Robot Super Course',
'course_key': unicode(self.course_key)
})
self.assertEqual(items[1], {
'unit_cost': 40,
'quantity': 1,
'line_cost': 40,
'line_desc': '{} for course Test Course'.format(self.verified_course_mode.mode_display_name),
'course_key': unicode(self.verified_course_key)
})
def test_receipt_json_refunded(self):
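        # Simulate a refundable enrollment so the refund callback can fire.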
mock_enrollment = Mock()
mock_enrollment.refundable.side_effect = lambda: True
mock_enrollment.course_id = self.verified_course_key
mock_enrollment.user = self.user
CourseMode.objects.create(
course_id=self.verified_course_key,
mode_slug="verified",
mode_display_name="verified cert",
min_price=self.cost
)
cert = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'verified')
self.cart.purchase()
cert.refund_cert_callback(course_enrollment=mock_enrollment)
self.login_user()
url = reverse('shoppingcart.views.show_receipt', args=[self.cart.id])
resp = self.client.get(url, HTTP_ACCEPT="application/json")
self.assertEqual(resp.status_code, 200)
json_resp = json.loads(resp.content)
self.assertEqual(json_resp.get('status'), 'refunded')
def test_show_receipt_404s(self):
PaidCourseRegistration.add_to_order(self.cart, self.course_key)
CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.cart.purchase()
user2 = UserFactory.create()
cart2 = Order.get_cart_for_user(user2)
PaidCourseRegistration.add_to_order(cart2, self.course_key)
cart2.purchase()
self.login_user()
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[cart2.id]))
self.assertEqual(resp.status_code, 404)
resp2 = self.client.get(reverse('shoppingcart.views.show_receipt', args=[1000]))
self.assertEqual(resp2.status_code, 404)
def test_total_amount_of_purchased_course(self):
self.add_course_to_user_cart(self.course_key)
self.assertEquals(self.cart.orderitem_set.count(), 1)
self.add_coupon(self.course_key, True, self.coupon_code)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
        # Total amount collected for a particular course purchased by different users
total_amount = PaidCourseRegistration.get_total_amount_of_purchased_item(self.course_key)
self.assertEqual(total_amount, 36)
self.client.login(username=self.instructor.username, password="test")
cart = Order.get_cart_for_user(self.instructor)
PaidCourseRegistration.add_to_order(cart, self.course_key, mode_slug=self.course_mode.mode_slug)
cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
total_amount = PaidCourseRegistration.get_total_amount_of_purchased_item(self.course_key)
self.assertEqual(total_amount, 76)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_with_valid_coupon_code(self):
self.add_course_to_user_cart(self.course_key)
self.add_coupon(self.course_key, True, self.coupon_code)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
self.assertIn('FirstNameTesting123', resp.content)
self.assertIn(str(self.get_discount(self.cost)), resp.content)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_reg_code_and_course_registration_scenario(self):
self.add_reg_code(self.course_key)
        # One course in the user's shopping cart
self.add_course_to_user_cart(self.course_key)
self.assertEquals(self.cart.orderitem_set.count(), 1)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 200)
redeem_url = reverse('register_code_redemption', args=[self.reg_code])
response = self.client.get(redeem_url)
self.assertEquals(response.status_code, 200)
# check button text
self.assertIn('Activate Course Enrollment', response.content)
        # now activate the user by enrolling him/her in the course
response = self.client.post(redeem_url)
self.assertEquals(response.status_code, 200)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_reg_code_with_multiple_courses_and_checkout_scenario(self):
self.add_reg_code(self.course_key)
        # Two courses in the user's shopping cart
self.login_user()
PaidCourseRegistration.add_to_order(self.cart, self.course_key, mode_slug=self.course_mode.mode_slug)
item2 = PaidCourseRegistration.add_to_order(
self.cart,
self.testing_course.id,
mode_slug=self.course_mode.mode_slug
)
self.assertEquals(self.cart.orderitem_set.count(), 2)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 200)
redeem_url = reverse('register_code_redemption', args=[self.reg_code])
resp = self.client.get(redeem_url)
self.assertEquals(resp.status_code, 200)
# check button text
self.assertIn('Activate Course Enrollment', resp.content)
        # now activate the user by enrolling him/her in the course
resp = self.client.post(redeem_url)
self.assertEquals(resp.status_code, 200)
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertIn('Payment', resp.content)
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
((template, context), _) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/receipt.html')
self.assertEqual(context['order'], self.cart)
self.assertEqual(context['order'].total_cost, self.testing_cost)
course_enrollment = CourseEnrollment.objects.filter(user=self.user)
self.assertEqual(course_enrollment.count(), 2)
        # make sure the enrollment_ids were stored in the PaidCourseRegistration items;
        # refetch them first since they are updated.
        # item1 has been deleted from the cart; the user has been
        # enrolled for it.
item2 = PaidCourseRegistration.objects.get(id=item2.id)
self.assertIsNotNone(item2.course_enrollment)
self.assertEqual(item2.course_enrollment.course_id, self.testing_course.id)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_with_valid_reg_code(self):
self.add_course_to_user_cart(self.course_key)
self.add_reg_code(self.course_key)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.reg_code})
self.assertEqual(resp.status_code, 200)
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
self.assertIn('0.00', resp.content)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success(self):
reg_item = PaidCourseRegistration.add_to_order(
self.cart,
self.course_key,
mode_slug=self.course_mode.mode_slug
)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
self.login_user()
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
self.assertIn('FirstNameTesting123', resp.content)
self.assertIn('80.00', resp.content)
((template, context), _) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/receipt.html')
self.assertEqual(context['order'], self.cart)
self.assertIn(reg_item, context['shoppingcart_items'][0])
self.assertIn(cert_item, context['shoppingcart_items'][1])
self.assertFalse(context['any_refunds'])
# check for the default currency settings in the context
self.assertEqual(context['currency_symbol'], '$')
self.assertEqual(context['currency'], 'usd')
@override_settings(PAID_COURSE_REGISTRATION_CURRENCY=['PKR', 'Rs'])
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_with_override_currency_settings(self):
reg_item = PaidCourseRegistration.add_to_order(self.cart, self.course_key)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
self.login_user()
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
((template, context), _) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/receipt.html')
self.assertIn(reg_item, context['shoppingcart_items'][0])
self.assertIn(cert_item, context['shoppingcart_items'][1])
# check for the override currency settings in the context
self.assertEqual(context['currency_symbol'], 'Rs')
self.assertEqual(context['currency'], 'PKR')
@patch('shoppingcart.views.render_to_response', render_mock)
def test_courseregcode_item_total_price(self):
self.cart.order_type = 'business'
self.cart.save()
CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2, mode_slug=self.course_mode.mode_slug)
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
self.assertEquals(CourseRegCodeItem.get_total_amount_of_purchased_item(self.course_key), 80)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_with_order_type_business(self):
self.cart.order_type = 'business'
self.cart.save()
reg_item = CourseRegCodeItem.add_to_order(
self.cart,
self.course_key,
2,
mode_slug=self.course_mode.mode_slug
)
self.cart.add_billing_details(company_name='T1Omega', company_contact_name='C1',
company_contact_email='[email protected]', recipient_email='[email protected]')
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
        # mail is sent to these emails: recipient_email, company_contact_email, order.user.email
self.assertEquals(len(mail.outbox), 3)
self.login_user()
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
        # when order_type = 'business' the user is not enrolled in the
        # course but is instead presented with the enrollment links
self.assertFalse(CourseEnrollment.is_enrolled(self.cart.user, self.course_key))
self.assertIn('FirstNameTesting123', resp.content)
self.assertIn('80.00', resp.content)
# check for the enrollment codes content
self.assertIn('Please send each professional one of these unique registration codes to enroll into the course.', resp.content)
# fetch the newly generated registration codes
course_registration_codes = CourseRegistrationCode.objects.filter(order=self.cart)
((template, context), _) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/receipt.html')
self.assertEqual(context['order'], self.cart)
self.assertIn(reg_item, context['shoppingcart_items'][0])
        # now check all the registration codes in the receipt;
        # all the codes should be unused at this point
self.assertIn(course_registration_codes[0].code, context['reg_code_info_list'][0]['code'])
self.assertIn(course_registration_codes[1].code, context['reg_code_info_list'][1]['code'])
self.assertFalse(context['reg_code_info_list'][0]['is_redeemed'])
self.assertFalse(context['reg_code_info_list'][1]['is_redeemed'])
self.assertIn(self.cart.purchase_time.strftime("%B %d, %Y"), resp.content)
self.assertIn(self.cart.company_name, resp.content)
self.assertIn(self.cart.company_contact_name, resp.content)
self.assertIn(self.cart.company_contact_email, resp.content)
self.assertIn(self.cart.recipient_email, resp.content)
self.assertIn("Invoice #{order_id}".format(order_id=self.cart.id), resp.content)
self.assertIn('You have successfully purchased <b>{total_registration_codes} course registration codes'
.format(total_registration_codes=context['total_registration_codes']), resp.content)
        # now redeem one of the registration codes from the previous order
redeem_url = reverse('register_code_redemption', args=[context['reg_code_info_list'][0]['code']])
        # now activate the user by enrolling him/her in the course
response = self.client.post(redeem_url)
self.assertEquals(response.status_code, 200)
self.assertIn('View Dashboard', response.content)
        # now view the receipt page again to see whether any of the
        # registration codes have been redeemed
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
((template, context), _) = render_mock.call_args # pylint: disable=redefined-outer-name
self.assertEqual(template, 'shoppingcart/receipt.html')
        # now check all the registration codes in the receipt;
        # one of the codes should be redeemed at this point
self.assertTrue(context['reg_code_info_list'][0]['is_redeemed'])
self.assertFalse(context['reg_code_info_list'][1]['is_redeemed'])
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_with_upgrade(self):
reg_item = PaidCourseRegistration.add_to_order(
self.cart,
self.course_key,
mode_slug=self.course_mode.mode_slug
)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
self.login_user()
self.mock_tracker.emit.reset_mock() # pylint: disable=maybe-no-member
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
self.assertIn('FirstNameTesting123', resp.content)
self.assertIn('80.00', resp.content)
((template, context), _) = render_mock.call_args
# When we come from the upgrade flow, we get these context variables
self.assertEqual(template, 'shoppingcart/receipt.html')
self.assertEqual(context['order'], self.cart)
self.assertIn(reg_item, context['shoppingcart_items'][0])
self.assertIn(cert_item, context['shoppingcart_items'][1])
self.assertFalse(context['any_refunds'])
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_refund(self):
reg_item = PaidCourseRegistration.add_to_order(
self.cart,
self.course_key,
mode_slug=self.course_mode.mode_slug
)
cert_item = CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'honor')
self.cart.purchase(first='FirstNameTesting123', street1='StreetTesting123')
cert_item.status = "refunded"
cert_item.save()
self.assertEqual(self.cart.total_cost, 40)
self.login_user()
resp = self.client.get(reverse('shoppingcart.views.show_receipt', args=[self.cart.id]))
self.assertEqual(resp.status_code, 200)
self.assertIn('40.00', resp.content)
((template, context), _tmp) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/receipt.html')
self.assertEqual(context['order'], self.cart)
self.assertIn(reg_item, context['shoppingcart_items'][0])
self.assertIn(cert_item, context['shoppingcart_items'][1])
self.assertTrue(context['any_refunds'])
@patch('shoppingcart.views.render_to_response', render_mock)
def test_show_receipt_success_custom_receipt_page(self):
cert_item = CertificateItem.add_to_order(self.cart, self.course_key, self.cost, 'honor')
self.cart.purchase()
self.login_user()
receipt_url = reverse('shoppingcart.views.show_receipt', args=[self.cart.id])
resp = self.client.get(receipt_url)
self.assertEqual(resp.status_code, 200)
((template, _context), _tmp) = render_mock.call_args
self.assertEqual(template, cert_item.single_item_receipt_template)
def _assert_404(self, url, use_post=False):
"""
Helper method to assert that a given url will return a 404 status code
"""
if use_post:
response = self.client.post(url)
else:
response = self.client.get(url)
self.assertEquals(response.status_code, 404)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': False})
def test_disabled_paid_courses(self):
"""
Assert that the pages that require ENABLE_PAID_COURSE_REGISTRATION=True return a
HTTP 404 status code when we have this flag turned off
"""
self.login_user()
self._assert_404(reverse('shoppingcart.views.show_cart', args=[]))
self._assert_404(reverse('shoppingcart.views.clear_cart', args=[]))
self._assert_404(reverse('shoppingcart.views.remove_item', args=[]), use_post=True)
self._assert_404(reverse('shoppingcart.views.register_code_redemption', args=["testing"]))
self._assert_404(reverse('shoppingcart.views.use_code', args=[]), use_post=True)
self._assert_404(reverse('shoppingcart.views.update_user_cart', args=[]))
self._assert_404(reverse('shoppingcart.views.reset_code_redemption', args=[]), use_post=True)
self._assert_404(reverse('shoppingcart.views.billing_details', args=[]))
def test_upgrade_postpay_callback_emits_ga_event(self):
# Enroll as honor in the course with the current user.
CourseEnrollment.enroll(self.user, self.course_key)
# add verified mode
CourseMode.objects.create(
course_id=self.verified_course_key,
mode_slug="verified",
mode_display_name="verified cert",
min_price=self.cost
)
# Purchase a verified certificate
self.cart = Order.get_cart_for_user(self.user)
CertificateItem.add_to_order(self.cart, self.verified_course_key, self.cost, 'verified')
self.cart.start_purchase()
self.login_user()
# setting the attempting upgrade session value.
session = self.client.session
session['attempting_upgrade'] = True
session.save()
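        # Build the payment processor parameters that will be signed and
        # echoed back through the fake payment view below.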
ordered_params = OrderedDict([
('amount', self.cost),
('currency', 'usd'),
('transaction_type', 'sale'),
('orderNumber', str(self.cart.id)),
('access_key', '123456789'),
('merchantID', 'edx'),
('djch', '012345678912'),
('orderPage_version', 2),
('orderPage_serialNumber', '1234567890'),
('profile_id', "00000001"),
('reference_number', str(self.cart.id)),
('locale', 'en'),
('signed_date_time', '2014-08-18T13:59:31Z'),
])
resp_params = PaymentFakeView.response_post_params(sign(ordered_params))
self.assertTrue(self.client.session.get('attempting_upgrade'))
url = reverse('shoppingcart.views.postpay_callback')
self.client.post(url, resp_params, follow=True)
self.assertFalse(self.client.session.get('attempting_upgrade'))
self.mock_tracker.emit.assert_any_call( # pylint: disable=maybe-no-member
'edx.course.enrollment.upgrade.succeeded',
{
'user_id': self.user.id,
'course_id': self.verified_course_key.to_deprecated_string(),
'mode': 'verified'
}
)
def test_shopping_cart_navigation_link_not_in_microsite(self):
"""
Tests shopping cart link is available in navigation header if request is not from a microsite.
"""
CourseEnrollment.enroll(self.user, self.course_key)
self.add_course_to_user_cart(self.testing_course.id)
resp = self.client.get(reverse('courseware', kwargs={'course_id': unicode(self.course.id)}))
self.assertEqual(resp.status_code, 200)
self.assertIn('<a class="shopping-cart"', resp.content)
def test_shopping_cart_navigation_link_not_in_microsite_and_not_on_courseware(self):
"""
Tests shopping cart link is available in navigation header if request is not from a microsite
and requested page is not courseware too.
"""
CourseEnrollment.enroll(self.user, self.course_key)
self.add_course_to_user_cart(self.testing_course.id)
resp = self.client.get(reverse('dashboard'))
self.assertEqual(resp.status_code, 200)
self.assertIn('<a class="shopping-cart"', resp.content)
def test_shopping_cart_navigation_link_in_microsite_not_on_courseware(self):
"""
Tests shopping cart link is available in navigation header if request is from a microsite but requested
page is not from courseware.
"""
CourseEnrollment.enroll(self.user, self.course_key)
self.add_course_to_user_cart(self.testing_course.id)
with patch('microsite_configuration.microsite.is_request_in_microsite',
Mock(return_value=True)):
resp = self.client.get(reverse('dashboard'))
self.assertEqual(resp.status_code, 200)
self.assertIn('<a class="shopping-cart"', resp.content)
def test_shopping_cart_navigation_link_in_microsite_courseware_page(self):
"""
Tests shopping cart link is not available in navigation header if request is from a microsite
and requested page is from courseware.
"""
CourseEnrollment.enroll(self.user, self.course_key)
self.add_course_to_user_cart(self.testing_course.id)
with patch('microsite_configuration.microsite.is_request_in_microsite',
Mock(return_value=True)):
resp = self.client.get(reverse('courseware', kwargs={'course_id': unicode(self.course.id)}))
self.assertEqual(resp.status_code, 200)
self.assertNotIn('<a class="shopping-cart"', resp.content)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_add_course_to_cart_and_delete_this_course(self):
"""
Testing shopping cart work with course that added to cart and deleted.
"""
course = CourseFactory.create(org='MITxx', number='9999', display_name='Robot Super Puper Course')
course_key = course.id
self.login_user()
resp = self.client.post(
reverse('shoppingcart.views.add_course_to_cart', args=[course_key.to_deprecated_string()]))
self.assertEqual(resp.status_code, 200)
self.assertTrue(PaidCourseRegistration.contained_in_order(self.cart, course_key))
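        # Delete the course from the modulestore while it is still in the cart.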
module_store = modulestore()
with module_store.bulk_operations(course_key):
module_store.delete_course(course_key, self.user)
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
(template, context), _tmp = render_mock.call_args
self.assertEqual(context['shoppingcart_items'], [])
self.assertFalse(PaidCourseRegistration.contained_in_order(self.cart, course_key))
class ReceiptRedirectTest(SharedModuleStoreTestCase):
"""Test special-case redirect from the receipt page. """
COST = 40
PASSWORD = 'password'
@classmethod
def setUpClass(cls):
super(ReceiptRedirectTest, cls).setUpClass()
cls.course = CourseFactory.create()
cls.course_key = cls.course.id
def setUp(self):
super(ReceiptRedirectTest, self).setUp()
self.user = UserFactory.create()
self.user.set_password(self.PASSWORD)
self.user.save()
self.course_mode = CourseMode(
course_id=self.course_key,
mode_slug="verified",
mode_display_name="verified cert",
min_price=self.COST
)
self.course_mode.save()
self.cart = Order.get_cart_for_user(self.user)
self.client.login(
username=self.user.username,
password=self.PASSWORD
)
def test_postpay_callback_redirect_to_verify_student(self):
# Create other carts first
# This ensures that the order ID and order item IDs do not match
Order.get_cart_for_user(self.user).start_purchase()
Order.get_cart_for_user(self.user).start_purchase()
Order.get_cart_for_user(self.user).start_purchase()
# Purchase a verified certificate
self.cart = Order.get_cart_for_user(self.user)
CertificateItem.add_to_order(
self.cart,
self.course_key,
self.COST,
'verified'
)
self.cart.start_purchase()
# Simulate hitting the post-pay callback
with patch('shoppingcart.views.process_postpay_callback') as mock_process:
mock_process.return_value = {'success': True, 'order': self.cart}
url = reverse('shoppingcart.views.postpay_callback')
resp = self.client.post(url, follow=True)
# Expect to be redirected to the payment confirmation
# page in the verify_student app
redirect_url = reverse(
'verify_student_payment_confirmation',
kwargs={'course_id': unicode(self.course_key)}
)
redirect_url += '?payment-order-num={order_num}'.format(
order_num=self.cart.id
)
self.assertIn(redirect_url, resp.redirect_chain[0][0])
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': True})
class ShoppingcartViewsClosedEnrollment(ModuleStoreTestCase):
"""
Test suite for ShoppingcartViews Course Enrollments Closed or not
"""
def setUp(self):
super(ShoppingcartViewsClosedEnrollment, self).setUp()
self.user = UserFactory.create()
self.user.set_password('password')
self.user.save()
self.instructor = AdminFactory.create()
self.cost = 40
self.course = CourseFactory.create(org='MITx', number='999', display_name='Robot Super Course')
self.course_key = self.course.id
self.course_mode = CourseMode(
course_id=self.course_key,
mode_slug=CourseMode.HONOR,
mode_display_name="honor cert",
min_price=self.cost
)
self.course_mode.save()
self.testing_course = CourseFactory.create(
org='Edx',
number='999',
display_name='Testing Super Course',
metadata={"invitation_only": False}
)
self.testing_course_mode = CourseMode(
course_id=self.testing_course.id,
mode_slug=CourseMode.HONOR,
mode_display_name="honor cert",
min_price=self.cost
)
self.course_mode.save()
self.percentage_discount = 20.0
self.coupon_code = 'asdsad'
self.course_mode = CourseMode(course_id=self.testing_course.id,
mode_slug="honor",
mode_display_name="honor cert",
min_price=self.cost)
self.course_mode.save()
self.cart = Order.get_cart_for_user(self.user)
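        # Reference dates for pushing enrollment windows into the future.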
self.now = datetime.now(pytz.UTC)
self.tomorrow = self.now + timedelta(days=1)
self.nextday = self.tomorrow + timedelta(days=1)
def add_coupon(self, course_key, is_active, code):
"""
        add a dummy coupon to the database
"""
coupon = Coupon(code=code, description='testing code', course_id=course_key,
percentage_discount=self.percentage_discount, created_by=self.user, is_active=is_active)
coupon.save()
def login_user(self):
"""
        Helper fn to log in self.user
"""
self.client.login(username=self.user.username, password="password")
@patch('shoppingcart.views.render_to_response', render_mock)
def test_to_check_that_cart_item_enrollment_is_closed(self):
self.login_user()
reg_item1 = PaidCourseRegistration.add_to_order(self.cart, self.course_key)
expired_course_item = PaidCourseRegistration.add_to_order(self.cart, self.testing_course.id)
# update the testing_course enrollment dates
self.testing_course.enrollment_start = self.tomorrow
self.testing_course.enrollment_end = self.nextday
self.testing_course = self.update_course(self.testing_course, self.user.id)
        # now add the same coupon code to the second course (testing_course)
self.add_coupon(self.testing_course.id, True, self.coupon_code)
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
coupon_redemption = CouponRedemption.objects.filter(coupon__course_id=expired_course_item.course_id,
order=expired_course_item.order_id)
self.assertEqual(coupon_redemption.count(), 1)
        # testing_course enrollment is closed but the course is in the cart,
        # so we delete that item from the cart and display a message in the cart.
        # The coupon redemption entry should also be deleted when the item expires.
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
self.assertIn("{course_name} has been removed because the enrollment period has closed.".format(course_name=self.testing_course.display_name), resp.content)
# now the redemption entry should be deleted from the table.
coupon_redemption = CouponRedemption.objects.filter(coupon__course_id=expired_course_item.course_id,
order=expired_course_item.order_id)
self.assertEqual(coupon_redemption.count(), 0)
((template, context), _tmp) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/shopping_cart.html')
self.assertEqual(context['order'], self.cart)
self.assertIn(reg_item1, context['shoppingcart_items'][0])
self.assertEqual(1, len(context['shoppingcart_items']))
self.assertEqual(True, context['is_course_enrollment_closed'])
self.assertIn(self.testing_course.display_name, context['expired_course_names'])
def test_to_check_that_cart_item_enrollment_is_closed_when_clicking_the_payment_button(self):
self.login_user()
PaidCourseRegistration.add_to_order(
self.cart,
self.course_key,
mode_slug=self.course_mode.mode_slug
)
PaidCourseRegistration.add_to_order(
self.cart,
self.testing_course.id,
mode_slug=self.testing_course_mode.mode_slug
)
# update the testing_course enrollment dates
self.testing_course.enrollment_start = self.tomorrow
self.testing_course.enrollment_end = self.nextday
self.testing_course = self.update_course(self.testing_course, self.user.id)
        # testing_course enrollment is closed but the course is in the cart,
        # so we delete that item from the cart and display a message in the cart
resp = self.client.get(reverse('shoppingcart.views.verify_cart'))
self.assertEqual(resp.status_code, 200)
self.assertTrue(json.loads(resp.content)['is_course_enrollment_closed'])
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
self.assertIn("{course_name} has been removed because the enrollment period has closed.".format(course_name=self.testing_course.display_name), resp.content)
self.assertIn('40.00', resp.content)
def test_is_enrollment_closed_when_order_type_is_business(self):
self.login_user()
self.cart.order_type = 'business'
self.cart.save()
PaidCourseRegistration.add_to_order(self.cart, self.course_key, mode_slug=self.course_mode.mode_slug)
CourseRegCodeItem.add_to_order(self.cart, self.testing_course.id, 2, mode_slug=self.course_mode.mode_slug)
# update the testing_course enrollment dates
self.testing_course.enrollment_start = self.tomorrow
self.testing_course.enrollment_end = self.nextday
self.testing_course = self.update_course(self.testing_course, self.user.id)
resp = self.client.post(reverse('shoppingcart.views.billing_details'))
self.assertEqual(resp.status_code, 200)
self.assertTrue(json.loads(resp.content)['is_course_enrollment_closed'])
        # testing_course enrollment is closed but the course is in the cart,
        # so we delete that item from the cart and display a message in the cart
resp = self.client.get(reverse('shoppingcart.views.show_cart', args=[]))
self.assertEqual(resp.status_code, 200)
self.assertIn("{course_name} has been removed because the enrollment period has closed.".format(course_name=self.testing_course.display_name), resp.content)
self.assertIn('40.00', resp.content)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': True})
class RegistrationCodeRedemptionCourseEnrollment(SharedModuleStoreTestCase):
"""
Test suite for RegistrationCodeRedemption Course Enrollments
"""
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
@classmethod
def setUpClass(cls):
super(RegistrationCodeRedemptionCourseEnrollment, cls).setUpClass()
cls.course = CourseFactory.create(org='MITx', number='999', display_name='Robot Super Course')
cls.course_key = cls.course.id
def setUp(self, **kwargs):
super(RegistrationCodeRedemptionCourseEnrollment, self).setUp()
self.user = UserFactory.create()
self.user.set_password('password')
self.user.save()
self.cost = 40
self.course_mode = CourseMode(course_id=self.course_key,
mode_slug="honor",
mode_display_name="honor cert",
min_price=self.cost)
self.course_mode.save()
def login_user(self):
"""
        Helper fn to log in self.user
"""
self.client.login(username=self.user.username, password="password")
def test_registration_redemption_post_request_ratelimited(self):
"""
        Try (and fail) registration code redemption 30 times
        in a row with a POST request for a non-existent registration code
"""
cache.clear()
url = reverse('register_code_redemption', args=['asdasd'])
self.login_user()
for i in xrange(30): # pylint: disable=unused-variable
response = self.client.post(url)
self.assertEquals(response.status_code, 404)
# then the rate limiter should kick in and give a HttpForbidden response
response = self.client.post(url)
self.assertEquals(response.status_code, 403)
        # now advance the time 5 minutes into the future in order to unblock
reset_time = datetime.now(UTC) + timedelta(seconds=300)
with freeze_time(reset_time):
response = self.client.post(url)
self.assertEquals(response.status_code, 404)
cache.clear()
def test_registration_redemption_get_request_ratelimited(self):
"""
        Try (and fail) registration code redemption 30 times
        in a row with a GET request for a non-existent registration code
"""
cache.clear()
url = reverse('register_code_redemption', args=['asdasd'])
self.login_user()
for i in xrange(30): # pylint: disable=unused-variable
response = self.client.get(url)
self.assertEquals(response.status_code, 404)
# then the rate limiter should kick in and give a HttpForbidden response
response = self.client.get(url)
self.assertEquals(response.status_code, 403)
        # now advance the time 5 minutes into the future in order to unblock
reset_time = datetime.now(UTC) + timedelta(seconds=300)
with freeze_time(reset_time):
response = self.client.get(url)
self.assertEquals(response.status_code, 404)
cache.clear()
def test_course_enrollment_active_registration_code_redemption(self):
"""
Test for active registration code course enrollment
"""
cache.clear()
instructor = InstructorFactory(course_key=self.course_key)
self.client.login(username=instructor.username, password='test')
# Registration Code Generation only available to Sales Admins.
CourseSalesAdminRole(self.course.id).add_users(instructor)
url = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
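        # Form data requesting 12 registration codes for this course.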
data = {
'total_registration_codes': 12, 'company_name': 'Test Group', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street',
'address_line_2': '', 'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(url, data)
self.assertEquals(response.status_code, 200)
        # get the first registration code from the newly created registration codes
registration_code = CourseRegistrationCode.objects.all()[0].code
redeem_url = reverse('register_code_redemption', args=[registration_code])
self.login_user()
response = self.client.get(redeem_url)
self.assertEquals(response.status_code, 200)
# check button text
self.assertIn('Activate Course Enrollment', response.content)
        # now activate the user by enrolling him/her in the course
response = self.client.post(redeem_url)
self.assertEquals(response.status_code, 200)
self.assertIn('View Dashboard', response.content)
        # now check that the registration code has already been redeemed and
        # the user is already registered in the course
        response = self.client.get(redeem_url)
        self.assertEquals(len(RegistrationCodeRedemption.objects.filter(registration_code__code=registration_code)), 1)
self.assertIn("You've clicked a link for an enrollment code that has already been used.", response.content)
        # now check that the registration code has already been redeemed
response = self.client.post(redeem_url)
self.assertIn("You've clicked a link for an enrollment code that has already been used.", response.content)
        # now check the response of the dashboard page
dashboard_url = reverse('dashboard')
response = self.client.get(dashboard_url)
self.assertEquals(response.status_code, 200)
self.assertIn(self.course.display_name.encode('utf-8'), response.content)
@ddt.ddt
class RedeemCodeEmbargoTests(UrlResetMixin, ModuleStoreTestCase):
"""Test blocking redeem code redemption based on country access rules. """
USERNAME = 'bob'
PASSWORD = 'test'
URLCONF_MODULES = ['openedx.core.djangoapps.embargo']
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def setUp(self):
super(RedeemCodeEmbargoTests, self).setUp()
self.course = CourseFactory.create()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
result = self.client.login(username=self.user.username, password=self.PASSWORD)
self.assertTrue(result, msg="Could not log in")
@ddt.data('get', 'post')
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def test_registration_code_redemption_embargo(self, method):
# Create a valid registration code
reg_code = CourseRegistrationCode.objects.create(
code="abcd1234",
course_id=self.course.id,
created_by=self.user
)
# Try to redeem the code from a restricted country
with restrict_course(self.course.id) as redirect_url:
url = reverse(
'register_code_redemption',
kwargs={'registration_code': 'abcd1234'}
)
response = getattr(self.client, method)(url)
self.assertRedirects(response, redirect_url)
# The registration code should NOT be redeemed
is_redeemed = RegistrationCodeRedemption.objects.filter(
registration_code=reg_code
).exists()
self.assertFalse(is_redeemed)
# The user should NOT be enrolled
is_enrolled = CourseEnrollment.is_enrolled(self.user, self.course.id)
self.assertFalse(is_enrolled)
@ddt.ddt
class DonationViewTest(SharedModuleStoreTestCase):
"""Tests for making a donation.
These tests cover both the single-item purchase flow,
as well as the receipt page for donation items.
"""
DONATION_AMOUNT = "23.45"
PASSWORD = "password"
@classmethod
def setUpClass(cls):
super(DonationViewTest, cls).setUpClass()
cls.course = CourseFactory.create(display_name="Test Course")
def setUp(self):
"""Create a test user and order. """
super(DonationViewTest, self).setUp()
# Create and login a user
self.user = UserFactory.create()
self.user.set_password(self.PASSWORD)
self.user.save()
result = self.client.login(username=self.user.username, password=self.PASSWORD)
self.assertTrue(result)
# Enable donations
config = DonationConfiguration.current()
config.enabled = True
config.save()
def test_donation_for_org(self):
self._donate(self.DONATION_AMOUNT)
self._assert_receipt_contains("tax purposes")
def test_donation_for_course_receipt(self):
# Donate to our course
self._donate(self.DONATION_AMOUNT, course_id=self.course.id)
# Verify the receipt page
self._assert_receipt_contains("tax purposes")
self._assert_receipt_contains(self.course.display_name)
def test_smallest_possible_donation(self):
self._donate("0.01")
self._assert_receipt_contains("0.01")
@ddt.data(
{},
{"amount": "abcd"},
{"amount": "-1.00"},
{"amount": "0.00"},
{"amount": "0.001"},
{"amount": "0"},
{"amount": "23.45", "course_id": "invalid"}
)
def test_donation_bad_request(self, bad_params):
response = self.client.post(reverse('donation'), bad_params)
self.assertEqual(response.status_code, 400)
def test_donation_requires_login(self):
self.client.logout()
response = self.client.post(reverse('donation'), {'amount': self.DONATION_AMOUNT})
self.assertEqual(response.status_code, 302)
def test_no_such_course(self):
response = self.client.post(
reverse("donation"),
{"amount": self.DONATION_AMOUNT, "course_id": "edx/DemoX/Demo"}
)
self.assertEqual(response.status_code, 400)
@ddt.data("get", "put", "head", "options", "delete")
def test_donation_requires_post(self, invalid_method):
response = getattr(self.client, invalid_method)(
reverse("donation"), {"amount": self.DONATION_AMOUNT}
)
self.assertEqual(response.status_code, 405)
def test_donations_disabled(self):
config = DonationConfiguration.current()
config.enabled = False
config.save()
# Logged in -- should be a 404
response = self.client.post(reverse('donation'))
self.assertEqual(response.status_code, 404)
# Logged out -- should still be a 404
self.client.logout()
response = self.client.post(reverse('donation'))
self.assertEqual(response.status_code, 404)
def _donate(self, donation_amount, course_id=None):
"""Simulate a donation to a course.
This covers the entire payment flow, except for the external
payment processor, which is simulated.
Arguments:
donation_amount (unicode): The amount the user is donating.
Keyword Arguments:
course_id (CourseKey): If provided, make a donation to the specific course.
Raises:
AssertionError
"""
# Purchase a single donation item
# Optionally specify a particular course for the donation
params = {'amount': donation_amount}
if course_id is not None:
params['course_id'] = course_id
url = reverse('donation')
response = self.client.post(url, params)
self.assertEqual(response.status_code, 200)
# Use the fake payment implementation to simulate the parameters
# we would receive from the payment processor.
payment_info = json.loads(response.content)
self.assertEqual(payment_info["payment_url"], "/shoppingcart/payment_fake")
# If this is a per-course donation, verify that we're sending
# the course ID to the payment processor.
if course_id is not None:
self.assertEqual(
payment_info["payment_params"]["merchant_defined_data1"],
unicode(course_id)
)
self.assertEqual(
payment_info["payment_params"]["merchant_defined_data2"],
"donation_course"
)
else:
self.assertEqual(payment_info["payment_params"]["merchant_defined_data1"], "")
self.assertEqual(
payment_info["payment_params"]["merchant_defined_data2"],
"donation_general"
)
processor_response_params = PaymentFakeView.response_post_params(payment_info["payment_params"])
# Use the response parameters to simulate a successful payment
url = reverse('shoppingcart.views.postpay_callback')
response = self.client.post(url, processor_response_params)
self.assertRedirects(response, self._receipt_url)
def _assert_receipt_contains(self, expected_text):
"""Load the receipt page and verify that it contains the expected text."""
resp = self.client.get(self._receipt_url)
self.assertContains(resp, expected_text)
@property
def _receipt_url(self):
order_id = Order.objects.get(user=self.user, status="purchased").id
return reverse("shoppingcart.views.show_receipt", kwargs={"ordernum": order_id})
class CSVReportViewsTest(SharedModuleStoreTestCase):
"""
Test suite for CSV Purchase Reporting
"""
@classmethod
def setUpClass(cls):
super(CSVReportViewsTest, cls).setUpClass()
cls.course = CourseFactory.create(org='MITx', number='999', display_name='Robot Super Course')
cls.course_key = cls.course.id
verified_course = CourseFactory.create(org='org', number='test', display_name='Test Course')
cls.verified_course_key = verified_course.id
def setUp(self):
super(CSVReportViewsTest, self).setUp()
self.user = UserFactory.create()
self.user.set_password('password')
self.user.save()
self.cost = 40
self.course_mode = CourseMode(course_id=self.course_key,
mode_slug="honor",
mode_display_name="honor cert",
min_price=self.cost)
self.course_mode.save()
self.course_mode2 = CourseMode(course_id=self.course_key,
mode_slug="verified",
mode_display_name="verified cert",
min_price=self.cost)
self.course_mode2.save()
self.cart = Order.get_cart_for_user(self.user)
self.dl_grp = Group(name=settings.PAYMENT_REPORT_GENERATOR_GROUP)
self.dl_grp.save()
def login_user(self):
"""
        Helper fn to log in self.user
"""
self.client.login(username=self.user.username, password="password")
def add_to_download_group(self, user):
"""
        Helper fn to add self.user to the group that's allowed to download the report CSV
"""
user.groups.add(self.dl_grp)
def test_report_csv_no_access(self):
self.login_user()
response = self.client.get(reverse('payment_csv_report'))
self.assertEqual(response.status_code, 403)
def test_report_csv_bad_method(self):
self.login_user()
self.add_to_download_group(self.user)
response = self.client.put(reverse('payment_csv_report'))
self.assertEqual(response.status_code, 400)
@patch('shoppingcart.views.render_to_response', render_mock)
def test_report_csv_get(self):
self.login_user()
self.add_to_download_group(self.user)
response = self.client.get(reverse('payment_csv_report'))
((template, context), unused_kwargs) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/download_report.html')
self.assertFalse(context['total_count_error'])
self.assertFalse(context['date_fmt_error'])
self.assertIn("Download CSV Reports", response.content.decode('UTF-8'))
@patch('shoppingcart.views.render_to_response', render_mock)
def test_report_csv_bad_date(self):
self.login_user()
self.add_to_download_group(self.user)
response = self.client.post(reverse('payment_csv_report'), {'start_date': 'BAD', 'end_date': 'BAD', 'requested_report': 'itemized_purchase_report'})
((template, context), unused_kwargs) = render_mock.call_args
self.assertEqual(template, 'shoppingcart/download_report.html')
self.assertFalse(context['total_count_error'])
self.assertTrue(context['date_fmt_error'])
self.assertIn("There was an error in your date input. It should be formatted as YYYY-MM-DD",
response.content.decode('UTF-8'))
def test_report_csv_itemized(self):
report_type = 'itemized_purchase_report'
start_date = '1970-01-01'
end_date = '2100-01-01'
PaidCourseRegistration.add_to_order(self.cart, self.course_key, mode_slug=self.course_mode.mode_slug)
self.cart.purchase()
self.login_user()
self.add_to_download_group(self.user)
response = self.client.post(reverse('payment_csv_report'), {'start_date': start_date,
'end_date': end_date,
'requested_report': report_type})
self.assertEqual(response['Content-Type'], 'text/csv')
report = initialize_report(report_type, start_date, end_date)
self.assertIn(",".join(report.header()), response.content)
self.assertIn(
",1,purchased,1,40.00,40.00,usd,Registration for Course: Robot Super Course,",
response.content
)
def test_report_csv_university_revenue_share(self):
report_type = 'university_revenue_share'
start_date = '1970-01-01'
end_date = '2100-01-01'
start_letter = 'A'
end_letter = 'Z'
self.login_user()
self.add_to_download_group(self.user)
response = self.client.post(reverse('payment_csv_report'), {'start_date': start_date,
'end_date': end_date,
'start_letter': start_letter,
'end_letter': end_letter,
'requested_report': report_type})
self.assertEqual(response['Content-Type'], 'text/csv')
report = initialize_report(report_type, start_date, end_date, start_letter, end_letter)
self.assertIn(",".join(report.header()), response.content)
class UtilFnsTest(TestCase):
"""
Tests for utility functions in views.py
"""
def setUp(self):
super(UtilFnsTest, self).setUp()
self.user = UserFactory.create()
def test_can_download_report_no_group(self):
"""
Group controlling perms is not present
"""
self.assertFalse(_can_download_report(self.user))
def test_can_download_report_not_member(self):
"""
User is not part of group controlling perms
"""
Group(name=settings.PAYMENT_REPORT_GENERATOR_GROUP).save()
self.assertFalse(_can_download_report(self.user))
def test_can_download_report(self):
"""
User is part of group controlling perms
"""
grp = Group(name=settings.PAYMENT_REPORT_GENERATOR_GROUP)
grp.save()
self.user.groups.add(grp)
self.assertTrue(_can_download_report(self.user))
def test_get_date_from_str(self):
test_str = "2013-10-01"
date = _get_date_from_str(test_str)
self.assertEqual(2013, date.year)
self.assertEqual(10, date.month)
self.assertEqual(1, date.day)
| agpl-3.0 | -6,718,258,849,561,084,000 | 44.772056 | 164 | 0.647047 | false |
iot-factory/synapse | tests/handlers/test_typing.py | 1 | 13174 | # -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests import unittest
from twisted.internet import defer
from mock import Mock, call, ANY
import json
from ..utils import (
MockHttpResource, MockClock, DeferredMockCallable, setup_test_homeserver
)
from synapse.api.errors import AuthError
from synapse.handlers.typing import TypingNotificationHandler
from synapse.storage.transactions import DestinationsTable
from synapse.types import UserID
def _expect_edu(destination, edu_type, content, origin="test"):
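    """Build the transaction dict expected for a single EDU send."""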
return {
"origin": origin,
"origin_server_ts": 1000000,
"pdus": [],
"edus": [
{
"edu_type": edu_type,
"content": content,
}
],
"pdu_failures": [],
}
def _make_edu_json(origin, edu_type, content):
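    """Serialize an incoming EDU transaction as its federation JSON body."""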
return json.dumps(_expect_edu("test", edu_type, content, origin=origin))
class JustTypingNotificationHandlers(object):
def __init__(self, hs):
self.typing_notification_handler = TypingNotificationHandler(hs)
class TypingNotificationsTestCase(unittest.TestCase):
"""Tests typing notifications to rooms."""
@defer.inlineCallbacks
def setUp(self):
self.clock = MockClock()
self.mock_http_client = Mock(spec=[])
self.mock_http_client.put_json = DeferredMockCallable()
self.mock_federation_resource = MockHttpResource()
mock_notifier = Mock(spec=["on_new_event"])
self.on_new_event = mock_notifier.on_new_event
self.auth = Mock(spec=[])
hs = yield setup_test_homeserver(
auth=self.auth,
clock=self.clock,
datastore=Mock(spec=[
# Bits that Federation needs
"prep_send_transaction",
"delivered_txn",
"get_received_txn_response",
"set_received_txn_response",
"get_destination_retry_timings",
]),
handlers=None,
notifier=mock_notifier,
resource_for_client=Mock(),
resource_for_federation=self.mock_federation_resource,
http_client=self.mock_http_client,
keyring=Mock(),
)
hs.handlers = JustTypingNotificationHandlers(hs)
self.handler = hs.get_handlers().typing_notification_handler
self.event_source = hs.get_event_sources().sources["typing"]
self.datastore = hs.get_datastore()
retry_timings_res = {
"destination": "",
"retry_last_ts": 0,
"retry_interval": 0,
}
self.datastore.get_destination_retry_timings.return_value = (
defer.succeed(retry_timings_res)
)
def get_received_txn_response(*args):
return defer.succeed(None)
self.datastore.get_received_txn_response = get_received_txn_response
self.room_id = "a-room"
# Mock the RoomMemberHandler
hs.handlers.room_member_handler = Mock(spec=[])
self.room_member_handler = hs.handlers.room_member_handler
self.room_members = []
def get_rooms_for_user(user):
if user in self.room_members:
return defer.succeed([self.room_id])
else:
return defer.succeed([])
self.room_member_handler.get_rooms_for_user = get_rooms_for_user
def get_room_members(room_id):
if room_id == self.room_id:
return defer.succeed(self.room_members)
else:
return defer.succeed([])
self.room_member_handler.get_room_members = get_room_members
def get_joined_rooms_for_user(user):
if user in self.room_members:
return defer.succeed([self.room_id])
else:
return defer.succeed([])
self.room_member_handler.get_joined_rooms_for_user = get_joined_rooms_for_user
@defer.inlineCallbacks
def fetch_room_distributions_into(room_id, localusers=None,
remotedomains=None, ignore_user=None):
members = yield get_room_members(room_id)
for member in members:
if ignore_user is not None and member == ignore_user:
continue
if hs.is_mine(member):
if localusers is not None:
localusers.add(member)
else:
if remotedomains is not None:
remotedomains.add(member.domain)
self.room_member_handler.fetch_room_distributions_into = (
fetch_room_distributions_into)
def check_joined_room(room_id, user_id):
if user_id not in [u.to_string() for u in self.room_members]:
raise AuthError(401, "User is not in the room")
self.auth.check_joined_room = check_joined_room
# Some local users to test with
self.u_apple = UserID.from_string("@apple:test")
self.u_banana = UserID.from_string("@banana:test")
# Remote user
self.u_onion = UserID.from_string("@onion:farm")
@defer.inlineCallbacks
def test_started_typing_local(self):
self.room_members = [self.u_apple, self.u_banana]
self.assertEquals(self.event_source.get_current_key(), 0)
yield self.handler.started_typing(
target_user=self.u_apple,
auth_user=self.u_apple,
room_id=self.room_id,
timeout=20000,
)
self.on_new_event.assert_has_calls([
call('typing_key', 1, rooms=[self.room_id]),
])
self.assertEquals(self.event_source.get_current_key(), 1)
events = yield self.event_source.get_new_events(
room_ids=[self.room_id],
from_key=0,
)
self.assertEquals(
events[0],
[
{"type": "m.typing",
"room_id": self.room_id,
"content": {
"user_ids": [self.u_apple.to_string()],
}},
]
)
@defer.inlineCallbacks
def test_started_typing_remote_send(self):
self.room_members = [self.u_apple, self.u_onion]
put_json = self.mock_http_client.put_json
put_json.expect_call_and_return(
call("farm",
path="/_matrix/federation/v1/send/1000000/",
data=_expect_edu("farm", "m.typing",
content={
"room_id": self.room_id,
"user_id": self.u_apple.to_string(),
"typing": True,
}
),
json_data_callback=ANY,
long_retries=True,
),
defer.succeed((200, "OK"))
)
yield self.handler.started_typing(
target_user=self.u_apple,
auth_user=self.u_apple,
room_id=self.room_id,
timeout=20000,
)
yield put_json.await_calls()
@defer.inlineCallbacks
def test_started_typing_remote_recv(self):
self.room_members = [self.u_apple, self.u_onion]
self.assertEquals(self.event_source.get_current_key(), 0)
yield self.mock_federation_resource.trigger("PUT",
"/_matrix/federation/v1/send/1000000/",
_make_edu_json("farm", "m.typing",
content={
"room_id": self.room_id,
"user_id": self.u_onion.to_string(),
"typing": True,
}
)
)
self.on_new_event.assert_has_calls([
call('typing_key', 1, rooms=[self.room_id]),
])
self.assertEquals(self.event_source.get_current_key(), 1)
events = yield self.event_source.get_new_events(
room_ids=[self.room_id],
from_key=0
)
self.assertEquals(
events[0],
[
{"type": "m.typing",
"room_id": self.room_id,
"content": {
"user_ids": [self.u_onion.to_string()],
}},
]
)
@defer.inlineCallbacks
def test_stopped_typing(self):
self.room_members = [self.u_apple, self.u_banana, self.u_onion]
put_json = self.mock_http_client.put_json
put_json.expect_call_and_return(
call("farm",
path="/_matrix/federation/v1/send/1000000/",
data=_expect_edu("farm", "m.typing",
content={
"room_id": self.room_id,
"user_id": self.u_apple.to_string(),
"typing": False,
}
),
json_data_callback=ANY,
long_retries=True,
),
defer.succeed((200, "OK"))
)
# Gut-wrenching
from synapse.handlers.typing import RoomMember
member = RoomMember(self.room_id, self.u_apple)
self.handler._member_typing_until[member] = 1002000
self.handler._member_typing_timer[member] = (
self.clock.call_later(1002, lambda: 0)
)
self.handler._room_typing[self.room_id] = set((self.u_apple,))
self.assertEquals(self.event_source.get_current_key(), 0)
yield self.handler.stopped_typing(
target_user=self.u_apple,
auth_user=self.u_apple,
room_id=self.room_id,
)
self.on_new_event.assert_has_calls([
call('typing_key', 1, rooms=[self.room_id]),
])
yield put_json.await_calls()
self.assertEquals(self.event_source.get_current_key(), 1)
events = yield self.event_source.get_new_events(
room_ids=[self.room_id],
from_key=0,
)
self.assertEquals(
events[0],
[
{"type": "m.typing",
"room_id": self.room_id,
"content": {
"user_ids": [],
}},
]
)
@defer.inlineCallbacks
def test_typing_timeout(self):
self.room_members = [self.u_apple, self.u_banana]
self.assertEquals(self.event_source.get_current_key(), 0)
yield self.handler.started_typing(
target_user=self.u_apple,
auth_user=self.u_apple,
room_id=self.room_id,
timeout=10000,
)
self.on_new_event.assert_has_calls([
call('typing_key', 1, rooms=[self.room_id]),
])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 1)
events = yield self.event_source.get_new_events(
room_ids=[self.room_id],
from_key=0,
)
self.assertEquals(
events[0],
[
{"type": "m.typing",
"room_id": self.room_id,
"content": {
"user_ids": [self.u_apple.to_string()],
}},
]
)
self.clock.advance_time(11)
self.on_new_event.assert_has_calls([
call('typing_key', 2, rooms=[self.room_id]),
])
self.assertEquals(self.event_source.get_current_key(), 2)
events = yield self.event_source.get_new_events(
room_ids=[self.room_id],
from_key=1,
)
self.assertEquals(
events[0],
[
{"type": "m.typing",
"room_id": self.room_id,
"content": {
"user_ids": [],
}},
]
)
# SYN-230 - see if we can still set after timeout
yield self.handler.started_typing(
target_user=self.u_apple,
auth_user=self.u_apple,
room_id=self.room_id,
timeout=10000,
)
self.on_new_event.assert_has_calls([
call('typing_key', 3, rooms=[self.room_id]),
])
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 3)
events = yield self.event_source.get_new_events(
room_ids=[self.room_id],
from_key=0,
)
self.assertEquals(
events[0],
[
{"type": "m.typing",
"room_id": self.room_id,
"content": {
"user_ids": [self.u_apple.to_string()],
}},
]
)
| apache-2.0 | 8,800,060,916,095,124,000 | 30.668269 | 86 | 0.528237 | false |
tsuru/healthcheck-as-a-service | tests/test_api.py | 1 | 9751 | # Copyright 2015 healthcheck-as-a-service authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import unittest
import json
import mock
import inspect
import os
from healthcheck import api, backends
from . import managers
class APITestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.api = api.app.test_client()
cls.manager = managers.FakeManager()
api.get_manager = lambda: cls.manager
def setUp(self):
self.manager.new("hc")
def tearDown(self):
self.manager.remove("hc")
def test_add_url(self):
resp = self.api.post(
"/resources/hc/url",
data=json.dumps({"url": "http://bla.com"})
)
self.assertEqual(201, resp.status_code)
self.assertIn(
{"url": "http://bla.com", "expected_string": None, "comment": ""},
self.manager.healthchecks["hc"]["urls"]
)
def test_add_url_expected_string(self):
resp = self.api.post(
"/resources/hc/url",
data=json.dumps({"url": "http://blabla.com",
"expected_string": "WORKING"})
)
self.assertEqual(201, resp.status_code)
self.assertIn(
{"url": "http://blabla.com", "expected_string": "WORKING", "comment": ""},
self.manager.healthchecks["hc"]["urls"]
)
def test_add_url_comment(self):
resp = self.api.post(
"/resources/hc/url",
data=json.dumps({"url": "http://blabla.com", "comment": "ble"})
)
self.assertEqual(201, resp.status_code)
self.assertIn(
{"url": "http://blabla.com", "expected_string": None, "comment": "ble"},
self.manager.healthchecks["hc"]["urls"]
)
def test_add_url_bad_request(self):
resp = self.api.post(
"/resources/hc/url",
)
self.assertEqual(400, resp.status_code)
self.assertEqual(resp.data, 'url is required')
resp = self.api.post(
"/resources/hc/url",
data=json.dumps({})
)
self.assertEqual(400, resp.status_code)
self.assertEqual(resp.data, 'url is required')
def test_list_urls(self):
self.manager.add_url("hc", "http://bla.com")
resp = self.api.get(
"/resources/hc/url",
)
self.assertEqual(200, resp.status_code)
self.assertIn(
"http://bla.com",
resp.data
)
def test_remove_url(self):
self.manager.add_url("hc", "http://bla.com/")
resp = self.api.delete("/resources/hc/url",
data=json.dumps({"url": "http://bla.com/"}))
self.assertEqual(204, resp.status_code)
self.assertNotIn(
"http://bla.com/",
self.manager.healthchecks["hc"]["urls"]
)
def test_remove_url_no_data(self):
resp = self.api.delete("/resources/hc/url")
self.assertEqual(400, resp.status_code)
self.assertEqual("url is required", resp.data)
def test_remove_url_invalid_data(self):
resp = self.api.delete("/resources/hc/url", data={})
self.assertEqual(400, resp.status_code)
self.assertEqual("url is required", resp.data)
def test_remove_url_invalid_url(self):
resp = self.api.delete("/resources/hc/url",
data=json.dumps({"url": "http://url-not-exist.com/"}))
self.assertEqual(404, resp.status_code)
self.assertEqual("URL not found.", resp.data)
def test_add_watcher(self):
resp = self.api.post(
"/resources/hc/watcher",
data=json.dumps({"watcher": "[email protected]"})
)
self.assertEqual(201, resp.status_code)
self.assertIn(
"[email protected]",
self.manager.healthchecks["hc"]["users"]
)
def test_add_watcher_with_password(self):
resp = self.api.post(
"/resources/hc/watcher",
data=json.dumps({
"watcher": "[email protected]",
"password": "teste",
})
)
self.assertEqual(201, resp.status_code)
self.assertIn(
"[email protected]",
self.manager.healthchecks["hc"]["users"]
)
def test_add_watcher_bad_request(self):
resp = self.api.post("/resources/hc/watcher")
self.assertEqual(400, resp.status_code)
self.assertEqual(resp.data, "watcher is required")
resp = self.api.post("/resources/hc/watcher", data=json.dumps({}))
self.assertEqual(400, resp.status_code)
self.assertEqual(resp.data, "watcher is required")
def test_list_watchers(self):
self.manager.add_watcher("hc", "[email protected]")
resp = self.api.get(
"/resources/hc/watcher",
)
self.assertEqual(200, resp.status_code)
self.assertIn(
"[email protected]",
resp.data
)
def test_new(self):
resp = self.api.post(
"/resources",
data={"name": "other"}
)
self.assertEqual(201, resp.status_code)
self.assertIn("other", self.manager.healthchecks)
def test_bind_unit(self):
resp = self.api.post("/resources/name/bind")
self.assertEqual(201, resp.status_code)
def test_bind_app(self):
resp = self.api.post("/resources/name/bind-app")
self.assertEqual(200, resp.status_code)
def test_unbind_unit(self):
resp = self.api.delete("/resources/name/bind")
self.assertEqual(200, resp.status_code)
def test_unbind_app(self):
resp = self.api.delete("/resources/name/bind-app")
self.assertEqual(200, resp.status_code)
def test_remove(self):
self.manager.new("blabla")
resp = self.api.delete("/resources/blabla")
self.assertEqual(204, resp.status_code)
self.assertNotIn("blabla", self.manager.healthchecks)
def test_remove_watcher_compat(self):
self.manager.add_watcher("hc", "[email protected]")
resp = self.api.delete("/resources/hc/XanythingX/watcher/[email protected]")
self.assertEqual(204, resp.status_code)
self.assertNotIn(
"[email protected]",
self.manager.healthchecks["hc"]["users"]
)
def test_remove_watcher(self):
self.manager.add_watcher("hc", "[email protected]")
resp = self.api.delete("/resources/hc/watcher/[email protected]")
self.assertEqual(204, resp.status_code)
self.assertNotIn(
"[email protected]",
self.manager.healthchecks["hc"]["users"]
)
def test_plugin(self):
resp = self.api.get("/plugin")
self.assertEqual(200, resp.status_code)
from healthcheck import plugin
expected_source = inspect.getsource(plugin)
self.assertEqual(expected_source, resp.data)
def test_add_group(self):
resp = self.api.post(
"/resources/hc/groups",
data=json.dumps({"group": "mygroup"})
)
self.assertEqual(201, resp.status_code)
self.assertIn(
"mygroup",
self.manager.healthchecks["hc"]["host_groups"]
)
def test_add_group_bad_request(self):
resp = self.api.post("/resources/hc/groups")
self.assertEqual(400, resp.status_code)
self.assertEqual(resp.data, "group is required")
resp = self.api.post("/resources/hc/groups", data=json.dumps({}))
self.assertEqual(400, resp.status_code)
self.assertEqual(resp.data, "group is required")
def test_list_service_groups(self):
resp = self.api.get(
"/resources/hc/servicegroups",
)
self.assertEqual(200, resp.status_code)
self.assertIn(
"mygroup",
resp.data
)
def test_list_service_groups_keyword(self):
resp = self.api.get(
"/resources/hc/servicegroups?keyword=my",
)
self.assertEqual(200, resp.status_code)
self.assertIn(
"mygroup",
resp.data
)
self.assertNotIn(
"anothergroup",
resp.data
)
def test_list_groups(self):
self.manager.add_group("hc", "mygroup")
resp = self.api.get(
"/resources/hc/groups",
)
self.assertEqual(200, resp.status_code)
self.assertIn(
"mygroup",
resp.data
)
def test_remove_group(self):
self.manager.add_group("hc", "mygroup")
resp = self.api.delete("/resources/hc/groups",
data=json.dumps({"group": "mygroup"}))
self.assertEqual(204, resp.status_code)
self.assertNotIn(
"mygroup",
self.manager.healthchecks["hc"]["host_groups"]
)
class GetManagerTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
reload(api)
@mock.patch("pyzabbix.ZabbixAPI")
def test_get_manager(self, zabbix_mock):
os.environ["ZABBIX_URL"] = ""
os.environ["ZABBIX_USER"] = ""
os.environ["ZABBIX_PASSWORD"] = ""
os.environ["ZABBIX_HOST"] = ""
os.environ["ZABBIX_HOST_GROUP"] = ""
manager = api.get_manager()
self.assertIsInstance(manager, backends.Zabbix)
@mock.patch("healthcheck.backends.Zabbix")
def test_get_manager_that_does_not_exist(self, zabbix_mock):
os.environ["API_MANAGER"] = "doesnotexist"
with self.assertRaises(ValueError):
api.get_manager()
| bsd-3-clause | -8,438,007,150,804,135,000 | 31.503333 | 86 | 0.56948 | false |
smartsheet-platform/smartsheet-python-sdk | smartsheet/models/contact.py | 1 | 2222 | # pylint: disable=C0111,R0902,R0904,R0912,R0913,R0915,E1101
# Smartsheet Python SDK.
#
# Copyright 2018 Smartsheet.com, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import json
from ..types import *
from ..util import serialize
from ..util import deserialize
class Contact(object):
"""Smartsheet Contact data model."""
def __init__(self, props=None, base_obj=None):
"""Initialize the Contact model."""
self._base = None
if base_obj is not None:
self._base = base_obj
self._email = String()
self._id_ = String()
self._name = String()
if props:
deserialize(self, props)
# requests package Response object
self.request_response = None
self.__initialized = True
def __getattr__(self, key):
if key == 'id':
return self.id_
else:
raise AttributeError(key)
def __setattr__(self, key, value):
if key == 'id':
self.id_ = value
else:
super(Contact, self).__setattr__(key, value)
@property
def email(self):
return self._email.value
@email.setter
def email(self, value):
self._email.value = value
@property
def id_(self):
return self._id_.value
@id_.setter
def id_(self, value):
self._id_.value = value
@property
def name(self):
return self._name.value
@name.setter
def name(self, value):
self._name.value = value
def to_dict(self):
return serialize(self)
def to_json(self):
return json.dumps(self.to_dict())
def __str__(self):
return self.to_json()
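# Usage sketch (illustrative; the field values are assumed, not from the SDK):
#
#     contact = Contact({'email': '[email protected]', 'id': 'AAA111', 'name': 'Jane'})
#     contact.id == contact.id_    # True: 'id' is aliased to 'id_'
#     contact.to_dict()            # plain dict via serialize()
#     str(contact)                 # JSON string via to_json()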
| apache-2.0 | -6,362,989,200,886,816,000 | 23.966292 | 75 | 0.613411 | false |
originaltebas/chmembers | migrations/versions/f7f3dbae07bb_.py | 1 | 1302 | """empty message
Revision ID: f7f3dbae07bb
Revises: 1e4e2ceb943c
Create Date: 2019-06-05 17:52:42.855915
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'f7f3dbae07bb'
down_revision = '1e4e2ceb943c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('asistencias', 'asistio',
existing_type=mysql.TINYINT(display_width=1),
type_=sa.Boolean(),
existing_nullable=False)
op.alter_column('miembros', 'hoja_firmada',
existing_type=mysql.TINYINT(display_width=1),
type_=sa.Boolean(),
existing_nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('miembros', 'hoja_firmada',
existing_type=sa.Boolean(),
type_=mysql.TINYINT(display_width=1),
existing_nullable=True)
op.alter_column('asistencias', 'asistio',
existing_type=sa.Boolean(),
type_=mysql.TINYINT(display_width=1),
existing_nullable=False)
# ### end Alembic commands ###
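# Illustrative usage with Flask-Migrate/Alembic (command names assumed):
#   flask db upgrade     # applies this revision (f7f3dbae07bb)
#   flask db downgrade   # reverts to revision 1e4e2ceb943c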
| mit | -7,327,431,518,141,573,000 | 30 | 65 | 0.62212 | false |
thirdkey-solutions/multisig-recovery | multisigrecovery/commands.py | 1 | 5846 | from multisigcore.providers.insight import InsightBatchService
from multisigcore.hierarchy import MasterKey
from .branch import Branch, AccountPubkeys
from .recovery import CachedRecovery
from .batch import Batch
import json
from pycoin.encoding import EncodingError
try:
from termcolor import colored
except ImportError:
    def colored(text, color=None): return text  # fallback: return the text uncolored
__all__ = ['address', 'create', 'validate', 'cosign', 'broadcast', 'ScriptInputError']
class ScriptInputError(Exception):
pass
def __get_insight(url):
insight = InsightBatchService(url)
try:
insight.get_blockchain_tip()
except Exception:
raise ScriptInputError('Insight node at %s not reachable' % url)
return insight
def __parse_key_sources(key_sources_string, register=None):
try:
strings = key_sources_string.split(',')
return [AccountPubkeys.parse_string_to_account_key_source(string, register_oracle_accounts_file=register) for string in strings]
except ValueError, err:
raise ScriptInputError(err.message)
def __get_template(string):
return getattr(Branch, '%s_account' % string)
def __check_source_strings(args):
def check_cc_last(sources_str):
for source_str in sources_str.split(',')[:-1]:
if 'digitaloracle' in source_str:
raise ScriptInputError('CryptoCorp API always has to be the last account key source\nChange sources order in --origin or --destination')
if args.origin != args.destination and 'digitaloracle' in args.destination and not args.register:
raise ScriptInputError('CryptoCorp API in destination branch but missing --register\nUse --register samples/account-registrations.json')
check_cc_last(args.destination)
check_cc_last(args.origin)
def __add_known_accounts(cached_recovery, known_accounts_file):
with open(known_accounts_file) as fp:
known_accounts = json.load(fp)
for account_number, indexes in known_accounts.items():
if indexes is not None and 'external_leafs' in indexes and 'internal_leafs' in indexes:
cached_recovery.add_known_account(account_number, external_leafs=indexes['external_leafs'], internal_leafs=indexes['internal_leafs'])
else:
cached_recovery.add_known_account(account_number)
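# Illustrative shape of the known-accounts JSON file parsed above (assumed):
# {
#     "0": {"external_leafs": [0, 1, 2], "internal_leafs": [0, 1]},
#     "7": null
# }
# Entries mapping to null (or missing either leaf list) are added without
# explicit leaf indexes.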
############ create, cosign, broadcast methods below ###########################################
def address(args):
"""Will return address of specified path in a branch. Used to manyally cross-check that you are working on correct branch."""
#insight = __get_insight(args.insight)
origin_key_sources = __parse_key_sources(args.origin)
origin_branch = Branch(origin_key_sources, account_template=__get_template(args.origin_template), provider=None)
path = args.path.split('/')
if len(path) != 3 or sum([number.isdigit() for number in path]) != 3:
print "! --path must be in format 0/0/0, digits only"
else:
path = [int(digit) for digit in path]
account = origin_branch.account(int(path[0]))
print "Account %s, address %s/%s: %s" % (path[0], path[1], path[2], account.address(path[2], change=bool(path[1])))
def create(args):
insight = __get_insight(args.insight)
__check_source_strings(args)
# setup
origin_key_sources = __parse_key_sources(args.origin)
origin_branch = Branch(origin_key_sources, account_template=__get_template(args.origin_template), provider=insight)
destination_key_sources = __parse_key_sources(args.destination, register=args.register)
destination_branch = Branch(destination_key_sources, account_template=__get_template(args.destination_template), provider=insight)
cached_recovery = CachedRecovery(origin_branch, destination_branch, provider=insight)
if args.accounts:
__add_known_accounts(cached_recovery, args.accounts)
# recovery
cached_recovery.recover_origin_accounts()
cached_recovery.recover_destination_accounts()
cached_recovery.create_and_sign_txs()
print "Total to recover in this branch: %d" % cached_recovery.total_to_recover
if cached_recovery.total_to_recover:
cached_recovery.export_to_batch(args.save)
def validate(args):
try:
insight = __get_insight(args.insight)
except ScriptInputError:
raw_input("Insight node at " + args.insight + " not reachable. You can start this script again with --insight SOME_URL. Hit ENTER to continue with offline validation, or CTRL+Z to exit...")
insight = None
print ""
try:
batch = Batch.from_file(args.load)
batch.validate(provider=insight)
error = None
except ValueError as err:
print "Validation failed", err
error = err
print " ____"
print " |"
print " | Valid : ", "False, with error: " + str(error) if error else 'True'
print " | Transactions : ", len(batch.batchable_txs)
print " | Merkle root (calc) : ", batch.build_merkle_root()
print " | Merkle root (header) : ", batch.merkle_root
print " | Total out : ", batch.total_out, "satoshi", "-",batch.total_out/(100.0 * 10**6), "BTC"
if not error and insight:
print " | Total in : ", batch.total_in, "satoshi", "-",batch.total_in/(100.0 * 10**6), "BTC"
print " | Total fee : ", batch.total_fee, "satoshi", "-",batch.total_fee/(100.0 * 10**6), "BTC"
print " | Fee Percent : ", batch.total_fee * 100.00 / batch.total_out
print " |____"
print ""
def cosign(args):
try:
backup_mpk = MasterKey.from_key(args.private)
except EncodingError:
backup_mpk = MasterKey.from_seed_hex(args.private)
batch = Batch.from_file(args.load)
batch.validate() # todo - validation
original_merkle_root = batch.merkle_root
batch.sign(master_private_key=backup_mpk)
if batch.merkle_root != original_merkle_root:
batch.to_file(args.save)
else:
print "! All signatures failed: wrong private key used, or malformed batch"
def broadcast(args):
insight = __get_insight(args.insight)
batch = Batch.from_file(args.load)
batch.validate() # todo - validation
batch.broadcast(provider=insight)
| bsd-2-clause | 2,622,204,806,939,911,700 | 37.715232 | 191 | 0.712624 | false |
miniworld-project/miniworld_core | miniworld/model/spatial/Roads.py | 1 | 4215 | # encoding: utf-8
from miniworld.model.singletons.Singletons import singletons
from collections import OrderedDict
import geojson
from .Road import Road
__author__ = "Patrick Lampe"
__email__ = "uni at lampep.de"
class Roads:
"""
Attributes
----------
list_of_roads : list
feature_coll_roads : FeatureCollection
    geo_json : str
list_of_roads_with_quality_more_or_equal_than_one_for_car : list
list_of_roads_with_quality_more_or_equal_than_one_for_bike : list
"""
def __init__(self):
cursor = singletons.spatial_singleton.get_connection_to_database().cursor()
cursor.execute("SELECT source, target, car_rev, car, bike_rev, bike, foot FROM edges")
self.list_of_roads = [self.__convert_sql_line_to_road(line) for line in cursor.fetchall()]
self.feature_coll_roads = geojson.FeatureCollection(
[self.__get_geo_json_object_for_road(road) for road in self.list_of_roads])
self.geo_json = geojson.dumps(self.feature_coll_roads)
        self.list_of_roads_with_quality_more_or_equal_than_one_for_car = [
            road for road in self.list_of_roads
            if road.has_more_or_equal_qualitaty_for_car_then(1)
        ]
        self.list_of_roads_with_quality_more_or_equal_than_one_for_bike = [
            road for road in self.list_of_roads
            if road.has_more_or_equal_qualitaty_for_bike_then(1)
        ]
def get_geo_json(self):
"""
Returns
-------
geo_json
of all existing roads
"""
return self.geo_json
def get_list_of_roads_with_quality_more_or_equal_than_x_for_car(self, quality):
"""
Parameters
----------
quality : int
Returns
-------
list
"""
return [road for road in self.list_of_roads if road.has_more_or_equal_qualitaty_for_car_then(quality)]
def get_list_of_roads_with_quality_more_or_equal_than_x_for_bike(self, quality):
"""
Parameters
----------
quality : int
Returns
-------
list
"""
return [road for road in self.list_of_roads if road.has_more_or_equal_qualitaty_for_bike_then(quality)]
def get_list_of_next_roads_with_quality_restriction_for_cars(self, end_point, quality):
"""
Parameters
----------
        end_point : MapNode
        quality : int
Returns
-------
list
"""
return [road for road in self.list_of_roads if
road.is_road_direct_rechable_from_given_point_with_quality_restrictions_for_cars(end_point, quality)]
def get_list_of_next_roads_with_quality_restriction_for_bike(self, end_point, quality):
"""
Parameters
----------
        end_point : MapNode
        quality : int
Returns
-------
list
"""
return [road for road in self.list_of_roads if
road.is_road_direct_rechable_from_given_point_with_quality_restrictions_for_bike(end_point, quality)]
def __convert_sql_line_to_road(self, line):
return Road(line[0], line[1], line[2], line[3], line[4], line[5], line[6])
def __get_geo_json_object_for_road(self, road):
source = road.get_start_point()
target = road.get_end_point()
quality = road.get_car_quality()
return OrderedDict(
type="Feature",
geometry=OrderedDict(
type="LineString",
coordinates=[[float(source.get_lon()), float(source.get_lat())],
[float(target.get_lon()), float(target.get_lat())]]
),
properties=OrderedDict(
type=str(quality),
name="Coors Field",
amenity="Baseball Stadium"
)
)
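    # Illustrative Feature emitted by __get_geo_json_object_for_road
    # (coordinate values are assumed examples, not from the database):
    # {"type": "Feature",
    #  "geometry": {"type": "LineString",
    #               "coordinates": [[8.6821, 50.1109], [8.6830, 50.1120]]},
    #  "properties": {"type": "2", "name": "Coors Field",
    #                 "amenity": "Baseball Stadium"}}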
| mit | 180,887,287,657,060,220 | 35.025641 | 122 | 0.515777 | false |
laurybueno/MoniBus | mapa/urls.py | 1 | 1112 | """mapa URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from cronotacografo.views import RegistroViewSet
router = routers.SimpleRouter()
router.register(r'registro', RegistroViewSet)
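# SimpleRouter generates the standard list/detail routes for the viewset;
# assuming RegistroViewSet is a ModelViewSet, the resulting endpoints are
# (illustrative):
#   GET, POST                 /api/registro/
#   GET, PUT, PATCH, DELETE   /api/registro/<pk>/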
urlpatterns = [
url(r'^admin/', admin.site.urls),
    # API endpoints
url(r'^api/', include(router.urls)),
    # API authentication
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
| agpl-3.0 | -3,757,937,932,243,510,300 | 31.647059 | 82 | 0.70991 | false |
googleapis/python-iot | samples/api-client/manager/manager.py | 1 | 37823 | #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Example of using the Google Cloud IoT Core device manager to administer
devices.
Usage example:
python manager.py \\
--project_id=my-project-id \\
--cloud_region=us-central1 \\
--service_account_json=$HOME/service_account.json \\
list-registries
"""
import argparse
import io
import os
import sys
import time
from google.api_core.exceptions import AlreadyExists
from google.cloud import iot_v1
from google.cloud import pubsub
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 as gp_field_mask
from googleapiclient import discovery
from googleapiclient.errors import HttpError
def create_iot_topic(project, topic_name):
"""Creates a PubSub Topic and grants access to Cloud IoT Core."""
pubsub_client = pubsub.PublisherClient()
topic_path = pubsub_client.topic_path(project, topic_name)
topic = pubsub_client.create_topic(topic_path)
policy = pubsub_client.get_iam_policy(topic_path)
policy.bindings.add(
role="roles/pubsub.publisher",
members=["serviceAccount:[email protected]"],
)
pubsub_client.set_iam_policy(topic_path, policy)
return topic
def get_client(service_account_json):
"""Returns an authorized API client by discovering the IoT API and creating
a service object using the service account credentials JSON."""
api_scopes = ["https://www.googleapis.com/auth/cloud-platform"]
api_version = "v1"
discovery_api = "https://cloudiot.googleapis.com/$discovery/rest"
service_name = "cloudiotcore"
credentials = service_account.Credentials.from_service_account_file(
service_account_json
)
scoped_credentials = credentials.with_scopes(api_scopes)
discovery_url = "{}?version={}".format(discovery_api, api_version)
return discovery.build(
service_name,
api_version,
discoveryServiceUrl=discovery_url,
credentials=scoped_credentials,
)
def create_rs256_device(
service_account_json,
project_id,
cloud_region,
registry_id,
device_id,
certificate_file,
):
"""Create a new device with the given id, using RS256 for
authentication."""
# [START iot_create_rsa_device]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# certificate_file = 'path/to/certificate.pem'
client = iot_v1.DeviceManagerClient()
parent = client.registry_path(project_id, cloud_region, registry_id)
with io.open(certificate_file) as f:
certificate = f.read()
# Note: You can have multiple credentials associated with a device.
device_template = {
"id": device_id,
"credentials": [
{
"public_key": {
"format": iot_v1.PublicKeyFormat.RSA_X509_PEM,
"key": certificate,
}
}
],
}
return client.create_device(request={"parent": parent, "device": device_template})
# [END iot_create_rsa_device]
def create_es256_device(
service_account_json,
project_id,
cloud_region,
registry_id,
device_id,
public_key_file,
):
"""Create a new device with the given id, using ES256 for
authentication."""
# [START iot_create_es_device]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# public_key_file = 'path/to/certificate.pem'
client = iot_v1.DeviceManagerClient()
parent = client.registry_path(project_id, cloud_region, registry_id)
with io.open(public_key_file) as f:
public_key = f.read()
# Note: You can have multiple credentials associated with a device.
device_template = {
"id": device_id,
"credentials": [
{
"public_key": {
"format": iot_v1.PublicKeyFormat.ES256_PEM,
"key": public_key,
}
}
],
}
return client.create_device(request={"parent": parent, "device": device_template})
# [END iot_create_es_device]
def create_device(
service_account_json, project_id, cloud_region, registry_id, device_id
):
"""Create a device to bind to a gateway if it does not exist."""
# [START iot_create_device]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# Check that the device doesn't already exist
client = iot_v1.DeviceManagerClient()
exists = False
parent = client.registry_path(project_id, cloud_region, registry_id)
devices = list(client.list_devices(request={"parent": parent}))
for device in devices:
if device.id == device_id:
exists = True
# Create the device
device_template = {
"id": device_id,
"gateway_config": {
"gateway_type": iot_v1.GatewayType.NON_GATEWAY,
"gateway_auth_method": iot_v1.GatewayAuthMethod.ASSOCIATION_ONLY,
},
}
if not exists:
res = client.create_device(
request={"parent": parent, "device": device_template}
)
print("Created Device {}".format(res))
else:
print("Device exists, skipping")
# [END iot_create_device]
def create_unauth_device(
service_account_json, project_id, cloud_region, registry_id, device_id
):
"""Create a new device without authentication."""
# [START iot_create_unauth_device]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
client = iot_v1.DeviceManagerClient()
parent = client.registry_path(project_id, cloud_region, registry_id)
device_template = {
"id": device_id,
}
return client.create_device(request={"parent": parent, "device": device_template})
# [END iot_create_unauth_device]
def delete_device(
service_account_json, project_id, cloud_region, registry_id, device_id
):
"""Delete the device with the given id."""
# [START iot_delete_device]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
print("Delete device")
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
return client.delete_device(request={"name": device_path})
# [END iot_delete_device]
def delete_registry(service_account_json, project_id, cloud_region, registry_id):
"""Deletes the specified registry."""
# [START iot_delete_registry]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
print("Delete registry")
client = iot_v1.DeviceManagerClient()
registry_path = client.registry_path(project_id, cloud_region, registry_id)
try:
client.delete_device_registry(request={"name": registry_path})
print("Deleted registry")
return "Registry deleted"
except HttpError:
print("Error, registry not deleted")
raise
# [END iot_delete_registry]
def get_device(service_account_json, project_id, cloud_region, registry_id, device_id):
"""Retrieve the device with the given id."""
# [START iot_get_device]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
print("Getting device")
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
# See full list of device fields: https://cloud.google.com/iot/docs/reference/cloudiot/rest/v1/projects.locations.registries.devices
# Warning! Use snake_case field names.
field_mask = gp_field_mask.FieldMask(
paths=[
"id",
"name",
"num_id",
"credentials",
"last_heartbeat_time",
"last_event_time",
"last_state_time",
"last_config_ack_time",
"last_config_send_time",
"blocked",
"last_error_time",
"last_error_status",
"config",
"state",
"log_level",
"metadata",
"gateway_config",
]
)
device = client.get_device(request={"name": device_path, "field_mask": field_mask})
print("Id : {}".format(device.id))
print("Name : {}".format(device.name))
print("Credentials:")
if device.credentials is not None:
for credential in device.credentials:
keyinfo = credential.public_key
print("\tcertificate: \n{}".format(keyinfo.key))
if keyinfo.format == 4:
keyformat = "ES256_X509_PEM"
elif keyinfo.format == 3:
keyformat = "RSA_PEM"
elif keyinfo.format == 2:
keyformat = "ES256_PEM"
elif keyinfo.format == 1:
keyformat = "RSA_X509_PEM"
else:
keyformat = "UNSPECIFIED_PUBLIC_KEY_FORMAT"
print("\tformat : {}".format(keyformat))
print("\texpiration: {}".format(credential.expiration_time))
print("Config:")
print("\tdata: {}".format(device.config.binary_data))
print("\tversion: {}".format(device.config.version))
print("\tcloudUpdateTime: {}".format(device.config.cloud_update_time))
return device
# [END iot_get_device]
def get_state(service_account_json, project_id, cloud_region, registry_id, device_id):
"""Retrieve a device's state blobs."""
# [START iot_get_device_state]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
device = client.get_device(request={"name": device_path})
print("Last state: {}".format(device.state))
print("State history")
states = client.list_device_states(request={"name": device_path}).device_states
for state in states:
print("State: {}".format(state))
return states
# [END iot_get_device_state]
def list_devices(service_account_json, project_id, cloud_region, registry_id):
"""List all devices in the registry."""
# [START iot_list_devices]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
print("Listing devices")
client = iot_v1.DeviceManagerClient()
registry_path = client.registry_path(project_id, cloud_region, registry_id)
# See full list of device fields: https://cloud.google.com/iot/docs/reference/cloudiot/rest/v1/projects.locations.registries.devices
# Warning! Use snake_case field names.
field_mask = gp_field_mask.FieldMask(
paths=[
"id",
"name",
"num_id",
"credentials",
"last_heartbeat_time",
"last_event_time",
"last_state_time",
"last_config_ack_time",
"last_config_send_time",
"blocked",
"last_error_time",
"last_error_status",
"config",
"state",
"log_level",
"metadata",
"gateway_config",
]
)
devices = list(
client.list_devices(request={"parent": registry_path, "field_mask": field_mask})
)
for device in devices:
print(device)
return devices
# [END iot_list_devices]
def list_registries(service_account_json, project_id, cloud_region):
"""List all registries in the project."""
# [START iot_list_registries]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
print("Listing Registries")
client = iot_v1.DeviceManagerClient()
parent = f"projects/{project_id}/locations/{cloud_region}"
registries = list(client.list_device_registries(request={"parent": parent}))
for registry in registries:
print("id: {}\n\tname: {}".format(registry.id, registry.name))
return registries
# [END iot_list_registries]
def create_registry(
service_account_json, project_id, cloud_region, pubsub_topic, registry_id
):
"""Creates a registry and returns the result. Returns an empty result if
the registry already exists."""
# [START iot_create_registry]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# pubsub_topic = 'your-pubsub-topic'
# registry_id = 'your-registry-id'
client = iot_v1.DeviceManagerClient()
parent = f"projects/{project_id}/locations/{cloud_region}"
if not pubsub_topic.startswith("projects/"):
pubsub_topic = "projects/{}/topics/{}".format(project_id, pubsub_topic)
body = {
"event_notification_configs": [{"pubsub_topic_name": pubsub_topic}],
"id": registry_id,
}
try:
response = client.create_device_registry(
request={"parent": parent, "device_registry": body}
)
print("Created registry")
return response
except HttpError:
print("Error, registry not created")
raise
except AlreadyExists:
print("Error, registry already exists")
raise
# [END iot_create_registry]
def get_registry(service_account_json, project_id, cloud_region, registry_id):
""" Retrieves a device registry."""
# [START iot_get_registry]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
client = iot_v1.DeviceManagerClient()
registry_path = client.registry_path(project_id, cloud_region, registry_id)
return client.get_device_registry(request={"name": registry_path})
# [END iot_get_registry]
def open_registry(
service_account_json, project_id, cloud_region, pubsub_topic, registry_id
):
"""Gets or creates a device registry."""
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# pubsub_topic = 'your-pubsub-topic'
# registry_id = 'your-registry-id'
print("Creating registry")
try:
response = create_registry(
service_account_json, project_id, cloud_region, pubsub_topic, registry_id
)
except AlreadyExists:
# Device registry already exists. We just re-use the existing one.
print("Registry {} already exists - looking it up instead.".format(registry_id))
response = get_registry(
service_account_json, project_id, cloud_region, registry_id
)
print("Registry {} opened: ".format(response.name))
print(response)
def patch_es256_auth(
service_account_json,
project_id,
cloud_region,
registry_id,
device_id,
public_key_file,
):
"""Patch the device to add an ES256 public key to the device."""
# [START iot_patch_es]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# public_key_file = 'path/to/certificate.pem'
print("Patch device with ES256 certificate")
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
public_key_bytes = ""
with io.open(public_key_file) as f:
public_key_bytes = f.read()
key = iot_v1.PublicKeyCredential(
format=iot_v1.PublicKeyFormat.ES256_PEM, key=public_key_bytes
)
cred = iot_v1.DeviceCredential(public_key=key)
device = client.get_device(request={"name": device_path})
device.id = b""
device.num_id = 0
device.credentials.append(cred)
mask = gp_field_mask.FieldMask()
mask.paths.append("credentials")
return client.update_device(request={"device": device, "update_mask": mask})
# [END iot_patch_es]
def patch_rsa256_auth(
service_account_json,
project_id,
cloud_region,
registry_id,
device_id,
public_key_file,
):
"""Patch the device to add an RSA256 public key to the device."""
# [START iot_patch_rsa]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# public_key_file = 'path/to/certificate.pem'
print("Patch device with RSA256 certificate")
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
public_key_bytes = ""
with io.open(public_key_file) as f:
public_key_bytes = f.read()
key = iot_v1.PublicKeyCredential(
format=iot_v1.PublicKeyFormat.RSA_X509_PEM, key=public_key_bytes
)
cred = iot_v1.DeviceCredential(public_key=key)
device = client.get_device(request={"name": device_path})
device.id = b""
device.num_id = 0
device.credentials.append(cred)
mask = gp_field_mask.FieldMask()
mask.paths.append("credentials")
return client.update_device(request={"device": device, "update_mask": mask})
# [END iot_patch_rsa]
def set_config(
service_account_json,
project_id,
cloud_region,
registry_id,
device_id,
version,
config,
):
# [START iot_set_device_config]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# version = '0'
# config= 'your-config-data'
print("Set device configuration")
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
data = config.encode("utf-8")
return client.modify_cloud_to_device_config(
request={"name": device_path, "binary_data": data, "version_to_update": version}
)
# [END iot_set_device_config]
def get_config_versions(
service_account_json, project_id, cloud_region, registry_id, device_id
):
"""Lists versions of a device config in descending order (newest first)."""
# [START iot_get_device_configs]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
configs = client.list_device_config_versions(request={"name": device_path})
for config in configs.device_configs:
print(
"version: {}\n\tcloudUpdateTime: {}\n\t data: {}".format(
config.version, config.cloud_update_time, config.binary_data
)
)
return configs
# [END iot_get_device_configs]
def get_iam_permissions(service_account_json, project_id, cloud_region, registry_id):
"""Retrieves IAM permissions for the given registry."""
# [START iot_get_iam_policy]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
client = iot_v1.DeviceManagerClient()
registry_path = client.registry_path(project_id, cloud_region, registry_id)
policy = client.get_iam_policy(request={"resource": registry_path})
return policy
# [END iot_get_iam_policy]
def set_iam_permissions(
service_account_json, project_id, cloud_region, registry_id, role, member
):
"""Sets IAM permissions for the given registry to a single role/member."""
# [START iot_set_iam_policy]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# role = 'viewer'
# member = 'group:[email protected]'
client = iot_v1.DeviceManagerClient()
registry_path = client.registry_path(project_id, cloud_region, registry_id)
body = {"bindings": [{"members": [member], "role": role}]}
return client.set_iam_policy(request={"resource": registry_path, "policy": body})
# [END iot_set_iam_policy]
def send_command(
service_account_json, project_id, cloud_region, registry_id, device_id, command
):
"""Send a command to a device."""
# [START iot_send_command]
print("Sending command to device")
client = iot_v1.DeviceManagerClient()
device_path = client.device_path(project_id, cloud_region, registry_id, device_id)
# command = 'Hello IoT Core!'
data = command.encode("utf-8")
return client.send_command_to_device(
request={"name": device_path, "binary_data": data}
)
# [END iot_send_command]
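# Illustrative CLI invocation for send_command (all values assumed):
#   python manager.py \
#       --project_id=my-project-id \
#       --cloud_region=us-central1 \
#       --registry_id=my-registry \
#       --device_id=my-device \
#       --service_account_json=$HOME/service_account.json \
#       --send_command='Hello IoT Core!' \
#       send-command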
def create_gateway(
service_account_json,
project_id,
cloud_region,
registry_id,
device_id,
gateway_id,
certificate_file,
algorithm,
):
"""Create a gateway to bind devices to."""
# [START iot_create_gateway]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# gateway_id = 'your-gateway-id'
# certificate_file = 'path/to/certificate.pem'
# algorithm = 'ES256'
# Check that the gateway doesn't already exist
exists = False
client = iot_v1.DeviceManagerClient()
parent = client.registry_path(project_id, cloud_region, registry_id)
devices = list(client.list_devices(request={"parent": parent}))
for device in devices:
if device.id == gateway_id:
exists = True
print(
"Device: {} : {} : {} : {}".format(
device.id, device.num_id, device.config, device.gateway_config
)
)
with io.open(certificate_file) as f:
certificate = f.read()
if algorithm == "ES256":
certificate_format = iot_v1.PublicKeyFormat.ES256_PEM
else:
certificate_format = iot_v1.PublicKeyFormat.RSA_X509_PEM
# TODO: Auth type
device_template = {
"id": gateway_id,
"credentials": [
{"public_key": {"format": certificate_format, "key": certificate}}
],
"gateway_config": {
"gateway_type": iot_v1.GatewayType.GATEWAY,
"gateway_auth_method": iot_v1.GatewayAuthMethod.ASSOCIATION_ONLY,
},
}
if not exists:
res = client.create_device(
request={"parent": parent, "device": device_template}
)
print("Created Gateway {}".format(res))
else:
print("Gateway exists, skipping")
# [END iot_create_gateway]
def bind_device_to_gateway(
service_account_json, project_id, cloud_region, registry_id, device_id, gateway_id
):
"""Binds a device to a gateway."""
# [START iot_bind_device_to_gateway]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# gateway_id = 'your-gateway-id'
client = iot_v1.DeviceManagerClient()
create_device(
service_account_json, project_id, cloud_region, registry_id, device_id
)
parent = client.registry_path(project_id, cloud_region, registry_id)
res = client.bind_device_to_gateway(
request={"parent": parent, "gateway_id": gateway_id, "device_id": device_id}
)
print("Device Bound! {}".format(res))
# [END iot_bind_device_to_gateway]
def unbind_device_from_gateway(
service_account_json, project_id, cloud_region, registry_id, device_id, gateway_id
):
"""Unbinds a device to a gateway."""
# [START iot_unbind_device_from_gateway]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# device_id = 'your-device-id'
# gateway_id = 'your-gateway-id'
client = iot_v1.DeviceManagerClient()
parent = client.registry_path(project_id, cloud_region, registry_id)
res = client.unbind_device_from_gateway(
request={"parent": parent, "gateway_id": gateway_id, "device_id": device_id}
)
print("Device unbound: {}".format(res))
# [END iot_unbind_device_from_gateway]
def list_gateways(service_account_json, project_id, cloud_region, registry_id):
"""Lists gateways in a registry"""
# [START iot_list_gateways]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
client = iot_v1.DeviceManagerClient()
path = client.registry_path(project_id, cloud_region, registry_id)
mask = gp_field_mask.FieldMask()
mask.paths.append("config")
mask.paths.append("gateway_config")
devices = list(client.list_devices(request={"parent": path, "field_mask": mask}))
for device in devices:
if device.gateway_config is not None:
if device.gateway_config.gateway_type == 1:
print("Gateway ID: {}\n\t{}".format(device.id, device))
# [END iot_list_gateways]
def list_devices_for_gateway(
service_account_json, project_id, cloud_region, registry_id, gateway_id
):
"""List devices bound to a gateway"""
# [START iot_list_devices_for_gateway]
# project_id = 'YOUR_PROJECT_ID'
# cloud_region = 'us-central1'
# registry_id = 'your-registry-id'
# gateway_id = 'your-gateway-id'
client = iot_v1.DeviceManagerClient()
path = client.registry_path(project_id, cloud_region, registry_id)
devices = list(
client.list_devices(
request={
"parent": path,
"gateway_list_options": {"associations_gateway_id": gateway_id},
}
)
)
found = False
for device in devices:
found = True
print("Device: {} : {}".format(device.num_id, device.id))
if not found:
print("No devices bound to gateway {}".format(gateway_id))
# [END iot_list_devices_for_gateway]
def parse_command_line_args():
"""Parse command line arguments."""
default_registry = "cloudiot_device_manager_example_registry_{}".format(
int(time.time())
)
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
# Optional arguments
parser.add_argument(
"--algorithm",
choices=("RS256", "ES256"),
help="Which encryption algorithm to use to generate the JWT.",
)
parser.add_argument("--certificate_path", help="Path to public certificate.")
parser.add_argument(
"--cloud_region", default="us-central1", help="GCP cloud region"
)
parser.add_argument(
"--pubsub_topic",
help=(
"Google Cloud Pub/Sub topic. "
"Format is projects/project_id/topics/topic-id"
),
)
parser.add_argument(
"--config", default=None, help="Configuration sent to a device."
)
parser.add_argument("--device_id", default=None, help="Device id.")
parser.add_argument(
"--ec_public_key_file", default=None, help="Path to public ES256 key file."
)
parser.add_argument("--gateway_id", help="Gateway identifier.")
parser.add_argument("--member", default=None, help="Member used for IAM commands.")
parser.add_argument("--role", default=None, help="Role used for IAM commands.")
parser.add_argument(
"--send_command", default="1", help="The command sent to the device"
)
parser.add_argument(
"--project_id",
default=os.environ.get("GOOGLE_CLOUD_PROJECT"),
help="GCP cloud project name.",
)
parser.add_argument(
"--registry_id",
default=default_registry,
help="Registry id. If not set, a name will be generated.",
)
parser.add_argument(
"--rsa_certificate_file", default=None, help="Path to RS256 certificate file."
)
parser.add_argument(
"--service_account_json",
default=os.environ.get("GOOGLE_APPLICATION_CREDENTIALS"),
help="Path to service account json file.",
)
parser.add_argument(
"--version",
default=0,
type=int,
help="Version number for setting device configuration.",
)
# Command subparser
command = parser.add_subparsers(dest="command")
command.add_parser("bind-device-to-gateway", help=bind_device_to_gateway.__doc__)
command.add_parser("create-es256", help=create_es256_device.__doc__)
command.add_parser("create-gateway", help=create_gateway.__doc__)
command.add_parser("create-registry", help=open_registry.__doc__)
command.add_parser("create-rsa256", help=create_rs256_device.__doc__)
command.add_parser("create-topic", help=create_iot_topic.__doc__)
command.add_parser("create-unauth", help=create_unauth_device.__doc__)
command.add_parser("delete-device", help=delete_device.__doc__)
command.add_parser("delete-registry", help=delete_registry.__doc__)
command.add_parser("get", help=get_device.__doc__)
command.add_parser("get-config-versions", help=get_config_versions.__doc__)
command.add_parser("get-iam-permissions", help=get_iam_permissions.__doc__)
command.add_parser("get-registry", help=get_registry.__doc__)
command.add_parser("get-state", help=get_state.__doc__)
command.add_parser("list", help=list_devices.__doc__)
command.add_parser(
"list-devices-for-gateway", help=list_devices_for_gateway.__doc__
)
command.add_parser("list-gateways", help=list_gateways.__doc__)
command.add_parser("list-registries", help=list_registries.__doc__)
command.add_parser("patch-es256", help=patch_es256_auth.__doc__)
command.add_parser("patch-rs256", help=patch_rsa256_auth.__doc__)
command.add_parser("send-command", help=send_command.__doc__)
command.add_parser("set-config", help=patch_rsa256_auth.__doc__)
command.add_parser("set-iam-permissions", help=set_iam_permissions.__doc__)
command.add_parser(
"unbind-device-from-gateway", help=unbind_device_from_gateway.__doc__
)
return parser.parse_args()
def run_create(args):
"""Handles commands that create devices, registries, or topics."""
if args.command == "create-rsa256":
create_rs256_device(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.rsa_certificate_file,
)
elif args.command == "create-es256":
create_es256_device(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.ec_public_key_file,
)
elif args.command == "create-gateway":
create_gateway(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.gateway_id,
args.certificate_path,
args.algorithm,
)
elif args.command == "create-unauth":
create_unauth_device(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
)
elif args.command == "create-registry":
if args.pubsub_topic is None:
sys.exit("Error: specify --pubsub_topic")
open_registry(
args.service_account_json,
args.project_id,
args.cloud_region,
args.pubsub_topic,
args.registry_id,
)
elif args.command == "create-topic":
if args.pubsub_topic is None:
sys.exit("Error: specify --pubsub_topic")
create_iot_topic(args.project_id, args.pubsub_topic)
def run_get(args):
if args.command == "get":
get_device(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
)
elif args.command == "get-config-versions":
get_config_versions(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
)
elif args.command == "get-state":
get_state(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
)
elif args.command == "get-iam-permissions":
print(
get_iam_permissions(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
)
)
elif args.command == "get-registry":
print(
get_registry(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
)
)
def run_list(args):
if args.command == "list":
list_devices(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
)
elif args.command == "list-devices-for-gateway":
list_devices_for_gateway(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.gateway_id,
)
elif args.command == "list-gateways":
list_gateways(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
)
elif args.command == "list-registries":
list_registries(args.service_account_json, args.project_id, args.cloud_region)
def run_command(args):
"""Calls the program using the specified command."""
if args.project_id is None:
print("You must specify a project ID or set the environment variable.")
return
elif args.command.startswith("create"):
run_create(args)
elif args.command.startswith("get"):
run_get(args)
elif args.command.startswith("list"):
run_list(args)
elif args.command == "bind-device-to-gateway":
bind_device_to_gateway(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.gateway_id,
)
elif args.command == "delete-device":
delete_device(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
)
elif args.command == "delete-registry":
delete_registry(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
)
elif args.command == "patch-es256":
if args.ec_public_key_file is None:
sys.exit("Error: specify --ec_public_key_file")
patch_es256_auth(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.ec_public_key_file,
)
elif args.command == "patch-rs256":
if args.rsa_certificate_file is None:
sys.exit("Error: specify --rsa_certificate_file")
patch_rsa256_auth(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.rsa_certificate_file,
)
elif args.command == "send-command":
send_command(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.send_command,
)
elif args.command == "set-iam-permissions":
if args.member is None:
sys.exit("Error: specify --member")
if args.role is None:
sys.exit("Error: specify --role")
set_iam_permissions(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.role,
args.member,
)
elif args.command == "set-config":
if args.config is None:
sys.exit("Error: specify --config")
if args.version is None:
sys.exit("Error: specify --version")
set_config(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.version,
args.config,
)
elif args.command == "unbind-device-from-gateway":
unbind_device_from_gateway(
args.service_account_json,
args.project_id,
args.cloud_region,
args.registry_id,
args.device_id,
args.gateway_id,
)
if __name__ == "__main__":
args = parse_command_line_args()
run_command(args)
| apache-2.0 | -440,295,333,548,563,100 | 30.598162 | 136 | 0.611903 | false |
Linkid/fofix | fofix/core/Mod.py | 1 | 2968 | #####################################################################
# -*- coding: utf-8 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Ky?stil? #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import logging
import os
from fofix.core.Language import _
from fofix.core import Config
from fofix.core import Theme
log = logging.getLogger(__name__)
def _getModPath(engine):
return engine.resource.fileName("mods")
def init(engine):
# define configuration keys for all available mods
for m in getAvailableMods(engine):
Config.define("mods", "mod_" + m, bool, False, text = m, options = {False: _("Off"), True: _("On")})
# init all active mods
for m in getActiveMods(engine):
activateMod(engine, m)
def getAvailableMods(engine):
modPath = _getModPath(engine)
try:
dirList = os.listdir(modPath)
except OSError:
log.warning("Could not find mods directory")
return []
return [m for m in dirList if os.path.isdir(os.path.join(modPath, m)) and not m.startswith(".")]
def getActiveMods(engine):
mods = []
for mod in getAvailableMods(engine):
if engine.config.get("mods", "mod_" + mod):
mods.append(mod)
mods.sort()
return mods
def activateMod(engine, modName):
modPath = _getModPath(engine)
m = os.path.join(modPath, modName)
t = os.path.join(m, "theme.ini")
if os.path.isdir(m):
engine.resource.addDataPath(m)
if os.path.isfile(t):
theme = Config.load(t)
Theme.open(theme)
def deactivateMod(engine, modName):
modPath = _getModPath(engine)
m = os.path.join(modPath, modName)
engine.resource.removeDataPath(m)
| gpl-2.0 | 1,096,748,204,278,025,300 | 37.545455 | 109 | 0.523248 | false |
Urinx/Project_Euler_Answers | 128.py | 1 | 1330 | #!/usr/bin/env python
#coding:utf-8
"""
Hexagonal tile differences
A hexagonal tile with number 1 is surrounded by a ring of six hexagonal tiles, starting at "12 o'clock" and numbering the tiles 2 to 7 in an anti-clockwise direction.
New rings are added in the same fashion, with the next rings being numbered 8 to 19, 20 to 37, 38 to 61, and so on. The diagram below shows the first three rings.
By finding the difference between tile n and each of its six neighbours we shall define PD(n) to be the number of those differences which are prime.
For example, working clockwise around tile 8 the differences are 12, 29, 11, 6, 1, and 13. So PD(8) = 3.
In the same way, the differences around tile 17 are 1, 17, 16, 1, 11, and 10, hence PD(17) = 2.
It can be shown that the maximum value of PD(n) is 3.
If all of the tiles for which PD(n) = 3 are listed in ascending order to form a sequence, the 10th tile would be 271.
Find the 2000th tile in this sequence.
"""
from projecteuler import is_prime
def tiles(L=2000):
n, c = 1, 1
while c <= L:
r = 6 * n
if is_prime(r-1):
if is_prime(r+1) and is_prime(2*r+5): c += 1
if is_prime(r+5) and is_prime(2*r-7): c += 1
n += 1
return n-1
n = tiles()
print 3*n*(n - 1) + 2 if is_prime(6*n+1) else 3*n*(n + 1) + 1
# 14516824220 | gpl-2.0 | -5,135,474,069,018,303,000 | 40.59375 | 166 | 0.67218 | false |
akx/shoop | shoop/core/models/_order_lines.py | 1 | 5821 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals, with_statement
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from enumfields import Enum, EnumIntegerField
from jsonfield import JSONField
from shoop.core.fields import MoneyValueField, QuantityField, UnsavedForeignKey
from shoop.core.pricing import Priceful
from shoop.core.taxing import LineTax
from shoop.utils.money import Money
from shoop.utils.properties import MoneyProperty, MoneyPropped, PriceProperty
from ._base import ShoopModel
class OrderLineType(Enum):
PRODUCT = 1
SHIPPING = 2
PAYMENT = 3
DISCOUNT = 4
OTHER = 5
class Labels:
PRODUCT = _('product')
SHIPPING = _('shipping')
PAYMENT = _('payment')
DISCOUNT = _('discount')
OTHER = _('other')
class OrderLineManager(models.Manager):
def products(self): # pragma: no cover
return self.filter(type=OrderLineType.PRODUCT)
def shipping(self): # pragma: no cover
return self.filter(type=OrderLineType.SHIPPING)
def payment(self): # pragma: no cover
return self.filter(type=OrderLineType.PAYMENT)
def discounts(self):
return self.filter(type=OrderLineType.DISCOUNT)
def other(self): # pragma: no cover
return self.filter(type=OrderLineType.OTHER)
@python_2_unicode_compatible
class OrderLine(MoneyPropped, models.Model, Priceful):
order = UnsavedForeignKey("Order", related_name='lines', on_delete=models.PROTECT, verbose_name=_('order'))
product = UnsavedForeignKey(
"Product", blank=True, null=True, related_name="order_lines",
on_delete=models.PROTECT, verbose_name=_('product')
)
supplier = UnsavedForeignKey(
"Supplier", blank=True, null=True, related_name="order_lines",
on_delete=models.PROTECT, verbose_name=_('supplier')
)
parent_line = UnsavedForeignKey(
"self", related_name="child_lines", blank=True, null=True,
on_delete=models.PROTECT, verbose_name=_('parent line')
)
ordering = models.IntegerField(default=0, verbose_name=_('ordering'))
type = EnumIntegerField(OrderLineType, default=OrderLineType.PRODUCT, verbose_name=_('line type'))
sku = models.CharField(max_length=48, blank=True, verbose_name=_('line SKU'))
text = models.CharField(max_length=256, verbose_name=_('line text'))
accounting_identifier = models.CharField(max_length=32, blank=True, verbose_name=_('accounting identifier'))
require_verification = models.BooleanField(default=False, verbose_name=_('require verification'))
verified = models.BooleanField(default=False, verbose_name=_('verified'))
extra_data = JSONField(blank=True, null=True, verbose_name=_('extra data'))
# The following fields govern calculation of the prices
quantity = QuantityField(verbose_name=_('quantity'), default=1)
base_unit_price = PriceProperty('base_unit_price_value', 'order.currency', 'order.prices_include_tax')
discount_amount = PriceProperty('discount_amount_value', 'order.currency', 'order.prices_include_tax')
base_unit_price_value = MoneyValueField(verbose_name=_('unit price amount (undiscounted)'), default=0)
discount_amount_value = MoneyValueField(verbose_name=_('total amount of discount'), default=0)
objects = OrderLineManager()
class Meta:
verbose_name = _('order line')
verbose_name_plural = _('order lines')
def __str__(self):
return "%dx %s (%s)" % (self.quantity, self.text, self.get_type_display())
@property
def tax_amount(self):
"""
:rtype: shoop.utils.money.Money
"""
zero = Money(0, self.order.currency)
return sum((x.amount for x in self.taxes.all()), zero)
def save(self, *args, **kwargs):
if not self.sku:
self.sku = u""
if self.type == OrderLineType.PRODUCT and not self.product_id:
raise ValidationError("Product-type order line can not be saved without a set product")
if self.product_id and self.type != OrderLineType.PRODUCT:
raise ValidationError("Order line has product but is not of Product type")
if self.product_id and not self.supplier_id:
raise ValidationError("Order line has product but no supplier")
super(OrderLine, self).save(*args, **kwargs)
if self.product_id:
self.supplier.module.update_stock(self.product_id)
@python_2_unicode_compatible
class OrderLineTax(MoneyPropped, ShoopModel, LineTax):
order_line = models.ForeignKey(
OrderLine, related_name='taxes', on_delete=models.PROTECT,
verbose_name=_('order line'))
tax = models.ForeignKey(
"Tax", related_name="order_line_taxes",
on_delete=models.PROTECT, verbose_name=_('tax'))
name = models.CharField(max_length=200, verbose_name=_('tax name'))
amount = MoneyProperty('amount_value', 'order_line.order.currency')
base_amount = MoneyProperty('base_amount_value', 'order_line.order.currency')
amount_value = MoneyValueField(verbose_name=_('tax amount'))
base_amount_value = MoneyValueField(
verbose_name=_('base amount'),
help_text=_('Amount that this tax is calculated from'))
ordering = models.IntegerField(default=0, verbose_name=_('ordering'))
class Meta:
ordering = ["ordering"]
def __str__(self):
return "%s: %s on %s" % (self.name, self.amount, self.base_amount)
| agpl-3.0 | -6,166,658,032,504,064,000 | 37.806667 | 112 | 0.68356 | false |
kmee/kmee-odoo-sale-mobile | kmee_sale_mobile_meuspedidos/__openerp__.py | 1 | 1670 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 - KMEE- Rafael da Silva Lima (<http://www.kmee.com.br>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Kmee Sale Meus Pedidos',
'version' : '1.0',
'author' : 'KMEE',
'description' : 'Módulo para comunicação com API Meus Pedidos',
'category' : 'Enterprise Innovation',
'website' : 'http://www.kmee.com.br',
'depends' : ['l10n_br_sale', 'l10n_br_delivery', 'l10n_br_account_payment'],
'data': [
'security/groups.xml',
'security/ir.model.access.csv',
'account_view.xml',
'sale_mobile_view.xml',
'sale_mobile_admin.xml',
'data/config_data.xml'
],
'qweb' : [
"static/src/base.xml",
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
'application': False,
} | agpl-3.0 | -7,038,819,204,226,068,000 | 35.26087 | 80 | 0.566287 | false |
sissaschool/xmlschema | tests/test_files.py | 1 | 3234 | #!/usr/bin/env python
#
# Copyright (c), 2016-2020, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <[email protected]>
#
"""
This module runs tests on XSD or XML files provided by arguments.
"""
if __name__ == '__main__':
import unittest
import os
import argparse
from xmlschema import XMLSchema10, XMLSchema11
from xmlschema.testing import xsd_version_number, defuse_data, \
make_schema_test_class, make_validation_test_class
parser = argparse.ArgumentParser(add_help=True)
parser.add_argument('--version', dest='version', metavar='VERSION',
type=xsd_version_number, default='1.0',
help="XSD schema version to use for testing (default is 1.0).")
parser.add_argument('--inspect', action="store_true", default=False,
help="Inspect using an observed custom schema class.")
parser.add_argument('--defuse', metavar='(always, remote, never)',
type=defuse_data, default='remote',
help="Define when to use the defused XML data loaders. "
"Defuse remote data for default.")
parser.add_argument('--lxml', dest='lxml', action='store_true', default=False,
help='Check also with lxml.etree.XMLSchema (for XSD 1.0)')
parser.add_argument(
'files', metavar='[FILE ...]', nargs='*',
help='Input files. Each argument can be a file path or a glob pathname. '
'A "-" stands for standard input. If no arguments are given then processes '
'all the files included within the scope of the selected applications.'
)
args = parser.parse_args()
if args.version == '1.0':
schema_class = XMLSchema10
check_with_lxml = args.lxml
else:
schema_class = XMLSchema11
check_with_lxml = False
test_num = 1
test_args = argparse.Namespace(
errors=0, warnings=0, inspect=args.inspect, locations=(),
defuse=args.defuse, skip=False, debug=False
)
test_loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for test_file in args.files:
if not os.path.isfile(test_file):
continue
elif test_file.endswith('xsd'):
test_class = make_schema_test_class(
test_file, test_args, test_num, schema_class, check_with_lxml
)
test_num += 1
elif test_file.endswith('xml'):
test_class = make_validation_test_class(
test_file, test_args, test_num, schema_class, check_with_lxml
)
test_num += 1
else:
continue
print("Add test %r for file %r ..." % (test_class.__name__, test_file))
test_suite.addTest(test_loader.loadTestsFromTestCase(test_class))
if test_num == 1:
print("No XSD or XML file to test, exiting ...")
else:
runner = unittest.TextTestRunner()
runner.run(test_suite)
| mit | 6,779,808,727,647,150,000 | 38.439024 | 89 | 0.606988 | false |
anrl/gini3 | frontend/src/gbuilder/Core/globals.py | 1 | 1798 | """ Various global variables """
import os
PROG_NAME = "gBuilder"
PROG_VERSION = "3.0.0"
GINI_ROOT = os.environ["GINI_ROOT"]
GINI_HOME = os.environ["GINI_HOME"]
environ = {"os":"Windows",
"path":GINI_ROOT+"/",
"remotepath":"./",
"images":os.environ["GINI_SHARE"] +"/gbuilder/images/",
"config":GINI_HOME+"/etc/",
"sav":GINI_HOME+"/sav/",
"tmp":GINI_HOME+"/tmp/",
"doc":GINI_ROOT+"/doc/"}
options = {"names":True,
"systray":False,
"elasticMode":False, "keepElasticMode":False,
"smoothing":True, "glowingLights":True, "style":"Mac",
"grid":True, "gridColor":"(220,220,220)",
"background":environ["images"] + "background.jpg",
"windowTheme":environ["images"] + "window.jpg",
"baseTheme":environ["images"] + "base.jpg",
"autorouting":True, "autogen":True, "autocompile":True,
"graphing":True, "username":"root",
"server":"localhost", "session":"GINI", "autoconnect":True,
"localPort":"10001", "remotePort":"10000",
"restore":True,
"moveAlert":True, "wserver":"192.168.54.121", "wlocalPort":"60001","wremotePort":"60000"}
mainWidgets = {"app":None,
"main":None,
"canvas":None,
"tab":None,
"popup":None,
"inputDialog":None,
"log":None,
"tm":None,
"properties":None,
"interfaces":None,
"routes":None,
"drop":None,
"client":None,
"wgini_client":None}
defaultOptions = {"palette":None}
yRouters = {} # indexed by yun ID
availableyRouters = []
usedyRouters = {} # indexed by topology index
| mit | -6,747,352,971,529,043,000 | 32.924528 | 100 | 0.516129 | false |
dwillis/dayspring | dayspring/settings.py | 1 | 5226 | # Django settings for dayspring project.
import os
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '=5xw*pqse(##*c*+h74^$(t!qa)7=5gx1gua=)8)us+_@t^j*$'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'dayspring.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'dayspring.wsgi.application'
TEMPLATE_DIRS = (os.path.join(PROJECT_PATH, 'templates'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'localflavor',
'dayspring',
'django.contrib.admin',
'swingtime',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dayspring.db',
}
}
SITE_ID = 3
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
) | mit | -976,203,766,647,926,500 | 29.747059 | 79 | 0.69977 | false |
andresriancho/moto | tests/test_s3/test_server.py | 1 | 1530 | import sure # noqa
import moto.server as server
'''
Test the different server responses
'''
def test_s3_server_get():
backend = server.create_backend_app("s3")
test_client = backend.test_client()
res = test_client.get('/')
res.data.should.contain('ListAllMyBucketsResult')
def test_s3_server_bucket_create():
backend = server.create_backend_app("s3")
test_client = backend.test_client()
res = test_client.put('/', 'http://foobaz.localhost:5000/')
res.status_code.should.equal(200)
res = test_client.get('/')
res.data.should.contain('<Name>foobaz</Name>')
res = test_client.get('/', 'http://foobaz.localhost:5000/')
res.status_code.should.equal(200)
res.data.should.contain("ListBucketResult")
res = test_client.put('/bar', 'http://foobaz.localhost:5000/', data='test value')
res.status_code.should.equal(200)
res = test_client.get('/bar', 'http://foobaz.localhost:5000/')
res.status_code.should.equal(200)
res.data.should.equal("test value")
def test_s3_server_post_to_bucket():
backend = server.create_backend_app("s3")
test_client = backend.test_client()
res = test_client.put('/', 'http://tester.localhost:5000/')
res.status_code.should.equal(200)
test_client.post('/', "https://tester.localhost:5000/", data={
'key': 'the-key',
'file': 'nothing'
})
res = test_client.get('/the-key', 'http://tester.localhost:5000/')
res.status_code.should.equal(200)
res.data.should.equal("nothing")
| apache-2.0 | 8,750,110,255,723,227,000 | 26.818182 | 85 | 0.649673 | false |
FrodeSolheim/fs-uae-launcher | launcher/ui2/launcher2leftpanel.py | 1 | 2394 | from fsui import Color, HorizontalLayout, Panel, TextField
from launcher.context import get_settings
from launcher.i18n import gettext
from launcher.ui2.configlistview import ConfigListView
from launcher.ui.newconfigbutton import NewConfigButton
from system.classes.configdispatch import ConfigDispatch
class SearchField(TextField):
def __init__(self, parent):
# FIXME: Should go via gscontext and not settings
# or maybe via settings but with a window id/prefix
text = get_settings(self).get("config_search")
super().__init__(
parent, text=text, clearbutton=True, placeholder=gettext("Search")
)
def on_changed(self):
text = self.text()
# FIXME: Should go via gscontext and not settings
get_settings(self).set("config_search", text)
class SearchPanel(Panel):
def __init__(self, parent):
super().__init__(parent)
horilayout = HorizontalLayout()
self.layout.add(horilayout, fill=True, expand=True, margin=10)
# self.set_background_color(Color(0xAEAEAE))
self.set_background_color(Color(0xB8B8B8))
horilayout.add(NewConfigButton(self), fill=True, margin_right=10)
horilayout.add(SearchField(self), fill=True, expand=True)
class Launcher2LeftPanel(Panel):
def __init__(self, parent):
super().__init__(parent)
# self.set_background_color(Color(0x999999))
self.search_panel = SearchPanel(self)
self.layout.add(self.search_panel, fill=True)
self.config_listview = ConfigListView(self)
self.layout.add(self.config_listview, fill=True, expand=True)
ConfigDispatch(self, {"__running": self.__on_running_config})
# panel = Panel(self)
# panel.set_min_height(40)
# # panel.set_background_color(Color(0x888888))
# panel.set_background_color(Color(0xB8B8B8))
# self.layout.add(panel, fill=True)
def get_min_width(self):
minWidth = super().get_min_width()
print("Launcher2LeftPanel.get_min_width (size) =", minWidth)
return minWidth
def __on_running_config(self, event):
isrunning = bool(event.value)
if self.enabled() == isrunning:
self.set_enabled(not isrunning)
def on_resize(self):
super().on_resize()
print("Launcher2LeftPanel.on_resize, size is now", self.getSize())
| gpl-2.0 | 1,200,961,827,185,365,000 | 37 | 78 | 0.659566 | false |
cojacoo/testcases_echoRD | gen_test2211.py | 1 | 4396 | import numpy as np
import pandas as pd
import scipy as sp
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os, sys
try:
import cPickle as pickle
except:
import pickle
#connect echoRD Tools
pathdir='../echoRD' #path to echoRD
lib_path = os.path.abspath(pathdir)
#sys.path.append(lib_path)
sys.path.append('/home/ka/ka_iwg/ka_oj4748/echoRD/echoRD')
import vG_conv as vG
from hydro_tools import plotparticles_t,hydroprofile,plotparticles_specht
# Prepare echoRD
#connect to echoRD
import run_echoRD as rE
#connect and load project
[dr,mc,mcp,pdyn,cinf,vG]=rE.loadconnect(pathdir='../',mcinif='mcini_gen2a',experimental=True)
mc = mcp.mcpick_out(mc,'gen_test2a.pickle')
runname='gen_test2211'
mc.advectref='Shipitalo'
mc.soilmatrix=pd.read_csv(mc.matrixbf, sep=' ')
mc.soilmatrix['m'] = np.fmax(1-1/mc.soilmatrix.n,0.1)
mc.md_macdepth=mc.md_depth[np.fmax(2,np.sum(np.ceil(mc.md_contact),axis=1).astype(int))]
mc.md_macdepth[mc.md_macdepth<=0.]=0.065
precTS=pd.read_csv(mc.precf, sep=',',skiprows=3)
precTS.tstart=60
precTS.tend=60+1800
precTS.total=0.01
precTS.intense=precTS.total/(precTS.tend-precTS.tstart)
#use modified routines for binned retention definitions
mc.part_sizefac=500
mc.gridcellA=mc.mgrid.vertfac*mc.mgrid.latfac
mc.particleA=abs(mc.gridcellA.values)/(2*mc.part_sizefac) #assume average ks at about 0.5 as reference of particle size
mc.particleD=2.*np.sqrt(mc.particleA/np.pi)
mc.particleV=3./4.*np.pi*(mc.particleD/2.)**3.
mc.particleV/=np.sqrt(abs(mc.gridcellA.values)) #assume grid size as 3rd dimension
mc.particleD/=np.sqrt(abs(mc.gridcellA.values))
mc.particlemass=dr.waterdensity(np.array(20),np.array(-9999))*mc.particleV #assume 20C as reference for particle mass
#DEBUG: a) we assume 2D=3D; b) change 20C to annual mean T?
mc=dr.ini_bins(mc)
mc=dr.mc_diffs(mc,np.max(np.max(mc.mxbin)))
[mc,particles,npart]=dr.particle_setup(mc)
#define bin assignment mode for infiltration particles
mc.LTEdef='instant'#'ks' #'instant' #'random'
mc.LTEmemory=mc.soilgrid.ravel()*0.
#new reference
mc.maccon=np.where(mc.macconnect.ravel()>0)[0] #index of all connected cells
mc.md_macdepth=np.abs(mc.md_macdepth)
mc.prects=False
#theta=mc.zgrid[:,1]*0.+0.273
#[mc,particles,npart]=rE.particle_setup_obs(theta,mc,vG,dr,pdyn)
[thS,npart]=pdyn.gridupdate_thS(particles.lat,particles.z,mc)
#[A,B]=plotparticles_t(particles,thS/100.,mc,vG,store=True)
# Run Model
mc.LTEpercentile=70 #new parameter
t_end=24.*3600.
saveDT=True
#1: MDA
#2: MED
#3: rand
infiltmeth='MDA'
#3: RWdiff
#4: Ediss
#exfiltmeth='RWdiff'
exfiltmeth='Ediss'
#5: film_uconst
#6: dynamic u
film=True
#7: maccoat1
#8: maccoat10
#9: maccoat100
macscale=1. #scale the macropore coating
clogswitch=False
infiltscale=False
#mc.dt=0.11
#mc.splitfac=5
#pdyn.part_diffusion_binned_pd(particles,npart,thS,mc)
#import profile
#%prun -D diff_pd_prof.prof pdyn.part_diffusion_binned_pd(particles,npart,thS,mc)
wdir='/beegfs/work/ka_oj4748/gen_tests'
drained=pd.DataFrame(np.array([]))
leftover=0
output=60. #mind to set also in TXstore.index definition
dummy=np.floor(t_end/output)
t=0.
ix=0
TSstore=np.zeros((int(dummy),mc.mgrid.cells[0],2))
try:
#unpickle:
with open(''.join([wdir,'/results/Z',runname,'_Mstat.pick']),'rb') as handle:
pickle_l = pickle.load(handle)
dummyx = pickle.loads(pickle_l)
particles = pickle.loads(dummyx[0])
[leftover,drained,t,TSstore,ix] = pickle.loads(dummyx[1])
ix+=1
print('resuming into stored run at t='+str(t)+'...')
except:
print('starting new run...')
#loop through plot cycles
for i in np.arange(dummy.astype(int))[ix:]:
plotparticles_specht(particles,mc,pdyn,vG,runname,t,i,saving=True,relative=False,wdir=wdir)
[particles,npart,thS,leftover,drained,t]=rE.CAOSpy_rundx1(i*output,(i+1)*output,mc,pdyn,cinf,precTS,particles,leftover,drained,6.,splitfac=4,prec_2D=False,maccoat=macscale,saveDT=saveDT,clogswitch=clogswitch,infilt_method=infiltmeth,exfilt_method=exfiltmeth,film=film,infiltscale=infiltscale)
TSstore[i,:,:]=rE.part_store(particles,mc)
#if i/5.==np.round(i/5.):
with open(''.join([wdir,'/results/Z',runname,'_Mstat.pick']),'wb') as handle:
pickle.dump(pickle.dumps([pickle.dumps(particles),pickle.dumps([leftover,drained,t,TSstore,i])]), handle, protocol=2)
| gpl-3.0 | 2,470,535,713,098,664,000 | 30.177305 | 296 | 0.720428 | false |
domenicosolazzo/jroc | tests/pipelines/ner/test_nerpipeline.py | 1 | 2749 | # -*- coding: utf-8 -*-
from . import NERPipeline
import unittest
import os
class NERPipelineTestCase(unittest.TestCase):
pipeline = None
name = "NER Pipeline tests"
def setUp(self):
self.pipeline = None
def tearDown(self):
self.pipeline = None
def test_pipeline_execute_with_invalid_input(self):
"""
Test the execution of the pos pipeline with wrong input data. (Malformed JSON)
"""
input = '{"data" "Ivar Aasen ble født på gården Åsen i " " " Hovdebygda på Sunnmøre som sønn av småbrukeren Ivar Jonsson."}'
self.pipeline = NERPipeline(input=input,name=self.name, withEntityAnnotation=False)
self.assertRaises(Exception, self.pipeline.execute, None)
def test_pipeline_execute_with_valid_text(self):
"""
Test the execution of the ner pipeline with a valid text. No Entity Annotation
"""
input = '{"data":"Ivar Aasen ble født på gården Åsen i Hovdebygda på Sunnmøre som sønn av småbrukeren Ivar Jonsson."}'
self.pipeline = NERPipeline(input=input, name=self.name, withEntityAnnotation=True)
self.pipeline.execute()
actual = self.pipeline.getOutput()
expected = [u'Sunnm\xf8re', u'\xc5sen', u'Ivar Aasen', u'Ivar Jonsson', u'Hovdebygda']
self.assertTrue('entities' in actual)
self.assertEqual(expected, actual.get('entities'))
def test_pipeline_execute_with_valid_text_and_entity_annotation(self):
"""
Test the execution of the ner pipeline with a valid text. It is using Entity Annotation
"""
input = '{"data":"Ivar Aasen ble født på gården Åsen i Hovdebygda på Sunnmøre som sønn av småbrukeren Ivar Jonsson."}'
self.pipeline = NERPipeline(input=input, name=self.name, withEntityAnnotation=True)
self.pipeline.execute()
actual = self.pipeline.getOutput()
self.assertTrue('entities-annotated' in actual)
self.assertTrue(isinstance(actual.get('entities-annotated'), list))
def test_pipeline_execute_with_characters_to_be_removed(self):
"""
Test the execution of the ner pipeline with an input with characters that should be removed
"""
input = '{"data":"Ivar Aasen ble født " " "på gården Åsen i Hovdebygda på Sunnmøre som sønn av småbrukeren Ivar Jonsson."}'
self.pipeline = NERPipeline(input=input, name=self.name, withEntityAnnotation=True)
self.pipeline.execute()
actual = self.pipeline.getOutput()
expected = [u'Sunnm\xf8re', u'\xc5sen', u'Ivar Aasen', u'Ivar Jonsson', u'Hovdebygda']
self.assertTrue('entities' in actual)
self.assertEqual(expected, actual.get('entities'))
| gpl-3.0 | 633,803,786,615,165,200 | 43.540984 | 133 | 0.66912 | false |
bkuczenski/lca-tools | antelope_utilities/flowables/create_synonyms.py | 1 | 4679 | import os
import json
import re
from antelope_catalog.providers.ecospold2 import EcospoldV2Archive
from antelope_catalog.providers.ilcd import grab_flow_name, IlcdLcia
from antelope_catalog.providers.xml_widgets import find_tag, find_tags, find_common, find_ns
from lcatools.flowdb.synlist import Flowables, InconsistentIndices, ConflictingCas
ECOSPOLD = os.path.join('/data', 'Dropbox', 'data', 'Ecoinvent', '3.2', 'current_Version_3.2_cutoff_lci_ecoSpold02.7z')
ES_FILE = '00009573-c174-463a-8ebf-183ec587ba0d_7cb72345-4338-4f2d-830f-65bba3530fdb.spold'
ELCD = os.path.join('/data', 'Dropbox', 'data', 'ELCD', 'ELCD3.2-a.zip')
SYNONYMS = os.path.join(os.path.dirname(__file__), 'synonyms.json')
def get_ecospold_exchanges(archive=ECOSPOLD, prefix='datasets', file=ES_FILE):
E = EcospoldV2Archive(archive, prefix=prefix)
o = E.objectify(file)
return find_tags(o, 'elementaryExchange')
def ilcd_flow_generator(archive=ELCD, **kwargs):
I = IlcdLcia(archive, **kwargs)
count = 0
for f in I.list_objects('Flow'):
o = I.objectify(f, dtype='Flow')
if o is not None:
yield o
count += 1
if count % 1000 == 0:
print('%d data sets completed' % count)
def _add_syn_if(syn, synset):
g = syn.strip()
if g != '' and g != 'PSM':
synset.add(syn)
def synonyms_from_ecospold_exchange(exch):
"""
Ecospold exchanges: synonyms are Name, CAS Number, and ', '-separated contents of synonym tags.
Care must be taken not to split on ',' as some chemical names include commas
:param exch:
:return: set of synonyms (stripped)
"""
syns = set()
name = str(exch['name'])
syns.add(name)
cas = exch.get('casNumber')
if cas is not None:
syns.add(cas)
synonym_tag = find_tags(exch, 'synonym')
if len(synonym_tag) == 1:
# parse the comma-separated list
if bool(re.search('etc\.', str(synonym_tag[0]))):
syns.add(str(synonym_tag[0]).strip())
else:
for x in str(synonym_tag[0]).split(', '):
_add_syn_if(x, syns)
else:
# multiple entries- allow embedded comma-space
for syn in synonym_tag:
_add_syn_if(str(syn), syns)
return name, syns
def synonyms_from_ilcd_flow(flow):
"""
ILCD flow files have long synonym blocks at the top. They also have a CAS number and a basename.
:param flow:
:return:
"""
ns = find_ns(flow.nsmap, 'Flow')
syns = set()
name = grab_flow_name(flow, ns=ns)
syns.add(name)
uid = str(find_common(flow, 'UUID')).strip()
syns.add(uid)
cas = str(find_tag(flow, 'CASNumber', ns=ns)).strip()
if cas != '':
syns.add(cas)
for syn in find_tags(flow, 'synonyms', ns='common'):
for x in str(syn).split(';'):
if x.strip() != '' and x.strip().lower() != 'wood':
syns.add(x.strip())
return name, syns, uid
cas_regex = re.compile('^[0-9]{,6}-[0-9]{2}-[0-9]$')
def _add_set(synlist, name, syns, xid):
try:
index = synlist.add_set(syns, merge=True, name=name)
except ConflictingCas:
index = synlist.new_set(syns, name=name)
except InconsistentIndices:
dups = synlist.find_indices(syns)
matches = []
for i in dups:
for j in syns:
if j in synlist[i]:
matches.append((j, i))
break
try:
index = synlist.merge_indices(dups)
print('Merged Inconsistent indices in ID %s, e.g.:' % xid)
for match in matches:
print(' [%s] = %d' % match)
except ConflictingCas:
# print('Conflicting CAS on merge.. creating new group')
index = synlist.new_set(syns, name=name)
return index
def create_new_synonym_list():
"""
This just makes a SynList and populates it, first with ecoinvent, then with ILCD, and saves it to disk
:return:
"""
synonyms = Flowables()
# first, ecoinvent
exchs = get_ecospold_exchanges()
for exch in exchs:
name, syns = synonyms_from_ecospold_exchange(exch)
_add_set(synonyms, name, syns, exch.get('id'))
# next, ILCD - but hold off for now
for flow in ilcd_flow_generator():
name, syns, uid = synonyms_from_ilcd_flow(flow)
_add_set(synonyms, name, syns, uid)
with open(SYNONYMS, 'w') as fp:
json.dump(synonyms.serialize(), fp)
print('Wrote synonym file to %s' % SYNONYMS)
return synonyms
def load_synonyms(file=SYNONYMS):
with open(file) as fp:
return Flowables.from_json(json.load(fp))
| gpl-2.0 | -5,978,112,579,486,739,000 | 30.829932 | 119 | 0.602265 | false |
dylanfried/yesterday-tomorrow | OperationOrganism.py | 1 | 7841 | from copy import copy
import random
from helpers import shift
class OperationOrganism:
length = 150
def __init__(self,genome=None,target=None,population=None):
self.population = population
if genome:
self.genome = genome[:]
else:
self.genome = [(random.sample([1,2,3,4,5,6] if i < self.length/2 else [2,3,4,4,4,4,4],1)[0],random.randint(-20,20)) for i in range(self.length)]
def random_genome(self,length):
return
def mutate(self,gene_range,mutate_max):
''' Return a mutated organism '''
c = self.copy()
for i in range(len(c.genome)):
if random.random() < 0.02:
# New random gene replacement
c.genome[i] = (random.sample([1,2,3,4,5,6] if i < self.length/2 else [2,3,4,4,4,4,4],1)[0],random.randint(-20,20))
elif random.random() < 0.02:
# Permute just the operand
c.genome[i] = (c.genome[i][0],c.genome[i][1] + random.randint(-mutate_max,mutate_max))
return c
def crossover(self,organism):
''' Return an organism that is a crossover between this organism and the provided organism '''
c1 = self.copy()
c2 = organism.copy()
for i in range(min(len(c1.genome),len(c2.genome))):
if random.random() < 0.3:
c1.genome[i] = organism.genome[i]
c2.genome[i] = self.genome[i]
return [c1,c2]
def onepointcrossover(self,organism):
inflection_point = random.randint(0,len(organism.genome)-1)
c1 = self.copy()
c2 = organism.copy()
genome1 = c1.genome[:inflection_point] + c2.genome[inflection_point:]
genome2 = c2.genome[:inflection_point] + c1.genome[inflection_point:]
c1.genome = genome1
c2.genome = genome2
return [c1,c2]
def calculate_fitness(self,target,other_genomes=None):
''' Calculate the fitness of this organism '''
# First, must resolve
result = self.resolve(target[0],target[1])
final_pattern = target[1]
p_common = 0
p_correct = 0
p_common = float(len([1 for item in result if item in final_pattern]))/float(max(len(result),len(final_pattern)))
for idx,item in enumerate(result):
if idx < len(final_pattern) and item == final_pattern[idx]:
p_correct += 1
p_correct = float(p_correct)/float(max(len(result),len(final_pattern)))
self.fitness = 1.0 - 0.5*(p_common + p_correct)
self.fitness_level = self.length-1
return
result_path = self.resolve(target[0],target[1],record_path=True)
final_pattern = target[1]
self.fitness = 1
for level,result in enumerate(result_path):
p_common = 0
p_correct = 0
p_common = float(len([1 for item in result if item in final_pattern]))/float(max(len(result),len(final_pattern)))
for idx,item in enumerate(result):
if idx < len(final_pattern) and item == final_pattern[idx]:
p_correct += 1
p_correct = float(p_correct)/float(max(len(result),len(final_pattern)))
fitness = 1.0 - 0.5*(p_common + p_correct)
if fitness < self.fitness:
self.fitness = fitness
self.fitness_level = level
def copy(self):
c = OperationOrganism(genome=self.genome)
return c
def resolve(self,start_pattern,final_pattern,record_path=False):
result = start_pattern[:]
if record_path:
path = [result[:]]
for operation_tuple in self.genome:
operation = operation_tuple[0]
operand = operation_tuple[1]
if operation == 1:
# no op
pass
elif operation == 2:
# add
#index = random.randint(0,len(final_pattern)-1)
index = operand % len(final_pattern)
result[index:index] = [final_pattern[index]]
elif operation == 3 and len(result) > 0:
# delete
#index = random.randint(0,len(result)-1)
index = operand % len(result)
del result[index]
elif operation == 4 and len(result) > 0:
# mutate
#index = random.randint(0,min(len(result)-1,len(final_pattern)-1))
index = operand % min(len(result),len(final_pattern))
result[index] = final_pattern[index]
elif operation == 5 and len(result) > 0 and operand != 0:
# rotation
amount = (operand/abs(operand)) * (operand % len(result))
result = shift(result,amount)
elif operation == 6 and len(result) > 0:
# exchange
index1 = operand % len(result)
index2 = (operand+1)%len(result)
result[index1],result[index2] = result[index2],result[index1]
elif operation == 7 and len(result) > 0 and operand != 0:
# incorrect rotation
# Only rotate incorrect notes
notes_to_shift_positions = []
notes_to_shift = []
for i in range(len(result)):
if i >= len(final_pattern) or final_pattern[i] != result[i]:
# This note should be shifted
notes_to_shift_positions.append(i)
notes_to_shift.append(result[i])
# Now do the actual shifting of the notes
amount = (operand/abs(operand)) * (operand % len(notes_to_shift))
notes_to_shift = shift(notes_to_shift,amount)
for i in range(len(notes_to_shift)):
result[notes_to_shift_positions[i]] = notes_to_shift[i]
elif operation == 8 and len(result) > 0:
# incorrect exchange
# Only exchange incorrect notes
found = False
for i in range(len(result)):
index1 = (operand+i) % len(result)
if index1 >= len(final_pattern) or final_pattern[index1] != result[index1]:
found = True
break
if found:
found = False
for i in range(len(result)):
index2 = (index1+i+1) % len(result)
if index2 >= len(final_pattern) or final_pattern[index2] != result[index2]:
found = True
break
if found:
result[index1],result[index2] = result[index2],result[index1]
if record_path:
path.append(result[:])
if record_path:
return path
else:
return result
def best_path(self,start_pattern,final_pattern,condense=[1]):
#condense = [i for i in condense for j in range(i)]
to_return = []
result_path = self.resolve(start_pattern,final_pattern,record_path=True)
for j in range(len(condense)):
print "J",j
c = condense[j]
print len(condense),self.fitness_level+1,int((self.fitness_level+1)/len(condense))
start = int((self.fitness_level+1)/len(condense))*j
stop = int((self.fitness_level+1)/len(condense))*(j+1)
print "start",start,"stop",stop
for i in range(start,stop,c):
print "i",i
to_return += result_path[i] + [(0,2,[])]
to_return += result_path[-1] + [(0,2,[])]
#to_return += final_pattern
return to_return
| apache-2.0 | 6,161,296,623,014,132,000 | 43.050562 | 156 | 0.523275 | false |
mlabru/ptracks | view/resources/resources_prisma_rc.py | 1 | 74018 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: Mon Mar 6 11:25:14 2017
# by: The Resource Compiler for PyQt (Qt v4.8.6)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
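# qt_resource_data below holds the compiled Qt resources as one raw byte
# string.  Each embedded file is prefixed with its 32-bit big-endian size --
# here \x00\x00\x1b\x93, i.e. 7059 bytes -- followed by the file contents.
# The PNG signature \x89PNG\r\n\x1a\n and an IHDR chunk declaring a 96x84,
# 8-bit truecolour image are visible at the start of the payload, so this
# resource is a small PNG pixmap.
#
# For reference, a pyrcc4-generated module normally also defines
# qt_resource_name and qt_resource_struct and ends with the following
# boilerplate (shown here only as a sketch; the exact tables live outside
# this excerpt):
#
#     def qInitResources():
#         QtCore.qRegisterResourceData(0x01, qt_resource_struct,
#                                      qt_resource_name, qt_resource_data)
#
#     def qCleanupResources():
#         QtCore.qUnregisterResourceData(0x01, qt_resource_struct,
#                                        qt_resource_name, qt_resource_data)
#
#     qInitResources()
#
# Importing the module therefore registers the resources, after which they
# can be loaded through the ":/" scheme, e.g. QtGui.QPixmap(":/path/in/qrc")
# (the actual resource paths are encoded in qt_resource_name, not shown here).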
qt_resource_data = "\
\x00\x00\x1b\x93\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x54\x08\x02\x00\x00\x00\xf2\x11\xa5\xd4\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\
\x01\xd2\xdd\x7e\xfc\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd5\x03\
\x03\x0f\x27\x36\xc5\x11\x92\xc3\x00\x00\x00\x06\x62\x4b\x47\x44\
\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x1b\x20\x49\x44\
\x41\x54\x78\xda\xed\x5c\x77\x90\x1d\x45\x7a\xef\x9e\x7d\x9b\x77\
\x25\xad\x24\x24\x84\x84\x04\x16\xc8\x46\x27\x8b\x24\x1d\x20\x71\
\x08\xec\x13\xb1\x10\xa7\x33\x20\xce\x1c\xa6\x08\x55\xa4\x32\x45\
\xce\xc9\x84\x3a\x52\x99\x60\xe0\x70\x51\x47\xb0\xe0\x4c\x2c\x63\
\x62\x95\x39\x71\x87\x41\x75\x60\x81\xc8\x06\xc3\x01\x26\x0a\x09\
\xa5\xd5\xe6\xdd\xf7\xa6\xfd\x9b\xfe\xba\xbf\xfe\xa6\x67\xde\xc2\
\x7f\xe7\xb2\x69\xa8\xa7\x79\xbd\xf3\x66\xba\x7f\xfd\xe5\xef\xeb\
\xd6\x69\x9a\xaa\xef\x5b\xfd\x96\x68\xad\x8d\x31\xf4\x89\x86\x2e\
\xbe\xa0\xc6\x9d\xfc\x59\xaf\x73\x94\xeb\xe2\xd3\x8a\xbf\x2d\x3e\
\x59\x76\x16\xdf\x28\x1f\x55\x6c\xd1\x0f\xa3\x47\x95\x8e\xad\xb4\
\x39\x5c\x18\xa6\xef\x49\x86\x1b\x78\x2b\x49\x92\x0a\x81\x0a\x68\
\x6a\xb5\x5a\x5f\x5f\x5f\xb5\x5a\x23\x6c\x19\x2c\xfb\x57\x7c\x2a\
\xfb\x69\xb4\xd2\xfc\x93\x22\xb2\xf5\xae\xe5\x02\xd2\x4f\xe8\x15\
\xc5\x7b\x4a\x57\x2b\xba\x3f\xfa\x2c\x5f\x79\xad\xe5\x0f\x8b\x74\
\x3d\x0a\x35\x34\x36\x56\x3a\x3a\x3a\xe8\xe1\x15\x26\xb9\x17\x5e\
\x78\xe1\xc4\x13\x8f\x9f\x34\x71\xfc\x48\xb5\xaa\xed\x03\xbe\x3b\
\xd8\xee\x65\xf6\xd5\x0e\xc2\xef\xf4\x33\xfb\x93\x3a\x2f\xd2\xdf\
\xf6\xca\x6c\xd1\x94\x76\x0f\x09\x4f\x8b\x16\x44\x7d\xd7\xd1\x78\
\x9c\x75\xa2\xbf\xf9\x66\xe3\x8a\x15\xcf\x6f\xbf\xfd\xf6\x19\x40\
\x0c\xea\xda\x75\x1b\xe6\xfc\xe9\xb4\xfb\xee\xba\x7a\xdd\x37\x9b\
\x1a\x2a\x15\xed\xa7\x9d\x01\xe9\x31\x70\x40\x64\xa3\xca\x2e\x0d\
\x5d\x65\xd7\xf4\x45\x65\xc4\x26\xfe\xb3\xdd\x61\x39\xed\x7d\xf8\
\x27\x29\x79\xaa\xbd\xd5\xd8\x6b\x42\x98\xee\x55\xf6\x3e\xf7\x0a\
\xd1\x89\x1e\x23\xa8\xc0\x5f\xe0\x15\x89\x98\xae\x76\x0b\x66\x1c\
\x7c\xc6\xa3\xa9\x3d\x72\x91\xb4\x42\x6b\x69\x6b\x3d\xf0\xa0\xe3\
\xba\xbb\x7b\xf0\x4c\x70\x59\x45\xfb\x06\x7e\x6b\x6d\x6d\x1d\x3b\
\xa6\x73\x60\x70\xa4\x52\xa9\x90\x00\xa7\x97\x1b\x71\x2d\x81\x4b\
\x6d\xbf\xfb\xab\x71\x3d\x58\x01\x5c\x27\x09\xe8\x33\xfb\x4c\x0d\
\xf5\x98\x24\x71\xf7\x18\x37\x5f\x4f\x3c\xf6\x83\xee\xf7\x13\x74\
\x30\x19\xed\x26\xa9\xc3\x8d\xfe\x4f\x8c\x71\x09\x4c\xee\x4e\x4f\
\x19\x86\x09\xdc\x78\x5c\x74\x41\xe1\x78\x8c\xd2\xc6\xd6\x36\xe0\
\xe0\xe4\x89\xd6\x95\x48\x2c\x55\xab\x69\xad\x96\x42\x22\x65\x5f\
\xfd\xb8\x8c\xbd\x16\x03\xcd\xf7\x6b\x95\xda\x2f\x86\xba\x53\x6d\
\x1f\x95\x7d\xa9\xa5\x16\xcf\xcc\x90\x30\xd4\x63\xef\x49\x04\xf9\
\xb8\x4f\xfc\xd5\x10\x65\xa4\xbe\xdf\xbf\x22\xbc\x54\x30\x23\x01\
\xad\x75\x10\x4c\x16\x35\x4f\x82\x8c\x8b\x11\xec\x9a\x75\x19\x4f\
\x41\x56\x41\x65\x5d\x39\x0d\x9e\x91\x4c\x5a\xb3\xa6\x8f\x71\x14\
\x24\x20\x34\x62\xfa\x61\xf0\xc6\x91\xbc\xe8\xd2\x9e\xcd\x78\x79\
\x59\xa4\xd2\x90\xc5\xed\xc6\xc1\xe2\xd8\x48\xf4\x28\x9e\xb6\x93\
\x27\x39\x3e\x0a\x64\x90\xeb\xcc\xa3\xeb\xfe\x94\x11\x27\x4d\x3b\
\x61\x2c\x12\xba\x76\x34\xc3\x84\xe3\xc9\x8a\x9e\x61\x69\x59\x79\
\x85\x60\x81\x4e\xf8\x0e\xfa\xb7\x22\xde\x4d\xec\x4d\x58\x1a\x02\
\xb8\x96\xe2\x2a\x55\xee\x67\x26\x40\xe4\xc8\xdb\x09\x15\x5a\x3b\
\x93\x23\x32\xea\x0e\x02\xdc\x0b\x20\xe5\x7e\xe0\xf0\x65\x2a\x20\
\x9b\x23\x2c\x01\xdd\x94\xe8\x88\xd4\x40\xa0\x8e\x2a\x92\x44\x6b\
\xc9\x4b\x9e\x8f\xfc\x12\xfa\x6b\xb7\x7a\xda\xcb\x9e\x94\x96\xd1\
\x51\x86\x13\x7f\xbc\xc2\x29\x2d\x1a\x33\x6f\xd0\x62\x44\x6e\xca\
\xcf\x12\x8c\xd6\xd6\xd6\xd2\xdc\xd4\x94\x34\x24\xb0\x00\xbc\xf0\
\x8c\x28\xcb\x4d\x8f\xbe\x18\x1d\x09\xa9\xc4\x38\xa1\x92\x26\x6e\
\xb6\xda\xbd\x9f\x49\x8e\x3f\x8d\xb8\x96\x9f\x61\xe1\xb5\xe0\x96\
\x46\xb0\x2f\xb8\x76\xb0\xaf\xaf\xa1\xa1\xa1\x44\x9f\x06\x50\x8c\
\xd7\x76\x1e\x22\x52\x06\x7e\x9a\xd9\xc4\x13\x42\x8e\x78\x2a\x23\
\x70\xe2\x28\x62\xac\x20\xa4\xed\x7c\x2d\x1d\x00\x9d\x34\x1d\x37\
\xb6\xf3\x83\x0f\x3f\xfe\xe4\xbf\xbf\x5a\xb3\x6e\x43\x47\x47\xe7\
\xe7\x5f\xac\x1d\xa9\xd6\x82\x02\x96\xba\xd9\x09\x5c\x65\x98\x86\
\xb5\x54\xe2\x42\x99\x93\x98\x16\x6a\x3c\x6f\x12\xc8\x07\x97\x3e\
\x27\xbb\x9e\x39\x73\x7a\x6f\x5f\xdf\xd4\x29\x5b\x4d\x9d\x36\xf9\
\xcf\x77\x99\x3b\xd2\xdf\x47\x7c\x2d\x9e\xc6\xe4\x60\x98\xcc\xe9\
\x4f\xa9\xb5\xe3\x9c\x74\x33\x29\x31\xb5\x14\xd2\x56\x92\x59\xbb\
\xcf\x4b\xb7\x4a\x64\x33\x90\xc5\x38\x71\x5c\xd7\xfd\x0f\x3e\xb9\
\xf2\xd5\x4f\x97\x2d\xfb\xeb\xd9\x53\x3b\xb6\xde\x7a\xeb\x1f\x35\
\x35\xd7\xb3\x4f\x82\x51\x67\x0a\x46\x45\xfe\x07\x34\x02\x1a\xb8\
\xbf\x70\xbc\x5c\x6a\x2e\x3a\xf9\x61\x82\x69\x95\x51\x79\x6a\xbe\
\xfc\xf2\x8b\x6a\x75\xe4\xf6\x7f\x7c\x74\x87\xed\x57\x9e\x73\xce\
\xa9\x43\x83\xbd\x09\x1b\x20\xe2\x51\xcc\x29\x26\x18\x39\x76\x29\
\x9d\xbe\xf4\x1c\xad\xd8\xc0\x28\xa8\x40\x92\x41\x72\x26\x69\x6a\
\x5a\x5b\x5b\xde\xff\xe0\xa3\xdf\xfd\xfe\x0f\xcb\xef\xff\xe7\xc6\
\x4a\xc3\xff\x42\x27\x60\xd6\xac\x99\xf8\x5c\xbc\xf8\xc7\x67\x9c\
\x71\xc6\x6b\xaf\xbe\xb6\xdb\xbc\xdd\x87\x07\xfb\x1a\x92\x84\x65\
\xbf\x09\x64\x9d\xd3\x27\x4c\x52\x16\x74\xe7\x1b\x38\x7d\x16\x16\
\xcc\xc8\x15\xae\x44\xf6\x29\x14\x5b\x5b\x6b\xcb\xef\xff\xe3\xcd\
\xc3\x7e\xf2\x57\x40\x07\xce\x47\x63\x63\x63\xa9\x57\x51\x74\x0b\
\x4a\xff\x3a\x8a\x9f\x51\xcf\x1d\x29\xf5\x2d\x82\xc8\xb0\x0d\x64\
\xde\xdc\xdc\xbc\xf4\xa7\x87\xff\xfb\x8b\xcf\xec\xfe\xc3\xbd\x8c\
\xe9\xb5\xe2\x2e\x9b\x66\x9a\xb7\x8c\x9c\x78\xf5\xac\xc7\xbe\xa7\
\x57\x9c\x10\x65\xd6\x42\x10\x1a\x9c\x64\x77\x00\x88\xd5\x3c\x2b\
\xaf\xa1\xa1\xda\x36\x53\xb6\xc9\x38\xb0\x52\x49\x92\x84\xdf\x94\
\xa6\xe9\x1f\x11\x20\xc8\x63\x0c\xc6\x59\x3d\xb6\x6d\xb3\xcd\x36\
\x2f\xf4\x0e\x1b\x33\x4c\xf6\x83\x12\xd2\x8a\x5f\x91\x49\xf1\x44\
\x13\xaf\x3a\x0e\x4a\x61\xeb\xd5\x58\xba\x39\x40\xb2\xd9\xa7\xc6\
\x3f\x80\x20\xcd\xcb\x20\xcf\xa9\xe8\x5d\xbf\xbe\xbb\xad\xbd\x43\
\x0e\x9d\x56\x0c\xd4\xf4\xc7\x65\xae\xe1\xe1\x61\xc2\xc8\x2d\x6f\
\x63\xa5\xa9\xc9\x79\x45\x04\xb8\x72\xae\x46\x50\xb7\x7d\x43\x43\
\x43\x23\x23\x9a\xbd\x89\x5a\xad\xd2\xd0\xd0\xd1\xde\xee\xe5\x51\
\x6a\x21\xd3\x19\xf3\x38\x4d\x6c\x02\x77\xe2\x15\x21\xaa\xe2\x74\
\x8c\xa9\x55\xab\xad\x6d\x6d\x13\x26\x4c\x30\x4e\xf3\x41\xf6\x1b\
\xa0\xf3\xf1\xc7\x1f\xaf\x58\xb1\xe2\xcb\x2f\xbf\x8c\x1c\xe8\x88\
\x04\x22\x7b\xb7\xae\x67\x5b\xdf\xe3\x8f\x68\xcd\x3a\x22\xc9\x56\
\x5b\x6d\xb5\xe7\x9e\x7b\xee\xbc\xf3\xce\x98\x6f\xf0\x75\x6a\x59\
\x44\xc2\x38\xc6\xb1\x36\xa4\xb3\xf6\xb2\x61\x37\x34\x75\xae\x7c\
\xe9\xa5\x5f\x5e\x7e\xdb\x8c\xad\x27\x8f\xc0\x6c\x6a\x6a\x4c\x1a\
\x1b\x9b\x3b\x3a\xd6\x6e\xda\xbc\x68\xf1\xbc\x65\x3f\x3b\x72\x78\
\x60\x73\xa5\x21\xf1\x53\x27\x15\x90\x91\x93\x15\x44\xce\xb6\xce\
\x6b\x31\xad\x89\x7e\xc1\x4c\x96\x08\x1d\xc7\x82\x70\x5e\x7f\xfd\
\xf5\x63\x8f\x3d\x76\xc6\x8c\x19\x63\xc6\x8c\xc1\x57\xd0\x6d\xc5\
\x36\xba\xc8\xe2\x26\xf6\x9a\xb8\x80\x2e\xa8\x9f\x7b\x12\xdf\xc8\
\xef\x63\xd7\xa1\x18\x8b\xa0\x97\xa6\xf9\xb6\x76\xed\x5a\x88\xe4\
\xf3\xce\x3b\xef\x80\x03\x0e\x00\x46\x44\x47\x78\xea\x17\x5f\xac\
\x51\xa6\x6a\xad\xad\x20\x3a\xbc\xa0\x69\xd8\xb0\xb9\x7b\x61\x5b\
\xd7\x31\x0b\x16\xf5\x34\xb7\xb4\x74\x75\x35\x8c\x1d\x3b\x76\xfa\
\xf4\x17\xdf\x7c\xe7\x9d\xf5\xef\xd2\x4f\x28\x90\xe3\xcc\x22\x8b\
\x8f\x57\x58\x6e\x6c\x15\x41\x8d\x86\x4d\x64\xe3\x98\x9f\x48\xb2\
\xd6\xd2\xd2\xb2\x7c\xf9\xf2\x1d\x76\xd8\x61\xf7\xdd\x77\x07\x91\
\x57\xf2\x8d\xe0\x00\x6a\x0c\x13\x77\xd2\x45\x11\xa6\x52\x8c\x18\
\x1d\xa2\x59\x89\x4e\x15\x44\xdd\xda\x3a\x6d\xda\xb4\x07\x1f\x7c\
\x10\x00\x49\xe7\x7b\xfd\xfa\x4d\xda\xcb\x65\x0c\x38\x35\x89\xf6\
\x7e\x9a\x45\xb0\xc1\x54\x1a\x06\x4d\x3a\x94\xd6\x54\xad\xda\x50\
\x1d\x69\x1b\x1e\xaa\xd6\xaa\x18\x97\x23\x5e\xcf\x9a\x19\xb3\x19\
\x1d\x44\xb5\x5f\xb5\x8a\x90\xb2\x56\x34\xb1\xcd\xef\x47\x80\x99\
\x00\xa3\x75\xeb\xd6\xc1\x1a\xda\xb2\x65\x4b\xf6\x4a\xd1\x30\x74\
\x42\x07\xf7\xe0\x02\x9f\xb8\xe6\xe9\xd1\x05\xfa\xf9\x93\x60\xd2\
\xa2\xe5\xd4\x8d\x98\x39\x1e\x45\x9f\xa9\x95\xa9\x9b\x36\x6d\xc2\
\x00\x70\x8d\x91\x4c\x9c\x38\x11\xeb\x84\xce\x31\x9d\x63\xa6\x4e\
\x9d\x5a\x1b\xa9\x42\x91\xb8\x91\xb3\xae\x76\x71\x3e\xf2\x0f\x54\
\xf8\x5f\x28\x07\xcf\x4c\x4e\x82\x5b\xce\x31\x41\xf7\xb3\xb3\x1a\
\x7c\x31\x0b\xa4\x16\xd6\x9d\x14\x43\x11\xa7\x10\xbd\x74\x76\x76\
\xe2\x82\x98\x8e\x0c\x02\xd0\x7f\x3d\x22\x92\x14\x44\x44\x54\x64\
\x31\x23\x16\x86\x00\x75\x8e\x8e\x52\x4d\x4d\x4d\x6d\x6d\x6d\x03\
\x03\x03\xf4\x15\xfd\xe8\x69\x6f\x6f\xcf\x6e\xd3\x0d\xc1\x1e\xb6\
\x3e\x8d\x0c\x01\xe8\x62\x23\x61\x2c\x8d\x1e\xcd\x9a\x50\xba\x77\
\x3a\xa8\x79\xa3\x0c\x87\x4a\xc8\x7e\x95\xc1\x6d\x9a\x8f\x94\x23\
\x98\x39\xc6\xfa\xe2\x8b\x2f\x76\x75\x75\x0d\x0e\x0e\xe2\x2b\x3a\
\x27\x4d\x9a\xb4\xcf\x3e\xfb\xf4\xf7\xf7\x37\xf8\x26\xa1\xa1\x4f\
\xdc\x46\x44\x44\xd7\x11\x05\x31\xdd\x45\x24\x46\x90\xb9\xd9\x89\
\xe8\x44\xea\xc2\x28\xc6\x4f\x3a\x21\x1b\xc7\xd2\x0d\xf9\x98\x8e\
\x33\xfc\x34\xbd\x31\xe3\xfd\x54\xcb\x99\x19\x19\x38\x93\x91\xac\
\x4d\x56\x94\xc1\x96\x09\x4e\xbe\xe4\xb0\xcc\x14\xda\xbc\x79\xf3\
\xa7\x9f\x7e\x3a\x65\xca\x14\x52\x28\x44\x3b\x18\x19\x44\xde\xd7\
\x5f\x7f\xbd\x7e\xfd\xfa\xa3\x8f\x3e\xba\xbb\xbb\x1b\xd4\xf4\xc4\
\x13\x4f\xf4\xf6\xf6\x1e\x71\xc4\x11\xf8\x5a\xc4\x88\xa0\x91\x48\
\x45\x10\x48\x80\x88\xb3\xc2\x92\x5b\x81\xc2\xe8\xf8\x96\x2a\x0f\
\x8f\x0b\x69\x78\x14\x9c\x03\xee\xfc\xf5\x60\x08\xf8\x88\x82\x9f\
\xa0\x4a\x8c\x61\x53\xc8\x06\xae\x0c\x05\x34\xdc\x82\x89\xb4\x0f\
\x85\x49\x82\x67\x6f\x58\x9b\x00\x0e\x50\x32\xaf\x21\x3e\x47\x46\
\x46\x20\x35\x31\x81\x93\x4e\x3a\x09\x7a\xed\xb3\xcf\x3e\x5b\xb8\
\x70\xe1\xec\xd9\xb3\x2f\xbe\xf8\x62\x58\x03\x4f\x3d\xf5\x14\xb0\
\x23\x34\x25\x67\x45\xba\xaf\xb4\x81\x4f\x99\x3d\x23\x8e\x66\x43\
\x31\x8c\xcd\xfd\xab\x69\xbc\x2c\x83\x48\x5e\x58\xb5\xe3\x04\x8d\
\x5c\x73\x1f\x58\x33\xec\xf9\x53\xb4\x46\xa8\x26\x17\xe5\xcd\xa6\
\x1c\xb4\x18\x45\x2c\xb4\x0f\xce\xf9\x06\x31\x0c\xce\x07\xef\x00\
\x0e\x1a\x1f\xc0\x42\x84\xff\xd9\x67\x9f\x05\x34\x40\xea\x90\x43\
\x0e\x79\xe0\x81\x07\xd6\xac\x59\x83\x6b\xc8\xce\x4b\x2e\xb9\xe4\
\xdd\x77\xdf\xfd\xfc\xf3\xcf\x81\xa0\x9c\x58\x51\x30\x8d\x02\x8d\
\xbc\x81\x51\x26\x0a\x2a\xc6\xf6\xb3\xa9\xfa\xd4\x4b\x10\x44\xec\
\x7c\x19\x1d\x22\x79\x3e\x6e\xe1\x6f\xd0\xc6\x8b\x5d\x6f\x23\xc4\
\xee\x6e\x22\x0c\x45\xe3\xfd\x65\x0a\xfa\x06\x19\x44\xaa\x84\x99\
\x02\xd7\xe3\xc6\x8d\x03\x73\x3d\xff\xfc\xf3\x98\xd5\x3d\xf7\xdc\
\x73\xca\x29\xa7\x40\xc5\xd0\x9f\x90\x30\x99\x3b\x77\x2e\x3b\x71\
\x91\x9a\xaf\x87\x4e\xd1\x74\x28\xa5\x23\xe6\x41\x11\xc6\xb2\x91\
\x54\x1f\x7d\x64\x1b\xd6\x73\x42\x3e\xa0\xe2\x22\x3d\xca\xb9\x68\
\x1e\x47\xcf\x9d\x84\xb2\xf3\xc5\x02\x8b\x71\x00\x53\xb9\x28\x58\
\x79\x96\x92\x55\x0f\x85\x44\x4e\x3f\xfd\x74\xd0\xcb\xd5\x57\x5f\
\xbd\x64\xc9\x92\xfd\xf6\xdb\x8f\xe4\x34\x2d\x38\x88\x4e\x5a\x89\
\xf4\xab\x06\xd1\x46\xc7\x48\xa2\x53\x34\x32\xa3\x0c\xb0\x63\x09\
\xa3\x83\x23\xee\x94\xb4\x71\x26\x5d\x08\x49\x73\x1c\x4e\x09\xc1\
\xcb\x94\xe1\x48\x82\xa2\x7f\xec\xd3\x05\x2d\xc6\x21\x49\x23\xc8\
\x47\x0e\x45\xe6\x3f\xb0\x8c\x60\xa8\xe3\x8f\x3f\x1e\x3a\x6b\x97\
\x5d\x76\x81\x7d\x44\x9a\x3e\x7b\xa2\xf7\x6f\x79\x4a\x72\x86\x0d\
\xf9\x26\x6d\x45\x16\x79\xb4\x00\xf4\x5e\x32\xbb\xa4\x6d\x19\x2d\
\x9e\x76\xb8\x38\x9f\xdd\xbb\xe3\x6e\xf2\x6c\x14\x29\x99\xc3\xf0\
\x4e\xaa\x8b\x2f\xeb\xe0\x03\xa5\xa9\x51\x79\xb3\xa3\x52\xe2\x4c\
\x89\xe1\x32\x40\x51\x52\x81\x97\x7a\xce\x9c\x39\x50\xea\x84\x0e\
\x4f\x06\xe6\x09\x99\x8b\x8c\x69\x3d\x8c\x98\x71\x84\x2f\x59\xe3\
\xaf\xfc\x40\x69\x3d\x15\xd3\xf0\x71\x44\x21\xcb\x2f\x79\x49\x4b\
\xf3\x0d\xa9\x04\xef\xaa\xeb\x20\x4d\x98\xf5\x88\x1c\xd9\xb0\x74\
\x32\x28\xf2\x11\x59\x3c\x8f\xc2\x65\x18\x37\x40\x81\x0c\x82\xb8\
\xc1\x05\x59\x40\x3c\x79\x40\x83\x3f\x41\xd9\x43\xb4\x93\xe4\xe2\
\xf5\x2f\x62\x54\xea\xb5\x44\xca\x4b\x1a\x96\xa6\x0c\x1e\x17\x96\
\x34\x3e\x25\x68\x9c\x9a\xae\x9b\xe7\xd5\x2e\xa8\xc8\xfa\x5e\x07\
\xcb\x40\x0b\xc1\x6e\xfd\xe4\x5c\xb8\xc3\x46\x21\x4d\x14\x32\xcd\
\x7b\xe1\x18\x25\x44\xcf\x82\x05\x0b\x1e\x7e\xf8\x61\x68\xab\xc9\
\x93\x27\x03\x11\x28\x2c\xf8\x6b\xf8\xc4\x35\x2c\xe9\xa5\x4b\x97\
\x3e\xf2\xc8\x23\x50\x64\x50\x73\xd2\xe4\xfb\x56\x8c\x22\xe2\x8a\
\x7e\x58\x1e\x1b\xf0\xdc\xc5\x01\x0d\x1f\xfc\x66\xb9\x22\xa6\xe3\
\x85\x34\xa7\x40\x94\x47\xc7\x27\x36\x28\xbd\xa7\x59\x1f\x26\x26\
\x97\xd6\xd0\xe4\xdf\x1a\x5d\xba\x54\xae\x41\x06\x23\x18\x72\xe4\
\x91\x47\xde\x71\xc7\x1d\xaf\xbc\xf2\x0a\xbc\x24\xe8\x78\x50\x0d\
\x22\x21\xe4\x70\x41\x2a\xc1\xed\xbe\xee\xba\xeb\x36\x6c\xd8\x00\
\xd4\xa4\x11\x2c\x27\xcc\x58\x34\xda\x16\xe1\x12\xfd\x84\xe9\xb7\
\x6c\x60\x21\x3a\xa1\x3d\x45\x68\x6f\x45\x07\xe2\xf0\x3e\x5a\x90\
\xaa\x85\x5a\x1c\xdf\x6b\xd8\x24\xac\xc8\x31\x90\xe4\x27\x57\x25\
\x92\x41\x32\x76\x83\xa1\x83\xb3\x10\xa0\x39\xfc\xf0\xc3\x9f\x7e\
\xfa\x69\x90\x09\x05\xee\x30\x49\x20\x72\xd5\x55\x57\xc1\x05\x99\
\x3f\x7f\xfe\xb9\xe7\x9e\x0b\x1d\x77\xc3\x0d\x37\x20\x96\x04\xd4\
\x48\xeb\x47\x8d\x61\x8a\x84\x5d\x3d\x6f\xb6\x14\x20\xc9\x62\xf9\
\xa2\x0e\xe3\xd3\x03\xb1\x1f\xa6\xbd\xed\x1d\xe5\xe6\x49\x87\x6b\
\x31\x8c\x28\x68\xef\x93\xfb\xc2\x50\x95\xe3\xe6\x21\x62\xb6\x40\
\x01\x18\xc1\xc9\x40\x0f\xd9\x78\x70\x35\x56\xad\x5a\x75\xc5\x15\
\x57\xdc\x7c\xf3\xcd\xd0\x6b\x50\x70\x60\xc6\xeb\xaf\xbf\xfe\x9a\
\x6b\xae\x81\xca\xa3\x52\xb6\x28\x42\xc6\xd4\x54\xf4\xbf\xbe\x33\
\xf9\x94\x42\xe3\x3b\x95\x09\x39\x0a\x9f\xee\xd2\x21\x6f\x6a\xa2\
\x08\x6f\x0a\x6f\xc3\x65\x86\x4c\x2c\x83\x1c\xbf\x92\xfc\x2f\xcb\
\xed\xc8\xd1\x10\xd7\x80\xd7\x20\x8c\x07\x7c\x83\x53\xb6\xd7\x5e\
\x7b\xcd\x9c\x39\xf3\xac\xb3\xce\x82\x07\x0b\xd6\x83\x91\xbd\xc7\
\x1e\x7b\xc0\xff\x00\x76\xa5\x15\x3d\x84\x0b\x19\xa2\x91\x4e\x28\
\xfd\x1a\x8b\x21\xe3\x92\x42\xa6\xbc\xb8\xcd\x33\x0c\x27\x33\x54\
\xc8\xc9\xc7\xf1\x6f\xbe\x9b\x12\x8b\xfe\xef\x41\x06\x59\x9b\x93\
\x49\x53\x95\x16\xb5\x45\x30\x49\x63\x87\x3c\x58\xb0\x18\xf8\x6e\
\xfa\xf4\xe9\x88\xfe\x21\x6a\x83\xaf\x00\x6b\xb7\xdd\x76\xbb\xe8\
\xa2\x8b\x60\x7c\x47\x28\xb0\x47\x8a\x06\xac\x8b\x30\x45\xc5\x74\
\x25\x14\xe4\xd6\xd3\xa5\x4f\xf3\xf7\x2b\xc3\x56\x8e\x0e\xae\x93\
\xb3\x1d\x85\x0c\xd1\x02\x1d\x67\x03\xaa\x30\xc7\xa4\xac\x2c\x8b\
\x7e\xaf\x22\x4a\x8e\x84\x51\xb1\x6a\x91\x2c\x17\xc4\x46\xc1\x77\
\xa0\xa0\x33\xcf\x3c\x13\x3a\x0e\xf1\xad\x0b\x2e\xb8\x60\xd6\xac\
\x59\xa0\xa3\xf1\xe3\xc7\x13\x0a\x4c\x35\x04\x0d\x18\x90\x00\xe2\
\xbf\x72\xbc\xad\x54\x14\xe6\xbc\x55\x1f\xdd\xf0\x5a\x9b\x61\x72\
\x6b\x4d\x20\xb2\xaa\x12\xf9\x09\x27\x9f\x72\x91\x90\x5c\x5a\xc4\
\x32\x4a\x24\x0c\x85\x26\x2b\x5f\xc9\xa8\x0e\xb2\x38\x07\xc2\xe8\
\x84\x13\x4e\x80\x60\x3a\xfb\xec\xb3\x21\xa7\xc0\x7a\x97\x5e\x7a\
\x29\x52\x34\x97\x5f\x7e\x39\x30\x62\x62\x61\xc2\xa9\xfa\x16\x61\
\x14\xd1\x5a\x09\x7d\xb9\x90\xa1\x62\xf3\x99\xd5\xb6\x11\x35\x59\
\xb2\x50\xc4\x57\x1e\xe5\x74\x5f\xae\x34\x8f\x4d\x05\x52\x23\x11\
\x3f\x07\xa8\xeb\xb7\x51\x30\xe2\x60\x20\x08\xe7\xd4\x53\x4f\x85\
\xcb\x0a\x09\x0d\x5e\x43\x78\xe8\xca\x2b\xaf\x84\x61\x09\x99\x8d\
\xc0\x00\x6c\xa5\x9a\x6f\x44\x3e\xa5\x18\xf1\x45\x69\xed\x2a\x2b\
\x65\xe7\xb1\x4a\x99\xe2\x53\x13\x0e\xaa\x9c\xc8\xe1\xbc\x60\xb0\
\x06\x73\x30\xf9\x68\x8f\xa3\xa0\xf0\x67\x56\xf0\x21\x52\x32\x5a\
\x05\xef\xe8\x28\xe2\x49\xc0\x08\x02\xe8\xa3\x8f\x3e\xba\xf7\xde\
\x7b\x11\x1e\x82\x65\x70\xed\xb5\xd7\x42\xaf\xdd\x77\xdf\x7d\xc0\
\x08\xce\x2d\xd3\xce\x88\x6d\x11\x46\x11\xbb\x31\x4c\xb1\x90\x8e\
\x94\x4c\xd0\xc7\x2e\x40\x48\x7a\x5b\x09\x44\xb9\xc4\xa7\xbc\x66\
\x58\x2b\x19\xed\x91\x51\x61\xed\x0c\x4a\x65\xd4\xa8\x65\xc7\x1c\
\xf4\x8b\xee\x29\xae\x36\x84\xf4\x69\xa7\x9d\xf6\xd0\x43\x0f\xc1\
\xb0\x86\xe7\x01\x50\x60\x19\x21\xb9\xf6\xe8\xa3\x8f\x82\xac\xa0\
\xf8\x24\x7f\x31\x3a\x74\xc1\xf2\xbb\x9e\x54\x12\x1c\x65\xa4\x2b\
\x9f\x23\x6e\x57\xff\x20\xe2\xd2\x14\x0b\x0b\x91\xb5\x42\xf1\x34\
\xfb\x2c\x1c\x0f\x62\x43\x5a\x19\xcf\x82\x05\x35\x36\x0a\x5b\x95\
\xea\x26\xea\xa4\xa9\x82\xb9\xee\xba\xeb\xae\x27\x9f\x7c\x12\xee\
\x08\x3a\x11\x3f\x7a\xec\xb1\xc7\x60\x61\x42\x1e\x81\xac\xaa\x85\
\xc6\x7a\x8d\x31\xa2\x0c\x47\xa9\x8e\xd3\x41\xbe\xd6\x29\x3f\xe7\
\x60\x4f\x5e\x74\x49\x22\xca\xd9\x3a\xbe\x74\x36\xae\x0f\x22\xfb\
\xc8\x05\x84\x74\x09\x1c\x3c\x3e\xba\x90\xb1\xab\xb4\x4e\x83\x30\
\x02\xd5\x80\x76\x80\x11\x22\x8d\x10\xdb\x07\x1e\x78\x20\xfa\xef\
\xbe\xfb\xee\x63\x8e\x39\x06\x5f\x17\x2d\x5a\x04\x11\x4e\x19\xed\
\x52\xf5\x2f\x25\x51\x39\x11\xf9\xb5\x8d\xec\x0f\x5f\x6a\xe3\x4a\
\x3e\x8b\xc6\x39\xa7\x77\x62\xfe\x32\xce\x2f\x8b\xeb\x83\xb8\x12\
\xcb\x18\x5f\x89\x57\xd0\xaf\x64\xec\xf2\x1c\x38\x33\x53\x4a\x41\
\x34\x37\x18\x47\x20\x13\x88\x67\x58\xd8\x97\x5d\x76\x19\x81\x82\
\x7e\xd0\xd4\x71\xc7\x1d\x87\xbf\xce\x9b\x37\x0f\x49\x01\x5c\x14\
\x9f\x10\x09\x6c\x0e\x12\xe5\x49\xc8\x95\x1d\xc4\x26\x24\xfb\xa5\
\xaa\xe0\xac\x86\xd0\x7e\x5e\x7f\x39\x6b\x28\x8d\x65\x50\x88\x01\
\xf8\xe0\xa3\x56\xc6\xd4\x11\xc1\x52\x00\x15\x49\x46\xae\x36\x73\
\x07\x1e\x0e\x75\x06\x8f\x0c\x00\x41\xa9\xbd\xf3\xce\x3b\x80\x03\
\x90\xdd\x76\xdb\x6d\xb7\xdc\x72\xcb\x7b\xef\xbd\x07\x87\x16\x84\
\x56\x14\x43\xb5\x42\x2b\xb1\x2d\x4c\x48\x6a\xe4\x7b\xdd\x74\x95\
\x74\x35\x42\xfd\x1a\x33\x65\x2e\xdd\xe8\x73\x5e\xba\x04\x20\xc3\
\x9e\x98\x11\xe2\x2b\xef\x13\x44\xe8\x44\x48\x45\xe8\x44\x2b\x0f\
\xa7\x04\x6e\xed\x39\xe7\x9c\x73\xfe\xf9\xe7\xc3\xfb\xc7\xfc\x21\
\xa7\xa1\xf5\x6f\xbd\xf5\x56\xe2\x32\x5b\x85\x31\x42\x91\x7f\x36\
\x1d\x8b\xa2\x3a\xd2\x62\x9a\x22\x3f\x69\x08\x6c\x84\x30\xaa\x96\
\xda\x3f\x57\x36\xc8\x79\x10\x0e\x92\xf0\xbe\x95\xbc\x51\xa0\x92\
\xa8\x3e\x48\x3b\x4b\x51\xd7\xb3\x7d\x8a\xaa\x37\x92\x14\x52\x76\
\x70\xc3\x6c\xf1\x04\xb0\x12\x92\x6b\x88\x93\x20\x59\xd4\xd3\xd3\
\x03\xaa\xd9\x6e\xbb\xed\x20\x8c\x2e\xbc\xf0\x42\xb0\x21\x44\x15\
\xe5\x36\x10\x90\xa4\x00\x40\x91\x88\x4a\xb4\x18\x19\x3c\xd6\x4f\
\xd2\xdc\xe1\xcd\x19\x76\xbb\xf8\x5e\x4e\x92\x46\xe5\xe4\x4a\x09\
\xe7\x5e\x44\x0d\x65\x7d\x90\xcf\x81\xf8\xa0\x74\x5a\x06\x8d\x94\
\x41\xdc\xc3\x9f\x94\xea\xe3\x64\x71\xbc\xf7\x2a\x49\xa0\xf8\x11\
\x09\x01\xb1\x00\xa3\x3b\xef\xbc\x13\xc4\x02\x91\x04\xd7\x1f\xce\
\x1a\x8c\x6f\x10\x0e\x8f\x18\x4e\x2f\xae\xa9\x87\x20\x26\x91\x57\
\x12\x0d\x12\x51\x30\x2d\xa4\x84\x8f\x3c\x2b\x59\xa0\xc8\xa9\x09\
\xa5\xeb\x6f\xbd\x12\x5f\x2b\xe2\x26\xe3\x64\x8f\x36\xa5\x4e\x50\
\x11\x0e\xd9\xc3\x49\x74\xfe\x2c\x6a\x5c\xf2\x6c\x11\x5a\x43\x40\
\x12\xe4\x03\x53\xfb\xf6\xdb\x6f\x07\x59\xed\xbf\xff\xfe\xe8\x7f\
\xf9\xe5\x97\x41\x41\x04\xc4\xc6\x8d\x1b\x1f\x7f\xfc\x71\xd8\x99\
\x80\x52\xea\xfb\xa2\x0c\xd2\xb2\xfe\xc0\xa8\x7c\x70\x42\x99\xbc\
\x59\xac\x44\x69\x43\xe9\x66\x37\xf9\x93\x62\x7d\x90\xab\x96\x35\
\xba\x64\x07\x8e\x14\xd2\x04\x81\xcc\x0e\x33\xc9\x94\xa2\x13\x01\
\x04\x67\x0d\x32\x68\xdf\x7d\xf7\x05\x46\xc8\x1d\x21\xa2\x06\x38\
\x00\xd9\xde\x7b\xef\x4d\x3c\x05\xaa\x01\x97\x41\xcd\x41\x42\x21\
\x2a\x00\x8c\xd8\x71\x2b\xd9\xfc\xc4\x01\xb3\xfc\xa8\xdd\x8a\x1b\
\xc3\x2a\x3e\xbf\x55\xc0\x48\x5a\x11\x4e\x78\xd8\x1f\x12\x82\xf6\
\x62\xab\x88\x77\xe2\xca\xdc\x88\x74\xd4\xc6\x32\x28\x12\x1c\x45\
\x07\x02\xf3\xc7\x1b\x81\x11\x08\x07\x29\xc6\x9b\x6e\xba\x09\x1a\
\x0d\x74\x04\xee\x43\x08\x09\x60\xe1\xfa\xab\xaf\xbe\x82\x6c\x42\
\x32\x12\x6e\x0a\xe4\x11\x7e\xc2\xa2\xad\xc4\x9b\xd7\x4a\x9a\x7f\
\x52\xa4\xfa\xe0\x57\x08\xf0\x70\xb9\xb6\x8c\x0c\x3a\x2d\xee\x2c\
\x1d\xa3\x85\x43\x9f\xe4\x73\x26\x1c\xbb\x57\x45\xa3\xb9\xe8\x22\
\x45\x3d\x91\xde\x91\xce\x7a\xd1\xa5\xc0\x93\xe1\xac\x21\xa3\x8f\
\x0b\x48\xe8\x28\xad\x84\x06\xd5\x76\xd8\x61\x87\xe1\x4f\x30\x11\
\xf8\xe7\xf5\xac\x44\x97\xef\xd4\xf9\xf0\x9e\xf1\x8a\x9d\x02\xc9\
\x21\x30\xc4\x81\x1f\xbe\xc3\xe9\x2f\xad\x83\x4f\x17\x0c\xc5\x42\
\xd1\xac\xb4\x13\x63\x19\xc4\x17\x5c\x7a\x21\x45\x32\x5f\x73\x78\
\xa8\x34\xbd\x45\xb9\x30\x48\x1c\x88\x67\x96\xee\xbc\x06\x59\x29\
\x72\x5b\x1b\x2e\x20\xad\xc0\x6b\x4c\x7d\x24\xa4\x4b\xbd\x79\xde\
\xee\x14\x2a\x38\x74\x70\xcf\xa5\xa8\x16\x5b\x1f\x72\xfe\x89\x0e\
\x31\x55\x51\x26\x24\xeb\x83\x9c\x59\xe1\x37\x63\xd5\x53\xf0\x04\
\x90\x14\x3d\x2c\x74\x18\x26\xde\x3f\x51\x44\x47\x02\x44\x80\x4a\
\x15\x4e\xf2\x18\xe8\xdc\x78\xe3\x8d\xa8\x87\x3c\xe8\xa0\x83\xa0\
\xe6\x98\x43\xb9\x94\x2b\xcf\x68\x32\x14\x9d\x33\x91\x95\xe3\x2f\
\xad\xf2\x51\x31\x57\x7b\xe0\x92\x14\x85\x7d\xc3\x2e\xa5\xa1\xe2\
\xfa\x20\x1f\xe0\xe5\x00\x66\x2c\x62\xd9\xf0\xd1\x3a\xde\x82\xa3\
\x0a\xdb\x5c\xe5\x3a\x73\xd5\x1e\x57\x40\x30\x40\x98\x39\xbe\x92\
\x59\x48\x37\x40\x1e\x01\x1d\x60\xf4\xc1\x07\x1f\x90\x8f\x02\x8c\
\x88\x7f\xeb\xec\x50\x75\x49\x67\x23\x77\x66\x0a\x3b\x0e\x71\x78\
\xa0\x5e\xb3\xff\xd3\x82\x60\x79\xbd\x9e\x2a\xd1\xf1\x4a\x40\x1a\
\xd5\x28\x12\xdd\x58\x4f\x35\xcf\x62\xac\xa1\x98\xce\x8b\xea\x9c\
\xf2\x65\x04\x07\xbe\x62\xce\x9c\xcf\x91\x14\x14\xa5\x2e\xf0\x13\
\xe2\x32\x92\x4a\x60\x28\x18\xd6\x60\xbd\x65\xcb\x96\x41\xf4\x20\
\x3b\x02\x74\x16\x2f\x5e\x0c\x2b\x9c\x1e\x52\x2e\x83\x72\xa6\xaf\
\x50\xde\xb6\x66\xac\xb3\xb9\x79\x42\x7b\x3b\x8a\x5c\x5b\xc6\x8c\
\x41\x95\x6b\x63\x57\xd7\x38\xe4\xa9\xbe\x89\xed\x1f\x11\xe7\x0f\
\x69\x9f\xb8\x46\xd1\x28\x5f\xfd\x21\x62\xd2\x7e\x27\x62\x95\x25\
\x45\x71\x0b\x64\x29\x67\x49\x16\x93\xb4\x23\x13\x90\x08\xa4\x41\
\xd9\x3f\xf3\xcc\x33\x87\x1e\x7a\x28\xf4\x1a\x3a\x11\xae\x45\x31\
\x34\x3a\x41\x3e\xf0\x66\x57\xaf\x5e\x8d\xb0\x91\x34\xb8\x0a\x32\
\xc8\xe5\x20\x74\x7e\x8f\x33\x8d\x73\x78\x68\xf8\xb9\x0f\x3f\xdc\
\x54\x49\x06\x92\x86\x4a\x5b\x5b\xd2\xd2\xd2\x31\x61\xfc\x1f\xbe\
\x5a\x3b\x7d\xa7\x09\x56\xf9\xa5\xc6\xe8\xd8\x60\xc8\xd7\x28\x26\
\xd2\x50\xd4\x6e\x9f\x47\x88\x9f\xd0\xc4\x10\x30\x45\x1e\x99\x8a\
\x58\xeb\x39\x13\x91\xe6\x8a\x22\xaa\x45\xad\x4f\x0d\x28\xa0\x5e\
\xef\xb9\xe7\x9e\x43\x14\x0d\x71\x7e\xbc\x11\x51\x91\xb7\xde\x7a\
\xeb\x8d\x37\xde\x20\x64\x51\x3c\x4e\x05\xad\x1c\x45\x2b\xb7\xa4\
\x4d\x8e\x04\x54\xe6\xdc\xeb\xc1\x81\xc1\x5d\xe7\xcc\x7a\xbf\xa5\
\x92\x2e\xdc\xbb\x71\xfe\x7c\x3d\x7b\x76\x75\xc6\x8c\xc1\x09\x5b\
\xbd\xf2\xfe\xbb\x7b\xce\x9f\x33\x3c\x30\x24\x76\x12\x6a\x2f\xcb\
\xec\x85\x50\x4d\x15\x59\x1f\xe4\x6c\xab\x3c\x7f\x61\x4c\xf0\xb6\
\x91\x6b\xa6\x0a\xe9\xd1\xab\xe8\xa5\x00\x92\x45\x73\x32\x31\x2f\
\x73\x81\x98\x33\x9c\xaf\xa3\x8e\x3a\x0a\x21\x47\xe0\x82\x57\x80\
\xcb\xd0\x8f\x20\xe4\xc9\x27\x9f\x8c\x52\x36\xa9\xe6\xea\xa5\x55\
\x5d\xf9\x9c\xf6\x01\x7b\x5f\x45\xd6\xdf\x3f\x30\x7b\xe7\x9d\x7e\
\xbc\x70\x6e\xb3\x49\xfe\x72\xff\xc5\xdd\xdd\x5b\x20\xda\xfe\xf3\
\xbd\xff\x5a\xf0\xc3\x9d\xe6\xcf\xdf\x79\xd3\xc6\xee\x24\x5b\x83\
\x90\x23\xb4\x3a\x30\xb5\x9b\x0e\x55\x4e\xcd\xe7\x4d\x74\xc5\xa5\
\xc0\x72\x97\x06\x32\x7f\x94\x1d\xe5\x2c\x6b\x3d\x73\x99\xd5\x56\
\xa4\xbf\x64\x8d\x9a\x6c\xe0\x26\xc8\x07\x64\x8a\x60\x25\xd2\x63\
\xc1\x62\x58\x09\xa2\x3e\x26\x19\x0e\x06\x15\x17\x24\x8b\xe0\xd8\
\x5a\x4c\x4d\x1b\x83\xbd\x2b\x8a\xd7\x0d\xf5\x0e\x2c\x3b\xf2\xc0\
\x2b\x7f\xb1\x7c\x9f\x45\x7b\x83\x92\x51\x29\xf1\x9b\xdf\xfc\xdb\
\xdf\x1c\x85\xeb\x54\x89\x4d\x77\x89\x71\x85\xbd\x76\xeb\x4f\x28\
\xee\x88\xed\x20\xb6\x2c\x64\x56\x8c\x30\xc2\x58\x69\xd0\x45\x8b\
\xae\x28\x77\x8a\xe8\x70\xed\x53\x94\x4a\x96\x5b\x54\x10\x0c\x91\
\x81\x4a\x2a\x15\x65\xa5\x4e\xe2\x0f\x77\xc6\x32\xc8\x55\x8a\xa5\
\xb4\xb0\xe4\xd6\x5b\x01\x9d\x3d\xbf\xaf\xb7\x7f\xd7\xdd\xe6\x4e\
\x9d\xd2\xb1\x6a\xd5\xea\x03\xf6\xff\x8b\xd5\xaf\xbf\xd5\xd2\x34\
\xb4\x60\xc1\x3c\x6c\x8b\x07\x71\xfb\x70\xad\xfb\x87\xca\x5c\x49\
\x02\xe7\xec\x20\xb1\x55\xc3\xc7\x21\xf3\xc4\x0c\xb2\x07\x97\x91\
\xbd\x5b\x1a\xd6\xa3\x42\x7a\x96\x59\x5c\x54\x5f\xba\x3f\xa3\x58\
\x98\xc0\x6b\x50\x52\x6b\xe1\x01\xa2\x12\x75\xb0\x24\x55\x8e\x5a\
\x04\xf5\x96\x9e\x1e\x08\x75\xec\xf9\x19\x1a\xae\x6a\x0e\xfa\x39\
\x13\x28\x25\x8f\x61\x78\x70\xf0\xe7\xcb\x0e\xba\xf9\x97\x4f\x1e\
\x7d\xf4\xcf\x7e\xfb\xdb\x15\x4b\x0e\x5e\xe8\xf6\xe6\x1a\x63\x4c\
\x6e\x53\x28\x3b\xb3\x5a\x95\x45\x14\x4d\xa8\x1c\x51\xc5\xc9\x83\
\xec\x11\x09\xc4\x02\x16\xab\x58\xa2\x52\xf1\x62\x21\xe2\xb7\x16\
\x25\x36\xfa\x26\xcb\x5d\x65\x23\x70\x57\xae\x5c\x89\x18\x08\x52\
\xd8\xec\xb5\xe2\x02\x1c\x1a\x85\xa2\x8d\x0f\x25\x12\x88\xbd\x3d\
\xbd\x7b\xec\xb9\xeb\xa4\x09\x4d\xf7\xdc\xbb\xbc\x31\xe9\xdf\xe7\
\x47\xf3\xb7\x6c\xe9\xc9\x24\xa3\x8a\x67\xea\x7f\xad\xa5\x60\xaa\
\x98\xa8\x74\xc6\x10\x23\x24\xfc\x63\xd2\x62\xd0\x35\x70\x20\xdf\
\x7e\xfb\x6d\x62\x07\x0a\xee\x48\xba\x28\x25\x90\x62\x85\x46\xc4\
\x5c\xa5\xa7\x9f\x14\x33\xda\x74\x1b\x12\xd9\xa8\x8a\x84\x4d\xc4\
\x88\xd4\xaa\xb5\x3f\xd9\x7e\xdb\x4a\x43\x45\xa5\x43\xaa\x41\x07\
\xab\xc8\x09\x6d\x37\x4f\x28\xfb\x63\x7f\xbe\xe4\xe0\x9f\xfc\xed\
\xa3\xbf\xbe\x1e\x9b\x16\xd2\xd4\x88\x64\x7d\xd0\xeb\x21\x55\xa4\
\x83\xeb\x1b\xd7\x07\x79\x62\x1e\x66\x82\xe7\x08\x19\xe2\x7e\x1b\
\x6d\xe3\xd1\x73\xfc\xcc\x9f\x9e\x90\xe6\xb6\x8a\xe4\xed\xcc\xe2\
\x3e\xc2\x52\xa1\x5b\x6f\x27\x22\xe2\xb3\xd0\xa4\xb0\x89\xc8\xa4\
\xe4\x01\xb4\xb4\x34\xdb\xb2\x15\x27\x7e\x78\x0b\xb9\x0f\x13\x19\
\x5b\xcd\x34\xb4\xe3\x0e\x33\xfe\xe1\xef\xcf\xfd\xc1\x0f\x76\xec\
\xeb\x1f\x00\xad\x7b\x44\x52\xc1\x43\xc6\x17\x00\x1b\x2d\xaa\x94\
\x72\x11\x45\xb2\xb6\xb2\x43\x3c\x5a\x2a\x70\xa6\x77\xfa\xb3\x59\
\x52\x4e\x23\x0a\x01\xe2\xdf\x76\xdb\x6d\xa3\x3d\x98\xd1\x6c\x8b\
\xdb\x30\xa3\x32\x4d\x56\xfc\xdf\xba\xa1\x2e\x7f\x66\x46\x15\x84\
\xcc\xc2\xde\xed\xf1\xce\xf6\xd4\xd5\xfc\xb6\x68\x13\x42\xa9\xda\
\x57\x27\x7a\x47\x15\x76\xdb\xd2\x25\x8b\xfb\xfa\xfb\x53\xca\x7f\
\x38\xeb\xc9\x09\x2d\xde\xb3\x9d\x3d\x20\xf5\x85\xb1\xd1\xa6\x5e\
\xa3\x43\xb8\x7a\xec\x98\xf6\xe1\xa1\xa1\x28\xaf\x46\xaa\x1a\x3c\
\xff\xad\x00\x45\x29\xaa\x88\x82\x46\x3f\xdf\xa7\x14\x20\x57\x89\
\xe2\x0b\xc9\x5d\x54\x53\xa9\xee\xee\xcd\x80\x8e\xf4\x03\x17\xcd\
\x5b\x03\x19\x84\xe1\x26\xcb\x16\xce\x96\x9e\xde\x24\xf1\xa7\x2a\
\x50\xd2\x5d\xe5\x23\x91\x5e\xf6\xc8\xc8\x49\x45\x3a\xc5\x78\x24\
\x4a\x68\x87\x6b\x66\xca\xe4\xf1\x2f\xbd\xb6\xfa\xa7\x4b\x0f\xdb\
\xd2\xbd\xc5\xa6\x11\x6c\x46\x4c\xf1\x81\x0d\x8a\x92\x0b\xd6\x58\
\x0f\x47\xf4\x98\x5c\x5e\x45\xf1\xcd\xc6\x67\x22\xd2\xe0\xe8\x66\
\x4f\x08\x47\x6b\x14\x8f\xfd\x71\xbb\xdd\xe4\x14\xe8\x28\x00\x7a\
\x66\xb6\x54\xe3\xbb\xc6\xc3\xa1\xdd\x71\xe6\x34\xa9\x82\x74\x38\
\x01\x87\x4f\x79\x71\xb1\x6a\xda\x37\x1e\x16\x49\xeb\x28\x4c\x1b\
\x2a\x18\xc5\x81\x03\x95\xa2\x08\xe8\xe9\xe9\xdd\xf7\x47\xf3\x1f\
\x7f\xe6\xe6\xbb\x7e\x75\xdf\xc1\x07\x1f\x90\x55\x46\x65\xf5\x87\
\x15\xeb\x97\xa4\x14\x84\xac\x77\xba\x94\xaf\xca\xd1\xa5\xe7\x1d\
\xf9\xc3\x0e\xc8\x2d\x4e\xf9\x81\xfc\x58\x25\x8e\xf5\x91\x50\x3b\
\xbd\x61\x1f\x0e\xd7\x25\x3b\x02\xaa\x96\xfe\xcb\xbf\x3e\xb5\xea\
\x95\xdf\x1d\x7f\xeb\x85\x5b\xb6\xf4\xea\x24\x09\x79\x51\x2d\xd2\
\x15\x5c\x3f\xe4\x4f\x10\x32\x26\xde\xe0\x18\x8d\x9f\xcb\x38\x9d\
\xa1\x18\x6b\x19\xbf\x83\xe1\xef\x2e\x3c\xf1\x57\xff\xf4\xc4\xc3\
\xbf\xfe\xac\xb7\x6f\x18\x35\x3e\xbd\x7d\x83\x23\x4e\x3a\xfa\x62\
\xd9\x42\x9d\x9e\x74\xaa\xb5\xdb\xac\xaf\xa3\x1b\xb8\xdf\x88\x03\
\xa8\x9c\x10\xd1\xf9\x2c\x68\x38\x17\x2c\xec\x63\xc3\xf7\x89\x13\
\xc6\xae\xfd\xfa\xeb\x71\xe3\x3a\x36\x6e\x58\x73\xd5\xa5\x27\x75\
\x76\xb4\x0f\x0c\x0c\x3a\x31\x97\xdd\x9f\xe6\xdc\x0e\x4b\xfd\x1c\
\xe5\x0a\x96\x8e\xdf\x07\x5c\xc8\xc7\xe6\x8f\xc1\x91\x42\x9a\x69\
\x0b\x7c\x0a\x5d\xde\xd9\xd9\x71\xe9\xf9\x27\xc2\x12\xeb\xe9\xe9\
\x03\x3d\x6f\xd8\xd8\x2d\x75\x16\xbd\x21\x3a\xbb\x27\x3a\xf1\x48\
\xe7\xcf\xf7\x50\xf5\x0e\x43\xe2\xa3\x10\x7c\xa5\x93\x32\xf9\x63\
\x80\xfc\x99\x1e\x14\xea\x9b\x38\xb1\xab\xb1\xa9\xb1\xbd\xad\x6d\
\x5c\xd7\xb8\xc1\x81\x21\x38\x5c\x09\x93\x8f\x88\xe5\xa8\x3a\xe5\
\x0c\x85\x32\x58\x1f\x67\x65\x1b\xca\x59\x3a\x62\xdf\x7c\xc8\x2b\
\x86\xd0\x6c\x02\xf7\x79\xfd\xc6\x6e\x30\x17\x22\x12\x78\xe8\xe4\
\xc9\x93\xf8\xf8\x17\xe3\x76\x5b\x05\x1f\x91\x8f\x94\xf1\x7f\x55\
\x7c\xf2\x06\x5f\x28\xd9\x13\x9d\x7c\x26\xa3\xe4\xf9\xfd\x39\xf2\
\x1e\x9a\xd1\x48\xad\x9a\x05\xc1\xb0\x66\x1b\x36\xc3\x5b\x20\x93\
\x2f\xf1\xd9\x54\x3e\x57\xc9\x17\x13\xba\xca\x27\x25\x22\x87\x9a\
\x37\xb6\x98\x20\x9b\x85\xaa\xcf\x95\xd2\x57\xe4\x81\x46\x81\xf0\
\xec\x0d\x95\xcc\x5e\x30\x1c\xf4\xcb\xef\x76\x08\x87\x63\xc8\xe9\
\xe6\xfa\xf3\x40\x14\xcf\x44\x08\x50\x16\xb1\xa8\xb7\x17\xdf\x1d\
\xf8\x65\xbd\xa4\x84\x8e\x50\x32\x3a\x1c\x65\x42\x4a\xcc\xe8\xb0\
\x89\x25\x7f\x64\x47\x20\x4f\xef\x52\x18\xa1\x51\xb8\xb4\x8a\x83\
\xdb\xf2\xfc\x20\xa2\x1f\x87\x91\x2a\x64\xfb\x8b\x47\x74\x89\x23\
\x6b\xc2\x4e\x0e\xfb\xaa\xa4\x8c\xa1\x74\x01\x23\xad\xcb\xce\x22\
\x48\x74\xb1\xa6\x34\x7c\x0d\xdb\x8f\x4d\x70\xb9\xbc\x9f\xa4\x45\
\xdc\xde\x1f\xea\xe2\xe1\xf3\x9e\x01\xef\xa2\xe3\x84\xac\x51\x74\
\x84\x10\x6b\xb8\x50\x50\x94\xab\x0f\xb2\xaa\x37\x1d\xc9\x82\x5b\
\x90\x35\x55\xbf\xdf\xd5\x1b\xa8\x9e\xb3\xe4\x71\x5d\x8a\xfb\x43\
\x2c\xd8\xf3\x5c\xa2\x4d\x38\x03\x2a\x20\xc2\x0f\x31\xa6\x70\x2c\
\x5b\x38\x33\x8f\x8d\xbd\xfc\x5e\x64\x95\x3f\xa0\x88\xce\x8b\x8a\
\xf6\xac\x98\xc0\xcb\x2a\x6c\xcf\x54\x2a\x36\xdc\x59\xc4\xd9\x7b\
\xd3\x60\xc7\x62\x57\xbd\xf2\x25\x69\xd1\xf9\x41\xaa\xa3\xbd\x6d\
\xfc\xe4\x89\xa4\xd7\xb5\x14\x37\x7e\x43\x91\xc4\x28\x09\xa7\x74\
\xb9\xd3\x65\x58\x54\xeb\x70\x24\x90\x16\x87\x66\xf1\xf1\x7e\xc6\
\x1d\x1e\x16\x86\x5a\xe6\x9a\x89\x7e\x93\xff\x1a\x12\x80\xca\xa8\
\x1c\x62\x05\xcd\x67\x4c\x4e\xae\x99\x42\xb1\x47\x48\xfa\x90\x1d\
\x62\x74\x5b\x2b\x55\xc2\x05\x3b\x88\x73\x26\x2f\xbc\xb4\xea\xe2\
\x8b\x6f\x84\xda\x62\xe7\x20\x1c\xd0\x66\x0a\x87\x0f\x71\x86\xc8\
\x27\xdc\x74\xfe\xac\x22\x41\xc7\x12\x02\x3e\xae\x47\x8b\x19\xe9\
\x78\xe2\x65\xe7\x33\x69\x79\x48\x92\x2a\x48\x81\xf8\xfc\xca\xd8\
\x68\x50\x75\x76\x31\x45\x65\x0b\xcd\x4d\x8d\xab\x5e\x7d\x93\x36\
\xfb\xa5\xf6\x70\x3e\x17\xc4\xf9\xe4\x93\x4f\x96\xdf\xff\x40\x7f\
\xff\xa0\x12\x34\x6b\x72\x95\x43\xf2\xb3\xce\x39\x97\xb2\xc7\x94\
\x1e\x57\xc5\x47\x3e\x29\x3e\xab\x30\xd6\xc4\x4c\x80\x26\x67\x61\
\x09\x1b\x2a\xf7\x59\xe7\x20\x4f\x2d\x7f\x58\x34\xd6\xea\x1d\x18\
\x8a\x1b\xc6\x74\x76\x9c\x76\xda\xa9\xb4\x71\x5b\x97\xef\x81\xf8\
\xbe\x95\x1e\x76\xcb\x81\xf1\xc2\x61\xb7\xba\xf4\x2c\xa5\xff\xab\
\x87\xdd\x52\x5a\xdc\xbd\x82\x03\x9a\xf1\x46\xa2\xff\xdf\x00\x85\
\x4d\x60\xdf\x9f\x48\x3e\x7a\x4b\xbe\x87\x60\xf4\xf6\x3f\x17\x5c\
\x8a\xe5\x6a\xba\xb8\x51\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x20\x68\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x54\x08\x02\x00\x00\x00\xf2\x11\xa5\xd4\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\
\x01\xd2\xdd\x7e\xfc\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd5\x03\
\x03\x0f\x29\x0a\x74\xfd\xc3\xca\x00\x00\x00\x06\x62\x4b\x47\x44\
\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x1f\xf5\x49\x44\
\x41\x54\x78\xda\xed\x5c\x09\xb8\x14\xd5\x95\xae\x5b\xbd\xbc\x7e\
\xfb\xc6\x5b\x58\x1e\xbb\x6c\x42\x82\x80\x28\x51\x31\xe2\x46\x40\
\x05\x07\x51\xa2\x66\xe2\x12\x10\x9d\x44\xcc\x0c\x44\x13\x17\x70\
\x41\x8d\x4b\x12\xe7\xcb\xa2\x49\x8c\xc6\x49\x26\x8e\x1a\xa3\x64\
\x12\x26\x01\x13\xe5\x53\x42\x08\x71\x62\x36\x0d\x10\x40\xe1\xf1\
\xf6\xfd\xf5\xeb\xb7\x74\xf7\x9d\xff\xde\x73\xef\xad\x5b\x55\xfd\
\x88\xe6\xd3\x2f\xc9\x37\x96\xd8\xaf\xba\xba\xea\x56\xd5\xa9\xb3\
\xfc\xe7\x3f\xe7\x16\xcb\x66\xb3\x8e\xe3\xa4\xd3\x69\xd7\x75\x39\
\xe7\x58\x67\x8c\x39\x7a\xc1\x16\xfb\xab\xd9\x48\x2b\xe6\x27\xb3\
\x5b\x78\x7f\x7b\xcb\x3f\xd6\xf8\x58\x8f\x44\x22\x2c\x93\xc9\x40\
\x34\xce\xfb\x4b\xae\x45\xc8\x0b\x1a\x34\x34\x34\xb4\x7e\xfd\x86\
\x68\x34\xce\x79\x16\xb2\x65\xf8\xcf\x21\x19\x33\xee\x70\xf9\xd5\
\x7f\x98\xfc\x55\x6e\x65\x66\x8b\xfe\xca\xad\x4f\x79\x0a\x35\xa0\
\xde\xd1\xdb\xed\xef\x76\x7c\x07\x1a\x93\xce\x64\x2a\x2b\xcb\x6f\
\xbd\xe5\x96\x28\x34\xaa\xa7\xa7\xbb\xbb\x27\x75\xed\x75\xd7\xf5\
\xf7\xa7\x84\x82\x31\x07\x82\x12\xc7\x30\x97\xe1\x6f\x4e\x15\x65\
\xde\x09\xb8\xdc\x84\xbd\xf4\xbe\xa4\xa4\xcc\x68\xb3\xf8\xce\xcd\
\xb5\x8b\x9d\xb4\x92\x8b\x0b\xf2\x59\x04\xdd\x93\x93\x6b\x7c\x39\
\x70\x50\x40\xef\xc1\xf8\x38\x34\x12\x71\x93\xc9\xbe\xa7\x9f\x7a\
\x1c\xc7\x46\xc9\xea\x4a\x4b\x4b\xcb\xcb\xcb\xfa\xfb\xf3\xe8\xfe\
\xe2\xb1\x68\x24\x16\xe7\x99\x6c\x26\x93\x66\x96\x01\xc2\x5d\xb9\
\xf2\x04\xfa\xa4\xea\x04\xd0\x43\x1c\x16\x8b\xc7\xd3\x69\xa8\x23\
\xa4\x0f\x8f\x16\xf1\x84\xc9\xcc\x75\xa8\x1b\x90\xba\x8b\x6b\x95\
\xb7\x9a\xeb\x06\x82\xe3\xd3\x0d\xb3\x5c\x1a\xf4\x6e\x8f\x4f\x02\
\x8a\xc7\xf3\x4a\x8a\x8b\xb1\x2d\x4a\xa3\x43\x12\x99\x74\x06\xae\
\x3a\x1a\x89\x42\x22\x47\x8e\xd4\x37\xb7\x34\x15\x17\x95\x14\x14\
\x14\x64\x49\xba\xea\x04\x2c\x74\x02\xa5\xe4\x99\xf4\x50\x5b\x7b\
\x5b\x45\x45\x65\x55\x55\x75\x7e\x41\x7e\x7f\x7f\xbf\xcb\x5c\xf5\
\xb4\xe8\x20\x47\x3e\x4e\xf3\x84\xe5\x0d\xf0\x61\x9e\x70\xe0\x06\
\x86\xd3\xa0\xf7\x62\x7c\x71\x3f\x9c\x43\x1a\x58\x84\x80\x8c\xeb\
\xc6\x80\xb8\x25\x7c\xfd\xe2\x17\xee\x2e\x2b\x2d\x6a\x6b\x6b\x2d\
\x2e\x2e\x2e\x2c\x2c\x82\x93\x0a\x07\x02\x1a\x8d\x73\xf3\xc9\x20\
\xdc\xc6\xc6\x86\xda\xda\x91\x10\xee\xaa\x8f\x5e\x35\x6d\xda\x0c\
\x61\xb0\x42\xfb\xbc\x4b\xd4\x7a\xcd\xf4\x17\x16\x08\x3a\xcc\x1a\
\xdc\x7c\xd3\x6a\x40\xba\xc0\x02\x3b\xbe\x07\xe3\x4b\x69\x30\x35\
\x58\x94\xcc\x95\x4b\xb9\x42\x1c\xbf\xf8\xc5\xcb\xe5\x65\x85\x0f\
\x3c\x70\xff\x5f\xed\xf9\xb7\x6e\xfd\xd1\x4f\xb7\x6d\x9f\x33\x67\
\x5e\x2a\x95\x74\x85\x45\x7a\x97\xae\x1e\x37\x7d\xe7\xc6\x6f\xf8\
\xfd\x9b\x72\x94\xcc\x3c\x72\x63\x3d\xe1\xa7\xe4\x79\xe3\xf7\x60\
\x7c\x31\x02\xe7\x51\x5b\xa4\x08\xfb\x70\xd8\x33\x67\xce\x84\xd6\
\x3c\xf6\xd8\x63\xbb\x77\xef\x86\x5e\xd0\x43\x70\xe5\x22\xff\x32\
\xfd\xa9\x16\x1a\x00\x9b\x8a\x8a\x8a\x36\x6c\xd8\x50\x53\x53\x5b\
\x58\x58\x08\xf7\x65\x7e\x25\x27\xaa\x6e\x48\x1f\x60\x3d\x4c\xe6\
\xf8\x6c\x20\xb0\x31\xd7\x8e\xdc\xb1\x43\xd3\xbb\x3e\x3e\x5d\x39\
\x79\x96\xa8\xde\xc4\x49\xa9\x24\x26\x82\x7c\xb2\x8f\x3f\xfe\xf8\
\x39\xe7\x9c\x53\x52\x52\x92\x48\x24\xf2\xe5\x42\x2b\x79\x79\x79\
\xf4\x89\x25\x1e\x8f\x47\xa3\xc2\x8b\x0d\x0e\x0e\xc2\x1e\x37\x6f\
\xde\xfc\xca\x2b\x3b\xc7\x8c\x19\x25\x61\x67\xc4\x92\x8f\xe7\x35\
\x48\xf1\x85\x77\x70\xe9\xa1\x73\xfb\x57\x13\x91\xac\x8d\xcc\xe8\
\x03\xb3\x7d\x10\xf3\x54\xe0\x6d\x8c\x1f\x10\xe6\x5f\x18\xdf\x7e\
\xf6\x51\xbd\x26\x47\xd5\x36\x0e\xff\x34\x72\xe4\xc8\xaa\xaa\xaa\
\x58\x2c\x96\xaf\x17\x12\x50\xc2\xbf\x18\x01\xc1\x9d\xcf\x9b\x37\
\xaf\xb3\xb3\x63\x6c\xdd\x18\x69\xc6\x5c\x9c\x55\xc6\x15\x11\x5a\
\x18\xb7\x40\xac\x44\x11\x59\x2e\x51\x04\x9e\x93\x2b\x1d\x99\x00\
\xf4\xd8\xee\x2a\xc7\xe1\x2a\xbc\xa2\xfc\x89\x89\x14\xf2\x78\x66\
\xbe\xe5\x18\x9f\xf4\x89\xa9\x98\xcf\x6d\x5d\xd3\xa0\x88\x1b\x8b\
\xcc\x39\xbe\x43\x17\x2f\x85\x2b\x7c\x90\x74\xd2\x9e\xdc\x70\xad\
\x84\x1e\x21\x26\xdc\x3f\xa7\xef\xd9\x2c\xd7\x8b\xb1\x29\x2c\x50\
\x22\x1c\x42\x70\x1c\x87\x40\xa0\xc6\xb8\x7c\x26\x26\xd4\x52\x0c\
\x14\x8d\xc6\xa0\x79\xb1\x58\x1c\x01\x01\x98\x40\x9a\x39\xcf\x66\
\xb2\xb1\x78\x4c\x98\x6e\x24\x02\xe1\x61\x9c\x81\x81\x7e\xa8\x21\
\x2e\xd2\x8d\x32\xcb\x9b\x7a\xd6\x63\xc3\x3f\xbf\x55\x31\xc6\xbc\
\x9d\x82\x26\x16\xd8\xc9\x78\x74\xdf\xf8\x5a\x14\x22\xfa\x73\xd2\
\x20\x85\x1b\x54\xd8\x53\x22\x0b\x5f\x81\x13\xd8\x42\xe3\x90\x43\
\xa2\xaf\x4a\x8c\x5a\x15\xf1\xa3\xb8\xfd\x4c\x06\x3b\xc1\x31\xc5\
\xa3\xb1\x8e\xce\x8e\x83\x07\x0e\x1f\x3a\x74\xa0\xa1\xe1\x68\x47\
\x57\x27\x76\x84\xd2\xa5\xfa\x52\xc0\x07\x03\xfd\x29\x04\xc1\x9a\
\xaa\x9a\xd1\x63\xea\x26\x4e\x9a\x52\x5e\x51\x81\x01\x93\xc9\x5e\
\xc8\x8b\x01\x99\x68\xad\xe0\xf4\xbc\xb5\x50\x08\x2b\x13\xf2\x73\
\xb3\x58\xc9\xe2\xaf\xd0\x45\x28\xa6\xe3\xb8\xb4\x8f\xb6\x48\x5c\
\xa0\xd4\x54\x6c\xc1\x15\x79\x9e\x0b\xda\x6c\xc6\xd4\xd0\x9c\x51\
\x30\x8b\x6a\x3b\x95\xa6\xcb\xd4\x4d\xe2\x96\xb8\x7f\x31\x82\xa3\
\x05\x0a\x25\x90\x53\x3a\x0d\xe3\xa2\x5c\x57\x1f\x22\xc5\xa1\x55\
\x32\x2d\x74\x30\x52\x51\x51\x01\xf4\xf8\xda\x6b\xaf\xbe\xf8\xe2\
\x0b\xcd\x6d\x2d\xed\x1d\x6d\xc5\x65\xe5\x23\xaa\x6b\x26\x4c\x9d\
\x82\x71\xa6\x94\x95\x97\x57\x8e\x80\x42\x75\x77\x75\xb5\x34\x35\
\x1e\x68\x3c\xba\x63\xcf\x2e\x9e\x4e\x8f\xae\x1d\x39\x73\xfa\xcc\
\x13\xe7\x2f\xa8\xae\x1d\x99\xec\xed\x49\xa5\xfa\xa0\x7d\x8e\xca\
\x09\x5c\x65\x2a\x52\x36\x8e\x14\x89\x30\x37\xd7\x51\x32\x11\x66\
\xab\xa0\x22\xb3\xb5\x84\xbb\x24\x04\xf2\x36\x9c\x29\x68\xe5\x7a\
\x6a\xaa\xf5\x53\x23\xa4\xa8\xf2\xd0\x8e\xd6\x2a\x6d\x62\x61\x89\
\x18\x5b\x23\xd1\xe0\xc1\x42\x77\x84\x38\x18\xc3\x3a\x8c\x11\x1b\
\x61\x3b\x84\x9b\x68\x04\x78\xb1\x8e\x8e\xf6\xaf\x7e\xe5\x4b\xcf\
\x3e\xfb\x14\x74\x07\xa2\x58\x74\xe6\xb9\x93\x26\x4c\x82\x58\x71\
\xc3\x47\xde\x78\x03\xb8\xf4\x28\xe7\x2d\x6d\x2d\xa5\x95\x55\x13\
\xa6\x4e\x9d\x3e\x7b\xce\x29\x95\xe7\xf4\x25\x93\xcd\x4d\x8d\xcd\
\x8d\x0d\xaf\xec\xd9\xbd\x65\xeb\x0f\xa7\x4f\x9e\x72\xde\xf9\x17\
\x1e\x37\x65\x5a\x67\x67\x67\x26\x3d\x18\x11\x62\x0a\x45\x31\xeb\
\xe6\x2c\x83\x0c\x1a\x63\xf8\x10\x7f\xcc\x0b\x98\x98\xc6\x41\x5c\
\x7a\x02\x21\x2f\x2e\xdd\x9a\x25\x14\x12\x07\x16\xb3\x62\x44\x23\
\x45\xc9\x69\x05\x5b\x00\x11\x8c\xde\x61\xa5\xb4\xa4\xb4\x3f\xd5\
\x0f\xd1\x7c\xfb\xdb\xdf\x00\x2a\x5d\xb1\x62\xd5\xc2\x85\x67\x8c\
\x1a\x3d\x06\xa6\x94\xc8\x4b\x88\x87\x1d\x75\xe9\x0a\x80\xb9\xeb\
\x8f\x1c\x3e\xf0\xe7\x7d\x87\xde\x3a\xf4\xc8\x0f\x9f\xaf\x1c\x5d\
\x37\x67\xc1\xa9\x93\xa7\x4e\x2b\x28\x2c\x9e\x38\x65\x5a\x57\x67\
\xc7\x6b\x7b\x76\x6f\xda\xbc\x71\xf6\x8c\x99\x9f\x58\x7d\x5d\x79\
\xc5\x88\x8e\xf6\x76\x89\x38\x88\x9c\xb1\xb0\x8e\x46\xd3\xe4\x9c\
\xfd\xd9\xa9\x95\x95\x9a\xec\xd4\x24\x08\xdc\x8f\x83\xb8\xf7\xcf\
\xc6\x41\x64\x5f\x42\xf7\x70\x9b\x59\xbd\x18\xd1\xa4\xf5\x42\xbe\
\x99\xf4\xcb\x50\x25\x52\x77\x62\x10\x13\xf6\x85\x76\x94\x97\x95\
\xbf\xf2\xca\x8e\x1b\x3f\xb3\x0e\xc8\xe8\xf6\xdb\xef\x3d\xe5\xd4\
\xd3\x85\xbc\x06\xfa\x87\x86\x06\xf1\x5f\x5f\x5f\x52\xe7\x05\x52\
\xc4\x91\xc8\xd8\xf1\x13\xa6\x4c\x9d\x06\x87\xd5\xd2\xdc\xb8\x63\
\xc7\xcf\x5e\xdc\xf2\xfd\x97\x12\xf9\x1f\x59\xb1\xaa\xbc\xbc\x02\
\x17\x7f\xca\xa2\xb3\xe7\x7e\xe8\xb4\xed\xff\xfd\xdc\xf5\x37\x5c\
\xbb\xf2\xc2\x95\xe7\x2f\x5b\xd1\xdd\xdd\x8d\x13\x09\x24\x41\x89\
\x8e\x36\x37\xd7\xe4\xf3\xc2\xc2\x28\xdb\x08\x26\xfb\xcc\x00\x67\
\xe5\x54\xb4\xf7\xd5\x08\x52\x8b\xc2\x09\xe0\x20\x66\x45\x31\x65\
\x50\x24\x1d\x23\x1a\xa3\x38\x66\x1f\x68\x0d\x09\x08\x7b\x42\x40\
\xd0\x85\xca\xca\xca\xea\xea\x9a\x27\xfe\xe3\x5b\x55\x5b\x7f\xb8\
\xee\x86\x0d\x4b\x96\x5c\x00\xaf\x98\xec\xed\x6d\x6d\x6d\x41\xe0\
\x76\x29\xf1\x45\xa8\x62\xda\x73\xca\xcf\xa1\x41\x44\xad\x14\xae\
\x2f\xbf\xa0\x60\xc5\xca\x4b\x2f\x58\x76\xd1\xb3\xcf\x7c\xef\xf1\
\x07\xef\x9e\x71\xd2\x29\x67\x2e\x5d\x06\x95\xc1\xe9\x96\x5e\xf4\
\xd1\x96\xc6\x86\xef\x7f\xf7\xf1\x5d\xbf\xdc\x79\xcb\xad\x77\xba\
\x91\xbc\xbe\x64\x2f\xec\xda\x67\x2f\x8e\x6d\x5d\xcc\x78\x66\x27\
\x14\xc6\x4c\xaa\xc1\x1c\xe6\xf8\xfd\x8f\x25\x0d\xc7\xb8\x34\xa5\
\x66\x7a\x55\x05\xf5\xb4\xb5\x0c\xe9\x65\x50\x2f\x03\x72\x81\x50\
\x52\xa9\x14\x3e\x7b\x7b\xc5\xe5\x1e\x3c\x78\xf0\x9a\x6b\xae\x19\
\x3f\x6e\xe2\x7f\x7e\xef\xb9\x25\x4b\xce\x47\x84\xea\xee\xea\x16\
\xcf\x01\xbe\x1a\x11\x1b\x76\x41\x0e\x8f\x00\x8c\xe1\x28\x18\xc2\
\x14\x96\x28\xce\xda\xd9\xde\x9e\xec\x4b\x5e\x7a\xf9\x95\x0f\x7f\
\xf9\x9b\xbc\xbd\xe5\x9b\x5f\xb8\x57\xe8\x58\x2c\x0e\x97\x94\x28\
\x28\x58\xb3\xfe\x73\xac\xa2\x72\xed\xda\x2b\x1b\x8f\xd6\x97\x57\
\x54\x22\x36\x48\xc4\x64\x82\x34\xe3\x0a\xec\xf8\xc6\xb7\xa5\xc3\
\xb8\xe7\x7c\x98\x45\x0c\x71\x29\x02\xce\xfc\x96\x29\x75\x31\xb8\
\x40\x38\x64\x5f\x01\xe9\x18\xa1\x90\x5c\xe8\x93\x56\x20\x23\xdc\
\x5f\x5f\x5f\xdf\x93\x4f\x3e\xb9\x60\xc1\xc9\x67\x9e\x75\x36\xbe\
\xb6\xb5\xb5\x09\xb9\x44\x22\xec\x9d\x2c\x52\x94\x2e\x52\xe5\xc2\
\xa2\xa2\xfb\x1f\xfc\xca\x05\x67\x9c\xf9\xf0\xdd\x1b\x9b\x9b\x9b\
\xf3\x0a\x8a\xc0\xd1\x34\x35\x36\x9e\xbb\x7c\xe5\x89\x8b\xcf\xfb\
\xd4\xa7\xaf\xdb\xf9\xf2\x4b\x95\x23\xaa\x70\x75\xec\xbd\x5c\x74\
\xb2\x1a\x32\x31\xc2\x8a\x3a\x05\x63\x86\xb2\x95\x90\x2f\x4b\x77\
\x6e\x5c\x35\xe0\xe2\x4b\x2f\xbd\xf4\xd0\x43\x0f\x3d\xf0\xc0\x83\
\x33\x67\xce\x78\xea\xe9\x2d\xa4\x35\xf2\x41\x65\xd5\x43\xce\xaa\
\x38\x2b\xbe\x00\xb0\xb8\x12\xaa\x70\x7f\xb6\x9d\x75\xb2\xae\x70\
\x9f\xb1\x58\x14\x67\x69\x6a\x38\x7a\xf9\x15\xab\xc7\x8c\x19\x7b\
\xcf\x17\x3f\x7f\xfe\x95\xd7\x8e\x1c\x3d\xa6\x1f\x01\xae\xb1\x69\
\xda\xac\x13\x46\x8d\x1d\xb7\xf9\x81\xbb\xff\xad\x2f\x75\xee\xd2\
\xf3\x5b\x9a\x9b\x61\xe0\xef\x68\x7c\x01\xf4\x61\x3d\x84\x99\x8c\
\x07\xc7\x85\x66\x19\xfd\x2c\x0f\x75\x14\x50\x14\x7e\x8a\x7b\xbe\
\xdb\x44\xf4\x9c\x02\x12\xc7\xc8\xc5\x38\x20\x32\x46\xe8\xcb\x92\
\x25\x4b\xf0\xad\xa3\xa3\x43\x6a\x0d\x9d\x91\x20\x3c\x3d\x03\x0d\
\x58\xc8\xa1\x1a\x12\xc1\x56\x6a\x91\xfe\x9b\xa0\x02\x31\xc5\x9b\
\x9b\x1a\x4e\x5f\x74\x56\x65\xd5\x88\x75\xeb\xd7\x5d\xb8\x66\x5d\
\xed\xa8\x51\x50\xd7\xce\x8e\x8e\xa2\xa2\xe2\x35\x37\x6d\x7c\xf0\
\xde\x4d\x91\x58\x64\xd1\x59\x8b\xdb\xdb\x5a\x81\x92\xb8\x18\xe0\
\xed\x8e\x2f\xd1\xa2\x2f\x0f\x71\xd5\xff\x5c\xfe\xe5\x12\xcd\x19\
\x03\xce\xa5\x41\x83\xfe\xc5\x18\x17\x2d\x29\xbd\x60\xbd\xab\xab\
\x6b\xc1\x82\x05\x23\x46\x8c\xc0\x0e\x42\xa0\xfe\x01\xff\xea\x05\
\x97\x0f\xdd\x6c\x6d\x69\x9e\x3e\x63\xd6\xfd\x9b\xef\x7b\xf6\xeb\
\x0f\x35\x1e\x3d\x0a\x1c\x04\x22\xab\xa7\xb7\x37\x16\xcb\xfb\xc4\
\x86\xdb\x3e\xff\xa5\x07\xf6\xec\xfe\x05\xe2\x5d\x26\xf3\x2e\xdb\
\x1a\x79\x72\x97\xe4\xcd\x7d\x4e\x9a\x1b\x0d\xb2\x5d\x4f\xc0\x07\
\x05\x96\xd6\xd6\x56\xf8\x69\x48\x46\xa0\x21\x5b\xab\x79\x20\x80\
\x70\x0f\x6d\x84\x10\x9c\xab\xc9\x3e\x6e\xc5\x8d\x78\x5e\xbc\xb3\
\xb3\xfd\x84\x79\x27\xdd\x7b\xfb\xe6\x2d\x8f\x7d\xad\xaf\xbf\x9f\
\xec\x35\x99\x4c\x22\xea\x5d\xf1\xe9\x1b\xef\xb9\xff\xee\x86\xa3\
\x47\xf2\xf3\x0b\xb9\xc0\xa8\xef\x78\x7c\x8b\xea\x77\xec\x8c\x55\
\x01\x06\x91\x39\x51\x8a\xef\xfa\x92\x55\x22\x09\xc3\x1a\x14\x50\
\x22\xb3\x8e\x1d\x08\x19\xc8\x01\xb9\xa7\x03\xe6\x89\xb8\x06\xab\
\xdb\x1b\x7c\x51\x42\x78\x10\xef\x01\xaa\x2f\xb8\xb8\x78\x2c\xde\
\xde\xd6\x72\xca\xc2\x45\x6b\xaf\xb8\xea\xf9\x27\xbe\x11\x4b\x14\
\xe0\xc9\x71\x51\x6e\xe8\xa9\x1c\x51\xbd\x60\xc9\xb2\x4d\x9b\x6e\
\x06\xfd\x42\x3b\xbf\xd3\xf1\xc5\x3e\x8e\xa5\x36\xd6\x35\x0a\x98\
\x26\xff\xd8\x6a\x05\x15\x10\xb2\x83\x46\xc0\x9b\x40\x04\x7d\xa1\
\xc5\x6c\x0c\xfc\x8a\x18\x0f\x3a\x84\x92\x0f\xdf\x09\xb5\xc2\xb2\
\xc0\x85\x0c\xab\xda\xc1\x75\x5c\x0f\xfc\x51\x53\x63\xc3\xc5\xab\
\x3e\x36\x6b\x6c\xdd\x8b\x5b\xb7\x14\x96\x94\x41\xb7\x1d\x37\xd2\
\xde\xde\x3e\xf7\xe4\x53\xe3\x55\x35\xf7\xdd\x7b\x47\x55\x75\xb5\
\x0c\xfc\x8c\x31\xfe\x8e\xc6\xf7\x5f\x9b\x27\x1e\x95\xac\x12\xf5\
\xce\x88\x46\x92\x79\x28\x78\x9f\x65\xcb\x96\xed\xda\xb5\x0b\xf1\
\xd5\x30\x8a\x81\xc4\x3d\x40\x44\xc1\x65\x03\x25\x9e\x70\xc2\x09\
\xfb\xf7\xef\x8b\xc6\xa2\x84\x48\x38\x33\xd8\x9e\x6b\x82\xc6\x67\
\x68\x61\x0a\x15\x67\x57\xd9\x31\xf3\x88\x1d\x4d\xce\x3b\xad\x6d\
\x2d\x37\x7e\xf6\xd6\xcb\x3f\x76\xc9\xbe\x09\x93\xc7\x4f\x9c\x84\
\x18\x1f\x8b\x46\xdb\x5a\x5a\x96\x5f\x76\xc5\x57\xee\xbc\xe5\x47\
\x5b\x9e\x3d\x67\xf1\x79\x00\xa5\x88\x12\x74\x91\xc3\x55\xad\x2c\
\x22\x92\xe9\xba\x98\xb8\x3b\x85\x5d\x1d\xae\x2a\x59\x16\x69\x4f\
\xc3\xf1\x82\xc2\x42\x28\x02\xa2\xe6\xba\x75\xeb\xd6\xae\x5d\x0b\
\xcf\xa3\x80\x3c\xf7\xee\x0a\x51\x2e\xcc\x0f\xe3\x58\xd4\x8e\x70\
\xe0\x9e\x3d\x7b\xa0\x4d\x06\xfe\x84\x92\x49\x3b\x51\xb4\x93\x4b\
\x47\x66\x76\xac\xa4\xb8\x8c\xb9\x11\x65\x2b\x9a\xfe\xd2\xa9\x09\
\x28\xa4\x74\x71\x49\xc1\x83\xf7\x7d\x71\xc3\xc6\x5b\xea\xd6\x5c\
\x4f\x88\x16\x7e\x34\xd5\x97\xbc\xec\xba\x1b\x1e\xf9\xda\x97\xe6\
\x9f\xb4\x60\xd4\xa8\xd1\x82\x4b\x62\x3a\xd3\x08\x61\x3f\x35\x9e\
\x57\x63\x16\xdf\x01\xfe\x06\x07\x06\xfb\x52\x7d\x9e\x0d\xca\x7b\
\x96\x75\x31\x3a\xb9\x1b\x81\xdb\x9b\x37\x6f\xfe\x8f\x7f\xb4\x65\
\xd3\xa6\x4d\x08\x4c\x35\x35\x35\xe5\xe5\xe5\x64\x32\xe1\x92\x86\
\x25\x1a\x87\x48\xc5\x7d\xfb\xf6\x8d\x1a\x35\x6a\xef\xde\x7d\xab\
\xd7\x7c\x12\x8e\x4a\xa5\x94\x1a\x0b\x89\xd8\x29\x98\x50\x6e\xd0\
\xac\x4a\x97\x08\xb8\x64\xb3\x08\x58\x38\x6a\xdb\xf6\x6d\x8d\x0d\
\xf5\x28\x41\x39\x8e\x55\x36\x91\x24\x2a\x05\x65\xdc\x3c\x54\x95\
\x27\x7b\x76\x6e\xff\x9f\x45\x4b\x97\xa7\x7a\xbb\x5d\x71\xe0\x40\
\x65\x55\xf5\xec\xd3\xce\xb8\xf9\x73\x1b\xce\x3e\x77\x49\x7b\x5b\
\x9b\x82\x20\x41\x4e\x5a\x17\x7e\xa4\xf8\x70\xba\xb8\xf0\x5c\x62\
\xbd\xa4\xac\x6c\xea\x94\x69\xd3\xa6\x4d\x47\xa8\x31\x56\xa7\x70\
\x10\x61\x02\x22\x3e\xa1\x02\xb7\xdd\x76\xe7\x5b\x6f\xbd\xd9\xd0\
\x50\x8f\x54\x6c\x98\x82\xb5\x7a\xba\x46\x52\xa4\x5f\x93\x27\x1f\
\x5f\x5d\x5d\xfb\xf1\x8f\x8f\x1a\x31\xa2\x0a\x0e\x1c\x48\x89\x00\
\xbf\xd0\x5e\xad\x0a\x54\x4e\x8c\xa8\x94\xd1\x55\x55\x06\x9c\x37\
\x1a\xed\xed\xed\xb9\xf9\x73\xeb\x17\x2c\x98\x7f\xfc\x8c\xe3\x23\
\xd1\x88\xa9\xc0\x70\x53\x0a\x54\x58\x4c\x48\xf3\xce\xbb\x6e\xbf\
\xe3\x9e\xfb\x60\x4d\x25\x45\x85\x50\x22\x9c\x0b\x39\xca\x69\x67\
\x9d\x7b\xef\xb6\xad\xa7\xf4\xb6\xce\x9b\xf7\x01\x22\x18\x02\x26\
\x66\x05\x6b\x31\x98\x34\x05\x55\x0b\x83\xc3\xfd\xc6\xd7\xff\x7d\
\xe1\xc2\xb3\x57\xac\x58\x09\xdf\xcf\x08\x27\x29\x1f\x64\x71\x83\
\x78\x3e\x91\x68\x14\x55\xad\x59\x1f\xf8\x00\x71\x8e\x8a\x1e\x0e\
\x35\x54\x58\x02\x62\xa6\xad\x82\x80\x81\xcc\x3c\xdc\x10\xfd\x18\
\x2c\x48\x69\xfe\x46\x20\x03\xd0\x20\x4f\x3e\xf9\xdd\xc5\x8b\xcf\
\x5e\xbd\x7a\xcd\xdb\xac\x2f\x21\x3b\xfb\xfa\x33\x3f\x58\x79\xf5\
\xda\x54\x4f\x57\x56\xa2\x7a\x30\xb5\xff\xf4\xcf\x9f\xd8\xfb\xbb\
\x5f\xad\xbb\xe1\x5f\xff\x8a\x82\xd5\xf2\xe5\xcb\xd7\xac\xb9\xf6\
\xac\xb3\xce\x05\xb0\x80\x6f\xa1\x6b\x35\x75\x31\x26\xd3\x48\x26\
\x79\x02\x2e\xc2\x53\x2a\x6b\xcc\x27\x6c\xc3\x54\x67\x33\x2c\x38\
\xb7\xea\x07\xe2\x4a\x65\xbe\x4e\xec\x3a\xd1\x76\xfa\x2b\x93\xd5\
\x6b\x47\x7d\x3a\xca\xc6\x1c\xc9\x3d\x76\x74\xb4\x2d\x5f\x7e\xb9\
\xa1\x0d\x0c\x6a\xd7\xcf\x83\x07\xfa\x5a\x56\x5d\x7a\xe9\x53\xcf\
\x3e\x77\x60\xef\x9f\xc6\x8d\x9f\x80\x4a\x28\x4e\x0a\x7e\x76\xc6\
\x07\x67\xef\xfa\xf9\xf6\xef\x3f\xf3\xf4\x8a\x8b\x56\x2a\xd4\xea\
\x3f\x4a\x3b\x6f\x4e\xac\x1e\x39\x10\x22\xd4\x41\x0a\x4f\x9a\x34\
\xe1\xad\xc3\x6f\x1e\x7f\xfc\xcc\xde\xde\x24\x27\x13\x53\x4f\x98\
\xc9\x28\xe6\xe8\xfe\x9a\x08\x8c\xc2\xd5\x4e\x80\xf9\xe5\x63\x1c\
\x3c\xf3\x51\x51\xde\x6d\x38\x06\xa3\x29\xff\xa1\xb9\x6e\xe9\x87\
\x1c\xf5\xc9\x55\x69\x41\xbb\xe3\x0c\x00\x96\xcc\xe0\x22\x54\xf3\
\x45\xc9\x04\x9f\xf0\xf7\x01\xeb\xa0\x15\x02\x5c\xd7\x5c\x7d\xe5\
\x1d\x0f\x3d\x3c\xf9\x53\x1b\x06\x92\x02\x85\x41\x46\x7d\x3d\x3d\
\x4b\x57\x7e\xf4\x89\xef\x3e\x7a\xd6\xd9\xe7\x0c\xd7\x1c\x44\xec\
\x15\x65\x8b\xa2\x84\x27\x59\x1d\x7a\x24\x00\x53\xd2\xc1\x4b\xa0\
\xc0\xb9\xbf\xf4\x1c\x28\x45\x0c\x53\x15\x08\x71\x9b\x2c\xcc\xe4\
\xfb\x19\x51\x7f\x95\xce\x5f\x9a\xd0\x05\x12\x80\xd2\xcc\xa1\x43\
\x87\xe4\x55\x0a\x8b\x43\x11\xe9\x85\x17\x5e\x78\xe4\x91\x47\x48\
\x83\x94\x62\x5a\x35\x02\x5a\x20\xc4\xde\xe6\xfa\xe6\xc6\xa3\xc2\
\x6d\x4b\x3a\x1e\xb4\xdc\x98\xb1\x63\xf3\x2b\xaa\x2e\xb9\xf8\xe2\
\xd9\x73\xe6\x00\xa7\x85\x95\x88\xaa\x2f\xd8\x0e\x15\x43\x75\xeb\
\xfa\xeb\xaf\xc7\x38\x26\x0f\xa7\xdd\x3d\xca\x55\x03\x02\x1e\x2e\
\xe9\xe6\x2c\xf5\xea\xd0\xe0\xf2\x00\xa6\x61\x9e\x86\x31\xaf\xf9\
\x22\xa0\x7d\x2a\x16\x05\xc6\x97\xa8\x7d\xa8\xae\xae\x0e\x29\x15\
\x5d\x7d\x63\x63\xe3\xad\xb7\xde\x3a\x7f\xfe\x7c\xdc\x80\x20\x8a\
\xd0\xbc\x24\x8b\x94\x58\x62\x7a\xc1\x1d\x0a\x7e\x6e\x44\xe5\xce\
\x6d\x5b\x57\x5e\x75\x4d\x7f\x6f\x97\x70\xff\x6e\xa4\xaf\xb7\xf7\
\x8c\x8f\x9c\xf7\xfc\x63\x0f\xff\xcb\x75\xd7\x11\xf1\x10\x96\x11\
\xd1\x38\x18\xf3\xbe\xfb\xee\x7b\xf4\xd1\x47\x6f\xb8\xe1\x06\x78\
\x95\xa0\xae\x91\xc7\x30\xb5\xec\x61\xd1\x66\x68\x81\x43\xcc\xea\
\x4a\x88\x07\x42\x9d\xdc\xf8\x34\x37\x8c\x0d\xfd\x20\x9e\x55\x4c\
\xdc\x09\xdc\x02\x6e\xfe\xf0\xe1\xc3\xa8\x85\x4c\x9e\x3c\x99\x18\
\x22\x22\x0f\x48\x52\xb4\x82\x3d\xb1\x1b\x0c\xf0\xd4\xd3\x4e\xeb\
\x6b\xad\x6f\x6b\x69\x86\x0c\x85\xb5\x4a\xbd\x18\x37\x61\x62\xf9\
\xc8\xba\x5d\x3b\x77\x96\x96\x95\x11\x71\x6e\x16\xe3\x28\x69\xb9\
\xf8\xe2\x8b\x8f\x1c\x39\x62\xf5\xee\x71\xfb\x9a\x54\x36\xaf\xe2\
\x76\xe0\x46\x5c\x53\xdb\xe2\x36\x6e\xc6\x93\xfb\xf3\xc1\x8e\xb6\
\x36\x14\x61\x5c\x4f\xae\xae\x9d\xf8\x38\x5e\xc2\x23\xaa\xf9\x66\
\x1f\xc1\xb8\xda\xe3\x07\x13\x25\x45\xd4\x09\xdf\xdc\xd4\xd4\x84\
\xfb\x24\x67\x94\xa3\xf8\xe7\x71\x7b\xd9\xb2\x8a\xca\x0f\xcd\x9b\
\xf3\xeb\x97\x5f\x2c\x40\xbc\x47\x0a\x29\x3b\x0c\x06\xfb\xfb\x3f\
\xb4\xe8\xec\x1f\x6c\xd9\xe2\x0a\xf9\xc6\x6c\xbd\xa3\x85\x84\x4e\
\x43\x91\xb3\x33\x4d\x1c\x96\x7c\xb8\x4a\x56\x15\x76\xb2\xfe\x71\
\xe9\xa2\x20\x02\x59\x58\x26\xa4\x20\xb6\x43\x3a\x07\x0f\x76\x36\
\x37\x77\xca\x94\x47\x57\x00\xa4\x83\xe7\x7a\x5d\xd5\x46\x74\x91\
\xc4\x30\x4d\x2a\x08\x30\x6f\x7c\xc7\x09\xfc\x23\x22\x41\xb8\x12\
\x54\x78\x90\xee\xd8\x58\x7c\x38\xfa\x11\x35\xa2\x85\x0b\x17\x36\
\xec\x7f\x1d\xcc\xb7\x81\xef\x40\x61\xd3\xa6\xcf\xe8\xcb\x38\xff\
\xfb\xea\xaf\x4b\xcb\x4a\xa1\x29\x46\x46\xb6\x3e\x92\x26\xda\x55\
\x3f\xc7\x54\x3d\x28\xbd\xa1\x64\x95\xcc\x2d\x70\x6e\x1c\xd9\xd3\
\x33\xd0\xdb\x8b\x42\x85\x23\xf5\xd9\xcd\x8b\x47\x0f\x1c\xec\x44\
\x0e\x5b\x5e\xe6\xa6\xfa\x87\x4c\x80\x7b\xf7\x18\x18\x6e\x8a\xba\
\xb8\x07\xf8\x69\x13\x3d\x02\x8a\x63\xc2\x13\x21\xf8\xb1\x63\xc7\
\x1f\x37\x6e\xcc\x1b\xbf\xfd\x0d\x58\x5a\x95\x36\xc9\x92\xd2\x89\
\xa7\x9e\xfe\x93\x9f\xfe\x14\xdd\x04\x64\x8f\x46\x40\x36\x0b\x48\
\x80\xdb\x8c\xe9\xd3\x20\x29\x13\x8a\x11\x9c\xf9\x54\x4b\xfc\x84\
\xba\x55\x32\x39\x74\xe0\x20\xda\xa8\x62\xe0\xdb\xf3\x0b\xe2\xa8\
\xa4\x23\xff\x28\x29\x76\x07\x87\x78\x7a\x28\xc3\x15\xe1\x2d\x93\
\x07\xd7\xf1\xca\x08\x42\xcd\x85\x50\x55\xde\x26\x8d\x49\x37\xf0\
\x09\xd2\x53\x04\x2d\xf1\x97\xbb\xfe\xea\xb6\xa4\x49\x7c\x25\x39\
\x73\xdd\x4c\xb7\x1d\x06\x44\x46\xde\x04\xb5\xa4\x13\xe7\xce\xd9\
\xfa\xf2\xee\x53\x3e\xbc\x88\x52\x6e\xb8\x6a\x64\x67\x1f\x9c\x33\
\xf7\x5b\x3b\x5e\x68\x6b\x6d\x29\x2a\x2e\x21\xa1\x98\x41\x28\xae\
\x9b\xf6\x5e\xaf\x80\x6c\x05\x74\xa2\xc9\x5c\xbb\x17\xc6\x36\x78\
\x24\xd5\x23\x6b\x8b\x8b\x8b\xf2\x98\x03\x88\x91\xea\xea\xec\x4c\
\xf6\x75\x17\x15\xb9\x99\x2c\xd0\x86\x33\x38\x24\xbc\x43\x3c\x1e\
\x31\x05\x25\x53\x2d\x80\xc0\xb8\xcb\x7c\x91\x40\x91\x9f\xd2\xe0\
\x98\xd5\x5e\xe1\x07\x09\x96\x9e\xfb\x40\x83\x51\x2b\x5b\x52\x36\
\xfa\x87\xab\x9e\x33\x77\xee\x40\x57\x7b\x6b\x73\x13\x7a\x0b\xc9\
\xb7\x65\x44\x9b\xea\x88\xea\x31\xe3\x76\xee\xdc\x89\x68\x45\x4d\
\xcf\x36\x44\xb0\x83\x1a\x79\x3e\x85\x6c\x3d\xaa\x4f\x8e\x1f\x38\
\xbd\xed\xa2\x90\x54\xd7\xd6\x96\xa6\xfa\x85\x63\x4e\x24\x22\xf9\
\x09\xd7\x10\xfc\x3c\x9b\x7e\xfd\xf5\x96\xce\xce\x7e\xc8\x48\xfa\
\xa9\x77\xc1\xd0\xe8\x52\x0d\xe0\x0c\xab\x8c\xb9\x54\x62\xe6\xcc\
\x02\x77\x5e\x50\x58\x34\x61\xec\xe8\xbd\x7f\xf8\x1d\xd8\x08\xf1\
\x1c\x04\x98\x77\xd1\x36\x39\xeb\x84\xb9\xdb\x5f\xf8\x39\x8e\x25\
\xce\x4f\xd0\x6c\xfe\xc5\x33\xec\x90\x89\xd1\x05\xb9\xba\x7f\x4a\
\xb7\xf3\xa8\x7f\x4c\xa2\xd5\x6c\x55\x55\x61\x41\x41\xfe\xe0\x20\
\xf4\x05\xd7\xe4\x10\xa1\x09\xb3\x81\x03\x1d\x1a\xea\x7d\xfd\x8d\
\xa3\xaf\xbf\xd1\xdc\xdb\x3b\x68\xc0\xb2\xb4\x38\xee\x7a\x94\x0a\
\x97\xec\x39\x55\x05\xc4\x0f\x6a\xbb\xc2\x44\xde\x9e\x9a\x46\xf7\
\x5d\xba\xad\x41\x36\x58\x87\x22\x94\x95\x95\xa1\x65\x0b\xfd\x5d\
\x58\x01\x20\xc8\xcf\x2f\x58\x76\xc1\x05\x87\xde\xf8\x83\x23\xdb\
\x8d\x5c\x59\x68\x03\x4a\xfc\xe0\xec\x39\x2d\x1d\x9d\x92\xe4\x2f\
\x82\x1e\x01\x28\x9b\x82\x68\xce\xbe\x0c\x3b\x74\x90\x36\x45\x0d\
\x63\xe1\xfa\x82\xbc\x6a\xad\x8e\xc7\xdd\xca\x8a\xa2\xfa\xfa\x3e\
\x40\x73\xd5\x3c\xaa\x3b\xf6\xf3\xf3\xa3\xf9\xf9\xbc\xab\xb3\xab\
\xb9\x39\x39\xf3\xf8\x9a\xca\xca\x02\x9c\xd7\x58\xab\xd7\x38\xa0\
\x9c\x39\x53\xc1\x52\xb5\xb2\xa9\x35\x63\x6b\xd2\xad\x0e\xdb\x4c\
\x12\xc8\x93\x01\xea\xb6\x6d\xdb\x26\x3c\x0b\x73\x90\xb9\xa1\xfd\
\x58\x58\x0f\x73\xf7\xfd\xfe\x37\xed\xcd\xcd\x95\xe8\x9b\x91\x0e\
\x0e\x3b\xc3\x47\xd4\x4d\x98\x74\xd3\x4d\x37\x9e\x74\xf2\x02\x1c\
\x05\x01\x2d\x5d\xba\x94\xd2\x0b\xe3\xec\x7c\xe7\xb2\x15\x88\x6b\
\xd2\xfe\x18\x6a\x9f\x4e\xf3\xd6\xd6\x24\x9a\xa4\x34\xd1\x4c\x19\
\x95\x90\x43\x2a\x95\xee\xee\xe1\x15\x15\x65\x73\xe7\x8c\xae\x28\
\xcf\xc7\x23\x71\xdd\x77\x29\x8c\x1d\xb3\xf3\x06\xf7\x06\xae\x1e\
\x81\xfc\xb1\x27\xbe\xd3\x8e\xc7\x34\x6e\xda\x40\x61\x65\xba\xa4\
\x26\x53\x5a\x73\xe9\xb5\xeb\xf2\x25\xa8\x11\xd2\x62\x2e\xa0\x77\
\x6f\x4f\xcf\xf9\x17\xae\x98\xbb\xf0\xcc\xa1\x68\x22\x51\x36\xe2\
\xc9\xa7\x9e\x86\xa1\xd9\xe6\x19\x3e\x5d\x10\xbe\x6a\xe2\x82\x33\
\xaf\x63\x4f\x35\xed\xc5\x62\x91\xa6\xa6\x64\x7f\x7f\x5f\x49\x49\
\x04\x82\x86\x6f\x96\x40\x4c\x3c\x78\x54\x16\x12\xf9\xc5\xc7\xd5\
\x95\x95\x95\x26\xd4\x73\x10\x86\x2f\xac\xcf\x0d\x67\xcf\x9e\xc7\
\x36\xf9\x9b\x69\x42\xd5\x78\x8c\xfc\xf8\x30\x5a\x13\x58\xa0\x35\
\x1f\xbf\xe2\x8a\xb9\xf3\xe6\xfe\xd7\xb3\xcf\xd7\x5d\x74\xd9\xc2\
\xe5\x2b\x7a\x3a\x3b\x71\xb5\x02\xe2\x0f\x0d\x0a\x6e\xde\x95\x1a\
\x25\x0b\x50\x79\x89\xc4\xe9\x67\x2c\x2a\x2b\x2d\xfd\xfd\x6f\x5f\
\x3b\x79\xfe\x89\xb5\xb5\xb5\xa0\xdb\x4d\xba\x1b\x90\x91\x2d\x07\
\xba\x60\xd5\x44\x12\xf0\xb2\xf4\x10\x70\xec\x91\xfa\x0e\x3c\x12\
\x74\x6d\xf4\xf7\x67\x52\xa9\x8c\x71\x0a\x40\x5e\xb3\x66\xd4\x94\
\x97\x25\x80\x18\x29\x1e\xdb\x21\x39\x94\x5a\x38\x26\x6e\x1c\xa3\
\x10\x1d\x30\xb1\x9c\x56\x66\x14\x0a\x64\x39\xd8\xbf\x75\xd7\xae\
\x79\xf1\x99\xef\x7c\xfb\xab\x0f\xc5\xe3\xb1\x21\xf4\x3f\x48\x32\
\x30\x22\xc5\x13\xd1\xa9\x05\x76\xef\xe9\xee\x02\x9d\xfa\xe2\xf6\
\x6d\xd3\xa6\x4e\x15\x33\x01\x64\xbb\x17\x7d\xfa\x05\x94\xf5\xe7\
\x14\x26\xcc\x2b\xf2\xcc\x61\x5e\x67\x95\x80\xd1\x47\x1b\xba\x3b\
\xbb\x52\x28\x2c\x33\x37\xbf\x12\x96\x5d\x51\xd1\xdb\x2b\x48\x12\
\x78\x6b\x64\x8e\x10\x32\x4e\x01\x24\x41\xf5\x67\xa6\x8a\x47\xa6\
\x7a\xc1\xcd\x8a\x49\x7e\x11\xef\xda\xda\xb2\x03\x03\x9c\xd8\x71\
\xff\x6e\x8a\x36\xcc\x69\x62\xa6\x1d\xc7\x84\x64\xdc\x3a\x9a\x60\
\x8a\x4b\x4a\x6f\xbf\x7d\x53\x62\xa0\xe7\xa1\xbb\x6e\xc3\x0f\x20\
\x88\xd1\xe1\x16\x91\xc2\x91\x32\x62\xf2\x9f\x8b\xaa\x11\x4c\xb2\
\xfe\xad\x37\x67\xcf\x9e\x0d\x28\x67\x3a\x30\x21\x23\xdb\x07\x65\
\xb3\x46\x57\x54\x5f\x8c\x6e\xe2\x54\xb7\x67\xe3\x20\xa1\x3e\xa5\
\xa5\x89\xf9\xf3\xc6\x25\x12\xc0\x9f\x32\x2e\x44\x19\xc0\xf4\xe1\
\xfa\x96\x44\x1e\x8a\xf1\x80\xa4\xae\xac\x4d\x79\xec\x85\xeb\x3a\
\x26\x5c\xd2\x16\x59\x82\x16\x5f\x70\x19\x90\x4e\x7b\x47\xb6\xa9\
\x25\x33\xb6\xce\x35\xec\xa2\xe9\xeb\x56\xa5\xf3\x2c\x0f\xb4\xb5\
\xd1\x62\xf8\xb3\xac\xb5\x20\x4e\xa1\x1e\x87\x9b\xc4\x34\x9c\x9f\
\xbd\xb0\xfd\x0b\x1b\x6f\xba\xec\x9a\x4f\x9e\x38\xff\x64\x70\xaf\
\x91\x68\xc4\xce\x4e\xe1\x95\xff\xf4\xfa\x1f\x0b\xf3\xf3\xc0\xb2\
\x23\xa6\x51\x12\x47\x23\xe0\xf0\xd0\xe3\xf0\x91\x47\x54\x9b\xcf\
\x91\x5b\xc3\x21\x94\x96\x24\x64\xbb\x2e\x2e\xcb\x41\x53\xd5\x60\
\x9a\x23\x49\x1e\x1c\x4a\x1f\x7a\xb3\xb9\xb2\x32\x66\xaa\x74\x21\
\x26\x5f\xd1\x27\xb8\xb6\xee\x1e\xa1\x2f\x89\x04\x4b\xe4\x31\x50\
\x74\xed\xed\xe9\x92\x22\x8e\x0a\x23\xa1\xeb\x50\x9f\xe8\xb0\x8e\
\xd9\xee\x01\xb4\x5b\xdf\xc8\xae\x91\xd9\x7e\xf8\x8c\x45\x48\xfd\
\xbf\xfc\xd5\xaf\x1d\xda\xb7\xf7\xb2\x2b\xae\x4a\x0a\x52\x99\x47\
\xa4\x1b\xc2\x40\x85\x05\x85\xaf\xee\xd9\x3d\x7d\xfa\xf4\x21\xa9\
\x35\x86\x4b\xcc\xed\x83\xfc\xfe\xc1\x75\x34\xcd\x4f\x94\x2b\xe9\
\x92\x48\x1f\x00\x46\xd1\xcd\x29\xd5\x4e\x80\x0b\xf9\x48\xe0\x8c\
\xa6\x4c\xa9\xaa\xa9\xae\x8c\x89\xca\x17\xcd\x22\xd1\x29\xa9\xec\
\x05\x90\xc7\x8a\x20\x4b\x47\xc1\x6d\xb6\xb6\xa1\xaf\x61\xf0\x48\
\xfd\x50\x6b\x6b\xba\xb8\x48\x76\xb0\x09\x7f\xaf\x40\x17\x19\xb7\
\xfa\xc7\x73\x9b\x58\xf6\x2f\x2d\xb8\x0c\xf8\xdd\xaa\xea\xda\xcd\
\x77\xdd\xd5\xb8\xff\x8f\x77\xdf\xfa\x59\x24\x5c\x08\xf0\xa2\xeb\
\x18\x59\x58\x34\x8a\xfe\x9c\x03\x7b\xf7\xce\x9a\x39\x13\xf9\x00\
\x24\x62\xbb\xa1\xb0\x06\x99\xeb\x31\xd1\xc3\xe1\xaa\x70\xa8\x43\
\x98\x10\x9b\x17\xfe\x5d\xbb\xbc\x25\xa4\xe6\xcc\x9a\x05\x55\x2d\
\xc2\xc8\x8e\xa9\x26\x3b\xae\xb2\x5c\x32\x06\x11\x47\xc0\xec\x38\
\xc5\xc5\x6e\x69\x69\x34\x1e\xe3\xa0\x22\x0a\x0b\xb2\xb2\xbe\xe8\
\xf4\x25\xb3\x5d\x5d\xc2\xc7\x41\x4c\xba\x91\x49\x77\x31\x0d\x23\
\x1a\xd3\x57\x6b\x30\xb4\xdd\x3f\x49\xaa\x04\x42\x1a\x39\xc7\xfa\
\x0d\x9f\x39\x7e\xf2\xf8\xcf\x5e\xbf\xf6\xd0\x81\x03\xb8\x4a\xc8\
\xa0\xa8\xb0\xf0\xcf\xfb\xf6\xa1\x86\x50\x53\x5b\x8b\x1d\xec\x8e\
\x27\x12\x93\x0f\x28\x32\x4f\x0e\x54\xeb\x8c\x6a\xa9\x05\x4d\x2c\
\x40\xa1\xfa\x4b\x27\x5c\x14\x2d\x18\x63\xb9\xa8\x55\xbb\x53\x1b\
\xbb\x96\x97\x47\x1a\x1b\xc0\x36\xa8\x59\x59\x50\xf9\x78\x2c\xd3\
\xd4\x94\xed\xea\x8e\x54\x94\x03\x9c\xb0\x63\x98\x98\x2d\x08\x29\
\xf1\xac\x69\xa1\x35\x05\x6e\x53\xb6\xa3\x36\x5b\x50\x91\xcb\x96\
\x2d\x3f\x6e\xf2\x71\x0f\xde\x79\xdb\x85\xab\x2e\xbb\x64\xd5\xa5\
\xe0\x36\x7e\xb5\x7b\xd7\xe4\x89\x13\xb3\x92\x6c\xb5\xdd\x19\x1d\
\x1e\xd0\xa0\x00\x2e\xd3\x51\xcc\x97\x67\xe4\x24\x6b\x94\x31\xb9\
\x86\x4c\x65\x3a\x51\xf0\x66\x1d\x58\xc4\x10\xa1\x75\xee\xe4\x09\
\x9e\xcf\x95\x73\x3d\x15\x5b\x0b\x50\x5e\x58\xc0\xbb\xbb\xd3\xed\
\x1d\x19\xd7\x8a\x9b\x01\xb7\x63\xbb\x1b\xbb\x9d\xd4\x56\x9c\xf0\
\x82\xbb\x81\x8c\x26\x4c\x9c\x78\xfb\xc6\x8d\x2f\xfd\xe4\xc7\x77\
\xde\x76\x33\xda\xa4\xf6\xff\xe9\x8d\xe9\x33\x66\xf4\x88\xd6\xcf\
\xac\x1d\xe3\x73\x99\x98\x47\x66\x11\xe8\x77\xed\xcc\xd5\xea\x82\
\xb0\x68\x40\x9b\x22\x75\x54\x03\x96\xd8\xce\x75\x7b\x35\x39\x30\
\xb9\x25\xc0\x1c\x46\x23\xc2\x4f\xa3\x9b\x3a\x12\x35\xc1\x8e\xf7\
\x26\x59\xff\x60\x74\xc2\xf8\xc4\xd8\xba\x18\x67\x26\x2f\x77\xc3\
\x26\x66\x34\x28\x20\xa6\x9c\x32\xa2\x7b\x86\x8e\x60\x40\xc4\x72\
\xec\x77\xf3\xcd\x37\x17\xe6\x45\x2f\xf8\xc8\xe2\x81\x54\x1f\x4a\
\xbe\x28\x99\x1a\xfb\x32\x9f\x01\x01\xf9\xf2\x0a\xee\xf5\x49\x1f\
\xd3\xc4\x82\xd3\x86\xfc\xf3\x23\x3c\x6c\xec\x2b\x54\x38\xb2\xd0\
\x0e\x87\xd5\xd9\x99\x8e\x47\x79\xaa\x5f\xcc\x5c\xc9\x8b\x3b\xa8\
\xc7\xc5\xf3\xa2\xe3\xc6\xc6\xa5\x76\x38\xa6\x2a\x19\x36\x31\xdb\
\xb8\x6c\xfb\xd2\x65\x19\x66\xd7\xc4\xcd\x81\x64\x41\x54\xb1\x00\
\x98\xbc\xe4\x92\x55\x23\x6b\x47\x02\x4c\x13\x81\x6b\xdb\xaf\x99\
\x0f\x3f\x6c\xaa\xe1\x58\x7d\xd2\x3c\x47\x55\xc3\x6e\xc7\x60\xde\
\xdc\x1a\x83\x5e\xbc\x0c\xc2\x4c\x98\x91\xda\xc4\x54\x3b\x00\x7c\
\x70\x5b\x5b\x66\x70\x00\xfe\x38\x06\xb6\x24\xd5\x9f\x05\x0f\x89\
\xeb\x17\x0c\x1f\x73\x86\x32\x5a\x28\x3e\xce\x2c\xb7\x93\xf6\xb7\
\xd4\x30\x9a\xc5\x16\x2e\x78\x11\xe9\x63\xb3\xf4\x0d\x0d\x0d\xa0\
\x8a\x30\x02\xaa\xc9\x34\x2f\x20\x2c\x20\x1f\x61\x66\x4f\xd3\x34\
\xdd\x1d\x04\x61\x42\x1a\x14\x6a\x3f\xf6\xcd\x7a\xf4\x26\x36\x2b\
\xd3\xcb\x72\x6b\x9e\x8d\x6c\x58\xc8\x32\x10\x6c\x53\xa6\x24\x20\
\x29\x04\x2f\x84\x8b\x43\x87\x78\x5f\x2a\x5d\x5a\xe6\x72\x5d\x06\
\xf1\xc6\x75\x59\xd8\xfb\xd8\x26\x66\x64\x14\x2e\xb7\xd9\xa2\xb1\
\xa5\x43\x74\x1a\xe8\xed\x9c\xf5\x55\x5b\x83\xfc\x81\xcc\x5c\x9b\
\x37\x57\x83\xbc\x07\x37\xc0\x5f\x4c\x96\x61\x66\x0e\x2d\xf7\x37\
\xaa\x83\x55\xe7\xba\x99\x5d\xd8\x11\x80\x5f\x7a\xc8\x29\x28\x60\
\x59\x3d\xfb\x4b\x36\x8f\x52\xf7\xab\x2a\xe1\xe3\x32\xb0\x67\x5d\
\x5d\x7c\xef\x3e\x28\x94\xec\x68\x30\x50\x83\xd1\x68\x3e\xee\x8a\
\x6e\x80\x9c\x8b\xa1\x01\x8d\x4d\x19\x3d\xa2\xca\x44\xc0\xbe\xac\
\xf9\x91\xcc\xf4\xe1\x06\xca\xbf\xe6\x2c\x7e\x13\xcb\x6a\x61\xc8\
\x1c\x88\xdb\x1a\xc4\xb9\x9d\xcc\x73\x3d\x55\xc8\x3f\x31\x9f\x26\
\xdd\xb1\x88\x4e\x0b\xe4\x05\x39\x6f\xbe\x95\xc5\x04\xea\xa2\x62\
\x37\x9b\x56\x84\x88\x01\x51\x7a\x86\xbd\x6a\x25\xc1\x2c\x31\x28\
\x94\xa4\xac\x99\x13\x98\xd3\x36\x0c\x08\xb2\x05\x14\x7e\xed\x84\
\x29\xde\x1b\x01\x19\xb2\xd9\x08\x28\x8c\x54\xcc\x03\xa0\x0a\xa2\
\x75\x46\xd5\xb9\xa5\xe3\x96\x63\xe8\x8e\xe1\x9c\xb4\x13\x9e\x35\
\x9c\xce\x38\x03\x82\x84\x05\x19\x22\x6e\xbb\xbe\x3e\x93\x9f\x00\
\xa6\xcf\xda\x45\x0e\x5f\x07\x91\x57\x6d\x86\xee\xa1\xdb\x90\x29\
\x2a\x9f\xf9\x1a\x53\x08\x54\x1d\x43\x83\xc2\x4e\xc7\xa8\x8f\x91\
\x51\xa0\x40\x98\xb3\x1f\xce\x5f\xe4\x71\x42\x51\x2c\x98\x6a\x78\
\x8d\xe4\x76\x35\xd8\xa5\xea\x94\x23\x11\xb5\xe6\x60\xc9\xa0\xf0\
\xa7\xfe\x68\x36\x91\x27\x26\x07\x82\xea\x88\x47\xdd\x82\x42\xb7\
\x37\x69\xa6\x8e\x4b\xfd\x31\x9c\x3c\x22\x7e\x96\x93\xb2\x52\xe3\
\x14\x3d\x22\xd7\xb1\xc6\xcf\xf2\x9c\xcc\x8f\x8d\x7a\x7c\x55\x7f\
\x4b\x58\xc6\xef\x50\x91\x2b\xa0\x3e\xe1\x47\x1e\x3e\x4d\x88\xa5\
\xf6\x5a\x73\xe9\x71\x45\x0d\x66\x94\x54\x2e\x35\xc0\xc8\x36\x74\
\xa2\x4b\xb9\x87\x97\x69\xaa\x1f\xa8\xfb\x0a\x41\x2a\xb8\xc8\x3f\
\xd3\x19\x3c\x3d\x61\x9d\x08\x05\xc9\x24\x47\x45\x08\x9e\x58\x4d\
\xe9\x57\x55\x0a\x87\x48\x62\xb5\xca\x34\x2d\x69\x8d\x4f\x83\xbb\
\x8a\x7f\xc9\x06\x04\x64\xfb\xe6\x70\x50\xb7\x7d\x33\x49\xc7\x88\
\x29\x3c\x27\x34\xec\x86\x0c\x6d\x66\x6b\x10\x91\x37\xae\x2e\x28\
\xdb\x38\x88\xfb\x07\xcd\x3d\x1d\x53\x66\x0f\xee\x51\x28\x51\x42\
\x14\x32\x89\x69\x2d\x2c\x8c\x34\x36\x66\x30\xcd\xa9\xa2\x32\x82\
\x8a\x34\x77\x72\x84\x1b\xe6\xef\x0a\x61\xa1\x39\x6c\x76\x5d\xcc\
\x36\xb1\xb0\x81\x04\xfa\xb9\x49\x7d\x8c\xdb\x0a\xcc\x5c\x0f\xbf\
\x06\xc7\x58\x59\x2e\x13\x73\x98\xe6\x37\x64\x42\xa1\x35\x48\x3b\
\x27\x9d\x63\x33\x3e\xcc\xab\x16\x44\xaf\x76\x7e\x42\x4c\x6b\x1a\
\x18\x04\xf0\x73\xe5\xd4\x05\x91\x4f\xe4\x57\xc5\x9a\x5b\xd2\x6d\
\x1d\x43\x53\x26\x83\xfb\x74\x74\xa3\x90\x69\xf9\x50\x84\xbf\x96\
\x0e\x0f\x76\x56\x5a\xa9\x86\x21\xb1\x08\x1c\x0f\x67\x23\x14\xb3\
\x8c\x68\x8c\xfa\x04\xe7\xf5\xbf\x33\x13\xf3\x1c\x0e\x69\x8c\xdd\
\x82\xe7\x84\x86\xf3\xa8\x2f\xab\x92\xc9\xfa\x07\x30\x2e\x12\x74\
\xa6\xb3\x3b\xd4\x11\x79\x32\x89\xa6\x1e\x67\x44\x65\x4c\x1a\x9d\
\xf4\x56\x54\x4e\x52\x05\xd4\xe0\x8b\x58\x2c\x6c\x68\xfc\xa8\xa3\
\xe3\x08\x48\xde\x21\x64\x06\x94\x3a\xd9\x45\xb1\x9c\xc6\x65\xfc\
\x4e\x58\x7d\x72\xe6\xdb\x01\x0d\x0a\x00\x45\xd3\xd7\xc1\xb9\xf5\
\x6a\x8a\x40\x2c\xe3\x3c\x3c\xdd\x5a\xf9\x57\x98\x55\x77\x37\x1a\
\xa9\xc1\x0f\x09\x74\x83\x6c\x0b\x08\xb1\x37\x99\xa9\xae\x72\xcb\
\xcb\x22\xa8\xc7\x66\xb2\x81\x2e\x6a\x66\x63\x51\xaf\x6f\xd5\x09\
\x12\x66\x06\x22\x42\x3a\x98\xf6\x41\x1d\x8f\x86\x15\xcb\x89\x9b\
\x4d\x62\x11\x0e\x5e\x81\x18\x6f\xbf\x60\x2a\x90\xee\x05\x78\x15\
\x4b\x14\x3a\xd5\xb0\x32\x14\xc7\xf7\x5a\x97\x50\x17\x36\xa4\x03\
\xcb\x02\xaf\x8c\x18\x0f\xd1\x43\x40\x08\x67\x48\x1d\x40\x9f\x95\
\x95\x46\x28\x01\x70\x5d\x66\xdd\x8e\xd7\xa9\x6a\x95\xa2\x59\xce\
\xf1\xe5\xcb\x1a\x14\xcb\x81\x8a\x20\x3e\x69\x1e\xac\xe9\x74\x09\
\x1b\x4b\xd8\x64\xec\x27\x1d\x38\xc4\x08\xda\x2b\xe9\x44\xa3\x34\
\x89\xc2\x70\x20\x39\xc0\x44\x00\xf2\x84\x4c\x2c\x94\xa6\x72\x67\
\xd4\x28\x70\x60\x4e\x5e\x9e\x78\x3e\x87\xde\x1c\x4c\x27\xc1\xe1\
\x93\x56\x66\x73\x92\x48\xc1\x29\xc6\xa1\xd7\x65\x10\x21\x85\x89\
\x86\x98\x9d\x87\x17\xac\xa0\xc2\x37\x7e\xfc\x78\xc8\xe8\xd5\x57\
\x5f\x45\x95\x86\xc2\x39\x4a\xa3\x31\x51\xd9\x89\x90\xaf\x31\x2b\
\x6e\x68\x09\x7b\xe8\x80\x93\x46\xe2\x6a\x74\x73\xc7\x8e\x1d\xab\
\x57\xaf\xb6\x69\x33\xeb\x28\xc7\xbc\x9a\xc2\xe1\x5e\xdf\x22\xf7\
\x9a\x59\x91\x2f\x58\x6f\x0f\x62\x02\x0e\x0a\xdd\x29\xcb\x93\xf4\
\x8f\xec\xe0\x18\x3f\x3e\xf6\xc7\xd7\x07\xe3\x79\x28\x25\x70\xd5\
\x56\x6d\x19\x84\x00\x3e\xf4\x12\x21\x57\xbf\x46\xc2\x9e\x9e\x2c\
\xc7\x77\x64\xeb\x14\x74\x13\x49\x13\x68\x8a\xea\xea\x6a\x3c\x52\
\x94\x40\xd1\x1c\xf7\xf4\xd3\x4f\x63\x2a\x06\x25\x19\x2d\x2d\x2d\
\x01\x93\x09\x83\xa3\x30\xd5\x17\xee\xe3\xc4\x57\x94\x43\x20\x6b\
\x74\xfc\x61\xd8\xab\xaf\xbe\x7a\xf1\xe2\xc5\x38\xaf\x17\xfb\x19\
\x35\x8d\x71\xab\xaa\xc1\xd5\x8f\x16\xed\xe3\x78\x78\xc6\xee\x35\
\x71\x94\x68\xd4\x6b\x0c\xb2\x82\x39\x9d\x3c\x29\x2e\x43\x81\x71\
\x6e\x62\x36\x1b\xd3\x2d\xac\xae\x6e\x76\xa7\x17\x1f\x69\x6f\xe4\
\x1b\x9f\xae\x6c\xea\xd4\xa9\x6a\xb2\x90\x7c\xc8\x98\x73\xbf\x7e\
\xfd\x7a\xe3\x83\xc2\xed\xae\xe1\xc8\x1d\x7e\xf9\x5d\xb8\x79\x98\
\x6e\x10\x02\xa2\x97\x6a\x60\x8b\x68\x1b\x97\xf6\xe8\x0d\xa5\xe0\
\x99\x01\x8a\xef\x88\x72\xf5\xbf\xd7\x08\x66\x5e\x50\xe0\x52\x91\
\x5a\x58\xb2\xfd\x0e\x96\x1c\x73\xfd\x43\xd5\x0f\x4b\x17\x24\x1d\
\x91\x26\xdf\x4c\x4d\x2d\x34\x2b\xc0\xf4\x01\x0d\xe7\x80\xc2\x6f\
\xfb\x1b\xee\xad\x81\x76\xa3\x2b\xe6\x5d\xd0\x0e\x06\x28\x60\x1d\
\xd6\x5d\x88\xf0\x1c\x98\xb3\x4a\xea\xc4\xad\x7c\x95\x29\xd3\x0a\
\x90\xce\xf6\x24\x7e\x95\x9b\x08\x97\x2c\xe7\x79\xaa\x79\x86\x34\
\xed\x31\xab\x5b\x44\x65\xd7\x14\xf3\xbd\x62\x85\x94\xc6\x37\x3e\
\x5d\xca\xac\x99\xb3\x9e\x7b\xee\xb9\x39\x73\xe6\xda\xdd\x96\x76\
\x0c\x79\x3b\x02\xf2\xb7\x8b\xf1\xe1\x62\xbc\xdd\xca\x49\x09\x0d\
\x1c\x36\xa6\xe1\x74\xf7\xf4\x8c\x19\x35\x0a\x29\x94\xac\x17\x19\
\xa0\xc8\x99\xe6\x5c\x7d\x2c\x84\x7a\x6b\x8e\x93\x4b\x44\xcc\xea\
\x00\x76\x95\x3a\x1a\x3a\xc8\xf4\x49\x91\x58\xac\xed\xba\x09\xdd\
\x3f\x3e\x9e\x21\xe8\x4f\xf4\x5d\xfc\xf2\x97\xbb\xee\xb8\xe3\x0e\
\x54\x23\xf4\xdb\x20\x1c\x34\xfa\xa2\xa1\x30\x9b\xab\x25\x3c\x20\
\x91\x00\x62\x1e\xce\x07\x99\x5f\x21\x11\x50\x8e\xf0\x3e\x70\xff\
\x54\x41\xdc\xbf\x7f\xff\x8d\x37\xdd\x24\x44\x96\xf5\x38\xe9\xa8\
\x89\xee\x36\x0e\x1a\x3e\x86\xe5\xa6\x5c\x99\x93\x03\xd7\x0c\xf7\
\xf2\x98\xf0\xf8\xea\x05\x03\x2e\xbb\xe7\x9e\x7b\x1a\x9b\x9a\x0e\
\xbf\xf5\x16\x62\x8c\x2b\x7b\x31\x8f\x36\x34\x38\x16\xd2\x0f\xbf\
\x80\x8a\x3b\xf6\x0b\xff\x72\xbc\x7c\x8b\xe5\xb2\x00\x3a\x3b\x7a\
\x8b\x20\x1a\x30\xfc\xb4\x3e\x69\xd2\x24\xc0\x5c\x3c\x2a\xdd\x0c\
\x6a\x80\x22\x23\x7e\x07\xc9\x4f\x56\xbd\x54\xce\xea\x1b\xb6\xdf\
\x50\x64\x66\x8d\xa9\x92\x21\x57\x49\x03\xb7\x5e\xd7\xe7\x0c\xbb\
\x1e\xf4\xa9\x81\xf1\x31\xc8\xd0\x60\xba\xa6\xba\xa6\xb6\xa6\xe6\
\x6f\xf2\xde\x4d\x24\x36\x68\x42\x93\xcd\x94\x62\x5a\x1f\x19\x49\
\x54\xbd\xa9\x45\x33\x06\x7f\xdb\x57\x83\xca\x19\x14\x7f\xb3\x25\
\x2a\xe6\x60\x29\x09\x14\xc9\xae\x6b\xe1\x83\x64\x1b\x56\x31\x66\
\x19\x6e\xdc\xb8\xd1\x9b\x09\xc9\x7d\x2f\x02\x0c\x64\xf3\xe1\xf7\
\x61\x84\xe6\x8e\xe5\x58\xb7\x66\xdf\xfd\xbd\x8f\x0f\xf8\x89\x9a\
\x10\xc2\xa8\x87\xc7\x11\xf3\xe4\xa4\x6e\x5f\x1b\x42\x2e\xe4\xed\
\x84\xe6\x75\x04\xb7\x07\x2e\x7d\xb8\x9d\xff\xbe\xc7\x17\xff\xd3\
\x0b\xee\x58\x4e\x3e\xe1\xfd\xc5\xf7\x36\xe0\x63\x20\xab\xff\xcf\
\xef\x93\x36\xec\x33\x7f\x5f\x53\x8e\xb1\xf8\x34\xe8\xfd\x25\xbc\
\xb8\x61\x96\x37\xa7\x36\xe6\xe4\x5f\xc2\xbb\xe5\x64\xb6\xfe\xa1\
\xc7\xff\x3f\x1e\x9f\x29\xce\x54\xe0\x9c\xcf\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x08\xdd\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x55\x08\x00\x00\x00\x00\x93\x44\xbe\xfa\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\
\x01\xd2\xdd\x7e\xfc\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd5\x03\
\x03\x0f\x28\x2b\x21\x8f\xe2\xd5\x00\x00\x00\x02\x62\x4b\x47\x44\
\x00\xff\x87\x8f\xcc\xbf\x00\x00\x08\x6e\x49\x44\x41\x54\x68\xde\
\xb5\x59\x5b\x88\x24\x57\x19\xfe\xbe\xff\x54\x4f\x4f\xcf\x65\x67\
\x76\x76\xa2\x4b\x82\x6c\x92\x85\x44\x9f\x64\x4d\x34\x3b\xe3\x85\
\x40\x1e\x5c\x21\x0f\x01\x03\x89\x20\x08\x21\x5e\x30\xd3\x1b\xf0\
\xb2\xa2\x71\x46\x10\x82\xf8\x20\x28\x44\x1f\x0c\x98\x18\x42\x0c\
\x2e\xae\x04\x0c\x84\x3c\x24\x2f\x31\x18\x44\xcc\x4c\x54\x96\xc4\
\xfb\x0d\x13\x33\xbb\xd3\x3b\x97\xde\x9e\xae\x3a\x9f\x0f\xa7\xaa\
\xba\xab\xbb\xfa\xb2\x93\xf8\x33\x33\x7d\xea\xfc\xf5\xf5\xd4\xf9\
\xcf\xff\xfd\x97\x53\xa6\x5c\x92\x44\x49\x36\x90\x92\x70\x9d\x24\
\xc9\x5b\xd2\xcb\x20\xc8\x7b\xef\xbd\x97\xbc\xb2\x81\xf7\x0a\xd7\
\x52\xaa\x3d\x90\x1e\x90\x89\xa2\x99\x99\x99\x73\xe6\xb2\x81\x99\
\x0b\xd7\xce\x85\xb9\x83\xe9\x4d\x34\x8a\x3b\xe7\xd7\x37\x36\x36\
\x36\xd6\xd7\x37\xd6\x37\xd2\xc1\xc6\xc6\x7a\xb8\x5e\x5f\x0f\x73\
\x07\xd1\xbf\x7c\x7e\x9b\x32\x24\xf8\xc9\xa9\x2f\xaf\xd4\xeb\xf5\
\xfa\xe9\x7a\xbd\x5e\x5f\x59\xa9\xd7\x4f\xd7\xeb\x2b\xa7\xc3\xcc\
\xe9\xd3\xa7\x57\xea\x2b\x2b\xf5\x83\xe8\xcf\x9c\x7a\x12\x89\x41\
\xd8\xba\xf3\xd9\x73\x4f\xbd\xfd\x72\xee\xd9\x3b\xb7\xc2\x1e\x58\
\xbc\xbf\x37\x9e\x34\x9b\xcd\x66\xb3\x39\xe6\xcd\x7b\xfb\xb1\x89\
\x46\x10\xcc\xf7\x29\xec\x10\xd3\x09\xa6\xbf\xd9\xd6\x51\x89\xf7\
\x5e\x64\xba\xb5\x05\x7d\x19\x1e\x84\x49\x02\x47\x08\x40\xaa\xb5\
\x7b\xa9\xd1\xb8\xd4\x68\x34\x1a\x8d\x4b\xdb\xcd\x98\xe4\x48\x1c\
\x21\x19\x49\x8d\x12\x5a\xbc\xb3\xb5\x7d\x39\xf6\x22\x49\xf9\xb8\
\xb5\x7b\xa9\xd1\xc2\x68\xa0\x48\x03\x80\xe2\xa3\xa0\xf3\x37\x0c\
\x4d\x3b\x8d\xcb\x32\x23\x99\xde\x6c\x86\xf6\x76\x23\x71\xa5\x6b\
\xe8\xe0\x01\xc0\x7a\xbf\xbe\x5b\x2c\xdc\xd9\xdc\xba\x0c\xa3\x84\
\x5c\x24\x99\xc5\x8d\x3d\x03\x2c\xdc\x35\xd0\xb8\x96\xde\x3f\x64\
\x95\xbb\xdb\x22\xa5\xf0\x40\xc8\x56\x2c\x11\x3b\xbb\x84\x86\xa1\
\x01\x18\x84\x61\x9b\x05\x22\xee\xfd\xfa\xf4\x7f\x08\x6e\x77\xcf\
\xa0\xa1\x0b\x90\x09\x12\x01\x08\x00\x18\x3e\x49\x20\x9b\x33\x88\
\x22\x52\x03\xa9\xeb\x81\x29\xd8\xee\xbe\x41\x18\x84\xa7\x04\x23\
\x49\x65\xf6\x16\x33\x8b\x1a\x49\x9a\x91\xf0\x1e\x0c\x98\x00\x25\
\x08\x12\xa4\x28\x62\x17\xe9\xdd\x65\x78\x91\x1c\xcd\x83\xc4\x53\
\xa2\xf2\x30\x0c\x28\x35\x97\x28\xdb\xbf\xec\xde\x22\x0f\x18\x4b\
\xa4\xe7\xe4\xcc\xdc\xfc\xe1\xf9\x99\x09\x8a\xcc\xfd\x89\xe0\x65\
\xc9\x8f\xe4\xc1\xd0\x3d\x6e\x13\x52\x6d\x61\x6e\x66\x72\x62\x62\
\x72\xe6\xf0\x42\xd5\x83\x12\x25\x52\x12\xdb\x89\x0d\xdb\xe4\x94\
\x07\xdd\xdc\x42\x11\xe0\x63\x52\xb5\xf9\x28\xdd\x5c\x54\x0e\xcf\
\x2a\x20\xc2\x83\xf9\x36\x07\xe1\xd1\xe1\xc1\x20\x5f\x86\xd8\x4e\
\x20\x4e\xc9\xa7\x76\xa7\xd7\xcc\x8c\xef\x70\x4e\xf4\x03\xe1\xc8\
\x79\x50\x34\x12\x3a\x51\x4c\xe4\xbe\xa0\x89\x8a\xd2\xc7\x53\x92\
\xc4\xed\x5a\xa4\x9c\xd6\x44\x12\xbc\xa5\x04\x0f\xe4\x3c\x50\xb6\
\x65\x99\x27\x76\x76\xb1\x4d\xa2\xc6\xe0\xa9\x09\x27\x0f\x1d\xb9\
\x6a\x7e\x76\x36\x16\x0b\x74\x65\x29\x5e\x39\x0f\xc0\x01\x2b\x20\
\xdb\x6d\x2a\xaa\x0a\x24\x63\x5b\xac\xfd\xe3\xc5\xc7\x7f\xf4\xd4\
\x2b\x5a\x9c\x4a\xc9\x31\xc0\x31\xba\xc2\x9e\x49\xc3\xa2\x09\x5b\
\x09\xfc\xa4\x13\x21\x2d\xb4\xbe\xf7\xd1\x0f\xdc\x76\xcf\x67\xee\
\x3a\xf9\xa1\x33\x2f\xcf\x77\xd8\x30\x2c\x16\xa5\x3c\xe8\x0d\xb5\
\x61\x60\xa4\xa9\x32\x65\x6e\x4a\x24\x10\x9d\xfb\xc8\x17\x7e\xb9\
\x0b\x73\xa6\xbf\x3d\x71\xe7\xb7\xab\x10\x04\x08\x8e\x34\x94\xe1\
\x53\x66\xa7\xc6\x1b\x20\xaa\x2d\x5c\xb5\x18\x91\xf4\xb5\xd7\xee\
\x7e\x2d\xaa\x44\x0e\x89\x07\x9d\x1e\xfa\xca\x74\x88\xb1\x72\xc3\
\x48\x94\xf2\x60\xb0\x91\xe0\xbd\x45\x92\xa4\xe8\xdf\x8c\xe2\x76\
\x9c\x78\x18\x94\x28\xfa\xd9\xcf\xe7\x12\x11\x62\x65\x88\x81\x32\
\x1e\x0c\x0f\xd8\x22\x49\x54\x5e\x55\x5c\x5d\xfa\xd4\x27\x4f\xd0\
\x13\x40\x82\xef\xef\x3a\x01\xaa\x54\x34\x84\xc7\x59\x3e\xc8\x8d\
\x64\x34\x18\x0d\x85\xa0\x48\x12\xf4\x2f\xe1\xe6\x97\x9f\x7b\xe4\
\xb1\x17\x5e\x7c\x9f\x08\x08\x7f\x3c\x3f\x29\x51\x35\xd7\x6d\xfc\
\x02\x1e\x79\x3e\x18\x43\x2a\x9b\xbf\xc2\x03\xef\x6e\x6c\x6e\xee\
\xdd\xf4\xcc\x8d\x61\xe9\xe7\x27\x44\x5f\x99\xf6\xc3\x73\x9a\x71\
\x2c\xd1\xd4\x6f\xfe\x79\xc3\xad\x17\x2a\x51\xe4\x2e\x1c\xb9\x17\
\x04\x88\x2d\x93\x70\xc8\x69\x38\x32\xe4\x03\xa5\xb6\x40\x2f\x4d\
\x72\x73\x46\x2f\xe1\xde\x79\x6f\x04\xaa\xaf\xdf\x08\x0f\x08\x15\
\xd1\xcf\x4e\xcb\x82\x65\xfa\xf1\x21\x11\x86\x7c\x30\xaa\xc2\x81\
\xbb\xf4\xb9\xe7\xef\xda\x89\x24\xb9\x66\x73\x3f\x04\xc8\x6b\x13\
\x3f\x33\xe7\x87\x60\xa9\x1e\x1e\x80\x25\x8c\xcb\x9e\x66\xfa\x83\
\xf3\x31\x29\x8b\x37\xab\xbf\x85\x01\x98\x7e\xcf\xde\xec\x61\x05\
\x6c\x39\x3e\xe7\x41\x27\xb2\x15\xc7\x9d\x6b\x81\x7e\xc7\x13\x30\
\x5e\xd0\xde\x59\x08\x91\x3e\xfe\xae\x89\xc5\x3c\x55\x97\xe2\x91\
\x15\x5e\xe3\xac\x80\x74\x04\x60\x8d\xed\xc5\x27\x5e\x35\xb9\xf8\
\x58\x3d\x5a\x14\xfa\x8b\xc1\xfe\x15\x68\x18\x93\x7b\xc4\xed\x5d\
\x9c\xfb\xc3\x77\xe8\x5d\x72\xf5\x8f\x8f\x4f\x69\x44\x75\x0a\x31\
\xcf\x07\x19\xab\x42\x8c\x83\x15\xc9\x93\x2f\xa2\xbd\x19\xb5\xef\
\xdf\x86\x4b\x8e\x3f\x7e\x33\xb3\x95\x5b\x57\xa5\x59\xc0\xe7\xf9\
\x60\x74\xfd\x9e\xad\xfb\x42\x3c\xb7\xfa\x7b\xc7\xe4\xe4\xb9\x5b\
\xda\x63\x30\x08\x0a\x3c\xe0\x98\x06\xb2\xc6\xce\xc2\x63\x67\x9d\
\xfc\x1d\x4f\x1e\xdf\xb7\x71\xcc\xca\xc0\x03\x14\x36\x08\xbd\xad\
\x47\x16\x7a\x5d\x6b\x6b\xfa\xaf\x0f\x9a\xfc\xe7\x1f\x3e\x12\x5b\
\x9f\xbe\x0c\x8f\xde\x7c\x60\xec\xa2\x65\x5f\x70\x6f\x24\xb5\xef\
\xee\x9a\xff\xe6\xb7\xaa\xde\x86\xd4\xfb\x39\x3e\xe7\x81\xc6\xf2\
\x22\xdb\xdb\x9d\xfe\xd3\x2f\x18\x9f\xf9\x52\x5a\x73\x8d\x14\x48\
\x69\xc2\xe9\x2b\x5b\xca\x76\x5d\x3b\xbe\xba\xde\xd2\xf2\x57\xdb\
\x23\xda\xb3\x1c\x0f\x30\xf0\x40\xc5\xda\x9f\xe8\x6b\x06\x00\x58\
\xdc\x32\xdb\x02\xeb\xf3\xdd\xc5\x69\x99\x74\xf0\x54\xe0\x01\x8a\
\x4d\xc4\x80\x15\xb4\x12\x26\xef\xc4\xc4\xfb\xdb\xc6\xe1\x11\x3a\
\xc7\x0b\xc8\xfb\x03\xa8\xa7\x7c\x09\x13\x5d\x33\x68\xcb\xf6\x6e\
\x79\x47\xeb\xc5\x8a\x17\x4a\xf4\xfd\x78\xe4\xfd\x41\xe6\xae\xa1\
\x7d\x30\x4b\x29\x49\x2a\xb4\x96\xc1\xe6\x09\xd9\x5e\xfc\xf4\x0d\
\x93\xed\x32\x3d\x4b\xf0\x81\x60\x63\x66\x34\x92\x06\xb9\xad\xcf\
\xbe\x74\x47\xd3\xf1\x4a\x24\xad\xae\xa9\x4e\x64\x05\x40\x18\x40\
\x58\x57\xd0\x65\x24\x02\x47\xe7\x2b\x87\x7c\x70\x8e\x82\x1e\xa5\
\xf8\x6c\x52\x69\x0b\x97\x56\xac\x04\xd3\xa2\x4d\x59\x67\x07\x51\
\x11\xbd\x9b\x78\xf8\x9e\xfb\x9e\x9b\x91\xd0\xab\x47\x29\x1e\x2a\
\x36\x20\xc1\x39\xf2\xe0\xda\x55\xc3\x93\x52\xd5\x11\xf7\xdf\xf7\
\xc8\x0f\x6e\x7b\x68\xc6\x17\x78\xa0\xe2\xa0\x83\x4f\x1b\x90\xd0\
\x27\xe4\x19\x2c\x53\x04\xfe\xe5\x4f\x68\xaa\x56\x67\x7f\xf8\x4c\
\xc5\x45\xac\xff\xba\x26\xf6\xea\x4b\xf0\xa1\xb3\x31\x21\xf5\xa7\
\x3c\x2f\x81\xa5\x67\x17\x3a\x72\xf1\xa7\x6c\x27\xb1\xc3\xc3\xce\
\x97\x26\xbf\x5e\xbc\xd8\xe1\xc1\x18\x91\x37\x99\xe5\x7f\x05\x40\
\xfc\x8b\x6c\xac\xf0\xde\xcd\x83\xee\xb3\x8f\x9e\x46\x22\xab\xcd\
\xac\x75\xc3\x71\x3a\xc0\x74\x13\xd3\x28\x5f\xd0\xf7\xe1\x79\x85\
\x3c\xa0\xaf\x7e\x51\x89\x59\x7b\x61\xa5\xed\xae\x98\x07\xe3\x04\
\x5f\xdb\xbb\xe7\xc1\x59\xef\xdf\xfb\xf4\x35\xed\xf1\x4c\xd4\xe1\
\x41\x5f\xf4\x45\x4f\xf5\x11\x4c\x51\x69\x7d\xed\x77\x4f\x3f\xff\
\xc2\xc9\xa6\xa9\x44\x5f\x76\x26\x55\x9e\x0f\xfa\x12\x54\xf0\x0d\
\xb7\xff\xc6\xbf\xce\xf3\xc3\xb7\x4e\xee\xb9\x42\xbd\x61\xc3\x8e\
\xfa\x52\x1e\x8c\x61\x22\xf0\xcd\xbf\x5f\x6c\xc6\x5b\x7f\x7e\xe5\
\x8d\x08\x63\xd6\x51\x50\x0f\x0f\x38\x70\x29\x04\xff\xf3\x26\x9d\
\x59\x34\xe1\x36\x5f\x1f\x7e\x3c\xd9\xed\x45\xc8\xfb\xe4\xfc\x18\
\xab\xa7\x11\xcf\x48\x1a\x5d\x6c\x44\x14\x20\x21\xda\x6a\xb8\x3e\
\x7d\x29\x3e\xef\x93\x35\x72\x05\xd6\xba\xe0\x20\x42\xa4\x60\x9b\
\x6d\xd3\x78\x2b\xc8\x78\x30\x92\xc9\xdc\x8e\x99\x66\x6e\xca\xe2\
\x5d\x62\x7c\x26\x17\xaa\x6b\x4b\x9f\xc2\xc2\xea\x3a\x4e\x77\x39\
\x14\xc8\x0c\xfe\xd0\x64\x41\xdf\xa9\x83\x8a\xf8\x4e\x5d\x84\x51\
\xc5\xa9\xe2\xfc\xa0\x16\x04\xe3\xf1\x8a\xd9\x4e\x3e\x18\xe5\x76\
\x85\x8d\x14\xc0\x71\xdd\xb4\x53\x17\xa5\x2b\x85\xa5\x47\x8d\x9d\
\xca\x32\x90\x38\x52\xf7\x81\xe9\x04\x8b\xfa\xdc\x52\xbd\xf8\x3e\
\x1e\x0c\x94\xe9\x8e\x1f\x00\x9c\x1d\xcf\x44\x63\xe7\x03\x28\x39\
\x34\xe9\x99\x9d\x08\x24\xd3\x53\x1e\x57\x96\x0f\x3a\xe1\x0e\xa5\
\x5c\x30\xb8\xa3\xf0\x69\x58\x4b\xdc\xd1\x92\x5b\xca\xf0\x42\x5f\
\x3e\xb0\xbe\x51\xfa\xe9\x67\xae\x61\x2c\x00\x3e\x9e\x38\x36\x89\
\x3e\xfd\x40\xfc\xb8\xf9\x80\xc9\xdc\xf5\x0b\xe6\xbd\xaf\x2c\x5e\
\x57\x4b\xc6\x6c\x89\x3a\xc5\x12\x86\x56\xcb\x41\x7c\xf5\xea\xeb\
\x8e\x5d\x7b\xec\xfa\xa3\x96\x8c\x73\x7b\xfe\xad\xe1\x1f\x64\x27\
\x26\x3e\x0d\xdd\x69\x2d\xec\x25\xc9\xfb\x74\xa3\xdb\xd1\x54\x6d\
\x0a\x6d\x0d\xd0\xf7\xe1\xe5\xb3\xf3\x22\x4d\xba\x5a\x14\x45\x95\
\xf4\xa7\x52\x89\x82\x54\x2a\x51\x14\x55\x2a\x61\x3e\x8a\x26\x9c\
\x39\x8b\x26\x06\xea\x7b\xf1\x35\x37\x99\xf2\x60\xf6\xd1\xdb\x4f\
\x7d\xec\xed\x97\x53\xb7\x3f\x3a\x2b\x18\x23\x7e\xe2\xec\xd7\xbf\
\xb1\xb6\xb6\xb6\xba\xba\xb6\xba\xb6\x16\x06\x6b\x6b\xab\xe1\x7a\
\x75\x35\xcc\x1d\x40\xbf\xf6\xc0\xd9\xbb\x19\x99\xa8\xb9\x13\x27\
\x97\x96\x96\x96\x96\x97\x97\x96\x97\xd2\xc1\xd2\xd2\x72\xb8\x5e\
\x5e\x0e\x73\x07\xd1\x2f\x9d\x98\x0f\x6f\x63\x95\x24\x49\x92\x24\
\x71\x9c\xc4\xd9\x20\x49\xe2\x70\x1d\xc7\x61\xee\x60\x7a\x4f\x19\
\x08\x3a\xe7\x9c\x73\xe1\x1d\x70\x18\x84\x77\xc3\xe1\x05\xb1\x73\
\x07\xd6\x03\xb4\x42\x42\xcd\x5f\xd4\xa4\xbd\x62\xfe\xba\xed\xc0\
\xfa\xc2\x81\xd4\xff\x45\xfe\x07\x6b\x56\xad\xc0\xb4\x6e\x5e\x96\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x0a\
\x0c\xf7\x1b\xc7\
\x00\x63\
\x00\x6f\x00\x6e\x00\x66\x00\x69\x00\x67\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x08\xab\xda\x07\
\x00\x75\
\x00\x70\x00\x64\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x0c\x9c\xa6\x27\
\x00\x71\
\x00\x75\x00\x65\x00\x72\x00\x79\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\
\x00\x00\x00\x2c\x00\x00\x00\x00\x00\x01\x00\x00\x1b\x97\
\x00\x00\x00\x46\x00\x00\x00\x00\x00\x01\x00\x00\x3c\x03\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| gpl-3.0 | -31,379,279,934,385,216 | 62.974071 | 96 | 0.726783 | false |
mayflower/pwdog | pwdog/config.py | 1 | 1339 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Patrick Otto <[email protected]>
# Franz Pletz <[email protected]>
#
# This file is part of pwdog.
#
# pwdog is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pwdog is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pwdog. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
class Config(object):
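"""Thin wrapper around ``ConfigParser`` that looks an option up in the
section named by ``context`` and falls back to the ``common`` section,
returning None if the option is found in neither.

Illustrative sketch (the file name, section and option are made up)::

    config = Config('/etc/pwdog.conf', 'client')
    config['host']  # tries [client] host, then [common] host, else None
"""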
def __init__(self, filename, context):
self.context = context
self.parser = ConfigParser.SafeConfigParser()
self.parser.read(filename)
def __getitem__(self, key):
return self.get(key)
def get(self, key):
try:
return self.parser.get(self.context, key)
except ConfigParser.NoOptionError:
try:
return self.parser.get('common', key)
except ConfigParser.NoOptionError:
return None
| gpl-3.0 | 3,462,157,196,561,457,700 | 32.475 | 70 | 0.67289 | false |
abelfunctions/abelfunctions | abelfunctions/riemann_theta/deprecated/riemanntheta.py | 1 | 31845 | """
Computing Riemann Theta Functions
This module implements the algorithms for computing Riemann theta
functions and their derivatives featured in the paper *"Computing
Riemann Theta Functions"* by Deconinck, Heil, Bobenko, van Hoeij, and
Schmies [CRTF].
**DEFINITION OF THE RIEMANN THETA FUNCTION:**
Let `g` be a positive integer, the *genus* of the Riemann theta
function. Let `H_g` denote the Siegel upper half space of dimension
`g(g+1)/2` over `\CC` , that is the space of symmetric complex
matrices whose imaginary parts are positive definite. When `g = 1`,
this is just the complex upper half plane.
The Riemann theta function `\theta : \CC^g \times H_g \to \CC` is
defined by the infinite series
.. math::
\theta( z | \Omega ) = \sum_{ n \in \ZZ^g } e^{ 2 \pi i \left( \tfrac{1}{2} n \cdot \Omega n + n \cdot z \right) }
It is holomorphic in both `z` and `\Omega`. It is quasiperiodic in `z`
with respect to the lattice `\{ M + \Omega N | M,N \in \ZZ^g \}`,
meaning that `\theta(z|\Omega)` is periodic upon translation of `z` by
vectors in `\ZZ^g` and periodic up to a multiplicative exponential
factor upon translation of `z` by vectors in `\Omega \ZZ^g`. As a
consequence, `\theta(z | \Omega)` has exponential growth in the
imaginary parts of `z`.
When `g=1`, the Riemann theta function is the third Jacobi theta
function.
.. math::
\theta( z | \Omega) = \theta_3(\pi z | \Omega) = 1 + 2 \sum_{n=1}^\infty e^{i \pi \Omega n^2} \cos(2 \pi n z)
Riemann theta functions are the fundamental building blocks for
Abelian functions, which generalize the classical elliptic functions
to multiple variables. Like elliptic functions, Abelian functions and
consequently Riemann theta functions arise in many applications such
as integrable partial differential equations, algebraic geometry, and
optimization.
For more information about the basic facts of and definitions
associated with Riemann theta functions, see the Digital Library of
Mathematical Functions ``http://dlmf.nist.gov/21``.
**ALGORITHM:**
The algorithm in [CRTF] is based on the observation that the
exponential growth of `\theta` can be factored out of the sum. Thus,
we only need to find an approximation for the oscillatory part. The
derivation is omitted here but the key observation is to write `z = x
+ i y` and `\Omega = X + i Y` where `x`, `y`, `X`, and `Y` are real
vectors and matrices. With the exponential growth part factored out
of the sum, the goal is to find the integral points `n \in \ZZ^g` such
that the sum over these points is within `O(\epsilon)` accuracy of the
infinite sum, for a given `z \in \CC^g` and numerical accuracy
`\epsilon`.
By default we use the uniform approximation formulas which use the
same integral points for all `z` for a fixed `\Omega`. This can be
changed by setting ``uniform=False``. This is ill-advised if you need
to compute the Riemann theta function for a fixed `\Omega` for many
different `z`.
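As a minimal usage sketch (illustrative only: the import assumes this
module is importable as ``riemanntheta``, and no output values are
claimed here)::

    import numpy as np
    from riemanntheta import RiemannTheta
    Omega = np.matrix([[1.0j, -0.5], [-0.5, 1.0j]])
    z = np.array([0, 0])
    RiemannTheta(z, Omega)                    # theta(z | Omega)
    RiemannTheta.oscillatory_part(z, Omega)   # finite-sum factor only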
**REFERENCES:**
- [CRTF] Computing Riemann Theta Functions. Bernard Deconinck, Matthias
Heil, Alexander Bobenko, Mark van Hoeij and Markus Schmies. Mathematics
of Computation 73 (2004) 1417-1442. The paper is available at
http://www.amath.washington.edu/~bernard/papers/pdfs/computingtheta.pdf.
Accompanying Maple code is available at
http://www.math.fsu.edu/~hoeij/RiemannTheta/
- Digital Library of Mathematical Functions - Riemann Theta Functions ( http://dlmf.nist.gov/21 ).
**AUTHORS:**
- Chris Swierczewski (2011-11): major overhaul to match notation of
[CRTF], numerous bug fixes, documentation, doctests, symbolic
evaluation
- Grady Williams (2012-2013)
"""
import numpy as np
import scipy as sp
import scipy.linalg as la
import riemanntheta_cy
from scipy.special import gamma, gammaincc, gammainccinv, gammaincinv
from scipy.optimize import fsolve
import time
from lattice_reduction import lattice_reduce
#from riemanntheta_omegas import RiemannThetaOmegas
gpu_capable = True
try:
from riemanntheta_cuda import RiemannThetaCuda
except ImportError:
gpu_capable = False
class RiemannTheta_Function(object):
r"""
Creates an instance of the Riemann theta function parameterized by a
Riemann matrix ``Omega``, directional derivative ``derivs``, and
derivative evaluation accuracy radius. See module level documentation
for more information about the Riemann theta function.
The Riemann theta function `\theta : \CC^g \times H_g \to \CC` is defined
by the infinite series
.. math::
\theta( z | \Omega ) = \sum_{ n \in \ZZ^g } e^{ 2 \pi i \left( \tfrac{1}{2} \langle \Omega n, n \rangle + \langle z, n \rangle \right) }
The precision of Riemann theta function evaluation is determined by
the precision of the base ring.
As shown in [CRTF], `n`th order derivatives introduce polynomial growth in
the oscillatory part of the Riemann theta approximations thus making a
global approximation formula impossible. Therefore, one must specify
a ``deriv_accuracy_radius`` of guaranteed accuracy when computing
derivatives of `\theta(z | \Omega)`.
INPUT:
- ``Omega`` -- a Riemann matrix (symmetric with positive definite imaginary part)
- ``deriv`` -- (default: ``[]``) a list of `g`-tuples representing a directional derivative of `\theta`. A list of `n` lists represents an `n`th order derivative.
- ``uniform`` -- (default: ``True``) a unform approximation allows the accurate computation of the Riemann theta function without having to recompute the integer points over which to take the finite sum. See [CRTF] for a more in-depth definition.
- ``deriv_accuracy_radius`` -- (default: 5) the guaranteed radius of accuracy in computing derivatives of theta. This parameter is necessary due to the polynomial growth of the non-doubly exponential part of theta
OUTPUT:
- ``Function_RiemannTheta`` -- a Riemann theta function parameterized by the Riemann matrix `\Omega`, derivatives ``deriv``, whether or not to use a uniform approximation, and derivative accuracy radius ``deriv_accuracy_radius``.
.. note::
For now, only second order derivatives are implemented. Approximation
formulas are derived in [CRTF]. It is not exactly clear how to
generalize these formulas. In most applications, second order
derivatives are sufficient.
"""
def __init__(self, uniform=True, deriv_accuracy_radius=5, tileheight = 32, tilewidth = 16):
"""
Defines parameters in constructed class instance.
"""
self.uniform = uniform
self.deriv_accuracy_radius = deriv_accuracy_radius
# cache radii, intpoints, and inverses
self._rad = None
self._intpoints = None
self._Omega = None
self._Yinv = None
self._T = None
self._Tinv = None
self._prec = 1e-8
if (gpu_capable):
self.parRiemann = RiemannThetaCuda(tileheight, tilewidth)
def lattice(self):
r"""
Compute the complex lattice corresponding to the Riemann matrix.
.. note::
Not yet implemented.
"""
raise NotImplementedError()
def genus(self):
r"""
The genus of the algebraic curve from which the Riemann matrix is
calculated. If `\Omega` is not block decomposable then this is just
the dimension of the matrix.
.. note::
Block decomposability detection is difficult and not yet
implemented, so for now this method simply raises
``NotImplementedError``.
"""
raise NotImplementedError()
def find_int_points(self,g, c, R, T,start):
r"""
Recursive helper function for computing the integer points needed in
each coordinate direction.
INPUT:
- ``g`` -- the genus. recursively used to determine integer
points along each axis.
- ``c`` -- center of integer point computation. `0 \in \CC^g`
is used when using the uniform approximation.
- ``R`` -- the radius of the ellipsoid along the current axis.
- ``start`` -- the starting integer point for each recursion
along each axis.
OUTPUT:
- ``intpoints`` -- (list) a list of all of the integer points
inside the bounding ellipsoid along a single axis
.. todo::
Recursion can be memory intensive in Python. For genus `g<30`
this is a reasonable computation but can be sped up by
writing a loop instead.
"""
a_ = c[g] - R/(np.sqrt(np.pi)*T[g,g])
b_ = c[g] + R/(np.sqrt(np.pi)*T[g,g])
# integer bounds for the current coordinate inside the ellipsoid
a = int(np.ceil(a_))
b = int(np.floor(b_))
# check if we reached the edge of the ellipsoid
if not a <= b: return np.array([])
# last dimension reached: append points
if g == 0:
points = np.array([])
for i in range(a, b+1):
#Note that this algorithm works backwards on the coordinates,
#the last coordinate found is x1 if our coordinates are {x1,x2, ... xn}
points = np.append(np.append([i],start), points)
return points
#
# compute new shifts, radii, start, and recurse
#
newg = g-1
newT = T[:(newg+1),:(newg+1)]
newTinv = la.inv(newT)
pts = []
for n in range(a, b+1):
chat = c[:newg+1]
that = T[:newg+1,g]
newc = (chat.T - (np.dot(newTinv, that)*(n - c[g]))).T
newR = np.sqrt(R**2 - np.pi*(T[g,g] * (n - c[g]))**2) # XXX
newstart = np.append([n],start)
newpts = self.find_int_points(newg,newc,newR,newT,newstart)
pts = np.append(pts,newpts)
return pts
def integer_points(self, Yinv, T, z, g, R):
"""
The set, `U_R`, of the integral points needed to compute Riemann
theta at the complex point `z` to the numerical precision given
by the Riemann matrix base field precision.
The set `U_R` of [CRTF], (21).
.. math::
\left\{ n \in \ZZ^g : \pi ( n - c )^{t} \cdot Y \cdot
(n - c ) < R^2, |c_j| < 1/2, j=1,\ldots,g \right\}
Since `Y` is positive definite it has Cholesky decomposition
`Y = T^t T`. Letting `\Lambda` be the lattice of vectors
`v(n), n \in ZZ^g` of the form `v(n)=\sqrt{\pi} T (n + [[ Y^{-1} n]])`,
we have that
.. math::
S_R = \left\{ v(n) \in \Lambda : || v(n) || < R \right\} .
Note that since the integer points are only required for oscillatory
part of Riemann theta all over these points are near the point
`0 \in \CC^g`. Additionally, if ``uniform == True`` then the set of
integer points is independent of the input points `z \in \CC^g`.
.. note::
To actually compute `U_R` one needs to compute the convex hull of
`2^{g}` bounding ellipsoids. Since this is computationally
expensive, an ellipsoid centered at `0 \in \CC^g` with large
radius is computed instead. This can cause accuracy issues with
ill-conditioned Riemann matrices, that is, those that produce
long and narrow bounding ellipsoids. See [CRTF] Section ### for
more information.
INPUTS:
- ``Yinv`` -- the inverse of the imaginary part of the Riemann matrix
`\Omega`
- ``T`` -- the Cholesky decomposition of the imaginary part of the
Riemann matrix `\Omega`
- ``z`` -- the point `z \in \CC` at which to compute `\theta(z|\Omega)`
- ``R`` -- the first ellipsoid semi-axis length as computed by ``self.radius()``
"""
# g = Yinv.shape[0]
pi = np.pi
z = np.array(z).reshape((g,1))
x = z.real
y = z.imag
# determine center of ellipsoid.
if self.uniform:
c = np.zeros((g,1))
intc = np.zeros((g,1))
leftc = np.zeros((g,1))
else:
c = Yinv * y
intc = c.round()
leftc = c - intc
int_points = self.find_int_points(g-1,leftc,R,T,[])
return int_points
def radius(self, T, prec, deriv=[]):
r"""
Calculate the radius `R` to compute the value of the theta function
to within `2^{-P + 1}` bits of precision where `P` is the
real / complex precision given by the input matrix. Used primarily
by ``RiemannTheta.integer_points()``.
`R` is the radius of [CRTF] Theorems 2, 4, and 6.
Input
-----
- ``T`` -- the Cholesky decomposition of the imaginary part of the
Riemann matrix `\Omega`
- ``prec`` -- the desired precision of the computation
- ``deriv`` -- (list) (default=``[]``) the derivative, if given.
Radius increases as order of derivative increases.
"""
Pi = np.pi
I = 1.0j
g = np.float64(T.shape[0])
# compute the length of the shortest lattice vector
#U = qflll(T)
A = lattice_reduce(T)
r = min(la.norm(A[:,i]) for i in range(int(g)))
normTinv = la.norm(la.inv(T))
# solve for the radius using:
# * Theorem 3 of [CRTF] (no derivative)
# * Theorem 5 of [CRTF] (first order derivative)
# * Theorem 7 of [CRTF] (second order derivative)
if len(deriv) == 0:
eps = prec
lhs = eps * (2.0/g) * (r/2.0)**g * gamma(g/2.0)
ins = gammainccinv(g/2.0,lhs)
R = np.sqrt(ins) + r/2.0
rad = max( R, (np.sqrt(2*g)+r)/2.0)
elif len(deriv) == 1:
# solve for left-hand side
L = self.deriv_accuracy_radius
normderiv = la.norm(np.array(deriv[0]))
eps = prec
lhs = (eps * (r/2.0)**g) / (np.sqrt(Pi)*g*normderiv*normTinv)
# define right-hand-side function involving the incomplete gamma
# function
def rhs(ins):
"""
Right-hand side function for computing the bounding ellipsoid
radius given a desired maximum error bound for the first
derivative of the Riemann theta function.
"""
return gamma((g+1)/2)*gammaincc((g+1)/2, ins) + \
np.sqrt(Pi)*normTinv*L * gamma(g/2)*gammaincc(g/2, ins) - \
float(lhs)
# define lower bound (guess) and attempt to solve for the radius
lbnd = np.sqrt(g+2 + np.sqrt(g**2+8)) + r
try:
ins = fsolve(rhs, float(lbnd))[0]
except RuntimeWarning:
# fsolve had trouble finding the solution. We try
# a larger initial guess since the radius increases
# as desired precision increases
try:
ins = fsolve(rhs, float(2*lbnd))[0]
except RuntimeWarning:
raise ValueError, "Could not find an accurate bound for the radius. Consider using higher precision."
# solve for radius
R = np.sqrt(ins) + r/2.0
rad = max(R,lbnd)
elif len(deriv) == 2:
# solve for left-hand side
L = self.deriv_accuracy_radius
prodnormderiv = np.prod([la.norm(d) for d in deriv])
eps = prec
lhs = (eps*(r/2.0)**g) / (2*Pi*g*prodnormderiv*normTinv**2)
# define right-hand-side function involving the incomplete gamma
# function
def rhs(ins):
"""
Right-hand side function for computing the bounding ellipsoid
radius given a desired maximum error bound for the second
derivative of the Riemann theta function.
"""
return gamma((g+2)/2)*gammaincc((g+2)/2, ins) + \
2*np.sqrt(Pi)*normTinv*L * \
gamma((g+1)/2)*gammaincc((g+1)/2,ins) + \
Pi*normTinv**2*L**2 * \
gamma(g/2)*gammaincc(g/2,ins) - float(lhs)
# define lower bound (guess) and attempt to solve for the radius
lbnd = np.sqrt(g+4 + np.sqrt(g**2+16)) + r
try:
ins = fsolve(rhs, float(lbnd))[0]
except RuntimeWarning:
# fsolve had trouble finding the solution. We try
# a larger initial guess since the radius increases
# as desired precision increases
try:
ins = fsolve(rhs, float(2*lbnd))[0]
except RuntimeWarning:
raise ValueError, "Could not find an accurate bound for the radius. Consider using higher precision."
# solve for radius
R = np.sqrt(ins) + r/2.0
rad = max(R,lbnd)
else:
# can't compute higher derivatives yet
raise NotImplementedError("Ellipsoid radius for first and second derivatives not yet implemented.")
return rad
"""
Performs simple re-caching of matrices and prepares them for GPU processing if necessary.
Input
-----
Omega - the Riemann matrix
X - The real part of Omega
Y - The imaginary part of Omega
Yinv - The inverse of Y
T - The Cholesky Decomposition of Y
g - The genus of the Riemann theta function
prec - The desired precision
deriv - the set of derivatives to compute (Possibly an empty set)
Tinv - The inverse of T
Output
-----
Data structures ready for GPU computation.
"""
def recache(self, Omega, X, Y, Yinv, T, g, prec, deriv, Tinv, batch):
recache_omega = not np.array_equal(self._Omega, Omega)
recache_prec = self._prec != prec
#Check if we've already computed the uniform radius and intpoints for this Omega/Precision
if (recache_omega or recache_prec):
#If not, recompute the integer summation set.
self._prec = prec
self._rad = self.radius(T, prec, deriv=deriv)
origin = [0]*g
self._intpoints = self.integer_points(Yinv, T, origin,
g, self._rad)
#If gpu_capable is set to true and batch is set to true then the data structures need to
#be loaded onto the GPU for computation. This code loads them onto the GPU and compiles
#the pyCuda functions.
if (gpu_capable and batch):
self.parRiemann.cache_intpoints(self._intpoints)
#Check if the gpu functions depending on the genus and Omega need to be compiled/recompiled
if (self._Omega is None or not g == self._Omega.shape[0] or self.parRiemann.g is None):
self.parRiemann.compile(g)
self.parRiemann.cache_omega_real(X)
self.parRiemann.cache_omega_imag(Yinv, T)
#Check if the gpu functions depending only on Omega need to be recompiled
else:
#Check if the gpu functions depending on the real part of Omega need to be recompiled
if (not np.array_equal(self._Omega.real, Omega.real)):
self.parRiemann.cache_omega_real(X)
#Check if the gpu functions depending on the imaginary part of Omega need to be recompiled
if (not np.array_equal(self._Omega.imag, Omega.imag)):
self.parRiemann.cache_omega_imag(Yinv, T)
self._Omega = Omega
"""
Handles calls to the GPU.
Input
-----
Z - the set of points to compute theta(z, Omega) at.
deriv - The derivatives to compute (possibly an empty list)
gpu_max - The maximum number of points to compute on the GPU at once
length - The number of points we're computing. (i.e. length == |Z|)
Output
-------
u - A list of the exponential growth terms of theta (or deriv(theta)) for each z in Z
v - A list of the approximations of the infinite sum of theta (or deriv(theta)) for each z in Z
"""
def gpu_process(self, Z, deriv, gpu_max, length):
v = np.array([])
u = np.array([])
#divide the set z into as many partitions as necessary
num_partitions = (length-1)//(gpu_max) + 1
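# e.g. length = 1200000 with gpu_max = 500000 gives num_partitions = 3;
# the loop below then covers the slices [0, 500000), [500000, 1000000)
# and [1000000, 1200000)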
for i in range(0, num_partitions):
#determine the starting and stopping points of the partition
p_start = (i)*gpu_max
p_stop = min(length, (i+1)*gpu_max)
if (len(deriv) > 0):
v_p = self.parRiemann.compute_v_with_derivs(Z[p_start: p_stop, :], deriv)
else:
v_p = self.parRiemann.compute_v_without_derivs(Z[p_start: p_stop, :])
u_p = self.parRiemann.compute_u()
u = np.concatenate((u, u_p))
v = np.concatenate((v, v_p))
return u,v
"""
Computes the exponential and oscillatory parts of the Riemann theta
function, or of a directional derivative of theta.
Input
-----
z - The point (or set of points) to compute the Riemann Theta function at. Note that if z is a set of
points, the variable "batch" must be set to true. If z is a single point it should be in the form of a
1-d numpy array, if z is a set of points it should be a list or 1-d numpy array of 1-d numpy arrays.
Omega - The Riemann matrix
batch - A variable that indicates whether or not a batch of points is being computed.
prec - The desired digits of precision to compute theta up to. Note that precision is limited to double
precision, which is about 15 decimal digits.
gpu - Indicates whether or not to do batch computations on a GPU; the default is True if the proper
PyCUDA libraries are installed and False otherwise.
gpu_max - The maximum number of points to be computed on a GPU at once.
Output
------
u - A list of the exponential growth terms of theta (or deriv(theta)) for each z in Z
v - A list of the approximations of the infinite sum of theta (or deriv(theta)) for each z in Z
"""
def exp_and_osc_at_point(self, z, Omega, batch = False, prec=1e-12, deriv=[], gpu=gpu_capable, gpu_max = 500000):
g = Omega.shape[0]
pi = np.pi
#Process all of the matrices into numpy matrices
X = np.array(Omega.real)
Y = np.array(Omega.imag)
Yinv = np.array(la.inv(Y))
T = np.array(la.cholesky(Y))
Tinv = np.array(la.inv(T))
deriv = np.array(deriv)
#Do recacheing if necessary
self.recache(Omega, X, Y, Yinv, T, g, prec, deriv, Tinv, batch)
# extract real and imaginary parts of input z
length = 1
if batch:
length = len(z)
z = np.array(z).reshape((length, g))
# compute integer points: check for uniform approximation
if self.uniform:
R = self._rad
S = self._intpoints
elif(batch):
raise Exception("Can't compute pointwise approximation for multiple points at once.\nUse uniform approximation or call the function separately for each point.")
else:
R = self.radius(T, prec, deriv=deriv)
S = self.integer_points(Yinv, T, z, g, R)
#Compute oscillatory and exponential terms
if gpu and batch and (length > gpu_max):
u,v = self.gpu_process(z, deriv, gpu_max, length)
elif gpu and batch and len(deriv) > 0:
v = self.parRiemann.compute_v_with_derivs(z, deriv)
elif gpu and batch:
v = self.parRiemann.compute_v_without_derivs(z)
elif (len(deriv) > 0):
v = riemanntheta_cy.finite_sum_derivatives(X, Yinv, T, z, S, deriv, g, batch)
else:
v = riemanntheta_cy.finite_sum(X, Yinv, T, z, S, g, batch)
if (length > gpu_max and gpu):
#u already computed
pass
elif (gpu and batch):
u = self.parRiemann.compute_u()
elif (batch):
K = len(z)
u = np.zeros(K)
for i in range(K):
w = np.array([z[i,:].imag])
val = np.pi*np.dot(w, np.dot(Yinv,w.T)).item(0,0)
u[i] = val
else:
u = np.pi*np.dot(z.imag,np.dot(Yinv,z.imag.T)).item(0,0)
return u,v
def exponential_part(self, *args, **kwds):
return self.exp_and_osc_at_point(*args, **kwds)[0]
def oscillatory_part(self, *args, **kwds):
return self.exp_and_osc_at_point(*args, **kwds)[1]
"""
Computes the value (or a first or second directional derivative, via
``deriv``) of the Riemann theta function with characteristic
``chars = [alpha, beta]`` at the point `z`. The implementation below
uses the standard shift identity

    theta[alpha, beta](z | Omega)
        = exp(2*pi*i*(alpha.Omega.alpha/2 + alpha.(z + beta)))
          * theta(z + Omega*alpha + beta | Omega).
"""
def characteristic(self, chars, z, Omega, deriv = [], prec=1e-8):
val = 0
z = np.matrix(z).T
alpha, beta = np.matrix(chars[0]).T, np.matrix(chars[1]).T
z_tilde = z + np.dot(Omega,alpha) + beta
if len(deriv) == 0:
u,v = self.exp_and_osc_at_point(z_tilde, Omega)
quadratic_term = np.dot(alpha.T, np.dot(Omega,alpha))[0,0]
exp_shift = 2*np.pi*1.0j*(.5*quadratic_term + np.dot(alpha.T, (z + beta)))
theta_val = np.exp(u + exp_shift)*v
elif len(deriv) == 1:
d = deriv[0]
scalar_term = np.exp(2*np.pi*1.0j*(.5*np.dot(alpha.T, np.dot(Omega, alpha)) + np.dot(alpha.T, (z + beta))))
alpha_part = 2*np.pi*1.0j*alpha
theta_eval = self.value_at_point(z_tilde, Omega, prec=prec)
term1 = np.dot(theta_eval*alpha_part.T, d)
term2 = self.value_at_point(z_tilde, Omega, prec=prec, deriv=d)
theta_val = scalar_term*(term1 + term2)
elif len(deriv) == 2:
d1,d2 = np.matrix(deriv[0]).T, np.matrix(deriv[1]).T
scalar_term = np.exp(2*np.pi*1.0j*(.5*np.dot(alpha.T, np.dot(Omega, alpha))[0,0] + np.dot(alpha.T, (z + beta))[0,0]))
#Compute the non-theta hessian
g = Omega.shape[0]
non_theta_hess = np.zeros((g, g), dtype = np.complex128)
theta_eval = self.value_at_point(z_tilde, Omega, prec=prec)
theta_grad = np.zeros(g, dtype=np.complex128)
for i in range(g):
partial = np.zeros(g)
partial[i] = 1.0
theta_grad[i] = self.value_at_point(z_tilde, Omega, prec = prec, deriv = partial)
for n in range(g):
for k in range(g):
non_theta_hess[n,k] = 2*np.pi*1.j*alpha[k,0] * (2*np.pi*1.j*theta_eval*alpha[n,0] + theta_grad[n]) + (2*np.pi*1.j*theta_grad[k]*alpha[n,0])
term1 = np.dot(d1.T, np.dot(non_theta_hess, d2))[0,0]
term2 = self.value_at_point(z_tilde, Omega, prec=prec, deriv=deriv)
theta_val = scalar_term*(term1 + term2)
else:
raise NotImplementedError()
return theta_val
r"""
Returns the value of `\theta(z,\Omega)` at a point `z` or set of points if batch is True.
"""
def value_at_point(self, z, Omega, prec=1e-8, deriv=[], gpu=gpu_capable, batch=False):
exp_part, osc_part = self.exp_and_osc_at_point(z, Omega, prec=prec,
deriv=deriv, gpu=gpu,batch=batch)
return np.exp(exp_part) * osc_part
def __call__(self, z, Omega, prec=1e-8, deriv=[], gpu=gpu_capable, batch=False):
r"""
Returns the value of `\theta(z,\Omega)` at a point `z`. Lazy evaluation
is done if the input contains symbolic variables. If batch is set to true
then the function expects a list/numpy array as input and returns a numpy array as output.
"""
return self.value_at_point(z, Omega, prec=prec, deriv=deriv, gpu=gpu, batch=batch)
# declaration of Riemann theta
RiemannTheta = RiemannTheta_Function()
if __name__=="__main__":
print "=== Riemann Theta ==="
theta = RiemannTheta
z = np.array([0,0])
Omega = np.matrix([[1.0j,-0.5],[-0.5,1.0j]])
print "Test #1:"
print theta.value_at_point(z, Omega, batch = False)
print "1.1654 - 1.9522e-15*I"
print
print "Test #2:"
z1 = np.array([1.0j,1.0j])
print theta.value_at_point(z1,Omega)
print "-438.94 + 0.00056160*I"
print
print "Batch Test"
z0 = np.array([0, 0])
z1 = np.array([1.0j,1.0j])
z2 = np.array([.5 + .5j, .5 + .5j])
z3 = np.array([0 + .5j, .33 + .8j])
z4 = np.array([.345 + .768j, -44 - .76j])
print theta.value_at_point([z0,z1,z2,z3,z4],Omega, batch=True)
print
if (gpu_capable):
a = np.random.rand(10)
b = np.random.rand(10)
c = max(b)
b = 1.j*b/(1.0*c)
a = a + b
print a.size
a = a.reshape(5,2)
start1 = time.clock()
print theta.value_at_point(a, Omega, batch=True, prec=1e-12)
print("GPU time to perform calculation: " + str(time.clock() - start1))
start2 = time.clock()
print theta.value_at_point(a, Omega, gpu=False, batch=True,prec=1e-12)
print("CPU time to do same calculation: " + str(time.clock() - start2))
print
print "Derivative Tests:"
print "Calculating directional derivatives at z = [i, 0]"
print
y = np.array([1.0j, 0])
print "For [[1,0]]:"
print theta.value_at_point(y, Omega, deriv = [[1,0]])
print "0 - 146.49i"
print
print "For [[1,0] , [0,1]]: "
print theta.value_at_point(y, Omega, deriv = [[1,0], [0,1]])
print "0 + 0i"
print
print "For [[0,1], [1,0]]: "
print theta.value_at_point(y, Omega, deriv = [[0,1], [1,0]])
print "0 + 0i"
print
print "For [[1,0],[1,0],[1,1]]:"
print theta.value_at_point(y, Omega, deriv = [[1,0], [1,0], [1,1]])
print "0 + 7400.39i"
print
print "For [[1,1],[1,1],[1,1],[1,1]]: "
print theta.value_at_point(y, Omega, deriv = [[1,1],[1,1],[1,1],[1,1]])
print "41743.92 + 0i"
print
print ("GPU Derivative Test")
l = []
for x in range(5):
l.append(y)
#print theta.value_at_point(l, Omega, deriv = [[1,1],[1,1],[1,1],[1,1]], batch=True)
print "Theta w/ Characteristic Test"
z = np.array([1.j,0])
Omega = np.matrix([[1.0j,-0.5],[-0.5,1.0j]])
deriv = [[1,0],[1,0]]
chars = [[0,0],[0,0]]
print theta.characteristic(chars, z, Omega, deriv)
print "Test #3"
import pylab as p
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import matplotlib.pyplot as plt
print "\tCalculating theta..."
fig1 = plt.figure()
ax = fig1.add_subplot(1,1,1)
SIZE = 128
x = np.linspace(0,1,SIZE)
y = np.linspace(0,5,SIZE)
X,Y = p.meshgrid(x,y)
Z = X + Y*1.0j
Z = Z.flatten()
U,V = theta.exp_and_osc_at_point([[z,0] for z in Z], Omega, batch=True)
Z = (V.reshape(SIZE,SIZE)).imag
print "\tPlotting..."
ax.contourf(X,Y,Z,7,antialiased=True)
fig1.show()
print "\tCalculating theta..."
fig2 = plt.figure()
ax = fig2.add_subplot(1,1,1)
SIZE = 512
x = np.linspace(-7,7,SIZE)
y = np.linspace(-7,7,SIZE)
X,Y = p.meshgrid(x,y)
Z = X + Y*1.j
Z = Z.flatten()
w = np.array([[1.j]])
print w
U,V = theta.exp_and_osc_at_point(Z, w, batch = True)
print theta._intpoints
Z = (V.reshape(SIZE,SIZE)).real
print "\tPlotting..."
ax.contourf(X,Y,Z,7,antialiased=True)
fig2.show()
| mit | -6,444,361,531,170,360,000 | 37.882784 | 250 | 0.583859 | false |