# === repo: shenghaozou/PythonGrader | path: P6/gradeSettings.py | license: apache-2.0 ===
import re

ASSIGNMENT_NAME = 'P6'
ASSIGNMENT_TEST_NUM = 7
EMAIL_SEND = 0
EMAIL_SEND_UPPER_BOUND = 0


def toset(x):
    # Normalize a sequence to a set so comparisons can ignore element order.
    if x is not None:
        return set(x)
    else:
        return None
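
# Illustrative behaviour of the helper above (not in the original file):
#   toset([1, 5, 3]) -> set([1, 3, 5])
#   toset(None)      -> None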

OUTPUT_RESULT_REG_EXP = []
SCRIPT_REG_EXP = []
SCRIPT_EXISTENCE_REG_EXP = []

FUNCTION_ORDER = ['get_level', 'get_walk', 'update', 'run_simulation']

# Expected inputs/outputs for each graded function. 'ret_mapper' names a
# helper (e.g. toset above) presumably applied to the return value before
# comparison, so order-insensitive answers can be accepted.
TEST_FUNC = {
    'get_level': [{'input_args': [0], 'return_val': 0},
                  {'input_args': [6], 'return_val': 2},
                  {'input_args': [15], 'return_val': 4},
                  {'input_args': [12], 'return_val': 4},
                  {'input_args': [11], 'return_val': 3}],
    'get_walk': [{'prerun': 'get_walk.py', 'input_args': [3], 'return_val': [1, 5, 3]},
                 {'prerun': 'get_walk.py', 'input_args': [2], 'return_val': [2, 5, 3]},
                 {'prerun': 'get_walk2.py', 'input_args': [1], 'return_val': [3, 4, 3]},
                 {'prerun': 'get_walk2.py', 'input_args': [4], 'return_val': [1, 4, 3]},
                 {'prerun': 'get_walk.py', 'input_args': [3], 'return_val': set([1, 5, 3]), 'ret_mapper': 'toset'},
                 {'prerun': 'get_walk.py', 'input_args': [2], 'return_val': set([2, 5, 3]), 'ret_mapper': 'toset'},
                 {'prerun': 'get_walk2.py', 'input_args': [1], 'return_val': set([3, 4]), 'ret_mapper': 'toset'},
                 {'prerun': 'get_walk2.py', 'input_args': [4], 'return_val': set([1, 4, 3]), 'ret_mapper': 'toset'}],
    'update': [{'input_args': [12, -1, 3], 'return_val': 9},
               {'input_args': [0, 1, 2], 'return_val': 2},
               {'input_args': [5, -1, 3], 'return_val': 2},
               {'input_args': [8, 1, 5], 'return_val': 13}],
    'run_simulation': [{'input_file': 'run_simulation_1.txt',
                        'return_val': [0, 2, 4, 6, 8, 10, 8, 6, 4, 2, 4, 6, 8, 10, 12, 10, 8, 6, 4, 6, 8, 10, 12, 14, 12, 10, 8, 6],
                        'stdout_pat_setting': re.I,
                        'stdout_pat_file': 'run_simulation_1_out.txt'},
                       {'input_file': 'run_simulation_2.txt',
                        'return_val': [0, 4, 8, 12, 16, 20, 24, 20, 16, 12, 8, 4, 8, 12, 16, 20, 24, 28, 24, 20, 16, 12, 8, 12, 16, 20, 24, 28, 32, 28, 24, 20, 16, 12],
                        'stdout_pat_setting': re.I,
                        'stdout_pat_file': 'run_simulation_2_out.txt'},
                       {'input_file': 'run_simulation_1.txt',
                        'stdout_pat_setting': re.I,
                        'stdout_pat_file': 'run_simulation_prompt_out.txt'},
                       {'input_file': 'run_simulation_1.txt',
                        'stdout_pat_setting': re.I,
                        'stdout_pat_file': 'run_simulation_value_out.txt'},
                       {'input_file': 'run_simulation_3.txt',
                        'return_val': [0, 2, 4, 6, 8, 10, 12, 10, 8, 6, 4, 6, 8, 10, 12, 14, 16, 14, 12, 10, 8, 10, 12, 14, 16, 18, 20, 18, 16, 14, 12, 14,
                                       16, 18, 20, 22, 24, 22, 20, 18, 16, 18, 20, 22, 24, 26, 28, 26, 24, 22, 20, 22, 24, 26, 28, 30, 32, 30, 28, 26, 24,
                                       26, 28, 30, 32, 34, 36, 34, 32, 30, 28, 30, 32, 34, 36, 38, 40, 38, 36, 34, 32, 34, 36, 38, 40, 42, 44, 42, 40, 38, 36],
                        'stdout_pat_setting': re.I,
                        'stdout_pat_file': 'run_simulation_3_out.txt'}]}

# Each function's source must contain a docstring (triple-quoted string).
TEST_SCRIPT = {'get_level': [{'script_pat': r'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'get_walk': [{'script_pat': r'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'update': [{'script_pat': r'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'run_simulation': [{'script_pat': r'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}]
               }

GRADING_RULES_ORDER = ['get_level',
                       'get_walk',
                       'update',
                       'run_simulation_prompt',
                       'run_simulation_value',
                       'run_simulation_termination',
                       'docstring']

# 'rules' appears to control how the individual checks combine into a score:
# 'and' requires every check to pass, while 'add'/'groupadd' accumulate points.
GRADING_RULES = {
    'get_level': {'rules': 'and', 'order': 0, 'points': 1,
                  'test': [{'type': 'func', 'func_name': 'get_level', 'index': 0, 'check': 'return_val', 'error': 'test 1 of get_level'},
                           {'type': 'func', 'func_name': 'get_level', 'index': 1, 'check': 'return_val', 'error': 'test 2 of get_level'},
                           {'type': 'func', 'func_name': 'get_level', 'index': 2, 'check': 'return_val', 'error': 'test 3 of get_level'},
                           {'type': 'func', 'func_name': 'get_level', 'index': 3, 'check': 'return_val', 'error': 'test 4 of get_level'},
                           {'type': 'func', 'func_name': 'get_level', 'index': 4, 'check': 'return_val', 'error': 'test 5 of get_level'}]},
    'get_walk': {'rules': 'groupadd', 'order': 1, 'points': 2,
                 'groups': [(1, [{'type': 'func', 'func_name': 'get_walk', 'index': 0, 'check': 'return_val', 'error': 'test 1 of get_walk'},
                                 {'type': 'func', 'func_name': 'get_walk', 'index': 1, 'check': 'return_val', 'error': 'test 2 of get_walk'},
                                 {'type': 'func', 'func_name': 'get_walk', 'index': 2, 'check': 'return_val', 'error': 'test 3 of get_walk'},
                                 {'type': 'func', 'func_name': 'get_walk', 'index': 3, 'check': 'return_val', 'error': 'test 4 of get_walk'}]),
                            (1, [{'type': 'func', 'func_name': 'get_walk', 'index': 4, 'check': 'return_val', 'error': 'test 1 of get_walk set'},
                                 {'type': 'func', 'func_name': 'get_walk', 'index': 5, 'check': 'return_val', 'error': 'test 2 of get_walk set'},
                                 {'type': 'func', 'func_name': 'get_walk', 'index': 6, 'check': 'return_val', 'error': 'test 3 of get_walk set'},
                                 {'type': 'func', 'func_name': 'get_walk', 'index': 7, 'check': 'return_val', 'error': 'test 4 of get_walk set'}])]},
    'update': {'rules': 'and', 'order': 2, 'points': 1,
               'test': [{'type': 'func', 'func_name': 'update', 'index': 0, 'check': 'return_val', 'error': 'test 1 of update'},
                        {'type': 'func', 'func_name': 'update', 'index': 1, 'check': 'return_val', 'error': 'test 2 of update'},
                        {'type': 'func', 'func_name': 'update', 'index': 2, 'check': 'return_val', 'error': 'test 3 of update'},
                        {'type': 'func', 'func_name': 'update', 'index': 3, 'check': 'return_val', 'error': 'test 4 of update'}]},
    'run_simulation_prompt': {'rules': 'and', 'order': 3, 'points': 2,
                              'test': [{'type': 'func', 'func_name': 'run_simulation', 'index': 2, 'check': 'stdout_pat', 'points': 2, 'error': 'test of run_simulation prompt correctly'}]},
    'run_simulation_value': {'rules': 'and', 'order': 4, 'points': 1,
                             'test': [{'type': 'func', 'func_name': 'run_simulation', 'index': 3, 'check': 'stdout_pat', 'error': 'test of run_simulation values correctly'}]},
    'run_simulation_termination': {'rules': 'add', 'order': 5, 'points': 2,
                                   'test': [{'type': 'func', 'func_name': 'run_simulation', 'index': 0, 'check': 'return_val', 'error': 'test 1 of run_simulation termination'},
                                            {'type': 'func', 'func_name': 'run_simulation', 'index': 4, 'check': 'return_val', 'error': 'test 2 of run_simulation termination'}]},
    'docstring': {'rules': 'and', 'order': 6, 'points': 1,
                  'test': [{'type': 'script', 'func_name': 'get_level', 'index': 0, 'check': 'script_pat', 'error': 'docstring in {func_name}'},
                           {'type': 'script', 'func_name': 'get_walk', 'index': 0, 'check': 'script_pat', 'error': 'docstring in {func_name}'},
                           {'type': 'script', 'func_name': 'update', 'index': 0, 'check': 'script_pat', 'error': 'docstring in {func_name}'},
                           {'type': 'script', 'func_name': 'run_simulation', 'index': 0, 'check': 'script_pat', 'error': 'docstring in {func_name}'}]}
}

SCRIPT_TEST = True
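
# A minimal sketch (not part of PythonGrader; `student` stands for the
# imported submission module and is hypothetical here) of how a harness
# might consume one TEST_FUNC entry:
#
#   case = TEST_FUNC['update'][0]
#   result = getattr(student, 'update')(*case['input_args'])
#   passed = (result == case['return_val'])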


# === repo: style95/openwhisk | path: tools/actionProxy/invoke.py | license: apache-2.0 ===
#!/usr/bin/env python
"""Executable Python script for testing the action proxy.

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

This script is useful for testing the action proxy (or its derivatives)
by simulating invoker interactions. Use it in combination with
docker run <image> which starts up the action proxy.

Example:
  docker run -i -t -p 8080:8080 dockerskeleton # locally built images may be referenced without a tag
  ./invoke.py init <action source file>
  ./invoke.py run '{"some":"json object as a string"}'

For additional help, try ./invoke.py -h
"""

import os
import re
import sys
import json
import base64
import requests
import codecs
import argparse

try:
    import argcomplete
except ImportError:
    argcomplete = False


def main():
    try:
        args = parseArgs()
        exitCode = {
            'init': init,
            'run': run
        }[args.cmd](args)
    except Exception as e:
        print(e)
        exitCode = 1
    sys.exit(exitCode)


def dockerHost():
    dockerHost = 'localhost'
    if 'DOCKER_HOST' in os.environ:
        try:
            dockerHost = re.compile(r'tcp://(.*):[\d]+').findall(os.environ['DOCKER_HOST'])[0]
        except Exception:
            print('cannot determine docker host from %s' % os.environ['DOCKER_HOST'])
            sys.exit(-1)
    return dockerHost
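
# Illustrative parse (the environment value is hypothetical):
#   DOCKER_HOST='tcp://192.168.99.100:2376'  ->  dockerHost() == '192.168.99.100'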

def containerRoute(args, path):
    return 'http://%s:%s/%s' % (args.host, args.port, path)


class objectify(object):
    # Thin wrapper turning a dict into an attribute-accessible object.
    def __init__(self, d):
        self.__dict__ = d


def parseArgs():
    parser = argparse.ArgumentParser(description='initialize and run an OpenWhisk action container')
    parser.add_argument('-v', '--verbose', help='verbose output', action='store_true')
    parser.add_argument('--host', help='action container host', default=dockerHost())
    parser.add_argument('-p', '--port', help='action container port number', default=8080, type=int)

    subparsers = parser.add_subparsers(title='available commands', dest='cmd')

    initmenu = subparsers.add_parser('init', help='initialize container with src or zip/tgz file')
    initmenu.add_argument('-b', '--binary', help='treat artifact as binary', action='store_true')
    initmenu.add_argument('-r', '--run', nargs='?', default=None, help='run after init')
    initmenu.add_argument('main', nargs='?', default='main', help='name of the "main" entry method for the action')
    initmenu.add_argument('artifact', help='a source file or zip/tgz archive')
    initmenu.add_argument('env', nargs='?', help='the environment variables to export to the action, either a reference to a file or an inline JSON object', default=None)

    runmenu = subparsers.add_parser('run', help='send arguments to container to run action')
    runmenu.add_argument('payload', nargs='?', help='the arguments to send to the action, either a reference to a file or an inline JSON object', default=None)

    if argcomplete:
        argcomplete.autocomplete(parser)
    return parser.parse_args()


def init(args):
    main = args.main
    artifact = args.artifact

    if artifact and (args.binary or artifact.endswith('.zip') or artifact.endswith('tgz') or artifact.endswith('jar')):
        with open(artifact, 'rb') as fp:
            contents = fp.read()
        contents = str(base64.b64encode(contents), 'utf-8')
        binary = True
    elif artifact != '':
        with(codecs.open(artifact, 'r', 'utf-8')) as fp:
            contents = fp.read()
        binary = False
    else:
        contents = None
        binary = False

    r = requests.post(
        containerRoute(args, 'init'),
        json={
            "value": {
                "code": contents,
                "binary": binary,
                "main": main,
                "env": processPayload(args.env)
            }
        })
    print(r.text)

    if r.status_code == 200 and args.run is not None:
        runArgs = objectify({})
        runArgs.__dict__ = args.__dict__.copy()
        runArgs.payload = args.run
        run(runArgs)


def run(args):
    value = processPayload(args.payload)
    if args.verbose:
        print('Sending value: %s...' % json.dumps(value)[0:40])
    r = requests.post(containerRoute(args, 'run'), json={"value": value})
    print(str(r.content, 'utf-8'))


def processPayload(payload):
    if payload and os.path.exists(payload):
        with open(payload) as fp:
            return json.load(fp)
    try:
        d = json.loads(payload if payload else '{}')
        if isinstance(d, dict):
            return d
        else:
            raise
    except:
        print('payload must be a JSON object.')
        sys.exit(-1)
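
# Illustrative behaviour (values are hypothetical):
#   processPayload('{"name": "world"}')  ->  {'name': 'world'}
#   processPayload('payload.json')       ->  parsed contents of that file
#   processPayload(None)                 ->  {}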

if __name__ == '__main__':
    main()


# === repo: manassolanki/erpnext | path: erpnext/setup/setup_wizard/operations/company_setup.py | license: gpl-3.0 ===
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals

import frappe
from frappe import _
from frappe.utils import cstr, getdate
from frappe.utils.file_manager import save_file

from .default_website import website_maker
from erpnext.accounts.doctype.account.account import RootNotEditable


def create_fiscal_year_and_company(args):
    if (args.get('fy_start_date')):
        curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
        frappe.get_doc({
            "doctype": "Fiscal Year",
            'year': curr_fiscal_year,
            'year_start_date': args.get('fy_start_date'),
            'year_end_date': args.get('fy_end_date'),
        }).insert()

    if (args.get('company_name')):
        frappe.get_doc({
            "doctype": "Company",
            'company_name': args.get('company_name'),
            'enable_perpetual_inventory': 1,
            'abbr': args.get('company_abbr'),
            'default_currency': args.get('currency'),
            'country': args.get('country'),
            'create_chart_of_accounts_based_on': 'Standard Template',
            'chart_of_accounts': args.get('chart_of_accounts'),
            'domain': args.get('domains')[0]
        }).insert()


def enable_shopping_cart(args):
    # Needs price_lists
    frappe.get_doc({
        "doctype": "Shopping Cart Settings",
        "enabled": 1,
        'company': args.get('company_name'),
        'price_list': frappe.db.get_value("Price List", {"selling": 1}),
        'default_customer_group': _("Individual"),
        'quotation_series': "QTN-",
    }).insert()


def create_bank_account(args):
    if args.get("bank_account"):
        company_name = args.get('company_name')
        bank_account_group = frappe.db.get_value("Account",
            {"account_type": "Bank", "is_group": 1, "root_type": "Asset",
             "company": company_name})
        if bank_account_group:
            bank_account = frappe.get_doc({
                "doctype": "Account",
                'account_name': args.get("bank_account"),
                'parent_account': bank_account_group,
                'is_group': 0,
                'company': company_name,
                "account_type": "Bank",
            })
            try:
                return bank_account.insert()
            except RootNotEditable:
                frappe.throw(_("Bank account cannot be named as {0}").format(args.get("bank_account")))
            except frappe.DuplicateEntryError:
                # bank account same as a CoA entry
                pass


def create_email_digest():
    from frappe.utils.user import get_system_managers
    system_managers = get_system_managers(only_name=True)
    if not system_managers:
        return

    companies = frappe.db.sql_list("select name FROM `tabCompany`")
    for company in companies:
        if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
            edigest = frappe.get_doc({
                "doctype": "Email Digest",
                "name": "Default Weekly Digest - " + company,
                "company": company,
                "frequency": "Weekly",
                "recipient_list": "\n".join(system_managers)
            })

            for df in edigest.meta.get("fields", {"fieldtype": "Check"}):
                if df.fieldname != "scheduler_errors":
                    edigest.set(df.fieldname, 1)

            edigest.insert()

    # scheduler errors digest
    if companies:
        edigest = frappe.new_doc("Email Digest")
        edigest.update({
            "name": "Scheduler Errors",
            "company": companies[0],
            "frequency": "Daily",
            "recipient_list": "\n".join(system_managers),
            "scheduler_errors": 1,
            "enabled": 1
        })
        edigest.insert()


def create_logo(args):
    if args.get("attach_logo"):
        attach_logo = args.get("attach_logo").split(",")
        if len(attach_logo) == 3:
            filename, filetype, content = attach_logo
            fileurl = save_file(filename, content, "Website Settings", "Website Settings",
                decode=True).file_url
            frappe.db.set_value("Website Settings", "Website Settings", "brand_html",
                "<img src='{0}' style='max-width: 40px; max-height: 25px;'> {1}".format(fileurl, args.get("company_name")))


def create_website(args):
    website_maker(args)


def get_fy_details(fy_start_date, fy_end_date):
    start_year = getdate(fy_start_date).year
    if start_year == getdate(fy_end_date).year:
        fy = cstr(start_year)
    else:
        fy = cstr(start_year) + '-' + cstr(start_year + 1)
    return fy
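
# Illustrative output of the helper above (dates are hypothetical):
#   get_fy_details('2015-04-01', '2016-03-31') -> '2015-2016'
#   get_fy_details('2015-01-01', '2015-12-31') -> '2015'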


# === repo: ticosax/django | path: tests/middleware/tests.py | license: bsd-3-clause ===
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import gzip
import random
import re
from io import BytesIO
from unittest import skipIf

from django.conf import settings
from django.core import mail
from django.http import (
    FileResponse, HttpRequest, HttpResponse, HttpResponsePermanentRedirect,
    HttpResponseRedirect, StreamingHttpResponse,
)
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import (
    BrokenLinkEmailsMiddleware, CommonMiddleware,
)
from django.middleware.gzip import GZipMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.test import RequestFactory, TestCase, override_settings
from django.test.utils import patch_logger
from django.utils import six
from django.utils.encoding import force_str
from django.utils.six.moves import range
from django.utils.six.moves.urllib.parse import quote


@override_settings(ROOT_URLCONF='middleware.urls')
class CommonMiddlewareTest(TestCase):

    rf = RequestFactory()

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_have_slash(self):
        """
        URLs with slashes should go unmolested.
        """
        request = self.rf.get('/slash/')
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_slashless_resource(self):
        """
        Matches to explicit slashless URLs should go unmolested.
        """
        request = self.rf.get('/noslash')
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_slashless_unknown(self):
        """
        APPEND_SLASH should not redirect to unknown resources.
        """
        request = self.rf.get('/unknown')
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_redirect(self):
        """
        APPEND_SLASH should redirect slashless URLs to a valid pattern.
        """
        request = self.rf.get('/slash')
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url, '/slash/')

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_redirect_querystring(self):
        """
        APPEND_SLASH should preserve querystrings when redirecting.
        """
        request = self.rf.get('/slash?test=1')
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.url, '/slash/?test=1')

    @override_settings(APPEND_SLASH=True, DEBUG=True)
    def test_append_slash_no_redirect_on_POST_in_DEBUG(self):
        """
        Tests that while in debug mode, an exception is raised with a warning
        when a failed attempt is made to POST, PUT, or PATCH to an URL which
        would normally be redirected to a slashed version.
        """
        msg = "maintaining %s data. Change your form to point to testserver/slash/"
        request = self.rf.get('/slash')
        request.method = 'POST'
        with six.assertRaisesRegex(self, RuntimeError, msg % request.method):
            CommonMiddleware().process_request(request)
        request = self.rf.get('/slash')
        request.method = 'PUT'
        with six.assertRaisesRegex(self, RuntimeError, msg % request.method):
            CommonMiddleware().process_request(request)
        request = self.rf.get('/slash')
        request.method = 'PATCH'
        with six.assertRaisesRegex(self, RuntimeError, msg % request.method):
            CommonMiddleware().process_request(request)

    @override_settings(APPEND_SLASH=False)
    def test_append_slash_disabled(self):
        """
        Disabling append slash functionality should leave slashless URLs alone.
        """
        request = self.rf.get('/slash')
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_quoted(self):
        """
        URLs which require quoting should be redirected to their slash version ok.
        """
        request = self.rf.get(quote('/needsquoting#'))
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(
            r.url,
            '/needsquoting%23/')

    @override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
    def test_prepend_www(self):
        request = self.rf.get('/path/')
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(
            r.url,
            'http://www.testserver/path/')

    @override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
    def test_prepend_www_append_slash_have_slash(self):
        request = self.rf.get('/slash/')
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url,
                         'http://www.testserver/slash/')

    @override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
    def test_prepend_www_append_slash_slashless(self):
        request = self.rf.get('/slash')
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url,
                         'http://www.testserver/slash/')

    # The following tests examine expected behavior given a custom urlconf that
    # overrides the default one through the request object.

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_have_slash_custom_urlconf(self):
        """
        URLs with slashes should go unmolested.
        """
        request = self.rf.get('/customurlconf/slash/')
        request.urlconf = 'middleware.extra_urls'
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_slashless_resource_custom_urlconf(self):
        """
        Matches to explicit slashless URLs should go unmolested.
        """
        request = self.rf.get('/customurlconf/noslash')
        request.urlconf = 'middleware.extra_urls'
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_slashless_unknown_custom_urlconf(self):
        """
        APPEND_SLASH should not redirect to unknown resources.
        """
        request = self.rf.get('/customurlconf/unknown')
        request.urlconf = 'middleware.extra_urls'
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_redirect_custom_urlconf(self):
        """
        APPEND_SLASH should redirect slashless URLs to a valid pattern.
        """
        request = self.rf.get('/customurlconf/slash')
        request.urlconf = 'middleware.extra_urls'
        r = CommonMiddleware().process_request(request)
        self.assertIsNotNone(r,
            "CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url, '/customurlconf/slash/')

    @override_settings(APPEND_SLASH=True, DEBUG=True)
    def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self):
        """
        Tests that while in debug mode, an exception is raised with a warning
        when a failed attempt is made to POST to an URL which would normally be
        redirected to a slashed version.
        """
        request = self.rf.get('/customurlconf/slash')
        request.urlconf = 'middleware.extra_urls'
        request.method = 'POST'
        with six.assertRaisesRegex(self, RuntimeError, 'end in a slash'):
            CommonMiddleware().process_request(request)

    @override_settings(APPEND_SLASH=False)
    def test_append_slash_disabled_custom_urlconf(self):
        """
        Disabling append slash functionality should leave slashless URLs alone.
        """
        request = self.rf.get('/customurlconf/slash')
        request.urlconf = 'middleware.extra_urls'
        self.assertEqual(CommonMiddleware().process_request(request), None)

    @override_settings(APPEND_SLASH=True)
    def test_append_slash_quoted_custom_urlconf(self):
        """
        URLs which require quoting should be redirected to their slash version ok.
        """
        request = self.rf.get(quote('/customurlconf/needsquoting#'))
        request.urlconf = 'middleware.extra_urls'
        r = CommonMiddleware().process_request(request)
        self.assertIsNotNone(r,
            "CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
        self.assertEqual(r.status_code, 301)
        self.assertEqual(
            r.url,
            '/customurlconf/needsquoting%23/')

    @override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
    def test_prepend_www_custom_urlconf(self):
        request = self.rf.get('/customurlconf/path/')
        request.urlconf = 'middleware.extra_urls'
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(
            r.url,
            'http://www.testserver/customurlconf/path/')

    @override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
    def test_prepend_www_append_slash_have_slash_custom_urlconf(self):
        request = self.rf.get('/customurlconf/slash/')
        request.urlconf = 'middleware.extra_urls'
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url,
                         'http://www.testserver/customurlconf/slash/')

    @override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
    def test_prepend_www_append_slash_slashless_custom_urlconf(self):
        request = self.rf.get('/customurlconf/slash')
        request.urlconf = 'middleware.extra_urls'
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url,
                         'http://www.testserver/customurlconf/slash/')

    # Other tests

    @override_settings(DISALLOWED_USER_AGENTS=[re.compile(r'foo')])
    def test_disallowed_user_agents(self):
        with patch_logger('django.request', 'warning') as log_messages:
            request = self.rf.get('/slash')
            request.META['HTTP_USER_AGENT'] = 'foo'
            r = CommonMiddleware().process_request(request)
            self.assertEqual(r.status_code, 403)
            self.assertEqual(log_messages, ['Forbidden (User agent): /slash'])

    def test_non_ascii_query_string_does_not_crash(self):
        """Regression test for #15152"""
        request = self.rf.get('/slash')
        request.META['QUERY_STRING'] = force_str('drink=café')
        response = CommonMiddleware().process_request(request)
        self.assertEqual(response.status_code, 301)

    def test_response_redirect_class(self):
        request = self.rf.get('/slash')
        r = CommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r.url, '/slash/')
        self.assertIsInstance(r, HttpResponsePermanentRedirect)

    def test_response_redirect_class_subclass(self):
        class MyCommonMiddleware(CommonMiddleware):
            response_redirect_class = HttpResponseRedirect

        request = self.rf.get('/slash')
        r = MyCommonMiddleware().process_request(request)
        self.assertEqual(r.status_code, 302)
        self.assertEqual(r.url, '/slash/')
        self.assertIsInstance(r, HttpResponseRedirect)


@override_settings(
    IGNORABLE_404_URLS=[re.compile(r'foo')],
    MANAGERS=['[email protected]'],
)
class BrokenLinkEmailsMiddlewareTest(TestCase):

    rf = RequestFactory()

    def setUp(self):
        self.req = self.rf.get('/regular_url/that/does/not/exist')
        self.resp = self.client.get(self.req.path)

    def test_404_error_reporting(self):
        self.req.META['HTTP_REFERER'] = '/another/url/'
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('Broken', mail.outbox[0].subject)

    def test_404_error_reporting_no_referer(self):
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 0)

    def test_404_error_reporting_ignored_url(self):
        self.req.path = self.req.path_info = 'foo_url/that/does/not/exist'
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 0)

    @skipIf(six.PY3, "HTTP_REFERER is str type on Python 3")
    def test_404_error_nonascii_referrer(self):
        # Such referer strings should not happen, but anyway, if it happens,
        # let's not crash
        self.req.META['HTTP_REFERER'] = b'http://testserver/c/\xd0\xbb\xd0\xb8/'
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 1)

    @skipIf(six.PY3, "HTTP_USER_AGENT is str type on Python 3")
    def test_404_error_nonascii_user_agent(self):
        # Such user agent strings should not happen, but anyway, if it happens,
        # let's not crash
        self.req.META['HTTP_REFERER'] = '/another/url/'
        self.req.META['HTTP_USER_AGENT'] = b'\xd0\xbb\xd0\xb8\xff\xff'
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('User agent: \u043b\u0438\ufffd\ufffd\n', mail.outbox[0].body)

    def test_custom_request_checker(self):
        class SubclassedMiddleware(BrokenLinkEmailsMiddleware):
            ignored_user_agent_patterns = (re.compile(r'Spider.*'),
                                           re.compile(r'Robot.*'))

            def is_ignorable_request(self, request, uri, domain, referer):
                '''Check user-agent in addition to normal checks.'''
                if super(SubclassedMiddleware, self).is_ignorable_request(request, uri, domain, referer):
                    return True
                user_agent = request.META['HTTP_USER_AGENT']
                return any(pattern.search(user_agent) for pattern in
                           self.ignored_user_agent_patterns)

        self.req.META['HTTP_REFERER'] = '/another/url/'
        self.req.META['HTTP_USER_AGENT'] = 'Spider machine 3.4'
        SubclassedMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 0)
        self.req.META['HTTP_USER_AGENT'] = 'My user agent'
        SubclassedMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 1)


@override_settings(ROOT_URLCONF='middleware.cond_get_urls')
class ConditionalGetMiddlewareTest(TestCase):

    def setUp(self):
        self.req = RequestFactory().get('/')
        self.resp = self.client.get(self.req.path_info)

    # Tests for the Date header

    def test_date_header_added(self):
        self.assertNotIn('Date', self.resp)
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertIn('Date', self.resp)

    # Tests for the Content-Length header

    def test_content_length_header_added(self):
        content_length = len(self.resp.content)
        self.assertNotIn('Content-Length', self.resp)
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertIn('Content-Length', self.resp)
        self.assertEqual(int(self.resp['Content-Length']), content_length)

    def test_content_length_header_not_added(self):
        resp = StreamingHttpResponse('content')
        self.assertNotIn('Content-Length', resp)
        resp = ConditionalGetMiddleware().process_response(self.req, resp)
        self.assertNotIn('Content-Length', resp)

    def test_content_length_header_not_changed(self):
        bad_content_length = len(self.resp.content) + 10
        self.resp['Content-Length'] = bad_content_length
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(int(self.resp['Content-Length']), bad_content_length)

    # Tests for the ETag header

    def test_if_none_match_and_no_etag(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_no_if_none_match_and_etag(self):
        self.resp['ETag'] = 'eggs'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_none_match_and_same_etag(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 304)

    def test_if_none_match_and_different_etag(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
        self.resp['ETag'] = 'eggs'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_none_match_and_redirect(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
        self.resp['Location'] = '/'
        self.resp.status_code = 301
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 301)

    def test_if_none_match_and_client_error(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
        self.resp.status_code = 400
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 400)

    @override_settings(USE_ETAGS=True)
    def test_etag(self):
        req = HttpRequest()
        res = HttpResponse('content')
        self.assertTrue(
            CommonMiddleware().process_response(req, res).has_header('ETag'))

    @override_settings(USE_ETAGS=True)
    def test_etag_streaming_response(self):
        req = HttpRequest()
        res = StreamingHttpResponse(['content'])
        res['ETag'] = 'tomatoes'
        self.assertEqual(
            CommonMiddleware().process_response(req, res).get('ETag'),
            'tomatoes')

    @override_settings(USE_ETAGS=True)
    def test_no_etag_streaming_response(self):
        req = HttpRequest()
        res = StreamingHttpResponse(['content'])
        self.assertFalse(
            CommonMiddleware().process_response(req, res).has_header('ETag'))

    # Tests for the Last-Modified header

    def test_if_modified_since_and_no_last_modified(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_no_if_modified_since_and_last_modified(self):
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_modified_since_and_same_last_modified(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 304)

    def test_if_modified_since_and_last_modified_in_the_past(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 304)

    def test_if_modified_since_and_last_modified_in_the_future(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:41:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_modified_since_and_redirect(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp['Location'] = '/'
        self.resp.status_code = 301
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 301)

    def test_if_modified_since_and_client_error(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp.status_code = 400
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 400)


class XFrameOptionsMiddlewareTest(TestCase):
    """
    Tests for the X-Frame-Options clickjacking prevention middleware.
    """

    def test_same_origin(self):
        """
        Tests that the X_FRAME_OPTIONS setting can be set to SAMEORIGIN to
        have the middleware use that value for the HTTP header.
        """
        with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

        with override_settings(X_FRAME_OPTIONS='sameorigin'):
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

    def test_deny(self):
        """
        Tests that the X_FRAME_OPTIONS setting can be set to DENY to
        have the middleware use that value for the HTTP header.
        """
        with override_settings(X_FRAME_OPTIONS='DENY'):
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'DENY')

        with override_settings(X_FRAME_OPTIONS='deny'):
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'DENY')

    def test_defaults_sameorigin(self):
        """
        Tests that if the X_FRAME_OPTIONS setting is not set then it defaults
        to SAMEORIGIN.
        """
        with override_settings(X_FRAME_OPTIONS=None):
            del settings.X_FRAME_OPTIONS  # restored by override_settings
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

    def test_dont_set_if_set(self):
        """
        Tests that if the X-Frame-Options header is already set then the
        middleware does not attempt to override it.
        """
        with override_settings(X_FRAME_OPTIONS='DENY'):
            response = HttpResponse()
            response['X-Frame-Options'] = 'SAMEORIGIN'
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           response)
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

        with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
            response = HttpResponse()
            response['X-Frame-Options'] = 'DENY'
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           response)
            self.assertEqual(r['X-Frame-Options'], 'DENY')

    def test_response_exempt(self):
        """
        Tests that if the response has a xframe_options_exempt attribute set
        to False then it still sets the header, but if it's set to True then
        it does not.
        """
        with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
            response = HttpResponse()
            response.xframe_options_exempt = False
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           response)
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

            response = HttpResponse()
            response.xframe_options_exempt = True
            r = XFrameOptionsMiddleware().process_response(HttpRequest(),
                                                           response)
            self.assertEqual(r.get('X-Frame-Options', None), None)

    def test_is_extendable(self):
        """
        Tests that the XFrameOptionsMiddleware method that determines the
        X-Frame-Options header value can be overridden based on something in
        the request or response.
        """
        class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware):
            # This is just an example for testing purposes...
            def get_xframe_options_value(self, request, response):
                if getattr(request, 'sameorigin', False):
                    return 'SAMEORIGIN'
                if getattr(response, 'sameorigin', False):
                    return 'SAMEORIGIN'
                return 'DENY'

        with override_settings(X_FRAME_OPTIONS='DENY'):
            response = HttpResponse()
            response.sameorigin = True
            r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(),
                                                                response)
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

            request = HttpRequest()
            request.sameorigin = True
            r = OtherXFrameOptionsMiddleware().process_response(request,
                                                                HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')

        with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
            r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(),
                                                                HttpResponse())
            self.assertEqual(r['X-Frame-Options'], 'DENY')


class GZipMiddlewareTest(TestCase):
    """
    Tests the GZip middleware.
    """
    short_string = b"This string is too short to be worth compressing."
    compressible_string = b'a' * 500
    uncompressible_string = b''.join(six.int2byte(random.randint(0, 255)) for _ in range(500))
    sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
    sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300]

    def setUp(self):
        self.req = RequestFactory().get('/')
        self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
        self.req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
        self.resp = HttpResponse()
        self.resp.status_code = 200
        self.resp.content = self.compressible_string
        self.resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp = StreamingHttpResponse(self.sequence)
        self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode)
        self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8'

    @staticmethod
    def decompress(gzipped_string):
        return gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)).read()

    def test_compress_response(self):
        """
        Tests that compression is performed on responses with compressible content.
        """
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertEqual(r.get('Content-Length'), str(len(r.content)))

    def test_compress_streaming_response(self):
        """
        Tests that compression is performed on responses with streaming content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp)
        self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_streaming_response_unicode(self):
        """
        Tests that compression is performed on responses with streaming Unicode content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp_unicode)
        self.assertEqual(self.decompress(b''.join(r)), b''.join(x.encode('utf-8') for x in self.sequence_unicode))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_file_response(self):
        """
        Tests that compression is performed on FileResponse.
        """
        open_file = lambda: open(__file__, 'rb')
        with open_file() as file1:
            file_resp = FileResponse(file1)
            file_resp['Content-Type'] = 'text/html; charset=UTF-8'
            r = GZipMiddleware().process_response(self.req, file_resp)
            with open_file() as file2:
                self.assertEqual(self.decompress(b''.join(r)), file2.read())
            self.assertEqual(r.get('Content-Encoding'), 'gzip')
            self.assertIsNot(r.file_to_stream, file1)

    def test_compress_non_200_response(self):
        """
        Tests that compression is performed on responses with a status other than 200.
        See #10762.
        """
        self.resp.status_code = 404
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')

    def test_no_compress_short_response(self):
        """
        Tests that compression isn't performed on responses with short content.
        """
        self.resp.content = self.short_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.short_string)
        self.assertEqual(r.get('Content-Encoding'), None)

    def test_no_compress_compressed_response(self):
        """
        Tests that compression isn't performed on responses that are already compressed.
        """
        self.resp['Content-Encoding'] = 'deflate'
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'deflate')

    def test_no_compress_uncompressible_response(self):
        """
        Tests that compression isn't performed on responses with uncompressible content.
        """
        self.resp.content = self.uncompressible_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.uncompressible_string)
        self.assertEqual(r.get('Content-Encoding'), None)


@override_settings(USE_ETAGS=True)
class ETagGZipMiddlewareTest(TestCase):
    """
    Tests if the ETag middleware behaves correctly with GZip middleware.
    """
    rf = RequestFactory()
    compressible_string = b'a' * 500

    def test_compress_response(self):
        """
        Tests that ETag is changed after gzip compression is performed.
        """
        request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
        response = GZipMiddleware().process_response(
            request,
            CommonMiddleware().process_response(request,
                                                HttpResponse(self.compressible_string)))
        gzip_etag = response.get('ETag')

        request = self.rf.get('/', HTTP_ACCEPT_ENCODING='')
        response = GZipMiddleware().process_response(
            request,
            CommonMiddleware().process_response(request,
                                                HttpResponse(self.compressible_string)))
        nogzip_etag = response.get('ETag')

        self.assertNotEqual(gzip_etag, nogzip_etag)
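
# A minimal sketch (not part of Django's test suite) of the call pattern the
# tests above exercise; names follow the imports in this file:
#
#   request = RequestFactory().get('/slash', HTTP_ACCEPT_ENCODING='gzip')
#   redirect = CommonMiddleware().process_request(request)     # None or a 301
#   response = GZipMiddleware().process_response(request, HttpResponse(b'a' * 500))
#   assert response['Content-Encoding'] == 'gzip'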


# === repo: edusegzy/pychemqt | path: lib/reaction.py | license: gpl-3.0 ===
#!/usr/bin/python
# -*- coding: utf-8 -*-

###############################################################################
# Module to define chemical reaction functionality
###############################################################################

from math import exp, log
import sqlite3

from numpy import polyval
from scipy.optimize import fsolve
from PyQt4.QtGui import QApplication

from lib import unidades
from lib.sql import databank_name


class Reaction(object):
    """Chemical reaction object"""

    status = 0
    msg = QApplication.translate("pychemqt", "undefined")
    error = 0

    kwargs = {"comp": [],
              "coef": [],

              "tipo": 0,
              "fase": 0,
              "key": 0,
              "base": 0,
              "customHr": False,
              "Hr": 0.0,
              "formula": False,
              "conversion": None,
              "keq": None}

    kwargsValue = ("Hr",)
    kwargsList = ("tipo", "fase", "key", "base")
    kwargsCheck = ("customHr", "formula")
    calculateValue = ("DeltaP", "DeltaP_f", "DeltaP_ac", "DeltaP_h",
                      "DeltaP_v", "DeltaP_100ft", "V", "f", "Re", "Tout")

    TEXT_TYPE = [QApplication.translate("pychemqt", "Estequiometric"),
                 QApplication.translate("pychemqt", "Equilibrium"),
                 QApplication.translate("pychemqt", "Kinetic"),
                 QApplication.translate("pychemqt", "Catalitic")]
    TEXT_PHASE = [QApplication.translate("pychemqt", "Global"),
                  QApplication.translate("pychemqt", "Liquid"),
                  QApplication.translate("pychemqt", "Gas")]
    TEXT_BASE = [QApplication.translate("pychemqt", "Mole"),
                 QApplication.translate("pychemqt", "Mass"),
                 QApplication.translate("pychemqt", "Partial pressure")]

    def __init__(self, **kwargs):
        """Constructor, kwargs keys can be:
            comp: array with index of reaction components
            coef: array with the stoichiometric coefficient for each component
            fase: phase where the reaction takes place
                0 - Global
                1 - Liquid
                2 - Gas
            key: index of key component
            base:
                0 - Mole
                1 - Mass
                2 - Partial pressure
            Hr: heat of reaction; calculated from heats of formation if not given
            formula: boolean to show compound names in formulas
            tipo: kind of reaction
                0 - Stoichiometric, without equilibrium or kinetic calculations
                1 - Equilibrium, without kinetic calculation
                2 - Equilibrium by minimization of Gibbs free energy
                3 - Kinetic
                4 - Catalytic
            conversion: conversion value for reaction with tipo=0
            keq: equilibrium constant for reaction with tipo=1
                - a float if it doesn't depend on temperature
                - an array if it depends on temperature
        """
        self.kwargs = Reaction.kwargs.copy()
        if kwargs:
            self.__call__(**kwargs)

    def __call__(self, **kwargs):
        oldkwargs = self.kwargs.copy()
        self.kwargs.update(kwargs)

        if oldkwargs != self.kwargs and self.isCalculable:
            self.calculo()

    @property
    def isCalculable(self):
        self.msg = ""
        self.status = 1
        if not self.kwargs["comp"]:
            self.msg = QApplication.translate("pychemqt", "undefined components")
            self.status = 0
            return
        if not self.kwargs["coef"]:
            self.msg = QApplication.translate("pychemqt", "undefined stequiometric")
            self.status = 0
            return
        if self.kwargs["tipo"] == 0:
            if self.kwargs["conversion"] is None:
                self.msg = QApplication.translate("pychemqt", "undefined conversion")
                self.status = 3
        elif self.kwargs["tipo"] == 1:
            if self.kwargs["keq"] is None:
                self.msg = QApplication.translate("pychemqt", "undefined equilibrium constants")
                self.status = 3
        elif self.kwargs["tipo"] == 2:
            pass
        elif self.kwargs["tipo"] == 3:
            pass

        return True

    def calculo(self):
        self.componentes = self.kwargs["comp"]
        self.coef = self.kwargs["coef"]

        self.tipo = self.kwargs["tipo"]
        self.base = self.kwargs["base"]
        self.fase = self.kwargs["fase"]
        self.calor = self.kwargs["Hr"]
        self.formulas = self.kwargs["formula"]
        self.keq = self.kwargs["keq"]

        databank = sqlite3.connect(databank_name).cursor()
        databank.execute("select nombre, peso_molecular, formula, \
            calor_formacion_gas from compuestos where id IN \
            %s" % str(tuple(self.componentes)))
        nombre = []
        peso_molecular = []
        formula = []
        calor_reaccion = 0
        check_estequiometria = 0
        for i, compuesto in enumerate(databank):
            nombre.append(compuesto[0])
            peso_molecular.append(compuesto[1])
            formula.append(compuesto[2])
            calor_reaccion += compuesto[3]*self.coef[i]
            check_estequiometria += self.coef[i]*compuesto[1]
        self.nombre = nombre
        self.peso_molecular = peso_molecular
        self.formula = formula

        if self.calor:
            self.Hr = self.kwargs.get("Hr", 0)
        else:
            self.Hr = unidades.MolarEnthalpy(calor_reaccion/abs(
                self.coef[self.base]), "Jkmol")

        self.error = round(check_estequiometria, 1)
        self.state = self.error == 0
        self.text = self._txt(self.formulas)

    def conversion(self, corriente, T):
        """Calculate reaction conversion

        corriente: Corriente instance for reaction
        T: Temperature of reaction"""
        if self.tipo == 0:
            # Material balance without equilibrium or kinetics considerations
            alfa = self.kwargs["conversion"]

        elif self.tipo == 1:
            # Chemical equilibrium without kinetics
            if isinstance(self.keq, list):
                A, B, C, D, E, F, G, H = self.keq
                keq = exp(A+B/T+C*log(T)+D*T+E*T**2+F*T**3+G*T**4+H*T**5)
            else:
                keq = self.keq

            def f(alfa):
                conc_out = [
                    (corriente.caudalunitariomolar[i]+alfa*self.coef[i])
                    / corriente.Q.m3h for i in range(len(self.componentes))]
                productorio = 1
                for i in range(len(self.componentes)):
                    productorio *= conc_out[i]**self.coef[i]
                return keq-productorio

            alfa = fsolve(f, 0.5)
            print alfa, f(alfa)

        avance = alfa*self.coef[self.base]*corriente.caudalunitariomolar[self.base]
        Q_out = [corriente.caudalunitariomolar[i]+avance*self.coef[i] /
                 self.coef[self.base] for i in range(len(self.componentes))]
        minimo = min(Q_out)
        if minimo < 0:
            # The key component is not correct, redo the result
            indice = Q_out.index(minimo)
            avance = self.coef[indice]*corriente.caudalunitariomolar[indice]
            Q_out = [corriente.caudalunitariomolar[i]+avance*self.coef[i] /
                     self.coef[indice] for i in range(len(self.componentes))]
            h = unidades.Power(self.Hr*self.coef[self.base] /
                               self.coef[indice]*avance, "Jh")
        else:
            h = unidades.Power(self.Hr*avance, "Jh")

        print alfa, avance
        caudal = sum(Q_out)
        fraccion = [caudal_i/caudal for caudal_i in Q_out]
        return fraccion, h
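
    # Worked illustration of the equilibrium branch above (values invented,
    # not from pychemqt): for A <-> 2B with keq = 4, f(alfa) is the residual
    #   keq - c_B**2 / c_A,   where c_i = (n_i0 + alfa*coef_i) / Q
    # and fsolve returns the molar conversion alfa that zeroes it.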

#    def cinetica(self, tipo, Ko, Ei):
#        """Method defining the reaction rate"""

    def _txt(self, nombre=False):
        """Function to get text representation for reaction"""
        if nombre:
            txt = self.nombre
        else:
            txt = self.formula

        reactivos = []
        productos = []
        for i in range(len(self.componentes)):
            if self.coef[i] == int(self.coef[i]):
                self.coef[i] = int(self.coef[i])
            if self.coef[i] < -1:
                reactivos.append(str(-self.coef[i])+txt[i])
            elif self.coef[i] == -1:
                reactivos.append(txt[i])
            elif -1 < self.coef[i] < 0:
                reactivos.append(str(-self.coef[i])+txt[i])
            elif 0 < self.coef[i] < 1:
                productos.append(str(self.coef[i])+txt[i])
            elif self.coef[i] == 1:
                productos.append(txt[i])
            elif self.coef[i] > 1:
                productos.append(str(self.coef[i])+txt[i])

        return " + ".join(reactivos)+" ---> "+" + ".join(productos)

    def __repr__(self):
        if self.status:
            eq = self._txt()
            return eq + " " + "Hr= %0.4e Jkmol" % self.Hr
        else:
            return str(self.msg)


if __name__ == "__main__":
#    from lib.corriente import Corriente, Mezcla
#    mezcla = Corriente(300, 1, 1000, Mezcla([1, 46, 47, 62], [0.03, 0.01, 0.96, 0]))
#    reaccion = Reaction([1, 46, 47, 62], [-2, 0, -1, 2], base=2)
#    reaccion.conversion(mezcla)
#    print reaccion

    reaccion = Reaction(comp=[1, 47, 62], coef=[-2, -1, 2])
    print reaccion


# === repo: jqug/microscopy-object-detection | path: readdata.py | license: mit ===
import glob
import os

import numpy as np
import skimage
from lxml import etree
from scipy import misc
from skimage import io
from sklearn.cross_validation import train_test_split

from progress_bar import ProgressBar


def create_sets(img_dir, train_set_proportion=.6, test_set_proportion=.2, val_set_proportion=.2):
    '''Split a list of image files up into training, testing and validation sets.'''
    imgfilenames = glob.glob(img_dir + '*.jpg')
    baseimgfilenames = [os.path.basename(f) for f in imgfilenames]

    if train_set_proportion + test_set_proportion < 1:
        train, val = train_test_split(np.arange(len(baseimgfilenames)),
                                      train_size=train_set_proportion+test_set_proportion,
                                      test_size=val_set_proportion,
                                      random_state=1)
    else:
        train = np.arange(len(baseimgfilenames))
        val = []

    train_test_prop = train_set_proportion + test_set_proportion
    train, test = train_test_split(train,
                                   train_size=train_set_proportion/train_test_prop,
                                   test_size=test_set_proportion/train_test_prop,
                                   random_state=1)

    trainfiles = [baseimgfilenames[i] for i in train]
    testfiles = [baseimgfilenames[i] for i in test]
    valfiles = [baseimgfilenames[i] for i in val]

    return trainfiles, valfiles, testfiles
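
# Illustrative call (the directory name is hypothetical):
#   train, val, test = create_sets('images/', .6, .2, .2)
# returns three lists of image basenames, split 60/20/20 with a fixed seed.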

def get_patch_labels_for_single_image(img_filename, image_dir, annotation_dir, size, step, width, height, objectclass=None):
    '''
    Read the XML annotation files to get the labels of each patch for a
    given image. The labels are 0 if there is no object in the corresponding
    patch, and 1 if an object is present.
    '''
    annotation_filename = annotation_dir + img_filename[:-3] + 'xml'
    boundingboxes = get_bounding_boxes_for_single_image(annotation_filename, objectclass=objectclass)

    # Scan through patch locations in the image
    labels = []
    y = (height-(height/step)*step)/2
    while y+(size) < height:
        # rows
        x = (width-(width/step)*step)/2
        while (x+(size) < width):
            objecthere = 0
            for bb in boundingboxes:
                margin = 0
                xmin = bb[0] + margin
                xmax = bb[1] - margin
                ymin = bb[2] + margin
                ymax = bb[3] - margin
                cx = x + size/2
                cy = y + size/2

                if (cx > xmin and cx < xmax and cy > ymin and cy < ymax):
                    objecthere = 1
                    break

            # Output the details for this patch
            labels.append(objecthere)
            x += step
        y += step

    return np.array(labels)

# http://codereview.stackexchange.com/questions/31352/overlapping-rectangles
def range_overlap(a_min, a_max, b_min, b_max):
    '''Neither range is completely greater than the other
    '''
    return (a_min <= b_max) and (b_min <= a_max)


def overlap(r1, r2):
    '''Overlapping rectangles overlap both horizontally & vertically
    '''
    return range_overlap(r1[0], r1[1], r2[0], r2[1]) and range_overlap(r1[2], r1[3], r2[2], r2[3])


def get_image_negatives(img, boundingboxes, size, step, grayscale=False, downsample=1, discard_rate=0.9):
    '''Negative-labelled patches, taken at random from any part of the image
    not overlapping an annotated bounding box.

    Since there are typically many potential negative patches in each image,
    only the proportion 1-discard_rate of negative patches are stored.'''
    c, height, width = img.shape

    patches_per_img = 0
    # lazy way to count how many patches we can take
    max_y = 0
    while max_y+(size) < height:
        max_x = 0
        while max_x+(size) < width:
            patches_per_img += 1
            max_x += step
        max_y += step
    max_x /= step
    max_y /= step

    neg = []
    y = (height-(max_y * step))/2
    while y+(size) < height:
        # rows
        x = (width-(max_x * step))/2
        while (x+(size) < width):
            if np.random.rand() > discard_rate:
                left = x
                right = x+(size)
                top = y
                bottom = y+(size)
                is_pos = False
                for bb in boundingboxes:
                    if overlap([left, right, top, bottom], bb):
                        is_pos = True
                        break
                if not is_pos:
                    patch = img[:, top:bottom:downsample, left:right:downsample]
                    neg.append(patch.copy())  # without copy seems to leak memory
            x += step
        y += step

    return neg


def get_image_positives(img, boundingboxes, size, downsample=1):
    '''Positive-labelled patches, centred on annotated bounding boxes.'''
    pos = []
    for bb in boundingboxes:
        cy = (bb[0] + (bb[1]-bb[0])/2)
        cx = (bb[2] + (bb[3]-bb[2])/2)
        patch = img[..., cx-size/2:cx+size/2, cy-size/2:cy+size/2]
        s = patch.shape
        if s[1] < size or s[2] < size:
            continue
        patch = patch[:, ::downsample, ::downsample]
        pos.append(patch.copy())

    return pos


def create_patches(img_basenames, annotation_dir, image_dir, size, step, grayscale=True, progressbar=True, downsample=1, objectclass=None, negative_discard_rate=.9):
    '''Extract a set of image patches with labels, from the supplied list of
    annotated images. Positive-labelled patches are extracted centered on the
    annotated bounding box; negative-labelled patches are extracted at random
    from any part of the image which does not overlap an annotated bounding box.'''
    if progressbar:
        pb = ProgressBar(len(img_basenames))
    if not annotation_dir[-1] == os.path.sep:
        annotation_dir = annotation_dir + os.path.sep
    if not image_dir[-1] == os.path.sep:
        image_dir = image_dir + os.path.sep

    color_type = 0
    if grayscale:
        channels = 1
    else:
        channels = 3

    pos = []
    neg = []
    s = 1
    for img_filename in img_basenames:
        if progressbar:
            pb.step(s)
        s += 1
        annotation_filename = annotation_dir + img_filename[:-3] + 'xml'
        boundingboxes = get_bounding_boxes_for_single_image(annotation_filename, objectclass)
        # colortype = cv2.IMREAD_COLOR
        # img = cv2.imread(image_dir + img_filename, colortype)
        img = misc.imread(image_dir + img_filename)
        height, width, channels = img.shape
        img = img.reshape((height, width, channels))
        img = np.rollaxis(img, 2)

        image_pos = get_image_positives(img, boundingboxes, size, downsample=downsample)
        pos.append(image_pos)

        image_neg = get_image_negatives(img, boundingboxes, size, step, downsample=downsample, discard_rate=negative_discard_rate)
        neg.append(image_neg)

    pos = [item for sublist in pos for item in sublist]
    neg = [item for sublist in neg for item in sublist]
    patches = pos+neg

    index = np.arange(len(patches))
    np.random.seed(0)
    np.random.shuffle(index)

    np_patches = np.empty((len(patches), channels, size/downsample, size/downsample), dtype=np.uint8)
    np_labels = np.empty(len(patches), dtype=int)
    max_pos = len(pos)
    for i, j in zip(index, xrange(len(index))):
        if i < max_pos:
            np_patches[j, ] = pos[i]
            np_labels[j] = 1
        else:
            np_patches[j, ] = neg[i-max_pos]
            np_labels[j] = 0

    np_labels = np_labels.astype(np.uint8)
    return np_labels, np_patches


def balance(X, y, mult_neg=10):
    '''Returns an array with all the positive samples and as many negatives as
    mult_neg*npos'''
    np.random.seed(0)
    neg = np.where(y == 0)[0]
    neg_count = len(neg)
    pos = np.where(y == 1)[0]
    pos_count = len(pos)

    np.random.shuffle(neg)
    neg = neg[0:pos_count*mult_neg]

    index = np.concatenate((pos, neg))
    np.random.shuffle(index)

    y = y.take(index)
    X = X.take(index, axis=0)

    return X, y
def augment(X,y):
'''Create rotated and flipped versions of all patches.'''
shape = X.shape
num_org=shape[0]
shape = (shape[0]*8, shape[1], shape[2], shape[3])
aug_X = np.empty(shape,dtype=np.uint8)
aug_y = np.empty(shape[0],dtype=int)
new_patch_order = np.arange(shape[0])
np.random.shuffle(new_patch_order)
    for j, i in enumerate(new_patch_order):
        orig_patch = i // 8   # which original patch this variant derives from
        rot_n = i % 4         # number of 90-degree rotations to apply
        do_flip = i % 8 > 3   # whether this variant is also flipped
        x = np.rollaxis(X[orig_patch], 0, 3)
        if do_flip:
            x = np.flipud(x)
        x = np.rot90(x, rot_n)
        rot_X = np.rollaxis(x, 2)
        aug_X[j,] = rot_X
        aug_y[j] = y[orig_patch]
aug_y = aug_y.astype('uint8')
return aug_X,aug_y
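# Each input patch yields 8 variants: 4 rotations (0/90/180/270 degrees),
# each with and without a vertical flip. Usage sketch (illustrative):
#
#   aug_X, aug_y = augment(X_bal, y_bal)   # aug_X holds 8x as many patches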
def augment_positives(X,y):
'''Create rotated and flipped versions of only the positive-labelled
patches.'''
pos_indices = np.where(y)[0]
neg_indices = np.where(y==0)[0]
aug_X_pos, aug_y_pos = augment(X[pos_indices,], y[pos_indices])
aug_X = np.vstack((aug_X_pos, X[neg_indices,]))
aug_y = np.hstack((aug_y_pos, y[neg_indices]))
new_order = np.random.permutation(aug_y.shape[0])
aug_X = aug_X[new_order,]
aug_y = aug_y[new_order]
aug_y = aug_y.astype('uint8')
return aug_X, aug_y
def get_bounding_boxes_for_single_image(filename, objectclass=None):
'''
Given an annotation XML filename, get a list of the bounding boxes around
each object (the ground truth object locations).
'''
    file_exists = os.path.exists(filename)
boundingboxes = []
if (file_exists):
# Read the bounding boxes from xml annotation
tree = etree.parse(filename)
r = tree.xpath('//bndbox')
if (len(r) != 0):
for i in range(len(r)):
                if (objectclass is None) or (objectclass in r[i].getparent().xpath('label')[0].text.lower()):
xmin = round(float(r[i].xpath('xmin')[0].text))
xmin = max(xmin,1)
xmax = round(float(r[i].xpath('xmax')[0].text))
ymin = round(float(r[i].xpath('ymin')[0].text))
ymin = max(ymin,1)
ymax = round(float(r[i].xpath('ymax')[0].text))
xmin, xmax, ymin, ymax = int(xmin),int(xmax),int(ymin),int(ymax)
boundingboxes.append((xmin,xmax,ymin,ymax))
if len(boundingboxes) == 0:
return np.array([])
return np.vstack(boundingboxes)
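# The parser above reads annotations where each object element has a <label>
# child and a <bndbox> child. A minimal sketch of such a file (the outer
# element names are assumptions; only 'label' and 'bndbox' are consulted):
#
#   <annotation>
#     <object>
#       <label>cell</label>
#       <bndbox>
#         <xmin>10</xmin><xmax>42</xmax>
#         <ymin>5</ymin><ymax>37</ymax>
#       </bndbox>
#     </object>
#   </annotation>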
| mit | -5,541,153,820,586,001,000 | 32.736508 | 165 | 0.580973 | false |
keszybz/gnuplot-py | gp_unix.py | 1 | 8276 | # $Id$
# Copyright (C) 1998-2003 Michael Haggerty <[email protected]>
#
# This file is licensed under the GNU Lesser General Public License
# (LGPL). See LICENSE.txt for details.
"""gp_unix -- an interface to gnuplot used for unix platforms.
This file implements a low-level interface to a gnuplot program for a
unix platform (actually it is used for any non-Windows, non-Mac
system). This file should be imported through gp.py, which in turn
should be imported via 'import Gnuplot' rather than these low-level
interfaces.
"""
# ############ Configuration variables: ################################
class GnuplotOpts:
"""The configuration options for gnuplot on generic platforms.
Store the options in a class to make them easy to import and
modify en masse. If you want to modify the options from the
command line or within a running program, do something like the
following::
import Gnuplot
Gnuplot.GnuplotOpts.gnuplot_command = '/bin/mygnuplot'
"""
# Command to start up the gnuplot program. If your version of
# gnuplot is run otherwise, specify the correct command here. You
# could also specify a full path or append command-line options
# here if you wish.
gnuplot_command = 'gnuplot'
# Recent versions of gnuplot (at least for Xwindows) allow a
# `-persist' command-line option when starting up gnuplot. When
# this option is specified, graph windows remain on the screen
# even after you quit gnuplot (type `q' in the window to close
# it). This can be handy but unfortunately it is not supported by
# older versions of gnuplot. The following configuration variable
# specifies whether the user's version of gnuplot recognizes this
# option or not. You can set this variable to 1 (supports
# -persist) or 0 (doesn't support) yourself; if you leave it with
# the value None then the first time you create a Gnuplot object
# it will try to detect automatically whether your version accepts
# this option.
recognizes_persist = None # test automatically on first use
# What should be the default if the persist option is not
# specified explicitly?
prefer_persist = 0
# Recent versions of gnuplot allow you to specify a `binary'
# option to the splot command for grid data, which means that the
# data file is to be read in binary format. This option saves
# substantial time writing and reading the file, and can also save
# substantial disk space and therefore it is the default for that
# type of plot. But if you have an older version of gnuplot (or
# you prefer text format) you can disable the binary option in
# either of two ways: (a) set the following variable to 0; or (b)
# pass `binary=0' to the GridData constructor. (Note that the
# demo uses binary=0 to maximize portability.)
recognizes_binary_splot = 1
# Data can be passed to gnuplot through a temporary file or as
# inline data (i.e., the filename is set to '-' and the data is
# entered into the gnuplot interpreter followed by 'e'). If
# prefer_inline_data is true, then use the inline method as
# default whenever it is supported. This should be fast but will
# use more memory since currently the inline data is put into a
# big string when the PlotItem is created.
prefer_inline_data = 0
# Does Python implement the threading module and os.mkfifo on this
# operating system? If so, the _FIFOFileItem class will be
# defined in PlotItem.py.
support_fifo = 1
# Should FIFOs be used to send data to gnuplot by default?
prefer_fifo_data = 1
# After a hardcopy is produced, we have to set the terminal type
# back to `on screen' using gnuplot's `set terminal' command. The
# following is the usual setting for Xwindows. If it is wrong,
# change the following line to select the terminal type you prefer
# to use for on-screen work.
default_term = 'x11'
# Gnuplot can plot to a printer by using "set output '| ...'"
# where ... is the name of a program that sends its stdin to a
# printer, or by "set output 'printer_device', where
# 'printer_device is the name of a file-like interface to the
# printer. On my machine the appropriate program is `lpr', as set
# below. On your computer it may be something different (like
# `lp'); you can set that by changing the variable below. You can
# also add options to the print command if needed.
default_lpr = '| lpr'
# Enhanced postscript is an option to the postscript terminal
# driver that requests enhanced treatment of strings (for example,
# font changes, superscripts, and subscripts). Set to 1 to enable
# or 0 to disable. If you have a version of gnuplot earlier than
# 3.7, you should set this to None (*not* 0!) so that the option
# is not used at all.
prefer_enhanced_postscript = 1
# ############ End of configuration options ############################
from os import popen
def test_persist():
"""Determine whether gnuplot recognizes the option '-persist'.
If the configuration variable 'recognizes_persist' is set (i.e.,
to something other than 'None'), return that value. Otherwise,
try to determine whether the installed version of gnuplot
recognizes the -persist option. (If it doesn't, it should emit an
error message with '-persist' in the first line.) Then set
'recognizes_persist' accordingly for future reference.
"""
if GnuplotOpts.recognizes_persist is None:
g = popen('echo | %s -persist 2>&1' % GnuplotOpts.gnuplot_command, 'r')
response = g.readlines()
g.close()
GnuplotOpts.recognizes_persist = not (response and
'-persist' in response[0])
return GnuplotOpts.recognizes_persist
class GnuplotProcess:
"""Unsophisticated interface to a running gnuplot program.
This represents a running gnuplot program and the means to
communicate with it at a primitive level (i.e., pass it commands
or data). When the object is destroyed, the gnuplot program exits
(unless the 'persist' option was set). The communication is
one-way; gnuplot's text output just goes to stdout with no attempt
to check it for error messages.
Members:
'gnuplot' -- the pipe to the gnuplot command.
Methods:
'__init__' -- start up the program.
'__call__' -- pass an arbitrary string to the gnuplot program,
followed by a newline.
'write' -- pass an arbitrary string to the gnuplot program.
'flush' -- cause pending output to be written immediately.
'close' -- close the connection to gnuplot.
"""
def __init__(self, persist=None):
"""Start a gnuplot process.
Create a 'GnuplotProcess' object. This starts a gnuplot
program and prepares to write commands to it.
Keyword arguments:
'persist=1' -- start gnuplot with the '-persist' option,
(which leaves the plot window on the screen even after
the gnuplot program ends, and creates a new plot window
each time the terminal type is set to 'x11'). This
option is not available on older versions of gnuplot.
"""
if persist is None:
persist = GnuplotOpts.prefer_persist
if persist:
if not test_persist():
                # raising a bare string is invalid on modern Pythons;
                # wrap the message in an exception type
                raise RuntimeError('-persist does not seem to be supported '
                                   'by your version of gnuplot!')
self.gnuplot = popen('%s -persist' % GnuplotOpts.gnuplot_command,
'w')
else:
self.gnuplot = popen(GnuplotOpts.gnuplot_command, 'w')
# forward write and flush methods:
self.write = self.gnuplot.write
self.flush = self.gnuplot.flush
def close(self):
if self.gnuplot is not None:
self.gnuplot.close()
self.gnuplot = None
def __del__(self):
self.close()
def __call__(self, s):
"""Send a command string to gnuplot, followed by newline."""
self.write(s + '\n')
self.flush()
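# Minimal usage sketch (assumes a working gnuplot binary on the PATH;
# illustrative only):
#
#   gp = GnuplotProcess(persist=1)
#   gp('plot sin(x)')
#   gp.close()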
| lgpl-2.1 | -326,566,317,161,167,000 | 38.222749 | 79 | 0.661672 | false |
cherusk/ansible | lib/ansible/modules/cloud/vmware/vmware_guest.py | 10 | 58143 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_guest
short_description: Manages virtual machines in vcenter
description:
- Create new virtual machines (from templates or not)
- Power on/power off/restart a virtual machine
- Modify, rename or remove a virtual machine
version_added: 2.2
author:
- James Tanner (@jctanner) <[email protected]>
- Loic Blot (@nerzhul) <[email protected]>
notes:
- Tested on vSphere 5.5 and 6.0
requirements:
- "python >= 2.6"
- PyVmomi
options:
state:
description:
- What state should the virtual machine be in?
- If C(state) is set to C(present) and VM exists, ensure the VM configuration conforms to task arguments
required: True
choices: ['present', 'absent', 'poweredon', 'poweredoff', 'restarted', 'suspended', 'shutdownguest', 'rebootguest']
name:
description:
- Name of the VM to work with
required: True
name_match:
description:
      - If multiple VMs match the name, use the first or last one found
default: 'first'
choices: ['first', 'last']
uuid:
description:
- UUID of the instance to manage if known, this is VMware's unique identifier.
- This is required if name is not supplied.
template:
description:
- Template used to create VM.
- If this value is not set, VM is created without using a template.
- If the VM exists already this setting will be ignored.
is_template:
description:
- Flag the instance as a template
default: False
version_added: "2.3"
folder:
description:
- Destination folder, absolute path to find an existing guest or create the new guest
hardware:
description:
- "Manage some VM hardware attributes."
- "Valid attributes are: memory_mb, num_cpus and scsi"
- "scsi: Valid values are buslogic, lsilogic, lsilogicsas and paravirtual (default)"
guest_id:
description:
- "Set the guest ID (Debian, RHEL, Windows...)"
- "This field is required when creating a VM"
- >
Valid values are referenced here:
https://www.vmware.com/support/developer/converter-sdk/conv55_apireference/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html
version_added: "2.3"
disk:
description:
- "A list of disks to add"
- "Valid attributes are: size_[tb,gb,mb,kb], type, datastore and autoselect_datastore"
- "type: Valid value is thin (default: None)"
- "datastore: Datastore to use for the disk. If autoselect_datastore is True, filter datastore selection."
- "autoselect_datastore (bool): select the less used datastore."
resource_pool:
description:
      - Assign the machine to the given resource pool
      - The resource pool should be a child of the selected host's parent
default: None
version_added: "2.3"
wait_for_ip_address:
description:
- Wait until vCenter detects an IP address for the VM
- This requires vmware-tools (vmtoolsd) to properly work after creation
default: False
force:
description:
- Ignore warnings and complete the actions
datacenter:
description:
- Destination datacenter for the deploy operation
default: ha-datacenter
cluster:
description:
- The cluster name where the VM will run.
version_added: "2.3"
esxi_hostname:
description:
- The esxi hostname where the VM will run.
annotation:
description:
- A note or annotation to include in the VM
version_added: "2.3"
customvalues:
description:
- Define a list of customvalues to set on VM.
- "A customvalue object takes 2 fields 'key' and 'value'."
version_added: "2.3"
networks:
description:
- Network to use should include C(name) or C(vlan) entry
- Add an optional C(ip) and C(netmask) for network configuration
- Add an optional C(gateway) entry to configure a gateway
- Add an optional C(mac) entry to customize mac address
- Add an optional C(dns_servers) or C(domain) entry per interface (Windows)
- Add an optional C(device_type) to configure the virtual NIC (pcnet32, vmxnet2, vmxnet3, e1000, e1000e)
version_added: "2.3"
customization:
description:
- "Parameters to customize template"
- "Common parameters (Linux/Windows):"
- " C(dns_servers) (list): List of DNS servers to configure"
- " C(dns_suffix) (list): List of domain suffixes, aka DNS search path (default: C(domain) parameter)"
- " C(domain) (string): DNS domain name to use"
- " C(hostname) (string): Computer hostname (default: C(name) parameter)"
- "Parameters related to windows customization:"
- " C(autologon) (bool): Auto logon after VM customization (default: False)"
- " C(autologoncount) (int): Number of autologon after reboot (default: 1)"
- " C(domainadmin) (string): User used to join in AD domain (mandatory with joindomain)"
- " C(domainadminpassword) (string): Password used to join in AD domain (mandatory with joindomain)"
- " C(fullname) (string): Server owner name (default: Administrator)"
- " C(joindomain) (string): AD domain to join (Not compatible with C(joinworkgroup))"
- " C(joinworkgroup) (string): Workgroup to join (Not compatible with C(joindomain), default: WORKGROUP)"
- " C(orgname) (string): Organisation name (default: ACME)"
- " C(password) (string): Local administrator password (mandatory)"
- " C(productid) (string): Product ID"
- " C(runonce) (list): List of commands to run at first user logon"
- " C(timezone) (int): Timezone (default: 85) See U(https://msdn.microsoft.com/en-us/library/ms912391(v=winembedded.11).aspx)"
version_added: "2.3"
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Create a VM from a template
- name: create the VM
vmware_guest:
hostname: 192.0.2.44
username: [email protected]
password: vmware
validate_certs: no
esxi_hostname: 192.0.2.117
datacenter: datacenter1
folder: testvms
name: testvm_2
state: poweredon
guest_id: centos64guest
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: paravirtual
networks:
- name: VM Network
ip: 192.168.1.100
netmask: 255.255.255.0
mac: 'aa:bb:dd:aa:00:14'
template: template_el7
wait_for_ip_address: yes
delegate_to: localhost
register: deploy
# Clone a VM from Template and customize
- name: Clone template and customize
vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: no
datacenter: datacenter1
cluster: cluster
name: testvm-2
template: template_windows
networks:
- name: VM Network
ip: 192.168.1.100
netmask: 255.255.255.0
gateway: 192.168.1.1
mac: 'aa:bb:dd:aa:00:14'
domain: my_domain
dns_servers:
- 192.168.1.1
- 192.168.1.2
customization:
autologon: True
dns_servers:
- 192.168.1.1
- 192.168.1.2
domain: my_domain
password: new_vm_password
runonce:
- powershell.exe -ExecutionPolicy Unrestricted -File C:\Windows\Temp\Enable-WinRM.ps1 -ForceNewSSLCert
delegate_to: localhost
# Create a VM template
- name: create a VM template
vmware_guest:
hostname: 192.0.2.88
username: [email protected]
password: vmware
validate_certs: no
datacenter: datacenter1
cluster: vmware_cluster_esx
resource_pool: highperformance_pool
folder: testvms
name: testvm_6
is_template: yes
guest_id: debian6_64Guest
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: lsilogic
wait_for_ip_address: yes
delegate_to: localhost
register: deploy
# Rename a VM (requires the VM's uuid)
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
uuid: 421e4592-c069-924d-ce20-7e7533fab926
name: new_name
state: present
delegate_to: localhost
# Remove a VM by uuid
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
uuid: 421e4592-c069-924d-ce20-7e7533fab926
state: absent
delegate_to: localhost
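# Reconfigure an existing VM (a sketch with illustrative values; with
# state 'present' on an existing VM, the given configuration is applied)
- vmware_guest:
    hostname: 192.168.1.209
    username: [email protected]
    password: vmware
    validate_certs: no
    name: testvm-2
    state: present
    hardware:
      memory_mb: 1024
      num_cpus: 2
  delegate_to: localhost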
'''
RETURN = """
instance:
    description: metadata about the new virtual machine
returned: always
type: dict
sample: None
"""
import os
import time
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six import iteritems
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.vmware import get_all_objs, connect_to_api, gather_vm_facts
try:
import json
except ImportError:
import simplejson as json
HAS_PYVMOMI = False
try:
import pyVmomi
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
pass
class PyVmomiDeviceHelper(object):
""" This class is a helper to create easily VMWare Objects for PyVmomiHelper """
def __init__(self, module):
self.module = module
self.next_disk_unit_number = 0
@staticmethod
def create_scsi_controller(scsi_type):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
if scsi_type == 'lsilogic':
scsi_ctl.device = vim.vm.device.VirtualLsiLogicController()
elif scsi_type == 'paravirtual':
scsi_ctl.device = vim.vm.device.ParaVirtualSCSIController()
elif scsi_type == 'buslogic':
scsi_ctl.device = vim.vm.device.VirtualBusLogicController()
elif scsi_type == 'lsilogicsas':
scsi_ctl.device = vim.vm.device.VirtualLsiLogicSASController()
scsi_ctl.device.deviceInfo = vim.Description()
scsi_ctl.device.slotInfo = vim.vm.device.VirtualDevice.PciBusSlotInfo()
scsi_ctl.device.slotInfo.pciSlotNumber = 16
scsi_ctl.device.controllerKey = 100
scsi_ctl.device.unitNumber = 3
scsi_ctl.device.busNumber = 0
scsi_ctl.device.hotAddRemove = True
scsi_ctl.device.sharedBus = 'noSharing'
scsi_ctl.device.scsiCtlrUnitNumber = 7
return scsi_ctl
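    # Usage sketch (illustrative): build an 'add' spec for a paravirtual
    # controller, ready to append to a ConfigSpec's deviceChange list:
    #
    #   scsi_ctl = PyVmomiDeviceHelper.create_scsi_controller('paravirtual')
    #   configspec.deviceChange.append(scsi_ctl)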
@staticmethod
def is_scsi_controller(device):
return isinstance(device, vim.vm.device.VirtualLsiLogicController) or \
isinstance(device, vim.vm.device.ParaVirtualSCSIController) or \
isinstance(device, vim.vm.device.VirtualBusLogicController) or \
isinstance(device, vim.vm.device.VirtualLsiLogicSASController)
def create_scsi_disk(self, scsi_ctl, disk_index=None):
diskspec = vim.vm.device.VirtualDeviceSpec()
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
diskspec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.create
diskspec.device = vim.vm.device.VirtualDisk()
diskspec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
diskspec.device.backing.diskMode = 'persistent'
diskspec.device.controllerKey = scsi_ctl.device.key
        assert self.next_disk_unit_number != 7
        assert disk_index != 7
        # Configure the disk unit number.
if disk_index is not None:
diskspec.device.unitNumber = disk_index
self.next_disk_unit_number = disk_index + 1
else:
diskspec.device.unitNumber = self.next_disk_unit_number
self.next_disk_unit_number += 1
# unit number 7 is reserved to SCSI controller, increase next index
if self.next_disk_unit_number == 7:
self.next_disk_unit_number += 1
return diskspec
def create_nic(self, device_type, device_label, device_infos):
nic = vim.vm.device.VirtualDeviceSpec()
if device_type == 'pcnet32':
nic.device = vim.vm.device.VirtualPCNet32()
elif device_type == 'vmxnet2':
nic.device = vim.vm.device.VirtualVmxnet2()
elif device_type == 'vmxnet3':
nic.device = vim.vm.device.VirtualVmxnet3()
elif device_type == 'e1000':
nic.device = vim.vm.device.VirtualE1000()
elif device_type == 'e1000e':
nic.device = vim.vm.device.VirtualE1000e()
elif device_type == 'sriov':
nic.device = vim.vm.device.VirtualSriovEthernetCard()
else:
self.module.fail_json(msg="Invalid device_type '%s' for network %s" %
(device_type, device_infos['name']))
nic.device.wakeOnLanEnabled = True
nic.device.addressType = 'assigned'
nic.device.deviceInfo = vim.Description()
nic.device.deviceInfo.label = device_label
nic.device.deviceInfo.summary = device_infos['name']
nic.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
nic.device.connectable.startConnected = True
nic.device.connectable.allowGuestControl = True
nic.device.connectable.connected = True
if 'mac' in device_infos:
nic.device.macAddress = device_infos['mac']
return nic
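    # Usage sketch (illustrative; the network name is an assumption):
    #
    #   nic = device_helper.create_nic('vmxnet3', 'Network Adapter 1',
    #                                  {'name': 'VM Network'})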
class PyVmomiCache(object):
""" This class caches references to objects which are requested multiples times but not modified """
def __init__(self, content):
self.content = content
self.networks = {}
self.clusters = {}
self.esx_hosts = {}
def get_network(self, network):
if network not in self.networks:
self.networks[network] = get_obj(self.content, [vim.Network], network)
return self.networks[network]
def get_cluster(self, cluster):
if cluster not in self.clusters:
self.clusters[cluster] = get_obj(self.content, [vim.ClusterComputeResource], cluster)
return self.clusters[cluster]
def get_esx_host(self, host):
if host not in self.esx_hosts:
self.esx_hosts[host] = get_obj(self.content, [vim.HostSystem], host)
return self.esx_hosts[host]
class PyVmomiHelper(object):
def __init__(self, module):
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi module required')
self.module = module
self.device_helper = PyVmomiDeviceHelper(self.module)
self.params = module.params
self.si = None
self.content = connect_to_api(self.module)
self.configspec = None
self.change_detected = False
self.customspec = None
self.current_vm_obj = None
self.cache = PyVmomiCache(self.content)
def should_deploy_from_template(self):
return self.params.get('template') is not None
def getvm(self, name=None, uuid=None, folder=None):
# https://www.vmware.com/support/developer/vc-sdk/visdk2xpubs/ReferenceGuide/vim.SearchIndex.html
# self.si.content.searchIndex.FindByInventoryPath('DC1/vm/test_folder')
vm = None
searchpath = None
if uuid:
vm = self.content.searchIndex.FindByUuid(uuid=uuid, vmSearch=True)
elif folder:
# Build the absolute folder path to pass into the search method
if not self.params['folder'].startswith('/'):
self.module.fail_json(msg="Folder %(folder)s needs to be an absolute path, starting with '/'." % self.params)
searchpath = '%(datacenter)s%(folder)s' % self.params
# get all objects for this path ...
f_obj = self.content.searchIndex.FindByInventoryPath(searchpath)
if f_obj:
if isinstance(f_obj, vim.Datacenter):
f_obj = f_obj.vmFolder
for c_obj in f_obj.childEntity:
if not isinstance(c_obj, vim.VirtualMachine):
continue
if c_obj.name == name:
vm = c_obj
if self.params['name_match'] == 'first':
break
if vm:
self.current_vm_obj = vm
return vm
def set_powerstate(self, vm, state, force):
"""
        Set the power status for a VM, determined by the current and
        requested states. force forces the transition regardless of the current state.
"""
facts = self.gather_facts(vm)
expected_state = state.replace('_', '').lower()
current_state = facts['hw_power_status'].lower()
result = dict(
changed=False,
failed=False,
)
# Need Force
if not force and current_state not in ['poweredon', 'poweredoff']:
result['failed'] = True
result['msg'] = "VM is in %s power state. Force is required!" % current_state
return result
# State is not already true
if current_state != expected_state:
task = None
try:
if expected_state == 'poweredoff':
task = vm.PowerOff()
elif expected_state == 'poweredon':
task = vm.PowerOn()
elif expected_state == 'restarted':
if current_state in ('poweredon', 'poweringon', 'resetting', 'poweredoff'):
task = vm.Reset()
else:
result['failed'] = True
result['msg'] = "Cannot restart VM in the current state %s" % current_state
elif expected_state == 'suspended':
if current_state in ('poweredon', 'poweringon'):
task = vm.Suspend()
else:
result['failed'] = True
result['msg'] = 'Cannot suspend VM in the current state %s' % current_state
elif expected_state in ['shutdownguest', 'rebootguest']:
if current_state == 'poweredon' and vm.guest.toolsRunningStatus == 'guestToolsRunning':
if expected_state == 'shutdownguest':
task = vm.ShutdownGuest()
else:
task = vm.RebootGuest()
else:
result['failed'] = True
result['msg'] = "VM %s must be in poweredon state & tools should be installed for guest shutdown/reboot" % vm.name
except Exception:
e = get_exception()
result['failed'] = True
result['msg'] = str(e)
if task:
self.wait_for_task(task)
if task.info.state == 'error':
result['failed'] = True
result['msg'] = str(task.info.error.msg)
else:
result['changed'] = True
# need to get new metadata if changed
if result['changed']:
newvm = self.getvm(uuid=vm.config.uuid)
facts = self.gather_facts(newvm)
result['instance'] = facts
return result
def gather_facts(self, vm):
return gather_vm_facts(self.content, vm)
def remove_vm(self, vm):
# https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.ManagedEntity.html#destroy
task = vm.Destroy()
self.wait_for_task(task)
if task.info.state == 'error':
return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
return {'changed': True, 'failed': False}
def configure_guestid(self, vm_obj, vm_creation=False):
# guest_id is not required when using templates
if self.should_deploy_from_template() and self.params.get('guest_id') is None:
return
# guest_id is only mandatory on VM creation
if vm_creation and self.params['guest_id'] is None:
self.module.fail_json(msg="guest_id attribute is mandatory for VM creation")
if vm_obj is None or self.params['guest_id'] != vm_obj.summary.config.guestId:
self.change_detected = True
self.configspec.guestId = self.params['guest_id']
def configure_cpu_and_memory(self, vm_obj, vm_creation=False):
# set cpu/memory/etc
if 'hardware' in self.params:
if 'num_cpus' in self.params['hardware']:
self.configspec.numCPUs = int(self.params['hardware']['num_cpus'])
if vm_obj is None or self.configspec.numCPUs != vm_obj.config.hardware.numCPU:
self.change_detected = True
# num_cpu is mandatory for VM creation
elif vm_creation and not self.should_deploy_from_template():
self.module.fail_json(msg="hardware.num_cpus attribute is mandatory for VM creation")
if 'memory_mb' in self.params['hardware']:
self.configspec.memoryMB = int(self.params['hardware']['memory_mb'])
if vm_obj is None or self.configspec.memoryMB != vm_obj.config.hardware.memoryMB:
self.change_detected = True
# memory_mb is mandatory for VM creation
elif vm_creation and not self.should_deploy_from_template():
self.module.fail_json(msg="hardware.memory_mb attribute is mandatory for VM creation")
def get_vm_network_interfaces(self, vm=None):
if vm is None:
return []
device_list = []
for device in vm.config.hardware.device:
if isinstance(device, vim.vm.device.VirtualPCNet32) or \
isinstance(device, vim.vm.device.VirtualVmxnet2) or \
isinstance(device, vim.vm.device.VirtualVmxnet3) or \
isinstance(device, vim.vm.device.VirtualE1000) or \
isinstance(device, vim.vm.device.VirtualE1000e) or \
isinstance(device, vim.vm.device.VirtualSriovEthernetCard):
device_list.append(device)
return device_list
def configure_network(self, vm_obj):
        # Ignore an empty network list; this makes it possible to keep existing networks when deploying from a template or cloning a VM
if len(self.params['networks']) == 0:
return
network_devices = list()
for network in self.params['networks']:
if 'ip' in network or 'netmask' in network:
                if 'ip' not in network or 'netmask' not in network:
self.module.fail_json(msg="Both 'ip' and 'netmask' are required together.")
if 'name' in network:
if get_obj(self.content, [vim.Network], network['name']) is None:
self.module.fail_json(msg="Network '%(name)s' does not exists" % network)
elif 'vlan' in network:
dvps = get_all_objs(self.content, [vim.dvs.DistributedVirtualPortgroup])
for dvp in dvps:
if hasattr(dvp.config.defaultPortConfig, 'vlan') and dvp.config.defaultPortConfig.vlan.vlanId == network['vlan']:
network['name'] = dvp.config.name
break
if dvp.config.name == network['vlan']:
network['name'] = dvp.config.name
break
else:
self.module.fail_json(msg="VLAN '%(vlan)s' does not exist" % network)
else:
self.module.fail_json(msg="You need to define a network name or a vlan")
network_devices.append(network)
# List current device for Clone or Idempotency
current_net_devices = self.get_vm_network_interfaces(vm=vm_obj)
if len(network_devices) < len(current_net_devices):
self.module.fail_json(msg="given network device list is lesser than current VM device list (%d < %d). "
"Removing interfaces is not allowed"
% (len(network_devices), len(current_net_devices)))
for key in range(0, len(network_devices)):
# Default device type is vmxnet3, VMWare best practice
device_type = network_devices[key].get('device_type', 'vmxnet3')
nic = self.device_helper.create_nic(device_type,
'Network Adapter %s' % (key + 1),
network_devices[key])
nic_change_detected = False
if key < len(current_net_devices) and (vm_obj or self.should_deploy_from_template()):
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
# Changing mac address has no effect when editing interface
if 'mac' in network_devices[key] and nic.device.macAddress != current_net_devices[key].macAddress:
self.module.fail_json(msg="Changing MAC address has not effect when interface is already present. "
"The failing new MAC address is %s" % nic.device.macAddress)
nic.device = current_net_devices[key]
nic.device.deviceInfo = vim.Description()
else:
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
nic_change_detected = True
if hasattr(self.cache.get_network(network_devices[key]['name']), 'portKeys'):
# VDS switch
pg_obj = get_obj(self.content, [vim.dvs.DistributedVirtualPortgroup], network_devices[key]['name'])
if (nic.device.backing and
(nic.device.backing.port.portgroupKey != pg_obj.key or
nic.device.backing.port.switchUuid != pg_obj.config.distributedVirtualSwitch.uuid)):
nic_change_detected = True
dvs_port_connection = vim.dvs.PortConnection()
dvs_port_connection.portgroupKey = pg_obj.key
dvs_port_connection.switchUuid = pg_obj.config.distributedVirtualSwitch.uuid
nic.device.backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo()
nic.device.backing.port = dvs_port_connection
nic_change_detected = True
else:
# vSwitch
if not isinstance(nic.device.backing, vim.vm.device.VirtualEthernetCard.NetworkBackingInfo):
nic.device.backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
nic_change_detected = True
net_obj = self.cache.get_network(network_devices[key]['name'])
if nic.device.backing.network != net_obj:
nic.device.backing.network = net_obj
nic_change_detected = True
if nic.device.backing.deviceName != network_devices[key]['name']:
nic.device.backing.deviceName = network_devices[key]['name']
nic_change_detected = True
if nic_change_detected:
self.configspec.deviceChange.append(nic)
self.change_detected = True
def customize_customvalues(self, vm_obj):
if len(self.params['customvalues']) == 0:
return
facts = self.gather_facts(vm_obj)
for kv in self.params['customvalues']:
if 'key' not in kv or 'value' not in kv:
self.module.exit_json(msg="customvalues items required both 'key' and 'value fields.")
# If kv is not kv fetched from facts, change it
if kv['key'] not in facts['customvalues'] or facts['customvalues'][kv['key']] != kv['value']:
try:
vm_obj.setCustomValue(key=kv['key'], value=kv['value'])
self.change_detected = True
except Exception:
e = get_exception()
self.module.fail_json(msg="Failed to set custom value for key='%s' and value='%s'. Error was: %s"
% (kv['key'], kv['value'], e))
def customize_vm(self, vm_obj):
# Network settings
adaptermaps = []
for network in self.params['networks']:
if 'ip' in network and 'netmask' in network:
guest_map = vim.vm.customization.AdapterMapping()
guest_map.adapter = vim.vm.customization.IPSettings()
guest_map.adapter.ip = vim.vm.customization.FixedIp()
guest_map.adapter.ip.ipAddress = str(network['ip'])
guest_map.adapter.subnetMask = str(network['netmask'])
if 'gateway' in network:
guest_map.adapter.gateway = network['gateway']
# On Windows, DNS domain and DNS servers can be set by network interface
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.IPSettings.html
if 'domain' in network:
guest_map.adapter.dnsDomain = network['domain']
elif self.params['customization'].get('domain'):
guest_map.adapter.dnsDomain = self.params['customization']['domain']
if 'dns_servers' in network:
guest_map.adapter.dnsServerList = network['dns_servers']
elif self.params['customization'].get('dns_servers'):
guest_map.adapter.dnsServerList = self.params['customization']['dns_servers']
adaptermaps.append(guest_map)
# Global DNS settings
globalip = vim.vm.customization.GlobalIPSettings()
if 'dns_servers' in self.params['customization']:
globalip.dnsServerList = self.params['customization'].get('dns_servers')
# TODO: Maybe list the different domains from the interfaces here by default ?
if 'dns_suffix' in self.params['customization'] or 'domain' in self.params['customization']:
globalip.dnsSuffixList = self.params['customization'].get('dns_suffix', self.params['customization']['domain'])
if self.params['guest_id']:
guest_id = self.params['guest_id']
else:
guest_id = vm_obj.summary.config.guestId
# If I install a Windows use Sysprep
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.Sysprep.html#field_detail
if 'win' in guest_id:
ident = vim.vm.customization.Sysprep()
ident.userData = vim.vm.customization.UserData()
ident.userData.computerName = vim.vm.customization.FixedName()
ident.userData.computerName.name = str(self.params['customization'].get('hostname', self.params['name']))
ident.userData.fullName = str(self.params['customization'].get('fullname', 'Administrator'))
ident.userData.orgName = str(self.params['customization'].get('orgname', 'ACME'))
ident.guiUnattended = vim.vm.customization.GuiUnattended()
ident.guiUnattended.autoLogon = self.params['customization'].get('autologon', False)
ident.guiUnattended.autoLogonCount = self.params['customization'].get('autologoncount', 1)
ident.guiUnattended.timeZone = self.params['customization'].get('timezone', 85)
ident.identification = vim.vm.customization.Identification()
if self.params['customization'].get('password', '') != '':
ident.guiUnattended.password = vim.vm.customization.Password()
ident.guiUnattended.password.value = str(self.params['customization']['password'])
ident.guiUnattended.password.plainText = True
else:
self.module.fail_json(msg="The 'customization' section requires a 'password' entry, which cannot be empty.")
if 'productid' in self.params['customization']:
ident.userData.orgName = str(self.params['customization']['productid'])
if 'joindomain' in self.params['customization']:
if 'domainadmin' not in self.params['customization'] or 'domainadminpassword' not in self.params['customization']:
self.module.fail_json(msg="'domainadmin' and 'domainadminpassword' entries are mandatory in 'customization' section to use "
"joindomain feature")
ident.identification.domainAdmin = str(self.params['customization'].get('domainadmin'))
ident.identification.joinDomain = str(self.params['customization'].get('joindomain'))
ident.identification.domainAdminPassword = vim.vm.customization.Password()
ident.identification.domainAdminPassword.value = str(self.params['customization'].get('domainadminpassword'))
ident.identification.domainAdminPassword.plainText = True
elif 'joinworkgroup' in self.params['customization']:
ident.identification.joinWorkgroup = str(self.params['customization'].get('joinworkgroup'))
if 'runonce' in self.params['customization']:
ident.guiRunOnce = vim.vm.customization.GuiRunOnce()
ident.guiRunOnce.commandList = self.params['customization']['runonce']
else:
# Else use LinuxPrep
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.LinuxPrep.html
ident = vim.vm.customization.LinuxPrep()
# TODO: Maybe add domain from interface if missing ?
if 'domain' in self.params['customization']:
ident.domain = str(self.params['customization'].get('domain'))
ident.hostName = vim.vm.customization.FixedName()
ident.hostName.name = str(self.params['customization'].get('hostname', self.params['name']))
self.customspec = vim.vm.customization.Specification()
self.customspec.nicSettingMap = adaptermaps
self.customspec.globalIPSettings = globalip
self.customspec.identity = ident
def get_vm_scsi_controller(self, vm_obj):
# If vm_obj doesn't exists no SCSI controller to find
if vm_obj is None:
return None
for device in vm_obj.config.hardware.device:
if self.device_helper.is_scsi_controller(device):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.device = device
return scsi_ctl
return None
def get_configured_disk_size(self, expected_disk_spec):
# what size is it?
if [x for x in expected_disk_spec.keys() if x.startswith('size_') or x == 'size']:
# size_tb, size_gb, size_mb, size_kb, size_b ...?
if 'size' in expected_disk_spec:
expected = ''.join(c for c in expected_disk_spec['size'] if c.isdigit())
unit = expected_disk_spec['size'].replace(expected, '').lower()
expected = int(expected)
else:
param = [x for x in expected_disk_spec.keys() if x.startswith('size_')][0]
unit = param.split('_')[-1].lower()
expected = [x[1] for x in expected_disk_spec.items() if x[0].startswith('size_')][0]
expected = int(expected)
if unit == 'tb':
return expected * 1024 * 1024 * 1024
elif unit == 'gb':
return expected * 1024 * 1024
        elif unit == 'mb':
return expected * 1024
elif unit == 'kb':
return expected
self.module.fail_json(
msg='%s is not a supported unit for disk size. Supported units are kb, mb, gb or tb' % unit)
# No size found but disk, fail
self.module.fail_json(
msg="No size, size_kb, size_mb, size_gb or size_tb attribute found into disk configuration")
def configure_disks(self, vm_obj):
        # Ignore an empty disk list; this makes it possible to keep existing disks when deploying from a template or cloning a VM
if len(self.params['disk']) == 0:
return
scsi_ctl = self.get_vm_scsi_controller(vm_obj)
# Create scsi controller only if we are deploying a new VM, not a template or reconfiguring
if vm_obj is None or scsi_ctl is None:
scsi_ctl = self.device_helper.create_scsi_controller(self.get_scsi_type())
self.change_detected = True
self.configspec.deviceChange.append(scsi_ctl)
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)] \
if vm_obj is not None else None
if disks is not None and self.params.get('disk') and len(self.params.get('disk')) < len(disks):
self.module.fail_json(msg="Provided disks configuration has less disks than "
"the target object (%d vs %d)" % (len(self.params.get('disk')), len(disks)))
disk_index = 0
for expected_disk_spec in self.params.get('disk'):
disk_modified = False
# If we are manipulating and existing objects which has disks and disk_index is in disks
if vm_obj is not None and disks is not None and disk_index < len(disks):
diskspec = vim.vm.device.VirtualDeviceSpec()
# set the operation to edit so that it knows to keep other settings
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
diskspec.device = disks[disk_index]
else:
diskspec = self.device_helper.create_scsi_disk(scsi_ctl, disk_index)
disk_modified = True
# is it thin?
if 'type' in expected_disk_spec:
if expected_disk_spec.get('type', '').lower() == 'thin':
diskspec.device.backing.thinProvisioned = True
# which datastore?
if expected_disk_spec.get('datastore'):
# TODO: This is already handled by the relocation spec,
# but it needs to eventually be handled for all the
# other disks defined
pass
# increment index for next disk search
disk_index += 1
# index 7 is reserved to SCSI controller
if disk_index == 7:
disk_index += 1
kb = self.get_configured_disk_size(expected_disk_spec)
# VMWare doesn't allow to reduce disk sizes
if kb < diskspec.device.capacityInKB:
self.module.fail_json(
msg="Given disk size is lesser than found (%d < %d). Reducing disks is not allowed." %
(kb, diskspec.device.capacityInKB))
if kb != diskspec.device.capacityInKB or disk_modified:
diskspec.device.capacityInKB = kb
self.configspec.deviceChange.append(diskspec)
self.change_detected = True
def select_host(self):
# if the user wants a cluster, get the list of hosts for the cluster and use the first one
if self.params['cluster']:
cluster = self.cache.get_cluster(self.params['cluster'])
if not cluster:
self.module.fail_json(msg="Failed to find a cluster named %(cluster)s" % self.params)
hostsystems = [x for x in cluster.host]
# TODO: add a policy to select host
hostsystem = hostsystems[0]
else:
hostsystem = self.cache.get_esx_host(self.params['esxi_hostname'])
if not hostsystem:
self.module.fail_json(msg="Failed to find a host named %(esxi_hostname)s" % self.params)
return hostsystem
def select_datastore(self, vm_obj=None):
datastore = None
datastore_name = None
if len(self.params['disk']) != 0:
# TODO: really use the datastore for newly created disks
if 'autoselect_datastore' in self.params['disk'][0] and self.params['disk'][0]['autoselect_datastore']:
datastores = get_all_objs(self.content, [vim.Datastore])
if datastores is None or len(datastores) == 0:
self.module.fail_json(msg="Unable to find a datastore list when autoselecting")
datastore_freespace = 0
for ds in datastores:
if ds.summary.freeSpace > datastore_freespace:
# If datastore field is provided, filter destination datastores
if 'datastore' in self.params['disk'][0] and \
isinstance(self.params['disk'][0]['datastore'], str) and \
ds.name.find(self.params['disk'][0]['datastore']) < 0:
continue
datastore = ds
datastore_name = datastore.name
datastore_freespace = ds.summary.freeSpace
elif 'datastore' in self.params['disk'][0]:
datastore_name = self.params['disk'][0]['datastore']
datastore = get_obj(self.content, [vim.Datastore], datastore_name)
else:
self.module.fail_json(msg="Either datastore or autoselect_datastore "
"should be provided to select datastore")
if not datastore and self.should_deploy_from_template():
# use the template's existing DS
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)]
datastore = disks[0].backing.datastore
datastore_name = datastore.name
if not datastore:
self.module.fail_json(msg="Failed to find a matching datastore")
return datastore, datastore_name
def obj_has_parent(self, obj, parent):
assert obj is not None and parent is not None
current_parent = obj
while True:
if current_parent.name == parent.name:
return True
current_parent = current_parent.parent
if current_parent is None:
return False
def select_resource_pool(self, host):
resource_pools = get_all_objs(self.content, [vim.ResourcePool])
for rp in resource_pools.items():
if not rp[0]:
continue
if not hasattr(rp[0], 'parent'):
continue
# Find resource pool on host
if self.obj_has_parent(rp[0].parent, host.parent):
# If no resource_pool selected or it's the selected pool, return it
if self.module.params['resource_pool'] is None or rp[0].name == self.module.params['resource_pool']:
return rp[0]
if self.module.params['resource_pool'] is not None:
self.module.fail_json(msg="Could not find resource_pool %s for selected host %s"
% (self.module.params['resource_pool'], host.name))
else:
self.module.fail_json(msg="Failed to find a resource group for %s" % host.name)
def get_scsi_type(self):
disk_controller_type = "paravirtual"
# set cpu/memory/etc
if 'hardware' in self.params:
if 'scsi' in self.params['hardware']:
if self.params['hardware']['scsi'] in ['buslogic', 'paravirtual', 'lsilogic', 'lsilogicsas']:
disk_controller_type = self.params['hardware']['scsi']
else:
self.module.fail_json(msg="hardware.scsi attribute should be 'paravirtual' or 'lsilogic'")
return disk_controller_type
def deploy_vm(self):
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/clone_vm.py
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.CloneSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.ConfigSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
# FIXME:
# - multiple datacenters
# - multiple templates by the same name
# - static IPs
#datacenters = get_all_objs(self.content, [vim.Datacenter])
datacenter = get_obj(self.content, [vim.Datacenter], self.params['datacenter'])
if not datacenter:
self.module.fail_json(msg='No datacenter named %(datacenter)s was found' % self.params)
destfolder = None
if not self.params['folder'].startswith('/'):
self.module.fail_json(msg="Folder %(folder)s needs to be an absolute path, starting with '/'." % self.params)
f_obj = self.content.searchIndex.FindByInventoryPath('/%(datacenter)s%(folder)s' % self.params)
if f_obj is None:
self.module.fail_json(msg='No folder matched the path: %(folder)s' % self.params)
destfolder = f_obj
hostsystem = self.select_host()
if self.should_deploy_from_template():
# FIXME: need to search for this in the same way as guests to ensure accuracy
vm_obj = get_obj(self.content, [vim.VirtualMachine], self.params['template'])
if not vm_obj:
self.module.fail_json(msg="Could not find a template named %(template)s" % self.params)
else:
vm_obj = None
# set the destination datastore for VM & disks
(datastore, datastore_name) = self.select_datastore(vm_obj)
resource_pool = self.select_resource_pool(hostsystem)
self.configspec = vim.vm.ConfigSpec(cpuHotAddEnabled=True, memoryHotAddEnabled=True)
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=vm_obj, vm_creation=True)
self.configure_cpu_and_memory(vm_obj=vm_obj, vm_creation=True)
self.configure_disks(vm_obj=vm_obj)
self.configure_network(vm_obj=vm_obj)
if len(self.params['customization']) > 0 or len(self.params['networks']) > 0:
self.customize_vm(vm_obj=vm_obj)
try:
if self.should_deploy_from_template():
# create the relocation spec
relospec = vim.vm.RelocateSpec()
relospec.host = hostsystem
relospec.datastore = datastore
relospec.pool = resource_pool
clonespec = vim.vm.CloneSpec(template=self.params['is_template'], location=relospec)
if self.customspec:
clonespec.customization = self.customspec
clonespec.config = self.configspec
task = vm_obj.Clone(folder=destfolder, name=self.params['name'], spec=clonespec)
self.change_detected = True
else:
# ConfigSpec require name for VM creation
self.configspec.name = self.params['name']
self.configspec.files = vim.vm.FileInfo(logDirectory=None,
snapshotDirectory=None,
suspendDirectory=None,
vmPathName="[" + datastore_name + "] " + self.params["name"])
task = destfolder.CreateVM_Task(config=self.configspec, pool=resource_pool)
self.change_detected = True
self.wait_for_task(task)
except TypeError:
self.module.fail_json(msg="TypeError was returned, please ensure to give correct inputs.")
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': self.change_detected, 'failed': True, 'msg': task.info.error.msg}
else:
# set annotation
vm = task.info.result
if self.params['annotation']:
annotation_spec = vim.vm.ConfigSpec()
annotation_spec.annotation = str(self.params['annotation'])
task = vm.ReconfigVM_Task(annotation_spec)
self.wait_for_task(task)
self.customize_customvalues(vm_obj=vm)
if self.params['wait_for_ip_address'] or self.params['state'] in ['poweredon', 'restarted']:
self.set_powerstate(vm, 'poweredon', force=False)
if self.params['wait_for_ip_address']:
self.wait_for_vm_ip(vm)
vm_facts = self.gather_facts(vm)
return {'changed': self.change_detected, 'failed': False, 'instance': vm_facts}
def reconfigure_vm(self):
self.configspec = vim.vm.ConfigSpec()
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=self.current_vm_obj)
self.configure_cpu_and_memory(vm_obj=self.current_vm_obj)
self.configure_disks(vm_obj=self.current_vm_obj)
self.configure_network(vm_obj=self.current_vm_obj)
self.customize_customvalues(vm_obj=self.current_vm_obj)
if self.params['annotation'] and self.current_vm_obj.config.annotation != self.params['annotation']:
self.configspec.annotation = str(self.params['annotation'])
self.change_detected = True
relospec = vim.vm.RelocateSpec()
hostsystem = self.select_host()
relospec.pool = self.select_resource_pool(hostsystem)
change_applied = False
if relospec.pool != self.current_vm_obj.resourcePool:
task = self.current_vm_obj.RelocateVM_Task(spec=relospec)
self.wait_for_task(task)
change_applied = True
# Only send VMWare task if we see a modification
if self.change_detected:
task = self.current_vm_obj.ReconfigVM_Task(spec=self.configspec)
self.wait_for_task(task)
change_applied = True
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': change_applied, 'failed': True, 'msg': task.info.error.msg}
# Rename VM
if self.params['uuid'] and self.params['name'] and self.params['name'] != self.current_vm_obj.config.name:
task = self.current_vm_obj.Rename_Task(self.params['name'])
self.wait_for_task(task)
change_applied = True
if task.info.state == 'error':
return {'changed': change_applied, 'failed': True, 'msg': task.info.error.msg}
# Mark VM as Template
if self.params['is_template']:
self.current_vm_obj.MarkAsTemplate()
change_applied = True
vm_facts = self.gather_facts(self.current_vm_obj)
return {'changed': change_applied, 'failed': False, 'instance': vm_facts}
@staticmethod
def wait_for_task(task):
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.Task.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.TaskInfo.html
# https://github.com/virtdevninja/pyvmomi-community-samples/blob/master/samples/tools/tasks.py
while task.info.state not in ['success', 'error']:
time.sleep(1)
def wait_for_vm_ip(self, vm, poll=100, sleep=5):
ips = None
facts = {}
thispoll = 0
while not ips and thispoll <= poll:
newvm = self.getvm(uuid=vm.config.uuid)
facts = self.gather_facts(newvm)
if facts['ipv4'] or facts['ipv6']:
ips = True
else:
time.sleep(sleep)
thispoll += 1
return facts
def get_obj(content, vimtype, name):
"""
    Return an object by name; if name is None, the
    first object found is returned.
"""
obj = None
container = content.viewManager.CreateContainerView(
content.rootFolder, vimtype, True)
for c in container.view:
if name:
if c.name == name:
obj = c
break
else:
obj = c
break
container.Destroy()
return obj
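# Usage sketch (illustrative): fetch a datastore object by name:
#
#   ds = get_obj(content, [vim.Datastore], 'datastore1')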
def main():
module = AnsibleModule(
argument_spec=dict(
hostname=dict(
type='str',
default=os.environ.get('VMWARE_HOST')
),
username=dict(
type='str',
default=os.environ.get('VMWARE_USER')
),
password=dict(
type='str', no_log=True,
default=os.environ.get('VMWARE_PASSWORD')
),
state=dict(
required=False,
choices=[
'poweredon',
'poweredoff',
'present',
'absent',
'restarted',
'suspended',
'shutdownguest',
'rebootguest'
],
default='present'),
validate_certs=dict(type='bool', default=True),
template_src=dict(type='str', aliases=['template']),
is_template=dict(type='bool', default=False),
annotation=dict(type='str', aliases=['notes']),
customvalues=dict(type='list', default=[]),
name=dict(required=True, type='str'),
name_match=dict(type='str', default='first'),
uuid=dict(type='str'),
folder=dict(type='str', default='/vm'),
guest_id=dict(type='str'),
disk=dict(type='list', default=[]),
hardware=dict(type='dict', default={}),
force=dict(type='bool', default=False),
datacenter=dict(type='str', default='ha-datacenter'),
esxi_hostname=dict(type='str'),
cluster=dict(type='str'),
wait_for_ip_address=dict(type='bool', default=False),
networks=dict(type='list', default=[]),
resource_pool=dict(type='str'),
customization=dict(type='dict', no_log=True, default={}),
),
supports_check_mode=True,
mutually_exclusive=[
['esxi_hostname', 'cluster'],
],
required_together=[
['state', 'force'],
['template'],
],
)
result = {'failed': False, 'changed': False}
# Prepend /vm if it was missing from the folder path, also strip trailing slashes
if not module.params['folder'].startswith('/vm') and module.params['folder'].startswith('/'):
module.params['folder'] = '/vm%(folder)s' % module.params
module.params['folder'] = module.params['folder'].rstrip('/')
pyv = PyVmomiHelper(module)
# Check if the VM exists before continuing
vm = pyv.getvm(name=module.params['name'],
folder=module.params['folder'],
uuid=module.params['uuid'])
# VM already exists
if vm:
if module.params['state'] == 'absent':
# destroy it
if module.params['force']:
# has to be poweredoff first
pyv.set_powerstate(vm, 'poweredoff', module.params['force'])
result = pyv.remove_vm(vm)
elif module.params['state'] == 'present':
result = pyv.reconfigure_vm()
elif module.params['state'] in ['poweredon', 'poweredoff', 'restarted', 'suspended', 'shutdownguest', 'rebootguest']:
# set powerstate
tmp_result = pyv.set_powerstate(vm, module.params['state'], module.params['force'])
if tmp_result['changed']:
result["changed"] = True
if not tmp_result["failed"]:
result["failed"] = False
else:
# This should not happen
assert False
# VM doesn't exist
else:
if module.params['state'] in ['poweredon', 'poweredoff', 'present', 'restarted', 'suspended']:
# Create it ...
result = pyv.deploy_vm()
if 'failed' not in result:
result['failed'] = False
if result['failed']:
module.fail_json(**result)
else:
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | 3,674,283,513,872,331,000 | 42.19688 | 144 | 0.588859 | false |
jasonbot/django | tests/template_tests/filter_tests/test_add.py | 503 | 1688 | from datetime import date, timedelta
from django.template.defaultfilters import add
from django.test import SimpleTestCase
from ..utils import setup
class AddTests(SimpleTestCase):
"""
Tests for #11687 and #16676
"""
@setup({'add01': '{{ i|add:"5" }}'})
def test_add01(self):
output = self.engine.render_to_string('add01', {'i': 2000})
self.assertEqual(output, '2005')
@setup({'add02': '{{ i|add:"napis" }}'})
def test_add02(self):
output = self.engine.render_to_string('add02', {'i': 2000})
self.assertEqual(output, '')
@setup({'add03': '{{ i|add:16 }}'})
def test_add03(self):
output = self.engine.render_to_string('add03', {'i': 'not_an_int'})
self.assertEqual(output, '')
@setup({'add04': '{{ i|add:"16" }}'})
def test_add04(self):
output = self.engine.render_to_string('add04', {'i': 'not_an_int'})
self.assertEqual(output, 'not_an_int16')
@setup({'add05': '{{ l1|add:l2 }}'})
def test_add05(self):
output = self.engine.render_to_string('add05', {'l1': [1, 2], 'l2': [3, 4]})
self.assertEqual(output, '[1, 2, 3, 4]')
@setup({'add06': '{{ t1|add:t2 }}'})
def test_add06(self):
output = self.engine.render_to_string('add06', {'t1': (3, 4), 't2': (1, 2)})
self.assertEqual(output, '(3, 4, 1, 2)')
@setup({'add07': '{{ d|add:t }}'})
def test_add07(self):
output = self.engine.render_to_string('add07', {'d': date(2000, 1, 1), 't': timedelta(10)})
self.assertEqual(output, 'Jan. 11, 2000')
class FunctionTests(SimpleTestCase):
def test_add(self):
self.assertEqual(add('1', '2'), 3)
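    # A sketch of the filter's fallback behaviour (values illustrative):
    # non-numeric strings are concatenated rather than summed.
    def test_add_string_fallback(self):
        self.assertEqual(add('Python', '3'), 'Python3')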
| bsd-3-clause | -6,339,160,291,653,412,000 | 30.849057 | 99 | 0.567536 | false |
redhat-openstack/manila | manila_tempest_tests/tests/api/admin/test_quotas.py | 2 | 13084 | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config # noqa
from tempest import test # noqa
from manila_tempest_tests import clients_share as clients
from manila_tempest_tests.tests.api import base
CONF = config.CONF
class SharesAdminQuotasTest(base.BaseSharesAdminTest):
@classmethod
def resource_setup(cls):
cls.os = clients.AdminManager()
super(SharesAdminQuotasTest, cls).resource_setup()
cls.user_id = cls.shares_client.user_id
cls.tenant_id = cls.shares_client.tenant_id
@test.attr(type=["gate", "smoke", ])
def test_default_quotas(self):
quotas = self.shares_client.default_quotas(self.tenant_id)
self.assertGreater(int(quotas["gigabytes"]), -2)
self.assertGreater(int(quotas["snapshot_gigabytes"]), -2)
self.assertGreater(int(quotas["shares"]), -2)
self.assertGreater(int(quotas["snapshots"]), -2)
self.assertGreater(int(quotas["share_networks"]), -2)
@test.attr(type=["gate", "smoke", ])
def test_show_quotas(self):
quotas = self.shares_client.show_quotas(self.tenant_id)
self.assertGreater(int(quotas["gigabytes"]), -2)
self.assertGreater(int(quotas["snapshot_gigabytes"]), -2)
self.assertGreater(int(quotas["shares"]), -2)
self.assertGreater(int(quotas["snapshots"]), -2)
self.assertGreater(int(quotas["share_networks"]), -2)
@test.attr(type=["gate", "smoke", ])
def test_show_quotas_for_user(self):
quotas = self.shares_client.show_quotas(self.tenant_id, self.user_id)
self.assertGreater(int(quotas["gigabytes"]), -2)
self.assertGreater(int(quotas["snapshot_gigabytes"]), -2)
self.assertGreater(int(quotas["shares"]), -2)
self.assertGreater(int(quotas["snapshots"]), -2)
self.assertGreater(int(quotas["share_networks"]), -2)
class SharesAdminQuotasUpdateTest(base.BaseSharesAdminTest):
force_tenant_isolation = True
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_shares(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id)
new_quota = int(quotas["shares"]) + 2
# set new quota for shares
updated = client.update_quotas(client.tenant_id, shares=new_quota)
self.assertEqual(int(updated["shares"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_shares(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id, client.user_id)
new_quota = int(quotas["shares"]) - 1
# set new quota for shares
updated = client.update_quotas(
client.tenant_id, client.user_id, shares=new_quota)
self.assertEqual(int(updated["shares"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_snapshots(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id)
new_quota = int(quotas["snapshots"]) + 2
# set new quota for snapshots
updated = client.update_quotas(client.tenant_id, snapshots=new_quota)
self.assertEqual(int(updated["snapshots"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_snapshots(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id, client.user_id)
new_quota = int(quotas["snapshots"]) - 1
# set new quota for snapshots
updated = client.update_quotas(
client.tenant_id, client.user_id, snapshots=new_quota)
self.assertEqual(int(updated["snapshots"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id)
# make quotas for update
gigabytes = int(custom["gigabytes"]) + 2
# set new quota for shares
updated = client.update_quotas(
client.tenant_id, gigabytes=gigabytes)
self.assertEqual(int(updated["gigabytes"]), gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id)
# make quotas for update
snapshot_gigabytes = int(custom["snapshot_gigabytes"]) + 2
# set new quota for shares
updated = client.update_quotas(
client.tenant_id,
snapshot_gigabytes=snapshot_gigabytes)
self.assertEqual(
int(updated["snapshot_gigabytes"]), snapshot_gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id, client.user_id)
# make quotas for update
gigabytes = int(custom["gigabytes"]) - 1
# set new quota for shares
updated = client.update_quotas(
client.tenant_id, client.user_id,
gigabytes=gigabytes)
self.assertEqual(int(updated["gigabytes"]), gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id, client.user_id)
# make quotas for update
snapshot_gigabytes = int(custom["snapshot_gigabytes"]) - 1
# set new quota for shares
updated = client.update_quotas(
client.tenant_id, client.user_id,
snapshot_gigabytes=snapshot_gigabytes)
self.assertEqual(
int(updated["snapshot_gigabytes"]), snapshot_gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_share_networks(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id)
new_quota = int(quotas["share_networks"]) + 2
# set new quota for share-networks
updated = client.update_quotas(
client.tenant_id, share_networks=new_quota)
self.assertEqual(int(updated["share_networks"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_share_networks(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(
client.tenant_id, client.user_id)
new_quota = int(quotas["share_networks"]) - 1
# set new quota for share-networks
updated = client.update_quotas(
client.tenant_id, client.user_id,
share_networks=new_quota)
self.assertEqual(int(updated["share_networks"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_reset_tenant_quotas(self):
client = self.get_client_with_isolated_creds()
# get default_quotas
default = client.default_quotas(client.tenant_id)
# get current quotas
custom = client.show_quotas(client.tenant_id)
# make quotas for update
shares = int(custom["shares"]) + 2
snapshots = int(custom["snapshots"]) + 2
gigabytes = int(custom["gigabytes"]) + 2
snapshot_gigabytes = int(custom["snapshot_gigabytes"]) + 2
share_networks = int(custom["share_networks"]) + 2
# set new quota
updated = client.update_quotas(
client.tenant_id,
shares=shares,
snapshots=snapshots,
gigabytes=gigabytes,
snapshot_gigabytes=snapshot_gigabytes,
share_networks=share_networks)
self.assertEqual(int(updated["shares"]), shares)
self.assertEqual(int(updated["snapshots"]), snapshots)
self.assertEqual(int(updated["gigabytes"]), gigabytes)
self.assertEqual(
int(updated["snapshot_gigabytes"]), snapshot_gigabytes)
self.assertEqual(int(updated["share_networks"]), share_networks)
# reset customized quotas
client.reset_quotas(client.tenant_id)
# verify quotas
        reset_quotas = client.show_quotas(client.tenant_id)
        self.assertEqual(int(reset_quotas["shares"]), int(default["shares"]))
        self.assertEqual(int(reset_quotas["snapshots"]),
                         int(default["snapshots"]))
        self.assertEqual(int(reset_quotas["gigabytes"]),
                         int(default["gigabytes"]))
        self.assertEqual(int(reset_quotas["snapshot_gigabytes"]),
                         int(default["snapshot_gigabytes"]))
        self.assertEqual(int(reset_quotas["share_networks"]),
                         int(default["share_networks"]))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_shares(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, shares=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('shares'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_shares(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
shares=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('shares'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_snapshots(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, snapshots=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('snapshots'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_snapshots(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
snapshots=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('snapshots'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, gigabytes=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, snapshot_gigabytes=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('snapshot_gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
gigabytes=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
snapshot_gigabytes=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('snapshot_gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_share_networks(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, share_networks=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('share_networks'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_share_networks(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
share_networks=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('share_networks'))
| apache-2.0 | 756,142,312,881,644,400 | 36.276353 | 78 | 0.63635 | false |
MIPS/external-chromium_org | chrome/test/chromedriver/embed_extension_in_cpp.py | 158 | 1124 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Embeds Chrome user data files in C++ code."""
import base64
import optparse
import os
import StringIO
import sys
import zipfile
import cpp_source
def main():
parser = optparse.OptionParser()
parser.add_option(
'', '--directory', type='string', default='.',
help='Path to directory where the cc/h file should be created')
options, args = parser.parse_args()
global_string_map = {}
string_buffer = StringIO.StringIO()
zipper = zipfile.ZipFile(string_buffer, 'w')
for f in args:
zipper.write(f, os.path.basename(f), zipfile.ZIP_STORED)
zipper.close()
global_string_map['kAutomationExtension'] = base64.b64encode(
string_buffer.getvalue())
string_buffer.close()
cpp_source.WriteSource('embedded_automation_extension',
'chrome/test/chromedriver/chrome',
options.directory, global_string_map)
if __name__ == '__main__':
sys.exit(main())
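# Editor's note (assumption about cpp_source.WriteSource, which is not shown
# here): the generated embedded_automation_extension .cc/.h pair is expected
# to expose the zipped, base64-encoded input files as a C++ string constant
# named kAutomationExtension, matching the global_string_map key above.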
| bsd-3-clause | 6,195,955,007,070,498,000 | 26.414634 | 72 | 0.676157 | false |
cloudera/hue | desktop/core/ext-py/pytest-django-3.10.0/tests/test_db_setup.py | 2 | 16810 | import pytest
from pytest_django.lazy_django import get_django_version
from pytest_django_test.db_helpers import (
db_exists,
drop_database,
mark_database,
mark_exists,
skip_if_sqlite_in_memory,
)
def test_db_reuse_simple(django_testdir):
"A test for all backends to check that `--reuse-db` works."
django_testdir.create_test_module(
"""
import pytest
from .app.models import Item
@pytest.mark.django_db
def test_db_can_be_accessed():
assert Item.objects.count() == 0
"""
)
result = django_testdir.runpytest_subprocess("-v", "--reuse-db")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
def test_db_order(django_testdir):
"""Test order in which tests are being executed."""
django_testdir.create_test_module('''
from unittest import TestCase
import pytest
from django.test import SimpleTestCase, TestCase as DjangoTestCase, TransactionTestCase
from .app.models import Item
@pytest.mark.django_db(transaction=True)
def test_run_second_decorator():
pass
def test_run_second_fixture(transactional_db):
pass
def test_run_first_fixture(db):
pass
@pytest.mark.django_db
def test_run_first_decorator():
pass
class MyTestCase(TestCase):
def test_run_last_test_case(self):
pass
class MySimpleTestCase(SimpleTestCase):
def test_run_last_simple_test_case(self):
pass
class MyDjangoTestCase(DjangoTestCase):
def test_run_first_django_test_case(self):
pass
class MyTransactionTestCase(TransactionTestCase):
def test_run_second_transaction_test_case(self):
pass
''')
result = django_testdir.runpytest_subprocess('-v', '-s')
assert result.ret == 0
result.stdout.fnmatch_lines([
"*test_run_first_fixture*",
"*test_run_first_decorator*",
"*test_run_first_django_test_case*",
"*test_run_second_decorator*",
"*test_run_second_fixture*",
"*test_run_second_transaction_test_case*",
"*test_run_last_test_case*",
"*test_run_last_simple_test_case*",
])
def test_db_reuse(django_testdir):
"""
Test the re-use db functionality.
"""
skip_if_sqlite_in_memory()
django_testdir.create_test_module(
"""
import pytest
from .app.models import Item
@pytest.mark.django_db
def test_db_can_be_accessed():
assert Item.objects.count() == 0
"""
)
# Use --create-db on the first run to make sure we are not just re-using a
# database from another test run
drop_database()
assert not db_exists()
# Do not pass in --create-db to make sure it is created when it
# does not exist
result_first = django_testdir.runpytest_subprocess("-v", "--reuse-db")
assert result_first.ret == 0
result_first.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
assert not mark_exists()
mark_database()
assert mark_exists()
result_second = django_testdir.runpytest_subprocess("-v", "--reuse-db")
assert result_second.ret == 0
result_second.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
# Make sure the database has not been re-created
assert mark_exists()
result_third = django_testdir.runpytest_subprocess(
"-v", "--reuse-db", "--create-db"
)
assert result_third.ret == 0
result_third.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
# Make sure the database has been re-created and the mark is gone
assert db_exists()
assert not mark_exists()
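# Editor's note: mark_database()/mark_exists() (imported from db_helpers at
# the top) presumably plant and look up a sentinel in the test database; the
# checks above hold because --reuse-db must preserve that sentinel while
# --create-db must recreate the database and lose it.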
class TestSqlite:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_sqlite_test_name_used(self, django_testdir):
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
from django import VERSION
@pytest.mark.django_db
def test_a():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
print(conn.settings_dict)
assert conn.settings_dict['NAME'] == 'test_custom_db_name'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-v")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_a*PASSED*"])
def test_xdist_with_reuse(django_testdir):
pytest.importorskip("xdist")
skip_if_sqlite_in_memory()
drop_database("gw0")
drop_database("gw1")
assert not db_exists("gw0")
assert not db_exists("gw1")
django_testdir.create_test_module(
"""
import pytest
from .app.models import Item
def _check(settings):
# Make sure that the database name looks correct
db_name = settings.DATABASES['default']['NAME']
assert db_name.endswith('_gw0') or db_name.endswith('_gw1')
assert Item.objects.count() == 0
Item.objects.create(name='foo')
assert Item.objects.count() == 1
@pytest.mark.django_db
def test_a(settings):
_check(settings)
@pytest.mark.django_db
def test_b(settings):
_check(settings)
@pytest.mark.django_db
def test_c(settings):
_check(settings)
@pytest.mark.django_db
def test_d(settings):
_check(settings)
"""
)
result = django_testdir.runpytest_subprocess("-vv", "-n2", "-s", "--reuse-db")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
result.stdout.fnmatch_lines(["*PASSED*test_b*"])
result.stdout.fnmatch_lines(["*PASSED*test_c*"])
result.stdout.fnmatch_lines(["*PASSED*test_d*"])
assert db_exists("gw0")
assert db_exists("gw1")
result = django_testdir.runpytest_subprocess("-vv", "-n2", "-s", "--reuse-db")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
result.stdout.fnmatch_lines(["*PASSED*test_b*"])
result.stdout.fnmatch_lines(["*PASSED*test_c*"])
result.stdout.fnmatch_lines(["*PASSED*test_d*"])
result = django_testdir.runpytest_subprocess(
"-vv", "-n2", "-s", "--reuse-db", "--create-db"
)
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
result.stdout.fnmatch_lines(["*PASSED*test_b*"])
result.stdout.fnmatch_lines(["*PASSED*test_c*"])
result.stdout.fnmatch_lines(["*PASSED*test_d*"])
# Cleanup.
drop_database("gw0")
drop_database("gw1")
class TestSqliteWithXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "/tmp/should-not-be-used",
}
}
def test_sqlite_in_memory_used(self, django_testdir):
pytest.importorskip("xdist")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_a():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert 'file:memorydb' in db_name or db_name == ':memory:'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
class TestSqliteWithMultipleDbsAndXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "/tmp/should-not-be-used",
},
"db2": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_sqlite_database_renamed(self, django_testdir):
pytest.importorskip("xdist")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_a():
(conn_db2, conn_default) = sorted(
connections.all(),
key=lambda conn: conn.alias,
)
assert conn_default.vendor == 'sqlite'
db_name = conn_default.creation._get_test_db_name()
                # can_share_in_memory_db was removed in Django 2.1; before
                # that, _get_test_db_name used it.
if getattr(conn_default.features, "can_share_in_memory_db", True):
assert 'file:memorydb' in db_name
else:
assert db_name == ":memory:"
assert conn_db2.vendor == 'sqlite'
db_name = conn_db2.creation._get_test_db_name()
assert db_name.startswith('test_custom_db_name_gw')
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
class TestSqliteWithTox:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_db_with_tox_suffix(self, django_testdir, monkeypatch):
"A test to check that Tox DB suffix works when running in parallel."
monkeypatch.setenv("TOX_PARALLEL_ENV", "py37-django22")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_inner():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert db_name == 'test_custom_db_name_py37-django22'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_inner*PASSED*"])
def test_db_with_empty_tox_suffix(self, django_testdir, monkeypatch):
"A test to check that Tox DB suffix is not used when suffix would be empty."
monkeypatch.setenv("TOX_PARALLEL_ENV", "")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_inner():
(conn,) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert db_name == 'test_custom_db_name'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_inner*PASSED*"])
class TestSqliteWithToxAndXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_db_with_tox_suffix(self, django_testdir, monkeypatch):
"A test to check that both Tox and xdist suffixes work together."
pytest.importorskip("xdist")
monkeypatch.setenv("TOX_PARALLEL_ENV", "py37-django22")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_inner():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert db_name.startswith('test_custom_db_name_py37-django22_gw')
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_inner*"])
class TestSqliteInMemoryWithXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"TEST": {"NAME": ":memory:"},
}
}
def test_sqlite_in_memory_used(self, django_testdir):
pytest.importorskip("xdist")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_a():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert 'file:memorydb' in db_name or db_name == ':memory:'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
@pytest.mark.skipif(
get_django_version() >= (1, 9),
reason=(
"Django 1.9 requires migration and has no concept of initial data fixtures"
),
)
def test_initial_data(django_testdir_initial):
"""Test that initial data gets loaded."""
django_testdir_initial.create_test_module(
"""
import pytest
from .app.models import Item
@pytest.mark.django_db
def test_inner():
assert [x.name for x in Item.objects.all()] \
== ["mark_initial_data"]
"""
)
result = django_testdir_initial.runpytest_subprocess("--tb=short", "-v")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_inner*PASSED*"])
class TestNativeMigrations(object):
""" Tests for Django Migrations """
def test_no_migrations(self, django_testdir):
django_testdir.create_test_module(
"""
import pytest
@pytest.mark.django_db
def test_inner_migrations():
from .app.models import Item
Item.objects.create()
"""
)
migration_file = django_testdir.project_root.join(
"tpkg/app/migrations/0001_initial.py"
)
assert migration_file.isfile()
migration_file.write(
'raise Exception("This should not get imported.")', ensure=True
)
result = django_testdir.runpytest_subprocess(
"--nomigrations", "--tb=short", "-vv", "-s",
)
assert result.ret == 0
assert "Operations to perform:" not in result.stdout.str()
result.stdout.fnmatch_lines(["*= 1 passed in *"])
def test_migrations_run(self, django_testdir):
testdir = django_testdir
testdir.create_test_module(
"""
import pytest
@pytest.mark.django_db
def test_inner_migrations():
from .app.models import Item
Item.objects.create()
"""
)
testdir.create_app_file(
"""
from django.db import migrations, models
def print_it(apps, schema_editor):
print("mark_migrations_run")
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(serialize=False,
auto_created=True,
primary_key=True)),
('name', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
migrations.RunPython(
print_it,
),
]
""",
"migrations/0001_initial.py",
)
result = testdir.runpytest_subprocess("--tb=short", "-v", "-s")
assert result.ret == 0
result.stdout.fnmatch_lines(["*mark_migrations_run*"])
result = testdir.runpytest_subprocess(
"--no-migrations", "--migrations", "--tb=short", "-v", "-s"
)
assert result.ret == 0
result.stdout.fnmatch_lines(["*mark_migrations_run*"])
| apache-2.0 | -8,053,944,852,993,336,000 | 28.804965 | 95 | 0.546817 | false |
lfairchild/PmagPy | programs/di_eq.py | 3 | 1144 | #!/usr/bin/env python
from __future__ import print_function
import sys
import numpy
import pmagpy.pmag as pmag
def main():
"""
NAME
di_eq.py
DESCRIPTION
        Converts (declination, inclination) pairs to (x, y) pairs using the
        equal-area projection.
        NB: process only one hemisphere (upper or lower) at a time; the
        projection does not distinguish up from down.
SYNTAX
di_eq.py [command line options] [< filename]
OPTIONS
-h prints help message and quits
-f FILE, input file
"""
out=""
UP=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
        DI = numpy.loadtxt(file, dtype=float)  # numpy.float was removed in NumPy 1.24
else:
        DI = numpy.loadtxt(sys.stdin, dtype=float)  # read from standard input
Ds=DI.transpose()[0]
Is=DI.transpose()[1]
if len(DI)>1: #array of data
XY=pmag.dimap_V(Ds,Is)
for xy in XY:
print('%f %f'%(xy[0],xy[1]))
else: # single data point
XY=pmag.dimap(Ds,Is)
print('%f %f'%(XY[0],XY[1]))
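# --- Editor's sketch: the usual Schmidt equal-area mapping, shown for
# reference. Whether pmag.dimap uses exactly this normalization is an
# assumption; the helper below is illustrative, not the library code. -------
def _equal_area_sketch(dec, inc):
    """Map one (declination, inclination) pair in degrees to (x, y)."""
    rad = numpy.pi / 180.0
    # points move toward the center of the net as |inclination| grows
    r = numpy.sqrt(1.0 - abs(numpy.sin(inc * rad)))
    return r * numpy.sin(dec * rad), r * numpy.cos(dec * rad)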
if __name__ == "__main__":
main()
| bsd-3-clause | -4,779,181,520,607,800,000 | 24.422222 | 97 | 0.562063 | false |
haroldl/homeworklog | django/contrib/formtools/tests/__init__.py | 151 | 15220 | import os
from django import forms, http
from django.conf import settings
from django.contrib.formtools import preview, wizard, utils
from django.test import TestCase
from django.utils import unittest
success_string = "Done was called!"
class TestFormPreview(preview.FormPreview):
def get_context(self, request, form):
context = super(TestFormPreview, self).get_context(request, form)
context.update({'custom_context': True})
return context
def get_initial(self, request):
return {'field1': 'Works!'}
def done(self, request, cleaned_data):
return http.HttpResponse(success_string)
class TestForm(forms.Form):
field1 = forms.CharField()
field1_ = forms.CharField()
bool1 = forms.BooleanField(required=False)
class UserSecuredFormPreview(TestFormPreview):
"""
    FormPreview with a custom security_hash method
"""
def security_hash(self, request, form):
return "123"
class PreviewTests(TestCase):
urls = 'django.contrib.formtools.tests.urls'
def setUp(self):
# Create a FormPreview instance to share between tests
self.preview = preview.FormPreview(TestForm)
input_template = '<input type="hidden" name="%s" value="%s" />'
self.input = input_template % (self.preview.unused_name('stage'), "%d")
self.test_data = {'field1':u'foo', 'field1_':u'asdf'}
def test_unused_name(self):
"""
        Verifies name mangling to get a unique field name.
"""
self.assertEqual(self.preview.unused_name('field1'), 'field1__')
def test_form_get(self):
"""
Test contrib.formtools.preview form retrieval.
        Use the client library to see if we can successfully retrieve
        the form (mostly testing the ROOT_URLCONF setup
        process). Verify that an additional hidden input field
is created to manage the stage.
"""
response = self.client.get('/test1/')
stage = self.input % 1
self.assertContains(response, stage, 1)
self.assertEqual(response.context['custom_context'], True)
self.assertEqual(response.context['form'].initial, {'field1': 'Works!'})
def test_form_preview(self):
"""
Test contrib.formtools.preview form preview rendering.
Use the client library to POST to the form to see if a preview
is returned. If we do get a form back check that the hidden
value is correctly managing the state of the form.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage': 1})
response = self.client.post('/test1/', self.test_data)
# Check to confirm stage is set to 2 in output form.
stage = self.input % 2
self.assertContains(response, stage, 1)
def test_form_submit(self):
"""
Test contrib.formtools.preview form submittal.
        Use the client library to POST to the form with stage set to 2
        to see if our form's done() method is called. Check first
        without the security hash and verify failure, then retry with the
        security hash and verify success.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage':2})
response = self.client.post('/test1/', self.test_data)
self.assertNotEqual(response.content, success_string)
hash = self.preview.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/test1/', self.test_data)
self.assertEqual(response.content, success_string)
def test_bool_submit(self):
"""
Test contrib.formtools.preview form submittal when form contains:
BooleanField(required=False)
Ticket: #6209 - When an unchecked BooleanField is previewed, the preview
form's hash would be computed with no value for ``bool1``. However, when
the preview form is rendered, the unchecked hidden BooleanField would be
rendered with the string value 'False'. So when the preview form is
resubmitted, the hash would be computed with the value 'False' for
``bool1``. We need to make sure the hashes are the same in both cases.
"""
self.test_data.update({'stage':2})
hash = self.preview.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash':hash, 'bool1':u'False'})
response = self.client.post('/test1/', self.test_data)
self.assertEqual(response.content, success_string)
def test_form_submit_django12_hash(self):
"""
Test contrib.formtools.preview form submittal, using the hash function
used in Django 1.2
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage':2})
response = self.client.post('/test1/', self.test_data)
self.assertNotEqual(response.content, success_string)
hash = utils.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/test1/', self.test_data)
self.assertEqual(response.content, success_string)
def test_form_submit_django12_hash_custom_hash(self):
"""
Test contrib.formtools.preview form submittal, using the hash function
used in Django 1.2 and a custom security_hash method.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage':2})
response = self.client.post('/test2/', self.test_data)
self.assertEqual(response.status_code, 200)
self.assertNotEqual(response.content, success_string)
hash = utils.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/test2/', self.test_data)
self.assertNotEqual(response.content, success_string)
class SecurityHashTests(unittest.TestCase):
def test_textfield_hash(self):
"""
Regression test for #10034: the hash generation function should ignore
leading/trailing whitespace so as to be friendly to broken browsers that
submit it (usually in textareas).
"""
f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
hash1 = utils.security_hash(None, f1)
hash2 = utils.security_hash(None, f2)
self.assertEqual(hash1, hash2)
def test_empty_permitted(self):
"""
Regression test for #10643: the security hash should allow forms with
empty_permitted = True, or forms where data has not changed.
"""
f1 = HashTestBlankForm({})
f2 = HashTestForm({}, empty_permitted=True)
hash1 = utils.security_hash(None, f1)
hash2 = utils.security_hash(None, f2)
self.assertEqual(hash1, hash2)
class FormHmacTests(unittest.TestCase):
"""
Same as SecurityHashTests, but with form_hmac
"""
def test_textfield_hash(self):
"""
Regression test for #10034: the hash generation function should ignore
leading/trailing whitespace so as to be friendly to broken browsers that
submit it (usually in textareas).
"""
f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
hash1 = utils.form_hmac(f1)
hash2 = utils.form_hmac(f2)
self.assertEqual(hash1, hash2)
def test_empty_permitted(self):
"""
Regression test for #10643: the security hash should allow forms with
empty_permitted = True, or forms where data has not changed.
"""
f1 = HashTestBlankForm({})
f2 = HashTestForm({}, empty_permitted=True)
hash1 = utils.form_hmac(f1)
hash2 = utils.form_hmac(f2)
self.assertEqual(hash1, hash2)
class HashTestForm(forms.Form):
name = forms.CharField()
bio = forms.CharField()
class HashTestBlankForm(forms.Form):
name = forms.CharField(required=False)
bio = forms.CharField(required=False)
#
# FormWizard tests
#
class WizardPageOneForm(forms.Form):
field = forms.CharField()
class WizardPageTwoForm(forms.Form):
field = forms.CharField()
class WizardPageTwoAlternativeForm(forms.Form):
field = forms.CharField()
class WizardPageThreeForm(forms.Form):
field = forms.CharField()
class WizardClass(wizard.FormWizard):
def get_template(self, step):
return 'formwizard/wizard.html'
def done(self, request, cleaned_data):
return http.HttpResponse(success_string)
class UserSecuredWizardClass(WizardClass):
"""
    Wizard with a custom security_hash method
"""
def security_hash(self, request, form):
return "123"
class DummyRequest(http.HttpRequest):
def __init__(self, POST=None):
super(DummyRequest, self).__init__()
self.method = POST and "POST" or "GET"
if POST is not None:
self.POST.update(POST)
self._dont_enforce_csrf_checks = True
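# Editor's note: DummyRequest fakes just enough of HttpRequest to drive a
# FormWizard directly -- the HTTP method, an optional POST payload, and the
# flag that bypasses CSRF enforcement -- without going through the test
# client.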
class WizardTests(TestCase):
urls = 'django.contrib.formtools.tests.urls'
def setUp(self):
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
),
)
# Use a known SECRET_KEY to make security_hash tests deterministic
self.old_SECRET_KEY = settings.SECRET_KEY
settings.SECRET_KEY = "123"
def tearDown(self):
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
settings.SECRET_KEY = self.old_SECRET_KEY
def test_step_starts_at_zero(self):
"""
step should be zero for the first form
"""
response = self.client.get('/wizard/')
self.assertEqual(0, response.context['step0'])
def test_step_increments(self):
"""
step should be incremented when we go to the next page
"""
response = self.client.post('/wizard/', {"0-field":"test", "wizard_step":"0"})
self.assertEqual(1, response.context['step0'])
def test_bad_hash(self):
"""
Form should not advance if the hash is missing or bad
"""
response = self.client.post('/wizard/',
{"0-field":"test",
"1-field":"test2",
"wizard_step": "1"})
self.assertEqual(0, response.context['step0'])
def test_good_hash_django12(self):
"""
Form should advance if the hash is present and good, as calculated using
        the Django 1.2 method.
"""
# We are hard-coding a hash value here, but that is OK, since we want to
# ensure that we don't accidentally change the algorithm.
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
"wizard_step": "1"}
response = self.client.post('/wizard/', data)
self.assertEqual(2, response.context['step0'])
def test_good_hash_django12_subclass(self):
"""
        The Django 1.2 method of calculating hashes should *not* be used as a
fallback if the FormWizard subclass has provided their own method
of calculating a hash.
"""
# We are hard-coding a hash value here, but that is OK, since we want to
# ensure that we don't accidentally change the algorithm.
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
"wizard_step": "1"}
response = self.client.post('/wizard2/', data)
self.assertEqual(0, response.context['step0'])
def test_good_hash_current(self):
"""
Form should advance if the hash is present and good, as calculated using
        the current method.
"""
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
response = self.client.post('/wizard/', data)
self.assertEqual(2, response.context['step0'])
def test_14498(self):
"""
Regression test for ticket #14498. All previous steps' forms should be
validated.
"""
reached = [False]
that = self
class WizardWithProcessStep(WizardClass):
def process_step(self, request, form, step):
that.assertTrue(hasattr(form, 'cleaned_data'))
reached[0] = True
wizard = WizardWithProcessStep([WizardPageOneForm,
WizardPageTwoForm,
WizardPageThreeForm])
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
wizard(DummyRequest(POST=data))
self.assertTrue(reached[0])
def test_14576(self):
"""
Regression test for ticket #14576.
The form of the last step is not passed to the done method.
"""
reached = [False]
that = self
class Wizard(WizardClass):
def done(self, request, form_list):
reached[0] = True
that.assertTrue(len(form_list) == 2)
wizard = Wizard([WizardPageOneForm,
WizardPageTwoForm])
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
wizard(DummyRequest(POST=data))
self.assertTrue(reached[0])
def test_15075(self):
"""
Regression test for ticket #15075. Allow modifying wizard's form_list
in process_step.
"""
reached = [False]
that = self
class WizardWithProcessStep(WizardClass):
def process_step(self, request, form, step):
if step == 0:
self.form_list[1] = WizardPageTwoAlternativeForm
if step == 1:
that.assertTrue(isinstance(form, WizardPageTwoAlternativeForm))
reached[0] = True
wizard = WizardWithProcessStep([WizardPageOneForm,
WizardPageTwoForm,
WizardPageThreeForm])
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
wizard(DummyRequest(POST=data))
self.assertTrue(reached[0])
| bsd-3-clause | 8,039,805,062,381,923,000 | 34.811765 | 86 | 0.606176 | false |
VitalPet/account-financial-tools | account_journal_period_close/model/account_period.py | 39 | 2628 | # -*- coding: utf-8 -*-
#
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
# All Rights Reserved
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import orm, fields
class AccountPeriod(orm.Model):
_inherit = 'account.period'
_columns = {
'journal_period_ids': fields.one2many('account.journal.period',
'period_id', 'Journal states'),
}
def add_all_journals(self, cr, uid, ids, context=None):
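        """Create an account.journal.period record, mirroring this period's
        state, for every journal that does not yet have one for this period
        (editor's summary of the method body below)."""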
this = self.browse(cr, uid, ids, context=context)[0]
journal_period_obj = self.pool.get('account.journal.period')
journal_period_ids = journal_period_obj\
.search(cr, uid, [('period_id', '=', this.id)], context=context)
journal_list = []
for journal_period in journal_period_obj.browse(cr,
uid,
journal_period_ids,
context=context):
journal_list.append(journal_period.journal_id.id)
journal_ids = self.pool.get('account.journal')\
.search(cr, uid, [('id', 'not in', journal_list)], context=context)
for journal_id in journal_ids:
journal_period_obj.create(cr,
uid,
{'period_id': this.id,
'journal_id': journal_id,
'state': this.state})
| agpl-3.0 | -4,961,826,770,113,750,000 | 44.310345 | 79 | 0.5879 | false |
aliaspider/RetroArch | tools/vulkan_loader_generator.py | 17 | 6901 | #!/usr/bin/env python3
import sys
import re
def entry_is_device(entry):
first_arg_type = entry[1][1:].split(' ')[0]
device_types = ['VkDevice', 'VkCommandBuffer', 'VkQueue']
return (first_arg_type in device_types) and (entry[0] != 'vkGetDeviceProcAddr')
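# Editor's note: the typedef regex in main() below is written against lines of
# the shape found in vulkan.h (assumed), e.g.
#   typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo*, ...);
# where group 1 captures the entry-point name and group 2 its argument list.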
def main():
pure_entrypoints = []
entrypoints = []
extensions = []
pure_list = ['vkCreateInstance', 'vkEnumerateInstanceExtensionProperties', 'vkEnumerateInstanceLayerProperties']
with open(sys.argv[1], 'r') as f:
header = f.readlines()
for line in header:
m = re.search('typedef \S+.*PFN_([^\)]+)\)(.*);$', line)
if m and m.group(1)[-3:] != 'KHR' and m.group(1)[-3:] != 'EXT' and m.group(2) != '(void)':
entry = m.group(1)
if entry == 'vkGetInstanceProcAddr':
continue
if entry in pure_list:
pure_entrypoints.append((m.group(1), m.group(2)))
else:
entrypoints.append((m.group(1), m.group(2)))
elif m and (m.group(1)[-3:] == 'KHR' or m.group(1)[-3:] == 'EXT') and m.group(2) != '(void)':
entry = m.group(1)
if 'Android' in entry:
continue
if 'Xlib' in entry:
continue
if 'Xcb' in entry:
continue
if 'Win32' in entry:
continue
if 'Wayland' in entry:
continue
if 'Mir' in entry:
continue
extensions.append((m.group(1), m.group(2)))
with open(sys.argv[2], 'w') as f:
print('''
/* This header is autogenerated by vulkan_loader_generator.py */
#ifndef VULKAN_SYMBOL_WRAPPER_H
#define VULKAN_SYMBOL_WRAPPER_H
#define VK_NO_PROTOTYPES
#include <vulkan/vulkan.h>
#ifdef __cplusplus
extern "C" {
#endif
''', file = f)
for entry in pure_entrypoints:
s = entry[0]
print('extern PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('#define {} vulkan_symbol_wrapper_{}'.format(s, s), file = f)
for entry in entrypoints:
s = entry[0]
print('extern PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('#define {} vulkan_symbol_wrapper_{}'.format(s, s), file = f)
for entry in extensions:
s = entry[0]
print('extern PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('#define {} vulkan_symbol_wrapper_{}'.format(s, s), file = f)
print('''
void vulkan_symbol_wrapper_init(PFN_vkGetInstanceProcAddr get_instance_proc_addr);
PFN_vkGetInstanceProcAddr vulkan_symbol_wrapper_instance_proc_addr(void);
VkBool32 vulkan_symbol_wrapper_load_global_symbols(void);
VkBool32 vulkan_symbol_wrapper_load_core_instance_symbols(VkInstance instance);
VkBool32 vulkan_symbol_wrapper_load_core_symbols(VkInstance instance);
VkBool32 vulkan_symbol_wrapper_load_core_device_symbols(VkDevice device);
VkBool32 vulkan_symbol_wrapper_load_instance_symbol(VkInstance instance, const char *name, PFN_vkVoidFunction *ppSymbol);
VkBool32 vulkan_symbol_wrapper_load_device_symbol(VkDevice device, const char *name, PFN_vkVoidFunction *ppSymbol);
#define VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(instance, name, pfn) vulkan_symbol_wrapper_load_instance_symbol(instance, name, (PFN_vkVoidFunction*) &(pfn))
#define VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_EXTENSION_SYMBOL(instance, name) vulkan_symbol_wrapper_load_instance_symbol(instance, #name, (PFN_vkVoidFunction*) & name)
#define VULKAN_SYMBOL_WRAPPER_LOAD_DEVICE_SYMBOL(device, name, pfn) vulkan_symbol_wrapper_load_device_symbol(device, name, (PFN_vkVoidFunction*) &(pfn))
#define VULKAN_SYMBOL_WRAPPER_LOAD_DEVICE_EXTENSION_SYMBOL(device, name) vulkan_symbol_wrapper_load_device_symbol(device, #name, (PFN_vkVoidFunction*) & name)
''', file = f)
print('''
#ifdef __cplusplus
}
#endif
#endif
''', file = f)
with open(sys.argv[3], 'w') as f:
print('''
/* This header is autogenerated by vulkan_loader_generator.py */
#include "vulkan_symbol_wrapper.h"
''', file = f)
for entry in pure_entrypoints:
s = entry[0]
print('PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
for entry in entrypoints:
s = entry[0]
print('PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
for entry in extensions:
s = entry[0]
print('PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('''
static PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
void vulkan_symbol_wrapper_init(PFN_vkGetInstanceProcAddr get_instance_proc_addr)
{
GetInstanceProcAddr = get_instance_proc_addr;
}
PFN_vkGetInstanceProcAddr vulkan_symbol_wrapper_instance_proc_addr(void)
{
return GetInstanceProcAddr;
}
''', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_instance_symbol(VkInstance instance, const char *name, PFN_vkVoidFunction *ppSymbol)
{
*ppSymbol = GetInstanceProcAddr(instance, name);
return *ppSymbol != NULL;
}''', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_device_symbol(VkDevice device, const char *name, PFN_vkVoidFunction *ppSymbol)
{
*ppSymbol = vkGetDeviceProcAddr(device, name);
return *ppSymbol != NULL;
}''', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_global_symbols(void)
{''', file = f)
for pure in pure_entrypoints:
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(NULL, "{}", {})) return VK_FALSE;'.format(pure[0], pure[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_core_symbols(VkInstance instance)
{''', file = f)
for entry in entrypoints:
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(instance, "{}", {})) return VK_FALSE;'.format(entry[0], entry[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_core_instance_symbols(VkInstance instance)
{''', file = f)
for entry in entrypoints:
if not entry_is_device(entry):
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(instance, "{}", {})) return VK_FALSE;'.format(entry[0], entry[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_core_device_symbols(VkDevice device)
{''', file = f)
for entry in entrypoints:
if entry_is_device(entry):
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_DEVICE_SYMBOL(device, "{}", {})) return VK_FALSE;'.format(entry[0], entry[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
if __name__ == '__main__':
main()
| gpl-3.0 | -831,942,777,880,563,100 | 38.890173 | 166 | 0.615853 | false |
mattclay/ansible | test/units/modules/test_unarchive.py | 11 | 2342 | from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.modules.unarchive import ZipArchive, TgzArchive
class AnsibleModuleExit(Exception):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class ExitJson(AnsibleModuleExit):
pass
class FailJson(AnsibleModuleExit):
pass
@pytest.fixture
def fake_ansible_module():
return FakeAnsibleModule()
class FakeAnsibleModule:
def __init__(self):
self.params = {}
self.tmpdir = None
def exit_json(self, *args, **kwargs):
raise ExitJson(*args, **kwargs)
def fail_json(self, *args, **kwargs):
raise FailJson(*args, **kwargs)
class TestCaseZipArchive:
@pytest.mark.parametrize(
'side_effect, expected_reason', (
([ValueError, '/bin/zipinfo'], "Unable to find required 'unzip'"),
(ValueError, "Unable to find required 'unzip' or 'zipinfo'"),
)
)
def test_no_zip_zipinfo_binary(self, mocker, fake_ansible_module, side_effect, expected_reason):
mocker.patch("ansible.modules.unarchive.get_bin_path", side_effect=side_effect)
fake_ansible_module.params = {
"extra_opts": "",
"exclude": "",
"include": "",
}
z = ZipArchive(
src="",
b_dest="",
file_args="",
module=fake_ansible_module,
)
can_handle, reason = z.can_handle_archive()
assert can_handle is False
assert expected_reason in reason
assert z.cmd_path is None
class TestCaseTgzArchive:
def test_no_tar_binary(self, mocker, fake_ansible_module):
mocker.patch("ansible.modules.unarchive.get_bin_path", side_effect=ValueError)
fake_ansible_module.params = {
"extra_opts": "",
"exclude": "",
"include": "",
}
fake_ansible_module.check_mode = False
t = TgzArchive(
src="",
b_dest="",
file_args="",
module=fake_ansible_module,
)
can_handle, reason = t.can_handle_archive()
assert can_handle is False
assert 'Unable to find required' in reason
assert t.cmd_path is None
assert t.tar_type is None
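# Editor's note: with unittest.mock, a list passed as side_effect is consumed
# one element per call -- e.g. side_effect=[ValueError, '/bin/zipinfo'] makes
# the first get_bin_path call raise and the second return a path, which is
# how the parametrized ZipArchive cases above separate "unzip missing" from
# "unzip and zipinfo both missing".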
| gpl-3.0 | 7,708,063,336,122,327,000 | 24.736264 | 100 | 0.584116 | false |
steventimberman/masterDebater | venv/lib/python2.7/site-packages/django/utils/dateparse.py | 44 | 4235 | """Functions to parse datetime objects."""
# We're using regular expressions rather than time.strptime because:
# - They provide both validation and parsing.
# - They're more flexible for datetimes.
# - The date/datetime/time constructors produce friendlier error messages.
import datetime
import re
from django.utils import six
from django.utils.timezone import get_fixed_timezone, utc
date_re = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$'
)
time_re = re.compile(
r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
)
datetime_re = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)
standard_duration_re = re.compile(
r'^'
r'(?:(?P<days>-?\d+) (days?, )?)?'
r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?'
r'(?:(?P<minutes>-?\d+):)?'
r'(?P<seconds>-?\d+)'
r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
r'$'
)
# Support the sections of ISO 8601 date representation that are accepted by
# timedelta
iso8601_duration_re = re.compile(
r'^(?P<sign>[-+]?)'
r'P'
r'(?:(?P<days>\d+(.\d+)?)D)?'
r'(?:T'
r'(?:(?P<hours>\d+(.\d+)?)H)?'
r'(?:(?P<minutes>\d+(.\d+)?)M)?'
r'(?:(?P<seconds>\d+(.\d+)?)S)?'
r')?'
r'$'
)
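# Editor's note: example strings accepted by the regexes above (derived by
# reading the patterns): '2016-02-29', '10:20:30.400',
# '2016-02-29 10:20:30+02:00', '3 days, 0:00:05', and 'P4DT1H15M20S'.  A
# runnable self-check appears at the end of this module.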
def parse_date(value):
"""Parses a string and return a datetime.date.
Raises ValueError if the input is well formatted but not a valid date.
Returns None if the input isn't well formatted.
"""
match = date_re.match(value)
if match:
kw = {k: int(v) for k, v in six.iteritems(match.groupdict())}
return datetime.date(**kw)
def parse_time(value):
"""Parses a string and return a datetime.time.
This function doesn't support time zone offsets.
Raises ValueError if the input is well formatted but not a valid time.
Returns None if the input isn't well formatted, in particular if it
contains an offset.
"""
match = time_re.match(value)
if match:
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
kw = {k: int(v) for k, v in six.iteritems(kw) if v is not None}
return datetime.time(**kw)
def parse_datetime(value):
"""Parses a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raises ValueError if the input is well formatted but not a valid datetime.
Returns None if the input isn't well formatted.
"""
match = datetime_re.match(value)
if match:
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
tzinfo = kw.pop('tzinfo')
if tzinfo == 'Z':
tzinfo = utc
elif tzinfo is not None:
offset_mins = int(tzinfo[-2:]) if len(tzinfo) > 3 else 0
offset = 60 * int(tzinfo[1:3]) + offset_mins
if tzinfo[0] == '-':
offset = -offset
tzinfo = get_fixed_timezone(offset)
kw = {k: int(v) for k, v in six.iteritems(kw) if v is not None}
kw['tzinfo'] = tzinfo
return datetime.datetime(**kw)
def parse_duration(value):
"""Parses a duration string and returns a datetime.timedelta.
The preferred format for durations in Django is '%d %H:%M:%S.%f'.
Also supports ISO 8601 representation.
"""
match = standard_duration_re.match(value)
if not match:
match = iso8601_duration_re.match(value)
if match:
kw = match.groupdict()
sign = -1 if kw.pop('sign', '+') == '-' else 1
if kw.get('microseconds'):
kw['microseconds'] = kw['microseconds'].ljust(6, '0')
if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
kw['microseconds'] = '-' + kw['microseconds']
kw = {k: float(v) for k, v in six.iteritems(kw) if v is not None}
return sign * datetime.timedelta(**kw)
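# --- Editor's self-check (illustrative addition, not part of Django) --------
# Quick demonstration of the parsers above; the expected values follow
# directly from the regexes defined in this module.
if __name__ == '__main__':
    assert parse_date('2016-02-29') == datetime.date(2016, 2, 29)
    assert parse_time('10:20:30.400') == datetime.time(10, 20, 30, 400000)
    assert parse_datetime('2016-02-29 10:20:30Z').tzinfo is utc
    assert parse_duration('3 days, 0:00:05') == datetime.timedelta(
        days=3, seconds=5)
    assert parse_duration('P4DT1H') == datetime.timedelta(days=4, hours=1)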
| mit | 7,183,141,940,545,935,000 | 31.328244 | 90 | 0.57686 | false |
booto/dolphin | Externals/fmt/support/manage.py | 10 | 8973 | #!/usr/bin/env python
"""Manage site and releases.
Usage:
manage.py release [<branch>]
manage.py site
"""
from __future__ import print_function
import datetime, docopt, errno, fileinput, json, os
import re, requests, shutil, sys, tempfile
from contextlib import contextmanager
from distutils.version import LooseVersion
from subprocess import check_call
class Git:
def __init__(self, dir):
self.dir = dir
def call(self, method, args, **kwargs):
return check_call(['git', method] + list(args), **kwargs)
def add(self, *args):
return self.call('add', args, cwd=self.dir)
def checkout(self, *args):
return self.call('checkout', args, cwd=self.dir)
def clean(self, *args):
return self.call('clean', args, cwd=self.dir)
def clone(self, *args):
return self.call('clone', list(args) + [self.dir])
def commit(self, *args):
return self.call('commit', args, cwd=self.dir)
def pull(self, *args):
return self.call('pull', args, cwd=self.dir)
def push(self, *args):
return self.call('push', args, cwd=self.dir)
def reset(self, *args):
return self.call('reset', args, cwd=self.dir)
def update(self, *args):
clone = not os.path.exists(self.dir)
if clone:
self.clone(*args)
return clone
def clean_checkout(repo, branch):
repo.clean('-f', '-d')
repo.reset('--hard')
repo.checkout(branch)
class Runner:
def __init__(self, cwd):
self.cwd = cwd
def __call__(self, *args, **kwargs):
kwargs['cwd'] = kwargs.get('cwd', self.cwd)
check_call(args, **kwargs)
def create_build_env():
"""Create a build environment."""
class Env:
pass
env = Env()
# Import the documentation build module.
env.fmt_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(env.fmt_dir, 'doc'))
import build
env.build_dir = 'build'
env.versions = build.versions
# Virtualenv and repos are cached to speed up builds.
build.create_build_env(os.path.join(env.build_dir, 'virtualenv'))
env.fmt_repo = Git(os.path.join(env.build_dir, 'fmt'))
return env
@contextmanager
def rewrite(filename):
class Buffer:
pass
buffer = Buffer()
if not os.path.exists(filename):
buffer.data = ''
yield buffer
return
with open(filename) as f:
buffer.data = f.read()
yield buffer
with open(filename, 'w') as f:
f.write(buffer.data)
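# Editor's usage sketch (illustrative): `rewrite` yields a buffer whose .data
# attribute holds the file contents and is written back on exit, e.g.
#
#   with rewrite('CMakeLists.txt') as cmakelists:
#       cmakelists.data = cmakelists.data.replace('0.0.0', version)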
fmt_repo_url = '[email protected]:fmtlib/fmt'
def update_site(env):
env.fmt_repo.update(fmt_repo_url)
doc_repo = Git(os.path.join(env.build_dir, 'fmtlib.github.io'))
doc_repo.update('[email protected]:fmtlib/fmtlib.github.io')
for version in env.versions:
clean_checkout(env.fmt_repo, version)
target_doc_dir = os.path.join(env.fmt_repo.dir, 'doc')
# Remove the old theme.
for entry in os.listdir(target_doc_dir):
path = os.path.join(target_doc_dir, entry)
if os.path.isdir(path):
shutil.rmtree(path)
# Copy the new theme.
for entry in ['_static', '_templates', 'basic-bootstrap', 'bootstrap',
'conf.py', 'fmt.less']:
src = os.path.join(env.fmt_dir, 'doc', entry)
dst = os.path.join(target_doc_dir, entry)
copy = shutil.copytree if os.path.isdir(src) else shutil.copyfile
copy(src, dst)
# Rename index to contents.
contents = os.path.join(target_doc_dir, 'contents.rst')
if not os.path.exists(contents):
os.rename(os.path.join(target_doc_dir, 'index.rst'), contents)
# Fix issues in reference.rst/api.rst.
for filename in ['reference.rst', 'api.rst']:
pattern = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.M)
with rewrite(os.path.join(target_doc_dir, filename)) as b:
b.data = b.data.replace('std::ostream &', 'std::ostream&')
b.data = re.sub(pattern, r'doxygenfunction:: \1(int)', b.data)
b.data = b.data.replace('std::FILE*', 'std::FILE *')
b.data = b.data.replace('unsigned int', 'unsigned')
b.data = b.data.replace('operator""_', 'operator"" _')
# Fix a broken link in index.rst.
index = os.path.join(target_doc_dir, 'index.rst')
with rewrite(index) as b:
b.data = b.data.replace(
'doc/latest/index.html#format-string-syntax', 'syntax.html')
# Build the docs.
html_dir = os.path.join(env.build_dir, 'html')
if os.path.exists(html_dir):
shutil.rmtree(html_dir)
include_dir = env.fmt_repo.dir
if LooseVersion(version) >= LooseVersion('5.0.0'):
include_dir = os.path.join(include_dir, 'include', 'fmt')
elif LooseVersion(version) >= LooseVersion('3.0.0'):
include_dir = os.path.join(include_dir, 'fmt')
import build
build.build_docs(version, doc_dir=target_doc_dir,
include_dir=include_dir, work_dir=env.build_dir)
shutil.rmtree(os.path.join(html_dir, '.doctrees'))
# Create symlinks for older versions.
for link, target in {'index': 'contents', 'api': 'reference'}.items():
link = os.path.join(html_dir, link) + '.html'
target += '.html'
if os.path.exists(os.path.join(html_dir, target)) and \
not os.path.exists(link):
os.symlink(target, link)
# Copy docs to the website.
version_doc_dir = os.path.join(doc_repo.dir, version)
try:
shutil.rmtree(version_doc_dir)
except OSError as e:
if e.errno != errno.ENOENT:
raise
shutil.move(html_dir, version_doc_dir)
def release(args):
env = create_build_env()
fmt_repo = env.fmt_repo
branch = args.get('<branch>')
if branch is None:
branch = 'master'
if not fmt_repo.update('-b', branch, fmt_repo_url):
clean_checkout(fmt_repo, branch)
# Convert changelog from RST to GitHub-flavored Markdown and get the
# version.
changelog = 'ChangeLog.rst'
changelog_path = os.path.join(fmt_repo.dir, changelog)
import rst2md
changes, version = rst2md.convert(changelog_path)
cmakelists = 'CMakeLists.txt'
for line in fileinput.input(os.path.join(fmt_repo.dir, cmakelists),
inplace=True):
prefix = 'set(FMT_VERSION '
if line.startswith(prefix):
line = prefix + version + ')\n'
sys.stdout.write(line)
# Update the version in the changelog.
title_len = 0
for line in fileinput.input(changelog_path, inplace=True):
if line.decode('utf-8').startswith(version + ' - TBD'):
line = version + ' - ' + datetime.date.today().isoformat()
title_len = len(line)
line += '\n'
elif title_len:
line = '-' * title_len + '\n'
title_len = 0
sys.stdout.write(line)
# Add the version to the build script.
script = os.path.join('doc', 'build.py')
script_path = os.path.join(fmt_repo.dir, script)
for line in fileinput.input(script_path, inplace=True):
m = re.match(r'( *versions = )\[(.+)\]', line)
if m:
line = '{}[{}, \'{}\']\n'.format(m.group(1), m.group(2), version)
sys.stdout.write(line)
fmt_repo.checkout('-B', 'release')
fmt_repo.add(changelog, cmakelists, script)
fmt_repo.commit('-m', 'Update version')
# Build the docs and package.
run = Runner(fmt_repo.dir)
run('cmake', '.')
run('make', 'doc', 'package_source')
update_site(env)
# Create a release on GitHub.
fmt_repo.push('origin', 'release')
params = {'access_token': os.getenv('FMT_TOKEN')}
r = requests.post('https://api.github.com/repos/fmtlib/fmt/releases',
params=params,
data=json.dumps({'tag_name': version,
'target_commitish': 'release',
'body': changes, 'draft': True}))
if r.status_code != 201:
raise Exception('Failed to create a release ' + str(r))
id = r.json()['id']
uploads_url = 'https://uploads.github.com/repos/fmtlib/fmt/releases'
package = 'fmt-{}.zip'.format(version)
r = requests.post(
'{}/{}/assets?name={}'.format(uploads_url, id, package),
headers={'Content-Type': 'application/zip'},
params=params, data=open('build/fmt/' + package, 'rb'))
if r.status_code != 201:
raise Exception('Failed to upload an asset ' + str(r))
if __name__ == '__main__':
args = docopt.docopt(__doc__)
if args.get('release'):
release(args)
elif args.get('site'):
update_site(create_build_env())
| gpl-2.0 | 6,713,448,280,147,638,000 | 33.37931 | 79 | 0.579405 | false |
ahb0327/intellij-community | python/lib/Lib/site-packages/django/contrib/contenttypes/models.py | 307 | 4052 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
class ContentTypeManager(models.Manager):
# Cache to avoid re-looking up ContentType objects all over the place.
# This cache is shared by all the get_for_* methods.
_cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self.__class__._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
return ct
def get_for_model(self, model):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = model._meta
while opts.proxy:
model = opts.proxy_for_model
opts = model._meta
key = (opts.app_label, opts.object_name.lower())
try:
ct = self.__class__._cache[self.db][key]
except KeyError:
# Load or create the ContentType entry. The smart_unicode() is
# needed around opts.verbose_name_raw because name_raw might be a
# django.utils.functional.__proxy__ object.
ct, created = self.get_or_create(
app_label = opts.app_label,
model = opts.object_name.lower(),
defaults = {'name': smart_unicode(opts.verbose_name_raw)},
)
self._add_to_cache(self.db, ct)
return ct
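    # Illustrative (Article is a hypothetical model in an app labelled
    # "news"): ContentType.objects.get_for_model(Article) returns a cached
    # ContentType whose (app_label, model) is ('news', 'article').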
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_by_id).
"""
try:
ct = self.__class__._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self.__class__._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
model = ct.model_class()
key = (model._meta.app_label, model._meta.object_name.lower())
self.__class__._cache.setdefault(using, {})[key] = ct
self.__class__._cache.setdefault(using, {})[ct.id] = ct
class ContentType(models.Model):
name = models.CharField(max_length=100)
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
ordering = ('name',)
unique_together = (('app_label', 'model'),)
def __unicode__(self):
return self.name
def model_class(self):
"Returns the Python model class for this type of content."
from django.db import models
return models.get_model(self.app_label, self.model)
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectDoesNotExist exception, if thrown, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._default_manager.using(self._state.db).get(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
| apache-2.0 | -2,538,725,684,188,147,000 | 37.590476 | 86 | 0.607601 | false |
RhodriM/rugby_rankings_py | tests/test_ratings_input.py | 1 | 1207 | import unittest
import rugby_rankings.ratings_input
class TestRatingsInput(unittest.TestCase):
def test_construct(self):
inputObj = rugby_rankings.ratings_input.RatingsInput(0.0, 0.0, 0, 0)
self.assertTrue(
isinstance(inputObj, rugby_rankings.ratings_input.RatingsInput)
)
inputObj = rugby_rankings.ratings_input.RatingsInput(
0.0, 0.0, 0, 0, True, True
)
self.assertTrue(
isinstance(inputObj, rugby_rankings.ratings_input.RatingsInput)
)
def test_types(self):
inputObj = rugby_rankings.ratings_input.RatingsInput(
1.111, 90.199, 3, 2.2
)
self.assertTrue(
isinstance(inputObj.get_rounded_team_a_rating(), float)
)
self.assertEqual(inputObj.get_rounded_team_a_rating(), 1.11)
self.assertEqual(inputObj.get_rounded_team_b_rating(), 90.20)
inputObj = rugby_rankings.ratings_input.RatingsInput(
1.111, 90.199, 3, 2.2, True, True
)
self.assertEqual(inputObj.is_rugby_world_cup, True)
self.assertEqual(inputObj.is_neutral_venue, True)
if __name__ == "__main__":
unittest.main()
| mit | -6,626,536,389,736,616,000 | 25.822222 | 76 | 0.618061 | false |
bverburg/CouchPotatoServer | couchpotato/core/notifications/pushbullet.py | 32 | 3088 | import base64
import json
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
log = CPLog(__name__)
autoload = 'Pushbullet'
class Pushbullet(Notification):
url = 'https://api.pushbullet.com/v2/%s'
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
devices = self.getDevices()
if devices is None:
return False
# Get all the device IDs linked to this user
if not len(devices):
devices = [None]
successful = 0
for device in devices:
response = self.request(
'pushes',
cache = False,
device_iden = device,
type = 'note',
title = self.default_title,
body = toUnicode(message)
)
if response:
successful += 1
else:
log.error('Unable to push notification to Pushbullet device with ID %s' % device)
return successful == len(devices)
def getDevices(self):
return splitString(self.conf('devices'))
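    # Illustrative (device IDs are made up): a 'devices' setting of
    # "abc123,def456" yields ['abc123', 'def456']; an empty setting yields
    # [] and the notification is sent to all devices.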
def request(self, method, cache = True, **kwargs):
try:
base64string = base64.encodestring('%s:' % self.conf('api_key'))[:-1]
headers = {
"Authorization": "Basic %s" % base64string
}
if cache:
return self.getJsonData(self.url % method, headers = headers, data = kwargs)
else:
data = self.urlopen(self.url % method, headers = headers, data = kwargs)
return json.loads(data)
except Exception as ex:
log.error('Pushbullet request failed')
log.debug(ex)
return None
config = [{
'name': 'pushbullet',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'pushbullet',
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
},
{
'name': 'api_key',
'label': 'Access Token',
'description': 'Can be found on <a href="https://www.pushbullet.com/account" target="_blank">Account Settings</a>',
},
{
'name': 'devices',
'default': '',
'advanced': True,
'description': 'IDs of devices to send notifications to, empty = all devices'
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}
],
}]
| gpl-3.0 | 2,234,443,901,645,350,400 | 28.132075 | 135 | 0.479598 | false |
lhfei/spark-in-action | spark-2.x/src/main/python/wordcount.py | 1 | 1501 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
from operator import add
from pyspark.sql import SparkSession
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: wordcount <file>", file=sys.stderr)
sys.exit(-1)
spark = SparkSession\
.builder\
.appName("PythonWordCount")\
.getOrCreate()
lines = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
counts = lines.flatMap(lambda x: x.split(' ')) \
.map(lambda x: (x, 1)) \
.reduceByKey(add)
output = counts.collect()
for (word, count) in output:
print("%s: %i" % (word, count))
spark.stop()
| apache-2.0 | -5,112,120,671,417,800,000 | 32.113636 | 74 | 0.655563 | false |
ismailsunni/inasafe | safe/common/parameters/test/example.py | 6 | 1728 | # coding=utf-8
"""Example usage of custom parameters."""
import sys
from safe.definitions.constants import INASAFE_TEST
from safe.test.utilities import get_qgis_app
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app(qsetting=INASAFE_TEST)
from qgis.PyQt.QtWidgets import QApplication, QWidget, QGridLayout # NOQA
from parameters.qt_widgets.parameter_container import (
ParameterContainer) # NOQA
from safe.common.parameters.default_value_parameter import (
DefaultValueParameter) # NOQA
from safe.common.parameters.default_value_parameter_widget import (
DefaultValueParameterWidget) # NOQA
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "[email protected]"
__revision__ = '$Format:%H$'
def main():
"""Main function to run the example."""
app = QApplication([])
default_value_parameter = DefaultValueParameter()
default_value_parameter.name = 'Value parameter'
default_value_parameter.help_text = 'Help text'
default_value_parameter.description = 'Description'
default_value_parameter.labels = [
'Setting', 'Do not report', 'Custom']
default_value_parameter.options = [0, 1, None]
parameters = [
default_value_parameter
]
extra_parameters = [
(DefaultValueParameter, DefaultValueParameterWidget)
]
parameter_container = ParameterContainer(
parameters, extra_parameters=extra_parameters)
parameter_container.setup_ui()
widget = QWidget()
layout = QGridLayout()
layout.addWidget(parameter_container)
widget.setLayout(layout)
widget.setGeometry(0, 0, 500, 500)
widget.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| gpl-3.0 | -5,598,390,315,238,141,000 | 25.584615 | 74 | 0.697917 | false |
liam2/liam2 | tools/simulation_txt2yaml.py | 1 | 31398 | from __future__ import print_function
import csv
import itertools
from itertools import izip
import operator
import os
from os import path
import sys
import yaml
from expr import *
from align_txt2csv import convert_txt_align
# TODO
# - filter fields: output only those which are actually used (comment out
# the rest)
# - convert "leaf" expression literals to the type of the variable being
# defined (only absolutely needed for bool)
# - use "abfrage" to determine fields
# ? remove useless bounds (eg age)
# ? implement choose for top-level filter
# ? build variable dependency tree and enclose any field which is used before it
# is computed in a lag function
# ? generic if -> choose transformation:
# if(c1, v1, if(c2, v2, if(c3, v3, if(c4, v4, 0))))
# ->
# choose(c1, v1,
# c2, v2,
# c3, v3,
# c4, v4)
# ? include original comments
# ? extract common condition parts in a filter to the choose function?
# ? implement between
# TODO manually:
# - if(p_yob=2003-60, MINR[2003], ...
# ->
# if((yob >= 1943) & (yob <= 2000), MINR[yob + 60], 0)
# - divorce function
# - KillPerson: what is not handled by normal "kill" function
def load_renames(fpath):
if fpath is not None:
with open(fpath) as f:
return yaml.load(f)
else:
return {}
def load_txt_def(input_path, name_idx):
with open(input_path, "rb") as f:
lines = list(csv.reader(f, delimiter='\t'))
firstline = lines[0]
colnames = firstline[:name_idx] + firstline[name_idx+1:]
current_obj = None
data = {}
for line in lines[1:]:
if not line:
continue
if all(not cell for cell in line):
continue
name, line = line[name_idx], line[:name_idx] + line[name_idx+1:]
if name.startswith('first_'):
current_obj = name[6:]
data[current_obj] = {}
print("reading '%s' variables" % current_obj)
elif name.startswith('end_'):
current_obj = None
print("done")
elif current_obj is not None:
data[current_obj][name] = dict(zip(colnames, line))
return data
def load_links(input_path):
return load_txt_def(input_path, 0)['linkage']
def load_fields(input_path):
data = load_txt_def(input_path, 1)
typemap = {
'char': float, # should be int but "char" is used all over the place for
# anything
'int': int,
'int1000': float
}
print("determining field types...")
for obj_type, obj_fields in data.iteritems():
print(" *", obj_type)
for name, fdef in obj_fields.iteritems():
real_dtype = typemap.get(fdef['Type'])
if real_dtype is None:
print("Warning: unknown type '%s', using int" % fdef['Type'])
real_dtype = int
ncateg = int(fdef['nCategory'])
if ncateg == 2:
assert fdef['Categories'] == "[0,1]", \
"field %s has 2 categories that are != from [0, 1]" \
% name
real_dtype = bool
elif ncateg > 2:
# TODO: import the list of possible values
real_dtype = int
obj_fields[name] = {'type': real_dtype}
print(" done")
return data
def transpose_table(data):
numrows = len(data)
numcols = len(data[0])
for rownum, row in enumerate(data, 1):
if len(row) != numcols:
raise Exception('line %d has %d columns instead of %d !'
% (rownum, len(row), numcols))
return [[data[rownum][colnum] for rownum in range(numrows)]
for colnum in range(numcols)]
def transpose_and_convert(lines):
transposed = transpose_table(lines)
names = transposed.pop(0)
funcs = [float for _ in range(len(lines))]
funcs[0] = int
converted = [tuple([func(cell.replace('--', 'NaN'))
for cell, func in izip(row, funcs)])
for row in transposed]
return names, converted
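# Illustrative sketch:
#     transpose_and_convert([['YEAR', '1955', '1956'],
#                            ['MINR', '1.0', '2.0']])
# returns (['YEAR', 'MINR'], [(1955, 1.0), (1956, 2.0)]): the first column
# becomes the header row, remaining cells are converted to int/float, and
# '--' is read as NaN.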
def load_av_globals(input_path):
# macro.av is a csv with tabs OR spaces as separator and a header of 1 line
with open(input_path, "rb") as f:
lines = [line.split() for line in f.read().splitlines()]
# eg: "sample 1955Y1 2060Y1"
firstline = lines.pop(0)
assert firstline[0] == "sample"
def year_str2int(s):
return int(s.replace('Y1', ''))
start, stop = year_str2int(firstline[1]), year_str2int(firstline[2])
num_periods = stop - start + 1
names, data = transpose_and_convert(lines)
assert names[0] == 'YEAR'
# rename YEAR to period
names[0] = 'period'
assert len(data) == num_periods
return (start, stop), names, data
def load_agespine(input_path):
# read process names until "end_spine"
with open(input_path, "rb") as f:
lines = [line.strip() for line in f.read().splitlines() if line]
# lines are of the form "regr_p_xxx" or "tran_p_xxx"
return list(itertools.takewhile(lambda l: l != 'end_spine', lines))
# ================================
class TextImporter(object):
keywords = None
def __init__(self, input_path, fields, obj_type, renames):
self.input_path = input_path
self.fields = fields
self.obj_type = obj_type
self.renames = renames
self.current_condition = None
self.conditions = None
def unimplemented(self, pos, line, lines):
print("unimplemented keyword: %s" % line[0])
return pos + 1, None
def skipline(self, pos, line, lines):
return pos + 1, None
def skipifzero(self, pos, line, lines):
if len(line) > 1 and line[1] and float(line[1]):
print("unimplemented keyword:", line[0])
return pos + 1, None
def readvalues(self, *args):
def f(pos, line, lines):
values = [func(str_value)
for func, str_value in zip(args, line[1:])
if func is not None]
if len(args) == 1:
empty = {str: '', int: 0}
values = values[0] if values else empty[args[0]]
return pos + 1, values
return f
def end(self, *args):
raise StopIteration
def numorcond(self, pos, line, lines):
# int=m, + skip line + m * (skipword, int=numand, +
# numand * (str, float=min, float=max))
num_or = int(line[1])
pos += 2
or_conds = []
for i in range(num_or):
line = lines[pos]
num_and = int(line[1]) if len(line) >= 1 else 0
and_conds = [(line[2 + j * 3],
float(line[3 + j * 3]),
float(line[4 + j * 3]))
for j in range(num_and)]
or_conds.append(and_conds)
pos += 1
self.conditions[self.current_condition] = {'condition': or_conds}
# We return self.conditions for each condition. It will be overwritten
# by later conditions if any, but this ensures they are stored even
# if there are actually less conditions than declared.
return pos, self.conditions
# return pos, res
def condition(self, pos, line, lines):
self.current_condition = int(line[1]) - 1
return pos + 1, None
def numconditions(self, pos, line, lines):
self.conditions = [None] * int(line[1])
return pos + 1, None
def load_txt_file(self):
# regr_p_alive_f.txt -> alive_f:
fpath, fname = path.split(self.input_path)
basename, ext = path.splitext(fname)
chunks = basename.split('_', 2)
assert len(chunks[1]) == 1
del chunks[1]
name = '_'.join(chunks)
with open(self.input_path, "rb") as f:
lines = list(csv.reader(f, delimiter='\t'))
values = {'name': name}
pos = 0
while pos < len(lines):
line = lines[pos]
if not line:
pos += 1
continue
keyword = line[0].lower()
if not keyword or keyword.isspace():
pos += 1
continue
f = self.keywords.get(keyword)
if f is None:
print("unknown keyword: '%s'" % keyword)
pos += 1
continue
try:
pos, value = f(pos, line, lines)
if value is not None:
values[keyword] = value
except StopIteration:
break
return values
# ------------------------
# transform to expression
# ------------------------
def var_type(self, name):
var_def = self.fields.get(name)
if var_def is None:
print("Warning: field '%s' not found (assuming int) !" % name)
return int
else:
return var_def['type']
def var_name(self, name):
assert name[1] == '_'
name = name[2:]
return self.renames.get(self.obj_type, {}).get(name, name)
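    # Illustrative: var_name('p_age') -> 'age', or its configured rename.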
def simplecond2expr(self, cond):
name, minvalue, maxvalue = cond
v = Variable(self.var_name(name), self.var_type(name))
return (v >= minvalue) & (v <= maxvalue)
def andcond2expr(self, andconditions):
if andconditions:
expr = self.simplecond2expr(andconditions[0])
for andcond in andconditions[1:]:
expr = expr & self.simplecond2expr(andcond)
return expr
else:
return True
def condition2expr(self, condition):
assert condition
expr = self.andcond2expr(condition[0])
for orcond in condition[1:]:
if orcond:
expr = expr | self.andcond2expr(orcond)
return expr
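    # Illustrative: the condition [[('p_age', 50.0, 120.0)]] becomes
    # (age >= 50.0) & (age <= 120.0) once the 'p_' prefix is stripped
    # (assuming no rename is configured for this field).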
def import_file(self):
data = self.load_txt_file()
predictor, expr = self.data2expr(data)
return data['name'], predictor, expr
class RegressionImporter(TextImporter):
    def __init__(self, input_path, fields, obj_type, renames):
        TextImporter.__init__(self, input_path, fields, obj_type, renames)
# cfr readdyparam.cpp
self.keywords = {
'file description:': self.readvalues(str),
# Time est toujours = 1 sauf dans trap_p_coeduach.txt
'time': self.skipline, #readvalues(int),
'align': self.readvalues(int),
'predictor': self.readvalues(str, int, int, int),
'numconditions': self.numconditions,
'macro_align_multiple': self.unimplemented, #float,
'mmkt_cond_var': self.unimplemented, #str,
'mmkt_gender_var': self.unimplemented, #str,
'macro_align': self.unimplemented, #float,
'macro_align_relate': self.unimplemented, #str,
'macro_align_type': self.unimplemented, #str,
'ntransformations': self.unimplemented, #int + n * (str, int)
'marrmkt': self.unimplemented, #int + n * (str, str -- which is then parsed)
'condition': self.condition,
'endoffile': self.end,
'numorcond': self.numorcond,
'indepentvar': self.indepentvar,
'interactionterms': self.interactionterms,
'u_varname': self.readvalues(str),
's_u': self.skipifzero, # float, skipword, skipword, str (unused?)
's_v': self.skipifzero, # float (unused in MIDAS?)
'r': self.skipifzero, # float (unused?)
# ignore common junk
'conditions': self.skipline,
'distribution': self.skipline,
'coefficients and structure': self.skipline,
'errorstructure': self.skipline
}
def indepentvar(self, pos, line, lines):
# int = m + skip line +
# m * (skipword, str=name, skipword, float=min,
# float=max, float=coef)
# name="constant" is a special case
num_vars = int(line[1])
pos += 2
vars = []
def floatorempty(s):
return float(s) if s else 0.0
readvariable = self.readvalues(str, None,
floatorempty, floatorempty, floatorempty)
for i in range(num_vars):
line = lines[pos]
pos, values = readvariable(pos, line, lines)
vars.append(values)
self.conditions[self.current_condition]['vars'] = vars
return pos, None
def interactionterms(self, pos, line, lines):
numterms = int(line[1]) if line[1] else 0
if numterms:
print("unimplemented keyword: interactionterms")
return pos + 1, None
# ------------------------
# transform to expression
# ------------------------
def var2expr(self, var):
name, minvalue, maxvalue, coef = var
if name == 'constant':
return coef
else:
v = Variable(self.var_name(name), self.var_type(name))
return v * coef
# return ZeroClip(v, minvalue, maxvalue) * coef
def vars2expr(self, vars):
assert vars
expr = self.var2expr(vars[0])
for var in vars[1:]:
expr = expr + self.var2expr(var)
return expr
def data2expr(self, data):
conditions = data['numorcond']
assert conditions
if len(conditions) == 1:
condition = conditions[0]
expr = self.vars2expr(condition['vars'])
filter_expr = self.condition2expr(condition['condition'])
else:
lastcond = conditions[-1]
cond_expr = self.condition2expr(lastcond['condition'])
expr = Where(cond_expr, self.vars2expr(lastcond['vars']), 0)
filter_expr = cond_expr
for cond in conditions[-2::-1]:
cond_expr = self.condition2expr(cond['condition'])
expr = Where(cond_expr, self.vars2expr(cond['vars']), expr)
filter_expr |= cond_expr
kwargs = {'filter': filter_expr}
predictor, pred_type, _, _ = data['predictor']
predictor = self.var_name(predictor)
if data.get('u_varname'):
# another option would be to do:
#expr += Variable(self.var_name(data['u_varname']))"
kwargs['error_var'] = self.var_name(data['u_varname'])
if bool(data['align']):
kwargs['align'] = 'al_p_%s.csv' % data['name']
if pred_type != 2:
print("unimplemented align for pred_type:", pred_type)
if pred_type == 0: # continuous
expr = ContRegr(expr, **kwargs)
elif pred_type == 1: # clipped continuous
expr = ClipRegr(expr, **kwargs)
elif pred_type == 2: # logit
expr = LogitRegr(expr, **kwargs)
elif pred_type == 3: # logged continuous
expr = LogRegr(expr, **kwargs)
elif pred_type == 4: # clipped logged continuous
print("Converting clipped logged continuous to logged continuous")
expr = LogRegr(expr, **kwargs)
else:
print("unimplemented predictor type:", pred_type)
return predictor, expr
class TransitionImporter(TextImporter):
def __init__(self, input_path, fields, constants, links, obj_type, renames):
TextImporter.__init__(self, input_path, fields, obj_type, renames)
self.constants = constants
self.links = links
# cfr readdyparam.cpp
self.keywords = {
'file description:': self.readvalues(str),
# Time est toujours = 1 sauf dans trap_p_coeduach.txt
'time': self.skipline, #readvalues(int),
'align': self.readvalues(int),
'predictor': self.readvalues(str, int),
'numconditions': self.numconditions,
'condition': self.condition,
'endoffile': self.end,
'numorcond': self.numorcond,
'gen': self.gen, # str
'fpbcalc': self.fpbcalc, # str
'fgen': self.fgen, # str
'zero': self.skipifzero,
'first': self.skipifzero,
'second': self.skipifzero,
'third': self.skipifzero,
# ignore common junk
'conditions': self.skipline,
'type': self.skipline,
}
def gen(self, pos, line, lines):
# min(arg1, arg2)
# max(arg1, arg2)
# setto[value]
# expression with "( ) + - * / ^ , min max"
s = line[1]
# add spaces around operators
s = re.sub(r'([+\-*/^])', r' \1 ', s)
s = s.replace('^', '**')
s = re.sub(r'setto\[([^\]]+)\]', r'\1', s)
self.conditions[self.current_condition]['action'] = s
return pos + 1, None
def fgen(self, pos, line, lines):
# function(args)
# - KillPerson(varname1=amount1;varnamz2=amount2;...)
# -> also updates marital status of spouse
# - duration(variable,const)
# -> const is 1 char
# ... (see smile p.13, 14 and 15)
s = line[1]
s = s.replace('CreatePerson(', "new('person', ")
s = s.replace('newbirth(', "new('person', ")
s = s.replace('newhousehold(', "new('household', ")
s = re.sub(r'duration\((\w+),(\d+)\)', r'duration(\1 == \2)', s)
# remove extra , inserted by above replacements
s = s.replace(', )', ')')
s = s.replace(';', ', ')
# getlink(ps,p_inc) -> ps.p_inc
if "getlink" in s:
s = re.sub(r'getlink\((\w{2}),(\w+)\)', r'\1.\2', s)
link, var = s.split('.')
assert var[1] == '_'
var = var[2:]
s = "%s.%s" % (link, var)
s = s.replace('mean(', 'tavg(')
s = s.replace('prev(', 'lag(')
# prevent name collision
s = s.replace('divorce(', 'do_divorce(')
self.conditions[self.current_condition]['action'] = s
return pos + 1, None
def fpbcalc(self, pos, line, lines):
s = line[1]
s = s.replace('grandom(', 'normal(')
# add space around +, -, * and / operators, if not present
s = re.sub(r'(\S)([+\-*/^])(\S)', r'\1 \2 \3', s)
# idem for < and >
s = re.sub(r'([<>]=?)', r' \1 ', s)
# = -> ==
s = re.sub(r'([^<>])=', r'\1 == ', s)
# CONST[ddddYd] -> CONST[dddd]
s = re.sub(r'([A-Z_][A-Z0-9_]*)\[(\d{4})Y1\]', r'\1[\2]', s)
self.conditions[self.current_condition]['action'] = s
return pos + 1, None
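    # Illustrative rewrites performed above: "p_age>=50" -> "p_age >= 50",
    # "x=1" -> "x == 1", and "MINR[2003Y1]" -> "MINR[2003]".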
# def zero(self, pos, line, lines):
# if line[1] != "0":
# 1) find line with "predict" keyword
# 2) for each pred_cat, cond.value[n] = float(word[n+1])
# 3) find line with "mean" keyword
# 4) cond.nZero = int(word[1])
# 5) for each pred_cat, cond.mean[n] = float(word[n+1])
# ------------------------
    # transform to expression
# ------------------------
def action2expr(self, data):
const_sample, const_names = self.constants
globals = dict((name, SubscriptableVariable(name))
for name in const_names)
globals.update((name, Variable(self.var_name(name),
self.var_type(name)))
for name in self.fields.keys())
links = [(name, Link(name, link_def['keyorig'], link_def['desttype'],
self.renames.get(link_def['desttype'], {})))
for name, link_def in self.links.iteritems()]
globals.update(links)
return parse(data, globals)
def data2expr(self, data):
# pred_type seem to be ignored for transitions
predictor, pred_type = data['predictor']
local_name = self.var_name(predictor)
conditions = data['numorcond']
assert conditions
# this is a hack to work around useless conditions in liam 1
for cond in conditions:
for orcond in cond['condition']:
if ('p_co_alive', 1.0, 1.0) in orcond:
print(" Warning: removed 'p_co_alive == 1' condition")
orcond.remove(('p_co_alive', 1.0, 1.0))
lastcond = conditions[-1]
if lastcond is None:
            raise Exception('Actual number of conditions does not match the '
                            'number of conditions declared!')
cond_expr = self.condition2expr(lastcond['condition'])
v = Variable(local_name, self.var_type(predictor))
expr = Where(cond_expr, self.action2expr(lastcond['action']), v)
for cond in conditions[-2::-1]:
cond_expr = self.condition2expr(cond['condition'])
expr = Where(cond_expr, self.action2expr(cond['action']), expr)
return local_name, expr
class TrapImporter(TextImporter):
pass
# =====================
def load_processes(input_path, fnames,
fields, constants, links, obj_type, renames):
print("=" * 40)
data = []
predictor_seen = {}
parsed = []
obj_renames = renames.get(obj_type, {})
print("pass 1: parsing files...")
for fname in fnames:
print(" - %s" % fname)
fpath = path.join(input_path, fname)
if fname.startswith('regr_'):
            importer = RegressionImporter(fpath, fields, obj_type, renames)
elif fname.startswith('tran_'):
importer = TransitionImporter(fpath, fields, constants, links,
obj_type, renames)
else:
importer = None
if importer is not None:
fullname, predictor, expr = importer.import_file()
type_, name = fullname.split('_', 1)
name = obj_renames.get(name, name)
fullname = '%s_%s' % (type_, name)
parsed.append((fname, fullname, predictor, expr))
predictor_seen.setdefault(predictor, []).append(fullname)
print("-" * 40)
print("pass 2: simplifying...")
other_types = {
'regr': ('tran', 'trap'),
'tran': ('regr', 'trap'),
'trap': ('tran', 'regr')
}
proc_name_per_file = {}
proc_names = {}
for fname, fullname, predictor, expr in parsed:
print(" - %s (%s)" % (fname, predictor))
type_, name = fullname.split('_', 1)
expr_str = str(simplify(expr))
if len(predictor_seen[predictor]) == 1:
if name != predictor:
print(" renaming '%s' process to '%s'" % (name, predictor))
name = predictor
res = expr_str
else:
conflicting_names = predictor_seen[predictor]
assert len(conflicting_names) > 1
names_to_check = ['%s_%s' % (other_type, name)
for other_type in other_types[type_]]
if any(name in conflicting_names for name in names_to_check):
name = fullname
while name in proc_names:
name += '_dupe'
print(" renaming process to '%s'" % name)
res = {'predictor': predictor,
'expr': expr_str}
proc_names[name] = True
data.append((name, res))
proc_name_per_file[fname] = name
print("=" * 40)
return proc_name_per_file, data
def convert_all_align(input_path):
import glob
for fpath in glob.glob(path.join(input_path, 'al_regr_*.txt')):
convert_txt_align(fpath)
# =====================
# OUTPUT
# =====================
def orderedmap2yaml(items, indent):
sep = '\n' + ' ' * indent
return sep.join("- %s: %s" % f for f in items)
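# Illustrative: orderedmap2yaml([('age', 'int'), ('dead', 'bool')], 2)
# returns "- age: int\n  - dead: bool".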
def links2yaml(links):
if links:
# ('hp', {'desttype': 'p', 'prefix': 'p',
# 'origintype': 'h', 'keyorig': 'pid'})]
sep = '\n '
return """
links:
%s""" % sep.join("%s: {type: many2one, target: %s, field: %s}" %
(name, l['desttype'], l['keyorig'])
for name, l in links)
else:
return ''
def process2yaml(processes):
if processes:
sep = '\n '
processes_str = []
for name, expr in processes:
if isinstance(expr, dict):
expr_lines = expr['expr'].splitlines()
# + 2 is for ": "
indent = '\n' + ' ' * (16 + len(expr['predictor']) + 2)
expr_str = indent.join(expr_lines)
process_str = """%s:
%s: %s""" % (name, expr['predictor'], expr_str)
else:
expr_lines = expr.splitlines()
indent = '\n' + ' ' * (12 + len(name) + 2) # + 2 is for ": "
expr = indent.join(expr_lines)
process_str = '%s: %s' % (name, expr)
processes_str.append(process_str)
return """
processes:
%s""" % sep.join(processes_str)
else:
return ''
def constants2yaml(constants):
const_defs = [(name, 'float') for name in constants[1]]
return orderedmap2yaml(const_defs, indent=2)
def entities2yaml(entities):
entity_tmpl = " %s:%s%s%s\n"
e_strings = []
for ent_name, entity in entities.iteritems():
fields = entity['fields']
if fields:
fields = sorted([(fname, f['type'].__name__)
for fname, f in fields.iteritems()])
fields_str = '\n fields:\n %s' \
% orderedmap2yaml(fields, 3)
else:
fields_str = ''
links_str = links2yaml(entity['links'])
process_str = process2yaml(entity['processes'])
e_strings.append(entity_tmpl % (ent_name, fields_str, links_str,
process_str))
return '\n'.join(e_strings)
def process_list2yaml(processes):
s = []
for ent_name, ent_processes in itertools.groupby(processes,
operator.itemgetter(0)):
p_str = ',\n '.join(pname
for ent_name, pname in ent_processes)
s.append(' - %s: [%s]' % (ent_name, p_str))
return '\n'.join(s)
def simulation2yaml(constants, entities, process_list):
constants_str = constants2yaml(constants)
entities_str = entities2yaml(entities)
process_list_str = process_list2yaml(process_list)
return """globals:
periodic:
# period is implicit
%s
entities:
%s
simulation:
processes:
%s
input:
file: base.h5
output:
file: simulation.h5
start_period: 2003 # first simulated period
periods: 20
""" % (constants_str, entities_str, process_list_str)
# =====================
if __name__ == '__main__':
argv = sys.argv
if len(argv) < 3:
print(
"Usage: %s input_path output_path [rename_file] [filtered]" % argv[0]
)
sys.exit()
else:
input_path = argv[1]
output_path = argv[2]
rename_path = None if len(argv) < 4 else argv[3]
filtered = True if len(argv) < 5 else argv[4] == "filtered"
if not path.isdir(input_path):
input_path, fname = path.split(input_path)
else:
fname = None
renames = load_renames(rename_path)
fields_per_obj = load_fields(path.join(input_path, 'dyvardesc.txt'))
constants = load_av_globals(path.join(input_path, 'macro.av'))[:2]
links = load_links(path.join(input_path, 'linkage.txt'))
process_list = load_agespine(path.join(input_path, 'agespine.txt'))
fields = {}
for obj_type, obj_fields in fields_per_obj.iteritems():
for name, fdef in obj_fields.iteritems():
fields['%s_%s' % (obj_type, name)] = fdef
if fname is None:
raw_names = os.listdir(input_path)
else:
raw_names = [fname]
filtered = False
if filtered:
base_names = process_list
else:
base_names = []
for raw_name in raw_names:
basename, ext = path.splitext(raw_name)
if ext == '.txt':
base_names.append(basename)
process_files = []
proc_per_obj = {}
for basename in base_names:
chunks = basename.split('_', 2)
if len(chunks) < 3: # tran_p_x
continue
proc_type, obj_type, name = chunks
if proc_type == 'al':
continue
if len(obj_type) != 1:
continue
file_name = basename + '.txt'
process_files.append((obj_type, file_name))
proc_per_obj.setdefault(obj_type, []).append(file_name)
proc_name_per_file = {}
entities = {}
for obj_type, obj_fields in fields_per_obj.iteritems():
obj_links = [(k, v) for k, v in links.items()
if v['origintype'] == obj_type]
obj_fields.update([(v['keyorig'], {'type': int}) for k, v in obj_links])
obj_proc_files = proc_per_obj.get(obj_type, [])
print("loading processes for %s" % obj_type)
obj_proc_names, obj_processes = load_processes(input_path,
obj_proc_files,
fields, constants, links,
obj_type,
renames)
proc_name_per_file.update(obj_proc_names)
obj_renames = renames.get(obj_type, {})
for old_name in obj_fields.keys():
new_name = obj_renames.get(old_name)
if new_name is not None:
obj_fields[new_name] = obj_fields.pop(old_name)
entities[obj_type] = {
'fields': obj_fields,
'links': obj_links,
'processes': obj_processes
}
process_names = []
for obj, file_name in process_files:
proc_name = proc_name_per_file.get(file_name)
if proc_name is not None:
process_names.append((obj, proc_name))
print("exporting to '%s'" % output_path)
with open(output_path, 'w') as f_out:
# default YAML serialization is ugly, so we produce the string ourselves
f_out.write(simulation2yaml(constants, entities, process_names))
# yaml.dump(yamldata, f_out, default_flow_style=False,
# default_style='"', indent=4)
if fname is None:
convert_all_align(input_path)
print("done.")
| gpl-3.0 | 8,364,246,382,288,008,000 | 33.598639 | 88 | 0.511625 | false |
scalable-networks/gnuradio-3.7.0.1 | gr-filter/python/filter/qa_fractional_interpolator.py | 10 | 3149 | #!/usr/bin/env python
#
# Copyright 2007,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, filter, blocks
import math
def sig_source_f(samp_rate, freq, amp, N):
t = map(lambda x: float(x)/samp_rate, xrange(N))
y = map(lambda x: math.sin(2.*math.pi*freq*x), t)
return y
def sig_source_c(samp_rate, freq, amp, N):
t = map(lambda x: float(x)/samp_rate, xrange(N))
y = map(lambda x: math.cos(2.*math.pi*freq*x) + \
1j*math.sin(2.*math.pi*freq*x), t)
return y
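# Note: sig_source_c builds cos + 1j*sin, i.e. samples of exp(2j*pi*freq*t);
# the amp argument is accepted but unused by these helpers.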
class test_fractional_resampler(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_001_ff(self):
N = 10000 # number of samples to use
fs = 1000 # baseband sampling rate
rrate = 1.123 # resampling rate
freq = 10
data = sig_source_f(fs, freq, 1, N)
signal = blocks.vector_source_f(data)
op = filter.fractional_interpolator_ff(0, rrate)
snk = blocks.vector_sink_f()
self.tb.connect(signal, op, snk)
self.tb.run()
Ntest = 5000
L = len(snk.data())
t = map(lambda x: float(x)/(fs/rrate), xrange(L))
phase = 0.1884
expected_data = map(lambda x: math.sin(2.*math.pi*freq*x+phase), t)
dst_data = snk.data()
self.assertFloatTuplesAlmostEqual(expected_data[-Ntest:], dst_data[-Ntest:], 3)
def test_002_cc(self):
N = 10000 # number of samples to use
fs = 1000 # baseband sampling rate
rrate = 1.123 # resampling rate
freq = 10
data = sig_source_c(fs, freq, 1, N)
signal = blocks.vector_source_c(data)
op = filter.fractional_interpolator_cc(0.0, rrate)
snk = blocks.vector_sink_c()
self.tb.connect(signal, op, snk)
self.tb.run()
Ntest = 5000
L = len(snk.data())
t = map(lambda x: float(x)/(fs/rrate), xrange(L))
phase = 0.1884
expected_data = map(lambda x: math.cos(2.*math.pi*freq*x+phase) + \
1j*math.sin(2.*math.pi*freq*x+phase), t)
dst_data = snk.data()
self.assertComplexTuplesAlmostEqual(expected_data[-Ntest:], dst_data[-Ntest:], 3)
if __name__ == '__main__':
gr_unittest.run(test_fractional_resampler, "test_fractional_resampler.xml")
| gpl-3.0 | 961,250,160,224,597,600 | 30.49 | 89 | 0.614481 | false |
mvanderkolff/xhtml2pdf | xhtml2pdf/pdf.py | 41 | 1949 | # -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from xhtml2pdf.util import pisaTempFile, getFile
import logging
log = logging.getLogger("xhtml2pdf")
class pisaPDF:
def __init__(self, capacity=-1):
self.capacity = capacity
self.files = []
def addFromURI(self, url, basepath=None):
obj = getFile(url, basepath)
if obj and (not obj.notFound()):
self.files.append(obj.getFile())
addFromFileName = addFromURI
    def addFromFile(self, f):
        if hasattr(f, "read"):
            self.files.append(f)
            return
        self.addFromURI(f)
def addFromString(self, data):
self.files.append(pisaTempFile(data, capacity=self.capacity))
def addDocument(self, doc):
if hasattr(doc.dest, "read"):
self.files.append(doc.dest)
def join(self, file=None):
import pyPdf # TODO: Why is this in the middle of everything?
output = pyPdf.PdfFileWriter()
for pdffile in self.files:
input = pyPdf.PdfFileReader(pdffile)
for pageNumber in xrange(input.getNumPages()):
output.addPage(input.getPage(pageNumber))
if file is not None:
output.write(file)
return file
out = pisaTempFile(capacity=self.capacity)
output.write(out)
return out.getvalue()
getvalue = join
__str__ = join
| apache-2.0 | 1,486,583,681,041,580,500 | 28.530303 | 74 | 0.651616 | false |
issyrichards/spartify2 | requests-master/requests/api.py | 160 | 5280 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send data
before giving up, as a float, or a (`connect timeout, read timeout
<user/advanced.html#timeouts>`_) tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
<Response [200]>
"""
session = sessions.Session()
response = session.request(method=method, url=url, **kwargs)
# By explicitly closing the session, we avoid leaving sockets open which
# can trigger a ResourceWarning in some cases, and look like a memory leak
# in others.
session.close()
return response
def get(url, **kwargs):
"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs)
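# Illustrative usage (httpbin.org is just an example endpoint):
#     r = get('http://httpbin.org/get', params={'key': 'value'})
#     r.status_code  # e.g. 200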
def options(url, **kwargs):
"""Sends a OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, json=None, **kwargs):
"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('delete', url, **kwargs)
| mit | 2,258,093,387,737,762,000 | 35.164384 | 144 | 0.665909 | false |
aaronorosen/horizon-congress | openstack_dashboard/dashboards/project/stacks/tabs.py | 14 | 5069 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import messages
from horizon import tabs
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.dashboards.project.stacks \
import api as project_api
from openstack_dashboard.dashboards.project.stacks import mappings
from openstack_dashboard.dashboards.project.stacks \
import tables as project_tables
LOG = logging.getLogger(__name__)
class StackTopologyTab(tabs.Tab):
name = _("Topology")
slug = "topology"
template_name = "project/stacks/_detail_topology.html"
preload = False
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStacks"),
("orchestration", "cloudformation:ListStackResources"),),
request)
def get_context_data(self, request):
context = {}
stack = self.tab_group.kwargs['stack']
context['stack_id'] = stack.id
context['d3_data'] = project_api.d3_data(request, stack_id=stack.id)
return context
class StackOverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
template_name = "project/stacks/_detail_overview.html"
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStacks"),),
request)
def get_context_data(self, request):
return {"stack": self.tab_group.kwargs['stack']}
class ResourceOverviewTab(tabs.Tab):
name = _("Overview")
slug = "resource_overview"
template_name = "project/stacks/_resource_overview.html"
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStackResource"),),
request)
def get_context_data(self, request):
resource = self.tab_group.kwargs['resource']
resource_url = mappings.resource_to_url(resource)
return {
"resource": resource,
"resource_url": resource_url,
"metadata": self.tab_group.kwargs['metadata']}
class StackEventsTab(tabs.Tab):
name = _("Events")
slug = "events"
template_name = "project/stacks/_detail_events.html"
preload = False
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStackEvents"),),
request)
def get_context_data(self, request):
stack = self.tab_group.kwargs['stack']
try:
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
events = api.heat.events_list(self.request, stack_identifier)
LOG.debug('got events %s' % events)
# The stack id is needed to generate the resource URL.
for event in events:
event.stack_id = stack.id
except Exception:
events = []
messages.error(request, _(
'Unable to get events for stack "%s".') % stack.stack_name)
return {"stack": stack,
"table": project_tables.EventsTable(request, data=events), }
class StackResourcesTab(tabs.Tab):
name = _("Resources")
slug = "resources"
template_name = "project/stacks/_detail_resources.html"
preload = False
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:ListStackResources"),),
request)
def get_context_data(self, request):
stack = self.tab_group.kwargs['stack']
try:
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
resources = api.heat.resources_list(self.request, stack_identifier)
LOG.debug('got resources %s' % resources)
# The stack id is needed to generate the resource URL.
for r in resources:
r.stack_id = stack.id
except Exception:
resources = []
messages.error(request, _(
'Unable to get resources for stack "%s".') % stack.stack_name)
return {"stack": stack,
"table": project_tables.ResourcesTable(
request, data=resources, stack=stack), }
class StackDetailTabs(tabs.TabGroup):
slug = "stack_details"
tabs = (StackTopologyTab, StackOverviewTab, StackResourcesTab,
StackEventsTab)
sticky = True
class ResourceDetailTabs(tabs.TabGroup):
slug = "resource_details"
tabs = (ResourceOverviewTab,)
sticky = True
| apache-2.0 | 7,963,771,650,803,494,000 | 32.348684 | 79 | 0.636615 | false |
zhengzhihust/tablib | tablib/packages/openpyxl/workbook.py | 116 | 6200 | # file openpyxl/workbook.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Workbook is the top-level container for all document information."""
__docformat__ = "restructuredtext en"
# Python stdlib imports
import datetime
import os
# package imports
from .worksheet import Worksheet
from .writer.dump_worksheet import DumpWorksheet, save_dump
from .writer.strings import StringTableBuilder
from .namedrange import NamedRange
from .style import Style
from .writer.excel import save_workbook
from .shared.exc import ReadOnlyWorkbookException
class DocumentProperties(object):
"""High-level properties of the document."""
def __init__(self):
self.creator = 'Unknown'
self.last_modified_by = self.creator
self.created = datetime.datetime.now()
self.modified = datetime.datetime.now()
self.title = 'Untitled'
self.subject = ''
self.description = ''
self.keywords = ''
self.category = ''
self.company = 'Microsoft Corporation'
class DocumentSecurity(object):
"""Security information about the document."""
def __init__(self):
self.lock_revision = False
self.lock_structure = False
self.lock_windows = False
self.revision_password = ''
self.workbook_password = ''
class Workbook(object):
"""Workbook is the container for all other parts of the document."""
def __init__(self, optimized_write = False):
self.worksheets = []
self._active_sheet_index = 0
self._named_ranges = []
self.properties = DocumentProperties()
self.style = Style()
self.security = DocumentSecurity()
self.__optimized_write = optimized_write
self.__optimized_read = False
self.strings_table_builder = StringTableBuilder()
if not optimized_write:
self.worksheets.append(Worksheet(self))
def _set_optimized_read(self):
self.__optimized_read = True
def get_active_sheet(self):
"""Returns the current active sheet."""
return self.worksheets[self._active_sheet_index]
def create_sheet(self, index = None):
"""Create a worksheet (at an optional index).
:param index: optional position at which the sheet will be inserted
:type index: int
"""
if self.__optimized_read:
raise ReadOnlyWorkbookException('Cannot create new sheet in a read-only workbook')
if self.__optimized_write :
new_ws = DumpWorksheet(parent_workbook = self)
else:
new_ws = Worksheet(parent_workbook = self)
self.add_sheet(worksheet = new_ws, index = index)
return new_ws
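    # Illustrative usage (a minimal sketch):
    #     wb = Workbook()
    #     ws = wb.create_sheet(0)  # insert the new sheet at the front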
def add_sheet(self, worksheet, index = None):
"""Add an existing worksheet (at an optional index)."""
if index is None:
index = len(self.worksheets)
self.worksheets.insert(index, worksheet)
def remove_sheet(self, worksheet):
"""Remove a worksheet from this workbook."""
self.worksheets.remove(worksheet)
def get_sheet_by_name(self, name):
"""Returns a worksheet by its name.
Returns None if no worksheet has the name specified.
:param name: the name of the worksheet to look for
:type name: string
"""
requested_sheet = None
for sheet in self.worksheets:
if sheet.title == name:
requested_sheet = sheet
break
return requested_sheet
def get_index(self, worksheet):
"""Return the index of the worksheet."""
return self.worksheets.index(worksheet)
def get_sheet_names(self):
"""Returns the list of the names of worksheets in the workbook.
Names are returned in the worksheets order.
:rtype: list of strings
"""
return [s.title for s in self.worksheets]
def create_named_range(self, name, worksheet, range):
"""Create a new named_range on a worksheet"""
assert isinstance(worksheet, Worksheet)
named_range = NamedRange(name, [(worksheet, range)])
self.add_named_range(named_range)
def get_named_ranges(self):
"""Return all named ranges"""
return self._named_ranges
def add_named_range(self, named_range):
"""Add an existing named_range to the list of named_ranges."""
self._named_ranges.append(named_range)
def get_named_range(self, name):
"""Return the range specified by name."""
requested_range = None
for named_range in self._named_ranges:
if named_range.name == name:
requested_range = named_range
break
return requested_range
def remove_named_range(self, named_range):
"""Remove a named_range from this workbook."""
self._named_ranges.remove(named_range)
def save(self, filename):
""" shortcut """
if self.__optimized_write:
save_dump(self, filename)
else:
save_workbook(self, filename)
| mit | 1,500,311,661,661,735,700 | 32.333333 | 94 | 0.653226 | false |
jpshort/odoo | addons/portal_gamification/__openerp__.py | 381 | 1571 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Portal Gamification',
'version': '1',
'complexity': 'easy',
'description': """
This module adds security rules for gamification to allow portal users to participate in challenges
===================================================================================================
""",
'author': 'OpenERP SA',
'depends': ['gamification','portal'],
'data': [
'security/ir.model.access.csv',
'security/portal_security.xml',
],
'installable': True,
'auto_install': True,
'category': 'Hidden',
}
| agpl-3.0 | -4,278,666,906,487,907,300 | 36.404762 | 99 | 0.557607 | false |
MickSandoz/compassion-modules | __unported__/child_update_picture_date/wizard/update_child_picture_date.py | 5 | 1907 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <[email protected]>
#
# The licence is in the file __openerp__.py
#
##############################################################################
import traceback
from openerp.osv import orm, fields
class update_child_picture_date(orm.TransientModel):
_name = 'update.child.picture.date'
def update(self, cr, uid, context=None):
count = 1
print('LAUNCH CHILD PICTURE UPDATE')
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(
cr, uid, [('state', 'not in', ['F', 'X']),
('update_done', '=', False)], context=context)
total = str(len(child_ids))
for child in child_obj.browse(cr, uid, child_ids, context):
try:
                print('Updating child {0}/{1}'.format(count, total))
child_obj.get_infos(cr, uid, child.id, context)
child.write({'update_done': True})
except Exception:
if child.state != 'E':
child.write({
'state': 'E',
'previous_state': child.state})
self.pool.get('mail.thread').message_post(
cr, uid, child.id,
traceback.format_exc(), 'Child update',
context={'thread_model': 'compassion.child'})
finally:
count += 1
cr.commit()
return True
class child_compassion(orm.Model):
_inherit = 'compassion.child'
_columns = {
'update_done': fields.boolean('update done')
}
| agpl-3.0 | -1,409,625,517,877,852,000 | 33.981132 | 78 | 0.471421 | false |
niegenug/wesnoth | scons/python_devel.py | 49 | 1381 | # vi: syntax=python:et:ts=4
import sys, os
from config_check_utils import backup_env, restore_env
import distutils.sysconfig
def exists():
return True
def PythonExtension(env, target, source, **kv):
return env.SharedLibrary(target, source, SHLIBPREFIX='', SHLIBSUFFIX=distutils.sysconfig.get_config_var("SO"), **kv)
def generate(env):
env.AddMethod(PythonExtension)
def CheckPython(context):
env = context.env
backup = backup_env(env, ["CPPPATH", "LIBPATH", "LIBS"])
context.Message("Checking for Python... ")
env.AppendUnique(CPPPATH = distutils.sysconfig.get_python_inc())
version = distutils.sysconfig.get_config_var("VERSION")
if not version:
version = sys.version[:3]
if env["PLATFORM"] == "win32":
version = version.replace('.', '')
env.AppendUnique(LIBPATH = distutils.sysconfig.get_config_var("LIBDIR") or \
os.path.join(distutils.sysconfig.get_config_var("prefix"), "libs") )
env.AppendUnique(LIBS = "python" + version)
test_program = """
#include <Python.h>
int main()
{
Py_Initialize();
}
\n"""
if context.TryLink(test_program, ".c"):
context.Result("yes")
return True
else:
context.Result("no")
restore_env(context.env, backup)
return False
config_checks = { "CheckPython" : CheckPython }
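# Illustrative SConstruct sketch (hypothetical; assumes a stock SCons
# Environment and Configure context -- nothing below ships with this module):
#
#   env = Environment(tools=['default', 'python_devel'])
#   conf = env.Configure(custom_tests=config_checks)
#   have_python = conf.CheckPython()
#   env = conf.Finish()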
| gpl-2.0 | 7,044,031,859,669,092,000 | 31.116279 | 120 | 0.637944 | false |
adam111316/SickGear | lib/rtorrent/compat.py | 180 | 1258 | # Copyright (c) 2013 Chris Lucas, <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import sys
def is_py3():
return sys.version_info[0] == 3
if is_py3():
import xmlrpc.client as xmlrpclib
else:
import xmlrpclib
| gpl-3.0 | -4,105,558,524,421,486,000 | 40.933333 | 72 | 0.765501 | false |
zubair-arbi/edx-platform | lms/djangoapps/debug/views.py | 119 | 2136 | """Views for debugging and diagnostics"""
import pprint
import traceback
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.contrib.auth.decorators import login_required
from django.utils.html import escape
from django.views.decorators.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from codejail.safe_exec import safe_exec
from mako.exceptions import TopLevelLookupException
@login_required
@ensure_csrf_cookie
def run_python(request):
"""A page to allow testing the Python sandbox on a production server."""
if not request.user.is_staff:
raise Http404
c = {}
c['code'] = ''
c['results'] = None
if request.method == 'POST':
py_code = c['code'] = request.POST.get('code')
g = {}
try:
safe_exec(py_code, g)
except Exception as e:
c['results'] = traceback.format_exc()
else:
c['results'] = pprint.pformat(g)
return render_to_response("debug/run_python_form.html", c)
@login_required
def show_parameters(request):
"""A page that shows what parameters were on the URL and post."""
html = []
for name, value in sorted(request.GET.items()):
html.append(escape("GET {}: {!r}".format(name, value)))
for name, value in sorted(request.POST.items()):
html.append(escape("POST {}: {!r}".format(name, value)))
return HttpResponse("\n".join("<p>{}</p>".format(h) for h in html))
def show_reference_template(request, template):
"""
Shows the specified template as an HTML page. This is used only in debug mode to allow the UX team
to produce and work with static reference templates.
e.g. /template/ux/reference/container.html shows the template under ux/reference/container.html
Note: dynamic parameters can also be passed to the page.
e.g. /template/ux/reference/container.html?name=Foo
"""
try:
return render_to_response(template, request.GET.dict())
except TopLevelLookupException:
return HttpResponseNotFound("Couldn't find template {template}".format(template=template))
| agpl-3.0 | -2,770,769,319,604,355,000 | 33.451613 | 102 | 0.682584 | false |
rue89-tech/edx-analytics-pipeline | edx/analytics/tasks/tests/test_student_engagement.py | 3 | 15248 | """Test student engagement metrics"""
import json
import luigi
from ddt import ddt, data, unpack
from edx.analytics.tasks.student_engagement import StudentEngagementTask, SUBSECTION_VIEWED_MARKER
from edx.analytics.tasks.tests import unittest
from edx.analytics.tasks.tests.opaque_key_mixins import InitializeOpaqueKeysMixin, InitializeLegacyKeysMixin
from edx.analytics.tasks.tests.map_reduce_mixins import MapperTestMixin, ReducerTestMixin
class BaseStudentEngagementTaskMapTest(InitializeOpaqueKeysMixin, MapperTestMixin, unittest.TestCase):
"""Base class for test analysis of detailed student engagement"""
DEFAULT_USER_ID = 10
DEFAULT_TIMESTAMP = "2013-12-17T15:38:32.805444"
DEFAULT_DATE = "2013-12-17"
def setUp(self):
super(BaseStudentEngagementTaskMapTest, self).setUp()
self.initialize_ids()
self.video_id = 'i4x-foo-bar-baz'
self.event_templates = {
'play_video': {
"username": "test_user",
"host": "test_host",
"event_source": "browser",
"event_type": "play_video",
"context": {
"course_id": self.course_id,
"org_id": self.org_id,
"user_id": self.DEFAULT_USER_ID,
},
"time": "{0}+00:00".format(self.DEFAULT_TIMESTAMP),
"ip": "127.0.0.1",
"event": '{"id": "%s", "currentTime": "23.4398", "code": "87389iouhdfh"}' % self.video_id,
"agent": "blah, blah, blah",
"page": None
},
'problem_check': {
"username": "test_user",
"host": "test_host",
"event_source": "server",
"event_type": "problem_check",
"context": {
"course_id": self.course_id,
"org_id": self.org_id,
"user_id": self.DEFAULT_USER_ID,
},
"time": "{0}+00:00".format(self.DEFAULT_TIMESTAMP),
"ip": "127.0.0.1",
"event": {
"problem_id": self.problem_id,
"success": "incorrect",
},
"agent": "blah, blah, blah",
"page": None
}
}
self.default_event_template = 'problem_check'
self.default_key = (self.DEFAULT_DATE, self.course_id, 'test_user')
def create_task(self, interval=None, interval_type=None):
"""Allow arguments to be passed to the task constructor."""
if not interval:
interval = self.DEFAULT_DATE
self.task = StudentEngagementTask(
interval=luigi.DateIntervalParameter().parse(interval),
output_root='/fake/output',
interval_type=interval_type,
)
self.task.init_local()
def assert_date_mappings(self, expected_end_date, actual_event_date):
"""Asserts that an event_date is mapped to the expected date in the key."""
self.assert_single_map_output(
self.create_event_log_line(time="{}T15:38:32.805444".format(actual_event_date)),
(expected_end_date, self.course_id, 'test_user'),
(self.problem_id, 'problem_check', '{}', actual_event_date)
)
@ddt
class StudentEngagementTaskMapTest(BaseStudentEngagementTaskMapTest):
"""Test analysis of detailed student engagement"""
def setUp(self):
super(StudentEngagementTaskMapTest, self).setUp()
self.create_task()
@data(
{'time': "2013-12-01T15:38:32.805444"},
{'username': ''},
{'event_type': None},
{'context': {'course_id': 'lskdjfslkdj'}},
{'event': 'sdfasdf'}
)
def test_invalid_events(self, kwargs):
self.assert_no_map_output_for(self.create_event_log_line(**kwargs))
def test_browser_problem_check_event(self):
template = self.event_templates['problem_check']
self.assert_no_map_output_for(self.create_event_log_line(template=template, event_source='browser'))
def test_incorrect_problem_check(self):
self.assert_single_map_output(
json.dumps(self.event_templates['problem_check']),
self.default_key,
(self.problem_id, 'problem_check', '{}', self.DEFAULT_DATE)
)
def test_correct_problem_check(self):
template = self.event_templates['problem_check']
template['event']['success'] = 'correct'
self.assert_single_map_output(
json.dumps(template),
self.default_key,
(self.problem_id, 'problem_check', json.dumps({'correct': True}), self.DEFAULT_DATE)
)
def test_missing_problem_id(self):
template = self.event_templates['problem_check']
del template['event']['problem_id']
self.assert_no_map_output_for(self.create_event_log_line(template=template))
def test_missing_video_id(self):
template = self.event_templates['play_video']
template['event'] = '{"currentTime": "23.4398", "code": "87389iouhdfh"}'
self.assert_no_map_output_for(self.create_event_log_line(template=template))
def test_play_video(self):
self.assert_single_map_output(
json.dumps(self.event_templates['play_video']),
self.default_key,
(self.video_id, 'play_video', '{}', self.DEFAULT_DATE)
)
def test_implicit_event(self):
self.assert_single_map_output(
self.create_event_log_line(event_type='/jsi18n/', event_source='server'),
self.default_key,
('', '/jsi18n/', '{}', self.DEFAULT_DATE)
)
def test_course_event(self):
self.assert_single_map_output(
self.create_event_log_line(event_type='/courses/foo/bar/', event_source='server'),
self.default_key,
('', '/courses/foo/bar/', '{}', self.DEFAULT_DATE)
)
def test_section_view_event(self):
event_type = '/courses/{0}/courseware/foo/'.format(self.course_id)
self.assert_single_map_output(
self.create_event_log_line(event_type=event_type, event_source='server'),
self.default_key,
('', event_type, '{}', self.DEFAULT_DATE)
)
def test_subsection_event(self):
self.assert_last_subsection_viewed_recognized('foo/bar/')
def assert_last_subsection_viewed_recognized(self, end_of_path):
"""Assert that given a path ending the event is recognized as a subsection view"""
event_type = '/courses/{0}/courseware/{1}'.format(self.course_id, end_of_path)
self.assert_single_map_output(
self.create_event_log_line(event_type=event_type, event_source='server'),
self.default_key,
('', 'marker:last_subsection_viewed', json.dumps({
'path': event_type,
'timestamp': self.DEFAULT_TIMESTAMP,
}), self.DEFAULT_DATE)
)
def test_subsection_sequence_num_event(self):
self.assert_last_subsection_viewed_recognized('foo/bar/10')
def test_subsection_jquery_event(self):
self.assert_last_subsection_viewed_recognized('foo/bar/jquery.js')
@ddt
class WeeklyStudentEngagementTaskMapTest(BaseStudentEngagementTaskMapTest):
"""Test mapping of dates to weekly intervals in student engagement."""
INTERVAL_START = "2013-11-01"
INTERVAL_END = "2014-01-02"
def setUp(self):
super(WeeklyStudentEngagementTaskMapTest, self).setUp()
interval = "{}-{}".format(self.INTERVAL_START, self.INTERVAL_END)
self.create_task(interval=interval, interval_type="weekly")
@data(
("2014-01-01", "2014-01-01"),
("2013-12-25", "2013-12-25"),
("2014-01-01", "2013-12-27"),
("2013-12-25", "2013-12-23"),
)
@unpack
def test_date_mappings(self, expected_end_date, actual_event_date):
self.assert_date_mappings(expected_end_date, actual_event_date)
@ddt
class AllStudentEngagementTaskMapTest(BaseStudentEngagementTaskMapTest):
"""Test mapping of dates to overall interval in student engagement."""
INTERVAL_START = "2013-11-01"
INTERVAL_END = "2014-01-02"
def setUp(self):
super(AllStudentEngagementTaskMapTest, self).setUp()
interval = "{}-{}".format(self.INTERVAL_START, self.INTERVAL_END)
self.create_task(interval=interval, interval_type="all")
@data(
("2014-01-01", "2014-01-01"),
("2014-01-01", "2013-12-25"),
("2014-01-01", "2013-12-27"),
("2014-01-01", "2013-12-23"),
)
@unpack
def test_date_mappings(self, expected_end_date, actual_event_date):
self.assert_date_mappings(expected_end_date, actual_event_date)
class StudentEngagementTaskLegacyMapTest(InitializeLegacyKeysMixin, StudentEngagementTaskMapTest):
"""Test analysis of detailed student engagement using legacy ID formats"""
pass
@ddt
class StudentEngagementTaskReducerTest(ReducerTestMixin, unittest.TestCase):
"""
Tests to verify that engagement data is reduced properly
"""
task_class = StudentEngagementTask
WAS_ACTIVE_COLUMN = 3
PROBLEMS_ATTEMPTED_COLUMN = 4
PROBLEM_ATTEMPTS_COLUMN = 5
PROBLEMS_CORRECT_COLUMN = 6
VIDEOS_PLAYED_COLUMN = 7
FORUM_POSTS_COLUMN = 8
FORUM_REPLIES_COLUMN = 9
FORUM_COMMENTS_COLUMN = 10
TEXTBOOK_PAGES_COLUMN = 11
LAST_SUBSECTION_COLUMN = 12
def setUp(self):
super(StudentEngagementTaskReducerTest, self).setUp()
self.reduce_key = (self.DATE, self.COURSE_ID, self.USERNAME)
def test_any_activity(self):
inputs = [
('', '/foo', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 0,
self.PROBLEM_ATTEMPTS_COLUMN: 0,
self.PROBLEMS_CORRECT_COLUMN: 0,
self.VIDEOS_PLAYED_COLUMN: 0,
self.FORUM_POSTS_COLUMN: 0,
self.FORUM_REPLIES_COLUMN: 0,
self.FORUM_COMMENTS_COLUMN: 0,
self.TEXTBOOK_PAGES_COLUMN: 0,
self.LAST_SUBSECTION_COLUMN: '',
})
def test_single_problem_attempted(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 1,
self.PROBLEM_ATTEMPTS_COLUMN: 1,
self.PROBLEMS_CORRECT_COLUMN: 1,
})
def test_single_problem_attempted_incorrect(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 1,
self.PROBLEM_ATTEMPTS_COLUMN: 1,
self.PROBLEMS_CORRECT_COLUMN: 0,
})
def test_single_problem_attempted_multiple_events(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz', 'problem_check', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 1,
self.PROBLEM_ATTEMPTS_COLUMN: 3,
self.PROBLEMS_CORRECT_COLUMN: 1,
})
def test_multiple_problems_attempted(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz2', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz', 'problem_check', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 2,
self.PROBLEM_ATTEMPTS_COLUMN: 3,
self.PROBLEMS_CORRECT_COLUMN: 2,
})
def test_single_video_played(self):
inputs = [
('foobarbaz', 'play_video', '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.VIDEOS_PLAYED_COLUMN: 1,
})
def test_multiple_video_plays_same_video(self):
inputs = [
('foobarbaz', 'play_video', '{}', self.DATE),
('foobarbaz', 'play_video', '{}', self.DATE),
('foobarbaz', 'play_video', '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.VIDEOS_PLAYED_COLUMN: 1,
})
def test_other_video_events(self):
inputs = [
('foobarbaz', 'pause_video', '{}', self.DATE),
('foobarbaz2', 'seek_video', '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.VIDEOS_PLAYED_COLUMN: 0,
})
@data(
('edx.forum.thread.created', FORUM_POSTS_COLUMN),
('edx.forum.response.created', FORUM_REPLIES_COLUMN),
('edx.forum.comment.created', FORUM_COMMENTS_COLUMN),
('book', TEXTBOOK_PAGES_COLUMN),
)
@unpack
def test_count_events(self, event_type, column_num):
inputs = [
('', event_type, '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
column_num: 1,
})
@data(
('edx.forum.thread.created', FORUM_POSTS_COLUMN),
('edx.forum.response.created', FORUM_REPLIES_COLUMN),
('edx.forum.comment.created', FORUM_COMMENTS_COLUMN),
('book', TEXTBOOK_PAGES_COLUMN),
)
@unpack
def test_multiple_counted_events(self, event_type, column_num):
inputs = [
('', event_type, '{}', self.DATE),
('', event_type, '{}', self.DATE),
]
self._check_output_by_key(inputs, {
column_num: 2,
})
def test_last_subsection(self):
inputs = [
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'foobar',
'timestamp': '2014-12-01T00:00:00.000000',
}), self.DATE),
]
self._check_output_by_key(inputs, {
self.LAST_SUBSECTION_COLUMN: 'foobar',
})
def test_multiple_subsection_views(self):
inputs = [
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'finalpath',
'timestamp': '2014-12-01T00:00:04.000000',
}), self.DATE),
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'foobar',
'timestamp': '2014-12-01T00:00:00.000000',
}), self.DATE),
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'foobar1',
'timestamp': '2014-12-01T00:00:03.000000',
}), self.DATE),
]
self._check_output_by_key(inputs, {
self.LAST_SUBSECTION_COLUMN: 'finalpath',
})
| agpl-3.0 | -7,322,102,819,882,186,000 | 35.742169 | 108 | 0.571222 | false |
wscullin/spack | var/spack/repos/builtin/packages/fontcacheproto/package.py | 3 | 1563 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Fontcacheproto(AutotoolsPackage):
"""X.org FontcacheProto protocol headers."""
homepage = "http://cgit.freedesktop.org/xorg/proto/fontcacheproto"
url = "https://www.x.org/archive/individual/proto/fontcacheproto-0.1.3.tar.gz"
version('0.1.3', '5a91ab914ffbfbc856e6fcde52e6f3e3')
| lgpl-2.1 | -195,328,897,392,362,980 | 44.970588 | 87 | 0.682022 | false |
BT-fgarbely/odoo | addons/stock/report/stock_graph.py | 326 | 4514 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from pychart import *
import pychart.legend
import time
from openerp.report.misc import choice_colors
from openerp import tools
#
# Draw a graph for stocks
#
class stock_graph(object):
def __init__(self, io):
self._datas = {}
self._canvas = canvas.init(fname=io, format='pdf')
self._canvas.set_author("Odoo")
self._canvas.set_title("Stock Level Forecast")
self._names = {}
self.val_min = ''
self.val_max = ''
def add(self, product_id, product_name, datas):
if hasattr(product_name, 'replace'):
product_name=product_name.replace('/', '//')
if product_id not in self._datas:
self._datas[product_id] = {}
self._names[product_id] = tools.ustr(product_name)
for (dt,stock) in datas:
if not dt in self._datas[product_id]:
self._datas[product_id][dt]=0
self._datas[product_id][dt]+=stock
if self.val_min:
self.val_min = min(self.val_min,dt)
else:
self.val_min = dt
self.val_max = max(self.val_max,dt)
def draw(self):
colors = choice_colors(len(self._datas.keys()))
user_color = {}
for user in self._datas.keys():
user_color[user] = colors.pop()
val_min = int(time.mktime(time.strptime(self.val_min,'%Y-%m-%d')))
val_max = int(time.mktime(time.strptime(self.val_max,'%Y-%m-%d')))
plots = []
for product_id in self._datas:
f = fill_style.Plain()
            f.bgcolor = user_color[product_id]
datas = self._datas[product_id].items()
datas = map(lambda x: (int(time.mktime(time.strptime(x[0],'%Y-%m-%d'))),x[1]), datas)
datas.sort()
datas2 = []
val = 0
for d in datas:
val+=d[1]
if len(datas2):
                    d2 = d[0]-60*60*24  # one day, in seconds
if datas2[-1][0]<d2-1000:
datas2.append((d2,datas2[-1][1]))
datas2.append((d[0],val))
if len(datas2) and datas2[-1][0]<val_max-100:
datas2.append((val_max, datas2[-1][1]))
if len(datas2)==1:
datas2.append( (datas2[0][0]+100, datas2[0][1]) )
st = line_style.T()
st.color = user_color[product_id]
st.width = 1
st.cap_style=1
st.join_style=1
plot = line_plot.T(label=self._names[product_id], data=datas2, line_style=st)
plots.append(plot)
interval = max((val_max-val_min)/15, 86400)
x_axis = axis.X(format=lambda x:'/a60{}'+time.strftime('%Y-%m-%d',time.gmtime(x)), tic_interval=interval, label=None)
# For add the report header on the top of the report.
tb = text_box.T(loc=(300, 500), text="/hL/15/bStock Level Forecast", line_style=None)
tb.draw()
ar = area.T(size = (620,435), x_range=(val_min,val_max+1), y_axis = axis.Y(format="%d", label="Virtual Stock (Unit)"), x_axis=x_axis)
for plot in plots:
ar.add_plot(plot)
ar.draw(self._canvas)
def close(self):
self._canvas.close()
if __name__ == '__main__':
gt = stock_graph('test.pdf')
gt.add(1, 'Pomme', [('2005-07-29', 6), ('2005-07-30', -2), ('2005-07-31', 4)])
gt.add(2, 'Cailloux', [('2005-07-29', 9), ('2005-07-30', -4), ('2005-07-31', 2)])
gt.draw()
gt.close()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -3,023,454,693,498,379,300 | 38.252174 | 141 | 0.541205 | false |
hdknr/paloma | example/app/workers.py | 1 | 1671 | import os
import sys
#
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings")
APP_DIR=os.path.dirname(__file__)
LOG_FILE="/tmp/paloma.log" #: celery worker logfile
PID_FILE="/tmp/paloma.pid" #: celery worker PID file
PID_CAM="/tmp/paloma.pid"
NODE="celery" #: celery = default node
LOG_LEVEL="DEBUG" #: celery log level
def configure(*args):
    ''' Return the django-celery parameters for the specified args
- args[0] : paloma_worker.py
- args[1] : path this django project application
- args[2] : command
'''
if len(args) < 3 or args[2] == "start" :
#: start worker
        #: TODO: Check for an existing process
return [
"celery","worker",
"--loglevel=%s" % LOG_LEVEL,
"--pidfile=%s" % PID_FILE,
"--logfile=%s" % LOG_FILE ,
"-E", # event option for celerycam
"--beat" ,
"--scheduler=djcelery.schedulers.DatabaseScheduler",
]
if len(args) >2 and args[2] == "stop":
#: stop worker
return [
"celery","multi",
"stop",NODE,
"--pidfile=%s" % PID_FILE,
]
if len(args) >2 and args[2] == "cam":
        #: TODO: Check for an existing process
return [
"celerycam",
"--pidfile=%s" % PID_CAM,
]
if len(args) >2 and args[2] == "camstop":
        #: TODO: Check for an existing process
return [
"celery","multi",
"stop",NODE,
"--pidfile=%s" % PID_CAM,
]
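# Invocation sketch (argv layout taken from configure()'s docstring; the
# script and application names are placeholders):
#
#   python paloma_worker.py app start   -> ['celery', 'worker', '--beat', ...]
#   python paloma_worker.py app stop    -> ['celery', 'multi', 'stop', NODE, ...]
#   python paloma_worker.py app cam     -> ['celerycam', '--pidfile=...']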
| bsd-2-clause | -606,034,114,957,265,400 | 29.381818 | 68 | 0.479952 | false |
WalterPaixaoCortes/Reusables | labio/labio/argParseWrapper.py | 1 | 5984 | """
Purpose
    The purpose of the argParseWrapper module is to provide an easy way to use the native argparse module from the
    Python distribution in order to parse command line arguments.
Description
    It contains a simple wrapper class for the argparse.Action class, which adds the missing action attribute, and a
    return_args function that parses the command line and returns the arguments and options ready to be used.
Dependencies
argparse, labio.configWrapper.
"""
import argparse
#---------------------------------------------------------------------------------------
# [history]
# [15/03/2014 - walter.paixao-cortes] - First version
# [19/03/2014 - walter.paixao-cortes] - Adding comments to generate the documentation.
#---------------------------------------------------------------------------------------
class CustomAction(argparse.Action):
"""
Wrapper class for argparse.Action class.
Adds the action attribute to the object, which is missing from the class.
"""
action = None
"""The action attribute."""
#---------------------------------------------------------------------------------------
# [history]
# [15/03/2014 - walter.paixao-cortes] - First version
# [19/03/2014 - walter.paixao-cortes] - Adding comments to generate the documentation.
#---------------------------------------------------------------------------------------
def return_args(arguments):
"""
Purpose
Parse the arguments from command line, based on a json dictionary.
Description
        The function receives and iterates through the arguments dictionary,
        creating an instance of :class:`labio.argParseWrapper.CustomAction` for
        each argument, which is then added to the parser.
Parameter
arguments - a dictionary of json objects describing the options.
Returns
        argparse.Namespace object whose attributes are the keys of each json object in the
        dictionary, with the values captured from the command line as their values.
Json structure
The json structure that represents each argument is as follows:
::
{
short: string - Represents the short version of an optional parameter (e.g. -f).
The string "None" is used when it is an argument, not an optional parameter.
long: string - Represents the short version of an optional parameter (e.g. -file).
The string "None" is used when it is an argument, not an optional parameter.
dest: string - the attribute that will receive the value of the optional parameter.
help: string - The explanation that will be displayed for this optional parameter
when the command line is executed with the ``--help`` option.
metavar: string - The explanation that will be displayed for this argument
when the command line is executed with the ``--help`` option.
type: string - The type of data for this optional parameter or argument (str, int, ...).
action: string - The action that will be executed. See more detail in the argparse documentation.
nargs: string - The number of arguments that an optional parameter should have.
                            ? means 0 or 1 ('*' means 0 or more)
1..n means the number of arguments
default: string - The default value when the optional parameter does not have a value set.
const: string - The constant value when the optional parameter does not have a value set.
choices: list - The choices that are valid for an optional argument.
}
"""
#Initializing variables
optItem = None
isOptionCorrect = False
parser = argparse.ArgumentParser()
#iterate through the dictionary, filling an instance of CustomAction and adding to the parser
for item in arguments:
if arguments[item].has_key('short') and arguments[item].has_key('long') and arguments[item].has_key('dest'):
optItem = CustomAction([arguments[item]['short'],arguments[item]['long']],dest=arguments[item]['dest'])
isOptionCorrect = True
if arguments[item].has_key('dest') and isOptionCorrect:
optItem.dest = arguments[item]['dest']
if arguments[item].has_key('action') and isOptionCorrect:
optItem.action = arguments[item]['action']
if arguments[item].has_key('type') and isOptionCorrect:
optItem.type = eval(arguments[item]['type'])
if arguments[item].has_key('nargs') and isOptionCorrect:
optItem.nargs = eval(arguments[item]['nargs'])
else:
optItem.nargs='?'
if arguments[item].has_key('help') and isOptionCorrect:
optItem.help = arguments[item]['help']
if arguments[item].has_key('metavar') and isOptionCorrect:
optItem.metavar = arguments[item]['metavar']
if arguments[item].has_key('default') and isOptionCorrect:
optItem.default = eval(arguments[item]['default'])
if arguments[item].has_key('const') and isOptionCorrect:
optItem.const = eval(arguments[item]['const'])
if arguments[item].has_key('choices') and isOptionCorrect:
optItem.choices = eval(arguments[item]['choices'])
        # Add to the parser with different parameters depending on whether it is an argument or an optional parameter
if optItem.option_strings[0] == u'None':
parser.add_argument(optItem.metavar, action=optItem.action, type=optItem.type, nargs=optItem.nargs, help=optItem.help, metavar=optItem.metavar, default=optItem.default, choices=optItem.choices)
else:
if optItem.action is None:
parser.add_argument(optItem.option_strings[0],optItem.option_strings[1], dest=optItem.dest, action=optItem.action, type=optItem.type, nargs=optItem.nargs, help=optItem.help, metavar=optItem.metavar, default=optItem.default, choices=optItem.choices)
else:
parser.add_argument(optItem.option_strings[0],optItem.option_strings[1], dest=optItem.dest, action=optItem.action, help=optItem.help, default=optItem.default)
    # Parse the command line arguments and return the populated Namespace
    # with the keys of the json objects as attributes.
options = parser.parse_args()
return options
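# Illustrative sketch (hypothetical option definition; the names below are
# examples only and not part of labio itself):
_EXAMPLE_ARGUMENTS = {
    'verbose': {
        'short': '-v',
        'long': '--verbose',
        'dest': 'verbose',
        'action': 'store_true',
        'help': 'enable verbose output',
    },
}
# options = return_args(_EXAMPLE_ARGUMENTS)
# options.verbose is True when -v/--verbose is passed on the command line.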
| gpl-2.0 | 2,381,034,933,764,863,500 | 42.362319 | 252 | 0.676638 | false |
nabsboss/CouchPotatoServer | libs/elixir/collection.py | 27 | 4457 | '''
Default entity collection implementation
'''
import sys
import re
class BaseCollection(list):
def __init__(self, entities=None):
list.__init__(self)
if entities is not None:
self.extend(entities)
def extend(self, entities):
for e in entities:
self.append(e)
def clear(self):
del self[:]
def resolve_absolute(self, key, full_path, entity=None, root=None):
if root is None:
root = entity._descriptor.resolve_root
if root:
full_path = '%s.%s' % (root, full_path)
module_path, classname = full_path.rsplit('.', 1)
module = sys.modules[module_path]
res = getattr(module, classname, None)
if res is None:
if entity is not None:
raise Exception("Couldn't resolve target '%s' <%s> in '%s'!"
% (key, full_path, entity.__name__))
else:
raise Exception("Couldn't resolve target '%s' <%s>!"
% (key, full_path))
return res
def __getattr__(self, key):
return self.resolve(key)
# default entity collection
class GlobalEntityCollection(BaseCollection):
def __init__(self, entities=None):
# _entities is a dict of entities keyed on their name.
self._entities = {}
super(GlobalEntityCollection, self).__init__(entities)
def append(self, entity):
'''
Add an entity to the collection.
'''
super(EntityCollection, self).append(entity)
existing_entities = self._entities.setdefault(entity.__name__, [])
existing_entities.append(entity)
def resolve(self, key, entity=None):
'''
Resolve a key to an Entity. The optional `entity` argument is the
"source" entity when resolving relationship targets.
'''
# Do we have a fully qualified entity name?
if '.' in key:
return self.resolve_absolute(key, key, entity)
else:
# Otherwise we look in the entities of this collection
res = self._entities.get(key, None)
if res is None:
if entity:
raise Exception("Couldn't resolve target '%s' in '%s'"
% (key, entity.__name__))
else:
raise Exception("This collection does not contain any "
"entity corresponding to the key '%s'!"
% key)
elif len(res) > 1:
raise Exception("'%s' resolves to several entities, you should"
" use the full path (including the full module"
" name) to that entity." % key)
else:
return res[0]
def clear(self):
self._entities = {}
super(GlobalEntityCollection, self).clear()
# backward compatible name
EntityCollection = GlobalEntityCollection
_leading_dots = re.compile('^([.]*).*$')
class RelativeEntityCollection(BaseCollection):
# the entity=None does not make any sense with a relative entity collection
def resolve(self, key, entity):
'''
Resolve a key to an Entity. The optional `entity` argument is the
"source" entity when resolving relationship targets.
'''
full_path = key
if '.' not in key or key.startswith('.'):
# relative target
# any leading dot is stripped and with each dot removed,
# the entity_module is stripped of one more chunk (starting with
# the last one).
num_dots = _leading_dots.match(full_path).end(1)
full_path = full_path[num_dots:]
chunks = entity.__module__.split('.')
chunkstokeep = len(chunks) - num_dots
if chunkstokeep < 0:
raise Exception("Couldn't resolve relative target "
"'%s' relative to '%s'" % (key, entity.__module__))
entity_module = '.'.join(chunks[:chunkstokeep])
            if entity_module and entity_module != '__main__':
full_path = '%s.%s' % (entity_module, full_path)
root = ''
else:
root = None
return self.resolve_absolute(key, full_path, entity, root=root)
def __getattr__(self, key):
raise NotImplementedError
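# Resolution sketch (hypothetical module layout, for illustration only): for
# an entity defined in 'app.models.billing', the key '..core.Account' strips
# its two leading dots, drops the last two chunks of the module path and
# resolves 'app.core.Account'; a bare key 'Account' resolves
# 'app.models.billing.Account'.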
| gpl-3.0 | -7,679,875,595,149,602,000 | 34.656 | 79 | 0.54319 | false |
pitch-sands/i-MPI | flask/Lib/site-packages/pip-1.5.6-py2.7.egg/pip/log.py | 344 | 9455 | """Logging
"""
import sys
import os
import logging
from pip import backwardcompat
from pip._vendor import colorama, pkg_resources
def _color_wrap(*colors):
def wrapped(inp):
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
return wrapped
def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32)
else consumer.wrapped)
# If consumer isn't stdout or stderr we shouldn't colorize it
if real_consumer not in std:
return False
# If consumer is a tty we should color it
if hasattr(real_consumer, "isatty") and real_consumer.isatty():
return True
    # If we have an ANSI term we should color it
if environ.get("TERM") == "ANSI":
return True
# If anything else we should not color it
return False
def should_warn(current_version, removal_version):
# Our Significant digits on versions is 2, so remove everything but the
# first two places.
current_version = ".".join(current_version.split(".")[:2])
removal_version = ".".join(removal_version.split(".")[:2])
# Our warning threshold is one minor version before removal, so we
# decrement the minor version by one
major, minor = removal_version.split(".")
minor = str(int(minor) - 1)
warn_version = ".".join([major, minor])
# Test if our current_version should be a warn
return (pkg_resources.parse_version(current_version)
< pkg_resources.parse_version(warn_version))
class Logger(object):
"""
Logging object for use in command-line script. Allows ranges of
levels, to avoid some redundancy of displayed information.
"""
VERBOSE_DEBUG = logging.DEBUG - 1
DEBUG = logging.DEBUG
INFO = logging.INFO
NOTIFY = (logging.INFO + logging.WARN) / 2
WARN = WARNING = logging.WARN
ERROR = logging.ERROR
FATAL = logging.FATAL
LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
COLORS = {
WARN: _color_wrap(colorama.Fore.YELLOW),
ERROR: _color_wrap(colorama.Fore.RED),
FATAL: _color_wrap(colorama.Fore.RED),
}
def __init__(self):
self.consumers = []
self.indent = 0
self.explicit_levels = False
self.in_progress = None
self.in_progress_hanging = False
def add_consumers(self, *consumers):
if sys.platform.startswith("win"):
for level, consumer in consumers:
if hasattr(consumer, "write"):
self.consumers.append(
(level, colorama.AnsiToWin32(consumer)),
)
else:
self.consumers.append((level, consumer))
else:
self.consumers.extend(consumers)
def debug(self, msg, *args, **kw):
self.log(self.DEBUG, msg, *args, **kw)
def info(self, msg, *args, **kw):
self.log(self.INFO, msg, *args, **kw)
def notify(self, msg, *args, **kw):
self.log(self.NOTIFY, msg, *args, **kw)
def warn(self, msg, *args, **kw):
self.log(self.WARN, msg, *args, **kw)
def error(self, msg, *args, **kw):
self.log(self.ERROR, msg, *args, **kw)
def fatal(self, msg, *args, **kw):
self.log(self.FATAL, msg, *args, **kw)
def deprecated(self, removal_version, msg, *args, **kwargs):
"""
Logs deprecation message which is log level WARN if the
``removal_version`` is > 1 minor release away and log level ERROR
otherwise.
removal_version should be the version that the deprecated feature is
expected to be removed in, so something that will not exist in
version 1.7, but will in 1.6 would have a removal_version of 1.7.
"""
from pip import __version__
if should_warn(__version__, removal_version):
self.warn(msg, *args, **kwargs)
else:
self.error(msg, *args, **kwargs)
def log(self, level, msg, *args, **kw):
if args:
if kw:
raise TypeError(
"You may give positional or keyword arguments, not both")
args = args or kw
# render
if args:
rendered = msg % args
else:
rendered = msg
rendered = ' ' * self.indent + rendered
if self.explicit_levels:
## FIXME: should this be a name, not a level number?
rendered = '%02i %s' % (level, rendered)
for consumer_level, consumer in self.consumers:
if self.level_matches(level, consumer_level):
if (self.in_progress_hanging
and consumer in (sys.stdout, sys.stderr)):
self.in_progress_hanging = False
sys.stdout.write('\n')
sys.stdout.flush()
if hasattr(consumer, 'write'):
write_content = rendered + '\n'
if should_color(consumer, os.environ):
# We are printing to stdout or stderr and it supports
# colors so render our text colored
colorizer = self.COLORS.get(level, lambda x: x)
write_content = colorizer(write_content)
consumer.write(write_content)
if hasattr(consumer, 'flush'):
consumer.flush()
else:
consumer(rendered)
def _show_progress(self):
"""Should we display download progress?"""
return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
def start_progress(self, msg):
assert not self.in_progress, (
"Tried to start_progress(%r) while in_progress %r"
% (msg, self.in_progress))
if self._show_progress():
sys.stdout.write(' ' * self.indent + msg)
sys.stdout.flush()
self.in_progress_hanging = True
else:
self.in_progress_hanging = False
self.in_progress = msg
self.last_message = None
def end_progress(self, msg='done.'):
assert self.in_progress, (
"Tried to end_progress without start_progress")
if self._show_progress():
if not self.in_progress_hanging:
# Some message has been printed out since start_progress
sys.stdout.write('...' + self.in_progress + msg + '\n')
sys.stdout.flush()
else:
# These erase any messages shown with show_progress (besides .'s)
logger.show_progress('')
logger.show_progress('')
sys.stdout.write(msg + '\n')
sys.stdout.flush()
self.in_progress = None
self.in_progress_hanging = False
def show_progress(self, message=None):
"""If we are in a progress scope, and no log messages have been
shown, write out another '.'"""
if self.in_progress_hanging:
if message is None:
sys.stdout.write('.')
sys.stdout.flush()
else:
if self.last_message:
padding = ' ' * max(0, len(self.last_message) - len(message))
else:
padding = ''
sys.stdout.write('\r%s%s%s%s' %
(' ' * self.indent, self.in_progress, message, padding))
sys.stdout.flush()
self.last_message = message
def stdout_level_matches(self, level):
"""Returns true if a message at this level will go to stdout"""
return self.level_matches(level, self._stdout_level())
def _stdout_level(self):
"""Returns the level that stdout runs at"""
for level, consumer in self.consumers:
if consumer is sys.stdout:
return level
return self.FATAL
def level_matches(self, level, consumer_level):
"""
>>> l = Logger()
>>> l.level_matches(3, 4)
False
>>> l.level_matches(3, 2)
True
>>> l.level_matches(slice(None, 3), 3)
False
>>> l.level_matches(slice(None, 3), 2)
True
>>> l.level_matches(slice(1, 3), 1)
True
>>> l.level_matches(slice(2, 3), 1)
False
"""
if isinstance(level, slice):
start, stop = level.start, level.stop
if start is not None and start > consumer_level:
return False
            if stop is not None and stop <= consumer_level:
return False
return True
else:
return level >= consumer_level
@classmethod
def level_for_integer(cls, level):
levels = cls.LEVELS
if level < 0:
return levels[0]
if level >= len(levels):
return levels[-1]
return levels[level]
def move_stdout_to_stderr(self):
to_remove = []
to_add = []
for consumer_level, consumer in self.consumers:
if consumer == sys.stdout:
to_remove.append((consumer_level, consumer))
to_add.append((consumer_level, sys.stderr))
for item in to_remove:
self.consumers.remove(item)
self.consumers.extend(to_add)
logger = Logger()
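# Minimal wiring sketch (illustrative only; assumes console output is wanted):
#
#   logger.add_consumers((Logger.NOTIFY, sys.stdout))
#   logger.notify('resolved %s packages', 3)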
| bsd-3-clause | 6,310,305,760,913,892,000 | 33.257246 | 88 | 0.553358 | false |
yelizariev/account-financial-tools | account_credit_control/wizard/__init__.py | 40 | 1154 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi, Guewen Baconnier
# Copyright 2012-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import credit_control_emailer
from . import credit_control_marker
from . import credit_control_printer
from . import credit_control_communication
from . import credit_control_policy_changer
| agpl-3.0 | 6,910,343,125,543,812,000 | 45.16 | 78 | 0.638648 | false |
joopert/home-assistant | homeassistant/components/mobile_app/websocket_api.py | 2 | 3785 | """Websocket API for mobile_app."""
import voluptuous as vol
from homeassistant.components.cloud import async_delete_cloudhook
from homeassistant.components.websocket_api import (
ActiveConnection,
async_register_command,
async_response,
result_message,
websocket_command,
ws_require_user,
)
from homeassistant.components.websocket_api.const import (
ERR_INVALID_FORMAT,
ERR_NOT_FOUND,
ERR_UNAUTHORIZED,
)
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
CONF_CLOUDHOOK_URL,
CONF_USER_ID,
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_STORE,
DOMAIN,
)
from .helpers import safe_registration, savable_state
def register_websocket_handlers(hass: HomeAssistantType) -> bool:
"""Register the websocket handlers."""
async_register_command(hass, websocket_get_user_registrations)
async_register_command(hass, websocket_delete_registration)
return True
@ws_require_user()
@async_response
@websocket_command(
{
vol.Required("type"): "mobile_app/get_user_registrations",
vol.Optional(CONF_USER_ID): cv.string,
}
)
async def websocket_get_user_registrations(
hass: HomeAssistantType, connection: ActiveConnection, msg: dict
) -> None:
"""Return all registrations or just registrations for given user ID."""
user_id = msg.get(CONF_USER_ID, connection.user.id)
if user_id != connection.user.id and not connection.user.is_admin:
# If user ID is provided and is not current user ID and current user
# isn't an admin user
connection.send_error(msg["id"], ERR_UNAUTHORIZED, "Unauthorized")
return
user_registrations = []
for config_entry in hass.config_entries.async_entries(domain=DOMAIN):
registration = config_entry.data
if connection.user.is_admin or registration[CONF_USER_ID] is user_id:
user_registrations.append(safe_registration(registration))
connection.send_message(result_message(msg["id"], user_registrations))
@ws_require_user()
@async_response
@websocket_command(
{
vol.Required("type"): "mobile_app/delete_registration",
vol.Required(CONF_WEBHOOK_ID): cv.string,
}
)
async def websocket_delete_registration(
hass: HomeAssistantType, connection: ActiveConnection, msg: dict
) -> None:
"""Delete the registration for the given webhook_id."""
user = connection.user
webhook_id = msg.get(CONF_WEBHOOK_ID)
if webhook_id is None:
connection.send_error(msg["id"], ERR_INVALID_FORMAT, "Webhook ID not provided")
return
config_entry = hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id]
registration = config_entry.data
if registration is None:
connection.send_error(
msg["id"], ERR_NOT_FOUND, "Webhook ID not found in storage"
)
return
if registration[CONF_USER_ID] != user.id and not user.is_admin:
        connection.send_error(
            msg["id"], ERR_UNAUTHORIZED, "User is not registration owner"
        )
        return
await hass.config_entries.async_remove(config_entry.entry_id)
hass.data[DOMAIN][DATA_DELETED_IDS].append(webhook_id)
store = hass.data[DOMAIN][DATA_STORE]
try:
await store.async_save(savable_state(hass))
except HomeAssistantError:
return error_message(msg["id"], "internal_error", "Error deleting registration")
if CONF_CLOUDHOOK_URL in registration and "cloud" in hass.config.components:
await async_delete_cloudhook(hass, webhook_id)
connection.send_message(result_message(msg["id"], "ok"))
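# Example frames for these commands (illustrative payloads; the id and
# webhook_id values are placeholders):
#
#   {"id": 24, "type": "mobile_app/get_user_registrations"}
#   {"id": 25, "type": "mobile_app/delete_registration", "webhook_id": "abc123"}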
| apache-2.0 | -8,799,296,426,778,394,000 | 30.02459 | 88 | 0.699604 | false |
onitu/onitu | docs/conf.py | 1 | 8416 | # -*- coding: utf-8 -*-
#
# Onitu documentation build configuration file, created by
# sphinx-quickstart on Fri Nov 8 21:18:03 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.httpdomain']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Onitu'
copyright = u'2013, Yannick Péroux, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1-prev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Onitudoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Onitu.tex', u'Onitu Documentation',
u'Yannick PÉROUX, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'onitu', u'Onitu Documentation',
[u'Yannick PÉROUX, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Onitu', u'Onitu Documentation',
u'Yannick PÉROUX, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin', 'Onitu', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit | -6,218,835,167,545,076,000 | 33.600823 | 211 | 0.711108 | false |
eneabio/nova | nova/rootwrap/wrapper.py | 8 | 1974 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
FILTERS_MODULES = ['nova.rootwrap.compute',
'nova.rootwrap.network',
'nova.rootwrap.volume',
]
def load_filters():
"""Load filters from modules present in nova.rootwrap."""
filters = []
for modulename in FILTERS_MODULES:
try:
__import__(modulename)
module = sys.modules[modulename]
filters = filters + module.filterlist
except ImportError:
# It's OK to have missing filters, since filter modules are
# shipped with specific nodes rather than with python-nova
pass
return filters
def match_filter(filters, userargs):
"""
Checks user command and arguments through command filters and
    returns the first matching filter, or None if none matched.
"""
found_filter = None
for f in filters:
if f.match(userargs):
# Try other filters if executable is absent
if not os.access(f.exec_path, os.X_OK):
if not found_filter:
found_filter = f
continue
# Otherwise return matching filter for execution
return f
# No filter matched or first missing executable
return found_filter
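# Minimal usage sketch (illustrative only; the command below is hypothetical):
#
#     filters = load_filters()
#     userargs = ['kill', '-9', '1234']
#     matched = match_filter(filters, userargs)
#     if matched is None:
#         raise Exception("Unauthorized command: %s" % ' '.join(userargs))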
| apache-2.0 | -8,385,412,597,798,560,000 | 30.333333 | 78 | 0.634245 | false |
taedla01/MissionPlanner | Lib/site-packages/numpy/core/function_base.py | 82 | 5474 | __all__ = ['logspace', 'linspace']
import numeric as _nx
from numeric import array
def linspace(start, stop, num=50, endpoint=True, retstep=False):
"""
Return evenly spaced numbers over a specified interval.
Returns `num` evenly spaced samples, calculated over the
interval [`start`, `stop` ].
The endpoint of the interval can optionally be excluded.
Parameters
----------
start : scalar
The starting value of the sequence.
stop : scalar
The end value of the sequence, unless `endpoint` is set to False.
In that case, the sequence consists of all but the last of ``num + 1``
evenly spaced samples, so that `stop` is excluded. Note that the step
size changes when `endpoint` is False.
num : int, optional
Number of samples to generate. Default is 50.
endpoint : bool, optional
If True, `stop` is the last sample. Otherwise, it is not included.
Default is True.
retstep : bool, optional
If True, return (`samples`, `step`), where `step` is the spacing
between samples.
Returns
-------
samples : ndarray
There are `num` equally spaced samples in the closed interval
``[start, stop]`` or the half-open interval ``[start, stop)``
(depending on whether `endpoint` is True or False).
step : float (only if `retstep` is True)
Size of spacing between samples.
See Also
--------
    arange : Similar to `linspace`, but uses a step size (instead of the
number of samples).
logspace : Samples uniformly distributed in log space.
Examples
--------
>>> np.linspace(2.0, 3.0, num=5)
array([ 2. , 2.25, 2.5 , 2.75, 3. ])
>>> np.linspace(2.0, 3.0, num=5, endpoint=False)
array([ 2. , 2.2, 2.4, 2.6, 2.8])
>>> np.linspace(2.0, 3.0, num=5, retstep=True)
(array([ 2. , 2.25, 2.5 , 2.75, 3. ]), 0.25)
Graphical illustration:
>>> import matplotlib.pyplot as plt
>>> N = 8
>>> y = np.zeros(N)
>>> x1 = np.linspace(0, 10, N, endpoint=True)
>>> x2 = np.linspace(0, 10, N, endpoint=False)
>>> plt.plot(x1, y, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.plot(x2, y + 0.5, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.ylim([-0.5, 1])
(-0.5, 1)
>>> plt.show()
"""
num = int(num)
if num <= 0:
return array([], float)
if endpoint:
if num == 1:
return array([float(start)])
step = (stop-start)/float((num-1))
y = _nx.arange(0, num) * step + start
y[-1] = stop
else:
step = (stop-start)/float(num)
y = _nx.arange(0, num) * step + start
if retstep:
return y, step
else:
return y
def logspace(start, stop, num=50, endpoint=True, base=10.0):
"""
Return numbers spaced evenly on a log scale.
In linear space, the sequence starts at ``base ** start``
(`base` to the power of `start`) and ends with ``base ** stop``
(see `endpoint` below).
Parameters
----------
start : float
``base ** start`` is the starting value of the sequence.
stop : float
``base ** stop`` is the final value of the sequence, unless `endpoint`
is False. In that case, ``num + 1`` values are spaced over the
interval in log-space, of which all but the last (a sequence of
length ``num``) are returned.
num : integer, optional
Number of samples to generate. Default is 50.
endpoint : boolean, optional
If true, `stop` is the last sample. Otherwise, it is not included.
Default is True.
base : float, optional
The base of the log space. The step size between the elements in
``ln(samples) / ln(base)`` (or ``log_base(samples)``) is uniform.
Default is 10.0.
Returns
-------
samples : ndarray
`num` samples, equally spaced on a log scale.
See Also
--------
    arange : Similar to linspace, with the step size specified instead of the
number of samples. Note that, when used with a float endpoint, the
endpoint may or may not be included.
linspace : Similar to logspace, but with the samples uniformly distributed
in linear space, instead of log space.
Notes
-----
Logspace is equivalent to the code
>>> y = np.linspace(start, stop, num=num, endpoint=endpoint)
... # doctest: +SKIP
>>> power(base, y)
... # doctest: +SKIP
Examples
--------
>>> np.logspace(2.0, 3.0, num=4)
array([ 100. , 215.443469 , 464.15888336, 1000. ])
>>> np.logspace(2.0, 3.0, num=4, endpoint=False)
array([ 100. , 177.827941 , 316.22776602, 562.34132519])
>>> np.logspace(2.0, 3.0, num=4, base=2.0)
array([ 4. , 5.0396842 , 6.34960421, 8. ])
Graphical illustration:
>>> import matplotlib.pyplot as plt
>>> N = 10
>>> x1 = np.logspace(0.1, 1, N, endpoint=True)
>>> x2 = np.logspace(0.1, 1, N, endpoint=False)
>>> y = np.zeros(N)
>>> plt.plot(x1, y, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.plot(x2, y + 0.5, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.ylim([-0.5, 1])
(-0.5, 1)
>>> plt.show()
"""
    y = linspace(start, stop, num=num, endpoint=endpoint)
    return _nx.power(base, y)
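# Quick self-check of the equivalence stated in the logspace Notes section
# (a sketch, not part of the original module):
#
#     >>> y = linspace(2.0, 3.0, num=4)
#     >>> bool(_nx.allclose(logspace(2.0, 3.0, num=4), _nx.power(10.0, y)))
#     True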
| gpl-3.0 | -3,677,860,711,188,999,700 | 31.778443 | 79 | 0.569602 | false |
chokribr/invenio | invenio/modules/upgrader/engine.py | 13 | 19568 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2012, 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Upgrader engine."""
from __future__ import absolute_import
from datetime import datetime
import logging
import re
import sys
import warnings
from flask import current_app
from flask_registry import RegistryProxy, ImportPathRegistry
from sqlalchemy import desc
from invenio.ext.sqlalchemy import db
from .models import Upgrade
from .logging import InvenioUpgraderLogFormatter
from .checks import post_check_bibsched
class InvenioUpgrader(object):
"""Class responsible for loading, sorting and executing upgrades.
A note on cross graph dependencies: An upgrade is uniquely identified
by it's id (part of the filename). This means we do not get into
a situation where an upgrade id will exist in two repositories. One
repository will simply overwrite the other on install.
"""
FILE_LOG_FMT = '*%(prefix)s %(asctime)s %(levelname)-8s ' \
'%(plugin_id)s%(message)s'
CONSOLE_LOG_INFO_FMT = '>>> %(prefix)s%(message)s'
CONSOLE_LOG_FMT = '>>> %(prefix)s%(levelname)s: %(message)s'
def __init__(self, packages=None, global_pre_upgrade=None,
global_post_upgrade=None):
"""Init.
@param global_pre_upgrade: List of callables. Each check will be
executed once per upgrade-batch run. Useful e.g. to check if
bibsched is running.
@param global_post_upgrade: List of callables. Each check will be
executed once per upgrade-batch run. Useful e.g. to tell users
to start bibsched again.
"""
self.upgrades = None
self.history = {}
self.ordered_history = []
self.global_pre_upgrade = global_pre_upgrade or []
self.global_post_upgrade = global_post_upgrade or [
post_check_bibsched
]
if packages is None:
self.packages = current_app.extensions['registry']['packages']
else:
self.packages = RegistryProxy(
'upgrader.packages', ImportPathRegistry, initial=packages)
# Warning related
self.old_showwarning = None
        self.warning_occurred = 0
self._logger = None
self._logger_file_fmtter = InvenioUpgraderLogFormatter(
self.FILE_LOG_FMT)
self._logger_console_fmtter = InvenioUpgraderLogFormatter(
self.CONSOLE_LOG_FMT, info=self.CONSOLE_LOG_INFO_FMT,)
def estimate(self, upgrades):
"""Estimate the time needed to apply upgrades.
        If an upgrade does not specify an estimate, it is assumed to be
        on the order of 1 second.
@param upgrades: List of upgrades sorted in topological order.
"""
val = 0
for u in upgrades:
if 'estimate' in u:
val += u['estimate']()
else:
val += 1
return val
def human_estimate(self, upgrades):
        Make a human-readable estimated-time-to-completion string.
@param upgrades: List of upgrades sorted in topological order.
"""
val = self.estimate(upgrades)
if val < 60:
return "less than 1 minute"
elif val < 300:
return "less than 5 minutes"
elif val < 600:
return "less than 10 minutes"
elif val < 1800:
return "less than 30 minutes"
elif val < 3600:
return "less than 1 hour"
elif val < 3 * 3600:
return "less than 3 hours"
elif val < 6 * 3600:
return "less than 6 hours"
elif val < 12 * 3600:
return "less than 12 hours"
elif val < 86400:
return "less than 1 day"
else:
return "more than 1 day"
def _setup_log_prefix(self, plugin_id=''):
"""Setup custom warning notification."""
self._logger_console_fmtter.prefix = '%s: ' % plugin_id
self._logger_console_fmtter.plugin_id = plugin_id
self._logger_file_fmtter.prefix = '*'
self._logger_file_fmtter.plugin_id = '%s: ' % plugin_id
def _teardown_log_prefix(self):
"""Tear down custom warning notification."""
self._logger_console_fmtter.prefix = ''
self._logger_console_fmtter.plugin_id = ''
self._logger_file_fmtter.prefix = ' '
self._logger_file_fmtter.plugin_id = ''
def get_logger(self, logfilename=None):
"""Setup logger.
Allow outputting to both a log file and console at the
same time.
"""
if self._logger is None:
self._logger = logging.getLogger('invenio_upgrader')
self._logger.setLevel(logging.INFO)
if logfilename:
fh = logging.FileHandler(logfilename)
fh.setLevel(logging.INFO)
fh.setFormatter(self._logger_file_fmtter)
self._logger.addHandler(fh)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
ch.setFormatter(self._logger_console_fmtter)
self._logger.addHandler(ch)
# Replace show warnings (documented in Python manual)
def showwarning(message, dummy_category, dummy_filename,
dummy_lineno, *dummy_args):
            self.warning_occurred += 1
logger = self.get_logger()
logger.warning(message)
warnings.showwarning = showwarning
self._teardown_log_prefix()
return self._logger
def has_warnings(self):
"""Determine if a warning has occurred in this upgrader instance."""
        return self.warning_occurred != 0
def get_warnings_count(self):
"""Get number of warnings issued."""
        return self.warning_occurred
def pre_upgrade_checks(self, upgrades):
"""Run upgrade pre-checks prior to applying upgrades.
Pre-checks should
in general be fast to execute. Pre-checks may the use the wait_for_user
function, to query the user for confirmation, but should respect the
--yes-i-know option to run unattended.
All pre-checks will be executed even if one fails, however if one pre-
check fails, the upgrade process will be stopped and the user warned.
@param upgrades: List of upgrades sorted in topological order.
"""
errors = []
for check in self.global_pre_upgrade:
self._setup_log_prefix(plugin_id=check.__name__)
try:
check()
except RuntimeError as e:
errors.append((check.__name__, e.args))
for u in upgrades:
if 'pre_upgrade' in u:
self._setup_log_prefix(plugin_id=u['id'])
try:
u['pre_upgrade']()
except RuntimeError as e:
errors.append((u['id'], e.args))
self._teardown_log_prefix()
self._check_errors(errors, "Pre-upgrade check for %s failed with the"
" following errors:")
def _check_errors(self, errors, prefix):
"""Check for errors and possible raise and format an error message.
@param errors: List of error messages.
@param prefix: str, Prefix message for error messages
"""
args = []
for uid, messages in errors:
error_msg = []
error_msg.append(prefix % uid)
for msg in messages:
error_msg.append(" (-) %s" % msg)
args.append("\n".join(error_msg))
if args:
raise RuntimeError(*args)
def post_upgrade_checks(self, upgrades):
"""Run post-upgrade checks after applying all pending upgrades.
Post checks may be used to emit warnings encountered when applying an
upgrade, but post-checks can also be used to advice the user to run
re-indexing or similar long running processes.
Post-checks may query for user-input, but should respect the
--yes-i-know option to run in an unattended mode.
All applied upgrades post-checks are executed.
@param upgrades: List of upgrades sorted in topological order.
"""
errors = []
for u in upgrades:
if 'post_upgrade' in u:
self._setup_log_prefix(plugin_id=u['id'])
try:
u['post_upgrade']()
except RuntimeError as e:
errors.append((u['id'], e.args))
for check in self.global_post_upgrade:
self._setup_log_prefix(plugin_id=check.__name__)
try:
check()
except RuntimeError as e:
errors.append((check.__name__, e.args))
self._teardown_log_prefix()
self._check_errors(errors, "Post-upgrade check for %s failed with the "
"following errors:")
def apply_upgrade(self, upgrade):
"""Apply a upgrade and register that it was successful.
A upgrade may throw a RuntimeError, if an unrecoverable error happens.
@param upgrade: A single upgrade
"""
self._setup_log_prefix(plugin_id=upgrade['id'])
try: # Nested due to Python 2.4
try:
upgrade['do_upgrade']()
self.register_success(upgrade)
except RuntimeError as e:
msg = ["Upgrade error(s):"]
for m in e.args:
msg.append(" (-) %s" % m)
logger = self.get_logger()
logger.error("\n".join(msg))
raise RuntimeError(
"Upgrade '%s' failed. Your installation is in an"
" inconsistent state. Please manually review the upgrade "
"and resolve inconsistencies." % upgrade['id']
)
finally:
self._teardown_log_prefix()
def load_history(self):
"""Load upgrade history from database table.
If upgrade table does not exists, the history is assumed to be empty.
"""
if not self.history:
query = Upgrade.query.order_by(desc(Upgrade.applied))
for u in query.all():
self.history[u.upgrade] = u.applied
self.ordered_history.append(u.upgrade)
def latest_applied_upgrade(self, repository='invenio'):
"""Get the latest applied upgrade for a repository."""
u = Upgrade.query.filter(
Upgrade.upgrade.like("%s_%%" % repository)
).order_by(desc(Upgrade.applied)).first()
return u.upgrade if u else None
def register_success(self, upgrade):
"""Register a successful upgrade."""
u = Upgrade(upgrade=upgrade['id'], applied=datetime.now())
db.session.add(u)
db.session.commit()
def get_history(self):
"""Get history of applied upgrades."""
self.load_history()
return map(lambda x: (x, self.history[x]), self.ordered_history)
def _load_upgrades(self, remove_applied=True):
"""Load upgrade modules.
Upgrade modules are loaded using pluginutils. The pluginutils module
is either loaded from site-packages via normal or via a user-loaded
module supplied in the __init__ method. This is useful when the engine
is running before actually being installed into site-packages.
@param remove_applied: if True, already applied upgrades will not
be included, if False the entire upgrade graph will be
returned.
"""
from invenio.ext.registry import ModuleAutoDiscoverySubRegistry
from invenio.utils.autodiscovery import create_enhanced_plugin_builder
if remove_applied:
self.load_history()
plugin_builder = create_enhanced_plugin_builder(
compulsory_objects={
                'do_upgrade': dummy_signature,
                'info': dummy_signature,
},
optional_objects={
                'estimate': dummy_signature,
                'pre_upgrade': dummy_signature,
                'post_upgrade': dummy_signature,
},
other_data={
'depends_on': (list, []),
},
)
def builder(plugin):
plugin_id = plugin.__name__.split('.')[-1]
data = plugin_builder(plugin)
data['id'] = plugin_id
data['repository'] = self._parse_plugin_id(plugin_id)
return plugin_id, data
# Load all upgrades from installed packages
plugins = dict(map(
builder,
ModuleAutoDiscoverySubRegistry(
'upgrades', registry_namespace=self.packages
)))
return plugins
def _parse_plugin_id(self, plugin_id):
"""Determine repository from plugin id."""
m = re.match("(.+)(_\d{4}_\d{2}_\d{2}_)(.+)", plugin_id)
if m:
return m.group(1)
m = re.match("(.+)(_release_)(.+)", plugin_id)
if m:
return m.group(1)
raise RuntimeError("Repository could not be determined from "
"the upgrade identifier: %s." % plugin_id)
def get_upgrades(self, remove_applied=True):
"""Get upgrades (ordered according to their dependencies).
@param remove_applied: Set to false to return all upgrades, otherwise
already applied upgrades are removed from their graph (incl. all
            their dependencies).
"""
if self.upgrades is None:
plugins = self._load_upgrades(remove_applied=remove_applied)
# List of un-applied upgrades in topological order
self.upgrades = map(_upgrade_doc_mapper,
self.order_upgrades(plugins, self.history))
return self.upgrades
def _create_graph(self, upgrades, history={}):
"""Create dependency graph from upgrades.
@param upgrades: Dict of upgrades
@param history: Dict of applied upgrades
"""
graph_incoming = {} # nodes their incoming edges
graph_outgoing = {} # nodes their outgoing edges
# Create graph data structure
for mod in upgrades.values():
# Remove all incoming edges from already applied upgrades
graph_incoming[mod['id']] = filter(lambda x: x not in history,
mod['depends_on'])
# Build graph_outgoing
if mod['id'] not in graph_outgoing:
graph_outgoing[mod['id']] = []
for edge in graph_incoming[mod['id']]:
if edge not in graph_outgoing:
graph_outgoing[edge] = []
graph_outgoing[edge].append(mod['id'])
return (graph_incoming, graph_outgoing)
def find_endpoints(self):
"""Find upgrade end-points (i.e nodes without dependents)."""
plugins = self._load_upgrades(remove_applied=False)
dummy_graph_incoming, graph_outgoing = self._create_graph(plugins, {})
endpoints = {}
for node, outgoing in graph_outgoing.items():
if not outgoing:
repository = plugins[node]['repository']
if repository not in endpoints:
endpoints[repository] = []
endpoints[repository].append(node)
return endpoints
def order_upgrades(self, upgrades, history={}):
"""Order upgrades according to their dependencies.
(topological sort using
Kahn's algorithm - http://en.wikipedia.org/wiki/Topological_sorting).
@param upgrades: Dict of upgrades
@param history: Dict of applied upgrades
"""
graph_incoming, graph_outgoing = self._create_graph(upgrades, history)
        # Remove already applied upgrades (assumes all dependencies prior to
        # this upgrade have been applied).
for node_id in history.keys():
start_nodes = [node_id, ]
while start_nodes:
node = start_nodes.pop()
# Remove from direct dependents
try:
for d in graph_outgoing[node]:
graph_incoming[d] = filter(lambda x: x != node,
graph_incoming[d])
except KeyError:
warnings.warn("Ghost upgrade %s detected" % node)
# Remove all prior dependencies
if node in graph_incoming:
# Get dependencies, remove node, and recursively
# remove all dependencies.
depends_on = graph_incoming[node]
# Add dependencies to check
for d in depends_on:
graph_outgoing[d] = filter(lambda x: x != node,
graph_outgoing[d])
start_nodes.append(d)
del graph_incoming[node]
# Check for missing dependencies
for node_id, depends_on in graph_incoming.items():
for d in depends_on:
if d not in graph_incoming:
raise RuntimeError("Upgrade %s depends on an unknown"
" upgrade %s" % (node_id, d))
# Nodes with no incoming edges
start_nodes = filter(lambda x: len(graph_incoming[x]) == 0,
graph_incoming.keys())
topo_order = []
while start_nodes:
# Append node_n to list (it has no incoming edges)
node_n = start_nodes.pop()
topo_order.append(node_n)
# For each node m with and edge from n to m
for node_m in graph_outgoing[node_n]:
# Remove the edge n to m
graph_incoming[node_m] = filter(lambda x: x != node_n,
graph_incoming[node_m])
# If m has no incoming edges, add it to start_nodes.
if not graph_incoming[node_m]:
start_nodes.append(node_m)
for node, edges in graph_incoming.items():
if edges:
raise RuntimeError("The upgrades have at least one cyclic "
"dependency involving %s." % node)
return map(lambda x: upgrades[x], topo_order)
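# A minimal standalone sketch of the Kahn topological sort that
# order_upgrades() performs on the dependency graph (illustrative only;
# `deps` maps each node to the nodes it depends on):
#
#     def kahn_order(deps):
#         incoming = dict((n, set(d)) for n, d in deps.items())
#         order = []
#         ready = [n for n, d in incoming.items() if not d]
#         while ready:
#             node = ready.pop()
#             order.append(node)
#             for other, d in incoming.items():
#                 if node in d:
#                     d.remove(node)
#                     if not d:
#                         ready.append(other)
#         if any(incoming.values()):
#             raise RuntimeError("cyclic dependency detected")
#         return order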
def dummy_signature():
"""Dummy function signature for pluginutils."""
pass
def _upgrade_doc_mapper(x):
"""Map function for ingesting documentation strings into plug-ins."""
try:
x["__doc__"] = x['info']().split("\n")[0].strip()
except Exception:
x["__doc__"] = ''
return x
| gpl-2.0 | -4,705,255,355,286,112,000 | 35.439479 | 79 | 0.570779 | false |
nox/skia | tools/tests/base_unittest.py | 68 | 2416 | #!/usr/bin/python
"""
Copyright 2014 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
A wrapper around the standard Python unittest library, adding features we need
for various unittests within this directory.
"""
import errno
import os
import shutil
import sys
import unittest
# Set the PYTHONPATH to include the tools directory.
sys.path.append(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import find_run_binary
class TestCase(unittest.TestCase):
def shortDescription(self):
"""Tell unittest framework to not print docstrings for test cases."""
return None
def create_empty_dir(self, path):
"""Creates an empty directory at path and returns path.
Args:
path: path on local disk
"""
shutil.rmtree(path=path, ignore_errors=True)
try:
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
return path
def run_command(self, args):
"""Runs a program from the command line and returns stdout.
Args:
args: Command line to run, as a list of string parameters. args[0] is the
binary to run.
Returns:
stdout from the program, as a single string.
Raises:
Exception: the program exited with a nonzero return code.
"""
return find_run_binary.run_command(args)
def find_path_to_program(self, program):
"""Returns path to an existing program binary.
Args:
program: Basename of the program to find (e.g., 'render_pictures').
Returns:
Absolute path to the program binary, as a string.
Raises:
Exception: unable to find the program binary.
"""
return find_run_binary.find_path_to_program(program)
def main(test_case_class):
"""Run the unit tests within the given class.
Raises an Exception if any of those tests fail (in case we are running in the
context of run_all.py, which depends on that Exception to signal failures).
TODO(epoger): Make all of our unit tests use the Python unittest framework,
so we can leverage its ability to run *all* the tests and report failures at
the end.
"""
suite = unittest.TestLoader().loadTestsFromTestCase(test_case_class)
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
raise Exception('failed unittest %s' % test_case_class)
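# Hypothetical usage sketch (not part of this module): a unit test file in
# this directory would subclass TestCase and hand the class to main().
#
#     import base_unittest
#
#     class MyTest(base_unittest.TestCase):
#         def test_addition(self):
#             self.assertEqual(1 + 1, 2)
#
#     if __name__ == '__main__':
#         base_unittest.main(MyTest)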
| bsd-3-clause | 553,703,964,622,482,300 | 26.454545 | 79 | 0.701159 | false |
mpdehaan/ansible | lib/ansible/runner/connection_plugins/funcd.py | 62 | 3629 | # Based on local.py (c) 2012, Michael DeHaan <[email protected]>
# Based on chroot.py (c) 2013, Maykel Moya <[email protected]>
# (c) 2013, Michael Scherer <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ---
# The func transport permits using ansible over func. For people who have already set up
# func and wish to play with ansible, this permits moving gradually to ansible
# without having to completely redo the network setup.
HAVE_FUNC=False
try:
import func.overlord.client as fc
HAVE_FUNC=True
except ImportError:
pass
import os
from ansible.callbacks import vvv
from ansible import errors
import tempfile
import shutil
class Connection(object):
''' Func-based connections '''
def __init__(self, runner, host, port, *args, **kwargs):
self.runner = runner
self.host = host
self.has_pipelining = False
        # port is unused; connection details are handled by func
self.port = port
def connect(self, port=None):
if not HAVE_FUNC:
raise errors.AnsibleError("func is not installed")
self.client = fc.Client(self.host)
return self
def exec_command(self, cmd, tmp_path, sudo_user=None, sudoable=False,
executable='/bin/sh', in_data=None, su=None, su_user=None):
''' run a command on the remote minion '''
if su or su_user:
raise errors.AnsibleError("Internal Error: this module does not support running commands via su")
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
vvv("EXEC %s" % (cmd), host=self.host)
p = self.client.command.run(cmd)[self.host]
return (p[0], '', p[1], p[2])
def _normalize_path(self, path, prefix):
if not path.startswith(os.path.sep):
path = os.path.join(os.path.sep, path)
normpath = os.path.normpath(path)
return os.path.join(prefix, normpath[1:])
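    # Illustrative only: both '/etc/hosts' and 'etc/hosts' normalize to
    # os.path.join(prefix, 'etc/hosts'); os.path.normpath also collapses
    # any '..' segments before the prefix is applied.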
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
out_path = self._normalize_path(out_path, '/')
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
self.client.local.copyfile.send(in_path, out_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
in_path = self._normalize_path(in_path, '/')
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
        # need to use a tmp dir due to the difference in semantics between
        # getfile (which takes a directory as destination) and fetch_file,
        # which takes a file directly
tmpdir = tempfile.mkdtemp(prefix="func_ansible")
self.client.local.getfile.get(in_path, tmpdir)
shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)),
out_path)
shutil.rmtree(tmpdir)
def close(self):
''' terminate the connection; nothing to do here '''
pass
| gpl-3.0 | 6,136,095,483,007,943,000 | 35.656566 | 113 | 0.653624 | false |
raildo/nova | nova/objects/compute_node.py | 13 | 19773 | # Copyright 2013 IBM Corp
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_serialization import jsonutils
import six
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova.objects import pci_device_pool
from nova import utils
CONF = cfg.CONF
CONF.import_opt('cpu_allocation_ratio', 'nova.compute.resource_tracker')
CONF.import_opt('ram_allocation_ratio', 'nova.compute.resource_tracker')
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class ComputeNode(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Added get_by_service_id()
# Version 1.2: String attributes updated to support unicode
# Version 1.3: Added stats field
# Version 1.4: Added host ip field
# Version 1.5: Added numa_topology field
# Version 1.6: Added supported_hv_specs
# Version 1.7: Added host field
# Version 1.8: Added get_by_host_and_nodename()
# Version 1.9: Added pci_device_pools
# Version 1.10: Added get_first_node_by_host_for_old_compat()
# Version 1.11: PciDevicePoolList version 1.1
# Version 1.12: HVSpec version 1.1
# Version 1.13: Changed service_id field to be nullable
# Version 1.14: Added cpu_allocation_ratio and ram_allocation_ratio
VERSION = '1.14'
fields = {
'id': fields.IntegerField(read_only=True),
'service_id': fields.IntegerField(nullable=True),
'host': fields.StringField(nullable=True),
'vcpus': fields.IntegerField(),
'memory_mb': fields.IntegerField(),
'local_gb': fields.IntegerField(),
'vcpus_used': fields.IntegerField(),
'memory_mb_used': fields.IntegerField(),
'local_gb_used': fields.IntegerField(),
'hypervisor_type': fields.StringField(),
'hypervisor_version': fields.IntegerField(),
'hypervisor_hostname': fields.StringField(nullable=True),
'free_ram_mb': fields.IntegerField(nullable=True),
'free_disk_gb': fields.IntegerField(nullable=True),
'current_workload': fields.IntegerField(nullable=True),
'running_vms': fields.IntegerField(nullable=True),
'cpu_info': fields.StringField(nullable=True),
'disk_available_least': fields.IntegerField(nullable=True),
'metrics': fields.StringField(nullable=True),
'stats': fields.DictOfNullableStringsField(nullable=True),
'host_ip': fields.IPAddressField(nullable=True),
'numa_topology': fields.StringField(nullable=True),
# NOTE(pmurray): the supported_hv_specs field maps to the
# supported_instances field in the database
'supported_hv_specs': fields.ListOfObjectsField('HVSpec'),
# NOTE(pmurray): the pci_device_pools field maps to the
# pci_stats field in the database
'pci_device_pools': fields.ObjectField('PciDevicePoolList',
nullable=True),
'cpu_allocation_ratio': fields.FloatField(),
'ram_allocation_ratio': fields.FloatField(),
}
obj_relationships = {
'pci_device_pools': [('1.9', '1.0'), ('1.11', '1.1')],
'supported_hv_specs': [('1.6', '1.0'), ('1.12', '1.1')],
}
def obj_make_compatible(self, primitive, target_version):
super(ComputeNode, self).obj_make_compatible(primitive, target_version)
target_version = utils.convert_version_to_tuple(target_version)
if target_version < (1, 14):
if 'ram_allocation_ratio' in primitive:
del primitive['ram_allocation_ratio']
if 'cpu_allocation_ratio' in primitive:
del primitive['cpu_allocation_ratio']
if target_version < (1, 13) and primitive.get('service_id') is None:
# service_id is non-nullable in versions before 1.13
try:
service = objects.Service.get_by_compute_host(
self._context, primitive['host'])
primitive['service_id'] = service.id
except (exception.ComputeHostNotFound, KeyError):
# NOTE(hanlind): In case anything goes wrong like service not
# found or host not being set, catch and set a fake value just
# to allow for older versions that demand a value to work.
# Setting to -1 will, if value is later used result in a
# ServiceNotFound, so should be safe.
primitive['service_id'] = -1
if target_version < (1, 7) and 'host' in primitive:
del primitive['host']
if target_version < (1, 5) and 'numa_topology' in primitive:
del primitive['numa_topology']
if target_version < (1, 4) and 'host_ip' in primitive:
del primitive['host_ip']
if target_version < (1, 3) and 'stats' in primitive:
# pre 1.3 version does not have a stats field
del primitive['stats']
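    # Illustrative only (hypothetical primitive): downgrading for a 1.4
    # consumer, obj_make_compatible(primitive, '1.4') drops 'numa_topology',
    # 'host' and the allocation-ratio fields introduced later, while
    # 'host_ip' (added in 1.4) is kept.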
@staticmethod
def _host_from_db_object(compute, db_compute):
if (('host' not in db_compute or db_compute['host'] is None)
and 'service_id' in db_compute
and db_compute['service_id'] is not None):
# FIXME(sbauza) : Unconverted compute record, provide compatibility
# This has to stay until we can be sure that any/all compute nodes
# in the database have been converted to use the host field
# Service field of ComputeNode could be deprecated in a next patch,
# so let's use directly the Service object
try:
service = objects.Service.get_by_id(
compute._context, db_compute['service_id'])
except exception.ServiceNotFound:
compute['host'] = None
return
try:
compute['host'] = service.host
except (AttributeError, exception.OrphanedObjectError):
# Host can be nullable in Service
compute['host'] = None
elif 'host' in db_compute and db_compute['host'] is not None:
# New-style DB having host as a field
compute['host'] = db_compute['host']
else:
# We assume it should not happen but in case, let's set it to None
compute['host'] = None
@staticmethod
def _from_db_object(context, compute, db_compute):
special_cases = set([
'stats',
'supported_hv_specs',
'host',
'pci_device_pools',
])
fields = set(compute.fields) - special_cases
for key in fields:
value = db_compute[key]
# NOTE(sbauza): Since all compute nodes don't possibly run the
# latest RT code updating allocation ratios, we need to provide
# a backwards compatible way of hydrating them.
# As we want to care about our operators and since we don't want to
# ask them to change their configuration files before upgrading, we
# prefer to hardcode the default values for the ratios here until
# the next release (Mitaka) where the opt default values will be
# restored for both cpu (16.0) and ram (1.5) allocation ratios.
# TODO(sbauza): Remove that in the next major version bump where
            # we break compatibility with old Kilo computes
if key == 'cpu_allocation_ratio' or key == 'ram_allocation_ratio':
if value == 0.0:
# Operator has not yet provided a new value for that ratio
# on the compute node
value = None
if value is None:
# ResourceTracker is not updating the value (old node)
# or the compute node is updated but the default value has
# not been changed
value = getattr(CONF, key)
if value == 0.0 and key == 'cpu_allocation_ratio':
# It's not specified either on the controller
value = 16.0
if value == 0.0 and key == 'ram_allocation_ratio':
# It's not specified either on the controller
value = 1.5
compute[key] = value
stats = db_compute['stats']
if stats:
compute['stats'] = jsonutils.loads(stats)
sup_insts = db_compute.get('supported_instances')
if sup_insts:
hv_specs = jsonutils.loads(sup_insts)
hv_specs = [objects.HVSpec.from_list(hv_spec)
for hv_spec in hv_specs]
compute['supported_hv_specs'] = hv_specs
pci_stats = db_compute.get('pci_stats')
compute.pci_device_pools = pci_device_pool.from_pci_stats(pci_stats)
compute._context = context
# Make sure that we correctly set the host field depending on either
# host column is present in the table or not
compute._host_from_db_object(compute, db_compute)
compute.obj_reset_changes()
return compute
@base.remotable_classmethod
def get_by_id(cls, context, compute_id):
db_compute = db.compute_node_get(context, compute_id)
return cls._from_db_object(context, cls(), db_compute)
# NOTE(hanlind): This is deprecated and should be removed on the next
# major version bump
@base.remotable_classmethod
def get_by_service_id(cls, context, service_id):
db_computes = db.compute_nodes_get_by_service_id(context, service_id)
# NOTE(sbauza): Old version was returning an item, we need to keep this
# behaviour for backwards compatibility
db_compute = db_computes[0]
return cls._from_db_object(context, cls(), db_compute)
@base.remotable_classmethod
def get_by_host_and_nodename(cls, context, host, nodename):
try:
db_compute = db.compute_node_get_by_host_and_nodename(
context, host, nodename)
except exception.ComputeHostNotFound:
# FIXME(sbauza): Some old computes can still have no host record
# We need to provide compatibility by using the old service_id
# record.
            # We accept the compatibility cost of one extra DB
            # call, but that's necessary until all nodes are upgraded.
try:
service = objects.Service.get_by_compute_host(context, host)
db_computes = db.compute_nodes_get_by_service_id(
context, service.id)
except exception.ServiceNotFound:
# We need to provide the same exception upstream
raise exception.ComputeHostNotFound(host=host)
db_compute = None
for compute in db_computes:
if compute['hypervisor_hostname'] == nodename:
db_compute = compute
# We can avoid an extra call to Service object in
# _from_db_object
db_compute['host'] = service.host
break
if not db_compute:
raise exception.ComputeHostNotFound(host=host)
return cls._from_db_object(context, cls(), db_compute)
@base.remotable_classmethod
def get_first_node_by_host_for_old_compat(cls, context, host,
use_slave=False):
computes = ComputeNodeList.get_all_by_host(context, host, use_slave)
# FIXME(sbauza): Some hypervisors (VMware, Ironic) can return multiple
# nodes per host, we should return all the nodes and modify the callers
# instead.
# Arbitrarily returning the first node.
return computes[0]
@staticmethod
def _convert_stats_to_db_format(updates):
stats = updates.pop('stats', None)
if stats is not None:
updates['stats'] = jsonutils.dumps(stats)
@staticmethod
def _convert_host_ip_to_db_format(updates):
host_ip = updates.pop('host_ip', None)
if host_ip:
updates['host_ip'] = str(host_ip)
@staticmethod
def _convert_supported_instances_to_db_format(updates):
hv_specs = updates.pop('supported_hv_specs', None)
if hv_specs is not None:
hv_specs = [hv_spec.to_list() for hv_spec in hv_specs]
updates['supported_instances'] = jsonutils.dumps(hv_specs)
@staticmethod
def _convert_pci_stats_to_db_format(updates):
pools = updates.pop('pci_device_pools', None)
if pools:
updates['pci_stats'] = jsonutils.dumps(pools.obj_to_primitive())
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self.obj_get_changes()
self._convert_stats_to_db_format(updates)
self._convert_host_ip_to_db_format(updates)
self._convert_supported_instances_to_db_format(updates)
self._convert_pci_stats_to_db_format(updates)
db_compute = db.compute_node_create(self._context, updates)
self._from_db_object(self._context, self, db_compute)
@base.remotable
def save(self, prune_stats=False):
# NOTE(belliott) ignore prune_stats param, no longer relevant
updates = self.obj_get_changes()
updates.pop('id', None)
self._convert_stats_to_db_format(updates)
self._convert_host_ip_to_db_format(updates)
self._convert_supported_instances_to_db_format(updates)
self._convert_pci_stats_to_db_format(updates)
db_compute = db.compute_node_update(self._context, self.id, updates)
self._from_db_object(self._context, self, db_compute)
@base.remotable
def destroy(self):
db.compute_node_delete(self._context, self.id)
def update_from_virt_driver(self, resources):
# NOTE(pmurray): the virt driver provides a dict of values that
# can be copied into the compute node. The names and representation
# do not exactly match.
# TODO(pmurray): the resources dict should be formalized.
keys = ["vcpus", "memory_mb", "local_gb", "cpu_info",
"vcpus_used", "memory_mb_used", "local_gb_used",
"numa_topology", "hypervisor_type",
"hypervisor_version", "hypervisor_hostname",
"disk_available_least", "host_ip"]
for key in keys:
if key in resources:
self[key] = resources[key]
# supported_instances has a different name in compute_node
# TODO(pmurray): change virt drivers not to json encode
# values they add to the resources dict
if 'supported_instances' in resources:
si = resources['supported_instances']
if isinstance(si, six.string_types):
si = jsonutils.loads(si)
self.supported_hv_specs = [objects.HVSpec.from_list(s) for s in si]
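# Illustrative only (hypothetical values): a virt driver hands over a plain
# resources dict that update_from_virt_driver() copies field by field, e.g.
#
#     node.update_from_virt_driver({
#         'vcpus': 8, 'memory_mb': 16384, 'local_gb': 100,
#         'hypervisor_type': 'qemu',
#         'supported_instances': '[["x86_64", "kvm", "hvm"]]',
#     })
#
# A JSON-encoded 'supported_instances' string is decoded into HVSpec objects.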
@base.NovaObjectRegistry.register
class ComputeNodeList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# ComputeNode <= version 1.2
# Version 1.1 ComputeNode version 1.3
# Version 1.2 Add get_by_service()
# Version 1.3 ComputeNode version 1.4
# Version 1.4 ComputeNode version 1.5
# Version 1.5 Add use_slave to get_by_service
# Version 1.6 ComputeNode version 1.6
# Version 1.7 ComputeNode version 1.7
# Version 1.8 ComputeNode version 1.8 + add get_all_by_host()
# Version 1.9 ComputeNode version 1.9
# Version 1.10 ComputeNode version 1.10
# Version 1.11 ComputeNode version 1.11
# Version 1.12 ComputeNode version 1.12
# Version 1.13 ComputeNode version 1.13
# Version 1.14 ComputeNode version 1.14
VERSION = '1.14'
fields = {
'objects': fields.ListOfObjectsField('ComputeNode'),
}
# NOTE(danms): ComputeNode was at 1.2 before we added this
obj_relationships = {
'objects': [('1.0', '1.2'), ('1.1', '1.3'), ('1.2', '1.3'),
('1.3', '1.4'), ('1.4', '1.5'), ('1.5', '1.5'),
('1.6', '1.6'), ('1.7', '1.7'), ('1.8', '1.8'),
('1.9', '1.9'), ('1.10', '1.10'), ('1.11', '1.11'),
('1.12', '1.12'), ('1.13', '1.13'), ('1.14', '1.14')],
}
@base.remotable_classmethod
def get_all(cls, context):
db_computes = db.compute_node_get_all(context)
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
@base.remotable_classmethod
def get_by_hypervisor(cls, context, hypervisor_match):
db_computes = db.compute_node_search_by_hypervisor(context,
hypervisor_match)
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
# NOTE(hanlind): This is deprecated and should be removed on the next
# major version bump
@base.remotable_classmethod
def _get_by_service(cls, context, service_id, use_slave=False):
try:
db_computes = db.compute_nodes_get_by_service_id(
context, service_id)
except exception.ServiceNotFound:
# NOTE(sbauza): Previous behaviour was returning an empty list
# if the service was created with no computes, we need to keep it.
db_computes = []
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
@base.remotable_classmethod
def get_all_by_host(cls, context, host, use_slave=False):
try:
db_computes = db.compute_node_get_all_by_host(context, host,
use_slave)
except exception.ComputeHostNotFound:
# FIXME(sbauza): Some old computes can still have no host record
# We need to provide compatibility by using the old service_id
# record.
            # We accept the compatibility cost of one extra DB
            # call, but that's necessary until all nodes are upgraded.
try:
service = objects.Service.get_by_compute_host(context, host,
use_slave)
db_computes = db.compute_nodes_get_by_service_id(
context, service.id)
except exception.ServiceNotFound:
# We need to provide the same exception upstream
raise exception.ComputeHostNotFound(host=host)
# We can avoid an extra call to Service object in _from_db_object
for db_compute in db_computes:
db_compute['host'] = service.host
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
| apache-2.0 | 8,978,395,070,462,576,000 | 44.983721 | 79 | 0.5991 | false |
spaceexperiment/forum-app | app/api/views/category.py | 1 | 1784 | from flask import request, session, g, redirect, url_for, abort
from . import api
from ..exceptions import ExistsError
from ..models import Category
from .main import BaseMethodView
class CategoryView(BaseMethodView):
def get(self, id=None):
if id:
instance = Category(id)
if not instance:
abort(404)
instance.category.subs = instance.subs()
return instance.category
categories_subs = []
for id in Category.all_ids():
instance = Category(id)
instance.category.subs = instance.subs()
categories_subs.append(instance.category)
return categories_subs
def post(self):
self.is_admin()
missing_data = self.missing_data(['title'])
if missing_data:
return missing_data
try:
category = Category.create(request.json['title'])
except ExistsError:
return self.error('Category exists', 409)
return category, 201
def put(self, id=None):
self.is_admin()
title = request.json.get('title')
if not title:
return self.bad_request('missing title')
if not Category.get(id):
return abort(404)
category = Category.edit(id, title=title)
return category, 200
def delete(self, id=None):
self.is_admin()
if id:
category = Category.get(id)
if not id or not category:
abort(404)
Category.delete(id)
return '', 200
view = CategoryView.as_view('category')
api.add_url_rule('/category/',view_func=view, methods=['GET', 'POST', ])
api.add_url_rule('/category/<int:id>/', view_func=view,
methods=['GET', 'PUT', 'DELETE'])
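# Illustrative only (hypothetical host and blueprint prefix): the routes
# above would be exercised with e.g.
#
#     curl http://localhost:5000/api/category/          # list all categories
#     curl -X POST -H 'Content-Type: application/json' \
#          -d '{"title": "General"}' http://localhost:5000/api/category/
#     curl -X DELETE http://localhost:5000/api/category/1/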
| mit | 4,978,601,089,325,236,000 | 28.245902 | 72 | 0.580717 | false |
homme/ansible | test/integration/cleanup_gce.py | 163 | 2589 | '''
Find and delete GCE resources matching the provided --match string. Unless
--yes|-y is provided, prompt for confirmation prior to deleting resources.
Please use caution, you can easily delete your *ENTIRE* GCE infrastructure.
'''
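# Hypothetical invocation (GCE credential flags are added by
# gce_credentials and omitted here):
#
#     python cleanup_gce.py --match '^ansible-testing-' --yes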
import os
import re
import sys
import optparse
import yaml
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
_ = Provider.GCE
except ImportError:
print("failed=True " + \
"msg='libcloud with GCE support (0.13.3+) required for this module'")
sys.exit(1)
import gce_credentials
def delete_gce_resources(get_func, attr, opts):
for item in get_func():
val = getattr(item, attr)
if re.search(opts.match_re, val, re.IGNORECASE):
prompt_and_delete(item, "Delete matching %s? [y/n]: " % (item,), opts.assumeyes)
def prompt_and_delete(item, prompt, assumeyes):
if not assumeyes:
assumeyes = raw_input(prompt).lower() == 'y'
    assert hasattr(item, 'destroy'), "Class <%s> has no destroy attribute" % item.__class__
if assumeyes:
item.destroy()
print ("Deleted %s" % item)
def parse_args():
parser = optparse.OptionParser(usage="%s [options]" % (sys.argv[0],),
description=__doc__)
gce_credentials.add_credentials_options(parser)
parser.add_option("--yes", "-y",
action="store_true", dest="assumeyes",
default=False,
help="Don't prompt for confirmation")
parser.add_option("--match",
action="store", dest="match_re",
default="^ansible-testing-",
help="Regular expression used to find GCE resources (default: %default)")
(opts, args) = parser.parse_args()
gce_credentials.check_required(opts, parser)
return (opts, args)
if __name__ == '__main__':
(opts, args) = parse_args()
# Connect to GCE
gce = gce_credentials.get_gce_driver(opts)
try:
# Delete matching instances
delete_gce_resources(gce.list_nodes, 'name', opts)
# Delete matching snapshots
def get_snapshots():
for volume in gce.list_volumes():
for snapshot in gce.list_volume_snapshots(volume):
yield snapshot
delete_gce_resources(get_snapshots, 'name', opts)
# Delete matching disks
delete_gce_resources(gce.list_volumes, 'name', opts)
except KeyboardInterrupt as e:
print("\nExiting on user command.")
| gpl-3.0 | -1,652,462,611,346,272,300 | 32.623377 | 92 | 0.654307 | false |
nizhikov/ignite | modules/platforms/python/pyignite/datatypes/__init__.py | 11 | 1078 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains classes, used internally by `pyignite` for parsing and
creating binary data.
"""
from .complex import *
from .internal import *
from .null_object import *
from .primitive import *
from .primitive_arrays import *
from .primitive_objects import *
from .standard import *
| apache-2.0 | 455,566,814,023,181,700 | 38.925926 | 75 | 0.769944 | false |
ArcticaProject/vcxsrv | mesalib/src/glsl/nir/nir_opcodes_c.py | 1 | 2036 | #! /usr/bin/env python
#
# Copyright (C) 2014 Connor Abbott
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Connor Abbott ([email protected])
from nir_opcodes import opcodes
from mako.template import Template
template = Template("""
#include "nir.h"
const nir_op_info nir_op_infos[nir_num_opcodes] = {
% for name, opcode in sorted(opcodes.iteritems()):
{
.name = "${name}",
.num_inputs = ${opcode.num_inputs},
.output_size = ${opcode.output_size},
.output_type = ${"nir_type_" + opcode.output_type},
.input_sizes = {
${ ", ".join(str(size) for size in opcode.input_sizes) }
},
.input_types = {
${ ", ".join("nir_type_" + type for type in opcode.input_types) }
},
.algebraic_properties =
${ "0" if opcode.algebraic_properties == "" else " | ".join(
"NIR_OP_IS_" + prop.upper() for prop in
opcode.algebraic_properties.strip().split(" ")) }
},
% endfor
};
""")
print template.render(opcodes=opcodes)
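# The rendered C source goes to stdout; a build system would typically
# redirect it, e.g. (hypothetical): python nir_opcodes_c.py > nir_opcodes.c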
| gpl-3.0 | 2,463,435,750,173,568,000 | 36.018182 | 78 | 0.700884 | false |
demarle/VTK | ThirdParty/Twisted/twisted/cred/strcred.py | 63 | 8301 | # -*- test-case-name: twisted.test.test_strcred -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""
Support for resolving command-line strings that represent different
checkers available to cred.
Examples:
- passwd:/etc/passwd
- memory:admin:asdf:user:lkj
- unix
"""
import sys
from zope.interface import Interface, Attribute
from twisted.plugin import getPlugins
from twisted.python import usage
class ICheckerFactory(Interface):
"""
A factory for objects which provide
L{twisted.cred.checkers.ICredentialsChecker}.
It's implemented by twistd plugins creating checkers.
"""
authType = Attribute(
'A tag that identifies the authentication method.')
authHelp = Attribute(
'A detailed (potentially multi-line) description of precisely '
'what functionality this CheckerFactory provides.')
argStringFormat = Attribute(
'A short (one-line) description of the argument string format.')
credentialInterfaces = Attribute(
'A list of credentials interfaces that this factory will support.')
def generateChecker(argstring):
"""
Return an L{ICredentialChecker} provider using the supplied
argument string.
"""
class StrcredException(Exception):
"""
Base exception class for strcred.
"""
class InvalidAuthType(StrcredException):
"""
Raised when a user provides an invalid identifier for the
authentication plugin (known as the authType).
"""
class InvalidAuthArgumentString(StrcredException):
"""
Raised by an authentication plugin when the argument string
provided is formatted incorrectly.
"""
class UnsupportedInterfaces(StrcredException):
"""
Raised when an application is given a checker to use that does not
provide any of the application's supported credentials interfaces.
"""
# This will be used to warn the users whenever they view help for an
# authType that is not supported by the application.
notSupportedWarning = ("WARNING: This authType is not supported by "
"this application.")
def findCheckerFactories():
"""
Find all objects that implement L{ICheckerFactory}.
"""
return getPlugins(ICheckerFactory)
def findCheckerFactory(authType):
"""
Find the first checker factory that supports the given authType.
"""
for factory in findCheckerFactories():
if factory.authType == authType:
return factory
raise InvalidAuthType(authType)
def makeChecker(description):
"""
Returns an L{twisted.cred.checkers.ICredentialsChecker} based on the
contents of a descriptive string. Similar to
L{twisted.application.strports}.
"""
if ':' in description:
authType, argstring = description.split(':', 1)
else:
authType = description
argstring = ''
return findCheckerFactory(authType).generateChecker(argstring)
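# Illustrative only: description strings mirror the examples in the module
# docstring, e.g.
#
#     checker = makeChecker('memory:admin:asdf:user:lkj')
#     checker = makeChecker('passwd:/etc/passwd')
#     checker = makeChecker('unix')   # no argument string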
class AuthOptionMixin:
"""
Defines helper methods that can be added on to any
L{usage.Options} subclass that needs authentication.
This mixin implements three new options methods:
The opt_auth method (--auth) will write two new values to the
'self' dictionary: C{credInterfaces} (a dict of lists) and
C{credCheckers} (a list).
The opt_help_auth method (--help-auth) will search for all
available checker plugins and list them for the user; it will exit
when finished.
The opt_help_auth_type method (--help-auth-type) will display
detailed help for a particular checker plugin.
@cvar supportedInterfaces: An iterable object that returns
credential interfaces which this application is able to support.
@cvar authOutput: A writeable object to which this options class
will send all help-related output. Default: L{sys.stdout}
"""
supportedInterfaces = None
authOutput = sys.stdout
def supportsInterface(self, interface):
"""
Returns whether a particular credentials interface is supported.
"""
return (self.supportedInterfaces is None
or interface in self.supportedInterfaces)
def supportsCheckerFactory(self, factory):
"""
Returns whether a checker factory will provide at least one of
the credentials interfaces that we care about.
"""
for interface in factory.credentialInterfaces:
if self.supportsInterface(interface):
return True
return False
def addChecker(self, checker):
"""
Supply a supplied credentials checker to the Options class.
"""
# First figure out which interfaces we're willing to support.
supported = []
if self.supportedInterfaces is None:
supported = checker.credentialInterfaces
else:
for interface in checker.credentialInterfaces:
if self.supportsInterface(interface):
supported.append(interface)
if not supported:
raise UnsupportedInterfaces(checker.credentialInterfaces)
# If we get this far, then we know we can use this checker.
if 'credInterfaces' not in self:
self['credInterfaces'] = {}
if 'credCheckers' not in self:
self['credCheckers'] = []
self['credCheckers'].append(checker)
for interface in supported:
self['credInterfaces'].setdefault(interface, []).append(checker)
def opt_auth(self, description):
"""
Specify an authentication method for the server.
"""
try:
self.addChecker(makeChecker(description))
except UnsupportedInterfaces, e:
raise usage.UsageError(
'Auth plugin not supported: %s' % e.args[0])
except InvalidAuthType, e:
raise usage.UsageError(
'Auth plugin not recognized: %s' % e.args[0])
except Exception, e:
raise usage.UsageError('Unexpected error: %s' % e)
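    # Hypothetical usage sketch: an application's Options class mixes this
    # class in and declares the credential interfaces it supports, e.g.
    #
    #     from twisted.cred import credentials
    #
    #     class Options(usage.Options, AuthOptionMixin):
    #         supportedInterfaces = (credentials.IUsernamePassword,)
    #
    # after which `--auth memory:admin:pass` populates self['credCheckers'].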
def _checkerFactoriesForOptHelpAuth(self):
"""
Return a list of which authTypes will be displayed by --help-auth.
This makes it a lot easier to test this module.
"""
for factory in findCheckerFactories():
for interface in factory.credentialInterfaces:
if self.supportsInterface(interface):
yield factory
break
def opt_help_auth(self):
"""
Show all authentication methods available.
"""
self.authOutput.write("Usage: --auth AuthType[:ArgString]\n")
self.authOutput.write("For detailed help: --help-auth-type AuthType\n")
self.authOutput.write('\n')
# Figure out the right width for our columns
firstLength = 0
for factory in self._checkerFactoriesForOptHelpAuth():
if len(factory.authType) > firstLength:
firstLength = len(factory.authType)
formatString = ' %%-%is\t%%s\n' % firstLength
self.authOutput.write(formatString % ('AuthType', 'ArgString format'))
self.authOutput.write(formatString % ('========', '================'))
for factory in self._checkerFactoriesForOptHelpAuth():
self.authOutput.write(
formatString % (factory.authType, factory.argStringFormat))
self.authOutput.write('\n')
raise SystemExit(0)
def opt_help_auth_type(self, authType):
"""
Show help for a particular authentication type.
"""
try:
cf = findCheckerFactory(authType)
except InvalidAuthType:
raise usage.UsageError("Invalid auth type: %s" % authType)
self.authOutput.write("Usage: --auth %s[:ArgString]\n" % authType)
self.authOutput.write("ArgString format: %s\n" % cf.argStringFormat)
self.authOutput.write('\n')
for line in cf.authHelp.strip().splitlines():
self.authOutput.write(' %s\n' % line.rstrip())
self.authOutput.write('\n')
if not self.supportsCheckerFactory(cf):
self.authOutput.write(' %s\n' % notSupportedWarning)
self.authOutput.write('\n')
raise SystemExit(0)
| bsd-3-clause | 1,770,969,183,393,619,200 | 29.744444 | 79 | 0.647392 | false |
vrieni/orange | Orange/OrangeWidgets/Data/OWSave.py | 6 | 5594 | from OWWidget import *
import OWGUI
import re, os.path
from exceptions import Exception
NAME = "Save"
DESCRIPTION = "Saves data to file."
LONG_DESCRIPTION = ""
ICON = "icons/Save.svg"
PRIORITY = 90
AUTHOR = "Janez Demsar"
AUTHOR_EMAIL = "janez.demsar(@at@)fri.uni-lj.si"
INPUTS = [("Data", Orange.data.Table, "dataset", Default)]
class OWSave(OWWidget):
settingsList = ["recentFiles", "selectedFileName"]
savers = {".txt": orange.saveTxt,
".tab": orange.saveTabDelimited,
".names": orange.saveC45,
".test": orange.saveC45,
".data": orange.saveC45,
".csv": orange.saveCsv
}
# exclude C50 since it has the same extension and we do not need saving to it anyway
registeredFileTypes = [ft for ft in orange.getRegisteredFileTypes() if len(ft)>3 and ft[3] and not ft[0]=="C50"]
dlgFormats = 'Tab-delimited files (*.tab)\nHeaderless tab-delimited (*.txt)\nComma separated (*.csv)\nC4.5 files (*.data)\nRetis files (*.rda *.rdo)\n' \
+ "\n".join("%s (%s)" % (ft[:2]) for ft in registeredFileTypes) \
+ "\nAll files(*.*)"
savers.update(dict((lower(ft[1][1:]), ft[3]) for ft in registeredFileTypes))
re_filterExtension = re.compile(r"\(\*(?P<ext>\.[^ )]+)")
def __init__(self,parent=None, signalManager = None):
OWWidget.__init__(self, parent, signalManager, "Save", wantMainArea = 0, resizingEnabled = 0)
self.inputs = [("Data", ExampleTable, self.dataset)]
self.outputs = []
self.recentFiles=[]
self.selectedFileName = "None"
self.data = None
self.filename = ""
self.loadSettings()
# vb = OWGUI.widgetBox(self.controlArea)
rfbox = OWGUI.widgetBox(self.controlArea, "Filename", orientation="horizontal", addSpace=True)
self.filecombo = OWGUI.comboBox(rfbox, self, "filename")
self.filecombo.setMinimumWidth(200)
# browse = OWGUI.button(rfbox, self, "...", callback = self.browseFile, width=25)
button = OWGUI.button(rfbox, self, '...', callback = self.browseFile, disabled=0)
button.setIcon(self.style().standardIcon(QStyle.SP_DirOpenIcon))
button.setSizePolicy(QSizePolicy.Maximum, QSizePolicy.Fixed)
fbox = OWGUI.widgetBox(self.controlArea, "Save")
self.save = OWGUI.button(fbox, self, "Save", callback = self.saveFile, default=True)
self.save.setDisabled(1)
OWGUI.rubber(self.controlArea)
#self.adjustSize()
self.setFilelist()
self.resize(260, 100)
self.filecombo.setCurrentIndex(0)
if self.selectedFileName != "":
if os.path.exists(self.selectedFileName):
self.openFile(self.selectedFileName)
else:
self.selectedFileName = ""
def dataset(self, data):
self.data = data
self.save.setDisabled(data == None)
def browseFile(self):
if self.recentFiles:
startfile = self.recentFiles[0]
else:
startfile = os.path.expanduser("~/")
# filename, selectedFilter = QFileDialog.getSaveFileNameAndFilter(self, 'Save Orange Data File', startfile,
# self.dlgFormats, self.dlgFormats.splitlines()[0])
# filename = str(filename)
# The preceding lines should work as per API, but do not; it's probably a PyQt bug as per March 2010.
# The following is a workaround.
# (As a consequence, filter selection is not taken into account when appending a default extension.)
filename, selectedFilter = QFileDialog.getSaveFileName(self, 'Save Orange Data File', startfile,
self.dlgFormats), self.dlgFormats.splitlines()[0]
filename = unicode(filename)
if not filename or not os.path.split(filename)[1]:
return
ext = lower(os.path.splitext(filename)[1])
if not ext in self.savers:
filt_ext = self.re_filterExtension.search(str(selectedFilter)).group("ext")
if filt_ext == ".*":
filt_ext = ".tab"
filename += filt_ext
self.addFileToList(filename)
self.saveFile()
def saveFile(self, *index):
self.error()
if self.data is not None:
combotext = unicode(self.filecombo.currentText())
if combotext == "(none)":
QMessageBox.information( None, "Error saving data", "Unable to save data. Select first a file name by clicking the '...' button.", QMessageBox.Ok + QMessageBox.Default)
return
filename = self.recentFiles[self.filecombo.currentIndex()]
fileExt = lower(os.path.splitext(filename)[1])
if fileExt == "":
fileExt = ".tab"
try:
self.savers[fileExt](filename, self.data)
except Exception, (errValue):
self.error(str(errValue))
return
self.error()
def addFileToList(self, fn):
if fn in self.recentFiles:
self.recentFiles.remove(fn)
self.recentFiles.insert(0,fn)
self.setFilelist()
def setFilelist(self):
"""Set the GUI filelist"""
self.filecombo.clear()
if self.recentFiles:
self.filecombo.addItems([os.path.split(file)[1] for file in self.recentFiles])
else:
self.filecombo.addItem("(none)")
if __name__ == "__main__":
a=QApplication(sys.argv)
owf=OWSave()
owf.show()
a.exec_()
owf.saveSettings()
| gpl-3.0 | 9,171,081,184,410,673,000 | 36.543624 | 184 | 0.59975 | false |
locustio/locust | locust/test/test_wait_time.py | 1 | 2342 | import random
import time
from locust import User, TaskSet, between, constant, constant_pacing
from locust.exception import MissingWaitTimeError
from .testcases import LocustTestCase
class TestWaitTime(LocustTestCase):
def test_between(self):
class MyUser(User):
wait_time = between(3, 9)
class TaskSet1(TaskSet):
pass
class TaskSet2(TaskSet):
wait_time = between(20.0, 21.0)
u = MyUser(self.environment)
ts1 = TaskSet1(u)
ts2 = TaskSet2(u)
for i in range(100):
w = u.wait_time()
self.assertGreaterEqual(w, 3)
self.assertLessEqual(w, 9)
w = ts1.wait_time()
self.assertGreaterEqual(w, 3)
self.assertLessEqual(w, 9)
for i in range(100):
w = ts2.wait_time()
self.assertGreaterEqual(w, 20)
self.assertLessEqual(w, 21)
def test_constant(self):
class MyUser(User):
wait_time = constant(13)
class TaskSet1(TaskSet):
pass
self.assertEqual(13, MyUser(self.environment).wait_time())
self.assertEqual(13, TaskSet1(MyUser(self.environment)).wait_time())
def test_default_wait_time(self):
class MyUser(User):
pass # default is wait_time = constant(0)
class TaskSet1(TaskSet):
pass
self.assertEqual(0, MyUser(self.environment).wait_time())
self.assertEqual(0, TaskSet1(MyUser(self.environment)).wait_time())
taskset = TaskSet1(MyUser(self.environment))
start_time = time.perf_counter()
taskset.wait()
self.assertLess(time.perf_counter() - start_time, 0.002)
def test_constant_pacing(self):
class MyUser(User):
wait_time = constant_pacing(0.1)
class TS(TaskSet):
pass
ts = TS(MyUser(self.environment))
ts2 = TS(MyUser(self.environment))
previous_time = time.perf_counter()
for i in range(7):
ts.wait()
since_last_run = time.perf_counter() - previous_time
self.assertLess(abs(0.1 - since_last_run), 0.02)
previous_time = time.perf_counter()
time.sleep(random.random() * 0.1)
_ = ts2.wait_time()
_ = ts2.wait_time()
| mit | 7,875,299,310,437,960,000 | 28.64557 | 76 | 0.577284 | false |
icio/github3.py | tests/unit/test_null.py | 10 | 1832 | from .helper import UnitHelper
from github3.null import NullObject
import pytest
class TestNullObject(UnitHelper):
described_class = NullObject
def create_instance_of_described_class(self):
return self.described_class()
def test_returns_empty_list(self):
assert list(self.instance) == []
def test_contains_nothing(self):
assert 'foo' not in self.instance
def test_returns_itself_when_called(self):
assert self.instance('foo', 'bar', 'bogus') is self.instance
def test_returns_empty_string(self):
assert str(self.instance) == ''
def test_allows_arbitrary_attributes(self):
assert self.instance.attr is self.instance
def test_allows_arbitrary_attributes_to_be_set(self):
self.instance.attr = 'new'
assert self.instance.attr is self.instance
def test_provides_an_api_to_check_if_it_is_null(self):
assert self.instance.is_null()
def test_stops_iteration(self):
with pytest.raises(StopIteration):
next(self.instance)
def test_next_raises_stops_iteration(self):
with pytest.raises(StopIteration):
self.instance.next()
def test_getitem_returns_itself(self):
assert self.instance['attr'] is self.instance
def test_setitem_sets_nothing(self):
self.instance['attr'] = 'attr'
assert self.instance['attr'] is self.instance
def test_turns_into_unicode(self):
unicode_str = b''.decode('utf-8')
try:
assert unicode(self.instance) == unicode_str
except NameError:
assert str(self.instance) == unicode_str
def test_instances_are_falsey(self):
if self.instance:
pytest.fail()
def test_instances_can_be_coerced_to_zero(self):
assert int(self.instance) == 0
| bsd-3-clause | 7,603,991,016,720,208,000 | 28.548387 | 68 | 0.651201 | false |
gmist/fix-5studio | main/auth/twitter.py | 6 | 1468 | # coding: utf-8
from __future__ import absolute_import
import flask
import auth
import config
import model
import util
from main import app
twitter_config = dict(
access_token_url='https://api.twitter.com/oauth/access_token',
authorize_url='https://api.twitter.com/oauth/authorize',
base_url='https://api.twitter.com/1.1/',
consumer_key=config.CONFIG_DB.twitter_consumer_key,
consumer_secret=config.CONFIG_DB.twitter_consumer_secret,
request_token_url='https://api.twitter.com/oauth/request_token',
)
twitter = auth.create_oauth_app(twitter_config, 'twitter')
@app.route('/api/auth/callback/twitter/')
def twitter_authorized():
response = twitter.authorized_response()
if response is None:
flask.flash('You denied the request to sign in.')
return flask.redirect(util.get_next_url())
flask.session['oauth_token'] = (
response['oauth_token'],
response['oauth_token_secret'],
)
user_db = retrieve_user_from_twitter(response)
return auth.signin_user_db(user_db)
@twitter.tokengetter
def get_twitter_token():
return flask.session.get('oauth_token')
@app.route('/signin/twitter/')
def signin_twitter():
return auth.signin_oauth(twitter)
def retrieve_user_from_twitter(response):
auth_id = 'twitter_%s' % response['user_id']
user_db = model.User.get_by('auth_ids', auth_id)
return user_db or auth.create_user_db(
auth_id=auth_id,
name=response['screen_name'],
username=response['screen_name'],
)
| mit | 7,253,663,417,025,386,000 | 24.310345 | 66 | 0.714578 | false |
js0701/chromium-crosswalk | tools/perf_expectations/make_expectations.py | 37 | 12595 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# For instructions see:
# http://www.chromium.org/developers/tree-sheriffs/perf-sheriffs
import hashlib
import math
import optparse
import os
import re
import subprocess
import sys
import time
import urllib2
try:
import json
except ImportError:
import simplejson as json
__version__ = '1.0'
EXPECTATIONS_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_CONFIG_FILE = os.path.join(EXPECTATIONS_DIR,
'chromium_perf_expectations.cfg')
DEFAULT_TOLERANCE = 0.05
USAGE = ''
def ReadFile(filename):
try:
file = open(filename, 'rb')
except IOError, e:
print >> sys.stderr, ('I/O Error reading file %s(%s): %s' %
(filename, e.errno, e.strerror))
raise e
contents = file.read()
file.close()
return contents
def ConvertJsonIntoDict(string):
"""Read a JSON string and convert its contents into a Python datatype."""
if len(string) == 0:
print >> sys.stderr, ('Error could not parse empty string')
raise Exception('JSON data missing')
try:
jsondata = json.loads(string)
except ValueError, e:
print >> sys.stderr, ('Error parsing string: "%s"' % string)
raise e
return jsondata
# Floating point representation of last time we fetched a URL.
last_fetched_at = None
def FetchUrlContents(url):
global last_fetched_at
if last_fetched_at and ((time.time() - last_fetched_at) <= 0.5):
# Sleep for half a second to avoid overloading the server.
time.sleep(0.5)
try:
last_fetched_at = time.time()
connection = urllib2.urlopen(url)
except urllib2.HTTPError, e:
if e.code == 404:
return None
raise e
text = connection.read().strip()
connection.close()
return text
def GetRowData(data, key):
rowdata = []
# reva and revb always come first.
for subkey in ['reva', 'revb']:
if subkey in data[key]:
rowdata.append('"%s": %s' % (subkey, data[key][subkey]))
# Strings, like type, come next.
for subkey in ['type', 'better']:
if subkey in data[key]:
rowdata.append('"%s": "%s"' % (subkey, data[key][subkey]))
# Finally the main numbers come last.
for subkey in ['improve', 'regress', 'tolerance']:
if subkey in data[key]:
rowdata.append('"%s": %s' % (subkey, data[key][subkey]))
return rowdata
def GetRowDigest(rowdata, key):
sha1 = hashlib.sha1()
rowdata = [str(possibly_unicode_string).encode('ascii')
for possibly_unicode_string in rowdata]
sha1.update(str(rowdata) + key)
return sha1.hexdigest()[0:8]
def WriteJson(filename, data, keys, calculate_sha1=True):
"""Write a list of |keys| in |data| to the file specified in |filename|."""
try:
file = open(filename, 'wb')
except IOError, e:
print >> sys.stderr, ('I/O Error writing file %s(%s): %s' %
(filename, e.errno, e.strerror))
return False
jsondata = []
for key in keys:
rowdata = GetRowData(data, key)
if calculate_sha1:
# Include an updated checksum.
rowdata.append('"sha1": "%s"' % GetRowDigest(rowdata, key))
else:
if 'sha1' in data[key]:
rowdata.append('"sha1": "%s"' % (data[key]['sha1']))
jsondata.append('"%s": {%s}' % (key, ', '.join(rowdata)))
jsondata.append('"load": true')
jsontext = '{%s\n}' % ',\n '.join(jsondata)
file.write(jsontext + '\n')
file.close()
return True
def FloatIsInt(f):
epsilon = 1.0e-10
return abs(f - int(f)) <= epsilon
last_key_printed = None
def Main(args):
def OutputMessage(message, verbose_message=True):
global last_key_printed
if not options.verbose and verbose_message:
return
if key != last_key_printed:
last_key_printed = key
print '\n' + key + ':'
print ' %s' % message
parser = optparse.OptionParser(usage=USAGE, version=__version__)
parser.add_option('-v', '--verbose', action='store_true', default=False,
help='enable verbose output')
parser.add_option('-s', '--checksum', action='store_true',
help='test if any changes are pending')
parser.add_option('-c', '--config', dest='config_file',
default=DEFAULT_CONFIG_FILE,
help='set the config file to FILE', metavar='FILE')
options, args = parser.parse_args(args)
if options.verbose:
print 'Verbose output enabled.'
config = ConvertJsonIntoDict(ReadFile(options.config_file))
# Get the list of summaries for a test.
base_url = config['base_url']
# Make the perf expectations file relative to the path of the config file.
perf_file = os.path.join(
os.path.dirname(options.config_file), config['perf_file'])
perf = ConvertJsonIntoDict(ReadFile(perf_file))
# Fetch graphs.dat for this combination.
perfkeys = perf.keys()
# In perf_expectations.json, ignore the 'load' key.
perfkeys.remove('load')
perfkeys.sort()
write_new_expectations = False
found_checksum_mismatch = False
for key in perfkeys:
value = perf[key]
tolerance = value.get('tolerance', DEFAULT_TOLERANCE)
better = value.get('better', None)
# Verify the checksum.
original_checksum = value.get('sha1', '')
if 'sha1' in value:
del value['sha1']
rowdata = GetRowData(perf, key)
computed_checksum = GetRowDigest(rowdata, key)
if original_checksum == computed_checksum:
OutputMessage('checksum matches, skipping')
continue
elif options.checksum:
found_checksum_mismatch = True
continue
# Skip expectations that are missing a reva or revb. We can't generate
# expectations for those.
if not(value.has_key('reva') and value.has_key('revb')):
OutputMessage('missing revision range, skipping')
continue
revb = int(value['revb'])
reva = int(value['reva'])
# Ensure that reva is less than revb.
if reva > revb:
temp = reva
reva = revb
revb = temp
# Get the system/test/graph/tracename and reftracename for the current key.
matchData = re.match(r'^([^/]+)\/([^/]+)\/([^/]+)\/([^/]+)$', key)
if not matchData:
OutputMessage('cannot parse key, skipping')
continue
system = matchData.group(1)
test = matchData.group(2)
graph = matchData.group(3)
tracename = matchData.group(4)
reftracename = tracename + '_ref'
# Create the summary_url and get the json data for that URL.
# FetchUrlContents() may sleep to avoid overloading the server with
# requests.
summary_url = '%s/%s/%s/%s-summary.dat' % (base_url, system, test, graph)
summaryjson = FetchUrlContents(summary_url)
if not summaryjson:
OutputMessage('ERROR: cannot find json data, please verify',
verbose_message=False)
return 0
# Set value's type to 'relative' by default.
value_type = value.get('type', 'relative')
summarylist = summaryjson.split('\n')
trace_values = {}
traces = [tracename]
if value_type == 'relative':
traces += [reftracename]
for trace in traces:
trace_values.setdefault(trace, {})
# Find the high and low values for each of the traces.
scanning = False
for line in summarylist:
jsondata = ConvertJsonIntoDict(line)
try:
rev = int(jsondata['rev'])
except ValueError:
        print ('Warning: skipping rev %r because it could not be parsed '
               'as an integer.' % jsondata['rev'])
continue
if rev <= revb:
scanning = True
if rev < reva:
break
# We found the upper revision in the range. Scan for trace data until we
# find the lower revision in the range.
if scanning:
for trace in traces:
if trace not in jsondata['traces']:
OutputMessage('trace %s missing' % trace)
continue
if type(jsondata['traces'][trace]) != type([]):
OutputMessage('trace %s format not recognized' % trace)
continue
try:
tracevalue = float(jsondata['traces'][trace][0])
except ValueError:
OutputMessage('trace %s value error: %s' % (
trace, str(jsondata['traces'][trace][0])))
continue
for bound in ['high', 'low']:
trace_values[trace].setdefault(bound, tracevalue)
trace_values[trace]['high'] = max(trace_values[trace]['high'],
tracevalue)
trace_values[trace]['low'] = min(trace_values[trace]['low'],
tracevalue)
if 'high' not in trace_values[tracename]:
OutputMessage('no suitable traces matched, skipping')
continue
if value_type == 'relative':
# Calculate assuming high deltas are regressions and low deltas are
# improvements.
regress = (float(trace_values[tracename]['high']) -
float(trace_values[reftracename]['low']))
improve = (float(trace_values[tracename]['low']) -
float(trace_values[reftracename]['high']))
elif value_type == 'absolute':
# Calculate assuming high absolutes are regressions and low absolutes are
# improvements.
regress = float(trace_values[tracename]['high'])
improve = float(trace_values[tracename]['low'])
# So far we've assumed better is lower (regress > improve). If the actual
# values for regress and improve are equal, though, and better was not
# specified, alert the user so we don't let them create a new file with
# ambiguous rules.
if better == None and regress == improve:
OutputMessage('regress (%s) is equal to improve (%s), and "better" is '
'unspecified, please fix by setting "better": "lower" or '
'"better": "higher" in this perf trace\'s expectation' % (
regress, improve), verbose_message=False)
return 1
# If the existing values assume regressions are low deltas relative to
# improvements, swap our regress and improve. This value must be a
# scores-like result.
if 'regress' in perf[key] and 'improve' in perf[key]:
if perf[key]['regress'] < perf[key]['improve']:
assert(better != 'lower')
better = 'higher'
temp = regress
regress = improve
improve = temp
else:
# Sometimes values are equal, e.g., when they are both 0,
# 'better' may still be set to 'higher'.
assert(better != 'higher' or
perf[key]['regress'] == perf[key]['improve'])
better = 'lower'
# If both were ints keep as int, otherwise use the float version.
originally_ints = False
if FloatIsInt(regress) and FloatIsInt(improve):
originally_ints = True
if better == 'higher':
if originally_ints:
regress = int(math.floor(regress - abs(regress*tolerance)))
improve = int(math.ceil(improve + abs(improve*tolerance)))
else:
regress = regress - abs(regress*tolerance)
improve = improve + abs(improve*tolerance)
else:
if originally_ints:
improve = int(math.floor(improve - abs(improve*tolerance)))
regress = int(math.ceil(regress + abs(regress*tolerance)))
else:
improve = improve - abs(improve*tolerance)
regress = regress + abs(regress*tolerance)
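    # Worked example, assuming better == 'lower', tolerance == 0.05, ints:
    #   improve 100 -> int(floor(100 - 5)) = 95
    #   regress 120 -> int(ceil(120 + 6)) = 126
    # i.e. the acceptable band widens by 5% on each side.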
# Calculate the new checksum to test if this is the only thing that may have
# changed.
checksum_rowdata = GetRowData(perf, key)
new_checksum = GetRowDigest(checksum_rowdata, key)
if ('regress' in perf[key] and 'improve' in perf[key] and
perf[key]['regress'] == regress and perf[key]['improve'] == improve and
original_checksum == new_checksum):
OutputMessage('no change')
continue
write_new_expectations = True
OutputMessage('traces: %s' % trace_values, verbose_message=False)
OutputMessage('before: %s' % perf[key], verbose_message=False)
perf[key]['regress'] = regress
perf[key]['improve'] = improve
OutputMessage('after: %s' % perf[key], verbose_message=False)
if options.checksum:
if found_checksum_mismatch:
return 1
else:
return 0
if write_new_expectations:
print '\nWriting expectations... ',
WriteJson(perf_file, perf, perfkeys)
print 'done'
else:
if options.verbose:
print ''
print 'No changes.'
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
| bsd-3-clause | -3,387,298,855,177,554,000 | 32.057743 | 80 | 0.624295 | false |
MediaKraken/MediaKraken_Deployment | source/database_async/db_base_metadata_async.py | 1 | 8531 | import inspect
from common import common_logging_elasticsearch_httpx
async def db_metadata_guid_from_media_guid(self, guid, db_connection=None):
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
return await db_conn.fetchval('select mm_media_metadata_guid'
' from mm_media'
' where mm_media_guid = $1', guid)
async def db_meta_insert_tmdb(self, uuid_id, series_id, data_title, data_json,
data_image_json, db_connection=None):
"""
# insert metadata from themoviedb
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
await db_conn.execute('insert into mm_metadata_movie (mm_metadata_guid,'
' mm_metadata_media_id,'
' mm_metadata_name,'
' mm_metadata_json,'
' mm_metadata_localimage_json)'
' values ($1,$2,$3,$4,$5)',
uuid_id, series_id, data_title,
data_json, data_image_json)
await db_conn.execute('commit')
async def db_meta_guid_by_imdb(self, imdb_uuid, db_connection=None):
"""
# metadata guid by imdb id
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
return await db_conn.fetchval('select mm_metadata_guid'
' from mm_metadata_movie'
' where mm_metadata_media_id->\'imdb\' ? $1',
imdb_uuid)
async def db_meta_guid_by_tmdb(self, tmdb_uuid, db_connection=None):
"""
    # see if metadata exists for this type and id
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
return await db_conn.fetchval('select mm_metadata_guid'
' from mm_metadata_movie'
' where mm_metadata_media_id = $1',
tmdb_uuid)
async def db_find_metadata_guid(self, media_name, media_release_year, db_connection=None):
"""
Lookup id by name/year
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
metadata_guid = None
if media_release_year is not None:
        # match the release year itself as well as up to 3 years either side
meta_results = await db_conn.fetch('select mm_metadata_guid from mm_metadata_movie'
' where (LOWER(mm_metadata_name) = $1'
' or lower(mm_metadata_json->>\'original_title\') = $2)'
' and substring(mm_metadata_json->>\'release_date\''
' from 0 for 5)'
' in ($3,$4,$5,$6,$7,$8,$9)',
media_name.lower(), media_name.lower(),
str(media_release_year),
str(int(media_release_year) + 1),
str(int(media_release_year) + 2),
str(int(media_release_year) + 3),
str(int(media_release_year) - 1),
str(int(media_release_year) - 2),
str(int(media_release_year) - 3))
else:
meta_results = await db_conn.fetch('select mm_metadata_guid from mm_metadata_movie'
' where (LOWER(mm_metadata_name) = $1'
' or lower(mm_metadata_json->>\'original_title\') = $2)',
media_name.lower(), media_name.lower())
for row_data in meta_results:
# TODO should probably handle multiple results better. Perhaps a notification?
metadata_guid = row_data['mm_metadata_guid']
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
"db find metadata guid": metadata_guid})
break
return metadata_guid
| gpl-3.0 | 793,320,040,041,768,300 | 57.431507 | 117 | 0.327746 | false |
dwightgunning/django | tests/auth_tests/test_mixins.py | 274 | 8335 | from django.contrib.auth import models
from django.contrib.auth.mixins import (
LoginRequiredMixin, PermissionRequiredMixin, UserPassesTestMixin,
)
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import RequestFactory, TestCase
from django.views.generic import View
class AlwaysTrueMixin(UserPassesTestMixin):
def test_func(self):
return True
class AlwaysFalseMixin(UserPassesTestMixin):
def test_func(self):
return False
class EmptyResponseView(View):
def get(self, request, *args, **kwargs):
return HttpResponse()
class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView):
pass
class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView):
pass
class StackedMixinsView1(LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser']
raise_exception = True
class StackedMixinsView2(PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser']
raise_exception = True
class AccessMixinTests(TestCase):
factory = RequestFactory()
def test_stacked_mixins_success(self):
user = models.User.objects.create(username='joe', password='qwerty')
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
user.user_permissions.add(*perms)
request = self.factory.get('/rand')
request.user = user
view = StackedMixinsView1.as_view()
response = view(request)
self.assertEqual(response.status_code, 200)
view = StackedMixinsView2.as_view()
response = view(request)
self.assertEqual(response.status_code, 200)
def test_stacked_mixins_missing_permission(self):
user = models.User.objects.create(username='joe', password='qwerty')
perms = models.Permission.objects.filter(codename__in=('add_customuser',))
user.user_permissions.add(*perms)
request = self.factory.get('/rand')
request.user = user
view = StackedMixinsView1.as_view()
with self.assertRaises(PermissionDenied):
view(request)
view = StackedMixinsView2.as_view()
with self.assertRaises(PermissionDenied):
view(request)
def test_stacked_mixins_not_logged_in(self):
user = models.User.objects.create(username='joe', password='qwerty')
user.is_authenticated = lambda: False
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
user.user_permissions.add(*perms)
request = self.factory.get('/rand')
request.user = user
view = StackedMixinsView1.as_view()
with self.assertRaises(PermissionDenied):
view(request)
view = StackedMixinsView2.as_view()
with self.assertRaises(PermissionDenied):
view(request)
class UserPassesTestTests(TestCase):
factory = RequestFactory()
def _test_redirect(self, view=None, url='/accounts/login/?next=/rand'):
if not view:
view = AlwaysFalseView.as_view()
request = self.factory.get('/rand')
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, url)
def test_default(self):
self._test_redirect()
def test_custom_redirect_url(self):
class AView(AlwaysFalseView):
login_url = '/login/'
self._test_redirect(AView.as_view(), '/login/?next=/rand')
def test_custom_redirect_parameter(self):
class AView(AlwaysFalseView):
redirect_field_name = 'goto'
self._test_redirect(AView.as_view(), '/accounts/login/?goto=/rand')
def test_no_redirect_parameter(self):
class AView(AlwaysFalseView):
redirect_field_name = None
self._test_redirect(AView.as_view(), '/accounts/login/')
def test_raise_exception(self):
class AView(AlwaysFalseView):
raise_exception = True
request = self.factory.get('/rand')
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, AView.as_view(), request)
def test_raise_exception_custom_message(self):
msg = "You don't have access here"
class AView(AlwaysFalseView):
raise_exception = True
permission_denied_message = msg
request = self.factory.get('/rand')
request.user = AnonymousUser()
view = AView.as_view()
with self.assertRaises(PermissionDenied) as cm:
view(request)
self.assertEqual(cm.exception.args[0], msg)
def test_raise_exception_custom_message_function(self):
msg = "You don't have access here"
class AView(AlwaysFalseView):
raise_exception = True
def get_permission_denied_message(self):
return msg
request = self.factory.get('/rand')
request.user = AnonymousUser()
view = AView.as_view()
with self.assertRaises(PermissionDenied) as cm:
view(request)
self.assertEqual(cm.exception.args[0], msg)
def test_user_passes(self):
view = AlwaysTrueView.as_view()
request = self.factory.get('/rand')
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 200)
class LoginRequiredMixinTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username='joe', password='qwerty')
def test_login_required(self):
"""
        Check that login_required works on a simple view wrapped in a
        LoginRequiredMixin.
"""
class AView(LoginRequiredMixin, EmptyResponseView):
pass
view = AView.as_view()
request = self.factory.get('/rand')
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual('/accounts/login/?next=/rand', response.url)
request = self.factory.get('/rand')
request.user = self.user
response = view(request)
self.assertEqual(response.status_code, 200)
class PermissionsRequiredMixinTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username='joe', password='qwerty')
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
cls.user.user_permissions.add(*perms)
def test_many_permissions_pass(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser']
request = self.factory.get('/rand')
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 200)
def test_single_permission_pass(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = 'auth.add_customuser'
request = self.factory.get('/rand')
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 200)
def test_permissioned_denied_redirect(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission']
request = self.factory.get('/rand')
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 302)
def test_permissioned_denied_exception_raised(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission']
raise_exception = True
request = self.factory.get('/rand')
request.user = self.user
self.assertRaises(PermissionDenied, AView.as_view(), request)
| bsd-3-clause | 1,535,350,661,884,959,700 | 32.207171 | 110 | 0.660588 | false |
s20121035/rk3288_android5.1_repo | external/chromium_org/third_party/closure_compiler/compiler_customization_test.py | 40 | 6461 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from checker import Checker
from processor import FileCache, Processor
ASSERT_FILE = os.path.join("..", "..", "ui", "webui", "resources", "js",
"assert.js")
CR_FILE = os.path.join("..", "..", "ui", "webui", "resources", "js", "cr.js")
UI_FILE = os.path.join("..", "..", "ui", "webui", "resources", "js", "cr",
"ui.js")
def rel_to_abs(rel_path):
script_path = os.path.dirname(os.path.abspath(__file__))
return os.path.join(script_path, rel_path)
class CompilerCustomizationTest(unittest.TestCase):
_ASSERT_DEFINITION = Processor(rel_to_abs(ASSERT_FILE)).contents
_CR_DEFINE_DEFINITION = Processor(rel_to_abs(CR_FILE)).contents
_CR_UI_DECORATE_DEFINITION = Processor(rel_to_abs(UI_FILE)).contents
def setUp(self):
self._checker = Checker()
def _runChecker(self, source_code):
file_path = "/script.js"
FileCache._cache[file_path] = source_code
return self._checker.check(file_path)
def _runCheckerTestExpectError(self, source_code, expected_error):
_, output = self._runChecker(source_code)
self.assertTrue(expected_error in output,
msg="Expected chunk: \n%s\n\nOutput:\n%s\n" % (
expected_error, output))
def _runCheckerTestExpectSuccess(self, source_code):
return_code, output = self._runChecker(source_code)
self.assertTrue(return_code == 0,
msg="Expected success, got return code %d\n\nOutput:\n%s\n" % (
return_code, output))
def testGetInstance(self):
self._runCheckerTestExpectError("""
var cr = {
/** @param {!Function} ctor */
addSingletonGetter: function(ctor) {
ctor.getInstance = function() {
return ctor.instance_ || (ctor.instance_ = new ctor());
};
}
};
/** @constructor */
function Class() {
/** @param {number} num */
this.needsNumber = function(num) {};
}
cr.addSingletonGetter(Class);
Class.getInstance().needsNumber("wrong type");
""", "ERROR - actual parameter 1 of Class.needsNumber does not match formal "
"parameter")
def testCrDefineFunctionDefinition(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @param {number} num */
function internalName(num) {}
return {
needsNumber: internalName
};
});
a.b.c.needsNumber("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.needsNumber does not match formal "
"parameter")
def testCrDefineFunctionAssignment(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @param {number} num */
var internalName = function(num) {};
return {
needsNumber: internalName
};
});
a.b.c.needsNumber("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.needsNumber does not match formal "
"parameter")
def testCrDefineConstructorDefinitionPrototypeMethod(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @constructor */
function ClassInternalName() {}
ClassInternalName.prototype = {
/** @param {number} num */
method: function(num) {}
};
return {
ClassExternalName: ClassInternalName
};
});
new a.b.c.ClassExternalName().method("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.ClassExternalName.prototype.method "
"does not match formal parameter")
def testCrDefineConstructorAssignmentPrototypeMethod(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @constructor */
var ClassInternalName = function() {};
ClassInternalName.prototype = {
/** @param {number} num */
method: function(num) {}
};
return {
ClassExternalName: ClassInternalName
};
});
new a.b.c.ClassExternalName().method("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.ClassExternalName.prototype.method "
"does not match formal parameter")
def testCrDefineEnum(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @enum {string} */
var internalNameForEnum = {key: 'wrong_type'};
return {
exportedEnum: internalNameForEnum
};
});
/** @param {number} num */
function needsNumber(num) {}
needsNumber(a.b.c.exportedEnum.key);
""", "ERROR - actual parameter 1 of needsNumber does not match formal "
"parameter")
def testObjectDefineProperty(self):
self._runCheckerTestExpectSuccess("""
/** @constructor */
function Class() {}
Object.defineProperty(Class.prototype, 'myProperty', {});
alert(new Class().myProperty);
""")
def testCrDefineProperty(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION + """
/** @constructor */
function Class() {}
cr.defineProperty(Class.prototype, 'myProperty', cr.PropertyKind.JS);
alert(new Class().myProperty);
""")
def testCrDefinePropertyTypeChecking(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
/** @constructor */
function Class() {}
cr.defineProperty(Class.prototype, 'booleanProp', cr.PropertyKind.BOOL_ATTR);
/** @param {number} num */
function needsNumber(num) {}
needsNumber(new Class().booleanProp);
""", "ERROR - actual parameter 1 of needsNumber does not match formal "
"parameter")
def testCrDefineOnCrWorks(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION + """
cr.define('cr', function() {
return {};
});
""")
def testAssertWorks(self):
self._runCheckerTestExpectSuccess(self._ASSERT_DEFINITION + """
/** @return {?string} */
function f() {
return "string";
}
/** @type {!string} */
var a = assert(f());
""")
def testAssertInstanceofWorks(self):
self._runCheckerTestExpectSuccess(self._ASSERT_DEFINITION + """
/** @constructor */
function Class() {}
/** @return {Class} */
function f() {
var a = document.createElement('div');
return assertInstanceof(a, Class);
}
""")
def testCrUiDecorateWorks(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION +
self._CR_UI_DECORATE_DEFINITION + """
/** @constructor */
function Class() {}
/** @return {Class} */
function f() {
var a = document.createElement('div');
cr.ui.decorate(a, Class);
return a;
}
""")
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | -3,230,904,830,558,759,400 | 25.698347 | 78 | 0.67033 | false |
CapOM/ChromiumGStreamerBackend | tools/telemetry/third_party/gsutilz/third_party/boto/boto/sdb/item.py | 153 | 6885 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import base64
class Item(dict):
"""
A ``dict`` sub-class that serves as an object representation of a
SimpleDB item. An item in SDB is similar to a row in a relational
database. Items belong to a :py:class:`Domain <boto.sdb.domain.Domain>`,
which is similar to a table in a relational database.
The keys on instances of this object correspond to attributes that are
stored on the SDB item.
.. tip:: While it is possible to instantiate this class directly, you may
want to use the convenience methods on :py:class:`boto.sdb.domain.Domain`
for that purpose. For example, :py:meth:`boto.sdb.domain.Domain.get_item`.
"""
def __init__(self, domain, name='', active=False):
"""
:type domain: :py:class:`boto.sdb.domain.Domain`
:param domain: The domain that this item belongs to.
:param str name: The name of this item. This name will be used when
querying for items using methods like
:py:meth:`boto.sdb.domain.Domain.get_item`
"""
dict.__init__(self)
self.domain = domain
self.name = name
self.active = active
self.request_id = None
self.encoding = None
self.in_attribute = False
self.converter = self.domain.connection.converter
def startElement(self, name, attrs, connection):
if name == 'Attribute':
self.in_attribute = True
self.encoding = attrs.get('encoding', None)
return None
def decode_value(self, value):
if self.encoding == 'base64':
self.encoding = None
return base64.decodestring(value)
else:
return value
def endElement(self, name, value, connection):
if name == 'ItemName':
self.name = self.decode_value(value)
elif name == 'Name':
if self.in_attribute:
self.last_key = self.decode_value(value)
else:
self.name = self.decode_value(value)
elif name == 'Value':
if self.last_key in self:
if not isinstance(self[self.last_key], list):
self[self.last_key] = [self[self.last_key]]
value = self.decode_value(value)
if self.converter:
value = self.converter.decode(value)
self[self.last_key].append(value)
else:
value = self.decode_value(value)
if self.converter:
value = self.converter.decode(value)
self[self.last_key] = value
elif name == 'BoxUsage':
try:
connection.box_usage += float(value)
except:
pass
elif name == 'RequestId':
self.request_id = value
elif name == 'Attribute':
self.in_attribute = False
else:
setattr(self, name, value)
def load(self):
"""
Loads or re-loads this item's attributes from SDB.
.. warning::
If you have changed attribute values on an Item instance,
this method will over-write the values if they are different in
SDB. For any local attributes that don't yet exist in SDB,
they will be safe.
"""
self.domain.get_attributes(self.name, item=self)
def save(self, replace=True):
"""
Saves this item to SDB.
:param bool replace: If ``True``, delete any attributes on the remote
SDB item that have a ``None`` value on this object.
"""
self.domain.put_attributes(self.name, self, replace)
# Delete any attributes set to "None"
if replace:
del_attrs = []
for name in self:
if self[name] is None:
del_attrs.append(name)
if len(del_attrs) > 0:
self.domain.delete_attributes(self.name, del_attrs)
def add_value(self, key, value):
"""
Helps set or add to attributes on this item. If you are adding a new
attribute that has yet to be set, it will simply create an attribute
named ``key`` with your given ``value`` as its value. If you are
adding a value to an existing attribute, this method will convert the
attribute to a list (if it isn't already) and append your new value
to said list.
For clarification, consider the following interactive session:
.. code-block:: python
>>> item = some_domain.get_item('some_item')
>>> item.has_key('some_attr')
False
>>> item.add_value('some_attr', 1)
>>> item['some_attr']
1
>>> item.add_value('some_attr', 2)
>>> item['some_attr']
[1, 2]
:param str key: The attribute to add a value to.
:param object value: The value to set or append to the attribute.
"""
if key in self:
# We already have this key on the item.
if not isinstance(self[key], list):
# The key isn't already a list, take its current value and
# convert it to a list with the only member being the
# current value.
self[key] = [self[key]]
# Add the new value to the list.
self[key].append(value)
else:
# This is a new attribute, just set it.
self[key] = value
def delete(self):
"""
Deletes this item in SDB.
.. note:: This local Python object remains in its current state
after deletion, this only deletes the remote item in SDB.
"""
self.domain.delete_item(self)
| bsd-3-clause | -8,353,511,660,481,222,000 | 37.898305 | 82 | 0.592447 | false |
dudepare/django | django/contrib/admindocs/urls.py | 574 | 1183 | from django.conf.urls import url
from django.contrib.admindocs import views
urlpatterns = [
url('^$',
views.BaseAdminDocsView.as_view(template_name='admin_doc/index.html'),
name='django-admindocs-docroot'),
url('^bookmarklets/$',
views.BookmarkletsView.as_view(),
name='django-admindocs-bookmarklets'),
url('^tags/$',
views.TemplateTagIndexView.as_view(),
name='django-admindocs-tags'),
url('^filters/$',
views.TemplateFilterIndexView.as_view(),
name='django-admindocs-filters'),
url('^views/$',
views.ViewIndexView.as_view(),
name='django-admindocs-views-index'),
url('^views/(?P<view>[^/]+)/$',
views.ViewDetailView.as_view(),
name='django-admindocs-views-detail'),
url('^models/$',
views.ModelIndexView.as_view(),
name='django-admindocs-models-index'),
url('^models/(?P<app_label>[^\.]+)\.(?P<model_name>[^/]+)/$',
views.ModelDetailView.as_view(),
name='django-admindocs-models-detail'),
url('^templates/(?P<template>.*)/$',
views.TemplateDetailView.as_view(),
name='django-admindocs-templates'),
]
| bsd-3-clause | 9,114,568,968,733,334,000 | 35.96875 | 78 | 0.609467 | false |
patilsangram/erpnext | erpnext/patches/v10_0/set_auto_created_serial_no_in_stock_entry.py | 17 | 1708 | # Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
serialised_items = [d.name for d in frappe.get_all("Item", filters={"has_serial_no": 1})]
if not serialised_items:
return
for dt in ["Stock Entry Detail", "Purchase Receipt Item", "Purchase Invoice Item"]:
cond = ""
if dt=="Purchase Invoice Item":
cond = """ and parent in (select name from `tabPurchase Invoice`
where `tabPurchase Invoice`.name = `tabPurchase Invoice Item`.parent and update_stock=1)"""
item_rows = frappe.db.sql("""
select name
from `tab{0}`
where conversion_factor != 1
and docstatus = 1
and ifnull(serial_no, '') = ''
and item_code in ({1})
{2}
""".format(dt, ', '.join(['%s']*len(serialised_items)), cond), tuple(serialised_items))
if item_rows:
sle_serial_nos = dict(frappe.db.sql("""
select voucher_detail_no, serial_no
from `tabStock Ledger Entry`
where ifnull(serial_no, '') != ''
and voucher_detail_no in (%s)
""".format(', '.join(['%s']*len(item_rows))),
tuple([d[0] for d in item_rows])))
batch_size = 100
for i in range(0, len(item_rows), batch_size):
batch_item_rows = item_rows[i:i + batch_size]
when_then = []
for item_row in batch_item_rows:
when_then.append('WHEN `name` = "{row_name}" THEN "{value}"'.format(
row_name=item_row[0],
value=sle_serial_nos.get(item_row[0])))
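				# The generated statement takes the shape (illustrative values):
				#   update `tabStock Entry Detail` set serial_no =
				#     CASE WHEN `name` = "SED-0001" THEN "SN-0001" ... ELSE `serial_no` END
				# so each batch is written back with a single UPDATE.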
frappe.db.sql("""
update
`tab{doctype}`
set
serial_no = CASE {when_then_cond} ELSE `serial_no` END
""".format(
doctype = dt,
when_then_cond=" ".join(when_then)
)) | gpl-3.0 | -8,319,387,747,381,059,000 | 29.517857 | 95 | 0.620609 | false |
agentfog/qiime | scripts/upgma_cluster.py | 15 | 2742 | #!/usr/bin/env python
# File created on 09 Feb 2010
from __future__ import division
__author__ = "Justin Kuczynski"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["Justin Kuczynski"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Justin Kuczynski"
__email__ = "[email protected]"
from qiime.util import parse_command_line_parameters
from qiime.util import make_option
from qiime.hierarchical_cluster import single_file_upgma, multiple_file_upgma
import os
script_info = {}
script_info['brief_description'] = """Build a UPGMA tree comparing samples"""
script_info[
'script_description'] = """In addition to using PCoA, it can be useful to cluster samples using UPGMA (Unweighted Pair Group Method with Arithmetic mean, also known as average linkage). As with PCoA, the input to this step is a distance matrix (i.e. resulting file from beta_diversity.py)."""
script_info['script_usage'] = []
script_info['script_usage'].append(
("""UPGMA Cluster (Single File):""",
"""To perform UPGMA clustering on a single distance matrix (e.g.: beta_div.txt, a result file from beta_diversity.py) use the following idiom:""",
"""%prog -i $PWD/beta_div.txt -o $PWD/beta_div_cluster.tre"""))
script_info['script_usage'].append(
("""UPGMA Cluster (Multiple Files):""",
"""The script also functions in batch mode if a folder is supplied as input. This script operates on every file in the input directory and creates a corresponding upgma tree file in the output directory, e.g.:""",
"""%prog -i $PWD/beta_div_folder -o $PWD/beta_div_folder_results/"""))
script_info[
'output_description'] = """The output is a newick formatted tree compatible with most standard tree viewing programs. Batch processing is also available, allowing the analysis of an entire directory of distance matrices."""
script_info['required_options'] = [
make_option('-i', '--input_path',
type='existing_path', help='input path. directory for batch processing, ' +
'filename for single file operation'),
make_option('-o', '--output_path',
type='new_path', help='output path. directory for batch processing, ' +
'filename for single file operation'),
]
script_info['optional_options'] = []
script_info['version'] = __version__
def main():
option_parser, opts, args = parse_command_line_parameters(**script_info)
if os.path.isdir(opts.input_path):
multiple_file_upgma(opts.input_path, opts.output_path)
elif os.path.isfile(opts.input_path):
single_file_upgma(opts.input_path, opts.output_path)
else:
print("io error, check input file path")
exit(1)
if __name__ == "__main__":
main()
| gpl-2.0 | -1,893,105,543,927,502,600 | 45.474576 | 296 | 0.681619 | false |
donce/django-cms | cms/models/fields.py | 11 | 4349 | # -*- coding: utf-8 -*-
from cms.forms.fields import PageSelectFormField
from cms.models.pagemodel import Page
from cms.models.placeholdermodel import Placeholder
from cms.utils.placeholder import PlaceholderNoAction, validate_placeholder_name
from django.db import models
class PlaceholderField(models.ForeignKey):
def __init__(self, slotname, default_width=None, actions=PlaceholderNoAction, **kwargs):
if kwargs.get('related_name', None) == '+':
raise ValueError("PlaceholderField does not support disabling of related names via '+'.")
if not callable(slotname):
validate_placeholder_name(slotname)
self.slotname = slotname
self.default_width = default_width
self.actions = actions()
if 'to' in kwargs:
del(kwargs['to'])
kwargs.update({'null': True}) # always allow Null
kwargs.update({'editable': False}) # never allow edits in admin
super(PlaceholderField, self).__init__(Placeholder, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(PlaceholderField, self).deconstruct()
kwargs['slotname'] = self.slotname
return name, path, args, kwargs
def _get_new_placeholder(self, instance):
return Placeholder.objects.create(slot=self._get_placeholder_slot(instance), default_width=self.default_width)
def _get_placeholder_slot(self, model_instance):
if callable(self.slotname):
slotname = self.slotname(model_instance)
validate_placeholder_name(slotname)
else:
slotname = self.slotname
return slotname
def pre_save(self, model_instance, add):
if not model_instance.pk:
setattr(model_instance, self.name, self._get_new_placeholder(model_instance))
else:
slot = self._get_placeholder_slot(model_instance)
placeholder = getattr(model_instance, self.name)
if not placeholder:
setattr(model_instance, self.name, self._get_new_placeholder(model_instance))
placeholder = getattr(model_instance, self.name)
if placeholder.slot != slot:
placeholder.slot = slot
placeholder.save()
return super(PlaceholderField, self).pre_save(model_instance, add)
def save_form_data(self, instance, data):
data = getattr(instance, self.name, '')
if not isinstance(data, Placeholder):
data = self._get_new_placeholder(instance)
super(PlaceholderField, self).save_form_data(instance, data)
def south_field_triple(self):
"Returns a suitable description of this field for South."
# We'll just introspect ourselves, since we inherit.
from south.modelsinspector import introspector
field_class = "django.db.models.fields.related.ForeignKey"
args, kwargs = introspector(self)
# That's our definition!
return (field_class, args, kwargs)
def contribute_to_class(self, cls, name):
super(PlaceholderField, self).contribute_to_class(cls, name)
if not hasattr(cls._meta, 'placeholder_field_names'):
cls._meta.placeholder_field_names = []
if not hasattr(cls._meta, 'placeholder_fields'):
cls._meta.placeholder_fields = {}
cls._meta.placeholder_field_names.append(name)
cls._meta.placeholder_fields[self] = name
self.model = cls
class PageField(models.ForeignKey):
default_form_class = PageSelectFormField
default_model_class = Page
def __init__(self, **kwargs):
# We hard-code the `to` argument for ForeignKey.__init__
# since a PageField can only be a ForeignKey to a Page
kwargs['to'] = self.default_model_class
super(PageField, self).__init__(**kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': self.default_form_class,
}
defaults.update(kwargs)
return super(PageField, self).formfield(**defaults)
def south_field_triple(self):
"Returns a suitable description of this field for South."
from south.modelsinspector import introspector
field_class = "django.db.models.fields.related.ForeignKey"
args, kwargs = introspector(self)
return (field_class, args, kwargs)
| bsd-3-clause | -4,439,365,943,613,584,000 | 41.223301 | 118 | 0.649575 | false |
Frodox/buildbot | master/buildbot/test/unit/test_changes_gerritchangesource.py | 3 | 14848 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.utils import iteritems
import datetime
import json
import types
from twisted.internet import defer
from twisted.internet import error
from twisted.internet import reactor
from twisted.python import failure
from twisted.trial import unittest
from buildbot.changes import gerritchangesource
from buildbot.test.fake import httpclientservice as fakehttpclientservice
from buildbot.test.fake import fakedb
from buildbot.test.fake.change import Change
from buildbot.test.util import changesource
class TestGerritHelpers(unittest.TestCase):
def test_proper_json(self):
self.assertEqual(u"Justin Case <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"username": "justincase",
"name": "Justin Case",
"email": "[email protected]"
}))
def test_missing_username(self):
self.assertEqual(u"Justin Case <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"name": "Justin Case",
"email": "[email protected]"
}))
def test_missing_name(self):
self.assertEqual(u"unknown <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"email": "[email protected]"
}))
self.assertEqual(u"gerrit <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"email": "[email protected]"
}, u"gerrit"))
self.assertEqual(u"justincase <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"username": "justincase",
"email": "[email protected]"
}, u"gerrit"))
def test_missing_email(self):
self.assertEqual(u"Justin Case",
gerritchangesource._gerrit_user_to_author({
"username": "justincase",
"name": "Justin Case"
}))
self.assertEqual(u"Justin Case",
gerritchangesource._gerrit_user_to_author({
"name": "Justin Case"
}))
self.assertEqual(u"justincase",
gerritchangesource._gerrit_user_to_author({
"username": "justincase"
}))
self.assertEqual(u"unknown",
gerritchangesource._gerrit_user_to_author({
}))
self.assertEqual(u"gerrit",
gerritchangesource._gerrit_user_to_author({
}, u"gerrit"))
class TestGerritChangeSource(changesource.ChangeSourceMixin,
unittest.TestCase):
def setUp(self):
return self.setUpChangeSource()
def tearDown(self):
return self.tearDownChangeSource()
def newChangeSource(self, host, user, *args, **kwargs):
s = gerritchangesource.GerritChangeSource(
host, user, *args, **kwargs)
self.attachChangeSource(s)
s.configureService()
return s
# tests
def test_describe(self):
s = self.newChangeSource('somehost', 'someuser')
self.assertSubstring("GerritChangeSource", s.describe())
def test_name(self):
s = self.newChangeSource('somehost', 'someuser')
self.assertEqual("GerritChangeSource:someuser@somehost:29418", s.name)
s = self.newChangeSource('somehost', 'someuser', name="MyName")
self.assertEqual("MyName", s.name)
# TODO: test the backoff algorithm
# this variable is reused in test_steps_source_repo
# to ensure correct integration between change source and repo step
expected_change = {'category': u'patchset-created',
'files': ['unknown'],
'repository': u'ssh://someuser@somehost:29418/pr',
'author': u'Dustin <[email protected]>',
'comments': u'fix 1234',
'project': u'pr',
'branch': u'br/4321',
'revlink': u'http://buildbot.net',
'codebase': None,
'revision': u'abcdef',
'src': None,
'when_timestamp': None,
'properties': {u'event.change.owner.email': u'[email protected]',
u'event.change.subject': u'fix 1234',
u'event.change.project': u'pr',
u'event.change.owner.name': u'Dustin',
u'event.change.number': u'4321',
u'event.change.url': u'http://buildbot.net',
u'event.change.branch': u'br',
u'event.type': u'patchset-created',
u'event.patchSet.revision': u'abcdef',
u'event.patchSet.number': u'12'}}
def test_lineReceived_patchset_created(self):
s = self.newChangeSource('somehost', 'someuser')
d = s.lineReceived(json.dumps(dict(
type="patchset-created",
change=dict(
branch="br",
project="pr",
number="4321",
owner=dict(name="Dustin", email="[email protected]"),
url="http://buildbot.net",
subject="fix 1234"
),
patchSet=dict(revision="abcdef", number="12")
)))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
for k, v in iteritems(c):
self.assertEqual(self.expected_change[k], v)
return d
change_merged_event = {
"type": "change-merged",
"change": {
"branch": "br",
"project": "pr",
"number": "4321",
"owner": {"name": "Chuck", "email": "[email protected]"},
"url": "http://buildbot.net",
"subject": "fix 1234"},
"patchSet": {"revision": "abcdefj", "number": "13"}
}
def test_handled_events_filter_true(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user', handled_events=["change-merged"])
d = s.lineReceived(json.dumps(self.change_merged_event))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
self.assertEqual(c["category"], "change-merged")
self.assertEqual(c["branch"], "br")
return d
def test_handled_events_filter_false(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user')
d = s.lineReceived(json.dumps(self.change_merged_event))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 0)
return d
def test_custom_handler(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user',
handled_events=["change-merged"])
def custom_handler(self, properties, event):
event['change']['project'] = "world"
return self.addChangeFromEvent(properties, event)
# Patches class to not bother with the inheritance
s.eventReceived_change_merged = types.MethodType(custom_handler, s)
d = s.lineReceived(json.dumps(self.change_merged_event))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
self.assertEqual(c['project'], "world")
return d
def test_startStreamProcess_bytes_output(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user', debug=True)
exp_argv = ['ssh', 'some_choosy_user@somehost', '-p', '29418']
exp_argv += ['gerrit', 'stream-events']
def spawnProcess(pp, cmd, argv, env):
self.assertEqual([cmd, argv], [exp_argv[0], exp_argv])
pp.errReceived(b'test stderr\n')
pp.outReceived(b'{"type":"dropped-output"}\n')
so = error.ProcessDone(None)
pp.processEnded(failure.Failure(so))
self.patch(reactor, 'spawnProcess', spawnProcess)
s.startStreamProcess()
class TestGerritEventLogPoller(changesource.ChangeSourceMixin,
unittest.TestCase):
NOW_TIMESTAMP = 1479302598
EVENT_TIMESTAMP = 1479302599
NOW_FORMATTED = '2016-16-11 13:23:18'
EVENT_FORMATTED = '2016-16-11 13:23:19'
OBJECTID = 1234
@defer.inlineCallbacks
def setUp(self):
yield self.setUpChangeSource()
yield self.master.startService()
@defer.inlineCallbacks
def tearDown(self):
yield self.master.stopService()
yield self.tearDownChangeSource()
@defer.inlineCallbacks
def newChangeSource(self, **kwargs):
auth = kwargs.pop('auth', ('log', 'pass'))
self._http = yield fakehttpclientservice.HTTPClientService.getFakeService(
self.master, self, 'gerrit', auth=auth)
self.changesource = gerritchangesource.GerritEventLogPoller(
'gerrit', auth=auth, gitBaseURL="ssh://someuser@somehost:29418", pollAtLaunch=False, **kwargs)
@defer.inlineCallbacks
def startChangeSource(self):
yield self.changesource.setServiceParent(self.master)
yield self.attachChangeSource(self.changesource)
# tests
@defer.inlineCallbacks
def test_now(self):
yield self.newChangeSource()
self.changesource.now()
@defer.inlineCallbacks
def test_describe(self):
        # describe is not used yet in buildbot nine, but it can still be useful in the future, so let's
# implement and test it
yield self.newChangeSource()
self.assertSubstring('GerritEventLogPoller', self.changesource.describe())
@defer.inlineCallbacks
def test_name(self):
yield self.newChangeSource()
self.assertEqual('GerritEventLogPoller:gerrit', self.changesource.name)
@defer.inlineCallbacks
def test_lineReceived_patchset_created(self):
self.master.db.insertTestData([
fakedb.Object(id=self.OBJECTID, name='GerritEventLogPoller:gerrit',
class_name='GerritEventLogPoller')])
yield self.newChangeSource()
self.changesource.now = lambda: datetime.datetime.utcfromtimestamp(self.NOW_TIMESTAMP)
self._http.expect(method='get', ep='/plugins/events-log/events/',
params={'t1': self.NOW_FORMATTED},
content_json=dict(
type="patchset-created",
change=dict(
branch="br",
project="pr",
number="4321",
owner=dict(name="Dustin", email="[email protected]"),
url="http://buildbot.net",
subject="fix 1234"
),
eventCreatedOn=self.EVENT_TIMESTAMP,
patchSet=dict(revision="abcdef", number="12")))
yield self.startChangeSource()
yield self.changesource.poll()
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
for k, v in iteritems(c):
self.assertEqual(TestGerritChangeSource.expected_change[k], v)
self.master.db.state.assertState(self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP)
# do a second poll, it should ask for the next events
self._http.expect(method='get', ep='/plugins/events-log/events/',
params={'t1': self.EVENT_FORMATTED},
content_json=dict(
type="patchset-created",
change=dict(
branch="br",
project="pr",
number="4321",
owner=dict(name="Dustin", email="[email protected]"),
url="http://buildbot.net",
subject="fix 1234"
),
eventCreatedOn=self.EVENT_TIMESTAMP + 1,
patchSet=dict(revision="abcdef", number="12")))
yield self.changesource.poll()
self.master.db.state.assertState(self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP + 1)
class TestGerritChangeFilter(unittest.TestCase):
def test_basic(self):
ch = Change(**TestGerritChangeSource.expected_change)
f = gerritchangesource.GerritChangeFilter(
branch=["br"], eventtype=["patchset-created"])
self.assertTrue(f.filter_change(ch))
f = gerritchangesource.GerritChangeFilter(
branch="br2", eventtype=["patchset-created"])
self.assertFalse(f.filter_change(ch))
f = gerritchangesource.GerritChangeFilter(
branch="br", eventtype="ref-updated")
self.assertFalse(f.filter_change(ch))
self.assertEqual(
repr(f),
'<GerritChangeFilter on prop:event.change.branch == br and prop:event.type == ref-updated>')
| gpl-2.0 | -7,526,128,971,953,130,000 | 40.591036 | 106 | 0.556573 | false |
darrenabbey/ymap | scripts_genomes/genome_process_for_standard_bins.GC_bias_1.py | 2 | 7151 | # Input arguments: (Those with '[*]' at end are used here.)
# 2) genome : [String]: defines genome in use for project. (ex. 'Ca_a') [*]
# 3) workingDir : [String]: Directory where links in system are stored. (ex. '/home/bermanj/shared/links/) [*]
#
# Process input files:
# 1) Restriction-digested genome file.
# *) Load usable fragment definitions into array : fragments[i][chr#,bpStart,bpEnd, data_count,data_max,data_ave]
# [0 ,1 ,2 , 3 ,4 ,5 ]
# 2) Read counts for strain of interest dataset.
# -) Find max read count on fragment.
# -) Find average read count along fragment.
#
# Generate output file:
# 3) Output values for each fragment in a tab-delimited text file.
#        Each line contains information for one fragment = [chr_num, bp_start, bp_end, GC_ratio]
# 0) chr_num : Numerical chromosome identifier, defined for each genome in "figure_details.txt".
# 1) bp_start : Start bp coordinate along chromosome.
# 2) bp_end : End bp coordinate along chromosome.
# 3) GC_ratio : Ratio of bases as 'G' or 'C' in fragment.
# 4) Comment lines in output begin with '###'.
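#    Example output line (hypothetical values, tab-delimited as described above):
#        1   9638   10115   0.4176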
#
import string, sys, re, time
userName = sys.argv[1];
genomeName = sys.argv[2];
main_dir = sys.argv[3];
logName = sys.argv[4];
t0 = time.clock()
with open(logName, "a") as myfile:
myfile.write("\t\t\t*================================================================*\n")
myfile.write("\t\t\t| Log of 'genome_process_for_standard_bins.GC_bias_1.py' |\n")
myfile.write("\t\t\t*----------------------------------------------------------------*\n")
#============================================================================================================
# Process restriction-digested genome file.
#------------------------------------------------------------------------------------------------------------
# Example FASTA header line.
# >Ca_a.chr1 (9638..10115) (478bp) [*]
# FASTA entries with header lines ending in '[*]' are usable.
with open(logName, "a") as myfile:
myfile.write("\n\t\t\tProcessing standard bin fragmented genome file.")
# Find name of genome FASTA file for species being examined.
# Read in and parse : "links_dir/main_script_dir/genome_specific/[genome]/reference.txt"
workingDir = main_dir + 'users/' + userName + '/genomes/' + genomeName + '/';
reference_file = workingDir + '/reference.txt'
refFile = open(reference_file,'r')
refFASTA = refFile.read().strip()
refFile.close()
# Open standard-fragmented genome FASTA file.
standardBins_FASTA_file = workingDir + string.replace(refFASTA, '.fasta','.standard_bins.fasta')
standardBins_FASTA_data = open(standardBins_FASTA_file,'r')
#............................................................................................................
# Setup array and counter for tracking fragment definition data.
fragments = []
fragment_counter = 0
# Process the standard-bin fragmented FASTA genome file, line by line.
while True:
# Line pairs have the following structure.
# >Ca_a.chr1 (9638..10115) (478bp) [*]
# ATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGC
line1 = standardBins_FASTA_data.readline()
line2 = standardBins_FASTA_data.readline()
if not line2:
break # EOF
first_char = line1[:1];
if first_char == ">":
        # Line is header to FASTA entry.
line_parts = string.split(string.strip(line1))
chrGenomeAndNum_string = line_parts[0]
bp_coordinate_string = line_parts[1]
fragment_size_string = line_parts[2]
if len(line_parts) > 3:
fragment_usable_string = line_parts[3]
if fragment_usable_string[1] == "*":
# Fragment is usable, so the details should be placed into fragments structure.
# chr number.
# fragment start bp.
# fragment end bp.
# split the chr string by '.' character, then trim off the first three characters ('chr') from the second substring.
# string has format of : ">Ca_a.chr1"
genomeName_string,chrNum_string = chrGenomeAndNum_string.split(".")
chr_num = int(float(chrNum_string.replace("chr","")))
# string has format of : "(9638..10115)"
coordinates = bp_coordinate_string.replace('(','').replace(')','').replace('..',' ').split()
bp_start = int(float(coordinates[0]))
bp_end = int(float(coordinates[1]))
GC_ratio = 0 # placeholder value.
sequence = line2;
G_count = sequence.count('G') + sequence.count('g')
C_count = sequence.count('C') + sequence.count('c')
T_count = sequence.count('T') + sequence.count('t')
A_count = sequence.count('A') + sequence.count('a')
if (float(G_count+C_count+T_count+A_count) == 0):
GC_ratio = 0
else:
GC_ratio = (G_count+C_count)/float(G_count+C_count+T_count+A_count)
fragments.append([chr_num,bp_start,bp_end,GC_ratio])
fragment_counter += 1
standardBins_FASTA_data.close()
# Put fragment counter into a general use variable.
numFragments = fragment_counter
#------------------------------------------------------------------------------------------------------------
# End of code section to parse restriction fragments from genome.
#============================================================================================================
print "### ", time.clock() - t0, "seconds to parse restriction fragments from digested genome."
t1 = time.clock()
print '### numFragments = ' + str(numFragments);
print '### Data from each fragment: [chrNum, bpStart, bpEnd, GC_ratio]'
#============================================================================================================
# Code section to output information about genome restriction fragments.
#------------------------------------------------------------------------------------------------------------
with open(logName, "a") as myfile:
myfile.write("\n\t\t\tOutputting GC-ratios of standard-bin fragmented genome.")
for fragment in range(1, numFragments + 1):  # inclusive, so the last fragment is also output
# Output a line for each fragment.
# fragments[fragment-1] = [chr_num,bp_start,bp_end, GC_ratio]
# 0) chr_num
# 1) bp_start
# 2) bp_end
# 3) GC_ratio
chr_num = fragments[fragment-1][0]
bp_start = fragments[fragment-1][1]
bp_end = fragments[fragment-1][2]
GC_ratio = fragments[fragment-1][3]
print str(chr_num) + '\t' + str(bp_start) + '\t' + str(bp_end) + '\t' + str(GC_ratio)
#------------------------------------------------------------------------------------------------------------
# End of code section to output information about fragments.
#============================================================================================================
print "### ", time.clock() - t1, "seconds to output basic stats of each restriction fragment."
print "### ", time.clock() - t0, "seconds to complete processing of fragment definitions."
with open(logName, "a") as myfile:
myfile.write("\n\t\t\tTime to process = " + str(time.clock()-t0) )
myfile.write("\n\t\t* 'py/genome_process_for_standard_bins.GC_bias_1.py' completed. *")
| mit | -9,136,194,222,161,701,000 | 45.435065 | 127 | 0.551391 | false |
flumotion-mirror/flumotion-flash | flumotion/component/encoders/flv/flv.py | 1 | 1114 | # -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
from flumotion.component import feedcomponent
class FLVEncoder(feedcomponent.EncoderComponent):
checkTimestamp = True
checkOffset = True
def get_pipeline_string(self, properties):
return "ffmpegcolorspace ! ffenc_flv name=encoder"
def configure_pipeline(self, pipeline, properties):
element = pipeline.get_by_name('encoder')
if 'bitrate' in properties:
element.set_property('bitrate', properties['bitrate'])
| lgpl-2.1 | -8,250,751,326,807,032,000 | 34.935484 | 73 | 0.733393 | false |
oczkers/pyllegro | pyllegro/core.py | 1 | 13373 | # -*- coding: utf-8 -*-
"""
gshop.allegro
~~~~~~~~~~~~~~~~~~~
A lightweight wrapper around the Allegro webapi.
"""
import sys
import time
import logging
from zeep import Client
from zeep.exceptions import Fault
from requests.exceptions import ConnectionError
# from socket import error as socketError
from decimal import Decimal
from base64 import b64encode
from hashlib import sha256
if sys.version_info[0] == 3:
long = int
# logger - zeep
logger_zeep = logging.getLogger('zeep')
logger_zeep.setLevel(logging.ERROR)
# logger - allegro
logger_allegro = logging.getLogger(__name__)
def magicDecode(var):
"""Decode unicode to string."""
if var:
var = var.encode('utf8')
return var
def chunked(l, n):
"""Chunk one big list into few small lists."""
return [l[i:i + n] for i in range(0, len(l), n)]
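# Illustrative example (not executed here): chunked([1, 2, 3, 4, 5], 2)
# returns [[1, 2], [3, 4], [5]]; getOrders() relies on this helper to keep
# each webapi call within Allegro's 25-auctions-per-request limit.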
class Allegro(object):
def __init__(self, username, passwd, webapi_key, debug=False):
self.debug = debug
self.webapi_url = 'https://webapi.allegro.pl/service.php?wsdl'
self.username = username
self.passwd_hash = b64encode(sha256(passwd.encode('utf-8')).digest()).decode('utf-8') # hash password
self.webapi_key = webapi_key
self.last_event_id = 0
# init soap client & login
# self.client = Client(self.webapi_url)
# self.ArrayOfLong = self.client.get_type('ns0:ArrayOfLong') # this should be done by zeep...
self.token = self.login(self.username, self.passwd_hash, self.webapi_key)
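    # Typical usage sketch (illustrative credentials and auction id, not real):
    #     api = Allegro('login', 'password', 'webapi-key')
    #     orders = api.getOrders([1234567890])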
def __relogin__(self):
"""Forced logging. Returns token."""
while True:
try:
return self.login(self.username, self.passwd_hash, self.webapi_key)
# except socketError as e:
# logger_allegro.warning(e)
# time.sleep(5)
except:
print(sys.exc_info())
print('Unknown login error')
logger_allegro.warning('Unknown login error')
logger_allegro.exception(sys.exc_info())
time.sleep(5)
def __ask__(self, service, **kwargs):
"""Ask allegro (catch errors). Returns response."""
# TODO: send error code/message to mail
if self.debug:
print('ALLEGRO: %s %s' % (service, kwargs)) # DEBUG
while True:
if service not in ('doGetSiteJournalDeals', 'doGetSiteJournalDealsInfo'):
kwargs['sessionHandle'] = self.token
else:
kwargs['sessionId'] = self.token
            # process only if a token is available
try:
return self.client.service[service](**kwargs)
except Fault as e:
if e.code in ('ERR_NO_SESSION', 'ERR_SESSION_EXPIRED'):
                    print('invalid session token, logging in again')
time.sleep(5)
self.token = self.__relogin__()
elif e.code == 'ERR_INTERNAL_SYSTEM_ERROR':
print('internal server error')
time.sleep(5)
else:
print(sys.exc_info())
print(e)
print(e.code)
time.sleep(5)
self.token = self.__relogin__()
except ConnectionError as e:
print('connection error')
print(e)
time.sleep(5)
# except socketError as e:
# logger_allegro.warning(e)
# time.sleep(5)
# except SoapFault as e:
# if e[0] == 'ERR_SESSION_EXPIRED' or e[0] == 'ERR_NO_SESSION':
# # logger_allegro.debug('Session expired - relogging.')
# logger_allegro.debug(e)
# self.token = self.__relogin__()
# elif e[0] == 'ERR_INTERNAL_SYSTEM_ERROR':
# logger_allegro.debug(e)
# time.sleep(5)
# # elif e[0] == 'ERR_AUCTION_KILLED': # deleted by allegro admin
# # pass
# else:
# logger_allegro.warning(e)
# time.sleep(5)
# self.token = self.__relogin__()
except:
print(sys.exc_info())
print('Unknown soap error')
logger_allegro.warning('Unknown soap error')
logger_allegro.exception(sys.exc_info())
time.sleep(5)
self.token = self.__relogin__()
def login(self, username, passwd_hash, webapi_key, country_code=1):
"""Log in (sets self.token). Returns token (session_handle)."""
self.client = Client(self.webapi_url)
self.ArrayOfLong = self.client.get_type('ns0:ArrayOfLong') # this should be done by zeep...
ver_key = self.client.service.doQuerySysStatus(1, 1, webapi_key)['verKey']
return self.client.service.doLoginEnc(username, passwd_hash,
country_code, webapi_key,
ver_key)['sessionHandlePart']
def getAuctionDetails(self, auction_id):
"""Return basic auction details (doShowItemInfoExt)."""
return self.__ask__('doShowItemInfoExt',
itemId=auction_id,
# getDesc=0,
# getImageUrl=0,
# getAttribs=0,
# getPostageOptions=0,
# getCompanyInfo=0
) # ['itemListInfoExt']
def getBids(self, auction_id):
"""Retrieve all bids in given auction."""
bids = {}
rc = self.__ask__('doGetBidItem2', itemId=auction_id)
if rc:
for i in rc:
i = i['bidsArray']
bids[long(i['item'][1])] = {
'price': Decimal(i['item'][6]),
'quantity': int(i['item'][5]),
'date_buy': i['item'][7]
}
return bids
def getBuyerInfo(self, auction_id, buyer_id):
"""Return buyer info."""
# TODO: add price from getBids
rc = self.__ask__('doGetPostBuyData', itemsArray=self.ArrayOfLong([auction_id]), buyerFilterArray=self.ArrayOfLong([buyer_id]))
rc = rc[0]['usersPostBuyData']['item'][0]['userData']
return {'allegro_aid': auction_id,
'allegro_uid': rc['userId'],
'allegro_login': magicDecode(rc['userLogin']),
'name': magicDecode(rc['userFirstName']),
'surname': magicDecode(rc['userLastName']),
'company': magicDecode(rc['userCompany']),
'postcode': magicDecode(rc['userPostcode']),
'city': magicDecode(rc['userCity']),
'address': magicDecode(rc['userAddress']),
'email': magicDecode(rc['userEmail']),
'phone': rc['userPhone']}
def getOrders(self, auction_ids):
"""Return orders details."""
orders = {}
# chunk list (only 25 auction_ids per request)
for chunk in chunked(auction_ids, 25):
# auctions = [{'item': auction_id} for auction_id in chunk] # TODO?: is it needed?
auctions = self.ArrayOfLong(chunk)
rc = self.__ask__('doGetPostBuyData', itemsArray=auctions)
for auction in rc:
orders_auction = []
bids = self.getBids(auction['itemId'])
# get orders details
# for i in auction.get('usersPostBuyData', ()):
if not auction['usersPostBuyData']: # empty
continue
for i in auction['usersPostBuyData']['item']:
i = i['userData']
if i['userId'] not in bids: # temporary(?) webapi bug fix
continue
orders_auction.append({
'allegro_aid': auction['itemId'],
'allegro_uid': i['userId'],
'allegro_login': magicDecode(i['userLogin']),
'name': magicDecode(i['userFirstName']),
'surname': magicDecode(i['userLastName']),
'company': magicDecode(i['userCompany']),
'postcode': magicDecode(i['userPostcode']),
'city': magicDecode(i['userCity']),
'address': magicDecode(i['userAddress']),
'email': magicDecode(i['userEmail']),
'phone': i['userPhone'],
'price': bids[i['userId']]['price'],
'quantity': bids[i['userId']]['quantity'],
'date_buy': bids[i['userId']]['date_buy']
})
orders[auction['itemId']] = orders_auction
return orders
def getTotalPaid(self, auction_id, buyer_id):
"""Return total paid from buyer on single auction."""
# TODO: it has to be better way to check payments.
date_end = long(time.time())
date_start = date_end - 60 * 60 * 24 * 90
rc = self.__ask__('doGetMyIncomingPayments',
buyerId=buyer_id,
itemId=auction_id,
transRecvDateFrom=date_start,
transRecvDateTo=date_end,
transPageLimit=25, # notneeded | TODO: can be more than 25 payments
transOffset=0)
paid = 0
for t in (rc or []):
# t = t['item']
if t['payTransStatus'] == u'Zakończona' and t['payTransIncomplete'] == 0:
                if t['payTransItId'] == 0:  # combined payment (covers multiple auctions)
for td in t['payTransDetails']['item']:
if td['payTransDetailsItId'] == auction_id:
paid += Decimal(str(td['payTransDetailsPrice']))
                else:  # single payment
paid += Decimal(str(t['payTransAmount']))
return paid
def getJournal(self, start=0):
"""Get all journal events from start."""
# TODO: while len(journaldeals) < 100
pass
def getJournalDealsInfo(self, start=0):
"""Return all events ammount (from start)."""
rc = self.__ask__('doGetSiteJournalDealsInfo',
journalStart=start)
return rc['dealEventsCount']
def getJournalDeals(self, start=None):
"""Return all journal events from start."""
        # 1 - creation of a purchase act (deal), 2 - creation of the post-purchase form (payment card), 3 - cancellation of the post-purchase form (payment card), 4 - completion (payment) of the transaction via PzA
if start is not None:
self.last_event_id = start
events = []
while self.getJournalDealsInfo(self.last_event_id) > 0:
rc = self.__ask__('doGetSiteJournalDeals', journalStart=self.last_event_id)
for i in rc:
events.append({
'allegro_did': i['dealId'],
'deal_status': i['dealEventType'],
'transaction_id': i['dealTransactionId'],
'time': i['dealEventTime'],
'event_id': i['dealEventId'],
'allegro_aid': i['dealItemId'],
'allegro_uid': i['dealBuyerId'],
# 'seller_id': i['dealSellerId '],
'quantity': i['dealQuantity']
})
self.last_event_id = rc[-1]['dealEventId']
return events
# feedback
def getWaitingFeedbacks(self):
"""Return all waiting feedbacks from buyers."""
# TODO: return sorted dictionary (negative/positive/neutral)
feedbacks = []
offset = 0
amount = self.__ask__('doGetWaitingFeedbacksCount')
while amount > 0:
rc = self.__ask__('doGetWaitingFeedbacks',
offset=offset, packageSize=200)
feedbacks.extend(rc['feWaitList'])
amount -= 200
offset += 1
return feedbacks
def doFeedback(self, item_id, use_comment_template, buyer_id, comment, comment_type, op):
"""http://allegro.pl/webapi/documentation.php/show/id,42"""
return self.__ask__('doFeedback',
feItemId=item_id,
feUseCommentTemplate=use_comment_template,
feToUserId=buyer_id,
feComment=comment,
feCommentType=comment_type,
feOp=op)['feedbackId']
# refund
def doSendRefundForms(self, item_id, buyer_id, reason, quantity_sold):
"""http://allegro.pl/webapi/documentation.php/show/id,201"""
# TODO: deprecated
return self.__ask__('doSendRefundForms',
sendRefundFormsDataArr={
'item': {
'itemId': item_id, 'buyerId': buyer_id,
'refundReason': reason, 'itemQuantitySold': quantity_sold
}
})['sendRefundFormsResultsArr']
| lgpl-3.0 | -1,098,039,346,399,221,400 | 41.852564 | 214 | 0.506731 | false |
dariomalchiodi/yaplf | test/test_models.py | 1 | 3023 |
#*****************************************************************************
# Copyright (C) 2010 Dario Malchiodi <[email protected]>
#
# This file is part of yaplf.
# yaplf is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
# yaplf is distributed in the hope that it will be useful, but without any
# warranty; without even the implied warranty of merchantability or fitness
# for a particular purpose. See the GNU Lesser General Public License for
# more details.
# You should have received a copy of the GNU Lesser General Public License
# along with yaplf; if not, see <http://www.gnu.org/licenses/>.
#
#*****************************************************************************
import unittest
from yaplf.models import *
from yaplf.models.neural import *
class Test(unittest.TestCase):
"""Unit tests for models module of yaplf."""
def test_Model(self):
"""Test yaplf Model."""
from yaplf.utility.error import MSE, MaxError
from yaplf.data import LabeledExample
sample = (LabeledExample( (-1,), (-1,) ), LabeledExample((0,), (0,)),
LabeledExample((1,), (1,)))
model = ConstantModel(0)
self.assertEqual(model.test(sample, MSE()), 2.0 / 3)
self.assertEqual(model.test(sample, MaxError()), 1)
    def test_ConstantModel(self):
        """Test yaplf ConstantModel."""
from numpy import random
model = ConstantModel(0)
self.assertEqual(model.compute(1), 0)
self.assertEqual(model.compute((1, 3)), 0)
self.assertEqual(model.compute("string"), 0)
self.assertEqual(model.compute(random.normal()), 0)
def test_Perceptron(self):
"""Test yalpf Perceptron."""
Perceptron(((1, 1),))
Perceptron(((1, 1), (8, -4)))
self.assertRaises(ValueError, Perceptron, ((1, 1), (8, -4, 9)))
Perceptron(((1, 1),), threshold = (-1,))
Perceptron(((1, 1), (8, -4)), threshold = (-1, 1))
self.assertRaises(ValueError, Perceptron, ((1, 1), (8, -4)),
threshold = (-1,))
from yaplf.utility.activation import SigmoidActivationFunction
from numpy import array
Perceptron(((1, 1),), threshold = (-1,),
activation = SigmoidActivationFunction())
self.assertEqual(Perceptron(((1, 1),)).compute((0, 2)), 1)
self.assertEqual(Perceptron(((1, 1),),
activation=SigmoidActivationFunction()).compute((0, 2)),
0.88079707797788231)
self.assertEqual(Perceptron(((1, 1),), threshold=(1,),
activation=SigmoidActivationFunction()).compute((0, 2)),
0.7310585786300049)
self.assertEqual(Perceptron(((1, -1), (-1, 1)),
threshold = (-1, 1)).compute((0, 1)).tolist(), [1, 1])
if __name__ == "__main__":
unittest.main() | lgpl-3.0 | 6,286,507,239,552,671,000 | 41.591549 | 78 | 0.58915 | false |
jcoady9/python-for-android | python3-alpha/python3-src/Lib/distutils/bcppcompiler.py | 179 | 14935 | """distutils.bcppcompiler
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
for the Borland C++ compiler.
"""
# This implementation by Lyle Johnson, based on the original msvccompiler.py
# module and using the directions originally published by Gordon Williams.
# XXX looks like there's a LOT of overlap between these two classes:
# someone should sit down and factor out the common code as
# WindowsCCompiler! --GPW
import os
from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError, UnknownFileError
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log
class BCPPCompiler(CCompiler) :
"""Concrete class that implements an interface to the Borland C/C++
compiler, as defined by the CCompiler abstract class.
"""
compiler_type = 'bcpp'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = _c_extensions + _cpp_extensions
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__ (self,
verbose=0,
dry_run=0,
force=0):
CCompiler.__init__ (self, verbose, dry_run, force)
# These executables are assumed to all be in the path.
# Borland doesn't seem to use any special registry settings to
# indicate their installation locations.
self.cc = "bcc32.exe"
self.linker = "ilink32.exe"
self.lib = "tlib.exe"
self.preprocess_options = None
self.compile_options = ['/tWM', '/O2', '/q', '/g0']
self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
self.ldflags_static = []
self.ldflags_exe = ['/Gn', '/q', '/x']
self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
# -- Worker methods ------------------------------------------------
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
macros, objects, extra_postargs, pp_opts, build = \
self._setup_compile(output_dir, macros, include_dirs, sources,
depends, extra_postargs)
compile_opts = extra_preargs or []
compile_opts.append ('-c')
if debug:
compile_opts.extend (self.compile_options_debug)
else:
compile_opts.extend (self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
# XXX why do the normpath here?
src = os.path.normpath(src)
obj = os.path.normpath(obj)
# XXX _setup_compile() did a mkpath() too but before the normpath.
# Is it possible to skip the normpath?
self.mkpath(os.path.dirname(obj))
if ext == '.res':
# This is already a binary file -- skip it.
continue # the 'for' loop
if ext == '.rc':
# This needs to be compiled to a .res file -- do it now.
try:
self.spawn (["brcc32", "-fo", obj, src])
except DistutilsExecError as msg:
raise CompileError(msg)
continue # the 'for' loop
# The next two are both for the real compiler.
if ext in self._c_extensions:
input_opt = ""
elif ext in self._cpp_extensions:
input_opt = "-P"
else:
# Unknown file type -- no extra options. The compiler
# will probably fail, but let it just in case this is a
# file the compiler recognizes even if we don't.
input_opt = ""
output_opt = "-o" + obj
# Compiler command line syntax is: "bcc32 [options] file(s)".
# Note that the source file names must appear at the end of
# the command line.
try:
self.spawn ([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs + [src])
except DistutilsExecError as msg:
raise CompileError(msg)
return objects
# compile ()
def create_static_lib (self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
(objects, output_dir) = self._fix_object_args (objects, output_dir)
output_filename = \
self.library_filename (output_libname, output_dir=output_dir)
if self._need_link (objects, output_filename):
lib_args = [output_filename, '/u'] + objects
if debug:
pass # XXX what goes here?
try:
self.spawn ([self.lib] + lib_args)
except DistutilsExecError as msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
# create_static_lib ()
def link (self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
# XXX this ignores 'build_temp'! should follow the lead of
# msvccompiler.py
(objects, output_dir) = self._fix_object_args (objects, output_dir)
(libraries, library_dirs, runtime_library_dirs) = \
self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
if runtime_library_dirs:
log.warn("I don't know what to do with 'runtime_library_dirs': %s",
str(runtime_library_dirs))
if output_dir is not None:
output_filename = os.path.join (output_dir, output_filename)
if self._need_link (objects, output_filename):
# Figure out linker args based on type of target.
if target_desc == CCompiler.EXECUTABLE:
startup_obj = 'c0w32'
if debug:
ld_args = self.ldflags_exe_debug[:]
else:
ld_args = self.ldflags_exe[:]
else:
startup_obj = 'c0d32'
if debug:
ld_args = self.ldflags_shared_debug[:]
else:
ld_args = self.ldflags_shared[:]
# Create a temporary exports file for use by the linker
if export_symbols is None:
def_file = ''
else:
head, tail = os.path.split (output_filename)
modname, ext = os.path.splitext (tail)
temp_dir = os.path.dirname(objects[0]) # preserve tree structure
def_file = os.path.join (temp_dir, '%s.def' % modname)
contents = ['EXPORTS']
for sym in (export_symbols or []):
contents.append(' %s=_%s' % (sym, sym))
self.execute(write_file, (def_file, contents),
"writing %s" % def_file)
# Borland C++ has problems with '/' in paths
objects2 = map(os.path.normpath, objects)
# split objects in .obj and .res files
# Borland C++ needs them at different positions in the command line
objects = [startup_obj]
resources = []
for file in objects2:
(base, ext) = os.path.splitext(os.path.normcase(file))
if ext == '.res':
resources.append(file)
else:
objects.append(file)
for l in library_dirs:
ld_args.append("/L%s" % os.path.normpath(l))
ld_args.append("/L.") # we sometimes use relative paths
# list of object files
ld_args.extend(objects)
# XXX the command-line syntax for Borland C++ is a bit wonky;
# certain filenames are jammed together in one big string, but
# comma-delimited. This doesn't mesh too well with the
# Unix-centric attitude (with a DOS/Windows quoting hack) of
# 'spawn()', so constructing the argument list is a bit
# awkward. Note that doing the obvious thing and jamming all
# the filenames and commas into one argument would be wrong,
# because 'spawn()' would quote any filenames with spaces in
# them. Arghghh!. Apparently it works fine as coded...
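            # For orientation, the fully assembled shared-library link line
            # produced below looks roughly like this (illustrative file names):
            #   ilink32 /Tpd /Gn /q /x /L. c0d32 foo.obj , foo.dll ,, import32 cw32mt , foo.def , foo.res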
# name of dll/exe file
ld_args.extend([',',output_filename])
# no map file and start libraries
ld_args.append(',,')
for lib in libraries:
# see if we find it and if there is a bcpp specific lib
# (xxx_bcpp.lib)
libfile = self.find_library_file(library_dirs, lib, debug)
if libfile is None:
ld_args.append(lib)
# probably a BCPP internal library -- don't warn
else:
# full name which prefers bcpp_xxx.lib over xxx.lib
ld_args.append(libfile)
# some default libraries
ld_args.append ('import32')
ld_args.append ('cw32mt')
# def file for export symbols
ld_args.extend([',',def_file])
# add resource files
ld_args.append(',')
ld_args.extend(resources)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath (os.path.dirname (output_filename))
try:
self.spawn ([self.linker] + ld_args)
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
# link ()
# -- Miscellaneous methods -----------------------------------------
def find_library_file (self, dirs, lib, debug=0):
# List of effective library names to try, in order of preference:
# xxx_bcpp.lib is better than xxx.lib
# and xxx_d.lib is better than xxx.lib if debug is set
#
# The "_bcpp" suffix is to handle a Python installation for people
# with multiple compilers (primarily Distutils hackers, I suspect
# ;-). The idea is they'd have one static library for each
# compiler they care about, since (almost?) every Windows compiler
# seems to have a different format for static libraries.
if debug:
dlib = (lib + "_d")
try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
else:
try_names = (lib + "_bcpp", lib)
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename(name))
if os.path.exists(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
# overwrite the one from CCompiler to support rc and res-files
def object_filenames (self,
source_filenames,
strip_dir=0,
output_dir=''):
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
(base, ext) = os.path.splitext (os.path.normcase(src_name))
if ext not in (self.src_extensions + ['.rc','.res']):
raise UnknownFileError("unknown file type '%s' (from '%s')" % \
(ext, src_name))
if strip_dir:
base = os.path.basename (base)
if ext == '.res':
# these can go unchanged
obj_names.append (os.path.join (output_dir, base + ext))
elif ext == '.rc':
# these need to be compiled to .res-files
obj_names.append (os.path.join (output_dir, base + '.res'))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
# object_filenames ()
def preprocess (self,
source,
output_file=None,
macros=None,
include_dirs=None,
extra_preargs=None,
extra_postargs=None):
(_, macros, include_dirs) = \
self._fix_compile_args(None, macros, include_dirs)
pp_opts = gen_preprocess_options(macros, include_dirs)
pp_args = ['cpp32.exe'] + pp_opts
if output_file is not None:
pp_args.append('-o' + output_file)
if extra_preargs:
pp_args[:0] = extra_preargs
if extra_postargs:
pp_args.extend(extra_postargs)
pp_args.append(source)
# We need to preprocess: either we're being forced to, or the
# source file is newer than the target (or the target doesn't
# exist).
if self.force or output_file is None or newer(source, output_file):
if output_file:
self.mkpath(os.path.dirname(output_file))
try:
self.spawn(pp_args)
except DistutilsExecError as msg:
print(msg)
raise CompileError(msg)
# preprocess()
| apache-2.0 | -1,643,218,921,075,377,000 | 37.002545 | 80 | 0.529494 | false |
ahb0327/intellij-community | python/helpers/coverage/bytecode.py | 209 | 2036 | """Bytecode manipulation for coverage.py"""
import opcode, types
from coverage.backward import byte_to_int
class ByteCode(object):
"""A single bytecode."""
def __init__(self):
# The offset of this bytecode in the code object.
self.offset = -1
# The opcode, defined in the `opcode` module.
self.op = -1
# The argument, a small integer, whose meaning depends on the opcode.
self.arg = -1
# The offset in the code object of the next bytecode.
self.next_offset = -1
# The offset to jump to.
self.jump_to = -1
class ByteCodes(object):
"""Iterator over byte codes in `code`.
Returns `ByteCode` objects.
"""
# pylint: disable=R0924
def __init__(self, code):
self.code = code
def __getitem__(self, i):
return byte_to_int(self.code[i])
def __iter__(self):
offset = 0
while offset < len(self.code):
bc = ByteCode()
bc.op = self[offset]
bc.offset = offset
next_offset = offset+1
if bc.op >= opcode.HAVE_ARGUMENT:
bc.arg = self[offset+1] + 256*self[offset+2]
next_offset += 2
label = -1
if bc.op in opcode.hasjrel:
label = next_offset + bc.arg
elif bc.op in opcode.hasjabs:
label = bc.arg
bc.jump_to = label
bc.next_offset = offset = next_offset
yield bc
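# Illustrative sketch (not part of the original module; assumes the one- and
# three-byte instruction layout of the CPython versions this file targets,
# i.e. before the 3.6 wordcode change):
#
#     code = compile("a = 1 + 2", "<demo>", "exec")
#     for bc in ByteCodes(code.co_code):
#         print(bc.offset, opcode.opname[bc.op], bc.arg)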
class CodeObjects(object):
"""Iterate over all the code objects in `code`."""
def __init__(self, code):
self.stack = [code]
def __iter__(self):
while self.stack:
# We're going to return the code object on the stack, but first
# push its children for later returning.
code = self.stack.pop()
for c in code.co_consts:
if isinstance(c, types.CodeType):
self.stack.append(c)
yield code
| apache-2.0 | -1,538,157,550,117,243,000 | 26.146667 | 77 | 0.530452 | false |
jarvys/django-1.7-jdb | tests/db_backends/tests.py | 32 | 1074 | from django.test import TestCase
from django.db.backends import BaseDatabaseWrapper
class DummyDatabaseWrapper(BaseDatabaseWrapper):
pass
class DummyObject(object):
alias = None
class DbBackendTests(TestCase):
def test_compare_db_wrapper_with_another_object(self):
wrapper = BaseDatabaseWrapper({})
self.assertFalse(wrapper == 'not-a-db-wrapper')
def test_compare_db_wrapper_with_another_object_with_alias(self):
wrapper = BaseDatabaseWrapper({})
obj = DummyObject()
obj.alias = wrapper.alias = 'foobar'
self.assertFalse(wrapper == obj)
def test_negate_compare_db_wrapper_with_another_object(self):
wrapper = BaseDatabaseWrapper({})
self.assertTrue(wrapper != 'not-a-db-wrapper')
def test_compare_db_wrappers(self):
wrapper1 = DummyDatabaseWrapper({})
wrapper2 = BaseDatabaseWrapper({})
wrapper1.alias = wrapper2.alias = 'foo'
self.assertTrue(wrapper1 == wrapper2)
wrapper1.alias = 'bar'
self.assertFalse(wrapper1 == wrapper2)
| bsd-3-clause | -4,903,524,571,859,792,000 | 28.833333 | 69 | 0.676909 | false |
mhvk/astropy | astropy/coordinates/builtin_frames/altaz.py | 3 | 5451 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from astropy import units as u
from astropy.utils.decorators import format_doc
from astropy.coordinates import representation as r
from astropy.coordinates.baseframe import BaseCoordinateFrame, RepresentationMapping, base_doc
from astropy.coordinates.attributes import (TimeAttribute,
QuantityAttribute,
EarthLocationAttribute)
__all__ = ['AltAz']
_90DEG = 90*u.deg
doc_components = """
az : `~astropy.coordinates.Angle`, optional, keyword-only
The Azimuth for this object (``alt`` must also be given and
``representation`` must be None).
alt : `~astropy.coordinates.Angle`, optional, keyword-only
The Altitude for this object (``az`` must also be given and
``representation`` must be None).
distance : `~astropy.units.Quantity` ['length'], optional, keyword-only
The Distance for this object along the line-of-sight.
pm_az_cosalt : `~astropy.units.Quantity` ['angular speed'], optional, keyword-only
The proper motion in azimuth (including the ``cos(alt)`` factor) for
this object (``pm_alt`` must also be given).
pm_alt : `~astropy.units.Quantity` ['angular speed'], optional, keyword-only
The proper motion in altitude for this object (``pm_az_cosalt`` must
also be given).
radial_velocity : `~astropy.units.Quantity` ['speed'], optional, keyword-only
The radial velocity of this object."""
doc_footer = """
Other parameters
----------------
obstime : `~astropy.time.Time`
The time at which the observation is taken. Used for determining the
position and orientation of the Earth.
location : `~astropy.coordinates.EarthLocation`
The location on the Earth. This can be specified either as an
`~astropy.coordinates.EarthLocation` object or as anything that can be
transformed to an `~astropy.coordinates.ITRS` frame.
pressure : `~astropy.units.Quantity` ['pressure']
The atmospheric pressure as an `~astropy.units.Quantity` with pressure
units. This is necessary for performing refraction corrections.
Setting this to 0 (the default) will disable refraction calculations
when transforming to/from this frame.
temperature : `~astropy.units.Quantity` ['temperature']
The ground-level temperature as an `~astropy.units.Quantity` in
deg C. This is necessary for performing refraction corrections.
relative_humidity : `~astropy.units.Quantity` ['dimensionless'] or number
The relative humidity as a dimensionless quantity between 0 to 1.
This is necessary for performing refraction corrections.
obswl : `~astropy.units.Quantity` ['length']
The average wavelength of observations as an `~astropy.units.Quantity`
with length units. This is necessary for performing refraction
corrections.
Notes
-----
The refraction model is based on that implemented in ERFA, which is fast
but becomes inaccurate for altitudes below about 5 degrees. Near and below
altitudes of 0, it can even give meaningless answers, and in this case
transforming to AltAz and back to another frame can give highly discrepant
results. For much better numerical stability, leave the ``pressure`` at
``0`` (the default), thereby disabling the refraction correction and
yielding "topocentric" horizontal coordinates.
"""
@format_doc(base_doc, components=doc_components, footer=doc_footer)
class AltAz(BaseCoordinateFrame):
"""
A coordinate or frame in the Altitude-Azimuth system (Horizontal
coordinates) with respect to the WGS84 ellipsoid. Azimuth is oriented
East of North (i.e., N=0, E=90 degrees). Altitude is also known as
elevation angle, so this frame is also in the Azimuth-Elevation system.
This frame is assumed to *include* refraction effects if the ``pressure``
frame attribute is non-zero.
The frame attributes are listed under **Other Parameters**, which are
necessary for transforming from AltAz to some other system.
"""
frame_specific_representation_info = {
r.SphericalRepresentation: [
RepresentationMapping('lon', 'az'),
RepresentationMapping('lat', 'alt')
]
}
default_representation = r.SphericalRepresentation
default_differential = r.SphericalCosLatDifferential
obstime = TimeAttribute(default=None)
location = EarthLocationAttribute(default=None)
pressure = QuantityAttribute(default=0, unit=u.hPa)
temperature = QuantityAttribute(default=0, unit=u.deg_C)
relative_humidity = QuantityAttribute(default=0, unit=u.dimensionless_unscaled)
obswl = QuantityAttribute(default=1*u.micron, unit=u.micron)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@property
def secz(self):
"""
Secant of the zenith angle for this coordinate, a common estimate of
the airmass.
"""
return 1/np.sin(self.alt)
@property
def zen(self):
"""
The zenith angle (or zenith distance / co-altitude) for this coordinate.
"""
return _90DEG.to(self.alt.unit) - self.alt
# self-transform defined in cirs_observed_transforms.py
| bsd-3-clause | -619,457,488,633,602,400 | 41.92126 | 94 | 0.683177 | false |
kayhayen/Nuitka | nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Options/PackageOption.py | 7 | 1965 | #
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PackageOption.py 2014/07/05 09:42:21 garyo"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def PackageOption(*args, **kw):
global warned
if not warned:
msg = "The PackageOption() function is deprecated; use the PackageVariable() function instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
warned = True
return SCons.Variables.PackageVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 | -423,020,038,802,458,240 | 38.3 | 103 | 0.755216 | false |
Sephyros/URI-Python | 01 - Iniciante/1006.py | 1 | 1180 | # URI Online Judge | 1006
# Average 2
#
# Adapted by Neilor Tonin, URI Brasil
# Timelimit: 1
#
# Read 3 values, in this case the variables A, B and C, which are the three grades of a student. Then compute the
# student's average, knowing that grade A has weight 2, grade B has weight 3 and grade C has weight 5. Consider
# that each grade can range from 0 to 10.0, always with one decimal place.
#
# Input
# The input file contains 3 values with one decimal place, of double precision (double).
#
# Output
# Print the variable MEDIA as in the example below, with 1 digit after the decimal point and with one blank space
# before and after the equals sign. As in every problem, don't forget to print the end of line after the result,
# otherwise you will receive "Presentation Error".
# Input Samples             Output Samples
#
# 5.0 MEDIA = 6.3
# 6.0
# 7.0
#
# 5.0 MEDIA = 9.0
# 10.0
# 10.0
#
# 10.0 MEDIA = 7.5
# 10.0
# 5.0
#
# Solved by Leonardo Vinicius Maciel (aka Sephyros)
A = float(input())
B = float(input())
C = float(input())
MEDIA = ((A * 2.0) + (B * 3.0) + (C * 5.0))/10
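# Sanity check with the first sample: (5.0*2 + 6.0*3 + 7.0*5) / 10 = (10 + 18 + 35) / 10 = 6.3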
print("MEDIA = %.1f" % MEDIA)
| gpl-3.0 | 7,447,770,970,794,053,000 | 28.74359 | 119 | 0.647414 | false |
pydlv/rlauncher | requests/packages/chardet/big5freq.py | 3133 | 82594 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
# Random Distribution Ratio = 512/(5401-512) = 0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
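# Quick arithmetic check of the two ratios above (illustrative only):
#   0.74851 / (1 - 0.74851) = 2.976...  (rounded to 2.98 above)
#   512.0 / (5401 - 512)    = 0.1047... (rounded to 0.105 above)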
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
#Char to FreqOrder table
BIG5_TABLE_SIZE = 5376
Big5CharToFreqOrder = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
# Everything below is of no interest for detection purposes.
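# Hedged note (added, not part of the original data): this table appears to be
# a char-to-frequency-order mapping of the kind used by charset detectors
# (e.g. chardet's CharDistributionAnalysis). Under that reading, only
# characters whose frequency order falls below a fixed sample-size cutoff
# (512, matching the "#last 512" marker above) contribute to the detection
# score; every entry from here on carries an order >= 512 and so is never
# consulted, but is kept so that indexing by character code stays valid.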
2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
13968,13969,13970,13971,13972) #13973
# flake8: noqa
| mit | 1,350,161,219,097,926,700 | 88.290811 | 103 | 0.770903 | false |
asascience-open/ooi-ui-services | ooiservices/app/uframe/events_create_update.py | 1 | 12767 | """
Asset Management - Events: Create and update functions.
"""
__author__ = 'Edna Donoughe'
from ooiservices.app.uframe.uframe_tools import (uframe_get_asset_by_uid, get_uframe_event, uframe_put_event,
uframe_postto, uframe_create_cruise, uframe_create_calibration)
from ooiservices.app.uframe.common_tools import (get_event_types, get_supported_event_types, get_event_class)
from ooiservices.app.uframe.common_tools import convert_status_value_for_display
from ooiservices.app.uframe.events_validate_fields import (events_validate_all_required_fields_are_provided,
events_validate_user_required_fields_are_provided)
# Create event.
def create_event_type(request_data):
""" Create a new event. Return new event on success, or raise exception on error.
Response on success:
{
"message" : "Element created successfully.",
"id" : 14501,
"statusCode" : "CREATED"
}
"""
action = 'create'
try:
# Verify minimum required fields to proceed with create (event_type and uid)
# Required field: event_type
if 'eventType' not in request_data:
message = 'No eventType in request data to create event.'
raise Exception(message)
event_type = request_data['eventType']
if event_type not in get_event_types():
message = 'The event type provided %s is invalid.' % event_type
raise Exception(message)
# If event type create/update not yet supported, raise exception.
if event_type not in get_supported_event_types():
message = 'Event type %s \'%s\' is not supported.' % (event_type, action)
raise Exception(message)
# Required field: assetUid
uid = None
if event_type != 'CRUISE_INFO':
if 'assetUid' not in request_data:
message = 'No assetUid in request data to create event %s.' % event_type
raise Exception(message)
uid = request_data['assetUid']
if not uid:
message = 'The assetUid is empty or null, unable to create a %s event.' % event_type
raise Exception(message)
# Event name not really provided by UI, fill with event type unless CALIBRATION event.
if event_type != 'CALIBRATION_DATA':
request_data['eventName'] = event_type
# Validate data fields to ensure required fields are provided for create.
data = events_validate_all_required_fields_are_provided(event_type, request_data, action=action)
events_validate_user_required_fields_are_provided(event_type, data, action=action)
# Add '@class' field to data; remove 'lastModifiedTimestamp' field; ensure eventId is set to -1.
# Get event class
event_class = get_event_class(event_type)
data['@class'] = event_class
if 'lastModifiedTimestamp' in data:
del data['lastModifiedTimestamp']
# Set eventId for create
data['eventId'] = -1
# Create event.
id = perform_uframe_create_event(event_type, uid, data)
if id < 1:
message = 'Failed to create %s event for asset with uid %s' % (event_type, uid)
raise Exception(message)
# Get newly created event and return.
event = get_uframe_event(id)
# Post process event content for display.
event = post_process_event(event)
return event
except Exception as err:
message = str(err)
raise Exception(message)
# Prepare event for display.
def post_process_event(event):
""" Process event from uframe before returning for display (in UI).
"""
try:
if not event:
message = 'The event provided for post processing is empty.'
raise Exception(message)
if '@class' in event:
del event['@class']
if 'eventType' in event:
if event['eventType'] == 'ASSET_STATUS':
event['status'] = convert_status_value_for_display(event['status'])
return event
except Exception as err:
message = 'Error post-processing event for display. %s' % str(err)
raise Exception(message)
# Update event.
def update_event_type(id, data):
""" Update an existing event, no success return event, on error raise exception.
"""
debug = False
action = 'update'
try:
# Verify minimum required fields to proceed with update (event_type and uid)
if 'eventId' not in data:
message = 'An event id must be provided in the request data.'
raise Exception(message)
# Required field: event_type
if 'eventType' not in data:
message = 'An event type must be provided in the request data.'
raise Exception(message)
# Get event type, verify if valid event type.
event_type = data['eventType']
if event_type not in get_event_types():
message = 'The event type provided %s is invalid.' % event_type
raise Exception(message)
# If event type create/update not yet supported, raise exception.
if event_type not in get_supported_event_types():
message = 'Event type %s \'%s\' is not supported.' % (event_type, action)
raise Exception(message)
# Event name not really provided by UI, fill with event type unless CALIBRATION event.
if event_type != 'CALIBRATION_DATA':
data['eventName'] = event_type
# Validate data fields to ensure required fields are provided for update.
data = events_validate_all_required_fields_are_provided(event_type, data, action=action)
events_validate_user_required_fields_are_provided(event_type, data, action=action)
        # Verify uid provided in data for all event types except CRUISE_INFO and DEPLOYMENT.
uid = None
if event_type != 'CRUISE_INFO' and event_type != 'DEPLOYMENT':
# Required field: assetUid
if 'assetUid' not in data:
message = 'No assetUid in request data to update event %s.' % event_type
raise Exception(message)
uid = data['assetUid']
if not uid or uid is None:
message = 'The assetUid provided is empty or null, unable to update event %s.' % event_type
raise Exception(message)
# Verify eventId provided and of type int.
# Required field: eventId
if 'eventId' not in data:
message = 'No eventId in request data to update event %s.' % event_type
raise Exception(message)
if not isinstance(data['eventId'], int):
message = 'The event id value (%r) must be an integer, it is type: %s' % \
(data['eventId'], str(type(data['eventId'])))
raise Exception(message)
if data['eventId'] != id:
message = 'The event id (\'%r\') provided in data is not equal to id (%d) in url.' % (data['eventId'], id)
raise Exception(message)
# Get event class and add @class field to data
event_class = get_event_class(event_type)
data['@class'] = event_class
# Update event in uframe
updated_id = uframe_put_event(event_type, id, data)
if updated_id <= 0:
message = 'Failed to update %s event in uframe for id %d.' % (event_type, id)
raise Exception(message)
        if updated_id != id:
            message = 'The event id returned from event update (%d) is not equal to original id (%d).' % (updated_id, id)
            raise Exception(message)
# Get updated event, return event
event = get_uframe_event(id)
if debug: print '\n event: ', event
if event['eventType'] == 'ASSET_STATUS':
event['status'] = convert_status_value_for_display(event['status'])
if debug: print '\n event[status]: ', event['status']
return event
except Exception as err:
message = str(err)
raise Exception(message)
def perform_uframe_create_event(event_type, uid, data):
""" Create event using uframe interface determined by event type.
"""
try:
if event_type != 'CRUISE_INFO':
if uid is None or not uid:
message = 'Unable to create %s event for asset with uid: \'%s\'.' % (event_type, uid)
raise Exception(message)
# Create cruise_info event using/events/cruise POST
if event_type == 'CRUISE_INFO':
id = uframe_create_cruise(event_type, data)
# Create calibration_data event
elif event_type == 'CALIBRATION_DATA':
if not isinstance(data['eventId'], int):
message = 'The event id value (%r) must be an integer, it is type: %s' % \
(data['eventId'], str(type(data['eventId'])))
raise Exception(message)
id = create_calibration_data_event(event_type, uid, data)
# Create event using /events/postto/uid POST
else:
if event_type == 'DEPLOYMENT':
message = 'Create event type DEPLOYMENT is not supported through the events create/update interface.'
raise Exception(message)
id = uframe_postto(event_type, uid, data)
if id is None or id <= 0:
message = 'Failed to create and retrieve event from uframe for asset uid: \'%s\'. ' % uid
raise Exception(message)
return id
except Exception as err:
message = str(err)
raise Exception(message)
def create_calibration_data_event(event_type, uid, data):
success_codes = [201, 204]
try:
# create calibration data using /assets/cal POST
event_name = None
if 'eventName' in data:
event_name = data['eventName']
if calibration_data_exists(uid, event_name):
message = 'Calibration data event name \'%s\' exists for asset with uid \'%s\'.' % (event_name, uid)
raise Exception(message)
status_code = uframe_create_calibration(event_type, uid, data)
if status_code not in success_codes:
message = 'Failed to create calibration data for asset uid \'%s\', event name \'%s\'.' % (uid, event_name)
raise Exception(message)
# Get eventId for calibration data event where eventName is event_name and asset uid is uid.
id, _ = get_calibration_event_id(uid, event_name)
return id
except Exception as err:
message = str(err)
raise Exception(message)
def get_calibration_event_id(uid, event_name):
"""
"calibration" : [ {
"@class" : ".XCalibration",
"name" : "CC_a1",
"calData" : [ {
"@class" : ".XCalibrationData",
"values" : [ -1.493703E-4 ],
"dimensions" : [ 1 ],
"cardinality" : 0,
"comments" : "Test entry",
"eventId" : 31534,
"assetUid" : "A01682",
"eventType" : "CALIBRATION_DATA",
"eventName" : "CC_a1",
"eventStartTime" : 1443614400000,
"eventStopTime" : null,
"notes" : null,
"dataSource" : "API:createCalibration:2016-08-31T22:37:22.096Z",
"lastModifiedTimestamp" : 1472683042096
} ]
} ],
"""
id = None
last_modified = None
try:
asset = uframe_get_asset_by_uid(uid)
calibrations = asset['calibration']
for cal in calibrations:
if 'name' in cal:
if cal['name'] == event_name:
# Get eventId
if 'calData' in cal:
for item in cal['calData']:
if 'eventId' in item:
id = item['eventId']
last_modified = item['lastModifiedTimestamp']
break
if id is None:
message = 'Failed to locate calibration name \'%s\' in asset with uid %s.' % (event_name, uid)
raise Exception(message)
return id, last_modified
except Exception as err:
message = str(err)
raise Exception(message)
def calibration_data_exists(uid, event_name):
""" Determine if calibration data contains event name. Return True or False.
"""
result = False
try:
try:
event_id, _ = get_calibration_event_id(uid, event_name)
except:
event_id = 0
if event_id > 0:
result = True
return result
except Exception as err:
message = str(err)
raise Exception(message) | apache-2.0 | -2,366,507,318,431,808,500 | 39.277603 | 121 | 0.585651 | false |
miing/mci_migo | webui/views/devices.py | 1 | 11714 | # Copyright 2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import re
from base64 import b16encode
from collections import namedtuple
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.template.response import TemplateResponse
from django.views.generic import View
from django.utils.translation import ugettext as _
from gargoyle.decorators import switch_is_active
from gargoyle import gargoyle
from M2Crypto.Rand import rand_bytes
from oath.hotp import accept_hotp, hotp
from identityprovider.forms import HOTPDeviceForm, DeviceRenameForm
from identityprovider.models import AuthenticationDevice
from identityprovider.models import twofactor
from identityprovider.models.twofactor import get_otp_type
from webui.decorators import require_twofactor_enabled, sso_login_required
from webui.views.const import (
DEVICE_ADDED,
DEVICE_DELETED,
DEVICE_GENERATION_WARNING,
DEVICE_RENAMED,
OTP_MATCH_ERROR,
)
from webui.views.utils import HttpResponseSeeOther, allow_only
DEVICE_ADDITION = 'device-addition'
DEVICE_LIST = 'device-list'
CodePageDetails = namedtuple('CodePageDetails', 'codes page start position')
def get_context(request, **kwargs):
kwargs['current_section'] = 'devices'
return RequestContext(request, kwargs)
device_types = {
'yubi': _('Yubikey'),
'google': _('Google Authenticator'),
'generic': _('Authentication device'),
'paper': _('Printable Backup Codes'),
}
def generate_key(n):
"""Returns an OATH/HOTP key as a string of n raw bytes."""
# An OATH/HOTP key is just bunch of random (in the "unpredictable"
# sense) bits, of certain quantities (e.g. 160 bits or 20 bytes)
# that are compatible with the AES algorithms.
# From openssl's documentation:
#
# RAND_bytes() puts num cryptographically strong pseudo-random
# bytes into buf. An error occurs if the PRNG has not been
# seeded with enough randomness to ensure an unpredictable byte
# sequence.
#
# openssl's RAND_bytes(num) function is available in Python as
# M2Crypto.Rand.rand_bytes(num).
return b16encode(rand_bytes(n))
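# Illustrative usage (not part of the original module; the key is random):
#     >>> key = generate_key(20)   # 20 random bytes -> 40 hex characters
#     >>> len(key)
#     40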
def get_unique_device_name_for_user(name, user):
"""Returns the name with an extra number to make it unique if it exists in
existing_names
"""
original_name = name
counter = 1
existing_names = [device.name for device in user.devices.all()]
while name in existing_names:
name = '%s (%d)' % (original_name, counter)
counter += 1
return name
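# Example of the uniquing behaviour (device names are hypothetical):
#     existing names: "Yubikey", "Yubikey (1)"
#     get_unique_device_name_for_user("Yubikey", user) -> "Yubikey (2)"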
@sso_login_required
@require_twofactor_enabled
@allow_only('GET')
def device_list(request):
paper_renewals = list(request.user.paper_devices_needing_renewal)
context = get_context(
request, device_addition_path=reverse(DEVICE_ADDITION),
devices=request.user.devices.all(),
need_backup_device_warning=request.user.need_backup_device_warning,
paper_devices_needing_renewal=paper_renewals
)
return render_to_response('device/list.html', context)
@sso_login_required
@require_twofactor_enabled
@allow_only('GET', 'POST')
def device_addition(request):
if request.user.has_twofactor_devices():
if not (twofactor.is_upgraded(request) and
twofactor.is_fresh(request)):
return redirect_to_login(
request.get_full_path(),
reverse('twofactor')
)
if request.method == 'GET':
context = get_context(request, device_list_path=reverse(DEVICE_LIST))
return render_to_response('device/types.html', context)
device_type = request.POST.get('type')
if device_type not in device_types.keys():
return render_to_response('device/types.html', get_context(request))
if device_type == 'paper':
return _device_addition_paper(request)
return _device_addition_standard(request, device_type)
def _device_addition_paper(request):
hex_key = generate_key(20)
device_name = get_unique_device_name_for_user(device_types['paper'],
request.user)
device = _create_device(request, device_name, hex_key, 0, 'paper')
return HttpResponseSeeOther(reverse('device-print', args=(device.id,)))
def _device_addition_standard(request, device_type):
error = None
if 'hex_key' in request.POST:
hex_key = request.POST.get('hex_key')
else:
# TODO: 20 bytes = 160 bits; this will change based on
# device-type.
hex_key = generate_key(20)
if 'name' not in request.POST:
initial_name = get_unique_device_name_for_user(
device_types.get(device_type), request.user)
form = HOTPDeviceForm(initial={'name': initial_name})
else:
form = HOTPDeviceForm(request.POST)
if form.is_valid():
device_name = get_unique_device_name_for_user(
form.cleaned_data['name'], request.user)
otp = form.cleaned_data['otp']
otp_type = get_otp_type(otp)
accepted, new_counter = accept_hotp(
hex_key, otp, 0, otp_type, drift=settings.HOTP_DRIFT,
backward_drift=settings.HOTP_BACKWARDS_DRIFT)
if accepted:
_create_device(request, device_name, hex_key,
new_counter, device_type)
return HttpResponseSeeOther(reverse(DEVICE_LIST))
# Otherwise, set the error flag and fall through...
error = OTP_MATCH_ERROR
# Google would base32-encode, yubi would hex-encode, etc. There
# might even be multiple formats displayed simultaneously.
formatted_key = re.sub('(.{4})', r'\1 ', hex_key).strip()
ctx = get_context(
request,
device_list_path=reverse(DEVICE_LIST),
type=device_type,
ident="/".join([settings.TWOFACTOR_SERVICE_IDENT,
request.user.preferredemail.email]),
hex_key=hex_key,
form=form,
formatted_key=formatted_key,
error=error,
)
return render_to_response('device/addition-%s.html' % device_type, ctx)
def _create_device(request, device_name, hex_key, counter, device_type):
device = AuthenticationDevice.objects.create(
account=request.user,
name=device_name,
key=hex_key,
counter=counter,
device_type=device_type
)
twofactor.login(request)
messages.success(request,
DEVICE_ADDED.format(name=device_name), 'temporary')
return device
@switch_is_active('PAPER_DEVICE')
@sso_login_required(require_twofactor=True, require_twofactor_freshness=True)
@require_twofactor_enabled
@allow_only('GET')
def device_print(request, device_id):
device = _get_device_or_404(device_id, request.user)
if device.device_type != 'paper':
raise Http404
details = _codes_for_position(device)
remaining_codes = settings.TWOFACTOR_PAPER_CODES - details.position
generation_enabled = (
remaining_codes <= settings.TWOFACTOR_PAPER_CODES_ALLOW_GENERATION)
if generation_enabled:
messages.warning(request, DEVICE_GENERATION_WARNING)
context = get_context(
request,
codes=details.codes,
counter=details.position,
device_id=device.id,
generation_enabled=generation_enabled,
)
return TemplateResponse(request, 'device/print-codes.html', context)
def _codes_for_position(device, next_page=False):
# use integer division to round the "window" boundaries
page_size = settings.TWOFACTOR_PAPER_CODES
page, page_position = divmod(device.counter, page_size)
if next_page:
page += 1
page_start = page * page_size
codes = [hotp(device.key, i, 'dec6')
for i in range(page_start, page_start + page_size)]
return CodePageDetails(codes, page, page_start, page_position)
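# Worked example (a sketch assuming settings.TWOFACTOR_PAPER_CODES == 25):
#     counter == 30  ->  page == 1, page_position == 5, page_start == 25,
#     i.e. the codes are hotp(key, 25) .. hotp(key, 49) and 5 are used up.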
@switch_is_active('PAPER_DEVICE')
@sso_login_required(require_twofactor=True, require_twofactor_freshness=True)
@require_twofactor_enabled
@allow_only('GET', 'POST')
def device_generate(request, device_id):
device = _get_device_or_404(device_id, request.user)
if device.device_type != 'paper':
raise Http404
# find the next page of codes
details = _codes_for_position(device, next_page=True)
if request.method == 'GET':
context = get_context(
request,
codes=details.codes,
device_id=device.id,
)
return TemplateResponse(request, 'device/generate-codes.html', context)
device.counter = details.start
device.save()
return HttpResponseRedirect(reverse('device-print', args=(device.id,)))
def _get_device_or_404(device_id, user):
"""Explicit helper function to ensure we don't forget to limit by user."""
return get_object_or_404(AuthenticationDevice, id=device_id, account=user)
@sso_login_required(require_twofactor=True, require_twofactor_freshness=True)
@require_twofactor_enabled
@allow_only('GET', 'POST')
def device_removal(request, device_id):
device = _get_device_or_404(device_id, request.user)
if request.method != 'POST':
context = get_context(request, device_list_path=reverse(DEVICE_LIST),
name=device.name)
return render_to_response('device/removal.html', context)
device.delete()
# We should probably send an e-mail to the user stating which
# device was removed. As a security measure, this would be much
# stronger if bugs #784813, #784817, and #784818 were done.
if not request.user.has_twofactor_devices():
request.user.twofactor_required = False
request.user.save()
twofactor.logout(request)
messages.success(request, DEVICE_DELETED.format(name=device.name))
return HttpResponseSeeOther('/device-list')
class DeviceRenameView(View):
def get(self, request, device_id):
device = _get_device_or_404(device_id, request.user)
form = DeviceRenameForm({'name': device.name})
context = get_context(
request, device_list_path=reverse(DEVICE_LIST), form=form)
return render_to_response('device/rename.html', context)
def post(self, request, device_id):
device = _get_device_or_404(device_id, request.user)
form = DeviceRenameForm(request.POST)
if form.is_valid():
original_name = device.name
device.name = form.cleaned_data['name']
device.save()
messages.success(request,
DEVICE_RENAMED.format(original=original_name,
renamed=device.name))
return HttpResponseRedirect(reverse(DEVICE_LIST))
context = get_context(
request, device_list_path=reverse(DEVICE_LIST), form=form)
return render_to_response('device/rename.html', context)
device_rename = sso_login_required(
require_twofactor=True,
require_twofactor_freshness=True)(DeviceRenameView.as_view())
@allow_only('GET')
def device_help(request):
if gargoyle.is_active('CAN_VIEW_SUPPORT_PHONE', request.user):
support_phone = settings.SUPPORT_PHONE
else:
support_phone = ''
context = RequestContext(request, {'support_phone': support_phone})
return render_to_response('device/device-help.html', context)
| agpl-3.0 | 3,464,751,272,790,637,600 | 33.967164 | 79 | 0.668089 | false |
GabrieleAndrea/MEGAnnotator | bin/SPAdes/share/spades/pyyaml3/reader.py | 272 | 6854 | # This module contains abstractions for the input stream. You don't have to
# look further, there is no pretty code.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position forward `length` characters.
# reader.index - the number of the current character.
# reader.line, stream.column - the line and the column of the current character.
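#
# A minimal usage sketch (illustrative, not part of the original module):
#     reader = Reader("- a\n- b\n")
#     reader.peek()      # -> '-'
#     reader.forward(2)  # past '- '; reader.column is now 2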
__all__ = ['Reader', 'ReaderError']
from .error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, bytes):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `bytes` object,
# - a `str` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
def __init__(self, stream):
self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = ''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, str):
self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+'\0'
elif isinstance(stream, bytes):
self.name = "<byte string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = None
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in '\n\x85\u2028\u2029' \
or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError as exc:
character = self.raw_buffer[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += '\0'
self.raw_buffer = None
break
def update_raw(self, size=4096):
data = self.stream.read(size)
if self.raw_buffer is None:
self.raw_buffer = data
else:
self.raw_buffer += data
self.stream_pointer += len(data)
if not data:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass
| gpl-3.0 | 8,118,727,755,371,595,000 | 34.697917 | 86 | 0.546104 | false |
Godiyos/python-for-android | python-modules/twisted/twisted/python/dxprofile.py | 61 | 1528 | # Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
DEPRECATED since Twisted 8.0.
Utility functions for reporting bytecode frequencies to Skip Montanaro's
stat collector.
This module requires a version of Python build with DYNAMIC_EXCUTION_PROFILE,
and optionally DXPAIRS, defined to be useful.
"""
import sys, types, xmlrpclib, warnings
warnings.warn("twisted.python.dxprofile is deprecated since Twisted 8.0.",
category=DeprecationWarning)
def rle(iterable):
"""
Run length encode a list.
"""
iterable = iter(iterable)
runlen = 1
result = []
try:
previous = iterable.next()
except StopIteration:
return []
for element in iterable:
if element == previous:
runlen = runlen + 1
continue
else:
if isinstance(previous, (types.ListType, types.TupleType)):
previous = rle(previous)
result.append([previous, runlen])
previous = element
runlen = 1
if isinstance(previous, (types.ListType, types.TupleType)):
previous = rle(previous)
result.append([previous, runlen])
return result
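# Doctest-style illustration (not part of the original module):
#     >>> rle([1, 1, 1, 2, 2, 3])
#     [[1, 3], [2, 2], [3, 1]]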
def report(email, appname):
"""
Send an RLE encoded version of sys.getdxp() off to our Top Men (tm)
for analysis.
"""
if hasattr(sys, 'getdxp') and appname:
dxp = xmlrpclib.ServerProxy("http://manatee.mojam.com:7304")
dxp.add_dx_info(appname, email, sys.version_info[:3], rle(sys.getdxp()))
| apache-2.0 | -9,000,950,117,111,381,000 | 26.285714 | 80 | 0.643979 | false |
kingvuplus/boom2 | lib/python/Plugins/SystemPlugins/AnimationSetup/plugin.py | 15 | 8964 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigNumber, ConfigSelectionNumber, getConfigListEntry
from Plugins.Plugin import PluginDescriptor
from enigma import setAnimation_current, setAnimation_speed
# default = slide to left
g_default = {
"current": 0,
"speed" : 20,
}
g_max_speed = 30
g_animation_paused = False
g_orig_show = None
g_orig_doClose = None
config.misc.window_animation_default = ConfigNumber(default=g_default["current"])
config.misc.window_animation_speed = ConfigSelectionNumber(1, g_max_speed, 1, default=g_default["speed"])
class AnimationSetupConfig(ConfigListScreen, Screen):
skin= """
<screen position="center,center" size="600,140" title="Animation Settings">
<widget name="config" position="0,0" size="600,100" scrollbarMode="showOnDemand" />
<ePixmap pixmap="skin_default/buttons/red.png" position="0,100" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,100" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,100" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
</screen>
"""
def __init__(self, session):
self.session = session
self.entrylist = []
Screen.__init__(self, session)
ConfigListScreen.__init__(self, self.entrylist)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions",], {
"ok" : self.keyGreen,
"green" : self.keyGreen,
"yellow" : self.keyYellow,
"red" : self.keyRed,
"cancel" : self.keyRed,
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText(_("Default"))
self.makeConfigList()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(_('Animation Setup'))
def keyGreen(self):
config.misc.window_animation_speed.save()
setAnimation_speed(int(config.misc.window_animation_speed.value))
self.close()
def keyRed(self):
config.misc.window_animation_speed.cancel()
self.close()
def keyYellow(self):
global g_default
config.misc.window_animation_speed.value = g_default["speed"]
self.makeConfigList()
def keyLeft(self):
ConfigListScreen.keyLeft(self)
def keyRight(self):
ConfigListScreen.keyRight(self)
def makeConfigList(self):
self.entrylist = []
entrySpeed = getConfigListEntry(_("Animation Speed"), config.misc.window_animation_speed)
self.entrylist.append(entrySpeed)
self["config"].list = self.entrylist
self["config"].l.setList(self.entrylist)
class AnimationSetupScreen(Screen):
animationSetupItems = [
{"idx":0, "name":_("Disable Animations")},
{"idx":1, "name":_("Simple fade")},
{"idx":2, "name":_("Grow drop")},
{"idx":3, "name":_("Grow from left")},
{"idx":4, "name":_("Popup")},
{"idx":5, "name":_("Slide drop")},
{"idx":6, "name":_("Slide left to right")},
{"idx":7, "name":_("Slide top to bottom")},
{"idx":8, "name":_("Stripes")},
]
skin = """
<screen name="AnimationSetup" position="center,center" size="580,400" title="Animation Setup">
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" zPosition="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" zPosition="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" zPosition="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" zPosition="1" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
<widget source="key_blue" render="Label" position="420,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#18188b" transparent="1" />
<widget name="list" position="10,60" size="560,364" scrollbarMode="showOnDemand" />
<widget source="introduction" render="Label" position="0,370" size="560,40" zPosition="10" font="Regular;20" valign="center" backgroundColor="#25062748" transparent="1" />
</screen>"""
def __init__(self, session):
self.skin = AnimationSetupScreen.skin
Screen.__init__(self, session)
self.animationList = []
self["introduction"] = StaticText(_("* current animation"))
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText(_("Setting"))
self["key_blue"] = StaticText(_("Preview"))
self["actions"] = ActionMap(["SetupActions", "ColorActions"],
{
"cancel": self.keyclose,
"save": self.ok,
"ok" : self.ok,
"yellow": self.config,
"blue": self.preview
}, -3)
self["list"] = MenuList(self.animationList)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
l = []
for x in self.animationSetupItems:
key = x.get("idx", 0)
name = x.get("name", "??")
if key == config.misc.window_animation_default.value:
name = "* %s" % (name)
l.append( (name, key) )
self["list"].setList(l)
def ok(self):
current = self["list"].getCurrent()
if current:
key = current[1]
config.misc.window_animation_default.value = key
config.misc.window_animation_default.save()
setAnimation_current(key)
self.close()
def keyclose(self):
setAnimation_current(config.misc.window_animation_default.value)
setAnimation_speed(int(config.misc.window_animation_speed.value))
self.close()
def config(self):
self.session.open(AnimationSetupConfig)
def preview(self):
current = self["list"].getCurrent()
if current:
global g_animation_paused
tmp = g_animation_paused
g_animation_paused = False
setAnimation_current(current[1])
self.session.open(MessageBox, current[0], MessageBox.TYPE_INFO, timeout=3)
g_animation_paused = tmp
def checkAttrib(self, paused):
global g_animation_paused
if g_animation_paused is paused and self.skinAttributes is not None:
for (attr, value) in self.skinAttributes:
if attr == "animationPaused" and value in ("1", "on"):
return True
return False
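# The two module-level functions below are installed over Screen.show and
# Screen.doClose in sessionAnimationSetup: a screen whose skin sets
# animationPaused="1" (or "on") pauses window animations while it is open
# and restores the configured animation when it closes.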
def screen_show(self):
global g_animation_paused
if g_animation_paused:
setAnimation_current(0)
g_orig_show(self)
if checkAttrib(self, False):
g_animation_paused = True
def screen_doClose(self):
global g_animation_paused
if checkAttrib(self, True):
g_animation_paused = False
setAnimation_current(config.misc.window_animation_default.value)
g_orig_doClose(self)
def animationSetupMain(session, **kwargs):
session.open(AnimationSetupScreen)
def startAnimationSetup(menuid):
if menuid != "osd_menu":
return []
return [( _("Animations"), animationSetupMain, "animation_setup", None)]
def sessionAnimationSetup(session, reason, **kwargs):
setAnimation_current(config.misc.window_animation_default.value)
setAnimation_speed(int(config.misc.window_animation_speed.value))
global g_orig_show, g_orig_doClose
if g_orig_show is None:
g_orig_show = Screen.show
if g_orig_doClose is None:
g_orig_doClose = Screen.doClose
Screen.show = screen_show
Screen.doClose = screen_doClose
def Plugins(**kwargs):
plugin_list = [
PluginDescriptor(
name = "Animations",
description = "Setup UI animations",
where = PluginDescriptor.WHERE_MENU,
needsRestart = False,
fnc = startAnimationSetup),
PluginDescriptor(
where = PluginDescriptor.WHERE_SESSIONSTART,
needsRestart = False,
fnc = sessionAnimationSetup),
]
	return plugin_list
| gpl-2.0 | -8,683,468,355,515,691,000 | 35.291498 | 213 | 0.697791 | false |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/files/etc/apps/headphones/lib/unidecode/x07a.py | 252 | 4669 | data = (
'Xi ', # 0x00
'Kao ', # 0x01
'Lang ', # 0x02
'Fu ', # 0x03
'Ze ', # 0x04
'Shui ', # 0x05
'Lu ', # 0x06
'Kun ', # 0x07
'Gan ', # 0x08
'Geng ', # 0x09
'Ti ', # 0x0a
'Cheng ', # 0x0b
'Tu ', # 0x0c
'Shao ', # 0x0d
'Shui ', # 0x0e
'Ya ', # 0x0f
'Lun ', # 0x10
'Lu ', # 0x11
'Gu ', # 0x12
'Zuo ', # 0x13
'Ren ', # 0x14
'Zhun ', # 0x15
'Bang ', # 0x16
'Bai ', # 0x17
'Ji ', # 0x18
'Zhi ', # 0x19
'Zhi ', # 0x1a
'Kun ', # 0x1b
'Leng ', # 0x1c
'Peng ', # 0x1d
'Ke ', # 0x1e
'Bing ', # 0x1f
'Chou ', # 0x20
'Zu ', # 0x21
'Yu ', # 0x22
'Su ', # 0x23
'Lue ', # 0x24
'[?] ', # 0x25
'Yi ', # 0x26
'Xi ', # 0x27
'Bian ', # 0x28
'Ji ', # 0x29
'Fu ', # 0x2a
'Bi ', # 0x2b
'Nuo ', # 0x2c
'Jie ', # 0x2d
'Zhong ', # 0x2e
'Zong ', # 0x2f
'Xu ', # 0x30
'Cheng ', # 0x31
'Dao ', # 0x32
'Wen ', # 0x33
'Lian ', # 0x34
'Zi ', # 0x35
'Yu ', # 0x36
'Ji ', # 0x37
'Xu ', # 0x38
'Zhen ', # 0x39
'Zhi ', # 0x3a
'Dao ', # 0x3b
'Jia ', # 0x3c
'Ji ', # 0x3d
'Gao ', # 0x3e
'Gao ', # 0x3f
'Gu ', # 0x40
'Rong ', # 0x41
'Sui ', # 0x42
'You ', # 0x43
'Ji ', # 0x44
'Kang ', # 0x45
'Mu ', # 0x46
'Shan ', # 0x47
'Men ', # 0x48
'Zhi ', # 0x49
'Ji ', # 0x4a
'Lu ', # 0x4b
'Su ', # 0x4c
'Ji ', # 0x4d
'Ying ', # 0x4e
'Wen ', # 0x4f
'Qiu ', # 0x50
'Se ', # 0x51
'[?] ', # 0x52
'Yi ', # 0x53
'Huang ', # 0x54
'Qie ', # 0x55
'Ji ', # 0x56
'Sui ', # 0x57
'Xiao ', # 0x58
'Pu ', # 0x59
'Jiao ', # 0x5a
'Zhuo ', # 0x5b
'Tong ', # 0x5c
'Sai ', # 0x5d
'Lu ', # 0x5e
'Sui ', # 0x5f
'Nong ', # 0x60
'Se ', # 0x61
'Hui ', # 0x62
'Rang ', # 0x63
'Nuo ', # 0x64
'Yu ', # 0x65
'Bin ', # 0x66
'Ji ', # 0x67
'Tui ', # 0x68
'Wen ', # 0x69
'Cheng ', # 0x6a
'Huo ', # 0x6b
'Gong ', # 0x6c
'Lu ', # 0x6d
'Biao ', # 0x6e
'[?] ', # 0x6f
'Rang ', # 0x70
'Zhuo ', # 0x71
'Li ', # 0x72
'Zan ', # 0x73
'Xue ', # 0x74
'Wa ', # 0x75
'Jiu ', # 0x76
'Qiong ', # 0x77
'Xi ', # 0x78
'Qiong ', # 0x79
'Kong ', # 0x7a
'Yu ', # 0x7b
'Sen ', # 0x7c
'Jing ', # 0x7d
'Yao ', # 0x7e
'Chuan ', # 0x7f
'Zhun ', # 0x80
'Tu ', # 0x81
'Lao ', # 0x82
'Qie ', # 0x83
'Zhai ', # 0x84
'Yao ', # 0x85
'Bian ', # 0x86
'Bao ', # 0x87
'Yao ', # 0x88
'Bing ', # 0x89
'Wa ', # 0x8a
'Zhu ', # 0x8b
'Jiao ', # 0x8c
'Qiao ', # 0x8d
'Diao ', # 0x8e
'Wu ', # 0x8f
'Gui ', # 0x90
'Yao ', # 0x91
'Zhi ', # 0x92
'Chuang ', # 0x93
'Yao ', # 0x94
'Tiao ', # 0x95
'Jiao ', # 0x96
'Chuang ', # 0x97
'Jiong ', # 0x98
'Xiao ', # 0x99
'Cheng ', # 0x9a
'Kou ', # 0x9b
'Cuan ', # 0x9c
'Wo ', # 0x9d
'Dan ', # 0x9e
'Ku ', # 0x9f
'Ke ', # 0xa0
'Zhui ', # 0xa1
'Xu ', # 0xa2
'Su ', # 0xa3
'Guan ', # 0xa4
'Kui ', # 0xa5
'Dou ', # 0xa6
'[?] ', # 0xa7
'Yin ', # 0xa8
'Wo ', # 0xa9
'Wa ', # 0xaa
'Ya ', # 0xab
'Yu ', # 0xac
'Ju ', # 0xad
'Qiong ', # 0xae
'Yao ', # 0xaf
'Yao ', # 0xb0
'Tiao ', # 0xb1
'Chao ', # 0xb2
'Yu ', # 0xb3
'Tian ', # 0xb4
'Diao ', # 0xb5
'Ju ', # 0xb6
'Liao ', # 0xb7
'Xi ', # 0xb8
'Wu ', # 0xb9
'Kui ', # 0xba
'Chuang ', # 0xbb
'Zhao ', # 0xbc
'[?] ', # 0xbd
'Kuan ', # 0xbe
'Long ', # 0xbf
'Cheng ', # 0xc0
'Cui ', # 0xc1
'Piao ', # 0xc2
'Zao ', # 0xc3
'Cuan ', # 0xc4
'Qiao ', # 0xc5
'Qiong ', # 0xc6
'Dou ', # 0xc7
'Zao ', # 0xc8
'Long ', # 0xc9
'Qie ', # 0xca
'Li ', # 0xcb
'Chu ', # 0xcc
'Shi ', # 0xcd
'Fou ', # 0xce
'Qian ', # 0xcf
'Chu ', # 0xd0
'Hong ', # 0xd1
'Qi ', # 0xd2
'Qian ', # 0xd3
'Gong ', # 0xd4
'Shi ', # 0xd5
'Shu ', # 0xd6
'Miao ', # 0xd7
'Ju ', # 0xd8
'Zhan ', # 0xd9
'Zhu ', # 0xda
'Ling ', # 0xdb
'Long ', # 0xdc
'Bing ', # 0xdd
'Jing ', # 0xde
'Jing ', # 0xdf
'Zhang ', # 0xe0
'Yi ', # 0xe1
'Si ', # 0xe2
'Jun ', # 0xe3
'Hong ', # 0xe4
'Tong ', # 0xe5
'Song ', # 0xe6
'Jing ', # 0xe7
'Diao ', # 0xe8
'Yi ', # 0xe9
'Shu ', # 0xea
'Jing ', # 0xeb
'Qu ', # 0xec
'Jie ', # 0xed
'Ping ', # 0xee
'Duan ', # 0xef
'Shao ', # 0xf0
'Zhuan ', # 0xf1
'Ceng ', # 0xf2
'Deng ', # 0xf3
'Cui ', # 0xf4
'Huai ', # 0xf5
'Jing ', # 0xf6
'Kan ', # 0xf7
'Jing ', # 0xf8
'Zhu ', # 0xf9
'Zhu ', # 0xfa
'Le ', # 0xfb
'Peng ', # 0xfc
'Yu ', # 0xfd
'Chi ', # 0xfe
'Gan ', # 0xff
)
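# Hedged usage sketch (not part of the generated table): unidecode indexes
# this tuple with the low byte of a codepoint in block U+7A00-U+7AFF.
if __name__ == '__main__':
    assert data[0x00] == 'Xi '    # U+7A00
    assert data[0xff] == 'Gan '   # U+7AFF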
| gpl-2.0 | 1,929,247,342,009,774,000 | 17.096899 | 20 | 0.392161 | false |
primepix/django-sentry | sentry/filters.py | 2 | 5486 | """
sentry.filters
~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# Widget api is pretty ugly
from __future__ import absolute_import
from django.conf import settings as django_settings
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.html import escape
from sentry.conf import settings
class Widget(object):
def __init__(self, filter, request):
self.filter = filter
self.request = request
def get_query_string(self):
return self.filter.get_query_string()
class TextWidget(Widget):
def render(self, value, placeholder='', **kwargs):
return mark_safe('<div class="filter-text"><p class="textfield"><input type="text" name="%(name)s" value="%(value)s" placeholder="%(placeholder)s"/></p><p class="submit"><input type="submit" class="search-submit"/></p></div>' % dict(
name=self.filter.get_query_param(),
value=escape(value),
placeholder=escape(placeholder or 'enter %s' % self.filter.label.lower()),
))
class ChoiceWidget(Widget):
def render(self, value, **kwargs):
choices = self.filter.get_choices()
query_string = self.get_query_string()
column = self.filter.get_query_param()
output = ['<ul class="%s-list filter-list" rel="%s">' % (self.filter.column, column)]
output.append('<li%(active)s><a href="%(query_string)s&%(column)s=">Any %(label)s</a></li>' % dict(
active=not value and ' class="active"' or '',
query_string=query_string,
label=self.filter.label,
column=column,
))
for key, val in choices.iteritems():
key = unicode(key)
output.append('<li%(active)s rel="%(key)s"><a href="%(query_string)s&%(column)s=%(key)s">%(value)s</a></li>' % dict(
active=value == key and ' class="active"' or '',
column=column,
key=key,
value=val,
query_string=query_string,
))
output.append('</ul>')
return mark_safe('\n'.join(output))
class SentryFilter(object):
label = ''
column = ''
widget = ChoiceWidget
# This must be a string
default = ''
show_label = True
def __init__(self, request):
self.request = request
def is_set(self):
return bool(self.get_value())
def get_value(self):
return self.request.GET.get(self.get_query_param(), self.default) or ''
def get_query_param(self):
return getattr(self, 'query_param', self.column)
def get_widget(self):
return self.widget(self, self.request)
def get_query_string(self):
column = self.column
query_dict = self.request.GET.copy()
if 'p' in query_dict:
del query_dict['p']
if column in query_dict:
del query_dict[self.column]
return '?' + query_dict.urlencode()
def get_choices(self):
from sentry.models import FilterValue
return SortedDict((l, l) for l in FilterValue.objects.filter(key=self.column)\
.values_list('value', flat=True)\
.order_by('value'))
def get_query_set(self, queryset):
from sentry.models import MessageIndex
kwargs = {self.column: self.get_value()}
if self.column.startswith('data__'):
return MessageIndex.objects.get_for_queryset(queryset, **kwargs)
return queryset.filter(**kwargs)
def process(self, data):
"""``self.request`` is not available within this method"""
return data
def render(self):
widget = self.get_widget()
return widget.render(self.get_value())
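# Hedged sketch (not part of the original module): a new filter only needs a
# label and a column; SentryFilter supplies the widget rendering, query-string
# handling and queryset filtering. "culprit" is a hypothetical column used
# purely for illustration.
class CulpritFilter(SentryFilter):
    label = 'Culprit'
    column = 'culprit'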
class StatusFilter(SentryFilter):
label = 'Status'
column = 'status'
default = '0'
def get_choices(self):
return SortedDict([
(0, 'Unresolved'),
(1, 'Resolved'),
])
class LoggerFilter(SentryFilter):
label = 'Logger'
column = 'logger'
class ServerNameFilter(SentryFilter):
label = 'Server Name'
column = 'server_name'
def get_query_set(self, queryset):
return queryset.filter(message_set__server_name=self.get_value()).distinct()
class SiteFilter(SentryFilter):
label = 'Site'
column = 'site'
def process(self, data):
if 'site' in data:
return data
if settings.SITE is None:
if 'django.contrib.sites' in django_settings.INSTALLED_APPS:
from django.contrib.sites.models import Site
try:
settings.SITE = Site.objects.get_current().name
except Site.DoesNotExist:
settings.SITE = ''
else:
settings.SITE = ''
if settings.SITE:
data['site'] = settings.SITE
return data
def get_query_set(self, queryset):
return queryset.filter(message_set__site=self.get_value()).distinct()
class LevelFilter(SentryFilter):
label = 'Level'
column = 'level'
def get_choices(self):
return SortedDict((str(k), v) for k, v in settings.LOG_LEVELS)
def get_query_set(self, queryset):
return queryset.filter(level__gte=self.get_value())
| bsd-3-clause | -418,618,235,165,186,700 | 32.048193 | 241 | 0.582209 | false |
kyleabeauchamp/FAHMunge | FAHMunge/fah.py | 1 | 9735 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Kyle A. Beauchamp
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""
Code for merging and munging trajectories from FAH datasets.
"""
##############################################################################
# imports
##############################################################################
from __future__ import print_function, division
import os
import glob
import tarfile
from mdtraj.formats.hdf5 import HDF5TrajectoryFile
import mdtraj as md
import tables
from mdtraj.utils.contextmanagers import enter_temp_directory
from mdtraj.utils import six
def keynat(string):
'''A natural sort helper function for sort() and sorted()
without using regular expression.
>>> items = ('Z', 'a', '10', '1', '9')
>>> sorted(items)
['1', '10', '9', 'Z', 'a']
>>> sorted(items, key=keynat)
['1', '9', '10', 'Z', 'a']
'''
r = []
for c in string:
try:
c = int(c)
try:
r[-1] = r[-1] * 10 + c
except:
r.append(c)
except:
r.append(c)
return r
##############################################################################
# globals
##############################################################################
def strip_water(allatom_filename, protein_filename, protein_atom_indices, min_num_frames=1):
"""Strip water (or other) atoms from a Core17, Core18, or OCore FAH HDF5 trajectory.
Parameters
----------
allatom_filename : str
Path to HDF5 trajectory with all atoms. This trajectory must have been generated by
concatenate_core17 or concatenate_siegetank--e.g. it must include
extra metadata that lists the XTC files (bzipped or in OCore directories) that
have already been processed. This file will not be modified.
protein_filename : str
Path to HDF5 trajectory with all just protein atoms. This trajectory must have been generated by
concatenate_core17 or concatenate_siegetank--e.g. it must include
extra metadata that lists the XTC files (bzipped or in OCore directories) that
have already been processed. This file will be appended to.
protein_atom_indices : np.ndarray, dtype='int'
List of atom indices to extract from allatom HDF5 file.
min_num_frames : int, optional, default=1
Skip if below this number.
"""
if not os.path.exists(allatom_filename):
print("Skipping, %s not found" % allatom_filename)
return
trj_allatom = HDF5TrajectoryFile(allatom_filename, mode='r')
if len(trj_allatom) < min_num_frames:
print("Must have at least %d frames in %s to proceed!" % (min_num_frames, allatom_filename))
return
if hasattr(trj_allatom.root, "processed_filenames"):
key = "processed_filenames" # Core17, Core18 style data
elif hasattr(trj_allatom.root, "processed_directories"):
key = "processed_directories" # Siegetank style data
else:
raise(ValueError("Can't find processed files in %s" % allatom_filename))
trj_protein = HDF5TrajectoryFile(protein_filename, mode='a')
try:
trj_protein._create_earray(where='/', name=key, atom=tables.StringAtom(1024), shape=(0,))
trj_protein.topology = trj_allatom.topology.subset(protein_atom_indices)
except tables.NodeError:
pass
n_frames_allatom = len(trj_allatom)
try:
n_frames_protein = len(trj_protein)
except tables.NoSuchNodeError:
n_frames_protein = 0
filenames_allatom = getattr(trj_allatom.root, key)
filenames_protein = getattr(trj_protein._handle.root, key) # Hacky workaround of MDTraj bug #588
n_files_allatom = len(filenames_allatom)
n_files_protein = len(filenames_protein)
print("Found %d,%d filenames and %d,%d frames in %s and %s, respectively." % (n_files_allatom, n_files_protein, n_frames_allatom, n_frames_protein, allatom_filename, protein_filename))
if n_frames_protein > n_frames_allatom:
raise(ValueError("Found more frames in protein trajectory (%d) than allatom trajectory (%d)" % (n_frames_protein, n_frames_allatom)))
if n_files_protein > n_files_allatom:
raise(ValueError("Found more filenames in protein trajectory (%d) than allatom trajectory (%d)" % (n_files_protein, n_files_allatom)))
if n_frames_protein == n_frames_allatom or n_files_allatom == n_files_protein:
if not (n_frames_protein == n_frames_allatom and n_files_allatom == n_files_protein):
raise(ValueError("The trajectories must match in BOTH n_frames and n_filenames or NEITHER."))
else:
print("Same number of frames and filenames found, skipping.")
return
trj_allatom.seek(n_frames_protein) # Jump forward past what we've already stripped.
coordinates, time, cell_lengths, cell_angles, velocities, kineticEnergy, potentialEnergy, temperature, alchemicalLambda = trj_allatom.read()
trj_protein.write(coordinates=coordinates[:, protein_atom_indices], time=time, cell_lengths=cell_lengths, cell_angles=cell_angles) # Ignoring the other fields for now, TODO.
filenames_protein.append(filenames_allatom[n_files_protein:])
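# Hedged usage sketch (the file names and atom selection below are
# assumptions, not part of the module):
def _example_strip_water():
    import mdtraj as md
    top = md.load("native.pdb")
    protein_atom_indices = top.topology.select("protein")
    strip_water("allatom.h5", "protein.h5", protein_atom_indices)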
def concatenate_core17(path, top, output_filename):
"""Concatenate tar bzipped XTC files created by Folding@Home Core17.
Parameters
----------
path : str
Path to directory containing "results-*.tar.bz2". E.g. a single CLONE directory.
top : mdtraj.Topology
Topology for system
output_filename : str
Filename of output HDF5 file to generate.
Notes
-----
We use HDF5 because it provides an easy way to store the metadata associated
with which files have already been processed.
"""
glob_input = os.path.join(path, "results-*.tar.bz2")
filenames = glob.glob(glob_input)
filenames = sorted(filenames, key=keynat)
if len(filenames) <= 0:
return
trj_file = HDF5TrajectoryFile(output_filename, mode='a')
try:
trj_file._create_earray(where='/', name='processed_filenames',atom=trj_file.tables.StringAtom(1024), shape=(0,))
trj_file.topology = top.topology
except trj_file.tables.NodeError:
pass
for filename in filenames:
if six.b(filename) in trj_file._handle.root.processed_filenames: # On Py3, the pytables list of filenames has type byte (e.g. b"hey"), so we need to deal with this via six.
print("Already processed %s" % filename)
continue
with enter_temp_directory():
print("Processing %s" % filename)
archive = tarfile.open(filename, mode='r:bz2')
archive.extract("positions.xtc")
trj = md.load("positions.xtc", top=top)
for frame in trj:
trj_file.write(coordinates=frame.xyz, cell_lengths=frame.unitcell_lengths, cell_angles=frame.unitcell_angles)
trj_file._handle.root.processed_filenames.append([filename])
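# Hedged usage sketch (the CLONE path and topology file are assumptions):
# concatenate one CLONE worth of Core17 work units into a single HDF5 file.
# Re-running the call is safe; already-processed archives are skipped.
def _example_concatenate_core17():
    import mdtraj as md
    top = md.load("native.pdb")
    concatenate_core17("PROJ1234/RUN0/CLONE0", top, "clone0_allatom.h5")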
def concatenate_ocore(path, top, output_filename):
"""Concatenate XTC files created by Siegetank OCore.
Parameters
----------
path : str
Path to stream directory containing frame directories /0, /1, /2
etc.
top : mdtraj.Topology
Topology for system
output_filename : str
Filename of output HDF5 file to generate.
Notes
-----
We use HDF5 because it provides an easy way to store the metadata associated
with which files have already been processed.
"""
sorted_folders = sorted(os.listdir(path), key=lambda value: int(value))
sorted_folders = [os.path.join(path, folder) for folder in sorted_folders]
if len(sorted_folders) <= 0:
return
trj_file = HDF5TrajectoryFile(output_filename, mode='a')
try:
trj_file._create_earray(where='/', name='processed_folders',atom=trj_file.tables.StringAtom(1024), shape=(0,))
trj_file.topology = top.topology
except trj_file.tables.NodeError:
pass
for folder in sorted_folders:
if six.b(folder) in trj_file._handle.root.processed_folders: # On Py3, the pytables list of filenames has type byte (e.g. b"hey"), so we need to deal with this via six.
print("Already processed %s" % folder)
continue
print("Processing %s" % folder)
xtc_filename = os.path.join(folder, "frames.xtc")
trj = md.load(xtc_filename, top=top)
for frame in trj:
trj_file.write(coordinates=frame.xyz, cell_lengths=frame.unitcell_lengths, cell_angles=frame.unitcell_angles)
trj_file._handle.root.processed_folders.append([folder])
| lgpl-2.1 | 5,793,126,494,510,251,000 | 39.903361 | 188 | 0.62753 | false |
gusai-francelabs/datafari | windows/python/Lib/site-packages/pip/_vendor/requests/exceptions.py | 895 | 2517 | # -*- coding: utf-8 -*-
"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~
This module contains the set of Requests' exceptions.
"""
from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
class RequestException(IOError):
"""There was an ambiguous exception that occurred while handling your
request."""
def __init__(self, *args, **kwargs):
"""
Initialize RequestException with `request` and `response` objects.
"""
response = kwargs.pop('response', None)
self.response = response
self.request = kwargs.pop('request', None)
if (response is not None and not self.request and
hasattr(response, 'request')):
self.request = self.response.request
super(RequestException, self).__init__(*args, **kwargs)
class HTTPError(RequestException):
"""An HTTP error occurred."""
class ConnectionError(RequestException):
"""A Connection error occurred."""
class ProxyError(ConnectionError):
"""A proxy error occurred."""
class SSLError(ConnectionError):
"""An SSL error occurred."""
class Timeout(RequestException):
"""The request timed out.
Catching this error will catch both
:exc:`~requests.exceptions.ConnectTimeout` and
:exc:`~requests.exceptions.ReadTimeout` errors.
"""
class ConnectTimeout(ConnectionError, Timeout):
"""The request timed out while trying to connect to the remote server.
Requests that produced this error are safe to retry.
"""
class ReadTimeout(Timeout):
"""The server did not send any data in the allotted amount of time."""
class URLRequired(RequestException):
"""A valid URL is required to make a request."""
class TooManyRedirects(RequestException):
"""Too many redirects."""
class MissingSchema(RequestException, ValueError):
"""The URL schema (e.g. http or https) is missing."""
class InvalidSchema(RequestException, ValueError):
"""See defaults.py for valid schemas."""
class InvalidURL(RequestException, ValueError):
""" The URL provided was somehow invalid. """
class ChunkedEncodingError(RequestException):
"""The server declared chunked encoding but sent an invalid chunk."""
class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content"""
class StreamConsumedError(RequestException, TypeError):
"""The content for this response was already consumed"""
class RetryError(RequestException):
"""Custom retries logic failed"""
| apache-2.0 | -1,447,260,705,400,122,400 | 24.424242 | 74 | 0.692094 | false |
ToonTownInfiniteRepo/ToontownInfinite | Panda3D-1.9.0/python/Lib/JOD/NewJamoDrum.py | 3 | 10076 | """
A generic Jam-o-Drum input interface for the Jam-o-Drum that uses the OptiPAC
for both spinners and pads.
@author: U{Ben Buchwald <[email protected]>}
Last Updated: 2/27/2006
"""
from direct.showbase.DirectObject import DirectObject
import string, sys, md5
from pandac.PandaModules import Filename
from pandac.PandaModules import WindowProperties
from pandac.PandaModules import ConfigVariableList
class JamoDrum(DirectObject):
"""
Class representing input from a Jam-o-Drum. To handle Jam-o-Drum input
accept the Panda messages JOD_SPIN_x and JOD_HIT_x where x is a number between
0 and 3 for the 4 stations. Spin messages also pass a parameter which is the
angle spun in degrees. Hit messages also pass a parameter which is the force
the pad was hit with in the range 0.0-1.0 (will probably be fairly low). With
or without actual Jam-o-Drum hardware this class will automatically respond
to the keys (j,k,l),(s,d,f),(w,e,r), and (u,i,o) corresponding to spin left 10
degrees, hit with full force, and spin right 10 degrees respectively for each
of the stations. You must call L{poll} periodically to receive input from the
real Jam-o-Drum hardware.
"""
def __init__(self, useJOD=None):
"""
@keyword useJOD: connected to actual drumpads and spinners to read from (default: read from config.prc)
@type useJOD: bool
"""
self.configPath = Filename("/c/jamoconfig.txt")
self.logPath = Filename("/c/jamoconfig.log")
self.clearConfig()
self.simulate()
self.log = sys.stdout
self.configMissing = 0
self.hardwareChanged = 0
if (useJOD==None):
useJOD = base.config.GetBool("want-jamodrum", True)
self.useJOD = useJOD
if (useJOD):
self.setLog(self.logPath)
self.devindices = range(1,base.win.getNumInputDevices())
self.readConfigFile(self.configPath)
self.prepareDevices()
props = WindowProperties()
props.setCursorHidden(1)
if (sys.platform == "win32"):
props.setZOrder(WindowProperties.ZTop)
base.win.requestProperties(props)
self.setLog(None)
def setLog(self, fn):
if (self.log != sys.stdout):
self.log.close()
self.log = sys.stdout
if (fn):
try:
self.log = open(fn.toOsSpecific(), "w")
except:
self.log = sys.stdout
def generateMouseDigest(self):
m = md5.md5()
for i in range(base.win.getNumInputDevices()):
m.update(base.win.getInputDeviceName(i))
m.update("\n")
return m.hexdigest()
def reportDevices(self):
for devindex in self.devindices:
self.log.write("Encoder Detected: "+base.win.getInputDeviceName(devindex)+"\n")
def clearConfig(self):
self.ratio = 8.71
self.wheelConfigs = [[0,0],[0,0],[0,0],[0,0]]
self.padConfigs = [[0,0],[0,0],[0,0],[0,0]]
def getIntVal(self, spec):
try:
return int(spec)
except:
return -1
def setWheelConfig(self, station, axis, device):
if (axis=="x") or (axis=="X"): axis=0
if (axis=="y") or (axis=="Y"): axis=1
istation = self.getIntVal(station)
iaxis = self.getIntVal(axis)
if (istation < 0) or (istation > 3):
self.log.write("Wheel Config: Invalid station index "+str(station)+"\n")
return
if (iaxis < 0) or (iaxis > 1):
self.log.write("Wheel Config: Invalid axis index "+str(axis)+"\n")
return
self.wheelConfigs[istation] = [iaxis, str(device)]
def setPadConfig(self, station, button, device):
istation = self.getIntVal(station)
ibutton = self.getIntVal(button)
if (istation < 0) or (istation > 3):
self.log.write("Pad Config: Invalid station index "+str(station)+"\n")
return
if (ibutton < 0) or (ibutton > 2):
self.log.write("Pad Config: Invalid button index "+str(button)+"\n")
return
self.padConfigs[istation] = [ibutton, device]
def readConfigFile(self, fn):
digest = self.generateMouseDigest()
self.clearConfig()
try:
file = open(fn.toOsSpecific(),"r")
lines = file.readlines()
file.close()
except:
self.configMissing = 1
self.log.write("Could not read "+fn.toOsSpecific()+"\n")
return
for line in lines:
line = line.strip(" \t\r\n")
if (line=="") or (line[0]=="#"):
continue
words = line.split(" ")
if (words[0]=="wheel"):
if (len(words)==4):
self.setWheelConfig(words[1],words[2],words[3])
else:
self.log.write("Wheel Config: invalid syntax\n")
elif (words[0]=="pad"):
if (len(words)==4):
self.setPadConfig(words[1],words[2],words[3])
else:
self.log.write("Pad Config: invalid syntax\n")
elif (words[0]=="ratio"):
try:
self.ratio = float(words[1])
except:
self.log.write("Ratio Config: invalid syntax\n")
elif (words[0]=="digest"):
if (len(words)==2):
if (digest != words[1]):
self.hardwareChanged = 1
else:
self.log.write("Digest: invalid syntax")
else:
self.log.write("Unrecognized config directive "+line+"\n")
def writeConfigFile(self, fn):
try:
file = open(fn.toOsSpecific(),"w")
file.write("ratio "+str(self.ratio)+"\n")
for i in range(4):
wheelinfo = self.wheelConfigs[i]
file.write("wheel "+str(i)+" "+str(wheelinfo[0])+" "+wheelinfo[1]+"\n")
padinfo = self.padConfigs[i]
file.write("pad "+str(i)+" "+str(padinfo[0])+" "+padinfo[1]+"\n")
file.close()
except:
self.log.write("Could not write "+fn.toOsSpecific()+"\n")
def findWheel(self, devaxis, devname):
for wheelindex in range(4):
wheelinfo = self.wheelConfigs[wheelindex]
wheelaxis = wheelinfo[0]
wheeldevice = wheelinfo[1]
if (devname == wheeldevice) and (devaxis == wheelaxis):
return wheelindex
return -1
def findPad(self, devbutton, devname):
for padindex in range(4):
padinfo = self.padConfigs[padindex]
padbutton = padinfo[0]
paddevice = padinfo[1]
if (devname == paddevice) and (devbutton == padbutton):
return padindex
return -1
def prepareDevices(self):
"""
Each axis or button will be associated with a wheel or pad.
Any axis or button not in the config list will be associated
with wheel -1 or pad -1.
"""
self.polls = []
for devindex in range(1, base.win.getNumInputDevices()):
devname = base.win.getInputDeviceName(devindex)
for devaxis in range(2):
target = self.findWheel(devaxis, devname)
self.log.write("Axis "+str(devaxis)+" of "+devname+" controls wheel "+str(target)+"\n")
self.polls.append([devaxis, devindex, target, 0])
for devbutton in range(3):
target = self.findPad(devbutton, devname)
sig = "mousedev"+str(devindex)+"-mouse"+str(devbutton+1)
self.log.write("Button "+str(devbutton)+" of "+devname+" controls pad "+str(target)+"\n")
self.ignore(sig)
self.accept(sig, self.hit, [target, 1.0])
def simulate(self,spin=10.0,hit=1.0):
"""
Accept keyboard keys to simulate Jam-o-Drum input.
@keyword spin: degrees to spin for each keystroke (default: 10.0)
@type spin: float
@keyword hit: force to hit for each keystroke (default: 1.0)
@type hit: float
"""
self.accept('k',self.hit,[0,hit])
self.accept('d',self.hit,[1,hit])
self.accept('e',self.hit,[2,hit])
self.accept('i',self.hit,[3,hit])
self.accept('j',self.spin,[0,spin])
self.accept('l',self.spin,[0,-spin])
self.accept('s',self.spin,[1,spin])
self.accept('f',self.spin,[1,-spin])
self.accept('w',self.spin,[2,-spin])
self.accept('r',self.spin,[2,spin])
self.accept('u',self.spin,[3,-spin])
self.accept('o',self.spin,[3,spin])
# end simulate
def poll(self):
"""
Call this each frame to poll actual drumpads and spinners for input.
If input occurs messages will be sent.
"""
if (not self.useJOD):
return
offsets = [0.0,0.0,0.0,0.0]
for info in self.polls:
axis = info[0]
devindex = info[1]
wheel = info[2]
last = info[3]
if (axis == 0):
pos = base.win.getPointer(devindex).getX()
else:
pos = base.win.getPointer(devindex).getY()
if (pos != last):
diff = (pos-last)/self.ratio
if (wheel < 0):
offsets[0] += diff
offsets[1] += diff
offsets[2] += diff
offsets[3] += diff
else:
offsets[wheel] += diff
info[3] = pos
for i in range(4):
if (offsets[i] != 0.0):
self.spin(i,offsets[i])
def spin(self,station,angle):
"""
Sends a JOD_SPIN_<station> message
"""
sig = "JOD_SPIN_"+str(station)
messenger.send(sig,[angle])
def hit(self,station,force):
"""
Sends a JOD_HIT_<station> message
"""
if (station < 0):
for station in range(4):
sig = "JOD_HIT_"+str(station)
messenger.send(sig,[force])
else:
sig = "JOD_HIT_"+str(station)
messenger.send(sig,[force])
# end class JamoDrum
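# Hedged usage sketch (not part of the original module): a game object can
# subscribe to the messages documented on the class. The handler names below
# are hypothetical.
class ExampleStation(DirectObject):
    def __init__(self, station=0):
        self.accept('JOD_SPIN_%d' % station, self.on_spin)
        self.accept('JOD_HIT_%d' % station, self.on_hit)

    def on_spin(self, angle):
        print 'spun %.1f degrees' % angle

    def on_hit(self, force):
        print 'hit with force %.2f' % force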
| mit | -6,825,525,793,610,439,000 | 34.60424 | 111 | 0.557166 | false |
brennie/reviewboard | reviewboard/ssh/utils.py | 3 | 3339 | from __future__ import unicode_literals
import os
import paramiko
from django.utils import six
from reviewboard.ssh.client import SSHClient
from reviewboard.ssh.errors import (BadHostKeyError, SSHAuthenticationError,
SSHError, SSHInvalidPortError)
from reviewboard.ssh.policy import RaiseUnknownHostKeyPolicy
SSH_PORT = 22
try:
import urlparse
uses_netloc = urlparse.uses_netloc
urllib_parse = urlparse.urlparse
except ImportError:
import urllib.parse
uses_netloc = urllib.parse.uses_netloc
urllib_parse = urllib.parse.urlparse
# A list of known SSH URL schemes.
ssh_uri_schemes = ["ssh", "sftp"]
uses_netloc.extend(ssh_uri_schemes)
def humanize_key(key):
"""Returns a human-readable key as a series of hex characters."""
return ':'.join(["%02x" % ord(c) for c in key.get_fingerprint()])
def is_ssh_uri(url):
"""Returns whether or not a URL represents an SSH connection."""
return urllib_parse(url)[0] in ssh_uri_schemes
def check_host(netloc, username=None, password=None, namespace=None):
"""
Checks if we can connect to a host with a known key.
This will raise an exception if we cannot connect to the host. The
exception will be one of BadHostKeyError, UnknownHostKeyError, or
SCMError.
"""
from django.conf import settings
client = SSHClient(namespace=namespace)
client.set_missing_host_key_policy(RaiseUnknownHostKeyPolicy())
kwargs = {}
if ':' in netloc:
hostname, port = netloc.split(':')
try:
port = int(port)
except ValueError:
raise SSHInvalidPortError(port)
else:
hostname = netloc
port = SSH_PORT
# We normally want to notify on unknown host keys, but not when running
# unit tests.
if getattr(settings, 'RUNNING_TEST', False):
client.set_missing_host_key_policy(paramiko.WarningPolicy())
kwargs['allow_agent'] = False
try:
client.connect(hostname, port, username=username, password=password,
pkey=client.get_user_key(), **kwargs)
except paramiko.BadHostKeyException as e:
raise BadHostKeyError(e.hostname, e.key, e.expected_key)
except paramiko.AuthenticationException as e:
# Some AuthenticationException instances have allowed_types set,
# and some don't.
allowed_types = getattr(e, 'allowed_types', [])
if 'publickey' in allowed_types:
key = client.get_user_key()
else:
key = None
raise SSHAuthenticationError(allowed_types=allowed_types, user_key=key)
except paramiko.SSHException as e:
msg = six.text_type(e)
if msg == 'No authentication methods available':
raise SSHAuthenticationError
else:
raise SSHError(msg)
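# Hedged usage sketch (host and user name are assumptions): check_host
# reports its failure modes through the exception types imported above.
def _example_check_host():
    try:
        check_host('example.com:2222', username='reviewboard')
    except BadHostKeyError:
        pass  # the stored key no longer matches the host
    except SSHAuthenticationError as e:
        print(e.allowed_types)  # e.g. ['publickey', 'password']
    except SSHError as e:
        print('SSH failure: %s' % e)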
def register_rbssh(envvar):
"""Registers rbssh in an environment variable.
This is a convenience method for making sure that rbssh is set properly
in the environment for different tools. In some cases, we need to
specifically place it in the system environment using ``os.putenv``,
while in others (Mercurial, Bazaar), we need to place it in ``os.environ``.
"""
envvar = envvar.encode('utf-8')
os.putenv(envvar, b'rbssh')
os.environ[envvar] = b'rbssh'
| mit | 8,464,793,154,522,550,000 | 29.633028 | 79 | 0.667865 | false |
hbenniou/trunk | examples/gts-horse/gts-random-pack.py | 10 | 3271 |
""" CAUTION:
Running this script can take very long!
"""
from numpy import arange
from yade import pack
import pylab
# define the section shape as polygon in 2d; repeat first point at the end to close the polygon
poly=((1e-2,5e-2),(5e-2,2e-2),(7e-2,-2e-2),(1e-2,-5e-2),(1e-2,5e-2))
# show us the meridian shape
#pylab.plot(*zip(*poly)); pylab.xlim(xmin=0); pylab.grid(); pylab.title('Meridian of the revolution surface\n(close to continue)'); pylab.gca().set_aspect(aspect='equal',adjustable='box'); pylab.show()
# angles at which we want this polygon to appear
thetas=arange(0,pi/2,pi/24)
# create 3d points from the 2d ones, turning the 2d meridian around the +y axis
# for each angle, put the poly a little bit higher (+2e-3*theta);
# this is just to demonstrate that you can do whatever here as long as the resulting
# meridian has the same number of points
#
# There is origin (translation) and orientation arguments, allowing to transform all the 3d points once computed.
#
# without these transformation, it would look a little simpler:
# pts=pack.revolutionSurfaceMeridians([[(pt[0],pt[1]+2e-3*theta) for pt in poly] for theta in thetas],thetas
#
# pts=pack.revolutionSurfaceMeridians([[(pt[0],pt[1]+2e-3*theta) for pt in poly] for theta in thetas],thetas)
# connect meridians to make surfaces
# caps will close it at the beginning and the end
# threshold will merge points closer than 1e-4; this is important: we want it to be closed for filling
surf=pack.sweptPolylines2gtsSurface(pts,capStart=True,capEnd=True,threshold=1e-4)
# add the surface as facets to the simulation, to make it visible
O.bodies.append(pack.gtsSurface2Facets(surf,color=(1,0,1)))
# now fill the inGtsSurface predicate constructed form the same surface with sphere packing generated by TriaxialTest
# with given radius and standard deviation (see documentation of pack.randomDensePack)
#
# The memoizeDb will save resulting packing into given file and next time, if you run with the same
# parameters (or parameters that can be scaled to the same one),
# it will load the packing instead of running the triaxial compaction again.
# Try running for the second time to see the speed difference!
memoizeDb='/tmp/gts-triax-packings.sqlite'
sp=SpherePack()
sp=pack.randomDensePack(pack.inGtsSurface(surf),radius=5e-3,rRelFuzz=1e-4,memoizeDb=memoizeDb,returnSpherePack=True)
sp.toSimulation()
# We could also fill the horse with triaxial packing, but have nice approximation, the triaxial would run terribly long,
# since horse discard most volume of its bounding box
# Here, we would use a very crude one, however
if 1:
import gts
horse=gts.read(open('horse.coarse.gts')) #; horse.scale(.25,.25,.25)
O.bodies.append(pack.gtsSurface2Facets(horse))
sp=pack.randomDensePack(pack.inGtsSurface(horse),radius=5e-3,memoizeDb=memoizeDb,returnSpherePack=True)
sp.toSimulation()
horse.translate(.07,0,0)
O.bodies.append(pack.gtsSurface2Facets(horse))
# specifying spheresInCell makes the packing periodic, with the given number of spheres, proportions being equal to that of the predicate
sp=pack.randomDensePack(pack.inGtsSurface(horse),radius=1e-3,spheresInCell=2000,memoizeDb=memoizeDb,returnSpherePack=True)
sp.toSimulation()
| gpl-2.0 | -7,740,019,440,590,569,000 | 57.410714 | 201 | 0.772241 | false |
stscieisenhamer/pyqtgraph | pyqtgraph/units.py | 55 | 1402 | # -*- coding: utf-8 -*-
## Very simple unit support:
## - creates variable names like 'mV' and 'kHz'
## - the value assigned to the variable corresponds to the scale prefix
## (mV = 0.001)
## - the actual units are purely cosmetic for making code clearer:
##
## x = 20*pA is identical to x = 20*1e-12
## No unicode variable names (μ,Ω) allowed until python 3
SI_PREFIXES = 'yzafpnum kMGTPEZY'
UNITS = 'm,s,g,W,J,V,A,F,T,Hz,Ohm,S,N,C,px,b,B'.split(',')
allUnits = {}
def addUnit(p, n):
g = globals()
v = 1000**n
for u in UNITS:
g[p+u] = v
allUnits[p+u] = v
for p in SI_PREFIXES:
if p == ' ':
p = ''
n = 0
elif p == 'u':
n = -2
else:
n = SI_PREFIXES.index(p) - 8
addUnit(p, n)
cm = 0.01
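# Hedged usage sketch: each generated name is a plain scale factor, so unit
# arithmetic is ordinary numeric arithmetic, e.g.:
#
#   3 * kHz == 3000        # 'k' scales by 1000**1
#   5 * MW == 5000000      # 'M' scales by 1000**2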
def evalUnits(unitStr):
"""
Evaluate a unit string into ([numerators,...], [denominators,...])
Examples:
N m/s^2 => ([N, m], [s, s])
A*s / V => ([A, s], [V,])
"""
pass
def formatUnits(units):
"""
Format a unit specification ([numerators,...], [denominators,...])
into a string (this is the inverse of evalUnits)
"""
pass
def simplify(units):
"""
Cancel units that appear in both numerator and denominator, then attempt to replace
groups of units with single units where possible (ie, J/s => W)
"""
pass
| mit | -6,974,808,280,071,847,000 | 20.890625 | 88 | 0.542857 | false |
chjw8016/GreenOdoo7-haibao | openerp/addons/point_of_sale/controllers/main.py | 56 | 5627 | # -*- coding: utf-8 -*-
import logging
import simplejson
import os
import openerp
from openerp.addons.web.controllers.main import manifest_list, module_boot, html_template
class PointOfSaleController(openerp.addons.web.http.Controller):
_cp_path = '/pos'
@openerp.addons.web.http.httprequest
def app(self, req, s_action=None, **kw):
js = "\n ".join('<script type="text/javascript" src="%s"></script>' % i for i in manifest_list(req, None, 'js'))
css = "\n ".join('<link rel="stylesheet" href="%s">' % i for i in manifest_list(req, None, 'css'))
cookie = req.httprequest.cookies.get("instance0|session_id")
session_id = cookie.replace("%22","")
template = html_template.replace('<html','<html manifest="/pos/manifest?session_id=%s"'%session_id)
r = template % {
'js': js,
'css': css,
'modules': simplejson.dumps(module_boot(req)),
'init': 'var wc = new s.web.WebClient();wc.appendTo($(document.body));'
}
return r
@openerp.addons.web.http.httprequest
def manifest(self, req, **kwargs):
""" This generates a HTML5 cache manifest files that preloads the categories and products thumbnails
and other ressources necessary for the point of sale to work offline """
ml = ["CACHE MANIFEST"]
# loading all the images in the static/src/img/* directories
def load_css_img(srcdir,dstdir):
for f in os.listdir(srcdir):
path = os.path.join(srcdir,f)
dstpath = os.path.join(dstdir,f)
if os.path.isdir(path) :
load_css_img(path,dstpath)
elif f.endswith(('.png','.PNG','.jpg','.JPG','.jpeg','.JPEG','.gif','.GIF')):
ml.append(dstpath)
imgdir = openerp.modules.get_module_resource('point_of_sale','static/src/img');
load_css_img(imgdir,'/point_of_sale/static/src/img')
products = req.session.model('product.product')
for p in products.search_read([('pos_categ_id','!=',False)], ['name']):
product_id = p['id']
url = "/web/binary/image?session_id=%s&model=product.product&field=image&id=%s" % (req.session_id, product_id)
ml.append(url)
categories = req.session.model('pos.category')
for c in categories.search_read([],['name']):
category_id = c['id']
url = "/web/binary/image?session_id=%s&model=pos.category&field=image&id=%s" % (req.session_id, category_id)
ml.append(url)
ml += ["NETWORK:","*"]
m = "\n".join(ml)
return m
@openerp.addons.web.http.jsonrequest
def dispatch(self, request, iface, **kwargs):
method = 'iface_%s' % iface
return getattr(self, method)(request, **kwargs)
@openerp.addons.web.http.jsonrequest
def scan_item_success(self, request, ean):
"""
A product has been scanned with success
"""
print 'scan_item_success: ' + str(ean)
return
@openerp.addons.web.http.jsonrequest
def scan_item_error_unrecognized(self, request, ean):
"""
A product has been scanned without success
"""
print 'scan_item_error_unrecognized: ' + str(ean)
return
@openerp.addons.web.http.jsonrequest
def help_needed(self, request):
"""
The user wants an help (ex: light is on)
"""
print "help_needed"
return
@openerp.addons.web.http.jsonrequest
def help_canceled(self, request):
"""
The user stops the help request
"""
print "help_canceled"
return
@openerp.addons.web.http.jsonrequest
def weighting_start(self, request):
print "weighting_start"
return
@openerp.addons.web.http.jsonrequest
def weighting_read_kg(self, request):
print "weighting_read_kg"
return 0.0
@openerp.addons.web.http.jsonrequest
def weighting_end(self, request):
print "weighting_end"
return
@openerp.addons.web.http.jsonrequest
def payment_request(self, request, price):
"""
The PoS will activate the method payment
"""
print "payment_request: price:"+str(price)
return 'ok'
@openerp.addons.web.http.jsonrequest
def payment_status(self, request):
print "payment_status"
return { 'status':'waiting' }
@openerp.addons.web.http.jsonrequest
def payment_cancel(self, request):
print "payment_cancel"
return
@openerp.addons.web.http.jsonrequest
def transaction_start(self, request):
print 'transaction_start'
return
@openerp.addons.web.http.jsonrequest
def transaction_end(self, request):
print 'transaction_end'
return
@openerp.addons.web.http.jsonrequest
def cashier_mode_activated(self, request):
print 'cashier_mode_activated'
return
@openerp.addons.web.http.jsonrequest
def cashier_mode_deactivated(self, request):
print 'cashier_mode_deactivated'
return
@openerp.addons.web.http.jsonrequest
def open_cashbox(self, request):
print 'open_cashbox'
return
@openerp.addons.web.http.jsonrequest
def print_receipt(self, request, receipt):
print 'print_receipt' + str(receipt)
return
@openerp.addons.web.http.jsonrequest
def print_pdf_invoice(self, request, pdfinvoice):
print 'print_pdf_invoice' + str(pdfinvoice)
return
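# Hedged usage sketch (not part of the original controller): the jsonrequest
# endpoints above answer JSON-RPC posts; a barcode-scanner proxy might call
# one like this. Host, port and payload framing are assumptions.
def _example_scan_post():
    import urllib2
    import simplejson
    payload = simplejson.dumps({'jsonrpc': '2.0', 'method': 'call',
                                'params': {'ean': '5449000000996'}})
    request = urllib2.Request('http://localhost:8069/pos/scan_item_success',
                              payload, {'Content-Type': 'application/json'})
    return urllib2.urlopen(request)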
| mit | -2,109,445,237,641,725,000 | 32.1 | 127 | 0.60032 | false |
jejimenez/django | django/contrib/postgres/lookups.py | 199 | 1175 | from django.db.models import Lookup, Transform
class PostgresSimpleLookup(Lookup):
def as_sql(self, qn, connection):
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return '%s %s %s' % (lhs, self.operator, rhs), params
class FunctionTransform(Transform):
def as_sql(self, qn, connection):
lhs, params = qn.compile(self.lhs)
return "%s(%s)" % (self.function, lhs), params
class DataContains(PostgresSimpleLookup):
lookup_name = 'contains'
operator = '@>'
class ContainedBy(PostgresSimpleLookup):
lookup_name = 'contained_by'
operator = '<@'
class Overlap(PostgresSimpleLookup):
lookup_name = 'overlap'
operator = '&&'
class HasKey(PostgresSimpleLookup):
lookup_name = 'has_key'
operator = '?'
class HasKeys(PostgresSimpleLookup):
lookup_name = 'has_keys'
operator = '?&'
class HasAnyKeys(PostgresSimpleLookup):
lookup_name = 'has_any_keys'
operator = '?|'
class Unaccent(FunctionTransform):
bilateral = True
lookup_name = 'unaccent'
function = 'UNACCENT'
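# Hedged usage sketch (not part of this module): a lookup or transform must be
# registered on a field class before querysets can use it; registration
# normally happens in the app config. The field class is supplied by the
# caller here.
def _example_register(field_cls):
    field_cls.register_lookup(DataContains)   # enables field__contains=...
    field_cls.register_lookup(Unaccent)       # enables field__unaccent=...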
| bsd-3-clause | 6,577,103,604,244,673,000 | 22.039216 | 61 | 0.657872 | false |
bholley/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/test/test_extensions.py | 413 | 16128 | #!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for extensions module."""
import unittest
import zlib
import set_sys_path # Update sys.path to locate mod_pywebsocket module.
from mod_pywebsocket import common
from mod_pywebsocket import extensions
class ExtensionsTest(unittest.TestCase):
"""A unittest for non-class methods in extensions.py"""
def test_parse_window_bits(self):
self.assertRaises(ValueError, extensions._parse_window_bits, None)
self.assertRaises(ValueError, extensions._parse_window_bits, 'foobar')
self.assertRaises(ValueError, extensions._parse_window_bits, ' 8 ')
self.assertRaises(ValueError, extensions._parse_window_bits, 'a8a')
self.assertRaises(ValueError, extensions._parse_window_bits, '00000')
self.assertRaises(ValueError, extensions._parse_window_bits, '00008')
self.assertRaises(ValueError, extensions._parse_window_bits, '0x8')
self.assertRaises(ValueError, extensions._parse_window_bits, '9.5')
self.assertRaises(ValueError, extensions._parse_window_bits, '8.0')
        self.assertTrue(extensions._parse_window_bits('8'))
        self.assertTrue(extensions._parse_window_bits('15'))
self.assertRaises(ValueError, extensions._parse_window_bits, '-8')
self.assertRaises(ValueError, extensions._parse_window_bits, '0')
self.assertRaises(ValueError, extensions._parse_window_bits, '7')
self.assertRaises(ValueError, extensions._parse_window_bits, '16')
self.assertRaises(
ValueError, extensions._parse_window_bits, '10000000')
class CompressionMethodParameterParserTest(unittest.TestCase):
"""A unittest for _parse_compression_method which parses the compression
method description used by perframe-compression and permessage-compression
extension in their "method" extension parameter.
"""
def test_parse_method_simple(self):
method_list = extensions._parse_compression_method('foo')
self.assertEqual(1, len(method_list))
method = method_list[0]
self.assertEqual('foo', method.name())
self.assertEqual(0, len(method.get_parameters()))
def test_parse_method_with_parameter(self):
method_list = extensions._parse_compression_method('foo; x; y=10')
self.assertEqual(1, len(method_list))
method = method_list[0]
self.assertEqual('foo', method.name())
self.assertEqual(2, len(method.get_parameters()))
self.assertTrue(method.has_parameter('x'))
self.assertEqual(None, method.get_parameter_value('x'))
self.assertTrue(method.has_parameter('y'))
self.assertEqual('10', method.get_parameter_value('y'))
def test_parse_method_with_quoted_parameter(self):
method_list = extensions._parse_compression_method(
'foo; x="Hello World"; y=10')
self.assertEqual(1, len(method_list))
method = method_list[0]
self.assertEqual('foo', method.name())
self.assertEqual(2, len(method.get_parameters()))
self.assertTrue(method.has_parameter('x'))
self.assertEqual('Hello World', method.get_parameter_value('x'))
self.assertTrue(method.has_parameter('y'))
self.assertEqual('10', method.get_parameter_value('y'))
def test_parse_method_multiple(self):
method_list = extensions._parse_compression_method('foo, bar')
self.assertEqual(2, len(method_list))
self.assertEqual('foo', method_list[0].name())
self.assertEqual(0, len(method_list[0].get_parameters()))
self.assertEqual('bar', method_list[1].name())
self.assertEqual(0, len(method_list[1].get_parameters()))
def test_parse_method_multiple_methods_with_quoted_parameter(self):
method_list = extensions._parse_compression_method(
'foo; x="Hello World", bar; y=10')
self.assertEqual(2, len(method_list))
self.assertEqual('foo', method_list[0].name())
self.assertEqual(1, len(method_list[0].get_parameters()))
self.assertTrue(method_list[0].has_parameter('x'))
self.assertEqual('Hello World',
method_list[0].get_parameter_value('x'))
self.assertEqual('bar', method_list[1].name())
self.assertEqual(1, len(method_list[1].get_parameters()))
self.assertTrue(method_list[1].has_parameter('y'))
self.assertEqual('10', method_list[1].get_parameter_value('y'))
def test_create_method_desc_simple(self):
params = common.ExtensionParameter('foo')
desc = extensions._create_accepted_method_desc('foo',
params.get_parameters())
self.assertEqual('foo', desc)
def test_create_method_desc_with_parameters(self):
params = common.ExtensionParameter('foo')
params.add_parameter('x', 'Hello, World')
params.add_parameter('y', '10')
desc = extensions._create_accepted_method_desc('foo',
params.get_parameters())
self.assertEqual('foo; x="Hello, World"; y=10', desc)
class DeflateFrameExtensionProcessorParsingTest(unittest.TestCase):
"""A unittest for checking that DeflateFrameExtensionProcessor parses given
extension parameter correctly.
"""
def test_registry(self):
processor = extensions.get_extension_processor(
common.ExtensionParameter('deflate-frame'))
self.assertIsInstance(processor,
extensions.DeflateFrameExtensionProcessor)
processor = extensions.get_extension_processor(
common.ExtensionParameter('x-webkit-deflate-frame'))
self.assertIsInstance(processor,
extensions.DeflateFrameExtensionProcessor)
def test_minimal_offer(self):
processor = extensions.DeflateFrameExtensionProcessor(
common.ExtensionParameter('perframe-deflate'))
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertEqual(zlib.MAX_WBITS,
processor._rfc1979_deflater._window_bits)
self.assertFalse(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_max_window_bits(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('max_window_bits', '10')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertEqual(10, processor._rfc1979_deflater._window_bits)
def test_offer_with_out_of_range_max_window_bits(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('max_window_bits', '0')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_max_window_bits_without_value(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('max_window_bits', None)
processor = extensions.DeflateFrameExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_no_context_takeover(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('no_context_takeover', None)
processor = extensions.DeflateFrameExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertTrue(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_no_context_takeover_with_value(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('no_context_takeover', 'foobar')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_unknown_parameter(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('foo', 'bar')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
class PerMessageDeflateExtensionProcessorParsingTest(unittest.TestCase):
"""A unittest for checking that PerMessageDeflateExtensionProcessor parses
given extension parameter correctly.
"""
def test_registry(self):
processor = extensions.get_extension_processor(
common.ExtensionParameter('permessage-deflate'))
self.assertIsInstance(processor,
extensions.PerMessageDeflateExtensionProcessor)
def test_minimal_offer(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertEqual(zlib.MAX_WBITS,
processor._rfc1979_deflater._window_bits)
self.assertFalse(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_max_window_bits(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_max_window_bits', '10')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('server_max_window_bits', '10')],
response.get_parameters())
self.assertEqual(10, processor._rfc1979_deflater._window_bits)
def test_offer_with_out_of_range_max_window_bits(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_max_window_bits', '0')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_max_window_bits_without_value(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_max_window_bits', None)
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_no_context_takeover(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_no_context_takeover', None)
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('server_no_context_takeover', None)],
response.get_parameters())
self.assertTrue(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_no_context_takeover_with_value(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_no_context_takeover', 'foobar')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_unknown_parameter(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('foo', 'bar')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
class PerMessageDeflateExtensionProcessorBuildingTest(unittest.TestCase):
"""A unittest for checking that PerMessageDeflateExtensionProcessor builds
a response based on specified options correctly.
"""
def test_response_with_max_window_bits(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('client_max_window_bits', None)
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
processor.set_client_max_window_bits(10)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('client_max_window_bits', '10')],
response.get_parameters())
def test_response_with_max_window_bits_without_client_permission(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
processor.set_client_max_window_bits(10)
response = processor.get_extension_response()
self.assertIsNone(response)
def test_response_with_true_for_no_context_takeover(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
processor.set_client_no_context_takeover(True)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('client_no_context_takeover', None)],
response.get_parameters())
def test_response_with_false_for_no_context_takeover(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
processor.set_client_no_context_takeover(False)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
class PerMessageCompressExtensionProcessorTest(unittest.TestCase):
def test_registry(self):
processor = extensions.get_extension_processor(
common.ExtensionParameter('permessage-compress'))
self.assertIsInstance(processor,
extensions.PerMessageCompressExtensionProcessor)
if __name__ == '__main__':
unittest.main()
# vi:sts=4 sw=4 et
| mpl-2.0 | -3,178,761,645,231,186,400 | 43.8 | 79 | 0.692832 | false |
Jgarcia-IAS/localizacion | openerp/addons/l10n_be_invoice_bba/partner.py | 379 | 2268 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Created by Luc De Meyer
# Copyright (c) 2010 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import time
from openerp.tools.translate import _
class res_partner(osv.osv):
""" add field to indicate default 'Communication Type' on customer invoices """
_inherit = 'res.partner'
def _get_comm_type(self, cr, uid, context=None):
res = self.pool.get('account.invoice')._get_reference_type(cr, uid,context=context)
return res
_columns = {
'out_inv_comm_type': fields.selection(_get_comm_type, 'Communication Type', change_default=True,
help='Select Default Communication Type for Outgoing Invoices.' ),
'out_inv_comm_algorithm': fields.selection([
('random','Random'),
('date','Date'),
('partner_ref','Customer Reference'),
], 'Communication Algorithm',
help='Select Algorithm to generate the Structured Communication on Outgoing Invoices.' ),
}
def _commercial_fields(self, cr, uid, context=None):
return super(res_partner, self)._commercial_fields(cr, uid, context=context) + \
['out_inv_comm_type', 'out_inv_comm_algorithm']
    _defaults = {
'out_inv_comm_type': 'none',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 1,777,546,695,650,925,300 | 39.5 | 104 | 0.616843 | false |
PeterLauris/aifh | vol1/python-examples/examples/example_ocr.py | 4 | 8445 | #!/usr/bin/env python
"""
Artificial Intelligence for Humans
Volume 1: Fundamental Algorithms
Python Version
http://www.aifh.org
http://www.jeffheaton.com
Code repository:
https://github.com/jeffheaton/aifh
Copyright 2013 by Jeff Heaton
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For more information on Heaton Research copyrights, licenses
and trademarks visit:
http://www.heatonresearch.com/copyright
============================================================================================================
This example shows how to do very basic OCR using distance metrics. To use the program,
draw a character under "Draw Here", then type the letter you drew in the box next to "Learn:".
Click the "Learn:" button and the character is added to the trained characters. Repeat this for
a few characters. Finally, draw a character and click "Recognize". The previously trained
characters are scanned and the one with the shortest distance to your drawing is reported.
"""
__author__ = 'jheaton'
import Tkinter as tk
import tkMessageBox
import sys
from scipy.spatial import distance
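# Illustrative sketch (not part of the original program): the recognizer below
# is a nearest-neighbour search over flat binary grids. The patterns in this
# helper are made up purely for demonstration.
def _nearest_pattern_demo():
    known = {"I": [1, 0, 1, 0], "L": [1, 1, 0, 0]}
    sample = [1, 0, 1, 1]
    # Pick the learned key whose stored grid is closest to the drawn sample.
    return min(known, key=lambda key: distance.euclidean(sample, known[key]))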
class Application(tk.Frame):
DRAW_AREA = 256
DOWN_SAMPLE_WIDTH = 5
DOWN_SAMPLE_HEIGHT = 7
def __init__(self, master=None):
tk.Frame.__init__(self, master)
self.grid()
self.b1 = None
self.canvas_draw = None
self.x_old = None
self.y_old = None
self.button_quit = None
self.button_recognize = None
self.button_learn = None
self.entry_learn_char = None
self.button_clear = None
self.list_learned = None
self.learned_patterns = {}
self.create_widgets()
self.clear()
def create_widgets(self):
l1 = tk.Label(self, text="Draw Here")
l1.grid(row=0, column=0)
l1 = tk.Label(self, text="Trained Characters")
l1.grid(row=0, column=1, columnspan=2)
self.canvas_draw = tk.Canvas(self, width=Application.DRAW_AREA, height=Application.DRAW_AREA)
self.canvas_draw.grid(row=1, column=0)
        self.list_learned = tk.Listbox(self, height=10)
self.list_learned.grid(row=1, column=1, sticky=tk.N + tk.E + tk.S + tk.W, columnspan=2)
self.button_learn = tk.Button(self, text='Learn:', command=self.learn)
self.button_learn.grid(row=2, column=0, sticky=tk.N + tk.E + tk.S + tk.W)
self.entry_learn_char = tk.Entry(self)
self.entry_learn_char.grid(row=2, column=1, sticky=tk.N + tk.E + tk.S + tk.W, columnspan=2)
self.button_recognize = tk.Button(self, text='Recognize', command=self.recognize)
self.button_recognize.grid(row=3, column=0, sticky=tk.N + tk.E + tk.S + tk.W)
self.button_quit = tk.Button(self, text='Quit', command=self.quit)
self.button_quit.grid(row=3, column=1, sticky=tk.N + tk.E + tk.S + tk.W)
self.button_clear = tk.Button(self, text='Clear', command=self.clear)
self.button_clear.grid(row=3, column=2, sticky=tk.N + tk.E + tk.S + tk.W)
self.canvas_draw.bind("<Motion>", self.motion)
self.canvas_draw.bind("<ButtonPress-1>", self.b1down)
self.canvas_draw.bind("<ButtonRelease-1>", self.b1up)
def b1down(self, event):
self.b1 = "down" # you only want to draw when the button is down
# because "Motion" events happen -all the time-
def b1up(self, event):
self.b1 = "up"
self.x_old = None # reset the line when you let go of the button
self.y_old = None
def motion(self, event):
if self.b1 == "down":
if self.x_old is not None and self.y_old is not None:
event.widget.create_line(self.x_old, self.y_old, event.x, event.y, smooth=tk.TRUE)
self.draw_data[event.y][event.x] = True
self.x_old = event.x
self.y_old = event.y
def vertical_line_clear(self, col):
for row in range(0, Application.DRAW_AREA):
if self.draw_data[row][col]:
return False
return True
def horizontal_line_clear(self, row):
for col in range(0, Application.DRAW_AREA):
if self.draw_data[row][col]:
return False
return True
    def down_sample_region(self, x, y):
        # Map the (x, y) cell of the down-sampled grid back to a region of
        # the clipped drawing area and report whether any pixel in it is set.
        start_x = int(self.clip_left + (x * self.ratioX))
        start_y = int(self.clip_top + (y * self.ratioY))
        end_x = int(start_x + self.ratioX)
        end_y = int(start_y + self.ratioY)
        for yy in range(start_y, end_y + 1):
            for xx in range(start_x, end_x + 1):
                if self.draw_data[yy][xx]:
                    return True
        return False
def down_sample(self):
# Find bounding rectangle.
# Find left side of bounding rectangle
self.clip_left = 0
for col in range(0, Application.DRAW_AREA):
if not self.vertical_line_clear(col):
self.clip_left = col
break
# Find right side of bounding rectangle
self.clip_right = 0
for col in range(Application.DRAW_AREA - 1, -1, -1):
if not self.vertical_line_clear(col):
self.clip_right = col
break
# Find top side of bounding rectangle
self.clip_top = 0
for row in range(0, Application.DRAW_AREA):
if not self.horizontal_line_clear(row):
self.clip_top = row
break
# Find bottom side of bounding rectangle
self.clip_bottom = 0
for row in range(Application.DRAW_AREA - 1, -1, -1):
if not self.horizontal_line_clear(row):
self.clip_bottom = row
break
self.canvas_draw.create_rectangle(
self.clip_left,
self.clip_top,
self.clip_right,
self.clip_bottom)
# Now down sample to 5x7.
result = []
self.ratioX = float(self.clip_right - self.clip_left) / Application.DOWN_SAMPLE_WIDTH
self.ratioY = float(self.clip_bottom - self.clip_top) / Application.DOWN_SAMPLE_HEIGHT
for y in range(0, Application.DOWN_SAMPLE_HEIGHT):
for x in range(0, Application.DOWN_SAMPLE_WIDTH):
if self.down_sample_region(x, y):
result.append(1)
else:
result.append(0)
return result
def clear(self):
self.entry_learn_char.delete(0, tk.END)
self.canvas_draw.delete("all")
self.draw_data = [[False] * Application.DRAW_AREA for _ in range(Application.DRAW_AREA)]
def recognize(self):
best = "?"
best_distance = sys.float_info.max
sample = self.down_sample()
for key in self.learned_patterns.keys():
other_sample = self.learned_patterns[key]
dist = distance.euclidean(sample, other_sample)
if dist < best_distance:
best_distance = dist
best = key
tkMessageBox.showinfo("Learn", "I believe you drew a: " + best)
def learn(self):
learned_char = self.entry_learn_char.get()
if len(learned_char) > 1 or len(learned_char) == 0:
tkMessageBox.showinfo("Learn", "Please enter a single character to learn")
return
if learned_char in self.learned_patterns:
tkMessageBox.showinfo("Learn", "Already learned that character, please choose another")
return
self.list_learned.insert(tk.END, learned_char)
self.learned_patterns[learned_char] = self.down_sample()
# Clear and notify user.
self.clear()
tkMessageBox.showinfo("Learn", "Learned the pattern for: " + learned_char)
app = Application()
app.master.title('Python OCR')
app.mainloop() | apache-2.0 | -4,258,145,730,547,948,500 | 36.207048 | 108 | 0.596803 | false |
wagtail/wagtail | wagtail/tests/testapp/migrations/0058_blockcountsstreammodel_minmaxcountstreammodel.py | 6 | 1214 | # Generated by Django 2.1.7 on 2019-03-28 02:30
from django.db import migrations, models
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('tests', '0057_customdocumentwithauthor'),
]
operations = [
migrations.CreateModel(
name='BlockCountsStreamModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', wagtail.core.fields.StreamField([('text', wagtail.core.blocks.CharBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])),
],
),
migrations.CreateModel(
name='MinMaxCountStreamModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', wagtail.core.fields.StreamField([('text', wagtail.core.blocks.CharBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])),
],
),
]
| bsd-3-clause | -4,844,687,278,163,032,000 | 39.466667 | 209 | 0.626853 | false |